From fdb3d5c9486d6e6cc632e4b83b74bffaeabb7668 Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Thu, 12 Sep 2024 16:58:24 +0200 Subject: [PATCH 01/36] feat: initial template for the feature --- cmd/arc/services/metamorph.go | 5 +- internal/blocktx/processor.go | 119 +++++++++++++++--- internal/blocktx/processor_helpers.go | 24 ++++ .../postgresql/get_mined_transactions.go | 15 +-- internal/blocktx/store/store.go | 2 +- .../double_spend_integration_test.go | 2 +- internal/metamorph/peer_handler.go | 14 +-- internal/metamorph/peer_handler_test.go | 14 +-- internal/metamorph/processor.go | 20 ++- internal/metamorph/processor_test.go | 4 +- internal/metamorph/types.go | 2 +- internal/metamorph/zmq.go | 16 +-- internal/metamorph/zmq_test.go | 8 +- 13 files changed, 180 insertions(+), 65 deletions(-) diff --git a/cmd/arc/services/metamorph.go b/cmd/arc/services/metamorph.go index 1e7831e23..cce7f6ad3 100644 --- a/cmd/arc/services/metamorph.go +++ b/cmd/arc/services/metamorph.go @@ -196,7 +196,6 @@ func StartMetamorph(logger *slog.Logger, arcConfig *config.ArcConfig, cacheStore server, err = metamorph.NewServer(arcConfig.PrometheusEndpoint, arcConfig.GrpcMessageSize, logger, metamorphStore, processor, arcConfig.Tracing, optsServer...) - if err != nil { stopFn() return nil, fmt.Errorf("create GRPCServer failed: %v", err) @@ -273,7 +272,7 @@ func NewMetamorphStore(dbConfig *config.DbConfig, tracingConfig *config.TracingC return s, err } -func initPeerManager(logger *slog.Logger, s store.MetamorphStore, arcConfig *config.ArcConfig) (p2p.PeerManagerI, *metamorph.PeerHandler, chan *metamorph.PeerTxMessage, error) { +func initPeerManager(logger *slog.Logger, s store.MetamorphStore, arcConfig *config.ArcConfig) (p2p.PeerManagerI, *metamorph.PeerHandler, chan *metamorph.TxStatusMessage, error) { network, err := config.GetNetwork(arcConfig.Network) if err != nil { return nil, nil, nil, fmt.Errorf("failed to get network: %v", err) @@ -281,7 +280,7 @@ func initPeerManager(logger *slog.Logger, s store.MetamorphStore, arcConfig *con logger.Info("Assuming bitcoin network", "network", network) - messageCh := make(chan *metamorph.PeerTxMessage, 10000) + messageCh := make(chan *metamorph.TxStatusMessage, 10000) var pmOpts []p2p.PeerManagerOptions if arcConfig.Metamorph.MonitorPeers { pmOpts = append(pmOpts, p2p.WithRestartUnhealthyPeers()) diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index 217a825dd..81a511f74 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -378,7 +378,12 @@ func (p *Processor) StartProcessRequestTxs() { } func (p *Processor) publishMinedTxs(txHashes []*chainhash.Hash) error { - minedTxs, err := p.store.GetMinedTransactions(p.ctx, txHashes) + hashesBytes := make([][]byte, len(txHashes)) + for i, h := range txHashes { + hashesBytes[i] = h[:] + } + + minedTxs, err := p.store.GetMinedTransactions(p.ctx, hashesBytes, blocktx_api.Status_LONGEST) if err != nil { return fmt.Errorf("failed to get mined transactions: %v", err) } @@ -479,9 +484,9 @@ func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { return err } - // find competing chains back to the common ancestor - // get all registered transactions - // prepare msg with competing blocks + // check for all registered transactions in the longest chain + // any registered transactions that are in this block but not + // in the longest chain - publish to metamorph as MINED_IN_STALE_CHAIN incomingBlock.Status = blocktx_api.Status_STALE if hasGreatestChainwork { @@ 
-512,7 +517,7 @@ func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { return err } - if err = p.markTransactionsAsMined(ctx, blockID, calculatedMerkleTree, msg.Height, &blockHash); err != nil { + if err = p.storeAndPublishTransactions(ctx, blockId, incomingBlock, calculatedMerkleTree); err != nil { p.logger.Error("unable to mark block as mined", slog.String("hash", blockHash.String()), slog.String("err", err.Error())) return err } @@ -592,7 +597,7 @@ func (p *Processor) hasGreatestChainwork(ctx context.Context, incomingBlock *blo return tipChainWork.Cmp(incomingBlockChainwork) < 0, nil } -func (p *Processor) performReorg(ctx context.Context, incomingBlock *blocktx_api.Block) error { +func (p *Processor) performReorg(ctx context.Context, incomingBlock *blocktx_api.Block, transactionHashes []*chainhash.Hash) error { staleBlocks, err := p.store.GetStaleChainBackFromHash(ctx, incomingBlock.PreviousHash) if err != nil { return err @@ -608,23 +613,78 @@ func (p *Processor) performReorg(ctx context.Context, incomingBlock *blocktx_api return err } + staleHashes := make([][]byte, 0) + longestHashes := make([][]byte, len(longestBlocks)) blockStatusUpdates := make([]store.BlockStatusUpdate, 0) for _, b := range staleBlocks { + staleHashes = append(staleHashes, b.Hash) update := store.BlockStatusUpdate{Hash: b.Hash, Status: blocktx_api.Status_LONGEST} blockStatusUpdates = append(blockStatusUpdates, update) } - for _, b := range longestBlocks { + for i, b := range longestBlocks { + longestHashes[i] = b.Hash update := store.BlockStatusUpdate{Hash: b.Hash, Status: blocktx_api.Status_STALE} blockStatusUpdates = append(blockStatusUpdates, update) } + prevStaleTxs, err := p.store.GetRegisteredTxsByBlockHashes(staleHashes, blocktx_api.Status_STALE) // TODO: implement this query + if err != nil { + return err + } + + prevStaleTxs2, err := p.store.GetRegisteredTxsByBlockHashes(transactionHashes, blocktx_api.Status_STALE) // TODO: implement this query + if err != nil { + return err + } + + prevStaleTxs = append(prevStaleTxs, prevStaleTxs2) + + prevLongestTxs, err := p.store.GetRegisteredTxsByBlockHashes(longestHashes, blocktx_api.Status_LONGEST) // TODO: implement this query + if err != nil { + return err + } + + minedTxs, staleTxs := findMinedAndStaleTxs(prevStaleTxs, prevLongestTxs) + err = p.store.UpdateBlocksStatuses(ctx, blockStatusUpdates) - return err + if err != nil { + return err + } + + for _, minedTx := range minedTxs { + minedTxBlock := &blocktx_api.TransactionBlock{ + TransactionHash: minedTx.TxHash, + BlockHash: minedTx.BlockHash, + BlockHeight: minedTx.BlockHeight, + MerklePath: minedTx.MerklePath, + } + + err := p.mqClient.PublishMarshal(MinedTxsTopic, minedTxBlock) + if err != nil { + p.logger.Error("failed to publish mined tx after reorg", slog.Uint64("height", minedTx.BlockHeight), slog.String("err", err.Error())) + } + } + + for _, staleTx := range staleTxs { + staleTxBlock := &blocktx_api.TransactionBlock{ + TransactionHash: staleTx.TxHash, + BlockHash: staleTx.BlockHash, + BlockHeight: staleTx.BlockHeight, + MerklePath: staleTx.MerklePath, + } + + p.mqClient.PublishMarshal(StaleTxsTopic, staleTxBlock) // TODO: add this topic + if err != nil { + p.logger.Error("failed to publish stale tx after reorg", slog.Uint64("height", staleTx.BlockHeight), slog.String("err", err.Error())) + } + } + + return nil } -func (p *Processor) markTransactionsAsMined(ctx context.Context, blockID uint64, merkleTree []*chainhash.Hash, blockHeight uint64, blockhash *chainhash.Hash) (err 
error) { +func (p *Processor) storeAndPublishTransactions(ctx context.Context, blockId uint64, block *blocktx_api.Block, merkleTree []*chainhash.Hash) (err error) { ctx, span := tracing.StartTracing(ctx, "markTransactionsAsMined", p.tracingEnabled, p.tracingAttributes...) defer func() { tracing.EndTracing(span, err) @@ -633,6 +693,11 @@ func (p *Processor) markTransactionsAsMined(ctx context.Context, blockID uint64, txs := make([]store.TxWithMerklePath, 0, p.transactionStorageBatchSize) leaves := merkleTree[:(len(merkleTree)+1)/2] + blockhash, err := chainhash.NewHash(block.Hash) + if err != nil { + return fmt.Errorf("failed to create block hash for block at height %d", block.Height) + } + var totalSize int for totalSize = 1; totalSize < len(leaves); totalSize++ { if leaves[totalSize] == nil { @@ -653,9 +718,9 @@ func (p *Processor) markTransactionsAsMined(ctx context.Context, blockID uint64, break } - bump, err := bc.NewBUMPFromMerkleTreeAndIndex(blockHeight, merkleTree, uint64(txIndex)) + bump, err := bc.NewBUMPFromMerkleTreeAndIndex(block.Height, merkleTree, uint64(txIndex)) if err != nil { - return errors.Join(ErrFailedToCreateBUMP, err) + return fmt.Errorf("failed to create new bump for tx hash %s from merkle tree and index at block height %d: %v", hash.String(), block.Height, err) } bumpHex, err := bump.String() @@ -676,23 +741,31 @@ func (p *Processor) markTransactionsAsMined(ctx context.Context, blockID uint64, // free up memory txs = txs[:0] + // when the block is not from the longest chain, just store + // tranasctions in db and don't publish them to metamorph + // TODO: change this to != Status_LONGEST when handling + // ORPHANED blocks is implemented + if block.Status == blocktx_api.Status_STALE { + continue + } + for _, updResp := range updateResp { txBlock := &blocktx_api.TransactionBlock{ TransactionHash: updResp.Hash[:], - BlockHash: blockhash[:], - BlockHeight: blockHeight, + BlockHash: block.Hash, + BlockHeight: block.Height, MerklePath: updResp.MerklePath, } err = p.mqClient.PublishMarshal(ctx, MinedTxsTopic, txBlock) if err != nil { - p.logger.Error("failed to publish mined txs", slog.String("hash", blockhash.String()), slog.Uint64("height", blockHeight), slog.String("err", err.Error())) + p.logger.Error("failed to publish mined txs", slog.String("hash", blockhash.String()), slog.Uint64("height", block.Height), slog.String("err", err.Error())) } } } if percentage, found := progress[txIndex+1]; found { if totalSize > 0 { - p.logger.Info(fmt.Sprintf("%d txs out of %d marked as mined", txIndex+1, totalSize), slog.Int("percentage", percentage), slog.String("hash", blockhash.String()), slog.Uint64("height", blockHeight), slog.String("duration", time.Since(now).String())) + p.logger.Info(fmt.Sprintf("%d txs out of %d marked as mined", txIndex+1, totalSize), slog.Int("percentage", percentage), slog.String("hash", blockhash.String()), slog.Uint64("height", block.Height), slog.String("duration", time.Since(now).String())) } } } @@ -702,19 +775,27 @@ func (p *Processor) markTransactionsAsMined(ctx context.Context, blockID uint64, // update all remaining transactions updateResp, err := p.store.UpsertBlockTransactions(ctx, blockID, txs) if err != nil { - return errors.Join(ErrFailedToInsertBlockTransactions, fmt.Errorf("block height: %d", blockHeight), err) + return errors.Join(ErrFailedToInsertBlockTransactions, fmt.Errorf("block height: %d", block.Height), err) + } + + // when the block is not from the longest chain, just store + // tranasctions in db and don't publish them to 
metamorph + // TODO: change this to != Status_LONGEST when handling + // ORPHANED blocks is implemented + if block.Status == blocktx_api.Status_STALE { + return nil } for _, updResp := range updateResp { txBlock := &blocktx_api.TransactionBlock{ TransactionHash: updResp.Hash[:], - BlockHash: blockhash[:], - BlockHeight: blockHeight, + BlockHash: block.Hash, + BlockHeight: block.Height, MerklePath: updResp.MerklePath, } err = p.mqClient.PublishMarshal(ctx, MinedTxsTopic, txBlock) if err != nil { - p.logger.Error("failed to publish mined txs", slog.String("hash", blockhash.String()), slog.Uint64("height", blockHeight), slog.String("err", err.Error())) + p.logger.Error("failed to publish mined txs", slog.String("hash", blockhash.String()), slog.Uint64("height", block.Height), slog.String("err", err.Error())) } } diff --git a/internal/blocktx/processor_helpers.go b/internal/blocktx/processor_helpers.go index 078972a04..23eb5f77c 100644 --- a/internal/blocktx/processor_helpers.go +++ b/internal/blocktx/processor_helpers.go @@ -6,6 +6,7 @@ import ( blockchain "github.com/bitcoin-sv/arc/internal/blocktx/blockchain_communication" "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" + "github.com/bitcoin-sv/arc/internal/blocktx/store" ) func createBlock(msg *blockchain.BlockMessage, prevBlock *blocktx_api.Block, longestTipExists bool) *blocktx_api.Block { @@ -50,6 +51,29 @@ func getLowestHeight(blocks []*blocktx_api.Block) uint64 { return lowest } +func findMinedAndStaleTxs(prevStaleTxs, prevLongestTxs []store.GetMinedTransactionResult) (nowMinedTxs, nowStaleTxs []store.GetMinedTransactionResult) { + prevStaleMap := make(map[string]store.GetMinedTransactionResult) + + for _, tx := range prevStaleTxs { + prevStaleMap[string(tx.TxHash)] = tx + // every tx that was in previously stale blocks is to + // be mined regardless of whether it was also in the + // previously longest chain (update block info) + // or previously stale chain (new mined) + nowMinedTxs = append(nowMinedTxs, tx) + } + + for _, longestTx := range prevLongestTxs { + if _, found := prevStaleMap[string(longestTx.TxHash)]; !found { + // if a transaction that was previously in a longest chain is + // not found in the previously stale blocks - it is now stale + nowStaleTxs = append(nowStaleTxs, longestTx) + } + } + + return +} + // calculateChainwork calculates chainwork from the given difficulty bits // // This function comes from block-header-service: diff --git a/internal/blocktx/store/postgresql/get_mined_transactions.go b/internal/blocktx/store/postgresql/get_mined_transactions.go index 67032c79a..525e03adc 100644 --- a/internal/blocktx/store/postgresql/get_mined_transactions.go +++ b/internal/blocktx/store/postgresql/get_mined_transactions.go @@ -4,24 +4,19 @@ import ( "context" "github.com/lib/pq" - "github.com/libsv/go-p2p/chaincfg/chainhash" + "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" "github.com/bitcoin-sv/arc/internal/blocktx/store" "github.com/bitcoin-sv/arc/internal/tracing" ) -func (p *PostgreSQL) GetMinedTransactions(ctx context.Context, hashes []*chainhash.Hash) (result []store.GetMinedTransactionResult, err error) { +func (p *PostgreSQL) GetMinedTransactions(ctx context.Context, hashes [][]byte, blockStatus blocktx_api.Status) (result []store.GetMinedTransactionResult, err error) { ctx, span := tracing.StartTracing(ctx, "GetMinedTransactions", p.tracingEnabled, p.tracingAttributes...) 
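// Illustrative sketch (not part of this patch): expected behaviour of the
// findMinedAndStaleTxs helper added above in processor_helpers.go. The test
// name and the hash values are hypothetical; the store.GetMinedTransactionResult
// type is the one this patch uses.
package blocktx

import (
	"testing"

	"github.com/bitcoin-sv/arc/internal/blocktx/store"
	"github.com/stretchr/testify/require"
)

func TestFindMinedAndStaleTxsSketch(t *testing.T) {
	// tx1 was in both chains, tx2 only in the previously stale (now longest)
	// chain, tx3 only in the previously longest (now stale) chain
	prevStale := []store.GetMinedTransactionResult{{TxHash: []byte("tx1")}, {TxHash: []byte("tx2")}}
	prevLongest := []store.GetMinedTransactionResult{{TxHash: []byte("tx1")}, {TxHash: []byte("tx3")}}

	mined, stale := findMinedAndStaleTxs(prevStale, prevLongest)

	// every tx from the previously stale chain is re-published as MINED,
	// whether or not it also appeared in the previously longest chain
	require.Equal(t, []store.GetMinedTransactionResult{{TxHash: []byte("tx1")}, {TxHash: []byte("tx2")}}, mined)

	// txs found only in the previously longest chain are now stale
	require.Equal(t, []store.GetMinedTransactionResult{{TxHash: []byte("tx3")}}, stale)
}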
defer func() { tracing.EndTracing(span, err) }() - var hashSlice [][]byte - for _, hash := range hashes { - hashSlice = append(hashSlice, hash[:]) - } - - result = make([]store.GetMinedTransactionResult, 0, len(hashSlice)) + result = make([]store.GetMinedTransactionResult, 0, len(hashes)) q := ` SELECT @@ -32,10 +27,10 @@ func (p *PostgreSQL) GetMinedTransactions(ctx context.Context, hashes []*chainha FROM blocktx.transactions AS t JOIN blocktx.block_transactions_map AS m ON t.id = m.txid JOIN blocktx.blocks AS b ON m.blockid = b.id - WHERE t.hash = ANY($1) + WHERE t.hash = ANY($1) AND b.status = $2 ` - rows, err := p.db.QueryContext(ctx, q, pq.Array(hashSlice)) + rows, err := p.db.QueryContext(ctx, q, pq.Array(hashes), blockStatus) if err != nil { return nil, err } diff --git a/internal/blocktx/store/store.go b/internal/blocktx/store/store.go index 97d45b7dd..4c18e5287 100644 --- a/internal/blocktx/store/store.go +++ b/internal/blocktx/store/store.go @@ -37,7 +37,7 @@ type BlocktxStore interface { MarkBlockAsDone(ctx context.Context, hash *chainhash.Hash, size uint64, txCount uint64) error GetBlockGaps(ctx context.Context, heightRange int) ([]*BlockGap, error) ClearBlocktxTable(ctx context.Context, retentionDays int32, table string) (*blocktx_api.RowsAffectedResponse, error) - GetMinedTransactions(ctx context.Context, hashes []*chainhash.Hash) ([]GetMinedTransactionResult, error) + GetMinedTransactions(ctx context.Context, hashes [][]byte, blockStatus blocktx_api.Status) ([]GetMinedTransactionResult, error) GetLongestChainFromHeight(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) GetStaleChainBackFromHash(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) UpdateBlocksStatuses(ctx context.Context, blockStatusUpdates []BlockStatusUpdate) error diff --git a/internal/metamorph/integration_test/double_spend_integration_test.go b/internal/metamorph/integration_test/double_spend_integration_test.go index dd6ce5ef6..9d8452c46 100644 --- a/internal/metamorph/integration_test/double_spend_integration_test.go +++ b/internal/metamorph/integration_test/double_spend_integration_test.go @@ -102,7 +102,7 @@ func TestDoubleSpendDetection(t *testing.T) { logger := slog.New(slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{Level: slog.LevelDebug})) - statusMessageChannel := make(chan *metamorph.PeerTxMessage, 10) + statusMessageChannel := make(chan *metamorph.TxStatusMessage, 10) minedTxChannel := make(chan *blocktx_api.TransactionBlock, 10) mockedZMQ := &mocks.ZMQIMock{ diff --git a/internal/metamorph/peer_handler.go b/internal/metamorph/peer_handler.go index 9c8f403e6..64ee94e9d 100644 --- a/internal/metamorph/peer_handler.go +++ b/internal/metamorph/peer_handler.go @@ -15,13 +15,13 @@ var ErrTxRejectedByPeer = errors.New("transaction rejected by peer") type PeerHandler struct { store store.MetamorphStore - messageCh chan *PeerTxMessage + messageCh chan *TxStatusMessage cancelAll context.CancelFunc ctx context.Context } -func NewPeerHandler(s store.MetamorphStore, messageCh chan *PeerTxMessage) *PeerHandler { +func NewPeerHandler(s store.MetamorphStore, messageCh chan *TxStatusMessage) *PeerHandler { ph := &PeerHandler{ store: s, messageCh: messageCh, @@ -37,7 +37,7 @@ func NewPeerHandler(s store.MetamorphStore, messageCh chan *PeerTxMessage) *Peer // HandleTransactionSent is called when a transaction is sent to a peer. 
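// Illustrative sketch (not part of this patch): PeerTxMessage is renamed to
// TxStatusMessage, which appears to reflect that the status channel now carries
// transaction status updates from several sources (peer handlers, ZMQ), not only
// peers. A minimal producer/consumer of that channel; the metamorph_api import
// path is assumed to mirror the blocktx_api layout and the values are hypothetical.
package main

import (
	"fmt"

	"github.com/bitcoin-sv/arc/internal/metamorph"
	"github.com/bitcoin-sv/arc/internal/metamorph/metamorph_api"
	"github.com/libsv/go-p2p/chaincfg/chainhash"
)

func main() {
	statusCh := make(chan *metamorph.TxStatusMessage, 1)

	hash, _ := chainhash.NewHashFromStr("d64adfce6b105dc6bdf475494925bf06802a41a0582586f33c2b16d537a0b7b6")
	statusCh <- &metamorph.TxStatusMessage{
		Hash:   hash,
		Status: metamorph_api.Status_SEEN_ON_NETWORK,
		Peer:   "example-peer:18333",
	}

	msg := <-statusCh
	fmt.Println(msg.Hash, msg.Status) // consumer applies the status update to the store
}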
func (m *PeerHandler) HandleTransactionSent(msg *wire.MsgTx, peer p2p.PeerI) error { hash := msg.TxHash() - m.messageCh <- &PeerTxMessage{ + m.messageCh <- &TxStatusMessage{ Hash: &hash, Status: metamorph_api.Status_SENT_TO_NETWORK, Peer: peer.String(), @@ -49,7 +49,7 @@ func (m *PeerHandler) HandleTransactionSent(msg *wire.MsgTx, peer p2p.PeerI) err // HandleTransactionAnnouncement is a message sent to the PeerHandler when a transaction INV message is received from a peer. func (m *PeerHandler) HandleTransactionAnnouncement(msg *wire.InvVect, peer p2p.PeerI) error { select { - case m.messageCh <- &PeerTxMessage{ + case m.messageCh <- &TxStatusMessage{ Hash: &msg.Hash, Status: metamorph_api.Status_SEEN_ON_NETWORK, Peer: peer.String(), @@ -62,7 +62,7 @@ func (m *PeerHandler) HandleTransactionAnnouncement(msg *wire.InvVect, peer p2p. // HandleTransactionRejection is called when a transaction is rejected by a peer. func (m *PeerHandler) HandleTransactionRejection(rejMsg *wire.MsgReject, peer p2p.PeerI) error { - m.messageCh <- &PeerTxMessage{ + m.messageCh <- &TxStatusMessage{ Hash: &rejMsg.Hash, Status: metamorph_api.Status_REJECTED, Peer: peer.String(), @@ -77,7 +77,7 @@ func (m *PeerHandler) HandleTransactionsGet(msgs []*wire.InvVect, peer p2p.PeerI hashes := make([][]byte, len(msgs)) for i, msg := range msgs { - m.messageCh <- &PeerTxMessage{ + m.messageCh <- &TxStatusMessage{ Hash: &msg.Hash, Status: metamorph_api.Status_REQUESTED_BY_NETWORK, Peer: peer.String(), @@ -93,7 +93,7 @@ func (m *PeerHandler) HandleTransactionsGet(msgs []*wire.InvVect, peer p2p.PeerI func (m *PeerHandler) HandleTransaction(msg *wire.MsgTx, peer p2p.PeerI) error { hash := msg.TxHash() - m.messageCh <- &PeerTxMessage{ + m.messageCh <- &TxStatusMessage{ Hash: &hash, Status: metamorph_api.Status_SEEN_ON_NETWORK, Peer: peer.String(), diff --git a/internal/metamorph/peer_handler_test.go b/internal/metamorph/peer_handler_test.go index b6a571451..e9dd4805d 100644 --- a/internal/metamorph/peer_handler_test.go +++ b/internal/metamorph/peer_handler_test.go @@ -17,7 +17,7 @@ import ( ) func TestPeerHandler(t *testing.T) { - messageCh := make(chan *metamorph.PeerTxMessage, 100) + messageCh := make(chan *metamorph.TxStatusMessage, 100) mtmStore := &storeMocks.MetamorphStoreMock{ GetRawTxsFunc: func(_ context.Context, _ [][]byte) ([][]byte, error) { rawTx := []byte("1234") @@ -36,7 +36,7 @@ func TestPeerHandler(t *testing.T) { msgTx := wire.NewMsgTx(70001) hash := msgTx.TxHash() - expectedMsg := &metamorph.PeerTxMessage{ + expectedMsg := &metamorph.TxStatusMessage{ Hash: &hash, Status: metamorph_api.Status_SENT_TO_NETWORK, Peer: "mock_peer", @@ -64,7 +64,7 @@ func TestPeerHandler(t *testing.T) { msgInv := wire.NewInvVect(wire.InvTypeBlock, hash) require.NoError(t, err) - expectedMsg := &metamorph.PeerTxMessage{ + expectedMsg := &metamorph.TxStatusMessage{ Hash: &msgInv.Hash, Status: metamorph_api.Status_SEEN_ON_NETWORK, Peer: "mock_peer", @@ -88,7 +88,7 @@ func TestPeerHandler(t *testing.T) { // given msgReject := wire.NewMsgReject("command", wire.RejectMalformed, "malformed") - expectedMsg := &metamorph.PeerTxMessage{ + expectedMsg := &metamorph.TxStatusMessage{ Hash: &msgReject.Hash, Status: metamorph_api.Status_REJECTED, Peer: "mock_peer", @@ -113,7 +113,7 @@ func TestPeerHandler(t *testing.T) { // given txsCount := 2 invMsgs := make([]*wire.InvVect, txsCount) - expectedMsgs := make([]*metamorph.PeerTxMessage, txsCount) + expectedMsgs := make([]*metamorph.TxStatusMessage, txsCount) for i := 0; i < txsCount; i++ { hash, err 
:= chainhash.NewHashFromStr("1234") @@ -124,7 +124,7 @@ func TestPeerHandler(t *testing.T) { invMsgs[i] = msgInv - expectedMsgs[i] = &metamorph.PeerTxMessage{ + expectedMsgs[i] = &metamorph.TxStatusMessage{ Hash: hash, Status: metamorph_api.Status_REQUESTED_BY_NETWORK, Peer: "mock_peer", @@ -154,7 +154,7 @@ func TestPeerHandler(t *testing.T) { msgTx := wire.NewMsgTx(70001) hash := msgTx.TxHash() - expectedMsg := &metamorph.PeerTxMessage{ + expectedMsg := &metamorph.TxStatusMessage{ Hash: &hash, Status: metamorph_api.Status_SEEN_ON_NETWORK, Peer: "mock_peer", diff --git a/internal/metamorph/processor.go b/internal/metamorph/processor.go index 7be86546d..6f28658c2 100644 --- a/internal/metamorph/processor.go +++ b/internal/metamorph/processor.go @@ -74,7 +74,7 @@ type Processor struct { callbackSender CallbackSender responseProcessor *ResponseProcessor - statusMessageCh chan *PeerTxMessage + statusMessageCh chan *TxStatusMessage waitGroup *sync.WaitGroup @@ -111,7 +111,7 @@ type CallbackSender interface { SendCallback(ctx context.Context, data *store.Data) } -func NewProcessor(s store.MetamorphStore, c cache.Store, pm p2p.PeerManagerI, statusMessageChannel chan *PeerTxMessage, opts ...Option) (*Processor, error) { +func NewProcessor(s store.MetamorphStore, c cache.Store, pm p2p.PeerManagerI, statusMessageChannel chan *TxStatusMessage, opts ...Option) (*Processor, error) { if s == nil { return nil, ErrStoreNil } @@ -194,6 +194,22 @@ func (p *Processor) Start() error { return errors.Join(ErrFailedToSubscribe, fmt.Errorf("to %s topic", MinedTxsTopic), err) } + err = p.mqClient.Subscribe(StaleTxsTopic, func(msg []byte) error { + serialized := &blocktx_api.TransactionBlock{} + err := proto.Unmarshal(msg, serialized) + if err != nil { + return fmt.Errorf("failed to unmarshal message subscribed on %s topic: %w", MinedTxsTopic, err) + } + + // TODO: send txs to a channel that will make a separate query to db + // and upadate the status of txs from MINED -> MINED_IN_STALE_BLOCk + + return nil + }) + if err != nil { + return fmt.Errorf("failed to subscribe to %s topic: %w", MinedTxsTopic, err) + } + err = p.mqClient.Subscribe(SubmitTxTopic, func(msg []byte) error { serialized := &metamorph_api.TransactionRequest{} err = proto.Unmarshal(msg, serialized) diff --git a/internal/metamorph/processor_test.go b/internal/metamorph/processor_test.go index 90bf3af83..de1b0c690 100644 --- a/internal/metamorph/processor_test.go +++ b/internal/metamorph/processor_test.go @@ -498,7 +498,7 @@ func TestStartSendStatusForTransaction(t *testing.T) { }, } - statusMessageChannel := make(chan *metamorph.PeerTxMessage, 10) + statusMessageChannel := make(chan *metamorph.TxStatusMessage, 10) sut, err := metamorph.NewProcessor(metamorphStore, cStore, pm, statusMessageChannel, metamorph.WithNow(func() time.Time { return time.Date(2023, 10, 1, 13, 0, 0, 0, time.UTC) }), metamorph.WithProcessStatusUpdatesInterval(200*time.Millisecond), metamorph.WithProcessStatusUpdatesBatchSize(3), metamorph.WithCallbackSender(callbackSender)) require.NoError(t, err) @@ -509,7 +509,7 @@ func TestStartSendStatusForTransaction(t *testing.T) { assert.Equal(t, 0, sut.GetProcessorMapSize()) for _, testInput := range tc.inputs { - statusMessageChannel <- &metamorph.PeerTxMessage{ + statusMessageChannel <- &metamorph.TxStatusMessage{ Hash: testInput.hash, Status: testInput.newStatus, Err: testInput.statusErr, diff --git a/internal/metamorph/types.go b/internal/metamorph/types.go index 8d6e687c3..d8991cfb8 100644 --- a/internal/metamorph/types.go +++ 
b/internal/metamorph/types.go @@ -20,7 +20,7 @@ type StatusAndError struct { CompetingTxs []string } -type PeerTxMessage struct { +type TxStatusMessage struct { Start time.Time Hash *chainhash.Hash Status metamorph_api.Status diff --git a/internal/metamorph/zmq.go b/internal/metamorph/zmq.go index ae660d270..c583856dc 100644 --- a/internal/metamorph/zmq.go +++ b/internal/metamorph/zmq.go @@ -37,7 +37,7 @@ type subscriptionRequest struct { type ZMQ struct { url *url.URL - statusMessageCh chan<- *PeerTxMessage + statusMessageCh chan<- *TxStatusMessage handler ZMQI logger *slog.Logger } @@ -82,7 +82,7 @@ type ZMQI interface { Subscribe(string, chan []string) error } -func NewZMQ(zmqURL *url.URL, statusMessageCh chan<- *PeerTxMessage, zmqHandler ZMQI, logger *slog.Logger) (*ZMQ, error) { +func NewZMQ(zmqURL *url.URL, statusMessageCh chan<- *TxStatusMessage, zmqHandler ZMQI, logger *slog.Logger) (*ZMQ, error) { if zmqHandler == nil { return nil, ErrNilZMQHandler } @@ -115,7 +115,7 @@ func (z *ZMQ) Start() error { continue } - z.statusMessageCh <- &PeerTxMessage{ + z.statusMessageCh <- &TxStatusMessage{ Start: time.Now(), Hash: hash, Status: metamorph_api.Status_ACCEPTED_BY_NETWORK, @@ -131,7 +131,7 @@ func (z *ZMQ) Start() error { } if len(competingTxs) == 0 { - z.statusMessageCh <- &PeerTxMessage{ + z.statusMessageCh <- &TxStatusMessage{ Start: time.Now(), Hash: hash, Status: status, @@ -154,7 +154,7 @@ func (z *ZMQ) Start() error { continue } - z.statusMessageCh <- &PeerTxMessage{ + z.statusMessageCh <- &TxStatusMessage{ Start: time.Now(), Hash: hash, Status: metamorph_api.Status_REJECTED, @@ -242,8 +242,8 @@ func (z *ZMQ) parseTxInfo(c []string) (*ZMQTxInfo, error) { return &txInfo, nil } -func (z *ZMQ) prepareCompetingTxMsgs(hash *chainhash.Hash, competingTxs []string) []*PeerTxMessage { - msgs := []*PeerTxMessage{{ +func (z *ZMQ) prepareCompetingTxMsgs(hash *chainhash.Hash, competingTxs []string) []*TxStatusMessage { + msgs := []*TxStatusMessage{{ Start: time.Now(), Hash: hash, Status: metamorph_api.Status_DOUBLE_SPEND_ATTEMPTED, @@ -268,7 +268,7 @@ func (z *ZMQ) prepareCompetingTxMsgs(hash *chainhash.Hash, competingTxs []string // and return a copy of the slice txsWithoutSelf := removeCompetingSelf(allCompetingTxs, tx) - msgs = append(msgs, &PeerTxMessage{ + msgs = append(msgs, &TxStatusMessage{ Start: time.Now(), Hash: competingHash, Status: metamorph_api.Status_DOUBLE_SPEND_ATTEMPTED, diff --git a/internal/metamorph/zmq_test.go b/internal/metamorph/zmq_test.go index ab503750a..7ddb3e5eb 100644 --- a/internal/metamorph/zmq_test.go +++ b/internal/metamorph/zmq_test.go @@ -70,7 +70,7 @@ func TestZMQ(t *testing.T) { }, } - statuses := make(chan *metamorph.PeerTxMessage, tc.expectedStatusesCount) + statuses := make(chan *metamorph.TxStatusMessage, tc.expectedStatusesCount) zmqURL, err := url.Parse("https://some-url.com") require.NoError(t, err) @@ -83,7 +83,7 @@ func TestZMQ(t *testing.T) { require.NoError(t, err) // then - var status *metamorph.PeerTxMessage + var status *metamorph.TxStatusMessage sCounter := 0 for i := 0; i < tc.expectedStatusesCount; i++ { select { @@ -118,7 +118,7 @@ func TestZMQDoubleSpend(t *testing.T) { numberOfMsgs := 2 hashes := []string{"8e75ae10f86d8a43044a54c3c57d660d20cdb74e233be4b5c90ba752ebdc7e88", "d64adfce6b105dc6bdf475494925bf06802a41a0582586f33c2b16d537a0b7b6"} - statuses := make(chan *metamorph.PeerTxMessage, numberOfMsgs) + statuses := make(chan *metamorph.TxStatusMessage, numberOfMsgs) zmqURL, err := url.Parse("https://some-url.com") require.NoError(t, 
err) @@ -132,7 +132,7 @@ func TestZMQDoubleSpend(t *testing.T) { require.NoError(t, err) // then - var status *metamorph.PeerTxMessage + var status *metamorph.TxStatusMessage sCounter := 0 for i := 0; i < numberOfMsgs; i++ { select { From 49bd2fb0b0455656679942583f891d7669e3a19d Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Tue, 8 Oct 2024 18:30:51 +0200 Subject: [PATCH 02/36] feat: improve logic of publishing mined txs to metamorph, unlink upserting and publishing txs --- internal/blocktx/processor.go | 140 ++++++------------ .../blocktx.block_transactions_map.yaml | 40 +++++ .../blocktx.blocks.yaml | 22 +++ .../blocktx.transactions.yaml | 43 ++++++ .../postgresql/get_registered_transactions.go | 48 ++++++ .../get_registered_txs_by_block_hashes.go | 67 +++++++++ .../blocktx/store/postgresql/postgres_test.go | 48 +++++- .../postgresql/upsert_block_transactions.go | 42 +----- internal/blocktx/store/store.go | 4 +- internal/metamorph/processor.go | 16 -- 10 files changed, 318 insertions(+), 152 deletions(-) create mode 100644 internal/blocktx/store/postgresql/fixtures/get_registered_txs_by_block_hashes/blocktx.block_transactions_map.yaml create mode 100644 internal/blocktx/store/postgresql/fixtures/get_registered_txs_by_block_hashes/blocktx.blocks.yaml create mode 100644 internal/blocktx/store/postgresql/fixtures/get_registered_txs_by_block_hashes/blocktx.transactions.yaml create mode 100644 internal/blocktx/store/postgresql/get_registered_transactions.go create mode 100644 internal/blocktx/store/postgresql/get_registered_txs_by_block_hashes.go diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index 81a511f74..9e5b2f78d 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -475,6 +475,7 @@ func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { return err } + shouldPerformReorg := false if competing { p.logger.Info("Competing blocks found", slog.String("incoming block hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height)) @@ -493,12 +494,7 @@ func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { p.logger.Info("reorg detected - updating blocks", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height)) incomingBlock.Status = blocktx_api.Status_LONGEST - - err = p.performReorg(ctx, incomingBlock) - if err != nil { - p.logger.Error("unable to perform reorg", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) - return err - } + shouldPerformReorg = true } } @@ -517,11 +513,36 @@ func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { return err } - if err = p.storeAndPublishTransactions(ctx, blockId, incomingBlock, calculatedMerkleTree); err != nil { + if err = p.storeTransactions(ctx, blockId, incomingBlock, calculatedMerkleTree); err != nil { p.logger.Error("unable to mark block as mined", slog.String("hash", blockHash.String()), slog.String("err", err.Error())) return err } + // update this struct to have status (for MINED and MINED_IN_STALE_BLOCK statuses) + txsToPublish := make([]*blocktx_api.TransactionBlock, 0) + + // perform reorg - return txs to publish + if shouldPerformReorg { + txsToPublish, err = p.performReorg(ctx, incomingBlock, msg.TransactionHashes) + if err != nil { + p.logger.Error("unable to perform reorg", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) + return err + } + } 
else if incomingBlock.Status == blocktx_api.Status_STALE { + // txsToPublish, err = p.getStaleTxs() + } else { + txsToPublish, err = p.store.GetRegisteredTransactions(ctx, blockId) + } + + for _, txBlock := range txsToPublish { + // change that receiver method in metamorph to accept statuses (MINED and MINED_IN_STALE_BLOCK) + err = p.mqClient.PublishMarshal(MinedTxsTopic, txBlock) + if err != nil { + // TODO: add txID to err log + p.logger.Error("failed to publish mined txs", slog.Uint64("height", txBlock.BlockHeight), slog.String("err", err.Error())) + } + } + if err = p.store.MarkBlockAsDone(ctx, &blockHash, msg.Size, uint64(len(msg.TransactionHashes))); err != nil { p.logger.Error("unable to mark block as processed", slog.String("hash", blockHash.String()), slog.String("err", err.Error())) return err @@ -597,10 +618,10 @@ func (p *Processor) hasGreatestChainwork(ctx context.Context, incomingBlock *blo return tipChainWork.Cmp(incomingBlockChainwork) < 0, nil } -func (p *Processor) performReorg(ctx context.Context, incomingBlock *blocktx_api.Block, transactionHashes []*chainhash.Hash) error { +func (p *Processor) performReorg(ctx context.Context, incomingBlock *blocktx_api.Block, transactionHashes []*chainhash.Hash) ([]*blocktx_api.TransactionBlock, error) { staleBlocks, err := p.store.GetStaleChainBackFromHash(ctx, incomingBlock.PreviousHash) if err != nil { - return err + return nil, err } lowestHeight := incomingBlock.Height @@ -610,7 +631,7 @@ func (p *Processor) performReorg(ctx context.Context, incomingBlock *blocktx_api longestBlocks, err := p.store.GetLongestChainFromHeight(ctx, lowestHeight) if err != nil { - return err + return nil, err } staleHashes := make([][]byte, 0) @@ -629,62 +650,35 @@ func (p *Processor) performReorg(ctx context.Context, incomingBlock *blocktx_api blockStatusUpdates = append(blockStatusUpdates, update) } - prevStaleTxs, err := p.store.GetRegisteredTxsByBlockHashes(staleHashes, blocktx_api.Status_STALE) // TODO: implement this query - if err != nil { - return err - } - - prevStaleTxs2, err := p.store.GetRegisteredTxsByBlockHashes(transactionHashes, blocktx_api.Status_STALE) // TODO: implement this query + prevStaleTxs, prevLongestTxs, err := p.store.GetRegisteredTxsByBlockHashes(ctx, append(staleHashes, longestHashes...)) if err != nil { - return err - } - - prevStaleTxs = append(prevStaleTxs, prevStaleTxs2) - - prevLongestTxs, err := p.store.GetRegisteredTxsByBlockHashes(longestHashes, blocktx_api.Status_LONGEST) // TODO: implement this query - if err != nil { - return err + return nil, err } minedTxs, staleTxs := findMinedAndStaleTxs(prevStaleTxs, prevLongestTxs) err = p.store.UpdateBlocksStatuses(ctx, blockStatusUpdates) if err != nil { - return err + return nil, err } - for _, minedTx := range minedTxs { - minedTxBlock := &blocktx_api.TransactionBlock{ - TransactionHash: minedTx.TxHash, - BlockHash: minedTx.BlockHash, - BlockHeight: minedTx.BlockHeight, - MerklePath: minedTx.MerklePath, - } + txsCombined := append(minedTxs, staleTxs...) 
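// Illustrative sketch (not part of this patch): the reorg decision above rests on a
// big.Int chainwork comparison (tipChainWork.Cmp(incomingBlockChainwork) < 0). The
// bits-to-work conversion below is the standard formula, work = 2^256 / (target + 1),
// and is only an assumption about what calculateChainwork implements; the example
// difficulty bits are hypothetical.
package main

import (
	"fmt"
	"math/big"
)

// compactToTarget expands compact difficulty bits into the 256-bit target.
func compactToTarget(bits uint32) *big.Int {
	mantissa := big.NewInt(int64(bits & 0x007fffff))
	exponent := uint(bits >> 24)
	if exponent <= 3 {
		return new(big.Int).Rsh(mantissa, 8*(3-exponent))
	}
	return new(big.Int).Lsh(mantissa, 8*(exponent-3))
}

// workForBits returns 2^256 / (target + 1), the standard per-block chainwork.
func workForBits(bits uint32) *big.Int {
	target := compactToTarget(bits)
	denom := new(big.Int).Add(target, big.NewInt(1))
	numer := new(big.Int).Lsh(big.NewInt(1), 256)
	return new(big.Int).Div(numer, denom)
}

func main() {
	tipWork := workForBits(0x180edae5)      // current longest tip
	incomingWork := workForBits(0x1809b91a) // incoming block on the competing chain (lower target, more work)

	// mirrors hasGreatestChainwork: a competing chain with more accumulated work triggers the reorg
	fmt.Println("reorg:", tipWork.Cmp(incomingWork) < 0)
}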
- err := p.mqClient.PublishMarshal(MinedTxsTopic, minedTxBlock) - if err != nil { - p.logger.Error("failed to publish mined tx after reorg", slog.Uint64("height", minedTx.BlockHeight), slog.String("err", err.Error())) - } - } - - for _, staleTx := range staleTxs { - staleTxBlock := &blocktx_api.TransactionBlock{ - TransactionHash: staleTx.TxHash, - BlockHash: staleTx.BlockHash, - BlockHeight: staleTx.BlockHeight, - MerklePath: staleTx.MerklePath, - } + txsToPublish := make([]*blocktx_api.TransactionBlock, len(txsCombined)) - p.mqClient.PublishMarshal(StaleTxsTopic, staleTxBlock) // TODO: add this topic - if err != nil { - p.logger.Error("failed to publish stale tx after reorg", slog.Uint64("height", staleTx.BlockHeight), slog.String("err", err.Error())) + for i, tx := range txsCombined { + txsToPublish[i] = &blocktx_api.TransactionBlock{ + TransactionHash: tx.TxHash, + BlockHash: tx.BlockHash, + BlockHeight: tx.BlockHeight, + MerklePath: tx.MerklePath, } } - return nil + return txsToPublish, nil } -func (p *Processor) storeAndPublishTransactions(ctx context.Context, blockId uint64, block *blocktx_api.Block, merkleTree []*chainhash.Hash) (err error) { +func (p *Processor) storeTransactions(ctx context.Context, blockId uint64, block *blocktx_api.Block, merkleTree []*chainhash.Hash) (err error) { ctx, span := tracing.StartTracing(ctx, "markTransactionsAsMined", p.tracingEnabled, p.tracingAttributes...) defer func() { tracing.EndTracing(span, err) @@ -734,33 +728,12 @@ func (p *Processor) storeAndPublishTransactions(ctx context.Context, blockId uin }) if (txIndex+1)%p.transactionStorageBatchSize == 0 { - updateResp, err := p.store.UpsertBlockTransactions(ctx, blockID, txs) + err := p.store.UpsertBlockTransactions(ctx, blockId, txs) if err != nil { return errors.Join(ErrFailedToInsertBlockTransactions, err) } // free up memory txs = txs[:0] - - // when the block is not from the longest chain, just store - // tranasctions in db and don't publish them to metamorph - // TODO: change this to != Status_LONGEST when handling - // ORPHANED blocks is implemented - if block.Status == blocktx_api.Status_STALE { - continue - } - - for _, updResp := range updateResp { - txBlock := &blocktx_api.TransactionBlock{ - TransactionHash: updResp.Hash[:], - BlockHash: block.Hash, - BlockHeight: block.Height, - MerklePath: updResp.MerklePath, - } - err = p.mqClient.PublishMarshal(ctx, MinedTxsTopic, txBlock) - if err != nil { - p.logger.Error("failed to publish mined txs", slog.String("hash", blockhash.String()), slog.Uint64("height", block.Height), slog.String("err", err.Error())) - } - } } if percentage, found := progress[txIndex+1]; found { @@ -773,32 +746,11 @@ func (p *Processor) storeAndPublishTransactions(ctx context.Context, blockId uin tracing.EndTracing(iterateMerkleTree, nil) // update all remaining transactions - updateResp, err := p.store.UpsertBlockTransactions(ctx, blockID, txs) + err = p.store.UpsertBlockTransactions(ctx, blockId, txs) if err != nil { return errors.Join(ErrFailedToInsertBlockTransactions, fmt.Errorf("block height: %d", block.Height), err) } - // when the block is not from the longest chain, just store - // tranasctions in db and don't publish them to metamorph - // TODO: change this to != Status_LONGEST when handling - // ORPHANED blocks is implemented - if block.Status == blocktx_api.Status_STALE { - return nil - } - - for _, updResp := range updateResp { - txBlock := &blocktx_api.TransactionBlock{ - TransactionHash: updResp.Hash[:], - BlockHash: block.Hash, - BlockHeight: 
block.Height, - MerklePath: updResp.MerklePath, - } - err = p.mqClient.PublishMarshal(ctx, MinedTxsTopic, txBlock) - if err != nil { - p.logger.Error("failed to publish mined txs", slog.String("hash", blockhash.String()), slog.Uint64("height", block.Height), slog.String("err", err.Error())) - } - } - return nil } diff --git a/internal/blocktx/store/postgresql/fixtures/get_registered_txs_by_block_hashes/blocktx.block_transactions_map.yaml b/internal/blocktx/store/postgresql/fixtures/get_registered_txs_by_block_hashes/blocktx.block_transactions_map.yaml new file mode 100644 index 000000000..a405ab034 --- /dev/null +++ b/internal/blocktx/store/postgresql/fixtures/get_registered_txs_by_block_hashes/blocktx.block_transactions_map.yaml @@ -0,0 +1,40 @@ +- blockid: 1 + txid: 1 + merkle_path: merkle-path-1 + inserted_at: 2023-12-10 14:00:00 +- blockid: 1 + txid: 2 + merkle_path: merkle-path-2 + inserted_at: 2023-12-10 14:00:00 +- blockid: 1 + txid: 3 + merkle_path: merkle-path-3 + inserted_at: 2023-12-10 14:00:00 +- blockid: 1 + txid: 4 + merkle_path: merkle-path-4 + inserted_at: 2023-12-10 14:00:00 +- blockid: 1 + txid: 5 + merkle_path: merkle-path-5 + inserted_at: 2023-12-10 14:00:00 +- blockid: 2 + txid: 6 + merkle_path: merkle-path-6 + inserted_at: 2023-12-15 14:00:00 +- blockid: 2 + txid: 7 + merkle_path: merkle-path-7 + inserted_at: 2023-12-15 14:00:00 +- blockid: 2 + txid: 8 + merkle_path: merkle-path-8 + inserted_at: 2023-12-15 14:00:00 +- blockid: 2 + txid: 9 + merkle_path: merkle-path-9 + inserted_at: 2023-12-15 14:00:00 +- blockid: 2 + txid: 10 + merkle_path: merkle-path-10 + inserted_at: 2023-12-15 14:00:00 diff --git a/internal/blocktx/store/postgresql/fixtures/get_registered_txs_by_block_hashes/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/get_registered_txs_by_block_hashes/blocktx.blocks.yaml new file mode 100644 index 000000000..f5a812428 --- /dev/null +++ b/internal/blocktx/store/postgresql/fixtures/get_registered_txs_by_block_hashes/blocktx.blocks.yaml @@ -0,0 +1,22 @@ +- inserted_at: 2023-12-10 14:00:00 + id: 1 + hash: 0x000000000000000005aa39a25e7e8bf440c270ec9a1bd30e99ab026f39207ef9 + prevhash: 0x0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7 + merkleroot: 0x7f4019eb006f5333cce752df387fa8443035c22291eb771ee5b16a02b81c8483 + height: 822013 + processed_at: 2023-12-10 14:10:00 + size: 86840000 + tx_count: 23477 + orphanedyn: false + status: 10 # LONGEST +- inserted_at: 2023-12-15 14:00:00 + id: 2 + hash: 0x0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7 + prevhash: 0x000000000000000002a0926c51854d2bd525c26026ab0f178ca07f723b31033a + merkleroot: 0x3eeee879a8a08fc537a04682178687bb0e58a5103938eafc349705a2acb06410 + height: 822012 + processed_at: 2023-12-15 14:10:00 + size: 3030000 + tx_count: 856 + orphanedyn: false + status: 20 # STALE diff --git a/internal/blocktx/store/postgresql/fixtures/get_registered_txs_by_block_hashes/blocktx.transactions.yaml b/internal/blocktx/store/postgresql/fixtures/get_registered_txs_by_block_hashes/blocktx.transactions.yaml new file mode 100644 index 000000000..9fd3af32c --- /dev/null +++ b/internal/blocktx/store/postgresql/fixtures/get_registered_txs_by_block_hashes/blocktx.transactions.yaml @@ -0,0 +1,43 @@ +# txs from block 1 +- id: 1 + hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853 + inserted_at: 2023-12-10 14:00:00 + is_registered: false +- id: 2 + hash: 0x21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e + inserted_at: 2023-12-10 14:00:00 + 
is_registered: true +- id: 3 + hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430 + inserted_at: 2023-12-10 14:00:00 + is_registered: false +- id: 4 + hash: 0xee76f5b746893d3e6ae6a14a15e464704f4ebd601537820933789740acdcf6aa + inserted_at: 2023-12-10 14:00:00 + is_registered: false +- id: 5 + hash: 0x3e0b5b218c344110f09bf485bc58de4ea5378e55744185edf9c1dafa40068ecd + inserted_at: 2023-12-10 14:00:00 + is_registered: false + +# txs from block 2 +- id: 6 + hash: 0x213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0 + inserted_at: 2023-12-15 14:00:00 + is_registered: true +- id: 7 + hash: 0x12c04cfc5643f1cd25639ad42d6f8f0489557699d92071d7e0a5b940438c4357 + inserted_at: 2023-12-15 14:00:00 + is_registered: true +- id: 8 + hash: 0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6 + inserted_at: 2023-12-15 14:00:00 + is_registered: false +- id: 9 + hash: 0x5c5b621b81fb63d9df4595ee2e6b3c50cce1f5f0e1b83510aac504931ed22799 + inserted_at: 2023-12-15 14:00:00 + is_registered: false +- id: 10 + hash: 0xa3d4e78a8e11e97c8faf34880da861412273948edf467f23590601a1057079d8 + inserted_at: 2023-12-15 14:00:00 + is_registered: false diff --git a/internal/blocktx/store/postgresql/get_registered_transactions.go b/internal/blocktx/store/postgresql/get_registered_transactions.go new file mode 100644 index 000000000..5ce802a9b --- /dev/null +++ b/internal/blocktx/store/postgresql/get_registered_transactions.go @@ -0,0 +1,48 @@ +package postgresql + +import ( + "context" + "errors" + "fmt" + + "github.com/bitcoin-sv/arc/internal/blocktx/store" +) + +func (p *PostgreSQL) GetRegisteredTransactions(ctx context.Context, blockId uint64) (registeredTxs []store.TxWithMerklePath, err error) { + qRegisteredTransactions := ` + SELECT + t.hash, + m.merkle_path + FROM blocktx.transactions t + JOIN blocktx.block_transactions_map AS m ON t.id = m.txid + WHERE m.blockid = $1 AND t.is_registered = TRUE + ` + + rows, err := p.db.QueryContext(ctx, qRegisteredTransactions, blockId) + if err != nil { + return nil, fmt.Errorf("failed to get registered transactions for block with id %d: %v", blockId, err) + } + defer rows.Close() + + registeredRows := make([]store.TxWithMerklePath, 0) + + for rows.Next() { + var txHash []byte + var merklePath string + err = rows.Scan(&txHash, &merklePath) + if err != nil { + return nil, errors.Join(store.ErrFailedToGetRows, err) + } + + registeredRows = append(registeredRows, store.TxWithMerklePath{ + Hash: txHash, + MerklePath: merklePath, + }) + } + + if err := rows.Err(); err != nil { + return nil, fmt.Errorf("error getting registered transactions for block with id %d: %v", blockId, err) + } + + return registeredRows, nil +} diff --git a/internal/blocktx/store/postgresql/get_registered_txs_by_block_hashes.go b/internal/blocktx/store/postgresql/get_registered_txs_by_block_hashes.go new file mode 100644 index 000000000..509674c0c --- /dev/null +++ b/internal/blocktx/store/postgresql/get_registered_txs_by_block_hashes.go @@ -0,0 +1,67 @@ +package postgresql + +import ( + "context" + + "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" + "github.com/bitcoin-sv/arc/internal/blocktx/store" + "github.com/lib/pq" +) + +func (p *PostgreSQL) GetRegisteredTxsByBlockHashes(ctx context.Context, blockHashes [][]byte) (longestTxs []store.GetMinedTransactionResult, staleTxs []store.GetMinedTransactionResult, err error) { + q := ` + SELECT + t.hash, + b.hash, + b.height, + m.merkle_path, + b.status + FROM blocktx.blocks AS b + JOIN blocktx.block_transactions_map AS 
m ON m.blockid = b.id + JOIN blocktx.transactions AS t ON t.id = m.txid AND t.is_registered = TRUE + WHERE b.hash = ANY($1) + ` + + rows, err := p.db.QueryContext(ctx, q, pq.Array(blockHashes)) + if err != nil { + return + } + defer rows.Close() + + for rows.Next() { + var txHash []byte + var blockHash []byte + var blockHeight uint64 + var merklePath string + var status blocktx_api.Status + + err = rows.Scan( + &txHash, + &blockHash, + &blockHeight, + &merklePath, + &status, + ) + if err != nil { + return + } + + result := store.GetMinedTransactionResult{ + TxHash: txHash, + BlockHash: blockHash, + BlockHeight: blockHeight, + MerklePath: merklePath, + } + + switch status { + case blocktx_api.Status_LONGEST: + longestTxs = append(longestTxs, result) + case blocktx_api.Status_STALE: + staleTxs = append(staleTxs, result) + default: + // do nothing - ignore ORPHANED and UNKNOWN blocks + } + } + + return +} diff --git a/internal/blocktx/store/postgresql/postgres_test.go b/internal/blocktx/store/postgresql/postgres_test.go index 18bfd6cbd..c63cb1a96 100644 --- a/internal/blocktx/store/postgresql/postgres_test.go +++ b/internal/blocktx/store/postgresql/postgres_test.go @@ -286,6 +286,50 @@ func TestPostgresDB(t *testing.T) { } }) + t.Run("get registered txs by block hashes", func(t *testing.T) { + // given + prepareDb(t, postgresDB.db, "fixtures/get_registered_txs_by_block_hashes") + + blockHashLongest := testutils.RevChainhash(t, "000000000000000005aa39a25e7e8bf440c270ec9a1bd30e99ab026f39207ef9") + blockHashStale := testutils.RevChainhash(t, "0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7") + + blockHashes := [][]byte{ + blockHashLongest[:], + blockHashStale[:], + } + + expectedLongestTxs := []store.GetMinedTransactionResult{ + { + TxHash: testutils.RevChainhash(t, "21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e")[:], + BlockHash: blockHashLongest[:], + BlockHeight: 822013, + MerklePath: "merkle-path-2", + }, + } + expectedStaleTxs := []store.GetMinedTransactionResult{ + { + TxHash: testutils.RevChainhash(t, "213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0")[:], + BlockHash: blockHashStale[:], + BlockHeight: 822012, + MerklePath: "merkle-path-6", + }, + { + TxHash: testutils.RevChainhash(t, "12c04cfc5643f1cd25639ad42d6f8f0489557699d92071d7e0a5b940438c4357")[:], + BlockHash: blockHashStale[:], + BlockHeight: 822012, + MerklePath: "merkle-path-7", + }, + } + + // when + longestTxs, staleTxs, err := postgresDB.GetRegisteredTxsByBlockHashes(ctx, blockHashes) + + // then + require.NoError(t, err) + require.Equal(t, expectedLongestTxs, longestTxs) + require.Equal(t, expectedStaleTxs, staleTxs) + }) + t.Run("update blocks statuses", func(t *testing.T) { // given prepareDb(t, postgresDB.db, "fixtures/update_blocks_statuses") @@ -337,7 +381,7 @@ func TestPostgresDB(t *testing.T) { // when // get mined transaction and corresponding block - minedTxs, err := postgresDB.GetMinedTransactions(ctx, []*chainhash.Hash{txHash1, txHash2, txHash3, txHash4}) + minedTxs, err := postgresDB.GetMinedTransactions(ctx, [][]byte{txHash1[:], txHash2[:], txHash3[:], txHash4[:]}, blocktx_api.Status_LONGEST) require.NoError(t, err) // then @@ -681,7 +725,7 @@ func TestPostgresStore_UpsertBlockTransactions_CompetingBlocks(t *testing.T) { require.NoError(t, err) // then - actual, err := sut.GetMinedTransactions(ctx, []*chainhash.Hash{txHash}) + actual, err := sut.GetMinedTransactions(ctx, [][]byte{txHash[:]}, blocktx_api.Status_LONGEST) require.NoError(t, err) 
require.ElementsMatch(t, expected, actual) diff --git a/internal/blocktx/store/postgresql/upsert_block_transactions.go b/internal/blocktx/store/postgresql/upsert_block_transactions.go index 090123aab..6b3a69983 100644 --- a/internal/blocktx/store/postgresql/upsert_block_transactions.go +++ b/internal/blocktx/store/postgresql/upsert_block_transactions.go @@ -3,7 +3,6 @@ package postgresql import ( "context" "errors" - "fmt" "github.com/lib/pq" "go.opentelemetry.io/otel/attribute" @@ -13,7 +12,7 @@ import ( ) // UpsertBlockTransactions upserts the transaction hashes for a given block hash and returns updated registered transactions hashes. -func (p *PostgreSQL) UpsertBlockTransactions(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) (registeredRows []store.TxWithMerklePath, err error) { +func (p *PostgreSQL) UpsertBlockTransactions(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) (err error) { ctx, span := tracing.StartTracing(ctx, "UpsertBlockTransactions", p.tracingEnabled, append(p.tracingAttributes, attribute.Int("updates", len(txsWithMerklePaths)))...) defer func() { tracing.EndTracing(span, err) @@ -45,45 +44,10 @@ func (p *PostgreSQL) UpsertBlockTransactions(ctx context.Context, blockID uint64 ON CONFLICT(blockid, txid) DO NOTHING; ` - qRegisteredTransactions := ` - SELECT - t.hash, - m.merkle_path - FROM blocktx.transactions t - JOIN blocktx.block_transactions_map AS m ON t.id = m.txid - WHERE m.blockid = $1 AND t.is_registered = TRUE AND t.hash = ANY($2) - ` - _, err = p.db.ExecContext(ctx, qUpsertTransactions, blockID, pq.Array(txHashesBytes), pq.Array(merklePaths)) if err != nil { - return nil, errors.Join(store.ErrFailedToExecuteTxUpdateQuery, err) - } - - rows, err := p.db.QueryContext(ctx, qRegisteredTransactions, blockID, pq.Array(txHashesBytes)) - if err != nil { - return nil, fmt.Errorf("failed to get registered transactions for block with id %d: %v", blockID, err) - } - defer rows.Close() - - registeredRows = make([]store.TxWithMerklePath, 0) - - for rows.Next() { - var txHash []byte - var merklePath string - err = rows.Scan(&txHash, &merklePath) - if err != nil { - return nil, errors.Join(store.ErrFailedToGetRows, err) - } - - registeredRows = append(registeredRows, store.TxWithMerklePath{ - Hash: txHash, - MerklePath: merklePath, - }) - } - - if err := rows.Err(); err != nil { - return nil, fmt.Errorf("error getting registered transactions for block with id %d: %v", blockID, err) + return errors.Join(store.ErrFailedToExecuteTxUpdateQuery, err) } - return registeredRows, nil + return nil } diff --git a/internal/blocktx/store/store.go b/internal/blocktx/store/store.go index 4c18e5287..9f2d7ade7 100644 --- a/internal/blocktx/store/store.go +++ b/internal/blocktx/store/store.go @@ -33,13 +33,15 @@ type BlocktxStore interface { GetBlockByHeight(ctx context.Context, height uint64, status blocktx_api.Status) (*blocktx_api.Block, error) GetChainTip(ctx context.Context) (*blocktx_api.Block, error) UpsertBlock(ctx context.Context, block *blocktx_api.Block) (uint64, error) - UpsertBlockTransactions(ctx context.Context, blockID uint64, txsWithMerklePaths []TxWithMerklePath) (registeredTxs []TxWithMerklePath, err error) + UpsertBlockTransactions(ctx context.Context, blockId uint64, txsWithMerklePaths []TxWithMerklePath) error MarkBlockAsDone(ctx context.Context, hash *chainhash.Hash, size uint64, txCount uint64) error GetBlockGaps(ctx context.Context, heightRange int) ([]*BlockGap, error) ClearBlocktxTable(ctx 
context.Context, retentionDays int32, table string) (*blocktx_api.RowsAffectedResponse, error) GetMinedTransactions(ctx context.Context, hashes [][]byte, blockStatus blocktx_api.Status) ([]GetMinedTransactionResult, error) GetLongestChainFromHeight(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) GetStaleChainBackFromHash(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) + GetRegisteredTransactions(ctx context.Context, blockId uint64) (registeredTxs []TxWithMerklePath, err error) + GetRegisteredTxsByBlockHashes(ctx context.Context, blockHashes [][]byte) (longestTxs []GetMinedTransactionResult, staleTxs []GetMinedTransactionResult, err error) UpdateBlocksStatuses(ctx context.Context, blockStatusUpdates []BlockStatusUpdate) error GetStats(ctx context.Context) (*Stats, error) diff --git a/internal/metamorph/processor.go b/internal/metamorph/processor.go index 6f28658c2..693fe476b 100644 --- a/internal/metamorph/processor.go +++ b/internal/metamorph/processor.go @@ -194,22 +194,6 @@ func (p *Processor) Start() error { return errors.Join(ErrFailedToSubscribe, fmt.Errorf("to %s topic", MinedTxsTopic), err) } - err = p.mqClient.Subscribe(StaleTxsTopic, func(msg []byte) error { - serialized := &blocktx_api.TransactionBlock{} - err := proto.Unmarshal(msg, serialized) - if err != nil { - return fmt.Errorf("failed to unmarshal message subscribed on %s topic: %w", MinedTxsTopic, err) - } - - // TODO: send txs to a channel that will make a separate query to db - // and upadate the status of txs from MINED -> MINED_IN_STALE_BLOCk - - return nil - }) - if err != nil { - return fmt.Errorf("failed to subscribe to %s topic: %w", MinedTxsTopic, err) - } - err = p.mqClient.Subscribe(SubmitTxTopic, func(msg []byte) error { serialized := &metamorph_api.TransactionRequest{} err = proto.Unmarshal(msg, serialized) From 25b7518a5d043eb6a5ad94ff4b6435479bf2148a Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Wed, 9 Oct 2024 15:13:15 +0200 Subject: [PATCH 03/36] feat: improved way of transactions publishing to metamorph, store methods refactor and test coverage --- internal/blocktx/processor.go | 114 ++++++++--- internal/blocktx/processor_helpers.go | 4 +- internal/blocktx/processor_helpers_test.go | 48 +++++ internal/blocktx/processor_test.go | 19 +- .../blocktx/store/mocks/blocktx_store_mock.go | 126 +++++++++++- internal/blocktx/store/model.go | 3 +- .../blocktx.block_transactions_map.yaml | 12 -- .../blocktx.blocks.yaml | 11 -- .../blocktx.transactions.yaml | 12 -- .../blocktx.block_transactions_map.yaml | 0 .../blocktx.blocks.yaml | 0 .../blocktx.transactions.yaml | 4 +- .../postgresql/get_mined_transactions.go | 64 ------- .../postgresql/get_registered_transactions.go | 48 ----- .../get_registered_txs_by_block_hashes.go | 67 ------- .../store/postgresql/get_transactions.go | 87 +++++++++ .../blocktx/store/postgresql/postgres_test.go | 181 +++++++++++------- .../postgresql/upsert_block_transactions.go | 2 +- internal/blocktx/store/store.go | 6 +- 19 files changed, 471 insertions(+), 337 deletions(-) delete mode 100644 internal/blocktx/store/postgresql/fixtures/get_mined_transactions/blocktx.block_transactions_map.yaml delete mode 100644 internal/blocktx/store/postgresql/fixtures/get_mined_transactions/blocktx.blocks.yaml delete mode 100644 internal/blocktx/store/postgresql/fixtures/get_mined_transactions/blocktx.transactions.yaml rename internal/blocktx/store/postgresql/fixtures/{get_registered_txs_by_block_hashes => get_transactions}/blocktx.block_transactions_map.yaml (100%) 
rename internal/blocktx/store/postgresql/fixtures/{get_registered_txs_by_block_hashes => get_transactions}/blocktx.blocks.yaml (100%) rename internal/blocktx/store/postgresql/fixtures/{get_registered_txs_by_block_hashes => get_transactions}/blocktx.transactions.yaml (96%) delete mode 100644 internal/blocktx/store/postgresql/get_mined_transactions.go delete mode 100644 internal/blocktx/store/postgresql/get_registered_transactions.go delete mode 100644 internal/blocktx/store/postgresql/get_registered_txs_by_block_hashes.go create mode 100644 internal/blocktx/store/postgresql/get_transactions.go diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index 9e5b2f78d..65cfe9c90 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -383,7 +383,7 @@ func (p *Processor) publishMinedTxs(txHashes []*chainhash.Hash) error { hashesBytes[i] = h[:] } - minedTxs, err := p.store.GetMinedTransactions(p.ctx, hashesBytes, blocktx_api.Status_LONGEST) + minedTxs, err := p.store.GetMinedTransactions(p.ctx, hashesBytes) if err != nil { return fmt.Errorf("failed to get mined transactions: %v", err) } @@ -444,7 +444,7 @@ func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { previousBlockHash := msg.Header.PrevBlock merkleRoot := msg.Header.MerkleRoot - // don't process block that was already processed + // don't process block that was already processed or is below our retention height existingBlock, _ := p.store.GetBlock(ctx, &blockHash) if existingBlock != nil && existingBlock.Processed { return nil @@ -485,15 +485,13 @@ func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { return err } - // check for all registered transactions in the longest chain - // any registered transactions that are in this block but not - // in the longest chain - publish to metamorph as MINED_IN_STALE_CHAIN incomingBlock.Status = blocktx_api.Status_STALE if hasGreatestChainwork { - p.logger.Info("reorg detected - updating blocks", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height)) + p.logger.Info("chain reorg detected", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height)) incomingBlock.Status = blocktx_api.Status_LONGEST + shouldPerformReorg = true } } @@ -518,23 +516,36 @@ func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { return err } - // update this struct to have status (for MINED and MINED_IN_STALE_BLOCK statuses) - txsToPublish := make([]*blocktx_api.TransactionBlock, 0) + txsToPublish := make([]store.TransactionBlock, 0) - // perform reorg - return txs to publish if shouldPerformReorg { - txsToPublish, err = p.performReorg(ctx, incomingBlock, msg.TransactionHashes) + txsToPublish, err = p.performReorg(ctx, incomingBlock) if err != nil { p.logger.Error("unable to perform reorg", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) return err } } else if incomingBlock.Status == blocktx_api.Status_STALE { - // txsToPublish, err = p.getStaleTxs() - } else { + txsToPublish, err = p.getStaleTxs(ctx, blockId) + if err != nil { + p.logger.Error("unable to get stale transactions", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) + return err + } + } else if incomingBlock.Status == blocktx_api.Status_LONGEST { txsToPublish, err = p.store.GetRegisteredTransactions(ctx, blockId) + if err != nil { + p.logger.Error("unable 
to get registered transactions", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) + return err + } } - for _, txBlock := range txsToPublish { + for _, tx := range txsToPublish { + txBlock := &blocktx_api.TransactionBlock{ + BlockHash: tx.BlockHash, + BlockHeight: tx.BlockHeight, + TransactionHash: tx.TxHash, + MerklePath: tx.MerklePath, + } + // change that receiver method in metamorph to accept statuses (MINED and MINED_IN_STALE_BLOCK) err = p.mqClient.PublishMarshal(MinedTxsTopic, txBlock) if err != nil { @@ -618,7 +629,7 @@ func (p *Processor) hasGreatestChainwork(ctx context.Context, incomingBlock *blo return tipChainWork.Cmp(incomingBlockChainwork) < 0, nil } -func (p *Processor) performReorg(ctx context.Context, incomingBlock *blocktx_api.Block, transactionHashes []*chainhash.Hash) ([]*blocktx_api.TransactionBlock, error) { +func (p *Processor) performReorg(ctx context.Context, incomingBlock *blocktx_api.Block) ([]store.TransactionBlock, error) { staleBlocks, err := p.store.GetStaleChainBackFromHash(ctx, incomingBlock.PreviousHash) if err != nil { return nil, err @@ -650,31 +661,34 @@ func (p *Processor) performReorg(ctx context.Context, incomingBlock *blocktx_api blockStatusUpdates = append(blockStatusUpdates, update) } - prevStaleTxs, prevLongestTxs, err := p.store.GetRegisteredTxsByBlockHashes(ctx, append(staleHashes, longestHashes...)) + registeredTxs, err := p.store.GetRegisteredTxsByBlockHashes(ctx, append(staleHashes, longestHashes...)) if err != nil { return nil, err } - minedTxs, staleTxs := findMinedAndStaleTxs(prevStaleTxs, prevLongestTxs) - err = p.store.UpdateBlocksStatuses(ctx, blockStatusUpdates) if err != nil { return nil, err } - txsCombined := append(minedTxs, staleTxs...) + prevLongestTxs := make([]store.TransactionBlock, 0) + prevStaleTxs := make([]store.TransactionBlock, 0) - txsToPublish := make([]*blocktx_api.TransactionBlock, len(txsCombined)) - - for i, tx := range txsCombined { - txsToPublish[i] = &blocktx_api.TransactionBlock{ - TransactionHash: tx.TxHash, - BlockHash: tx.BlockHash, - BlockHeight: tx.BlockHeight, - MerklePath: tx.MerklePath, + for _, tx := range registeredTxs { + switch tx.BlockStatus { + case blocktx_api.Status_LONGEST: + prevLongestTxs = append(prevLongestTxs, tx) + case blocktx_api.Status_STALE: + prevStaleTxs = append(prevStaleTxs, tx) + default: + // do nothing - ignore ORPHANED and UNKNOWN blocks } } + minedTxs, staleTxs := findMinedAndStaleTxs(prevStaleTxs, prevLongestTxs) + + txsToPublish := append(minedTxs, staleTxs...) + return txsToPublish, nil } @@ -754,6 +768,54 @@ func (p *Processor) storeTransactions(ctx context.Context, blockId uint64, block return nil } +// getStaleTxs returns all transactions from a given STALE block that are not in the longest chain +func (p *Processor) getStaleTxs(ctx context.Context, blockId uint64) ([]store.TransactionBlock, error) { + // 1. Find registered txs from the given STALE block + // 2. Check for those transactions in the longest chain + // 3. 
Return only those registered txs from the STALE block that are not found in the longest chain + + registeredTxs, err := p.store.GetRegisteredTransactions(ctx, blockId) + if err != nil { + return nil, err + } + + registeredHashes := make([][]byte, len(registeredTxs)) + for i, tx := range registeredTxs { + registeredHashes[i] = tx.TxHash + } + + minedTxs, err := p.store.GetMinedTransactions(ctx, registeredHashes) + if err != nil { + return nil, err + } + + minedTxsMap := make(map[string]bool) + for _, tx := range minedTxs { + minedTxsMap[string(tx.TxHash)] = true + } + + staleTxs := make([]store.TransactionBlock, 0) + + for _, tx := range registeredTxs { + if minedTxsMap[string(tx.TxHash)] { + continue + } + + staleTxs = append(staleTxs, tx) + } + + return staleTxs, nil +} + +const ( + hoursPerDay = 24 + blocksPerHour = 6 +) + +func (p *Processor) getRetentionHeightRange() int { + return p.dataRetentionDays * hoursPerDay * blocksPerHour +} + func (p *Processor) Shutdown() { p.cancelAll() p.waitGroup.Wait() diff --git a/internal/blocktx/processor_helpers.go b/internal/blocktx/processor_helpers.go index 23eb5f77c..c63090c98 100644 --- a/internal/blocktx/processor_helpers.go +++ b/internal/blocktx/processor_helpers.go @@ -51,8 +51,8 @@ func getLowestHeight(blocks []*blocktx_api.Block) uint64 { return lowest } -func findMinedAndStaleTxs(prevStaleTxs, prevLongestTxs []store.GetMinedTransactionResult) (nowMinedTxs, nowStaleTxs []store.GetMinedTransactionResult) { - prevStaleMap := make(map[string]store.GetMinedTransactionResult) +func findMinedAndStaleTxs(prevStaleTxs, prevLongestTxs []store.TransactionBlock) (nowMinedTxs, nowStaleTxs []store.TransactionBlock) { + prevStaleMap := make(map[string]store.TransactionBlock) for _, tx := range prevStaleTxs { prevStaleMap[string(tx.TxHash)] = tx diff --git a/internal/blocktx/processor_helpers_test.go b/internal/blocktx/processor_helpers_test.go index cf4b1a2e5..2aa6c0e54 100644 --- a/internal/blocktx/processor_helpers_test.go +++ b/internal/blocktx/processor_helpers_test.go @@ -5,6 +5,7 @@ import ( "testing" "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" + "github.com/bitcoin-sv/arc/internal/blocktx/store" "github.com/stretchr/testify/require" ) @@ -35,6 +36,53 @@ func TestGetLowestHeight(t *testing.T) { require.Equal(t, uint64(4), lowestHeight) } +func TestFindMinedAndStaleTxs(t *testing.T) { + // given + prevStaleTxs := []store.TransactionBlock{ + { + TxHash: []byte("1"), + }, + { + TxHash: []byte("2"), + }, + } + prevLongestTxs := []store.TransactionBlock{ + { + TxHash: []byte("A"), + }, + { + TxHash: []byte("B"), + }, + { + TxHash: []byte("1"), + }, + } + + expectedMinedTxs := []store.TransactionBlock{ + { + TxHash: []byte("1"), + }, + { + TxHash: []byte("2"), + }, + } + expectedStaleTxs := []store.TransactionBlock{ + { + TxHash: []byte("A"), + }, + { + TxHash: []byte("B"), + }, + } + + // when + actualMinedTxs, actualStaleTxs := findMinedAndStaleTxs(prevStaleTxs, prevLongestTxs) + + // then + require.Equal(t, expectedMinedTxs, actualMinedTxs) + require.Equal(t, expectedStaleTxs, actualStaleTxs) +} + func TestChainWork(t *testing.T) { testCases := []struct { height int diff --git a/internal/blocktx/processor_test.go b/internal/blocktx/processor_test.go index 9c931d683..9244a47db 100644 --- a/internal/blocktx/processor_test.go +++ b/internal/blocktx/processor_test.go @@ -177,7 +177,7 @@ func TestHandleBlock(t *testing.T) { GetBlockHashesProcessingInProgressFunc: func(_ context.Context, _ string) ([]*chainhash.Hash, error) { return nil, nil 
}, } - storeMock.UpsertBlockTransactionsFunc = func(_ context.Context, _ uint64, txsWithMerklePaths []store.TxWithMerklePath) ([]store.TxWithMerklePath, error) { + storeMock.UpsertBlockTransactionsFunc = func(_ context.Context, _ uint64, txsWithMerklePaths []store.TxWithMerklePath) error { require.LessOrEqual(t, len(txsWithMerklePaths), batchSize) for _, txWithMr := range txsWithMerklePaths { @@ -187,7 +187,7 @@ func TestHandleBlock(t *testing.T) { actualInsertedBlockTransactions = append(actualInsertedBlockTransactions, tx[:]) } - return txsWithMerklePaths, nil + return nil } mq := &mocks.MessageQueueClientMock{ @@ -341,13 +341,10 @@ func TestHandleBlockReorg(t *testing.T) { mtx.Lock() insertedBlock = block mtx.Unlock() - return 1, nil + return 1, errors.New("dummy error") // return error here so we don't have to override next db functions }, - MarkBlockAsDoneFunc: func(_ context.Context, _ *chainhash.Hash, _ uint64, _ uint64) error { - return nil - }, - UpsertBlockTransactionsFunc: func(_ context.Context, _ uint64, _ []store.TxWithMerklePath) ([]store.TxWithMerklePath, error) { - return []store.TxWithMerklePath{}, nil + DelBlockProcessingFunc: func(ctx context.Context, hash *chainhash.Hash, processedBy string) (int64, error) { + return 0, nil }, } @@ -635,12 +632,12 @@ func TestStartProcessRequestTxs(t *testing.T) { t.Run(tc.name, func(t *testing.T) { // given storeMock := &storeMocks.BlocktxStoreMock{ - GetMinedTransactionsFunc: func(_ context.Context, hashes []*chainhash.Hash) ([]store.GetMinedTransactionResult, error) { + GetMinedTransactionsFunc: func(ctx context.Context, hashes [][]byte) ([]store.TransactionBlock, error) { for _, hash := range hashes { - require.Equal(t, testdata.TX1Hash, hash) + require.Equal(t, testdata.TX1Hash[:], hash) } - return []store.GetMinedTransactionResult{{ + return []store.TransactionBlock{{ TxHash: testdata.TX1Hash[:], BlockHash: testdata.Block1Hash[:], BlockHeight: 1, diff --git a/internal/blocktx/store/mocks/blocktx_store_mock.go b/internal/blocktx/store/mocks/blocktx_store_mock.go index 8a9451d33..8101fcb19 100644 --- a/internal/blocktx/store/mocks/blocktx_store_mock.go +++ b/internal/blocktx/store/mocks/blocktx_store_mock.go @@ -48,9 +48,15 @@ var _ store.BlocktxStore = &BlocktxStoreMock{} // GetLongestChainFromHeightFunc: func(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) { // panic("mock out the GetLongestChainFromHeight method") // }, -// GetMinedTransactionsFunc: func(ctx context.Context, hashes []*chainhash.Hash) ([]store.GetMinedTransactionResult, error) { +// GetMinedTransactionsFunc: func(ctx context.Context, hashes [][]byte) ([]store.TransactionBlock, error) { // panic("mock out the GetMinedTransactions method") // }, +// GetRegisteredTransactionsFunc: func(ctx context.Context, blockId uint64) ([]store.TransactionBlock, error) { +// panic("mock out the GetRegisteredTransactions method") +// }, +// GetRegisteredTxsByBlockHashesFunc: func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { +// panic("mock out the GetRegisteredTxsByBlockHashes method") +// }, // GetStaleChainBackFromHashFunc: func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { // panic("mock out the GetStaleChainBackFromHash method") // }, @@ -72,10 +78,14 @@ var _ store.BlocktxStore = &BlocktxStoreMock{} // UpdateBlocksStatusesFunc: func(ctx context.Context, blockStatusUpdates []store.BlockStatusUpdate) error { // panic("mock out the UpdateBlocksStatuses method") // }, +<<<<<<< HEAD // UpsertBlockFunc: 
func(ctx context.Context, block *blocktx_api.Block) (uint64, error) { // panic("mock out the UpsertBlock method") // }, // UpsertBlockTransactionsFunc: func(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) ([]store.TxWithMerklePath, error) { +======= +// UpsertBlockTransactionsFunc: func(ctx context.Context, blockId uint64, txsWithMerklePaths []store.TxWithMerklePath) error { +>>>>>>> 35a3353a (feat: improved way of transactions publishing to metamorph, store methods refactor and test coverage) // panic("mock out the UpsertBlockTransactions method") // }, // VerifyMerkleRootsFunc: func(ctx context.Context, merkleRoots []*blocktx_api.MerkleRootVerificationRequest, maxAllowedBlockHeightMismatch int) (*blocktx_api.MerkleRootVerificationResponse, error) { @@ -116,7 +126,13 @@ type BlocktxStoreMock struct { GetLongestChainFromHeightFunc func(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) // GetMinedTransactionsFunc mocks the GetMinedTransactions method. - GetMinedTransactionsFunc func(ctx context.Context, hashes []*chainhash.Hash) ([]store.GetMinedTransactionResult, error) + GetMinedTransactionsFunc func(ctx context.Context, hashes [][]byte) ([]store.TransactionBlock, error) + + // GetRegisteredTransactionsFunc mocks the GetRegisteredTransactions method. + GetRegisteredTransactionsFunc func(ctx context.Context, blockId uint64) ([]store.TransactionBlock, error) + + // GetRegisteredTxsByBlockHashesFunc mocks the GetRegisteredTxsByBlockHashes method. + GetRegisteredTxsByBlockHashesFunc func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) // GetStaleChainBackFromHashFunc mocks the GetStaleChainBackFromHash method. GetStaleChainBackFromHashFunc func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) @@ -143,7 +159,11 @@ type BlocktxStoreMock struct { UpsertBlockFunc func(ctx context.Context, block *blocktx_api.Block) (uint64, error) // UpsertBlockTransactionsFunc mocks the UpsertBlockTransactions method. +<<<<<<< HEAD UpsertBlockTransactionsFunc func(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) ([]store.TxWithMerklePath, error) +======= + UpsertBlockTransactionsFunc func(ctx context.Context, blockId uint64, txsWithMerklePaths []store.TxWithMerklePath) error +>>>>>>> 35a3353a (feat: improved way of transactions publishing to metamorph, store methods refactor and test coverage) // VerifyMerkleRootsFunc mocks the VerifyMerkleRoots method. VerifyMerkleRootsFunc func(ctx context.Context, merkleRoots []*blocktx_api.MerkleRootVerificationRequest, maxAllowedBlockHeightMismatch int) (*blocktx_api.MerkleRootVerificationResponse, error) @@ -218,7 +238,21 @@ type BlocktxStoreMock struct { // Ctx is the ctx argument value. Ctx context.Context // Hashes is the hashes argument value. - Hashes []*chainhash.Hash + Hashes [][]byte + } + // GetRegisteredTransactions holds details about calls to the GetRegisteredTransactions method. + GetRegisteredTransactions []struct { + // Ctx is the ctx argument value. + Ctx context.Context + // BlockId is the blockId argument value. + BlockId uint64 + } + // GetRegisteredTxsByBlockHashes holds details about calls to the GetRegisteredTxsByBlockHashes method. + GetRegisteredTxsByBlockHashes []struct { + // Ctx is the ctx argument value. + Ctx context.Context + // BlockHashes is the blockHashes argument value. + BlockHashes [][]byte } // GetStaleChainBackFromHash holds details about calls to the GetStaleChainBackFromHash method. 
GetStaleChainBackFromHash []struct { @@ -307,6 +341,8 @@ type BlocktxStoreMock struct { lockGetChainTip sync.RWMutex lockGetLongestChainFromHeight sync.RWMutex lockGetMinedTransactions sync.RWMutex + lockGetRegisteredTransactions sync.RWMutex + lockGetRegisteredTxsByBlockHashes sync.RWMutex lockGetStaleChainBackFromHash sync.RWMutex lockGetStats sync.RWMutex lockMarkBlockAsDone sync.RWMutex @@ -643,13 +679,13 @@ func (mock *BlocktxStoreMock) GetLongestChainFromHeightCalls() []struct { } // GetMinedTransactions calls GetMinedTransactionsFunc. -func (mock *BlocktxStoreMock) GetMinedTransactions(ctx context.Context, hashes []*chainhash.Hash) ([]store.GetMinedTransactionResult, error) { +func (mock *BlocktxStoreMock) GetMinedTransactions(ctx context.Context, hashes [][]byte) ([]store.TransactionBlock, error) { if mock.GetMinedTransactionsFunc == nil { panic("BlocktxStoreMock.GetMinedTransactionsFunc: method is nil but BlocktxStore.GetMinedTransactions was just called") } callInfo := struct { Ctx context.Context - Hashes []*chainhash.Hash + Hashes [][]byte }{ Ctx: ctx, Hashes: hashes, @@ -666,11 +702,11 @@ func (mock *BlocktxStoreMock) GetMinedTransactions(ctx context.Context, hashes [ // len(mockedBlocktxStore.GetMinedTransactionsCalls()) func (mock *BlocktxStoreMock) GetMinedTransactionsCalls() []struct { Ctx context.Context - Hashes []*chainhash.Hash + Hashes [][]byte } { var calls []struct { Ctx context.Context - Hashes []*chainhash.Hash + Hashes [][]byte } mock.lockGetMinedTransactions.RLock() calls = mock.calls.GetMinedTransactions @@ -678,6 +714,78 @@ func (mock *BlocktxStoreMock) GetMinedTransactionsCalls() []struct { return calls } +// GetRegisteredTransactions calls GetRegisteredTransactionsFunc. +func (mock *BlocktxStoreMock) GetRegisteredTransactions(ctx context.Context, blockId uint64) ([]store.TransactionBlock, error) { + if mock.GetRegisteredTransactionsFunc == nil { + panic("BlocktxStoreMock.GetRegisteredTransactionsFunc: method is nil but BlocktxStore.GetRegisteredTransactions was just called") + } + callInfo := struct { + Ctx context.Context + BlockId uint64 + }{ + Ctx: ctx, + BlockId: blockId, + } + mock.lockGetRegisteredTransactions.Lock() + mock.calls.GetRegisteredTransactions = append(mock.calls.GetRegisteredTransactions, callInfo) + mock.lockGetRegisteredTransactions.Unlock() + return mock.GetRegisteredTransactionsFunc(ctx, blockId) +} + +// GetRegisteredTransactionsCalls gets all the calls that were made to GetRegisteredTransactions. +// Check the length with: +// +// len(mockedBlocktxStore.GetRegisteredTransactionsCalls()) +func (mock *BlocktxStoreMock) GetRegisteredTransactionsCalls() []struct { + Ctx context.Context + BlockId uint64 +} { + var calls []struct { + Ctx context.Context + BlockId uint64 + } + mock.lockGetRegisteredTransactions.RLock() + calls = mock.calls.GetRegisteredTransactions + mock.lockGetRegisteredTransactions.RUnlock() + return calls +} + +// GetRegisteredTxsByBlockHashes calls GetRegisteredTxsByBlockHashesFunc. 
+func (mock *BlocktxStoreMock) GetRegisteredTxsByBlockHashes(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { + if mock.GetRegisteredTxsByBlockHashesFunc == nil { + panic("BlocktxStoreMock.GetRegisteredTxsByBlockHashesFunc: method is nil but BlocktxStore.GetRegisteredTxsByBlockHashes was just called") + } + callInfo := struct { + Ctx context.Context + BlockHashes [][]byte + }{ + Ctx: ctx, + BlockHashes: blockHashes, + } + mock.lockGetRegisteredTxsByBlockHashes.Lock() + mock.calls.GetRegisteredTxsByBlockHashes = append(mock.calls.GetRegisteredTxsByBlockHashes, callInfo) + mock.lockGetRegisteredTxsByBlockHashes.Unlock() + return mock.GetRegisteredTxsByBlockHashesFunc(ctx, blockHashes) +} + +// GetRegisteredTxsByBlockHashesCalls gets all the calls that were made to GetRegisteredTxsByBlockHashes. +// Check the length with: +// +// len(mockedBlocktxStore.GetRegisteredTxsByBlockHashesCalls()) +func (mock *BlocktxStoreMock) GetRegisteredTxsByBlockHashesCalls() []struct { + Ctx context.Context + BlockHashes [][]byte +} { + var calls []struct { + Ctx context.Context + BlockHashes [][]byte + } + mock.lockGetRegisteredTxsByBlockHashes.RLock() + calls = mock.calls.GetRegisteredTxsByBlockHashes + mock.lockGetRegisteredTxsByBlockHashes.RUnlock() + return calls +} + // GetStaleChainBackFromHash calls GetStaleChainBackFromHashFunc. func (mock *BlocktxStoreMock) GetStaleChainBackFromHash(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { if mock.GetStaleChainBackFromHashFunc == nil { @@ -971,7 +1079,11 @@ func (mock *BlocktxStoreMock) UpsertBlockCalls() []struct { } // UpsertBlockTransactions calls UpsertBlockTransactionsFunc. +<<<<<<< HEAD func (mock *BlocktxStoreMock) UpsertBlockTransactions(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) ([]store.TxWithMerklePath, error) { +======= +func (mock *BlocktxStoreMock) UpsertBlockTransactions(ctx context.Context, blockId uint64, txsWithMerklePaths []store.TxWithMerklePath) error { +>>>>>>> 35a3353a (feat: improved way of transactions publishing to metamorph, store methods refactor and test coverage) if mock.UpsertBlockTransactionsFunc == nil { panic("BlocktxStoreMock.UpsertBlockTransactionsFunc: method is nil but BlocktxStore.UpsertBlockTransactions was just called") } diff --git a/internal/blocktx/store/model.go b/internal/blocktx/store/model.go index 260df433c..e4aaaab57 100644 --- a/internal/blocktx/store/model.go +++ b/internal/blocktx/store/model.go @@ -15,11 +15,12 @@ type TxWithMerklePath struct { MerklePath string } -type GetMinedTransactionResult struct { +type TransactionBlock struct { TxHash []byte BlockHash []byte BlockHeight uint64 MerklePath string + BlockStatus blocktx_api.Status } type BlockStatusUpdate struct { diff --git a/internal/blocktx/store/postgresql/fixtures/get_mined_transactions/blocktx.block_transactions_map.yaml b/internal/blocktx/store/postgresql/fixtures/get_mined_transactions/blocktx.block_transactions_map.yaml deleted file mode 100644 index f87903b98..000000000 --- a/internal/blocktx/store/postgresql/fixtures/get_mined_transactions/blocktx.block_transactions_map.yaml +++ /dev/null @@ -1,12 +0,0 @@ -- blockid: 9736 - txid: 115361489 - merkle_path: merkle-path-1 - inserted_at: 2024-01-10 12:00:00 -- blockid: 9736 - txid: 115361490 - merkle_path: merkle-path-2 - inserted_at: 2024-01-10 12:00:00 -- blockid: 9736 - txid: 115361491 - merkle_path: merkle-path-3 - inserted_at: 2024-01-10 12:00:00 diff --git 
a/internal/blocktx/store/postgresql/fixtures/get_mined_transactions/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/get_mined_transactions/blocktx.blocks.yaml deleted file mode 100644 index 2cf6008e8..000000000 --- a/internal/blocktx/store/postgresql/fixtures/get_mined_transactions/blocktx.blocks.yaml +++ /dev/null @@ -1,11 +0,0 @@ -- inserted_at: 2024-01-10 13:06:03.375 - id: 9736 - hash: 0x6258b02da70a3e367e4c993b049fa9b76ef8f090ef9fd2010000000000000000 - prevhash: 0x000000000000000001a7aa3999410ca53fb645851531ec0a7a5cb9ce2d4ae313 - merkleroot: 0x0d72bf92e7862df18d1935c171ca4dbb70d268b0f025e46716e913bc7e4f2bdb - height: 826481 - processed_at: 2024-01-10 13:06:06.122 - size: 108689370 - tx_count: 799 - orphanedyn: FALSE - merkle_path: "" diff --git a/internal/blocktx/store/postgresql/fixtures/get_mined_transactions/blocktx.transactions.yaml b/internal/blocktx/store/postgresql/fixtures/get_mined_transactions/blocktx.transactions.yaml deleted file mode 100644 index a8bffc9e8..000000000 --- a/internal/blocktx/store/postgresql/fixtures/get_mined_transactions/blocktx.transactions.yaml +++ /dev/null @@ -1,12 +0,0 @@ -- id: 115361488 - hash: 0x76732b80598326a18d3bf0a86518adbdf95d0ddc6ff6693004440f4776168c3b - inserted_at: 2024-01-09 13:00:00 -- id: 115361489 - hash: 0x164e85a5d5bc2b2372e8feaa266e5e4b7d0808f8d2b784fb1f7349c4726392b0 - inserted_at: 2024-01-10 12:00:00 -- id: 115361490 - hash: 0xdbbd24251b9bb824566412395bb76a579bca3477c2d0b4cbc210a769d3bb4177 - inserted_at: 2024-01-10 12:00:00 -- id: 115361491 - hash: 0x0d60dd6dc1f2649efb2847f801dfaa61361a438deb526da2de5b6875e0016514 - inserted_at: 2024-01-10 12:00:00 diff --git a/internal/blocktx/store/postgresql/fixtures/get_registered_txs_by_block_hashes/blocktx.block_transactions_map.yaml b/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.block_transactions_map.yaml similarity index 100% rename from internal/blocktx/store/postgresql/fixtures/get_registered_txs_by_block_hashes/blocktx.block_transactions_map.yaml rename to internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.block_transactions_map.yaml diff --git a/internal/blocktx/store/postgresql/fixtures/get_registered_txs_by_block_hashes/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.blocks.yaml similarity index 100% rename from internal/blocktx/store/postgresql/fixtures/get_registered_txs_by_block_hashes/blocktx.blocks.yaml rename to internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.blocks.yaml diff --git a/internal/blocktx/store/postgresql/fixtures/get_registered_txs_by_block_hashes/blocktx.transactions.yaml b/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.transactions.yaml similarity index 96% rename from internal/blocktx/store/postgresql/fixtures/get_registered_txs_by_block_hashes/blocktx.transactions.yaml rename to internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.transactions.yaml index 9fd3af32c..dc776a5a1 100644 --- a/internal/blocktx/store/postgresql/fixtures/get_registered_txs_by_block_hashes/blocktx.transactions.yaml +++ b/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.transactions.yaml @@ -1,4 +1,4 @@ -# txs from block 1 +# txs from block 1 - LONGEST - id: 1 hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853 inserted_at: 2023-12-10 14:00:00 @@ -20,7 +20,7 @@ inserted_at: 2023-12-10 14:00:00 is_registered: false -# txs from block 2 +# txs from block 2 - STALE - id: 6 hash: 
0x213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0 inserted_at: 2023-12-15 14:00:00 diff --git a/internal/blocktx/store/postgresql/get_mined_transactions.go b/internal/blocktx/store/postgresql/get_mined_transactions.go deleted file mode 100644 index 525e03adc..000000000 --- a/internal/blocktx/store/postgresql/get_mined_transactions.go +++ /dev/null @@ -1,64 +0,0 @@ -package postgresql - -import ( - "context" - - "github.com/lib/pq" - - "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" - "github.com/bitcoin-sv/arc/internal/blocktx/store" - "github.com/bitcoin-sv/arc/internal/tracing" -) - -func (p *PostgreSQL) GetMinedTransactions(ctx context.Context, hashes [][]byte, blockStatus blocktx_api.Status) (result []store.GetMinedTransactionResult, err error) { - ctx, span := tracing.StartTracing(ctx, "GetMinedTransactions", p.tracingEnabled, p.tracingAttributes...) - defer func() { - tracing.EndTracing(span, err) - }() - - result = make([]store.GetMinedTransactionResult, 0, len(hashes)) - - q := ` - SELECT - t.hash, - b.hash, - b.height, - m.merkle_path - FROM blocktx.transactions AS t - JOIN blocktx.block_transactions_map AS m ON t.id = m.txid - JOIN blocktx.blocks AS b ON m.blockid = b.id - WHERE t.hash = ANY($1) AND b.status = $2 - ` - - rows, err := p.db.QueryContext(ctx, q, pq.Array(hashes), blockStatus) - if err != nil { - return nil, err - } - defer rows.Close() - - for rows.Next() { - var txHash []byte - var blockHash []byte - var blockHeight uint64 - var merklePath string - - err = rows.Scan( - &txHash, - &blockHash, - &blockHeight, - &merklePath, - ) - if err != nil { - return nil, err - } - - result = append(result, store.GetMinedTransactionResult{ - TxHash: txHash, - BlockHash: blockHash, - BlockHeight: blockHeight, - MerklePath: merklePath, - }) - } - - return result, nil -} diff --git a/internal/blocktx/store/postgresql/get_registered_transactions.go b/internal/blocktx/store/postgresql/get_registered_transactions.go deleted file mode 100644 index 5ce802a9b..000000000 --- a/internal/blocktx/store/postgresql/get_registered_transactions.go +++ /dev/null @@ -1,48 +0,0 @@ -package postgresql - -import ( - "context" - "errors" - "fmt" - - "github.com/bitcoin-sv/arc/internal/blocktx/store" -) - -func (p *PostgreSQL) GetRegisteredTransactions(ctx context.Context, blockId uint64) (registeredTxs []store.TxWithMerklePath, err error) { - qRegisteredTransactions := ` - SELECT - t.hash, - m.merkle_path - FROM blocktx.transactions t - JOIN blocktx.block_transactions_map AS m ON t.id = m.txid - WHERE m.blockid = $1 AND t.is_registered = TRUE - ` - - rows, err := p.db.QueryContext(ctx, qRegisteredTransactions, blockId) - if err != nil { - return nil, fmt.Errorf("failed to get registered transactions for block with id %d: %v", blockId, err) - } - defer rows.Close() - - registeredRows := make([]store.TxWithMerklePath, 0) - - for rows.Next() { - var txHash []byte - var merklePath string - err = rows.Scan(&txHash, &merklePath) - if err != nil { - return nil, errors.Join(store.ErrFailedToGetRows, err) - } - - registeredRows = append(registeredRows, store.TxWithMerklePath{ - Hash: txHash, - MerklePath: merklePath, - }) - } - - if err := rows.Err(); err != nil { - return nil, fmt.Errorf("error getting registered transactions for block with id %d: %v", blockId, err) - } - - return registeredRows, nil -} diff --git a/internal/blocktx/store/postgresql/get_registered_txs_by_block_hashes.go b/internal/blocktx/store/postgresql/get_registered_txs_by_block_hashes.go deleted file mode 100644 
index 509674c0c..000000000 --- a/internal/blocktx/store/postgresql/get_registered_txs_by_block_hashes.go +++ /dev/null @@ -1,67 +0,0 @@ -package postgresql - -import ( - "context" - - "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" - "github.com/bitcoin-sv/arc/internal/blocktx/store" - "github.com/lib/pq" -) - -func (p *PostgreSQL) GetRegisteredTxsByBlockHashes(ctx context.Context, blockHashes [][]byte) (longestTxs []store.GetMinedTransactionResult, staleTxs []store.GetMinedTransactionResult, err error) { - q := ` - SELECT - t.hash, - b.hash, - b.height, - m.merkle_path, - b.status - FROM blocktx.blocks AS b - JOIN blocktx.block_transactions_map AS m ON m.blockid = b.id - JOIN blocktx.transactions AS t ON t.id = m.txid AND t.is_registered = TRUE - WHERE b.hash = ANY($1) - ` - - rows, err := p.db.QueryContext(ctx, q, pq.Array(blockHashes)) - if err != nil { - return - } - defer rows.Close() - - for rows.Next() { - var txHash []byte - var blockHash []byte - var blockHeight uint64 - var merklePath string - var status blocktx_api.Status - - err = rows.Scan( - &txHash, - &blockHash, - &blockHeight, - &merklePath, - &status, - ) - if err != nil { - return - } - - result := store.GetMinedTransactionResult{ - TxHash: txHash, - BlockHash: blockHash, - BlockHeight: blockHeight, - MerklePath: merklePath, - } - - switch status { - case blocktx_api.Status_LONGEST: - longestTxs = append(longestTxs, result) - case blocktx_api.Status_STALE: - staleTxs = append(staleTxs, result) - default: - // do nothing - ignore ORPHANED and UNKNOWN blocks - } - } - - return -} diff --git a/internal/blocktx/store/postgresql/get_transactions.go b/internal/blocktx/store/postgresql/get_transactions.go new file mode 100644 index 000000000..48bb16482 --- /dev/null +++ b/internal/blocktx/store/postgresql/get_transactions.go @@ -0,0 +1,87 @@ +package postgresql + +import ( + "context" + + "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" + "github.com/bitcoin-sv/arc/internal/blocktx/store" + "github.com/lib/pq" + "go.opentelemetry.io/otel/trace" +) + +func (p *PostgreSQL) GetMinedTransactions(ctx context.Context, hashes [][]byte) ([]store.TransactionBlock, error) { + if tracer != nil { + var span trace.Span + ctx, span = tracer.Start(ctx, "GetMinedTransactions") + defer span.End() + } + + predicate := "WHERE t.hash = ANY($1) AND b.status = $2" + + return p.getTransactionBlocksByPredicate(ctx, predicate, pq.Array(hashes), blocktx_api.Status_LONGEST) +} + +func (p *PostgreSQL) GetRegisteredTransactions(ctx context.Context, blockId uint64) ([]store.TransactionBlock, error) { + predicate := "WHERE m.blockid = $1 AND t.is_registered = TRUE" + + return p.getTransactionBlocksByPredicate(ctx, predicate, blockId) +} + +func (p *PostgreSQL) GetRegisteredTxsByBlockHashes(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { + predicate := "WHERE b.hash = ANY($1) AND t.is_registered = TRUE" + + return p.getTransactionBlocksByPredicate(ctx, predicate, pq.Array(blockHashes)) +} + +func (p *PostgreSQL) getTransactionBlocksByPredicate(ctx context.Context, predicate string, predicateParams ...any) ([]store.TransactionBlock, error) { + transactionBlocks := make([]store.TransactionBlock, 0) + + q := ` + SELECT + t.hash, + b.hash, + b.height, + m.merkle_path, + b.status + FROM blocktx.transactions AS t + JOIN blocktx.block_transactions_map AS m ON t.id = m.txid + JOIN blocktx.blocks AS b ON m.blockid = b.id + ` + q += " " + predicate + + rows, err := p.db.QueryContext(ctx, q, predicateParams...) 
+ if err != nil { + return nil, err + } + defer rows.Close() + + for rows.Next() { + var txHash []byte + var blockHash []byte + var blockHeight uint64 + var merklePath string + var blockStatus blocktx_api.Status + + err = rows.Scan( + &txHash, + &blockHash, + &blockHeight, + &merklePath, + &blockStatus, + ) + if err != nil { + return nil, err + } + + transactionBlocks = append(transactionBlocks, store.TransactionBlock{ + TxHash: txHash, + BlockHash: blockHash, + BlockHeight: blockHeight, + MerklePath: merklePath, + BlockStatus: blockStatus, + }) + + } + + return transactionBlocks, nil +} diff --git a/internal/blocktx/store/postgresql/postgres_test.go b/internal/blocktx/store/postgresql/postgres_test.go index c63cb1a96..a90fdef20 100644 --- a/internal/blocktx/store/postgresql/postgres_test.go +++ b/internal/blocktx/store/postgresql/postgres_test.go @@ -286,50 +286,6 @@ func TestPostgresDB(t *testing.T) { } }) - t.Run("get registered txs by block hashes", func(t *testing.T) { - // given - prepareDb(t, postgresDB.db, "fixtures/get_registered_txs_by_block_hashes") - - blockHashLongest := testutils.RevChainhash(t, "000000000000000005aa39a25e7e8bf440c270ec9a1bd30e99ab026f39207ef9") - blockHashStale := testutils.RevChainhash(t, "0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7") - - blockHashes := [][]byte{ - blockHashLongest[:], - blockHashStale[:], - } - - expectedLongestTxs := []store.GetMinedTransactionResult{ - { - TxHash: testutils.RevChainhash(t, "21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e")[:], - BlockHash: blockHashLongest[:], - BlockHeight: 822013, - MerklePath: "merkle-path-2", - }, - } - expectedStaleTxs := []store.GetMinedTransactionResult{ - { - TxHash: testutils.RevChainhash(t, "213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0")[:], - BlockHash: blockHashStale[:], - BlockHeight: 822012, - MerklePath: "merkle-path-6", - }, - { - TxHash: testutils.RevChainhash(t, "12c04cfc5643f1cd25639ad42d6f8f0489557699d92071d7e0a5b940438c4357")[:], - BlockHash: blockHashStale[:], - BlockHeight: 822012, - MerklePath: "merkle-path-7", - }, - } - - // when - longestTxs, staleTxs, err := postgresDB.GetRegisteredTxsByBlockHashes(ctx, blockHashes) - - // then - require.NoError(t, err) - require.Equal(t, expectedLongestTxs, longestTxs) - require.Equal(t, expectedStaleTxs, staleTxs) - }) - t.Run("update blocks statuses", func(t *testing.T) { // given prepareDb(t, postgresDB.db, "fixtures/update_blocks_statuses") @@ -368,30 +324,115 @@ func TestPostgresDB(t *testing.T) { require.Equal(t, blocktx_api.Status_LONGEST, stale4.Status) }) - t.Run("test getting mined txs", func(t *testing.T) { + t.Run("get mined txs", func(t *testing.T) { // given - prepareDb(t, postgresDB.db, "fixtures/get_mined_transactions") + prepareDb(t, postgresDB.db, "fixtures/get_transactions") + + txHash1 := testutils.RevChainhash(t, "cd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853") + txHash2 := testutils.RevChainhash(t, "21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e") + txHash3 := testutils.RevChainhash(t, "213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0") // should not be found - from STALE block - txHash1 := testutils.RevChainhash(t, "76732b80598326a18d3bf0a86518adbdf95d0ddc6ff6693004440f4776168c3b") - txHash2 := testutils.RevChainhash(t, "164e85a5d5bc2b2372e8feaa266e5e4b7d0808f8d2b784fb1f7349c4726392b0") - txHash3 := testutils.RevChainhash(t, "dbbd24251b9bb824566412395bb76a579bca3477c2d0b4cbc210a769d3bb4177") - txHash4 := 
testutils.RevChainhash(t, "0d60dd6dc1f2649efb2847f801dfaa61361a438deb526da2de5b6875e0016514") + blockHash := testutils.RevChainhash(t, "000000000000000005aa39a25e7e8bf440c270ec9a1bd30e99ab026f39207ef9") - blockHash := testutils.RevChainhash(t, "6258b02da70a3e367e4c993b049fa9b76ef8f090ef9fd2010000000000000000") + expectedTxs := []store.TransactionBlock{ + { + TxHash: txHash1[:], + BlockHash: blockHash[:], + BlockHeight: 822013, + MerklePath: "merkle-path-1", + BlockStatus: blocktx_api.Status_LONGEST, + }, + { + TxHash: txHash2[:], + BlockHash: blockHash[:], + BlockHeight: 822013, + MerklePath: "merkle-path-2", + BlockStatus: blocktx_api.Status_LONGEST, + }, + } // when - // get mined transaction and corresponding block - minedTxs, err := postgresDB.GetMinedTransactions(ctx, [][]byte{txHash1[:], txHash2[:], txHash3[:], txHash4[:]}, blocktx_api.Status_LONGEST) + actualTxs, err := postgresDB.GetMinedTransactions(ctx, [][]byte{txHash1[:], txHash2[:], txHash3[:]}) + + // then require.NoError(t, err) + require.Equal(t, expectedTxs, actualTxs) + }) + + t.Run("get registered txs", func(t *testing.T) { + // given + prepareDb(t, postgresDB.db, "fixtures/get_transactions") + + blockId := uint64(2) + blockHash := testutils.RevChainhash(t, "0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7") + + expectedTxs := []store.TransactionBlock{ + { + TxHash: testutils.RevChainhash(t, "213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0")[:], + BlockHash: blockHash[:], + BlockHeight: 822012, + MerklePath: "merkle-path-6", + BlockStatus: blocktx_api.Status_STALE, + }, + { + TxHash: testutils.RevChainhash(t, "12c04cfc5643f1cd25639ad42d6f8f0489557699d92071d7e0a5b940438c4357")[:], + BlockHash: blockHash[:], + BlockHeight: 822012, + MerklePath: "merkle-path-7", + BlockStatus: blocktx_api.Status_STALE, + }, + } + + // when + actualTxs, err := postgresDB.GetRegisteredTransactions(ctx, blockId) // then - require.Len(t, minedTxs, 3) + require.NoError(t, err) + require.Equal(t, expectedTxs, actualTxs) + }) - for _, tx := range minedTxs { - require.True(t, bytes.Equal(tx.TxHash, txHash2[:]) || bytes.Equal(tx.TxHash, txHash3[:]) || bytes.Equal(tx.TxHash, txHash4[:])) - require.Equal(t, tx.BlockHash, blockHash[:]) - require.Equal(t, uint64(826481), tx.BlockHeight) + t.Run("get registered txs by block hashes", func(t *testing.T) { + // given + prepareDb(t, postgresDB.db, "fixtures/get_transactions") + + blockHashLongest := testutils.RevChainhash(t, "000000000000000005aa39a25e7e8bf440c270ec9a1bd30e99ab026f39207ef9") + blockHashStale := testutils.RevChainhash(t, "0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7") + + blockHashes := [][]byte{ + blockHashLongest[:], + blockHashStale[:], } + + expectedTxs := []store.TransactionBlock{ + { + TxHash: testutils.RevChainhash(t, "21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e")[:], + BlockHash: blockHashLongest[:], + BlockHeight: 822013, + MerklePath: "merkle-path-2", + BlockStatus: blocktx_api.Status_LONGEST, + }, + { + TxHash: testutils.RevChainhash(t, "213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0")[:], + BlockHash: blockHashStale[:], + BlockHeight: 822012, + MerklePath: "merkle-path-6", + BlockStatus: blocktx_api.Status_STALE, + }, + { + TxHash: testutils.RevChainhash(t, "12c04cfc5643f1cd25639ad42d6f8f0489557699d92071d7e0a5b940438c4357")[:], + BlockHash: blockHashStale[:], + BlockHeight: 822012, + MerklePath: "merkle-path-7", + BlockStatus: blocktx_api.Status_STALE, + }, + } + + // when + actualTxs, err 
:= postgresDB.GetRegisteredTxsByBlockHashes(ctx, blockHashes) + + // then + require.NoError(t, err) + require.Equal(t, expectedTxs, actualTxs) }) t.Run("clear data", func(t *testing.T) { @@ -542,7 +583,6 @@ func TestPostgresStore_UpsertBlockTransactions(t *testing.T) { name string txsWithMerklePaths []store.TxWithMerklePath - expectedErr error expectedUpdatedResLen int upsertRepeat bool }{ @@ -626,17 +666,16 @@ func TestPostgresStore_UpsertBlockTransactions(t *testing.T) { testBlockID := uint64(9736) // when - res, err := sut.UpsertBlockTransactions(ctx, testBlockID, tc.txsWithMerklePaths) + err := sut.UpsertBlockTransactions(ctx, testBlockID, tc.txsWithMerklePaths) if tc.upsertRepeat { - res, err = sut.UpsertBlockTransactions(ctx, testBlockID, tc.txsWithMerklePaths) + err = sut.UpsertBlockTransactions(ctx, testBlockID, tc.txsWithMerklePaths) require.NoError(t, err) } // then - if tc.expectedErr != nil { - require.ErrorIs(t, err, tc.expectedErr) - return - } + require.NoError(t, err) + + res, err := sut.GetRegisteredTransactions(ctx, testBlockID) require.NoError(t, err) require.Equal(t, tc.expectedUpdatedResLen, len(res)) @@ -644,7 +683,7 @@ func TestPostgresStore_UpsertBlockTransactions(t *testing.T) { // assert correctness of returned values // assume registered transactions are at the beginning of tc.txs for i := 0; i < tc.expectedUpdatedResLen; i++ { - require.True(t, bytes.Equal(tc.txsWithMerklePaths[i].Hash, res[i].Hash)) + require.True(t, bytes.Equal(tc.txsWithMerklePaths[i].Hash, res[i].TxHash)) require.Equal(t, tc.txsWithMerklePaths[i].MerklePath, res[i].MerklePath) } @@ -702,30 +741,32 @@ func TestPostgresStore_UpsertBlockTransactions_CompetingBlocks(t *testing.T) { }, } - expected := []store.GetMinedTransactionResult{ + expected := []store.TransactionBlock{ { TxHash: txHash[:], BlockHash: testutils.RevChainhash(t, "6258b02da70a3e367e4c993b049fa9b76ef8f090ef9fd2010000000000000000")[:], BlockHeight: uint64(826481), MerklePath: "merkle-path-1", + BlockStatus: blocktx_api.Status_LONGEST, }, { TxHash: txHash[:], BlockHash: testutils.RevChainhash(t, "7258b02da70a3e367e4c993b049fa9b76ef8f090ef9fd2010000000000000000")[:], BlockHeight: uint64(826481), MerklePath: "merkle-path-2", + BlockStatus: blocktx_api.Status_LONGEST, }, } // when - _, err := sut.UpsertBlockTransactions(ctx, testBlockID, txsWithMerklePaths) + err := sut.UpsertBlockTransactions(ctx, testBlockID, txsWithMerklePaths) require.NoError(t, err) - _, err = sut.UpsertBlockTransactions(ctx, competingBlockID, competingTxsWithMerklePaths) + err = sut.UpsertBlockTransactions(ctx, competingBlockID, competingTxsWithMerklePaths) require.NoError(t, err) // then - actual, err := sut.GetMinedTransactions(ctx, [][]byte{txHash[:]}, blocktx_api.Status_LONGEST) + actual, err := sut.GetMinedTransactions(ctx, [][]byte{txHash[:]}) require.NoError(t, err) require.ElementsMatch(t, expected, actual) diff --git a/internal/blocktx/store/postgresql/upsert_block_transactions.go b/internal/blocktx/store/postgresql/upsert_block_transactions.go index 6b3a69983..3e9e9a2bd 100644 --- a/internal/blocktx/store/postgresql/upsert_block_transactions.go +++ b/internal/blocktx/store/postgresql/upsert_block_transactions.go @@ -11,7 +11,7 @@ import ( "github.com/bitcoin-sv/arc/internal/tracing" ) -// UpsertBlockTransactions upserts the transaction hashes for a given block hash and returns updated registered transactions hashes. +// UpsertBlockTransactions upserts the transaction hashes for a given block hash. 
func (p *PostgreSQL) UpsertBlockTransactions(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) (err error) { ctx, span := tracing.StartTracing(ctx, "UpsertBlockTransactions", p.tracingEnabled, append(p.tracingAttributes, attribute.Int("updates", len(txsWithMerklePaths)))...) defer func() { diff --git a/internal/blocktx/store/store.go b/internal/blocktx/store/store.go index 9f2d7ade7..cee50ca62 100644 --- a/internal/blocktx/store/store.go +++ b/internal/blocktx/store/store.go @@ -37,11 +37,11 @@ type BlocktxStore interface { MarkBlockAsDone(ctx context.Context, hash *chainhash.Hash, size uint64, txCount uint64) error GetBlockGaps(ctx context.Context, heightRange int) ([]*BlockGap, error) ClearBlocktxTable(ctx context.Context, retentionDays int32, table string) (*blocktx_api.RowsAffectedResponse, error) - GetMinedTransactions(ctx context.Context, hashes [][]byte, blockStatus blocktx_api.Status) ([]GetMinedTransactionResult, error) + GetMinedTransactions(ctx context.Context, hashes [][]byte) ([]TransactionBlock, error) GetLongestChainFromHeight(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) GetStaleChainBackFromHash(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) - GetRegisteredTransactions(ctx context.Context, blockId uint64) (registeredTxs []TxWithMerklePath, err error) - GetRegisteredTxsByBlockHashes(ctx context.Context, blockHashes [][]byte) (longestTxs []GetMinedTransactionResult, staleTxs []GetMinedTransactionResult, err error) + GetRegisteredTransactions(ctx context.Context, blockId uint64) ([]TransactionBlock, error) + GetRegisteredTxsByBlockHashes(ctx context.Context, blockHashes [][]byte) ([]TransactionBlock, error) UpdateBlocksStatuses(ctx context.Context, blockStatusUpdates []BlockStatusUpdate) error GetStats(ctx context.Context) (*Stats, error) From 2c39e412f41fba077efefa6b32057923f2989289 Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Wed, 9 Oct 2024 15:33:51 +0200 Subject: [PATCH 04/36] feat: improve logs --- .../blocktx/integration_test/reorg_integration_test.go | 5 +++-- internal/blocktx/processor.go | 3 +-- internal/blocktx/processor_helpers.go | 9 +++++++++ 3 files changed, 13 insertions(+), 4 deletions(-) diff --git a/internal/blocktx/integration_test/reorg_integration_test.go b/internal/blocktx/integration_test/reorg_integration_test.go index 073014ed7..023c3344d 100644 --- a/internal/blocktx/integration_test/reorg_integration_test.go +++ b/internal/blocktx/integration_test/reorg_integration_test.go @@ -11,10 +11,11 @@ package integrationtest // 3. This block has a chainwork lower than the current tip of chain - becomes STALE // 4. Next competing block, at height 822016 is being send through the mocked PeerHandler // 5. This block has a greater chainwork than the current tip of longest chain - it becomes LONGEST despite not being the highest +// 6. 
Verification of reorg - checking if statuses are correctly switched // // Todo: Next tasks: -// - Verify if reorg was performed correctly, if previous blocks have updated statuses -// - Include metamorph in this test and verify that transactions statuses are properly updated +// - Verify that transactions are properly updated in blocktx store +// - Include mock metamorph in this test and verify that transactions statuses are properly updated import ( "context" diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index 65cfe9c90..4069d2861 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -549,8 +549,7 @@ func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { // change that receiver method in metamorph to accept statuses (MINED and MINED_IN_STALE_BLOCK) err = p.mqClient.PublishMarshal(MinedTxsTopic, txBlock) if err != nil { - // TODO: add txID to err log - p.logger.Error("failed to publish mined txs", slog.Uint64("height", txBlock.BlockHeight), slog.String("err", err.Error())) + p.logger.Error("failed to publish mined txs", slog.String("blockHash", getHashStringNoErr(tx.BlockHash)), slog.Uint64("height", tx.BlockHeight), slog.String("txHash", getHashStringNoErr(tx.TxHash)), slog.String("err", err.Error())) } } diff --git a/internal/blocktx/processor_helpers.go b/internal/blocktx/processor_helpers.go index c63090c98..a7ab34d8b 100644 --- a/internal/blocktx/processor_helpers.go +++ b/internal/blocktx/processor_helpers.go @@ -7,8 +7,17 @@ import ( blockchain "github.com/bitcoin-sv/arc/internal/blocktx/blockchain_communication" "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" "github.com/bitcoin-sv/arc/internal/blocktx/store" + "github.com/libsv/go-p2p/chaincfg/chainhash" ) +func getHashStringNoErr(hash []byte) string { + chash, err := chainhash.NewHash(hash) + if err != nil { + return "" + } + return chash.String() +} + func createBlock(msg *blockchain.BlockMessage, prevBlock *blocktx_api.Block, longestTipExists bool) *blocktx_api.Block { hash := msg.Header.BlockHash() prevHash := msg.Header.PrevBlock From 2a380edf87b4b974dabfe78e5436750e868675b6 Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Thu, 10 Oct 2024 18:02:43 +0200 Subject: [PATCH 05/36] feat: integration test for reorg - stale blocks --- .../blocktx.block_transactions_map.yaml | 4 + .../fixtures/blocktx.transactions.yaml | 43 +++++ .../reorg_integration_test.go | 152 ++++++++++++++---- internal/blocktx/processor.go | 14 +- 4 files changed, 175 insertions(+), 38 deletions(-) create mode 100644 internal/blocktx/integration_test/fixtures/blocktx.block_transactions_map.yaml create mode 100644 internal/blocktx/integration_test/fixtures/blocktx.transactions.yaml diff --git a/internal/blocktx/integration_test/fixtures/blocktx.block_transactions_map.yaml b/internal/blocktx/integration_test/fixtures/blocktx.block_transactions_map.yaml new file mode 100644 index 000000000..bcbec8006 --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/blocktx.block_transactions_map.yaml @@ -0,0 +1,4 @@ +- blockid: 1 + txid: 1000003 + merkle_path: merkle-path-3 + inserted_at: 2023-12-10 14:00:00 diff --git a/internal/blocktx/integration_test/fixtures/blocktx.transactions.yaml b/internal/blocktx/integration_test/fixtures/blocktx.transactions.yaml new file mode 100644 index 000000000..6f86a056f --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/blocktx.transactions.yaml @@ -0,0 +1,43 @@ +# txs from block 1 - LONGEST +- id: 1000001 + hash: 
0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853 + inserted_at: 2023-12-10 14:00:00 + is_registered: true +- id: 1000002 + hash: 0x21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e + inserted_at: 2023-12-10 14:00:00 + is_registered: false +- id: 1000003 + hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430 + inserted_at: 2023-12-10 14:00:00 + is_registered: true +# - id: 4 +# hash: 0xee76f5b746893d3e6ae6a14a15e464704f4ebd601537820933789740acdcf6aa +# inserted_at: 2023-12-10 14:00:00 +# is_registered: false +# - id: 5 +# hash: 0x3e0b5b218c344110f09bf485bc58de4ea5378e55744185edf9c1dafa40068ecd +# inserted_at: 2023-12-10 14:00:00 +# is_registered: false +# +# # txs from block 2 - STALE +# - id: 6 +# hash: 0x213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0 +# inserted_at: 2023-12-15 14:00:00 +# is_registered: true +# - id: 7 +# hash: 0x12c04cfc5643f1cd25639ad42d6f8f0489557699d92071d7e0a5b940438c4357 +# inserted_at: 2023-12-15 14:00:00 +# is_registered: true +# - id: 8 +# hash: 0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6 +# inserted_at: 2023-12-15 14:00:00 +# is_registered: false +# - id: 9 +# hash: 0x5c5b621b81fb63d9df4595ee2e6b3c50cce1f5f0e1b83510aac504931ed22799 +# inserted_at: 2023-12-15 14:00:00 +# is_registered: false +# - id: 10 +# hash: 0xa3d4e78a8e11e97c8faf34880da861412273948edf467f23590601a1057079d8 +# inserted_at: 2023-12-15 14:00:00 +# is_registered: false diff --git a/internal/blocktx/integration_test/reorg_integration_test.go b/internal/blocktx/integration_test/reorg_integration_test.go index 023c3344d..80d702d12 100644 --- a/internal/blocktx/integration_test/reorg_integration_test.go +++ b/internal/blocktx/integration_test/reorg_integration_test.go @@ -26,19 +26,22 @@ import ( "testing" "time" - _ "github.com/golang-migrate/migrate/v4/source/file" - _ "github.com/lib/pq" - "github.com/libsv/go-p2p/chaincfg/chainhash" - "github.com/libsv/go-p2p/wire" - "github.com/ory/dockertest/v3" - "github.com/stretchr/testify/require" - "github.com/bitcoin-sv/arc/internal/blocktx" blockchain "github.com/bitcoin-sv/arc/internal/blocktx/blockchain_communication" blocktx_p2p "github.com/bitcoin-sv/arc/internal/blocktx/blockchain_communication/p2p" "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" "github.com/bitcoin-sv/arc/internal/blocktx/store/postgresql" + "github.com/bitcoin-sv/arc/internal/message_queue/nats/client/nats_core" + nats_mock "github.com/bitcoin-sv/arc/internal/message_queue/nats/client/nats_core/mocks" testutils "github.com/bitcoin-sv/arc/internal/test_utils" + _ "github.com/golang-migrate/migrate/v4/source/file" + _ "github.com/lib/pq" + "github.com/libsv/go-bc" + "github.com/libsv/go-p2p/chaincfg/chainhash" + "github.com/libsv/go-p2p/wire" + "github.com/ory/dockertest/v3" + "github.com/stretchr/testify/require" + "google.golang.org/protobuf/proto" ) const ( @@ -89,7 +92,7 @@ func testmain(m *testing.M) int { return m.Run() } -func TestBlockStatus(t *testing.T) { +func TestReorg(t *testing.T) { if testing.Short() { t.Skip("skipping integration test") } @@ -104,12 +107,49 @@ func TestBlockStatus(t *testing.T) { blocktxStore, err := postgresql.New(dbInfo, 10, 80) require.NoError(t, err) + publishedTxs := make([]*blocktx_api.TransactionBlock, 0) + + mockNatsConn := &nats_mock.NatsConnectionMock{ + PublishFunc: func(subj string, data []byte) error { + serialized := &blocktx_api.TransactionBlock{} + err := proto.Unmarshal(data, serialized) + require.NoError(t, err) + + publishedTxs = 
append(publishedTxs, serialized) + return nil + }, + } + mqClient := nats_core.New(mockNatsConn, nats_core.WithLogger(logger)) + p2pMsgHandler := blocktx_p2p.NewMsgHandler(logger, blockRequestCh, blockProcessCh) - processor, err := blocktx.NewProcessor(logger, blocktxStore, blockRequestCh, blockProcessCh) + processor, err := blocktx.NewProcessor( + logger, + blocktxStore, + blockRequestCh, + blockProcessCh, + blocktx.WithMessageQueueClient(mqClient), + ) require.NoError(t, err) processor.StartBlockProcessing() + testHandleBlockOnEmptyDatabase(t, p2pMsgHandler, blocktxStore) + publishedTxs = make([]*blocktx_api.TransactionBlock, 0) // clear slice for the next test + + // only load fixtures at this point + testutils.LoadFixtures(t, dbConn, "fixtures") + + staleBlockHash, expectedTxs := testHandleStaleBlock(t, p2pMsgHandler, blocktxStore, publishedTxs) + // verify the transaction was correctly published to metamorph + verifyTxs(t, expectedTxs, publishedTxs) + // clear slice for the next test + publishedTxs = publishedTxs[:0] + + expectedTxs = testHandleReorg(t, p2pMsgHandler, blocktxStore, publishedTxs, staleBlockHash) + verifyTxs(t, expectedTxs, publishedTxs) +} + +func testHandleBlockOnEmptyDatabase(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store *postgresql.PostgreSQL) { // test for empty database edge case before inserting fixtures prevBlockHash := testutils.RevChainhash(t, "00000000000000000a00c377b260a3219b0c314763f486bc363df7aa7e22ad72") txHash, err := chainhash.NewHashFromStr("be181e91217d5f802f695e52144078f8dfbe51b8a815c3d6fb48c0d853ec683b") @@ -136,22 +176,21 @@ func TestBlockStatus(t *testing.T) { blockHashZero := blockMessage.Header.BlockHash() - block, err := blocktxStore.GetBlock(context.Background(), &blockHashZero) + block, err := store.GetBlock(context.Background(), &blockHashZero) require.NoError(t, err) require.Equal(t, uint64(822011), block.Height) require.Equal(t, blocktx_api.Status_LONGEST, block.Status) +} - // only load fixtures at this point - testutils.LoadFixtures(t, dbConn, "fixtures") - - prevBlockHash = testutils.RevChainhash(t, "f97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000") - txHash, err = chainhash.NewHashFromStr("be181e91217d5f802f695e52144078f8dfbe51b8a815c3d6fb48c0d853ec683b") - require.NoError(t, err) - merkleRoot, err = chainhash.NewHashFromStr("be181e91217d5f802f695e52144078f8dfbe51b8a815c3d6fb48c0d853ec683b") - require.NoError(t, err) +func testHandleStaleBlock(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store *postgresql.PostgreSQL, publishedTxs []*blocktx_api.TransactionBlock) (*chainhash.Hash, []*blocktx_api.TransactionBlock) { + prevBlockHash := testutils.RevChainhash(t, "f97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000") + txHash := testutils.RevChainhash(t, "cd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853") + txHash2 := testutils.RevChainhash(t, "b16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430") // should not be published - is already in the longest chain + treeStore := bc.BuildMerkleTreeStoreChainHash([]*chainhash.Hash{txHash, txHash2}) + merkleRoot := treeStore[len(treeStore)-1] // should become STALE - blockMessage = &blockchain.BlockMessage{ + blockMessage := &blockchain.BlockMessage{ Header: &wire.BlockHeader{ Version: 541065216, PrevBlock: *prevBlockHash, // block with status LONGEST at height 822014 @@ -159,27 +198,42 @@ func TestBlockStatus(t *testing.T) { Bits: 0x1d00ffff, // chainwork: "4295032833" lower than the competing block }, Height: 
uint64(822015), // competing block already exists at this height - TransactionHashes: []*chainhash.Hash{txHash}, + TransactionHashes: []*chainhash.Hash{txHash, txHash2}, } p2pMsgHandler.OnReceive(blockMessage, nil) - require.NoError(t, err) // Allow DB to process the block time.Sleep(200 * time.Millisecond) blockHashStale := blockMessage.Header.BlockHash() - block, err = blocktxStore.GetBlock(context.Background(), &blockHashStale) + block, err := store.GetBlock(context.Background(), &blockHashStale) require.NoError(t, err) require.Equal(t, uint64(822015), block.Height) require.Equal(t, blocktx_api.Status_STALE, block.Status) + // transactions expected to be published to metamorph + expectedTxs := []*blocktx_api.TransactionBlock{ + { + BlockHash: blockHashStale[:], + BlockHeight: 822015, + TransactionHash: txHash[:], + }, + } + + return &blockHashStale, expectedTxs +} + +func testHandleReorg(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store *postgresql.PostgreSQL, publishedTxs []*blocktx_api.TransactionBlock, staleBlockHash *chainhash.Hash) []*blocktx_api.TransactionBlock { + txHash := testutils.RevChainhash(t, "2ff4430eb883c6f6c0640a5d716b2d107bbc0efa5aeaa237aec796d4686b0a8f") + merkleRoot := testutils.RevChainhash(t, "2ff4430eb883c6f6c0640a5d716b2d107bbc0efa5aeaa237aec796d4686b0a8f") + // should become LONGEST // reorg should happen - blockMessage = &blockchain.BlockMessage{ + blockMessage := &blockchain.BlockMessage{ Header: &wire.BlockHeader{ Version: 541065216, - PrevBlock: blockHashStale, // block with status STALE at height 822015 + PrevBlock: *staleBlockHash, // block with status STALE at height 822015 MerkleRoot: *merkleRoot, Bits: 0x1a05db8b, // chainwork: "12301577519373468" higher than the competing block }, @@ -188,44 +242,80 @@ func TestBlockStatus(t *testing.T) { } p2pMsgHandler.OnReceive(blockMessage, nil) - require.NoError(t, err) // Allow DB to process the block and perform reorg time.Sleep(1 * time.Second) // verify that reorg happened blockHashLongest := blockMessage.Header.BlockHash() - block, err = blocktxStore.GetBlock(context.Background(), &blockHashLongest) + block, err := store.GetBlock(context.Background(), &blockHashLongest) require.NoError(t, err) require.Equal(t, uint64(822016), block.Height) require.Equal(t, blocktx_api.Status_LONGEST, block.Status) - block, err = blocktxStore.GetBlock(context.Background(), &blockHashStale) + block, err = store.GetBlock(context.Background(), staleBlockHash) require.NoError(t, err) require.Equal(t, uint64(822015), block.Height) require.Equal(t, blocktx_api.Status_LONGEST, block.Status) previouslyLongestBlockHash := testutils.RevChainhash(t, "c9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000") - block, err = blocktxStore.GetBlock(context.Background(), previouslyLongestBlockHash) + block, err = store.GetBlock(context.Background(), previouslyLongestBlockHash) require.NoError(t, err) require.Equal(t, uint64(822015), block.Height) require.Equal(t, blocktx_api.Status_STALE, block.Status) previouslyLongestBlockHash = testutils.RevChainhash(t, "e1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000") - block, err = blocktxStore.GetBlock(context.Background(), previouslyLongestBlockHash) + block, err = store.GetBlock(context.Background(), previouslyLongestBlockHash) require.NoError(t, err) require.Equal(t, uint64(822016), block.Height) require.Equal(t, blocktx_api.Status_STALE, block.Status) previouslyLongestBlockHash = testutils.RevChainhash(t, 
"76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000") - block, err = blocktxStore.GetBlock(context.Background(), previouslyLongestBlockHash) + block, err = store.GetBlock(context.Background(), previouslyLongestBlockHash) require.NoError(t, err) require.Equal(t, uint64(822017), block.Height) require.Equal(t, blocktx_api.Status_STALE, block.Status) beginningOfChain := testutils.RevChainhash(t, "f97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000") - block, err = blocktxStore.GetBlock(context.Background(), beginningOfChain) + block, err = store.GetBlock(context.Background(), beginningOfChain) require.NoError(t, err) require.Equal(t, uint64(822014), block.Height) require.Equal(t, blocktx_api.Status_LONGEST, block.Status) + + expectedTxs := []*blocktx_api.TransactionBlock{ + { + BlockHash: staleBlockHash[:], + BlockHeight: 822015, + TransactionHash: testutils.RevChainhash(t, "cd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853")[:], + }, + { + BlockHash: staleBlockHash[:], + BlockHeight: 822015, + TransactionHash: testutils.RevChainhash(t, "b16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430")[:], + }, + // { + // BlockHash: blockHashLongest[:], + // BlockHeight: 822016, + // TransactionHash: txHash[:], + // }, + } + + return expectedTxs +} + +func verifyTxs(t *testing.T, expectedTxs []*blocktx_api.TransactionBlock, publishedTxs []*blocktx_api.TransactionBlock) { + strippedTxs := make([]*blocktx_api.TransactionBlock, len(publishedTxs)) + for i, tx := range publishedTxs { + chash, err := chainhash.NewHash(tx.TransactionHash) + require.NoError(t, err) + t.Logf("published tx hash: %s", chash.String()) + strippedTxs[i] = &blocktx_api.TransactionBlock{ + BlockHash: tx.BlockHash, + BlockHeight: tx.BlockHeight, + TransactionHash: tx.TransactionHash, + // TODO: add block status + } + } + + require.ElementsMatch(t, expectedTxs, strippedTxs) } diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index 4069d2861..9eab0418e 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -478,6 +478,7 @@ func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { shouldPerformReorg := false if competing { p.logger.Info("Competing blocks found", slog.String("incoming block hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height)) + incomingBlock.Status = blocktx_api.Status_STALE hasGreatestChainwork, err := p.hasGreatestChainwork(ctx, incomingBlock) if err != nil { @@ -485,13 +486,8 @@ func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { return err } - incomingBlock.Status = blocktx_api.Status_STALE - if hasGreatestChainwork { p.logger.Info("chain reorg detected", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height)) - - incomingBlock.Status = blocktx_api.Status_LONGEST - shouldPerformReorg = true } } @@ -546,6 +542,8 @@ func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { MerklePath: tx.MerklePath, } + p.logger.Info("publishing tx", slog.String("txHash", getHashStringNoErr(tx.TxHash))) + // change that receiver method in metamorph to accept statuses (MINED and MINED_IN_STALE_BLOCK) err = p.mqClient.PublishMarshal(MinedTxsTopic, txBlock) if err != nil { @@ -629,7 +627,7 @@ func (p *Processor) hasGreatestChainwork(ctx context.Context, incomingBlock *blo } func (p *Processor) performReorg(ctx context.Context, incomingBlock *blocktx_api.Block) ([]store.TransactionBlock, error) { - staleBlocks, err := 
p.store.GetStaleChainBackFromHash(ctx, incomingBlock.PreviousHash) + staleBlocks, err := p.store.GetStaleChainBackFromHash(ctx, incomingBlock.Hash) if err != nil { return nil, err } @@ -670,6 +668,8 @@ func (p *Processor) performReorg(ctx context.Context, incomingBlock *blocktx_api return nil, err } + p.logger.Info("reorg performed successfully") + prevLongestTxs := make([]store.TransactionBlock, 0) prevStaleTxs := make([]store.TransactionBlock, 0) @@ -751,7 +751,7 @@ func (p *Processor) storeTransactions(ctx context.Context, blockId uint64, block if percentage, found := progress[txIndex+1]; found { if totalSize > 0 { - p.logger.Info(fmt.Sprintf("%d txs out of %d marked as mined", txIndex+1, totalSize), slog.Int("percentage", percentage), slog.String("hash", blockhash.String()), slog.Uint64("height", block.Height), slog.String("duration", time.Since(now).String())) + p.logger.Info(fmt.Sprintf("%d txs out of %d stored", txIndex+1, totalSize), slog.Int("percentage", percentage), slog.String("hash", blockhash.String()), slog.Uint64("height", block.Height), slog.String("duration", time.Since(now).String())) } } } From bdebf8da193283ca660aefe10546d1af267ff733 Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Fri, 11 Oct 2024 12:01:27 +0200 Subject: [PATCH 06/36] feat: integration test for reorg with edge cases --- .../blocktx/blocktx_api/blocktx_api.pb.go | 212 +++++++++--------- .../blocktx/blocktx_api/blocktx_api.proto | 1 + .../blocktx.block_transactions_map.yaml | 4 + .../fixtures/blocktx.transactions.yaml | 8 + .../reorg_integration_test.go | 37 +-- internal/blocktx/processor.go | 14 +- 6 files changed, 159 insertions(+), 117 deletions(-) diff --git a/internal/blocktx/blocktx_api/blocktx_api.pb.go b/internal/blocktx/blocktx_api/blocktx_api.pb.go index adf251b4b..7415dac45 100644 --- a/internal/blocktx/blocktx_api/blocktx_api.pb.go +++ b/internal/blocktx/blocktx_api/blocktx_api.pb.go @@ -301,6 +301,7 @@ type TransactionBlock struct { BlockHeight uint64 `protobuf:"varint,2,opt,name=block_height,json=blockHeight,proto3" json:"block_height,omitempty"` TransactionHash []byte `protobuf:"bytes,3,opt,name=transaction_hash,json=transactionHash,proto3" json:"transaction_hash,omitempty"` // Little endian MerklePath string `protobuf:"bytes,4,opt,name=merklePath,proto3" json:"merklePath,omitempty"` + BlockStatus Status `protobuf:"varint,5,opt,name=block_status,json=blockStatus,proto3,enum=blocktx_api.Status" json:"block_status,omitempty"` } func (x *TransactionBlock) Reset() { @@ -363,6 +364,13 @@ func (x *TransactionBlock) GetMerklePath() string { return "" } +func (x *TransactionBlock) GetBlockStatus() Status { + if x != nil { + return x.BlockStatus + } + return Status_UNKNOWN +} + type TransactionBlocks struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -797,7 +805,7 @@ var file_internal_blocktx_blocktx_api_blocktx_api_proto_rawDesc = []byte{ 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0c, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, - 0x22, 0x9f, 0x01, 0x0a, 0x10, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x22, 0xd7, 0x01, 0x0a, 0x10, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x1d, 0x0a, 0x0a, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x09, 
0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x61, 0x73, 0x68, 0x12, 0x21, 0x0a, 0x0c, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x65, @@ -807,87 +815,90 @@ var file_internal_blocktx_blocktx_api_blocktx_api_proto_rawDesc = []byte{ 0x0c, 0x52, 0x0f, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x61, 0x73, 0x68, 0x12, 0x1e, 0x0a, 0x0a, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x50, 0x61, - 0x74, 0x68, 0x22, 0x61, 0x0a, 0x11, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x73, 0x12, 0x4c, 0x0a, 0x12, 0x74, 0x72, 0x61, 0x6e, 0x73, - 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x73, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, - 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, - 0x63, 0x6b, 0x52, 0x11, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x42, - 0x6c, 0x6f, 0x63, 0x6b, 0x73, 0x22, 0x39, 0x0a, 0x0b, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0c, 0x52, 0x04, 0x68, 0x61, 0x73, 0x68, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, - 0x22, 0x31, 0x0a, 0x09, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x12, 0x24, 0x0a, - 0x0d, 0x72, 0x65, 0x74, 0x65, 0x6e, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x61, 0x79, 0x73, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x05, 0x52, 0x0d, 0x72, 0x65, 0x74, 0x65, 0x6e, 0x74, 0x69, 0x6f, 0x6e, 0x44, - 0x61, 0x79, 0x73, 0x22, 0x2a, 0x0a, 0x14, 0x52, 0x6f, 0x77, 0x73, 0x41, 0x66, 0x66, 0x65, 0x63, - 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x72, - 0x6f, 0x77, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x72, 0x6f, 0x77, 0x73, 0x22, - 0x48, 0x0a, 0x23, 0x44, 0x65, 0x6c, 0x55, 0x6e, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, - 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, - 0x73, 0x65, 0x64, 0x5f, 0x62, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x70, 0x72, - 0x6f, 0x63, 0x65, 0x73, 0x73, 0x65, 0x64, 0x42, 0x79, 0x22, 0x63, 0x0a, 0x1d, 0x4d, 0x65, 0x72, - 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x65, - 0x72, 0x6b, 0x6c, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x0a, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x62, - 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x04, 0x52, 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x22, 0x6f, - 0x0a, 0x1e, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x56, 0x65, 0x72, - 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x12, 0x4d, 0x0a, 0x0c, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, - 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, - 
0x5f, 0x61, 0x70, 0x69, 0x2e, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x56, + 0x74, 0x68, 0x12, 0x36, 0x0a, 0x0c, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x73, 0x74, 0x61, 0x74, + 0x75, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x13, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, + 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x0b, 0x62, + 0x6c, 0x6f, 0x63, 0x6b, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x61, 0x0a, 0x11, 0x54, 0x72, + 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x73, 0x12, + 0x4c, 0x0a, 0x12, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x62, + 0x6c, 0x6f, 0x63, 0x6b, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x62, 0x6c, + 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x11, 0x74, 0x72, 0x61, 0x6e, + 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x73, 0x22, 0x39, 0x0a, + 0x0b, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, + 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x68, 0x61, 0x73, 0x68, + 0x12, 0x16, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x22, 0x31, 0x0a, 0x09, 0x43, 0x6c, 0x65, 0x61, + 0x72, 0x44, 0x61, 0x74, 0x61, 0x12, 0x24, 0x0a, 0x0d, 0x72, 0x65, 0x74, 0x65, 0x6e, 0x74, 0x69, + 0x6f, 0x6e, 0x44, 0x61, 0x79, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0d, 0x72, 0x65, + 0x74, 0x65, 0x6e, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x61, 0x79, 0x73, 0x22, 0x2a, 0x0a, 0x14, 0x52, + 0x6f, 0x77, 0x73, 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x72, 0x6f, 0x77, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x03, 0x52, 0x04, 0x72, 0x6f, 0x77, 0x73, 0x22, 0x48, 0x0a, 0x23, 0x44, 0x65, 0x6c, 0x55, 0x6e, + 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x50, 0x72, 0x6f, + 0x63, 0x65, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x21, + 0x0a, 0x0c, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x65, 0x64, 0x5f, 0x62, 0x79, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x65, 0x64, 0x42, + 0x79, 0x22, 0x63, 0x0a, 0x1d, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x52, 0x0b, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x22, - 0x5a, 0x0a, 0x1e, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x56, 0x65, 0x72, - 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x12, 0x38, 0x0a, 0x18, 0x75, 0x6e, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, - 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x73, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x04, 0x52, 0x16, 0x75, 0x6e, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x65, 0x64, 0x42, - 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x73, 0x2a, 0x3b, 0x0a, 0x06, 0x53, - 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, - 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x4c, 0x4f, 0x4e, 0x47, 0x45, 0x53, 0x54, 0x10, 0x0a, 0x12, - 0x09, 0x0a, 0x05, 0x53, 
0x54, 0x41, 0x4c, 0x45, 0x10, 0x14, 0x12, 0x0c, 0x0a, 0x08, 0x4f, 0x52, - 0x50, 0x48, 0x41, 0x4e, 0x45, 0x44, 0x10, 0x1e, 0x32, 0xad, 0x04, 0x0a, 0x0a, 0x42, 0x6c, 0x6f, - 0x63, 0x6b, 0x54, 0x78, 0x41, 0x50, 0x49, 0x12, 0x3f, 0x0a, 0x06, 0x48, 0x65, 0x61, 0x6c, 0x74, - 0x68, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x1b, 0x2e, 0x62, 0x6c, 0x6f, 0x63, - 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x50, 0x0a, 0x11, 0x43, 0x6c, 0x65, 0x61, - 0x72, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x16, 0x2e, - 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, - 0x72, 0x44, 0x61, 0x74, 0x61, 0x1a, 0x21, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, - 0x61, 0x70, 0x69, 0x2e, 0x52, 0x6f, 0x77, 0x73, 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x4a, 0x0a, 0x0b, 0x43, 0x6c, - 0x65, 0x61, 0x72, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x73, 0x12, 0x16, 0x2e, 0x62, 0x6c, 0x6f, 0x63, - 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, - 0x61, 0x1a, 0x21, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, - 0x52, 0x6f, 0x77, 0x73, 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x58, 0x0a, 0x19, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x42, - 0x6c, 0x6f, 0x63, 0x6b, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, - 0x4d, 0x61, 0x70, 0x12, 0x16, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, - 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x1a, 0x21, 0x2e, 0x62, 0x6c, - 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x6f, 0x77, 0x73, 0x41, 0x66, - 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, - 0x12, 0x75, 0x0a, 0x1c, 0x44, 0x65, 0x6c, 0x55, 0x6e, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, - 0x64, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x69, 0x6e, 0x67, - 0x12, 0x30, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x44, - 0x65, 0x6c, 0x55, 0x6e, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x42, 0x6c, 0x6f, 0x63, - 0x6b, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, - 0x2e, 0x52, 0x6f, 0x77, 0x73, 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x6f, 0x0a, 0x11, 0x56, 0x65, 0x72, 0x69, 0x66, - 0x79, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x2b, 0x2e, 0x62, - 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x4d, 0x65, 0x72, 0x6b, 0x6c, - 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2b, 0x2e, 0x62, 0x6c, 0x6f, 0x63, - 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, - 0x6f, 0x74, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 
0x42, 0x0f, 0x5a, 0x0d, 0x2e, 0x3b, 0x62, 0x6c, - 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x33, + 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x5f, 0x72, 0x6f, 0x6f, + 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, + 0x6f, 0x6f, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x65, 0x69, + 0x67, 0x68, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, + 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x22, 0x6f, 0x0a, 0x1e, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, + 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x4d, 0x0a, 0x0c, 0x6d, 0x65, 0x72, 0x6b, + 0x6c, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2a, + 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x4d, 0x65, 0x72, + 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x0b, 0x6d, 0x65, 0x72, 0x6b, + 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x22, 0x5a, 0x0a, 0x1e, 0x4d, 0x65, 0x72, 0x6b, 0x6c, + 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x38, 0x0a, 0x18, 0x75, 0x6e, 0x76, + 0x65, 0x72, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x65, + 0x69, 0x67, 0x68, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x04, 0x52, 0x16, 0x75, 0x6e, 0x76, + 0x65, 0x72, 0x69, 0x66, 0x69, 0x65, 0x64, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x69, 0x67, + 0x68, 0x74, 0x73, 0x2a, 0x3b, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0b, 0x0a, + 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x4c, 0x4f, + 0x4e, 0x47, 0x45, 0x53, 0x54, 0x10, 0x0a, 0x12, 0x09, 0x0a, 0x05, 0x53, 0x54, 0x41, 0x4c, 0x45, + 0x10, 0x14, 0x12, 0x0c, 0x0a, 0x08, 0x4f, 0x52, 0x50, 0x48, 0x41, 0x4e, 0x45, 0x44, 0x10, 0x1e, + 0x32, 0xad, 0x04, 0x0a, 0x0a, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x54, 0x78, 0x41, 0x50, 0x49, 0x12, + 0x3f, 0x0a, 0x06, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, + 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, + 0x79, 0x1a, 0x1b, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, + 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, + 0x12, 0x50, 0x0a, 0x11, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x16, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, + 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x1a, 0x21, 0x2e, + 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x6f, 0x77, 0x73, + 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x22, 0x00, 0x12, 0x4a, 0x0a, 0x0b, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x42, 0x6c, 0x6f, 0x63, 0x6b, + 0x73, 0x12, 0x16, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, + 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x1a, 0x21, 0x2e, 0x62, 0x6c, 0x6f, 0x63, + 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x6f, 0x77, 
0x73, 0x41, 0x66, 0x66, 0x65, + 0x63, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x58, + 0x0a, 0x19, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x54, 0x72, 0x61, 0x6e, + 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x4d, 0x61, 0x70, 0x12, 0x16, 0x2e, 0x62, 0x6c, + 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, + 0x61, 0x74, 0x61, 0x1a, 0x21, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, + 0x69, 0x2e, 0x52, 0x6f, 0x77, 0x73, 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x75, 0x0a, 0x1c, 0x44, 0x65, 0x6c, 0x55, + 0x6e, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x50, 0x72, + 0x6f, 0x63, 0x65, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x12, 0x30, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, + 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x44, 0x65, 0x6c, 0x55, 0x6e, 0x66, 0x69, 0x6e, 0x69, + 0x73, 0x68, 0x65, 0x64, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, + 0x69, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x62, 0x6c, 0x6f, + 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x6f, 0x77, 0x73, 0x41, 0x66, 0x66, + 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, + 0x6f, 0x0a, 0x11, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, + 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x2b, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, + 0x70, 0x69, 0x2e, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x56, 0x65, + 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x2b, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, + 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, + 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, + 0x42, 0x0f, 0x5a, 0x0d, 0x2e, 0x3b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, + 0x69, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -925,25 +936,26 @@ var file_internal_blocktx_blocktx_api_blocktx_api_proto_depIdxs = []int32{ 13, // 0: blocktx_api.HealthResponse.timestamp:type_name -> google.protobuf.Timestamp 0, // 1: blocktx_api.Block.status:type_name -> blocktx_api.Status 6, // 2: blocktx_api.Transactions.transactions:type_name -> blocktx_api.Transaction - 4, // 3: blocktx_api.TransactionBlocks.transaction_blocks:type_name -> blocktx_api.TransactionBlock - 10, // 4: blocktx_api.MerkleRootsVerificationRequest.merkle_roots:type_name -> blocktx_api.MerkleRootVerificationRequest - 14, // 5: blocktx_api.BlockTxAPI.Health:input_type -> google.protobuf.Empty - 7, // 6: blocktx_api.BlockTxAPI.ClearTransactions:input_type -> blocktx_api.ClearData - 7, // 7: blocktx_api.BlockTxAPI.ClearBlocks:input_type -> blocktx_api.ClearData - 7, // 8: blocktx_api.BlockTxAPI.ClearBlockTransactionsMap:input_type -> blocktx_api.ClearData - 9, // 9: blocktx_api.BlockTxAPI.DelUnfinishedBlockProcessing:input_type -> blocktx_api.DelUnfinishedBlockProcessingRequest - 11, // 10: blocktx_api.BlockTxAPI.VerifyMerkleRoots:input_type -> blocktx_api.MerkleRootsVerificationRequest - 1, // 11: blocktx_api.BlockTxAPI.Health:output_type -> blocktx_api.HealthResponse - 8, // 12: blocktx_api.BlockTxAPI.ClearTransactions:output_type -> 
blocktx_api.RowsAffectedResponse - 8, // 13: blocktx_api.BlockTxAPI.ClearBlocks:output_type -> blocktx_api.RowsAffectedResponse - 8, // 14: blocktx_api.BlockTxAPI.ClearBlockTransactionsMap:output_type -> blocktx_api.RowsAffectedResponse - 8, // 15: blocktx_api.BlockTxAPI.DelUnfinishedBlockProcessing:output_type -> blocktx_api.RowsAffectedResponse - 12, // 16: blocktx_api.BlockTxAPI.VerifyMerkleRoots:output_type -> blocktx_api.MerkleRootVerificationResponse - 11, // [11:17] is the sub-list for method output_type - 5, // [5:11] is the sub-list for method input_type - 5, // [5:5] is the sub-list for extension type_name - 5, // [5:5] is the sub-list for extension extendee - 0, // [0:5] is the sub-list for field type_name + 0, // 3: blocktx_api.TransactionBlock.block_status:type_name -> blocktx_api.Status + 4, // 4: blocktx_api.TransactionBlocks.transaction_blocks:type_name -> blocktx_api.TransactionBlock + 10, // 5: blocktx_api.MerkleRootsVerificationRequest.merkle_roots:type_name -> blocktx_api.MerkleRootVerificationRequest + 14, // 6: blocktx_api.BlockTxAPI.Health:input_type -> google.protobuf.Empty + 7, // 7: blocktx_api.BlockTxAPI.ClearTransactions:input_type -> blocktx_api.ClearData + 7, // 8: blocktx_api.BlockTxAPI.ClearBlocks:input_type -> blocktx_api.ClearData + 7, // 9: blocktx_api.BlockTxAPI.ClearBlockTransactionsMap:input_type -> blocktx_api.ClearData + 9, // 10: blocktx_api.BlockTxAPI.DelUnfinishedBlockProcessing:input_type -> blocktx_api.DelUnfinishedBlockProcessingRequest + 11, // 11: blocktx_api.BlockTxAPI.VerifyMerkleRoots:input_type -> blocktx_api.MerkleRootsVerificationRequest + 1, // 12: blocktx_api.BlockTxAPI.Health:output_type -> blocktx_api.HealthResponse + 8, // 13: blocktx_api.BlockTxAPI.ClearTransactions:output_type -> blocktx_api.RowsAffectedResponse + 8, // 14: blocktx_api.BlockTxAPI.ClearBlocks:output_type -> blocktx_api.RowsAffectedResponse + 8, // 15: blocktx_api.BlockTxAPI.ClearBlockTransactionsMap:output_type -> blocktx_api.RowsAffectedResponse + 8, // 16: blocktx_api.BlockTxAPI.DelUnfinishedBlockProcessing:output_type -> blocktx_api.RowsAffectedResponse + 12, // 17: blocktx_api.BlockTxAPI.VerifyMerkleRoots:output_type -> blocktx_api.MerkleRootVerificationResponse + 12, // [12:18] is the sub-list for method output_type + 6, // [6:12] is the sub-list for method input_type + 6, // [6:6] is the sub-list for extension type_name + 6, // [6:6] is the sub-list for extension extendee + 0, // [0:6] is the sub-list for field type_name } func init() { file_internal_blocktx_blocktx_api_blocktx_api_proto_init() } diff --git a/internal/blocktx/blocktx_api/blocktx_api.proto b/internal/blocktx/blocktx_api/blocktx_api.proto index a8ccdce96..a243c34fa 100644 --- a/internal/blocktx/blocktx_api/blocktx_api.proto +++ b/internal/blocktx/blocktx_api/blocktx_api.proto @@ -65,6 +65,7 @@ message TransactionBlock { uint64 block_height = 2; bytes transaction_hash = 3; // Little endian string merklePath = 4; + Status block_status = 5; } message TransactionBlocks { diff --git a/internal/blocktx/integration_test/fixtures/blocktx.block_transactions_map.yaml b/internal/blocktx/integration_test/fixtures/blocktx.block_transactions_map.yaml index bcbec8006..1c5fa6be0 100644 --- a/internal/blocktx/integration_test/fixtures/blocktx.block_transactions_map.yaml +++ b/internal/blocktx/integration_test/fixtures/blocktx.block_transactions_map.yaml @@ -2,3 +2,7 @@ txid: 1000003 merkle_path: merkle-path-3 inserted_at: 2023-12-10 14:00:00 +- blockid: 3 + txid: 1000005 + merkle_path: merkle-path-5 + 
inserted_at: 2023-12-10 14:00:00 diff --git a/internal/blocktx/integration_test/fixtures/blocktx.transactions.yaml b/internal/blocktx/integration_test/fixtures/blocktx.transactions.yaml index 6f86a056f..33b4aa519 100644 --- a/internal/blocktx/integration_test/fixtures/blocktx.transactions.yaml +++ b/internal/blocktx/integration_test/fixtures/blocktx.transactions.yaml @@ -11,6 +11,14 @@ hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430 inserted_at: 2023-12-10 14:00:00 is_registered: true +- id: 1000004 + hash: 0x2ff4430eb883c6f6c0640a5d716b2d107bbc0efa5aeaa237aec796d4686b0a8f + inserted_at: 2023-12-10 14:00:00 + is_registered: true +- id: 1000005 + hash: 0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6 + inserted_at: 2023-12-10 14:00:00 + is_registered: true # - id: 4 # hash: 0xee76f5b746893d3e6ae6a14a15e464704f4ebd601537820933789740acdcf6aa # inserted_at: 2023-12-10 14:00:00 diff --git a/internal/blocktx/integration_test/reorg_integration_test.go b/internal/blocktx/integration_test/reorg_integration_test.go index 80d702d12..33bbe3580 100644 --- a/internal/blocktx/integration_test/reorg_integration_test.go +++ b/internal/blocktx/integration_test/reorg_integration_test.go @@ -134,7 +134,7 @@ func TestReorg(t *testing.T) { processor.StartBlockProcessing() testHandleBlockOnEmptyDatabase(t, p2pMsgHandler, blocktxStore) - publishedTxs = make([]*blocktx_api.TransactionBlock, 0) // clear slice for the next test + publishedTxs = publishedTxs[:0] // clear slice for the next test // only load fixtures at this point testutils.LoadFixtures(t, dbConn, "fixtures") @@ -218,6 +218,7 @@ func testHandleStaleBlock(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, s BlockHash: blockHashStale[:], BlockHeight: 822015, TransactionHash: txHash[:], + BlockStatus: blocktx_api.Status_STALE, }, } @@ -226,7 +227,9 @@ func testHandleStaleBlock(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, s func testHandleReorg(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store *postgresql.PostgreSQL, publishedTxs []*blocktx_api.TransactionBlock, staleBlockHash *chainhash.Hash) []*blocktx_api.TransactionBlock { txHash := testutils.RevChainhash(t, "2ff4430eb883c6f6c0640a5d716b2d107bbc0efa5aeaa237aec796d4686b0a8f") - merkleRoot := testutils.RevChainhash(t, "2ff4430eb883c6f6c0640a5d716b2d107bbc0efa5aeaa237aec796d4686b0a8f") + txHash2 := testutils.RevChainhash(t, "ee76f5b746893d3e6ae6a14a15e464704f4ebd601537820933789740acdcf6aa") + treeStore := bc.BuildMerkleTreeStoreChainHash([]*chainhash.Hash{txHash, txHash2}) + merkleRoot := treeStore[len(treeStore)-1] // should become LONGEST // reorg should happen @@ -238,7 +241,7 @@ func testHandleReorg(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store Bits: 0x1a05db8b, // chainwork: "12301577519373468" higher than the competing block }, Height: uint64(822016), // competing block already exists at this height - TransactionHashes: []*chainhash.Hash{txHash}, + TransactionHashes: []*chainhash.Hash{txHash, txHash2}, } p2pMsgHandler.OnReceive(blockMessage, nil) @@ -283,21 +286,30 @@ func testHandleReorg(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store require.Equal(t, blocktx_api.Status_LONGEST, block.Status) expectedTxs := []*blocktx_api.TransactionBlock{ - { + { // previously in stale chain BlockHash: staleBlockHash[:], BlockHeight: 822015, TransactionHash: testutils.RevChainhash(t, "cd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853")[:], + BlockStatus: blocktx_api.Status_LONGEST, }, - { + { // previously in 
longest chain - also in stale - should have blockdata updated BlockHash: staleBlockHash[:], BlockHeight: 822015, TransactionHash: testutils.RevChainhash(t, "b16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430")[:], + BlockStatus: blocktx_api.Status_LONGEST, + }, + { // newly mined from stale block that became longest after reorg + BlockHash: blockHashLongest[:], + BlockHeight: 822016, + TransactionHash: txHash[:], + BlockStatus: blocktx_api.Status_LONGEST, + }, + { // previously longest chain - not found in the new longest chain + BlockHash: previouslyLongestBlockHash[:], + BlockHeight: 822017, + TransactionHash: testutils.RevChainhash(t, "ece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6")[:], + BlockStatus: blocktx_api.Status_STALE, }, - // { - // BlockHash: blockHashLongest[:], - // BlockHeight: 822016, - // TransactionHash: txHash[:], - // }, } return expectedTxs @@ -306,14 +318,11 @@ func testHandleReorg(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store func verifyTxs(t *testing.T, expectedTxs []*blocktx_api.TransactionBlock, publishedTxs []*blocktx_api.TransactionBlock) { strippedTxs := make([]*blocktx_api.TransactionBlock, len(publishedTxs)) for i, tx := range publishedTxs { - chash, err := chainhash.NewHash(tx.TransactionHash) - require.NoError(t, err) - t.Logf("published tx hash: %s", chash.String()) strippedTxs[i] = &blocktx_api.TransactionBlock{ BlockHash: tx.BlockHash, BlockHeight: tx.BlockHeight, TransactionHash: tx.TransactionHash, - // TODO: add block status + BlockStatus: tx.BlockStatus, } } diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index 9eab0418e..3d68f4260 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -540,11 +540,11 @@ func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { BlockHeight: tx.BlockHeight, TransactionHash: tx.TxHash, MerklePath: tx.MerklePath, + BlockStatus: tx.BlockStatus, } p.logger.Info("publishing tx", slog.String("txHash", getHashStringNoErr(tx.TxHash))) - // change that receiver method in metamorph to accept statuses (MINED and MINED_IN_STALE_BLOCK) err = p.mqClient.PublishMarshal(MinedTxsTopic, txBlock) if err != nil { p.logger.Error("failed to publish mined txs", slog.String("blockHash", getHashStringNoErr(tx.BlockHash)), slog.Uint64("height", tx.BlockHeight), slog.String("txHash", getHashStringNoErr(tx.TxHash)), slog.String("err", err.Error())) @@ -684,9 +684,17 @@ func (p *Processor) performReorg(ctx context.Context, incomingBlock *blocktx_api } } - minedTxs, staleTxs := findMinedAndStaleTxs(prevStaleTxs, prevLongestTxs) + nowMinedTxs, nowStaleTxs := findMinedAndStaleTxs(prevStaleTxs, prevLongestTxs) - txsToPublish := append(minedTxs, staleTxs...) + for i := range nowMinedTxs { + nowMinedTxs[i].BlockStatus = blocktx_api.Status_LONGEST + } + + for i := range nowStaleTxs { + nowStaleTxs[i].BlockStatus = blocktx_api.Status_STALE + } + + txsToPublish := append(nowMinedTxs, nowStaleTxs...) 
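+	// txsToPublish now holds both groups: transactions mined in the new longest chain
+	// (BlockStatus LONGEST) and transactions left only in the now-stale chain (BlockStatus STALE),
+	// so the caller can publish a single list with the correct status for each transaction.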
return txsToPublish, nil } From 973f8855217b89c448023c7a8f3bac18ec4c400b Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Fri, 11 Oct 2024 12:15:35 +0200 Subject: [PATCH 07/36] feat: integration test refactored and documented --- .../reorg_integration_test.go | 67 +++++++------------ 1 file changed, 24 insertions(+), 43 deletions(-) diff --git a/internal/blocktx/integration_test/reorg_integration_test.go b/internal/blocktx/integration_test/reorg_integration_test.go index 33bbe3580..b30a35c03 100644 --- a/internal/blocktx/integration_test/reorg_integration_test.go +++ b/internal/blocktx/integration_test/reorg_integration_test.go @@ -4,18 +4,20 @@ package integrationtest // Postgresql Store - running on docker // Blocktx Processor // PeerHandler - mocked +// Message queue sending txs to metamorph - mocked // // Flow of this test: // 1. A list of blocks from height 822014 to 822017 is added to db from fixtures // 2. A hardcoded msg with competing block at height 822015 is being sent through the mocked PeerHandler // 3. This block has a chainwork lower than the current tip of chain - becomes STALE -// 4. Next competing block, at height 822016 is being send through the mocked PeerHandler -// 5. This block has a greater chainwork than the current tip of longest chain - it becomes LONGEST despite not being the highest -// 6. Verification of reorg - checking if statuses are correctly switched -// -// Todo: Next tasks: -// - Verify that transactions are properly updated in blocktx store -// - Include mock metamorph in this test and verify that transactions statuses are properly updated +// 4. Registered transactions from this block that are not in the longest chain are published to the metamorph message queue +// 5. Next competing block, at height 822016 is being sent through the mocked PeerHandler +// 6. This block has a greater chainwork than the current tip of the longest chain - it becomes LONGEST despite not being the highest +// 7. Verification of reorg - checking if statuses are correctly switched +// 8. 
Verification of transactions +// - transactions from the stale chain becoming the longest are published +// - transactions that were previously in the longest chain are published with updated block data +// - transactions that were previously in the longest chain, but are not in the stale chain are published with block status = STALE import ( "context" @@ -175,11 +177,7 @@ func testHandleBlockOnEmptyDatabase(t *testing.T, p2pMsgHandler *blocktx_p2p.Msg time.Sleep(200 * time.Millisecond) blockHashZero := blockMessage.Header.BlockHash() - - block, err := store.GetBlock(context.Background(), &blockHashZero) - require.NoError(t, err) - require.Equal(t, uint64(822011), block.Height) - require.Equal(t, blocktx_api.Status_LONGEST, block.Status) + verifyBlock(t, store, &blockHashZero, 822011, blocktx_api.Status_LONGEST) } func testHandleStaleBlock(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store *postgresql.PostgreSQL, publishedTxs []*blocktx_api.TransactionBlock) (*chainhash.Hash, []*blocktx_api.TransactionBlock) { @@ -206,11 +204,7 @@ func testHandleStaleBlock(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, s time.Sleep(200 * time.Millisecond) blockHashStale := blockMessage.Header.BlockHash() - - block, err := store.GetBlock(context.Background(), &blockHashStale) - require.NoError(t, err) - require.Equal(t, uint64(822015), block.Height) - require.Equal(t, blocktx_api.Status_STALE, block.Status) + verifyBlock(t, store, &blockHashStale, 822015, blocktx_api.Status_STALE) // transactions expected to be published to metamorph expectedTxs := []*blocktx_api.TransactionBlock{ @@ -250,40 +244,20 @@ func testHandleReorg(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store // verify that reorg happened blockHashLongest := blockMessage.Header.BlockHash() - - block, err := store.GetBlock(context.Background(), &blockHashLongest) - require.NoError(t, err) - require.Equal(t, uint64(822016), block.Height) - require.Equal(t, blocktx_api.Status_LONGEST, block.Status) - - block, err = store.GetBlock(context.Background(), staleBlockHash) - require.NoError(t, err) - require.Equal(t, uint64(822015), block.Height) - require.Equal(t, blocktx_api.Status_LONGEST, block.Status) + verifyBlock(t, store, &blockHashLongest, 822016, blocktx_api.Status_LONGEST) + verifyBlock(t, store, staleBlockHash, 822015, blocktx_api.Status_LONGEST) previouslyLongestBlockHash := testutils.RevChainhash(t, "c9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000") - block, err = store.GetBlock(context.Background(), previouslyLongestBlockHash) - require.NoError(t, err) - require.Equal(t, uint64(822015), block.Height) - require.Equal(t, blocktx_api.Status_STALE, block.Status) + verifyBlock(t, store, previouslyLongestBlockHash, 822015, blocktx_api.Status_STALE) previouslyLongestBlockHash = testutils.RevChainhash(t, "e1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000") - block, err = store.GetBlock(context.Background(), previouslyLongestBlockHash) - require.NoError(t, err) - require.Equal(t, uint64(822016), block.Height) - require.Equal(t, blocktx_api.Status_STALE, block.Status) + verifyBlock(t, store, previouslyLongestBlockHash, 822016, blocktx_api.Status_STALE) previouslyLongestBlockHash = testutils.RevChainhash(t, "76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000") - block, err = store.GetBlock(context.Background(), previouslyLongestBlockHash) - require.NoError(t, err) - require.Equal(t, uint64(822017), block.Height) - require.Equal(t, blocktx_api.Status_STALE, block.Status) + 
verifyBlock(t, store, previouslyLongestBlockHash, 822017, blocktx_api.Status_STALE) beginningOfChain := testutils.RevChainhash(t, "f97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000") - block, err = store.GetBlock(context.Background(), beginningOfChain) - require.NoError(t, err) - require.Equal(t, uint64(822014), block.Height) - require.Equal(t, blocktx_api.Status_LONGEST, block.Status) + verifyBlock(t, store, beginningOfChain, 822014, blocktx_api.Status_LONGEST) expectedTxs := []*blocktx_api.TransactionBlock{ { // previously in stale chain @@ -315,6 +289,13 @@ func testHandleReorg(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store return expectedTxs } +func verifyBlock(t *testing.T, store *postgresql.PostgreSQL, hash *chainhash.Hash, height uint64, status blocktx_api.Status) { + block, err := store.GetBlock(context.Background(), hash) + require.NoError(t, err) + require.Equal(t, height, block.Height) + require.Equal(t, status, block.Status) +} + func verifyTxs(t *testing.T, expectedTxs []*blocktx_api.TransactionBlock, publishedTxs []*blocktx_api.TransactionBlock) { strippedTxs := make([]*blocktx_api.TransactionBlock, len(publishedTxs)) for i, tx := range publishedTxs { From f87253bd1833bc6a30e174af9c232456d36a5c44 Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Fri, 11 Oct 2024 16:54:47 +0200 Subject: [PATCH 08/36] feat: add new status MINED_IN_STALE_BLOCK and handle logic for that status --- .../metamorph_api/metamorph_api.pb.go | 160 ++++++++++++++++++ .../metamorph_api/metamorph_api.proto | 1 + .../metamorph/store/postgresql/postgres.go | 17 +- .../store/postgresql/postgres_test.go | 63 ++++++- 4 files changed, 226 insertions(+), 15 deletions(-) diff --git a/internal/metamorph/metamorph_api/metamorph_api.pb.go b/internal/metamorph/metamorph_api/metamorph_api.pb.go index dab2a0866..b872ed9dc 100644 --- a/internal/metamorph/metamorph_api/metamorph_api.pb.go +++ b/internal/metamorph/metamorph_api/metamorph_api.pb.go @@ -40,6 +40,7 @@ const ( Status_SEEN_ON_NETWORK Status = 90 Status_DOUBLE_SPEND_ATTEMPTED Status = 100 Status_REJECTED Status = 110 + Status_MINED_IN_STALE_BLOCK Status = 115 Status_MINED Status = 120 ) @@ -58,6 +59,7 @@ var ( 90: "SEEN_ON_NETWORK", 100: "DOUBLE_SPEND_ATTEMPTED", 110: "REJECTED", + 115: "MINED_IN_STALE_BLOCK", 120: "MINED", } Status_value = map[string]int32{ @@ -73,6 +75,7 @@ var ( "SEEN_ON_NETWORK": 90, "DOUBLE_SPEND_ATTEMPTED": 100, "REJECTED": 110, + "MINED_IN_STALE_BLOCK": 115, "MINED": 120, } ) @@ -1070,6 +1073,7 @@ var file_internal_metamorph_metamorph_api_metamorph_api_proto_rawDesc = []byte{ 0x0b, 0x20, 0x01, 0x28, 0x08, 0x52, 0x11, 0x66, 0x75, 0x6c, 0x6c, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x73, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x61, 0x78, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x6d, +<<<<<<< HEAD 0x61, 0x78, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x12, 0x19, 0x0a, 0x08, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x22, 0x77, 0x0a, 0x13, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, @@ -1229,6 +1233,162 @@ var file_internal_metamorph_metamorph_api_metamorph_api_proto_rawDesc = []byte{ 0x72, 0x44, 0x61, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x11, 0x5a, 0x0f, 0x2e, 0x3b, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +======= 
+ 0x61, 0x78, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x22, 0x5c, 0x0a, 0x13, 0x54, 0x72, 0x61, + 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x73, + 0x12, 0x45, 0x0a, 0x0c, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, + 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, + 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x0c, 0x54, 0x72, 0x61, 0x6e, 0x73, + 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0xbe, 0x03, 0x0a, 0x0b, 0x54, 0x72, 0x61, 0x6e, + 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x64, + 0x5f, 0x6f, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x74, 0x69, 0x6d, 0x65, + 0x64, 0x4f, 0x75, 0x74, 0x12, 0x37, 0x0a, 0x09, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x64, 0x5f, 0x61, + 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, + 0x61, 0x6d, 0x70, 0x52, 0x08, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x64, 0x41, 0x74, 0x12, 0x3d, 0x0a, + 0x0c, 0x61, 0x6e, 0x6e, 0x6f, 0x75, 0x6e, 0x63, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, + 0x0b, 0x61, 0x6e, 0x6e, 0x6f, 0x75, 0x6e, 0x63, 0x65, 0x64, 0x41, 0x74, 0x12, 0x35, 0x0a, 0x08, + 0x6d, 0x69, 0x6e, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, + 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, + 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x07, 0x6d, 0x69, 0x6e, 0x65, + 0x64, 0x41, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x78, 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x04, 0x74, 0x78, 0x69, 0x64, 0x12, 0x2d, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, + 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, + 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, + 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x72, 0x65, 0x6a, 0x65, 0x63, 0x74, + 0x5f, 0x72, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x72, + 0x65, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x12, 0x22, 0x0a, 0x0c, 0x63, + 0x6f, 0x6d, 0x70, 0x65, 0x74, 0x69, 0x6e, 0x67, 0x54, 0x78, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, + 0x09, 0x52, 0x0c, 0x63, 0x6f, 0x6d, 0x70, 0x65, 0x74, 0x69, 0x6e, 0x67, 0x54, 0x78, 0x73, 0x12, + 0x21, 0x0a, 0x0c, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, + 0x09, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x69, 0x67, + 0x68, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x61, 0x73, 0x68, + 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x61, 0x73, + 0x68, 0x12, 0x15, 0x0a, 0x06, 0x72, 0x61, 0x77, 0x5f, 0x74, 0x78, 0x18, 0x0b, 0x20, 0x01, 0x28, + 0x0c, 0x52, 0x05, 0x72, 0x61, 0x77, 0x54, 0x78, 0x22, 0xd8, 0x02, 0x0a, 0x11, 0x54, 0x72, 0x61, + 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x1b, + 0x0a, 0x09, 0x74, 0x69, 
0x6d, 0x65, 0x64, 0x5f, 0x6f, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x08, 0x52, 0x08, 0x74, 0x69, 0x6d, 0x65, 0x64, 0x4f, 0x75, 0x74, 0x12, 0x37, 0x0a, 0x09, 0x73, + 0x74, 0x6f, 0x72, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, + 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, + 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x08, 0x73, 0x74, 0x6f, 0x72, + 0x65, 0x64, 0x41, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x78, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x74, 0x78, 0x69, 0x64, 0x12, 0x2d, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, + 0x75, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, + 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, + 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x72, 0x65, 0x6a, 0x65, 0x63, + 0x74, 0x5f, 0x72, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, + 0x72, 0x65, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x12, 0x22, 0x0a, 0x0c, + 0x63, 0x6f, 0x6d, 0x70, 0x65, 0x74, 0x69, 0x6e, 0x67, 0x54, 0x78, 0x73, 0x18, 0x06, 0x20, 0x03, + 0x28, 0x09, 0x52, 0x0c, 0x63, 0x6f, 0x6d, 0x70, 0x65, 0x74, 0x69, 0x6e, 0x67, 0x54, 0x78, 0x73, + 0x12, 0x21, 0x0a, 0x0c, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, + 0x18, 0x07, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x69, + 0x67, 0x68, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x61, 0x73, + 0x68, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x61, + 0x73, 0x68, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, + 0x68, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x50, + 0x61, 0x74, 0x68, 0x22, 0x53, 0x0a, 0x13, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x12, 0x3c, 0x0a, 0x08, 0x53, 0x74, + 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x6d, + 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, + 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x08, + 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x22, 0x2e, 0x0a, 0x18, 0x54, 0x72, 0x61, 0x6e, + 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x78, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x74, 0x78, 0x69, 0x64, 0x22, 0x2e, 0x0a, 0x18, 0x53, 0x65, 0x74, 0x55, + 0x6e, 0x6c, 0x6f, 0x63, 0x6b, 0x65, 0x64, 0x42, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x46, 0x0a, 0x19, 0x53, 0x65, 0x74, 0x55, + 0x6e, 0x6c, 0x6f, 0x63, 0x6b, 0x65, 0x64, 0x42, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x29, 0x0a, 0x10, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x73, + 0x5f, 0x61, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, + 0x0f, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x73, 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, + 0x22, 0x38, 0x0a, 0x10, 0x43, 0x6c, 0x65, 0x61, 
0x72, 0x44, 0x61, 0x74, 0x61, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x0d, 0x72, 0x65, 0x74, 0x65, 0x6e, 0x74, 0x69, 0x6f, + 0x6e, 0x44, 0x61, 0x79, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0d, 0x72, 0x65, 0x74, + 0x65, 0x6e, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x61, 0x79, 0x73, 0x22, 0x3e, 0x0a, 0x11, 0x43, 0x6c, + 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, + 0x29, 0x0a, 0x10, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x73, 0x5f, 0x61, 0x66, 0x66, 0x65, 0x63, + 0x74, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0f, 0x72, 0x65, 0x63, 0x6f, 0x72, + 0x64, 0x73, 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x22, 0x31, 0x0a, 0x19, 0x54, 0x72, + 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x78, 0x49, 0x44, 0x73, + 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x74, 0x78, 0x49, 0x44, 0x73, 0x22, 0x4e, 0x0a, + 0x0c, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x3e, 0x0a, + 0x0c, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, + 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, + 0x0c, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2a, 0x9d, 0x02, + 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, + 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x51, 0x55, 0x45, 0x55, 0x45, 0x44, 0x10, + 0x0a, 0x12, 0x0c, 0x0a, 0x08, 0x52, 0x45, 0x43, 0x45, 0x49, 0x56, 0x45, 0x44, 0x10, 0x14, 0x12, + 0x0a, 0x0a, 0x06, 0x53, 0x54, 0x4f, 0x52, 0x45, 0x44, 0x10, 0x1e, 0x12, 0x18, 0x0a, 0x14, 0x41, + 0x4e, 0x4e, 0x4f, 0x55, 0x4e, 0x43, 0x45, 0x44, 0x5f, 0x54, 0x4f, 0x5f, 0x4e, 0x45, 0x54, 0x57, + 0x4f, 0x52, 0x4b, 0x10, 0x28, 0x12, 0x18, 0x0a, 0x14, 0x52, 0x45, 0x51, 0x55, 0x45, 0x53, 0x54, + 0x45, 0x44, 0x5f, 0x42, 0x59, 0x5f, 0x4e, 0x45, 0x54, 0x57, 0x4f, 0x52, 0x4b, 0x10, 0x32, 0x12, + 0x13, 0x0a, 0x0f, 0x53, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x4f, 0x5f, 0x4e, 0x45, 0x54, 0x57, 0x4f, + 0x52, 0x4b, 0x10, 0x3c, 0x12, 0x17, 0x0a, 0x13, 0x41, 0x43, 0x43, 0x45, 0x50, 0x54, 0x45, 0x44, + 0x5f, 0x42, 0x59, 0x5f, 0x4e, 0x45, 0x54, 0x57, 0x4f, 0x52, 0x4b, 0x10, 0x46, 0x12, 0x1a, 0x0a, + 0x16, 0x53, 0x45, 0x45, 0x4e, 0x5f, 0x49, 0x4e, 0x5f, 0x4f, 0x52, 0x50, 0x48, 0x41, 0x4e, 0x5f, + 0x4d, 0x45, 0x4d, 0x50, 0x4f, 0x4f, 0x4c, 0x10, 0x50, 0x12, 0x13, 0x0a, 0x0f, 0x53, 0x45, 0x45, + 0x4e, 0x5f, 0x4f, 0x4e, 0x5f, 0x4e, 0x45, 0x54, 0x57, 0x4f, 0x52, 0x4b, 0x10, 0x5a, 0x12, 0x1a, + 0x0a, 0x16, 0x44, 0x4f, 0x55, 0x42, 0x4c, 0x45, 0x5f, 0x53, 0x50, 0x45, 0x4e, 0x44, 0x5f, 0x41, + 0x54, 0x54, 0x45, 0x4d, 0x50, 0x54, 0x45, 0x44, 0x10, 0x64, 0x12, 0x0c, 0x0a, 0x08, 0x52, 0x45, + 0x4a, 0x45, 0x43, 0x54, 0x45, 0x44, 0x10, 0x6e, 0x12, 0x18, 0x0a, 0x14, 0x4d, 0x49, 0x4e, 0x45, + 0x44, 0x5f, 0x49, 0x4e, 0x5f, 0x53, 0x54, 0x41, 0x4c, 0x45, 0x5f, 0x42, 0x4c, 0x4f, 0x43, 0x4b, + 0x10, 0x73, 0x12, 0x09, 0x0a, 0x05, 0x4d, 0x49, 0x4e, 0x45, 0x44, 0x10, 0x78, 0x32, 0xdd, 0x05, + 0x0a, 0x0c, 0x4d, 0x65, 0x74, 0x61, 0x4d, 0x6f, 0x72, 0x70, 0x68, 0x41, 0x50, 0x49, 0x12, 0x41, + 0x0a, 0x06, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 
0x6d, 0x70, 0x74, 0x79, + 0x1a, 0x1d, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, + 0x2e, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, + 0x00, 0x12, 0x57, 0x0a, 0x0e, 0x50, 0x75, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x12, 0x21, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, + 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, + 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x00, 0x12, 0x5b, 0x0a, 0x0f, 0x50, 0x75, + 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x22, 0x2e, + 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, + 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x73, 0x1a, 0x22, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, + 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, + 0x74, 0x75, 0x73, 0x65, 0x73, 0x22, 0x00, 0x12, 0x57, 0x0a, 0x0e, 0x47, 0x65, 0x74, 0x54, 0x72, + 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x27, 0x2e, 0x6d, 0x65, 0x74, 0x61, + 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x1a, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, + 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x00, + 0x12, 0x5a, 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x73, 0x12, 0x28, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, + 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, + 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, + 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, + 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0x00, 0x12, 0x63, 0x0a, 0x14, + 0x47, 0x65, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, + 0x61, 0x74, 0x75, 0x73, 0x12, 0x27, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, + 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, + 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, + 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, + 0x00, 0x12, 0x68, 0x0a, 0x11, 0x53, 0x65, 0x74, 0x55, 0x6e, 0x6c, 0x6f, 0x63, 0x6b, 0x65, 0x64, + 0x42, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x27, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, + 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x65, 0x74, 0x55, 0x6e, 0x6c, 0x6f, 0x63, 0x6b, + 0x65, 0x64, 0x42, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x28, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, + 
0x53, 0x65, 0x74, 0x55, 0x6e, 0x6c, 0x6f, 0x63, 0x6b, 0x65, 0x64, 0x42, 0x79, 0x4e, 0x61, 0x6d, + 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x50, 0x0a, 0x09, 0x43, + 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x12, 0x1f, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, + 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, + 0x74, 0x61, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x6d, 0x65, 0x74, 0x61, + 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, + 0x61, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x11, 0x5a, + 0x0f, 0x2e, 0x3b, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, + 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +>>>>>>> 03ac277e (feat: add new status MINED_IN_STALE_BLOCK and handle logic for that status) } var ( diff --git a/internal/metamorph/metamorph_api/metamorph_api.proto b/internal/metamorph/metamorph_api/metamorph_api.proto index 203118e6c..a78a3e0e8 100644 --- a/internal/metamorph/metamorph_api/metamorph_api.proto +++ b/internal/metamorph/metamorph_api/metamorph_api.proto @@ -23,6 +23,7 @@ enum Status { SEEN_ON_NETWORK = 90; DOUBLE_SPEND_ATTEMPTED = 100; REJECTED = 110; + MINED_IN_STALE_BLOCK = 115; MINED = 120; } diff --git a/internal/metamorph/store/postgresql/postgres.go b/internal/metamorph/store/postgresql/postgres.go index 3f79d15fd..033997f34 100644 --- a/internal/metamorph/store/postgresql/postgres.go +++ b/internal/metamorph/store/postgresql/postgres.go @@ -589,7 +589,6 @@ func (p *PostgreSQL) GetSeenOnNetwork(ctx context.Context, since time.Time, unti defer rows.Close() res, err = getStoreDataFromRows(rows) - if err != nil { if rollBackErr := tx.Rollback(); rollBackErr != nil { return nil, errors.Join(err, fmt.Errorf("failed to rollback: %v", rollBackErr)) @@ -795,7 +794,6 @@ func (p *PostgreSQL) UpdateDoubleSpend(ctx context.Context, updates []store.Upda defer rows.Close() res, err = getStoreDataFromRows(rows) - if err != nil { if rollbackErr := tx.Rollback(); rollbackErr != nil { return nil, errors.Join(err, fmt.Errorf("failed to rollback: %v", rollbackErr)) @@ -825,22 +823,27 @@ func (p *PostgreSQL) UpdateMined(ctx context.Context, txsBlocks []*blocktx_api.T blockHashes := make([][]byte, len(txsBlocks)) blockHeights := make([]uint64, len(txsBlocks)) merklePaths := make([]string, len(txsBlocks)) + statuses := make([]metamorph_api.Status, len(txsBlocks)) for i, tx := range txsBlocks { txHashes[i] = tx.TransactionHash blockHashes[i] = tx.BlockHash blockHeights[i] = tx.BlockHeight merklePaths[i] = tx.MerklePath + statuses[i] = metamorph_api.Status_MINED + if tx.BlockStatus == blocktx_api.Status_STALE { + statuses[i] = metamorph_api.Status_MINED_IN_STALE_BLOCK + } } qBulkUpdate := ` UPDATE metamorph.transactions t SET - status=$1, + status=bulk_query.mined_status, block_hash=bulk_query.block_hash, block_height=bulk_query.block_height, merkle_path=bulk_query.merkle_path, - last_modified=$2, + last_modified=$1, status_history=status_history || json_build_object( 'status', status, 'timestamp', last_modified @@ -849,8 +852,8 @@ func (p *PostgreSQL) UpdateMined(ctx context.Context, txsBlocks []*blocktx_api.T ( SELECT * FROM - UNNEST($3::BYTEA[], $4::BYTEA[], $5::BIGINT[], $6::TEXT[]) - AS t(hash, block_hash, block_height, merkle_path) + UNNEST($2::INT[], $3::BYTEA[], $4::BYTEA[], $5::BIGINT[], $6::TEXT[]) + AS t(mined_status, hash, block_hash, block_height, merkle_path) 
) AS bulk_query WHERE t.hash=bulk_query.hash @@ -889,7 +892,7 @@ func (p *PostgreSQL) UpdateMined(ctx context.Context, txsBlocks []*blocktx_api.T competingTxsData := getCompetingTxsFromRows(rows) rejectedResponses := updateDoubleSpendRejected(ctx, competingTxsData, tx) - rows, err = tx.QueryContext(ctx, qBulkUpdate, metamorph_api.Status_MINED, p.now(), pq.Array(txHashes), pq.Array(blockHashes), pq.Array(blockHeights), pq.Array(merklePaths)) + rows, err = tx.QueryContext(ctx, qBulkUpdate, p.now(), pq.Array(statuses), pq.Array(txHashes), pq.Array(blockHashes), pq.Array(blockHeights), pq.Array(merklePaths)) if err != nil { if rollBackErr := tx.Rollback(); rollBackErr != nil { return nil, errors.Join(err, fmt.Errorf("failed to rollback: %v", rollBackErr)) diff --git a/internal/metamorph/store/postgresql/postgres_test.go b/internal/metamorph/store/postgresql/postgres_test.go index d03cdabf6..1e554166d 100644 --- a/internal/metamorph/store/postgresql/postgres_test.go +++ b/internal/metamorph/store/postgresql/postgres_test.go @@ -473,6 +473,7 @@ func TestPostgresDB(t *testing.T) { chainHash2 := testutils.RevChainhash(t, "ee76f5b746893d3e6ae6a14a15e464704f4ebd601537820933789740acdcf6aa") chainHash3 := testutils.RevChainhash(t, "a7fd98bd37f9b387dbef4f1a4e4790b9a0d48fb7bbb77455e8f39df0f8909db7") competingHash := testutils.RevChainhash(t, "67fc757d9ed6d119fc0926ae5c82c1a2cf036ec823257cfaea396e49184ec7ff") + chainhash4 := testutils.RevChainhash(t, "3e0b5b218c344110f09bf485bc58de4ea5378e55744185edf9c1dafa40068ecd") txBlocks := []*blocktx_api.TransactionBlock{ { @@ -480,27 +481,38 @@ func TestPostgresDB(t *testing.T) { BlockHeight: 100, TransactionHash: unminedHash[:], MerklePath: "merkle-path-1", + BlockStatus: blocktx_api.Status_LONGEST, }, { BlockHash: testdata.Block1Hash[:], BlockHeight: 100, TransactionHash: chainHash2[:], MerklePath: "merkle-path-2", + BlockStatus: blocktx_api.Status_LONGEST, }, { BlockHash: testdata.Block1Hash[:], BlockHeight: 100, TransactionHash: testdata.TX3Hash[:], // hash non-existent in db MerklePath: "merkle-path-3", + BlockStatus: blocktx_api.Status_LONGEST, }, { BlockHash: testdata.Block1Hash[:], BlockHeight: 100, TransactionHash: chainHash3[:], // this one has competing transactions MerklePath: "merkle-path-4", + BlockStatus: blocktx_api.Status_LONGEST, + }, + { + BlockHash: testdata.Block2Hash[:], + BlockHeight: 100, + TransactionHash: chainhash4[:], + MerklePath: "merkle-path-5", + BlockStatus: blocktx_api.Status_STALE, // should have status MINED_IN_STALE_BLOCK }, } - expectedUpdates := 4 // 3 for updates + 1 for rejected competing tx + expectedUpdates := 5 // 4 for updates + 1 for rejected competing tx updated, err := postgresDB.UpdateMined(ctx, txBlocks) require.NoError(t, err) @@ -516,14 +528,19 @@ func TestPostgresDB(t *testing.T) { require.Equal(t, "merkle-path-4", updated[1].MerklePath) require.Equal(t, metamorph_api.Status_MINED, updated[1].Status) - require.True(t, unminedHash.IsEqual(updated[2].Hash)) - require.True(t, testdata.Block1Hash.IsEqual(updated[2].BlockHash)) - require.Equal(t, "merkle-path-1", updated[2].MerklePath) - require.Equal(t, metamorph_api.Status_MINED, updated[2].Status) + require.True(t, chainhash4.IsEqual(updated[2].Hash)) + require.True(t, testdata.Block2Hash.IsEqual(updated[2].BlockHash)) + require.Equal(t, "merkle-path-5", updated[2].MerklePath) + require.Equal(t, metamorph_api.Status_MINED_IN_STALE_BLOCK, updated[2].Status) + + require.True(t, unminedHash.IsEqual(updated[3].Hash)) + require.True(t, 
testdata.Block1Hash.IsEqual(updated[3].BlockHash)) + require.Equal(t, "merkle-path-1", updated[3].MerklePath) + require.Equal(t, metamorph_api.Status_MINED, updated[3].Status) - require.True(t, competingHash.IsEqual(updated[3].Hash)) - require.Equal(t, metamorph_api.Status_REJECTED, updated[3].Status) - require.Equal(t, "double spend attempted", updated[3].RejectReason) + require.True(t, competingHash.IsEqual(updated[4].Hash)) + require.Equal(t, metamorph_api.Status_REJECTED, updated[4].Status) + require.Equal(t, "double spend attempted", updated[4].RejectReason) minedReturned, err := postgresDB.Get(ctx, unminedHash[:]) require.NoError(t, err) @@ -647,6 +664,7 @@ func TestPostgresDB(t *testing.T) { BlockHeight: 100, TransactionHash: unminedHash[:], MerklePath: "merkle-path-1", + BlockStatus: blocktx_api.Status_LONGEST, }, } @@ -667,6 +685,35 @@ func TestPostgresDB(t *testing.T) { unmined.LastModified = postgresDB.now() unmined.Status = metamorph_api.Status_MINED require.Equal(t, &unmined, updatedTx) + + // Fourth update - UpdateMined - MINED_IN_STALE_BLOCK + txBlocks = []*blocktx_api.TransactionBlock{ + { + BlockHash: testdata.Block2Hash[:], + BlockHeight: 100, + TransactionHash: unminedHash[:], + MerklePath: "merkle-path-1", + BlockStatus: blocktx_api.Status_STALE, + }, + } + + updated, err = postgresDB.UpdateMined(ctx, txBlocks) + require.NoError(t, err) + require.Len(t, updated, 1) + + updatedTx, err = postgresDB.Get(ctx, unminedHash[:]) + require.NoError(t, err) + + unmined.BlockHeight = 100 + unmined.BlockHash = testdata.Block2Hash + unmined.MerklePath = "merkle-path-1" + unmined.StatusHistory = append(unmined.StatusHistory, &store.StoreStatus{ + Status: unmined.Status, + Timestamp: unmined.LastModified, + }) + unmined.LastModified = postgresDB.now() + unmined.Status = metamorph_api.Status_MINED_IN_STALE_BLOCK + require.Equal(t, &unmined, updatedTx) }) t.Run("clear data", func(t *testing.T) { From a9a42bbaf4024d746e70e2b7a001ab00e81e9368 Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Fri, 11 Oct 2024 16:56:30 +0200 Subject: [PATCH 09/36] chore: fix proto indentation --- internal/blocktx/blocktx_api/blocktx_api.proto | 2 +- internal/metamorph/metamorph_api/metamorph_api.proto | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/internal/blocktx/blocktx_api/blocktx_api.proto b/internal/blocktx/blocktx_api/blocktx_api.proto index a243c34fa..139e00ad2 100644 --- a/internal/blocktx/blocktx_api/blocktx_api.proto +++ b/internal/blocktx/blocktx_api/blocktx_api.proto @@ -65,7 +65,7 @@ message TransactionBlock { uint64 block_height = 2; bytes transaction_hash = 3; // Little endian string merklePath = 4; - Status block_status = 5; + Status block_status = 5; } message TransactionBlocks { diff --git a/internal/metamorph/metamorph_api/metamorph_api.proto b/internal/metamorph/metamorph_api/metamorph_api.proto index a78a3e0e8..ed24b04b6 100644 --- a/internal/metamorph/metamorph_api/metamorph_api.proto +++ b/internal/metamorph/metamorph_api/metamorph_api.proto @@ -21,9 +21,9 @@ enum Status { ACCEPTED_BY_NETWORK = 70; SEEN_IN_ORPHAN_MEMPOOL = 80; SEEN_ON_NETWORK = 90; - DOUBLE_SPEND_ATTEMPTED = 100; + DOUBLE_SPEND_ATTEMPTED = 100; REJECTED = 110; - MINED_IN_STALE_BLOCK = 115; + MINED_IN_STALE_BLOCK = 115; MINED = 120; } From 73d7962adb85fdc4a30b5c08248838c49c7a452c Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Fri, 11 Oct 2024 17:57:03 +0200 Subject: [PATCH 10/36] chore: comments removed --- .../fixtures/blocktx.transactions.yaml | 31 ------------------- 
.../reorg_integration_test.go | 2 +- 2 files changed, 1 insertion(+), 32 deletions(-) diff --git a/internal/blocktx/integration_test/fixtures/blocktx.transactions.yaml b/internal/blocktx/integration_test/fixtures/blocktx.transactions.yaml index 33b4aa519..0b42d6d15 100644 --- a/internal/blocktx/integration_test/fixtures/blocktx.transactions.yaml +++ b/internal/blocktx/integration_test/fixtures/blocktx.transactions.yaml @@ -1,4 +1,3 @@ -# txs from block 1 - LONGEST - id: 1000001 hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853 inserted_at: 2023-12-10 14:00:00 @@ -19,33 +18,3 @@ hash: 0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6 inserted_at: 2023-12-10 14:00:00 is_registered: true -# - id: 4 -# hash: 0xee76f5b746893d3e6ae6a14a15e464704f4ebd601537820933789740acdcf6aa -# inserted_at: 2023-12-10 14:00:00 -# is_registered: false -# - id: 5 -# hash: 0x3e0b5b218c344110f09bf485bc58de4ea5378e55744185edf9c1dafa40068ecd -# inserted_at: 2023-12-10 14:00:00 -# is_registered: false -# -# # txs from block 2 - STALE -# - id: 6 -# hash: 0x213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0 -# inserted_at: 2023-12-15 14:00:00 -# is_registered: true -# - id: 7 -# hash: 0x12c04cfc5643f1cd25639ad42d6f8f0489557699d92071d7e0a5b940438c4357 -# inserted_at: 2023-12-15 14:00:00 -# is_registered: true -# - id: 8 -# hash: 0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6 -# inserted_at: 2023-12-15 14:00:00 -# is_registered: false -# - id: 9 -# hash: 0x5c5b621b81fb63d9df4595ee2e6b3c50cce1f5f0e1b83510aac504931ed22799 -# inserted_at: 2023-12-15 14:00:00 -# is_registered: false -# - id: 10 -# hash: 0xa3d4e78a8e11e97c8faf34880da861412273948edf467f23590601a1057079d8 -# inserted_at: 2023-12-15 14:00:00 -# is_registered: false diff --git a/internal/blocktx/integration_test/reorg_integration_test.go b/internal/blocktx/integration_test/reorg_integration_test.go index b30a35c03..4c3d459dc 100644 --- a/internal/blocktx/integration_test/reorg_integration_test.go +++ b/internal/blocktx/integration_test/reorg_integration_test.go @@ -10,7 +10,7 @@ package integrationtest // 1. A list of blocks from height 822014 to 822017 is added to db from fixtures // 2. A hardcoded msg with competing block at height 822015 is being sent through the mocked PeerHandler // 3. This block has a chainwork lower than the current tip of chain - becomes STALE -// 4. Registered transactions from this block that are not in the longest chain are published to metamorph message queue +// 4. Registered transactions from this block that are not in the longest chain are published to metamorph message queue with blockstatus = STALE // 5. Next competing block, at height 822016 is being sent through the mocked PeerHandler // 6. This block has a greater chainwork than the current tip of longest chain - it becomes LONGEST despite not being the highest // 7.
Verification of reorg - checking if statuses are correctly switched From f8c4ff3b75785bf07ce53a9f8159af025142d10a Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Fri, 11 Oct 2024 18:55:22 +0200 Subject: [PATCH 11/36] chore: fix test structure after refactor --- internal/blocktx/processor_test.go | 35 +++++++++++++++++++++++------- 1 file changed, 27 insertions(+), 8 deletions(-) diff --git a/internal/blocktx/processor_test.go b/internal/blocktx/processor_test.go index 9244a47db..2fb5e1117 100644 --- a/internal/blocktx/processor_test.go +++ b/internal/blocktx/processor_test.go @@ -328,7 +328,20 @@ func TestHandleBlockReorg(t *testing.T) { Chainwork: "62209952899966", }, nil }, - GetStaleChainBackFromHashFunc: func(_ context.Context, _ []byte) ([]*blocktx_api.Block, error) { + InsertBlockFunc: func(ctx context.Context, block *blocktx_api.Block) (uint64, error) { + mtx.Lock() + insertedBlock = &blocktx_api.Block{ + Hash: block.Hash, + Status: block.Status, + } + mtx.Unlock() + return 1, nil + }, + GetStaleChainBackFromHashFunc: func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { + // this function is called ONLY when performing reorg + mtx.Lock() + insertedBlock.Status = blocktx_api.Status_LONGEST + mtx.Unlock() return nil, nil }, GetLongestChainFromHeightFunc: func(_ context.Context, _ uint64) ([]*blocktx_api.Block, error) { @@ -337,14 +350,20 @@ func TestHandleBlockReorg(t *testing.T) { UpdateBlocksStatusesFunc: func(_ context.Context, _ []store.BlockStatusUpdate) error { return nil }, - UpsertBlockFunc: func(_ context.Context, block *blocktx_api.Block) (uint64, error) { - mtx.Lock() - insertedBlock = block - mtx.Unlock() - return 1, errors.New("dummy error") // return error here so we don't have to override next db functions + UpsertBlockTransactionsFunc: func(ctx context.Context, blockId uint64, txsWithMerklePaths []store.TxWithMerklePath) error { + return nil }, - DelBlockProcessingFunc: func(ctx context.Context, hash *chainhash.Hash, processedBy string) (int64, error) { - return 0, nil + GetRegisteredTransactionsFunc: func(ctx context.Context, blockId uint64) ([]store.TransactionBlock, error) { + return nil, nil + }, + GetRegisteredTxsByBlockHashesFunc: func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { + return nil, nil + }, + GetMinedTransactionsFunc: func(ctx context.Context, hashes [][]byte) ([]store.TransactionBlock, error) { + return nil, nil + }, + MarkBlockAsDoneFunc: func(ctx context.Context, hash *chainhash.Hash, size, txCount uint64) error { + return nil }, } From d8ee0d92ee0ae7ea847928777db72bf9ad0224e3 Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Mon, 14 Oct 2024 16:56:24 +0200 Subject: [PATCH 12/36] feat: fix wrong condition --- internal/blocktx/processor.go | 5 ++++- test/utils.go | 6 ++++-- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index 3d68f4260..5690bc95d 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -444,9 +444,12 @@ func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { previousBlockHash := msg.Header.PrevBlock merkleRoot := msg.Header.MerkleRoot - // don't process block that was already processed or is below our retention height + p.logger.Info("processing incoming block", slog.String("hash", blockHash.String())) + + // don't process block that was already processed existingBlock, _ := p.store.GetBlock(ctx, &blockHash) if existingBlock != nil && existingBlock.Processed { 
+ p.logger.Warn("ignoring already existing block", slog.String("hash", blockHash.String())) return nil } diff --git a/test/utils.go b/test/utils.go index 15f2d877d..5f652db07 100644 --- a/test/utils.go +++ b/test/utils.go @@ -120,8 +120,10 @@ func generateRandomString(length int) string { return string(b) } -type callbackResponseFn func(w http.ResponseWriter, rc chan *TransactionResponse, ec chan error, status *TransactionResponse) -type callbackBatchResponseFn func(w http.ResponseWriter, rc chan *CallbackBatchResponse, ec chan error, status *CallbackBatchResponse) +type ( + callbackResponseFn func(w http.ResponseWriter, rc chan *TransactionResponse, ec chan error, status *TransactionResponse) + callbackBatchResponseFn func(w http.ResponseWriter, rc chan *CallbackBatchResponse, ec chan error, status *CallbackBatchResponse) +) // use buffered channels for multiple callbacks func startCallbackSrv(t *testing.T, receivedChan chan *TransactionResponse, errChan chan error, alternativeResponseFn callbackResponseFn) (callbackURL, token string, shutdownFn func()) { From 6e1fd9c86b0dd58bd9c8c323ae53f6932abc7afc Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Wed, 16 Oct 2024 10:13:53 +0200 Subject: [PATCH 13/36] feat: improve error types --- internal/blocktx/processor.go | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index 5690bc95d..a852eb469 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -29,6 +29,7 @@ var ( ErrFailedToSubscribeToTopic = errors.New("failed to subscribe to register topic") ErrFailedToCreateBUMP = errors.New("failed to create new bump for tx hash from merkle tree and index") ErrFailedToGetStringFromBUMPHex = errors.New("failed to get string from bump for tx hash") + ErrFailedToParseBlockHash = errors.New("failed to parse block hash") ErrFailedToInsertBlockTransactions = errors.New("failed to insert block transactions") ) @@ -713,7 +714,7 @@ func (p *Processor) storeTransactions(ctx context.Context, blockId uint64, block blockhash, err := chainhash.NewHash(block.Hash) if err != nil { - return fmt.Errorf("failed to create block hash for block at height %d", block.Height) + return errors.Join(ErrFailedToParseBlockHash, fmt.Errorf("block height: %d", block.Height), err) } var totalSize int @@ -738,7 +739,7 @@ func (p *Processor) storeTransactions(ctx context.Context, blockId uint64, block bump, err := bc.NewBUMPFromMerkleTreeAndIndex(block.Height, merkleTree, uint64(txIndex)) if err != nil { - return fmt.Errorf("failed to create new bump for tx hash %s from merkle tree and index at block height %d: %v", hash.String(), block.Height, err) + return errors.Join(ErrFailedToCreateBUMP, fmt.Errorf("tx hash %s, block height: %d", hash.String(), block.Height), err) } bumpHex, err := bump.String() From 1cba622895e04fb84f7dc4db688289916ad35948 Mon Sep 17 00:00:00 2001 From: Kuba <127198012+kuba-4chain@users.noreply.github.com> Date: Wed, 6 Nov 2024 15:27:25 +0100 Subject: [PATCH 14/36] Reorg Support: Handling of orphaned blocks (#617) --- .../blocktx/blocktx_api/blocktx_api.pb.go | 224 +++++---- .../blocktx/blocktx_api/blocktx_api.proto | 7 +- internal/blocktx/chain.go | 29 ++ .../blocktx.block_transactions_map.yaml | 4 +- .../fixtures/blocktx.blocks.yaml | 68 ++- .../reorg_integration_test.go | 211 +++++++-- internal/blocktx/processor.go | 435 ++++++++++++------ internal/blocktx/processor_helpers_test.go | 5 + internal/blocktx/processor_test.go | 213 +++++++-- 
.../blocktx/store/mocks/blocktx_db_tx_mock.go | 150 ++++++ .../blocktx/store/mocks/blocktx_store_mock.go | 164 +++++-- .../block_processing/blocktx.blocks.yaml | 2 - .../fixtures/clear_data/blocktx.blocks.yaml | 2 - .../get_block_by_height/blocktx.blocks.yaml | 6 +- .../get_block_gaps/blocktx.blocks.yaml | 74 ++- .../get_longest_chain/blocktx.blocks.yaml | 10 +- .../get_orphaned_chain/blocktx.blocks.yaml | 59 +++ .../get_stale_chain/blocktx.blocks.yaml | 10 +- .../blocktx.block_transactions_map.yaml | 12 + .../get_transactions/blocktx.blocks.yaml | 15 +- .../blocktx.transactions.yaml | 14 + .../fixtures/insert_block/blocktx.blocks.yaml | 36 ++ .../mark_block_as_done/blocktx.blocks.yaml | 1 - .../register_transactions/blocktx.blocks.yaml | 1 - .../blocktx.blocks.yaml | 10 +- .../blocktx.blocks.yaml | 6 +- .../verify_merkle_roots/blocktx.blocks.yaml | 8 - .../blocktx/store/postgresql/get_block.go | 6 +- .../store/postgresql/get_block_gaps.go | 45 +- .../store/postgresql/get_longest_chain.go | 33 +- .../store/postgresql/get_orphaned_chain.go | 64 +++ .../store/postgresql/get_stale_chain.go | 30 +- .../store/postgresql/get_transactions.go | 19 +- .../blocktx/store/postgresql/insert_block.go | 23 +- .../store/postgresql/mark_block_as_done.go | 2 +- .../000018_remove_orphanedyn_field.down.sql | 2 + .../000018_remove_orphanedyn_field.up.sql | 1 + .../migrations/000019_add_is_longest.down.sql | 4 + .../migrations/000019_add_is_longest.up.sql | 13 + internal/blocktx/store/postgresql/postgres.go | 62 ++- .../store/postgresql/postgres_helpers.go | 35 ++ .../blocktx/store/postgresql/postgres_test.go | 326 +++++++++++-- .../store/postgresql/update_block_statuses.go | 11 +- .../store/postgresql/verify_merkle_roots.go | 4 +- internal/blocktx/store/store.go | 14 +- internal/blocktx/store/store_mocks.go | 1 + .../metamorph_api/metamorph_api.pb.go | 3 + 47 files changed, 1917 insertions(+), 557 deletions(-) create mode 100644 internal/blocktx/chain.go create mode 100644 internal/blocktx/store/mocks/blocktx_db_tx_mock.go create mode 100644 internal/blocktx/store/postgresql/fixtures/get_orphaned_chain/blocktx.blocks.yaml create mode 100644 internal/blocktx/store/postgresql/fixtures/insert_block/blocktx.blocks.yaml create mode 100644 internal/blocktx/store/postgresql/get_orphaned_chain.go create mode 100644 internal/blocktx/store/postgresql/migrations/000018_remove_orphanedyn_field.down.sql create mode 100644 internal/blocktx/store/postgresql/migrations/000018_remove_orphanedyn_field.up.sql create mode 100644 internal/blocktx/store/postgresql/migrations/000019_add_is_longest.down.sql create mode 100644 internal/blocktx/store/postgresql/migrations/000019_add_is_longest.up.sql create mode 100644 internal/blocktx/store/postgresql/postgres_helpers.go diff --git a/internal/blocktx/blocktx_api/blocktx_api.pb.go b/internal/blocktx/blocktx_api/blocktx_api.pb.go index 7415dac45..3b0d4038d 100644 --- a/internal/blocktx/blocktx_api/blocktx_api.pb.go +++ b/internal/blocktx/blocktx_api/blocktx_api.pb.go @@ -150,10 +150,9 @@ type Block struct { PreviousHash []byte `protobuf:"bytes,2,opt,name=previous_hash,json=previousHash,proto3" json:"previous_hash,omitempty"` // Little endian MerkleRoot []byte `protobuf:"bytes,3,opt,name=merkle_root,json=merkleRoot,proto3" json:"merkle_root,omitempty"` // Little endian Height uint64 `protobuf:"varint,4,opt,name=height,proto3" json:"height,omitempty"` - Orphaned bool `protobuf:"varint,5,opt,name=orphaned,proto3" json:"orphaned,omitempty"` - Processed bool 
`protobuf:"varint,6,opt,name=processed,proto3" json:"processed,omitempty"` - Status Status `protobuf:"varint,7,opt,name=status,proto3,enum=blocktx_api.Status" json:"status,omitempty"` - Chainwork string `protobuf:"bytes,8,opt,name=chainwork,proto3" json:"chainwork,omitempty"` + Processed bool `protobuf:"varint,5,opt,name=processed,proto3" json:"processed,omitempty"` + Status Status `protobuf:"varint,6,opt,name=status,proto3,enum=blocktx_api.Status" json:"status,omitempty"` + Chainwork string `protobuf:"bytes,7,opt,name=chainwork,proto3" json:"chainwork,omitempty"` } func (x *Block) Reset() { @@ -216,13 +215,6 @@ func (x *Block) GetHeight() uint64 { return 0 } -func (x *Block) GetOrphaned() bool { - if x != nil { - return x.Orphaned - } - return false -} - func (x *Block) GetProcessed() bool { if x != nil { return x.Processed @@ -784,121 +776,119 @@ var file_internal_blocktx_blocktx_api_blocktx_api_proto_rawDesc = []byte{ 0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, - 0x70, 0x22, 0xfe, 0x01, 0x0a, 0x05, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x12, 0x0a, 0x04, 0x68, + 0x70, 0x22, 0xe2, 0x01, 0x0a, 0x05, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x68, 0x61, 0x73, 0x68, 0x12, 0x23, 0x0a, 0x0d, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x70, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x48, 0x61, 0x73, 0x68, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0a, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x04, 0x52, 0x06, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x12, 0x1a, 0x0a, - 0x08, 0x6f, 0x72, 0x70, 0x68, 0x61, 0x6e, 0x65, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, - 0x08, 0x6f, 0x72, 0x70, 0x68, 0x61, 0x6e, 0x65, 0x64, 0x12, 0x1c, 0x0a, 0x09, 0x70, 0x72, 0x6f, - 0x63, 0x65, 0x73, 0x73, 0x65, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x70, 0x72, - 0x6f, 0x63, 0x65, 0x73, 0x73, 0x65, 0x64, 0x12, 0x2b, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, - 0x73, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x13, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, - 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, - 0x61, 0x74, 0x75, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x77, 0x6f, 0x72, - 0x6b, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x77, 0x6f, - 0x72, 0x6b, 0x22, 0x4c, 0x0a, 0x0c, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x12, 0x3c, 0x0a, 0x0c, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, - 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x52, 0x0c, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, - 0x22, 0xd7, 0x01, 0x0a, 0x10, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x1d, 0x0a, 0x0a, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 
0x5f, 0x68, - 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x09, 0x62, 0x6c, 0x6f, 0x63, 0x6b, - 0x48, 0x61, 0x73, 0x68, 0x12, 0x21, 0x0a, 0x0c, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x65, - 0x69, 0x67, 0x68, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x62, 0x6c, 0x6f, 0x63, - 0x6b, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x12, 0x29, 0x0a, 0x10, 0x74, 0x72, 0x61, 0x6e, 0x73, - 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x0c, 0x52, 0x0f, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x61, - 0x73, 0x68, 0x12, 0x1e, 0x0a, 0x0a, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x50, 0x61, - 0x74, 0x68, 0x12, 0x36, 0x0a, 0x0c, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x73, 0x74, 0x61, 0x74, - 0x75, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x13, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, - 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x0b, 0x62, - 0x6c, 0x6f, 0x63, 0x6b, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x61, 0x0a, 0x11, 0x54, 0x72, - 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x73, 0x12, - 0x4c, 0x0a, 0x12, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x62, - 0x6c, 0x6f, 0x63, 0x6b, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x62, 0x6c, - 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x11, 0x74, 0x72, 0x61, 0x6e, - 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x73, 0x22, 0x39, 0x0a, - 0x0b, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, - 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x68, 0x61, 0x73, 0x68, - 0x12, 0x16, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x22, 0x31, 0x0a, 0x09, 0x43, 0x6c, 0x65, 0x61, - 0x72, 0x44, 0x61, 0x74, 0x61, 0x12, 0x24, 0x0a, 0x0d, 0x72, 0x65, 0x74, 0x65, 0x6e, 0x74, 0x69, - 0x6f, 0x6e, 0x44, 0x61, 0x79, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0d, 0x72, 0x65, - 0x74, 0x65, 0x6e, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x61, 0x79, 0x73, 0x22, 0x2a, 0x0a, 0x14, 0x52, - 0x6f, 0x77, 0x73, 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x72, 0x6f, 0x77, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x03, 0x52, 0x04, 0x72, 0x6f, 0x77, 0x73, 0x22, 0x48, 0x0a, 0x23, 0x44, 0x65, 0x6c, 0x55, 0x6e, - 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x50, 0x72, 0x6f, - 0x63, 0x65, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x21, - 0x0a, 0x0c, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x65, 0x64, 0x5f, 0x62, 0x79, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x65, 0x64, 0x42, - 0x79, 0x22, 0x63, 0x0a, 0x1d, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x56, - 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x5f, 0x72, 0x6f, 0x6f, - 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, - 0x6f, 0x6f, 
0x74, 0x12, 0x21, 0x0a, 0x0c, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x65, 0x69, - 0x67, 0x68, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, - 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x22, 0x6f, 0x0a, 0x1e, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, - 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x4d, 0x0a, 0x0c, 0x6d, 0x65, 0x72, 0x6b, - 0x6c, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2a, - 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x4d, 0x65, 0x72, - 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x0b, 0x6d, 0x65, 0x72, 0x6b, - 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x22, 0x5a, 0x0a, 0x1e, 0x4d, 0x65, 0x72, 0x6b, 0x6c, - 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x38, 0x0a, 0x18, 0x75, 0x6e, 0x76, - 0x65, 0x72, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x65, - 0x69, 0x67, 0x68, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x04, 0x52, 0x16, 0x75, 0x6e, 0x76, - 0x65, 0x72, 0x69, 0x66, 0x69, 0x65, 0x64, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x69, 0x67, - 0x68, 0x74, 0x73, 0x2a, 0x3b, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0b, 0x0a, - 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x4c, 0x4f, - 0x4e, 0x47, 0x45, 0x53, 0x54, 0x10, 0x0a, 0x12, 0x09, 0x0a, 0x05, 0x53, 0x54, 0x41, 0x4c, 0x45, - 0x10, 0x14, 0x12, 0x0c, 0x0a, 0x08, 0x4f, 0x52, 0x50, 0x48, 0x41, 0x4e, 0x45, 0x44, 0x10, 0x1e, - 0x32, 0xad, 0x04, 0x0a, 0x0a, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x54, 0x78, 0x41, 0x50, 0x49, 0x12, - 0x3f, 0x0a, 0x06, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, - 0x79, 0x1a, 0x1b, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, - 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, - 0x12, 0x50, 0x0a, 0x11, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x16, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, + 0x04, 0x20, 0x01, 0x28, 0x04, 0x52, 0x06, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x12, 0x1c, 0x0a, + 0x09, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x65, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, + 0x52, 0x09, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x65, 0x64, 0x12, 0x2b, 0x0a, 0x06, 0x73, + 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x13, 0x2e, 0x62, 0x6c, + 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, + 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x63, 0x68, 0x61, 0x69, + 0x6e, 0x77, 0x6f, 0x72, 0x6b, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x63, 0x68, 0x61, + 0x69, 0x6e, 0x77, 0x6f, 0x72, 0x6b, 0x22, 0x4c, 0x0a, 0x0c, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x3c, 0x0a, 0x0c, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x62, + 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 
0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, + 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0c, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x22, 0xd7, 0x01, 0x0a, 0x10, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x1d, 0x0a, 0x0a, 0x62, 0x6c, 0x6f, + 0x63, 0x6b, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x09, 0x62, + 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x61, 0x73, 0x68, 0x12, 0x21, 0x0a, 0x0c, 0x62, 0x6c, 0x6f, 0x63, + 0x6b, 0x5f, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, + 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x12, 0x29, 0x0a, 0x10, 0x74, + 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0f, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x48, 0x61, 0x73, 0x68, 0x12, 0x1e, 0x0a, 0x0a, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, + 0x50, 0x61, 0x74, 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6d, 0x65, 0x72, 0x6b, + 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x36, 0x0a, 0x0c, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, + 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x13, 0x2e, 0x62, + 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, + 0x73, 0x52, 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x61, + 0x0a, 0x11, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, + 0x63, 0x6b, 0x73, 0x12, 0x4c, 0x0a, 0x12, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x5f, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x1d, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, + 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x11, + 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, + 0x73, 0x22, 0x39, 0x0a, 0x0b, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x12, 0x12, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, + 0x68, 0x61, 0x73, 0x68, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x22, 0x31, 0x0a, 0x09, + 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x12, 0x24, 0x0a, 0x0d, 0x72, 0x65, 0x74, + 0x65, 0x6e, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x61, 0x79, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, + 0x52, 0x0d, 0x72, 0x65, 0x74, 0x65, 0x6e, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x61, 0x79, 0x73, 0x22, + 0x2a, 0x0a, 0x14, 0x52, 0x6f, 0x77, 0x73, 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x72, 0x6f, 0x77, 0x73, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x72, 0x6f, 0x77, 0x73, 0x22, 0x48, 0x0a, 0x23, 0x44, + 0x65, 0x6c, 0x55, 0x6e, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x42, 0x6c, 0x6f, 0x63, + 0x6b, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x65, 0x64, 0x5f, + 0x62, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, + 0x73, 0x65, 0x64, 0x42, 0x79, 0x22, 0x63, 0x0a, 0x1d, 0x4d, 
0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, + 0x6f, 0x6f, 0x74, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, + 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6d, 0x65, 0x72, + 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x62, 0x6c, 0x6f, 0x63, 0x6b, + 0x5f, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x62, + 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x22, 0x6f, 0x0a, 0x1e, 0x4d, 0x65, + 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x4d, 0x0a, 0x0c, + 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, + 0x2e, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x56, 0x65, 0x72, 0x69, 0x66, + 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x0b, + 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x22, 0x5a, 0x0a, 0x1e, 0x4d, + 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x38, 0x0a, + 0x18, 0x75, 0x6e, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x65, 0x64, 0x5f, 0x62, 0x6c, 0x6f, 0x63, + 0x6b, 0x5f, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x04, 0x52, + 0x16, 0x75, 0x6e, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x65, 0x64, 0x42, 0x6c, 0x6f, 0x63, 0x6b, + 0x48, 0x65, 0x69, 0x67, 0x68, 0x74, 0x73, 0x2a, 0x3b, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, + 0x73, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x0b, + 0x0a, 0x07, 0x4c, 0x4f, 0x4e, 0x47, 0x45, 0x53, 0x54, 0x10, 0x0a, 0x12, 0x09, 0x0a, 0x05, 0x53, + 0x54, 0x41, 0x4c, 0x45, 0x10, 0x14, 0x12, 0x0c, 0x0a, 0x08, 0x4f, 0x52, 0x50, 0x48, 0x41, 0x4e, + 0x45, 0x44, 0x10, 0x1e, 0x32, 0xad, 0x04, 0x0a, 0x0a, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x54, 0x78, + 0x41, 0x50, 0x49, 0x12, 0x3f, 0x0a, 0x06, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x12, 0x16, 0x2e, + 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, + 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x1b, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, + 0x61, 0x70, 0x69, 0x2e, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x22, 0x00, 0x12, 0x50, 0x0a, 0x11, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x54, 0x72, 0x61, + 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x16, 0x2e, 0x62, 0x6c, 0x6f, 0x63, + 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, + 0x61, 0x1a, 0x21, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, + 0x52, 0x6f, 0x77, 0x73, 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x4a, 0x0a, 0x0b, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x42, + 0x6c, 0x6f, 0x63, 0x6b, 0x73, 0x12, 0x16, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x1a, 0x21, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x6f, 0x77, 
0x73, 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x22, 0x00, 0x12, 0x4a, 0x0a, 0x0b, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x42, 0x6c, 0x6f, 0x63, 0x6b, - 0x73, 0x12, 0x16, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, - 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x1a, 0x21, 0x2e, 0x62, 0x6c, 0x6f, 0x63, - 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x6f, 0x77, 0x73, 0x41, 0x66, 0x66, 0x65, - 0x63, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x58, - 0x0a, 0x19, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x54, 0x72, 0x61, 0x6e, - 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x4d, 0x61, 0x70, 0x12, 0x16, 0x2e, 0x62, 0x6c, - 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, - 0x61, 0x74, 0x61, 0x1a, 0x21, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, - 0x69, 0x2e, 0x52, 0x6f, 0x77, 0x73, 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x75, 0x0a, 0x1c, 0x44, 0x65, 0x6c, 0x55, - 0x6e, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x50, 0x72, - 0x6f, 0x63, 0x65, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x12, 0x30, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, - 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x44, 0x65, 0x6c, 0x55, 0x6e, 0x66, 0x69, 0x6e, 0x69, - 0x73, 0x68, 0x65, 0x64, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, - 0x69, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x62, 0x6c, 0x6f, - 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x6f, 0x77, 0x73, 0x41, 0x66, 0x66, - 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, - 0x6f, 0x0a, 0x11, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, - 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x2b, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, - 0x70, 0x69, 0x2e, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x56, 0x65, - 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x2b, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, - 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, - 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, - 0x42, 0x0f, 0x5a, 0x0d, 0x2e, 0x3b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, - 0x69, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x22, 0x00, 0x12, 0x58, 0x0a, 0x19, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x42, 0x6c, 0x6f, 0x63, 0x6b, + 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x4d, 0x61, 0x70, 0x12, + 0x16, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, + 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x1a, 0x21, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, + 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x6f, 0x77, 0x73, 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, + 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x75, 0x0a, 0x1c, + 0x44, 0x65, 0x6c, 0x55, 0x6e, 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x42, 0x6c, 0x6f, + 0x63, 0x6b, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x12, 0x30, 0x2e, 0x62, + 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 
0x2e, 0x44, 0x65, 0x6c, 0x55, 0x6e, + 0x66, 0x69, 0x6e, 0x69, 0x73, 0x68, 0x65, 0x64, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x50, 0x72, 0x6f, + 0x63, 0x65, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, + 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x6f, 0x77, + 0x73, 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x22, 0x00, 0x12, 0x6f, 0x0a, 0x11, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x4d, 0x65, 0x72, + 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x73, 0x12, 0x2b, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, + 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, + 0x74, 0x73, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2b, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, + 0x61, 0x70, 0x69, 0x2e, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x56, 0x65, + 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x22, 0x00, 0x42, 0x0f, 0x5a, 0x0d, 0x2e, 0x3b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, + 0x78, 0x5f, 0x61, 0x70, 0x69, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/internal/blocktx/blocktx_api/blocktx_api.proto b/internal/blocktx/blocktx_api/blocktx_api.proto index 139e00ad2..ab7ca5364 100644 --- a/internal/blocktx/blocktx_api/blocktx_api.proto +++ b/internal/blocktx/blocktx_api/blocktx_api.proto @@ -49,10 +49,9 @@ message Block { bytes previous_hash = 2; // Little endian bytes merkle_root = 3; // Little endian uint64 height = 4; - bool orphaned = 5; - bool processed = 6; - Status status = 7; - string chainwork = 8; + bool processed = 5; + Status status = 6; + string chainwork = 7; } // swagger:model Transactions diff --git a/internal/blocktx/chain.go b/internal/blocktx/chain.go new file mode 100644 index 000000000..3bce4e301 --- /dev/null +++ b/internal/blocktx/chain.go @@ -0,0 +1,29 @@ +package blocktx + +import ( + "errors" + + "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" +) + +var ErrEmptyChain = errors.New("empty chain of blocks") + +type chain []*blocktx_api.Block + +func (c chain) getTip() (*blocktx_api.Block, error) { + if len(c) == 0 { + return nil, ErrEmptyChain + } + + return c[len(c)-1], nil +} + +func (c chain) getHashes() [][]byte { + hashes := make([][]byte, len(c)) + + for i, b := range c { + hashes[i] = b.Hash + } + + return hashes +} diff --git a/internal/blocktx/integration_test/fixtures/blocktx.block_transactions_map.yaml b/internal/blocktx/integration_test/fixtures/blocktx.block_transactions_map.yaml index 1c5fa6be0..d59dea517 100644 --- a/internal/blocktx/integration_test/fixtures/blocktx.block_transactions_map.yaml +++ b/internal/blocktx/integration_test/fixtures/blocktx.block_transactions_map.yaml @@ -1,8 +1,8 @@ -- blockid: 1 +- blockid: 1002 txid: 1000003 merkle_path: merkle-path-3 inserted_at: 2023-12-10 14:00:00 -- blockid: 3 +- blockid: 1004 txid: 1000005 merkle_path: merkle-path-5 inserted_at: 2023-12-10 14:00:00 diff --git a/internal/blocktx/integration_test/fixtures/blocktx.blocks.yaml b/internal/blocktx/integration_test/fixtures/blocktx.blocks.yaml index 6c256dd42..920da9cbc 100644 --- a/internal/blocktx/integration_test/fixtures/blocktx.blocks.yaml +++ b/internal/blocktx/integration_test/fixtures/blocktx.blocks.yaml @@ -1,5 +1,5 @@ - inserted_at: 2023-12-15 14:00:00 - id: 0 + id: 1001 hash: 
0xf97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000 prevhash: 0xb71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000 merkleroot: 0x7f4019eb006f5333cce752df387fa8443035c22291eb771ee5b16a02b81c8483 @@ -7,11 +7,11 @@ processed_at: 2023-12-15 14:10:00 size: 86840000 tx_count: 23477 - orphanedyn: false status: 10 + is_longest: true chainwork: '62209952899966' - inserted_at: 2023-12-15 14:30:00 - id: 1 + id: 1002 hash: 0xc9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000 prevhash: 0x67708796ef57464ed9eaf2a663d3da32372e4c2fb65558020000000000000000 merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 @@ -19,11 +19,11 @@ processed_at: 2023-12-15 14:30:00 size: 20160000 tx_count: 6523 - orphanedyn: false status: 10 + is_longest: true chainwork: '62209952899966' - inserted_at: 2023-12-15 14:40:00 - id: 2 + id: 1003 hash: 0xe1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000 prevhash: 0xc9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000 merkleroot: 0x4b58b0402a84012269b124f78c91a78a814eb3c9caa03f1df1d33172b23082d1 @@ -31,17 +31,67 @@ processed_at: 2023-12-15 14:40:00 size: 299650000 tx_count: 62162 - orphanedyn: false status: 10 + is_longest: true chainwork: '62209952899966' - inserted_at: 2023-12-15 14:50:00 - id: 3 + id: 1004 hash: 0x76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000 - prevhash: 0x5696fc6e504b6aa2ae5d9c46b9418192dc61bd1b2e3364030000000000000000 + prevhash: 0xe1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000 merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 height: 822017 size: 8630000 tx_count: 36724 - orphanedyn: false status: 10 + is_longest: true chainwork: '62209952899966' + +# gap + +- inserted_at: 2023-12-15 14:50:00 + id: 1005 + hash: 0x00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656 + prevhash: 0x212a7598a62295f1a520ef525a34f657bc636d9da9bda74acdf6f051cd84c353 + merkleroot: 0x51f33784f6d54f1d6414fa4d8b8d6904215cb16a3fa0a8b1fe02e456a90544d4 + height: 822019 + size: 8630000 + tx_count: 36724 + status: 30 # ORPHANED + is_longest: false + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:50:00 + id: 1006 + hash: 0x00000000000000000a5c4d27edc0178e953a5bb0ab0081e66cb30c8890484076 + prevhash: 0x00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656 + merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 + height: 822020 + size: 8630000 + tx_count: 36724 + status: 30 # ORPHANED + is_longest: false + chainwork: '62209952899966' + +# gap + +- inserted_at: 2023-12-15 14:50:00 + id: 1007 + hash: 0x0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c + prevhash: 0x743c7dc491ae5fddd37ebf63058f9574b4db9f6a89f483a4baec31820e5df61d + merkleroot: 0xda71199f8ed9203d8a765595e6c030a22e5ed8330b1abb467a82c97d7d21d512 + height: 822022 + size: 8630000 + tx_count: 36724 + status: 30 # ORPHANED + is_longest: false + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:50:00 + id: 1008 + hash: 0x0000000000000000082131979a4e25a5101912a5f8461e18f306d23e158161cd + prevhash: 0x0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c + merkleroot: 0x8e3177a33d6a87785b7104f20ca345e1713ae11ec2723a41028efddabebb861b + height: 822023 + size: 8630000 + tx_count: 36724 + status: 30 # ORPHANED + is_longest: false + chainwork: '12301481384759134' diff --git a/internal/blocktx/integration_test/reorg_integration_test.go b/internal/blocktx/integration_test/reorg_integration_test.go 
index 4c3d459dc..f9556f3b2 100644 --- a/internal/blocktx/integration_test/reorg_integration_test.go +++ b/internal/blocktx/integration_test/reorg_integration_test.go @@ -1,13 +1,13 @@ package integrationtest // Components of this test: -// Postgresql Store - running on docker // Blocktx Processor +// Postgresql Store - running on docker // PeerHandler - mocked // Message queue sending txs to metamorph - mocked // // Flow of this test: -// 1. A list of blocks from height 822014 to 822017 is added to db from fixtures +// 1. Blocks at heights 822014-822017, 822019-822020 and 822022-822023 are added to db from fixtures // 2. A hardcoded msg with competing block at height 822015 is being sent through the mocked PeerHandler // 3. This block has a chainwork lower than the current tip of chain - becomes STALE // 4. Registered transactions from this block that are not in the longest chain are published to metamorph message queue with blockstatus = STALE @@ -18,6 +18,15 @@ package integrationtest // - transactions from the stale chain becoming the longest are published // - transactions that were previously in the longest chain are published with updated block data // - transactions that were previously in the longest chain, but are not in the stale chain are published with blockstatus = STALE +// 9. A new block at height 822018 is being sent through the mocked PeerHandler +// 10. This block is extending the previously LONGEST but now STALE chain and finds an orphaned chain at heights 822019, 822020 +// 11. The tip of the orphaned chain does not have a greater chainwork than the current longest chain - entire orphaned chain becomes STALE +// 12. A new block at height 822021 is being sent through the mocked PeerHandler +// 13. This block extends the STALE chain and finds an orphaned chain at heights 822022, 822023 +// 14. The tip of the orphaned chain has a greater chainwork than the current tip of longest chain +// - entire STALE chain at heights 822015 - 822023 becomes LONGEST +// - entire LONGEST chain at heights 822015 - 822016 becomes STALE +// 15.
Verification of reorg - checking if statuses are correctly switched (for blocks and for transactions) import ( "context" @@ -94,6 +103,30 @@ func testmain(m *testing.M) int { return m.Run() } +const ( + blockHash822011 = "bf9be09b345cc2d904b59951cc8a2ed452d8d143e2e25cde64058270fb3a667a" + + blockHash822014_startOfChain = "f97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000" + blockHash822015 = "c9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000" + blockHash822016 = "e1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000" + blockHash822017 = "76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000" + + blockHash822015_fork = "82471bbf045ab13825a245b37de71d77ec12513b37e2524ec11551d18c19f7c3" + blockHash822016_fork = "032c3688bc7536b2d787f3a196b1145a09bf33183cd1448ff6b1a9dfbb022db8" + + blockHash822018 = "212a7598a62295f1a520ef525a34f657bc636d9da9bda74acdf6f051cd84c353" + blockHash822019_orphan = "00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656" + blockHash822020_orphan = "00000000000000000a5c4d27edc0178e953a5bb0ab0081e66cb30c8890484076" + blockHash822021 = "743c7dc491ae5fddd37ebf63058f9574b4db9f6a89f483a4baec31820e5df61d" + blockHash822022_orphan = "0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c" + blockHash822023_orphan = "0000000000000000082131979a4e25a5101912a5f8461e18f306d23e158161cd" + + txhash822015 = "cd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853" + txhash822015_2 = "b16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430" + txhash822016 = "2ff4430eb883c6f6c0640a5d716b2d107bbc0efa5aeaa237aec796d4686b0a8f" + txhash822017 = "ece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6" +) + func TestReorg(t *testing.T) { if testing.Short() { t.Skip("skipping integration test") @@ -141,13 +174,19 @@ func TestReorg(t *testing.T) { // only load fixtures at this point testutils.LoadFixtures(t, dbConn, "fixtures") - staleBlockHash, expectedTxs := testHandleStaleBlock(t, p2pMsgHandler, blocktxStore, publishedTxs) + expectedTxs := testHandleStaleBlock(t, p2pMsgHandler, blocktxStore) // verify the transaction was correctly published to metamorph verifyTxs(t, expectedTxs, publishedTxs) // clear slice for the next test publishedTxs = publishedTxs[:0] - expectedTxs = testHandleReorg(t, p2pMsgHandler, blocktxStore, publishedTxs, staleBlockHash) + expectedTxs = testHandleReorg(t, p2pMsgHandler, blocktxStore) + verifyTxs(t, expectedTxs, publishedTxs) + publishedTxs = publishedTxs[:0] + + testHandleStaleOrphans(t, p2pMsgHandler, blocktxStore) + + expectedTxs = testHandleOrphansReorg(t, p2pMsgHandler, blocktxStore) verifyTxs(t, expectedTxs, publishedTxs) } @@ -176,12 +215,11 @@ func testHandleBlockOnEmptyDatabase(t *testing.T, p2pMsgHandler *blocktx_p2p.Msg // Allow DB to process the block time.Sleep(200 * time.Millisecond) - blockHashZero := blockMessage.Header.BlockHash() - verifyBlock(t, store, &blockHashZero, 822011, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822011, 822011, blocktx_api.Status_LONGEST) } -func testHandleStaleBlock(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store *postgresql.PostgreSQL, publishedTxs []*blocktx_api.TransactionBlock) (*chainhash.Hash, []*blocktx_api.TransactionBlock) { - prevBlockHash := testutils.RevChainhash(t, "f97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000") +func testHandleStaleBlock(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store *postgresql.PostgreSQL) []*blocktx_api.TransactionBlock { + 
prevBlockHash := testutils.RevChainhash(t, blockHash822014_startOfChain) txHash := testutils.RevChainhash(t, "cd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853") txHash2 := testutils.RevChainhash(t, "b16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430") // should not be published - is already in the longest chain treeStore := bc.BuildMerkleTreeStoreChainHash([]*chainhash.Hash{txHash, txHash2}) @@ -198,82 +236,80 @@ func testHandleStaleBlock(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, s Height: uint64(822015), // competing block already exists at this height TransactionHashes: []*chainhash.Hash{txHash, txHash2}, } + blockHash := blockMessage.Header.BlockHash() p2pMsgHandler.OnReceive(blockMessage, nil) // Allow DB to process the block time.Sleep(200 * time.Millisecond) - blockHashStale := blockMessage.Header.BlockHash() - verifyBlock(t, store, &blockHashStale, 822015, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822015_fork, 822015, blocktx_api.Status_STALE) // transactions expected to be published to metamorph expectedTxs := []*blocktx_api.TransactionBlock{ { - BlockHash: blockHashStale[:], + BlockHash: blockHash[:], BlockHeight: 822015, TransactionHash: txHash[:], BlockStatus: blocktx_api.Status_STALE, }, } - return &blockHashStale, expectedTxs + return expectedTxs } -func testHandleReorg(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store *postgresql.PostgreSQL, publishedTxs []*blocktx_api.TransactionBlock, staleBlockHash *chainhash.Hash) []*blocktx_api.TransactionBlock { - txHash := testutils.RevChainhash(t, "2ff4430eb883c6f6c0640a5d716b2d107bbc0efa5aeaa237aec796d4686b0a8f") +func testHandleReorg(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store *postgresql.PostgreSQL) []*blocktx_api.TransactionBlock { + txHash := testutils.RevChainhash(t, txhash822016) txHash2 := testutils.RevChainhash(t, "ee76f5b746893d3e6ae6a14a15e464704f4ebd601537820933789740acdcf6aa") treeStore := bc.BuildMerkleTreeStoreChainHash([]*chainhash.Hash{txHash, txHash2}) merkleRoot := treeStore[len(treeStore)-1] + prevhash := testutils.RevChainhash(t, blockHash822015_fork) // should become LONGEST // reorg should happen blockMessage := &blockchain.BlockMessage{ Header: &wire.BlockHeader{ Version: 541065216, - PrevBlock: *staleBlockHash, // block with status STALE at height 822015 + PrevBlock: *prevhash, // block with status STALE at height 822015 MerkleRoot: *merkleRoot, - Bits: 0x1a05db8b, // chainwork: "12301577519373468" higher than the competing block + Bits: 0x1a05db8b, // chainwork: "12301577519373468" higher than the competing chain }, Height: uint64(822016), // competing block already exists at this height TransactionHashes: []*chainhash.Hash{txHash, txHash2}, } + blockHash := blockMessage.Header.BlockHash() p2pMsgHandler.OnReceive(blockMessage, nil) // Allow DB to process the block and perform reorg time.Sleep(1 * time.Second) // verify that reorg happened - blockHashLongest := blockMessage.Header.BlockHash() - verifyBlock(t, store, &blockHashLongest, 822016, blocktx_api.Status_LONGEST) - verifyBlock(t, store, staleBlockHash, 822015, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822016_fork, 822016, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822015_fork, 822015, blocktx_api.Status_LONGEST) - previouslyLongestBlockHash := testutils.RevChainhash(t, "c9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000") - verifyBlock(t, store, previouslyLongestBlockHash, 822015, blocktx_api.Status_STALE) + verifyBlock(t, 
store, blockHash822015, 822015, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822016, 822016, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822017, 822017, blocktx_api.Status_STALE) - previouslyLongestBlockHash = testutils.RevChainhash(t, "e1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000") - verifyBlock(t, store, previouslyLongestBlockHash, 822016, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822014_startOfChain, 822014, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822019_orphan, 822019, blocktx_api.Status_ORPHANED) - previouslyLongestBlockHash = testutils.RevChainhash(t, "76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000") - verifyBlock(t, store, previouslyLongestBlockHash, 822017, blocktx_api.Status_STALE) - - beginningOfChain := testutils.RevChainhash(t, "f97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000") - verifyBlock(t, store, beginningOfChain, 822014, blocktx_api.Status_LONGEST) + previouslyLongestBlockHash := testutils.RevChainhash(t, blockHash822017) expectedTxs := []*blocktx_api.TransactionBlock{ { // previously in stale chain - BlockHash: staleBlockHash[:], + BlockHash: prevhash[:], BlockHeight: 822015, - TransactionHash: testutils.RevChainhash(t, "cd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853")[:], + TransactionHash: testutils.RevChainhash(t, txhash822015)[:], BlockStatus: blocktx_api.Status_LONGEST, }, { // previously in longest chain - also in stale - should have blockdata updated - BlockHash: staleBlockHash[:], + BlockHash: prevhash[:], BlockHeight: 822015, - TransactionHash: testutils.RevChainhash(t, "b16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430")[:], + TransactionHash: testutils.RevChainhash(t, txhash822015_2)[:], BlockStatus: blocktx_api.Status_LONGEST, }, { // newly mined from stale block that became longest after reorg - BlockHash: blockHashLongest[:], + BlockHash: blockHash[:], BlockHeight: 822016, TransactionHash: txHash[:], BlockStatus: blocktx_api.Status_LONGEST, @@ -281,15 +317,120 @@ func testHandleReorg(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store { // previously longest chain - not found in the new longest chain BlockHash: previouslyLongestBlockHash[:], BlockHeight: 822017, - TransactionHash: testutils.RevChainhash(t, "ece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6")[:], + TransactionHash: testutils.RevChainhash(t, txhash822017)[:], + BlockStatus: blocktx_api.Status_STALE, + }, + } + + return expectedTxs +} + +func testHandleStaleOrphans(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store *postgresql.PostgreSQL) { + txHash := testutils.RevChainhash(t, "de0753d9ce6f92e340843cbfdd11e58beff8c578956ecdec4c461b018a26b8a9") + merkleRoot := testutils.RevChainhash(t, "de0753d9ce6f92e340843cbfdd11e58beff8c578956ecdec4c461b018a26b8a9") + prevhash := testutils.RevChainhash(t, blockHash822017) + + // should become STALE + blockMessage := &blockchain.BlockMessage{ + Header: &wire.BlockHeader{ + Version: 541065216, + PrevBlock: *prevhash, // block with status STALE at height 822017 + MerkleRoot: *merkleRoot, + Bits: 0x1d00ffff, // chainwork: "4295032833" lower than the competing chain + }, + Height: uint64(822018), + TransactionHashes: []*chainhash.Hash{txHash}, + } + + p2pMsgHandler.OnReceive(blockMessage, nil) + // Allow DB to process the block and find orphans + time.Sleep(1 * time.Second) + + // verify that the block and orphans have STALE status + verifyBlock(t, store, blockHash822018, 822018, 
blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822019_orphan, 822019, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822020_orphan, 822020, blocktx_api.Status_STALE) + + // verify that the blocks after the next gap are still orphans + verifyBlock(t, store, blockHash822022_orphan, 822022, blocktx_api.Status_ORPHANED) + verifyBlock(t, store, blockHash822023_orphan, 822023, blocktx_api.Status_ORPHANED) +} + +func testHandleOrphansReorg(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store *postgresql.PostgreSQL) []*blocktx_api.TransactionBlock { + txHash := testutils.RevChainhash(t, "3e15f823a7de25c26ce9001d4814a6f0ebc915a1ca4f1ba9cfac720bd941c39c") + merkleRoot := testutils.RevChainhash(t, "3e15f823a7de25c26ce9001d4814a6f0ebc915a1ca4f1ba9cfac720bd941c39c") + prevhash := testutils.RevChainhash(t, blockHash822020_orphan) + + // should become LONGEST + // reorg should happen + blockMessage := &blockchain.BlockMessage{ + Header: &wire.BlockHeader{ + Version: 541065216, + PrevBlock: *prevhash, // block with status STALE at height 822020 + MerkleRoot: *merkleRoot, + Bits: 0x1d00ffff, // chainwork: "4295032833" lower than the competing chain + // but the sum of orphan chain has a higher chainwork and should cause a reorg + }, + Height: uint64(822021), + TransactionHashes: []*chainhash.Hash{txHash}, + } + + p2pMsgHandler.OnReceive(blockMessage, nil) + // Allow DB to process the block, find orphans and perform reorg + time.Sleep(2 * time.Second) + + // verify that the reorg happened + verifyBlock(t, store, blockHash822014_startOfChain, 822014, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822015, 822015, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822016, 822016, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822017, 822017, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822018, 822018, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822019_orphan, 822019, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822020_orphan, 822020, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822021, 822021, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822022_orphan, 822022, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822023_orphan, 822023, blocktx_api.Status_LONGEST) + + verifyBlock(t, store, blockHash822015_fork, 822015, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822016_fork, 822016, blocktx_api.Status_STALE) + + bh822015 := testutils.RevChainhash(t, blockHash822015) + bh822015_fork := testutils.RevChainhash(t, blockHash822015_fork) + bh822016_fork := testutils.RevChainhash(t, blockHash822016_fork) + bh822017 := testutils.RevChainhash(t, blockHash822017) + + expectedTxs := []*blocktx_api.TransactionBlock{ + { // in stale chain + BlockHash: bh822015_fork[:], + BlockHeight: 822015, + TransactionHash: testutils.RevChainhash(t, txhash822015)[:], + BlockStatus: blocktx_api.Status_STALE, + }, + { // in both chains - should have blockdata updated + BlockHash: bh822015[:], + BlockHeight: 822015, + TransactionHash: testutils.RevChainhash(t, txhash822015_2)[:], + BlockStatus: blocktx_api.Status_LONGEST, + }, + { // in stale chain + BlockHash: bh822016_fork[:], + BlockHeight: 822016, + TransactionHash: testutils.RevChainhash(t, txhash822016)[:], BlockStatus: blocktx_api.Status_STALE, }, + { // in now longest chain + BlockHash: bh822017[:], + BlockHeight: 822017, + TransactionHash: testutils.RevChainhash(t, txhash822017)[:], + BlockStatus: 
blocktx_api.Status_LONGEST, + }, } return expectedTxs } -func verifyBlock(t *testing.T, store *postgresql.PostgreSQL, hash *chainhash.Hash, height uint64, status blocktx_api.Status) { +func verifyBlock(t *testing.T, store *postgresql.PostgreSQL, hashStr string, height uint64, status blocktx_api.Status) { + hash := testutils.RevChainhash(t, hashStr) block, err := store.GetBlock(context.Background(), hash) require.NoError(t, err) require.Equal(t, height, block.Height) diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index a852eb469..2040f2c3e 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -213,16 +213,33 @@ func (p *Processor) StartBlockProcessing() { case <-p.ctx.Done(): return case blockMsg := <-p.blockProcessCh: - blockhash := blockMsg.Header.BlockHash() + var err error + blockHash := blockMsg.Header.BlockHash() + timeStart := time.Now() defer p.stopBlockProcessGuard(&blockhash) // release guardian at the end p.logger.Info("received block", slog.String("hash", blockhash.String())) - err := p.processBlock(blockMsg) + err = p.processBlock(blockMsg) if err != nil { p.logger.Error("block processing failed", slog.String("hash", blockhash.String()), slog.String("err", err.Error())) p.unlockBlock(p.ctx, &blockhash) + + _, errDel := p.store.DelBlockProcessing(p.ctx, &blockHash, p.hostname) + if errDel != nil { + p.logger.Error("failed to delete block processing", slog.String("hash", blockHash.String()), slog.String("err", errDel.Error())) + } + continue + } + + err = p.store.MarkBlockAsDone(p.ctx, &blockHash, blockMsg.Size, uint64(len(blockMsg.TransactionHashes))) + if err != nil { + p.logger.Error("unable to mark block as processed", slog.String("hash", blockHash.String()), slog.String("err", err.Error())) + continue } + + // add the total block processing time to the stats + p.logger.Info("Processed block", slog.String("hash", blockHash.String()), slog.Int("txs", len(blockMsg.TransactionHashes)), slog.String("duration", time.Since(timeStart).String())) } } }() @@ -384,7 +401,7 @@ func (p *Processor) publishMinedTxs(txHashes []*chainhash.Hash) error { hashesBytes[i] = h[:] } - minedTxs, err := p.store.GetMinedTransactions(p.ctx, hashesBytes) + minedTxs, err := p.store.GetMinedTransactions(p.ctx, hashesBytes, false) if err != nil { return fmt.Errorf("failed to get mined transactions: %v", err) } @@ -395,6 +412,7 @@ func (p *Processor) publishMinedTxs(txHashes []*chainhash.Hash) error { BlockHash: minedTx.BlockHash, BlockHeight: minedTx.BlockHeight, MerklePath: minedTx.MerklePath, + BlockStatus: minedTx.BlockStatus, } err = p.mqClient.PublishMarshal(p.ctx, MinedTxsTopic, txBlock) } @@ -439,101 +457,78 @@ func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { tracing.EndTracing(span, err) }() - timeStart := time.Now() - blockHash = msg.Header.BlockHash() - previousBlockHash := msg.Header.PrevBlock - merkleRoot := msg.Header.MerkleRoot + blockHeight := msg.Height p.logger.Info("processing incoming block", slog.String("hash", blockHash.String())) - // don't process block that was already processed + var chain chain + var competing bool + var err error + + // check if we've already processed that block existingBlock, _ := p.store.GetBlock(ctx, &blockHash) - if existingBlock != nil && existingBlock.Processed { - p.logger.Warn("ignoring already existing block", slog.String("hash", blockHash.String())) - return nil - } - prevBlock, err := p.getPrevBlock(ctx, &previousBlockHash) - if err != nil { - p.logger.Error("unable to 
get previous block from db", slog.String("hash", blockHash.String()), slog.Uint64("height", msg.Height), slog.String("prevHash", previousBlockHash.String()), slog.String("err", err.Error())) - return err - } + if existingBlock != nil && existingBlock.Processed { + // if the block was already processed, check and update + // possible orphan children of that block + chain, competing, err = p.updateOrphans(ctx, existingBlock, competing) + if err != nil { + p.logger.Error("unable to check and update possible orphaned child blocks", slog.String("hash", blockHash.String()), slog.String("err", err.Error())) + return err + } - longestTipExists := true - if prevBlock == nil { - // This check is only in case there's a fresh, empty database - // with no blocks, to mark the first block as the LONGEST chain - longestTipExists, err = p.longestTipExists(ctx) + if len(chain) == 1 { // this means that no orphans were found + p.logger.Warn("ignoring already existing block", slog.String("hash", blockHash.String())) + return nil + } + } else { + // if the block was not yet processed, proceed normally + chain, competing, err = p.verifyAndInsertBlock(ctx, msg) if err != nil { p.logger.Error("unable to verify the longest tip existence in db", slog.String("hash", blockHash.String()), slog.Uint64("height", msg.Height), slog.String("err", err.Error())) return err } } - incomingBlock := createBlock(msg, prevBlock, longestTipExists) - - competing, err := p.competingChainsExist(ctx, incomingBlock) + chainTip, err := chain.getTip() if err != nil { - p.logger.Error("unable to check for competing chains", slog.String("hash", blockHash.String()), slog.Uint64("height", msg.Height), slog.String("err", err.Error())) + p.logger.Error("unable to get chain tip", slog.String("hash", blockHash.String()), slog.String("err", err.Error())) return err } shouldPerformReorg := false if competing { - p.logger.Info("Competing blocks found", slog.String("incoming block hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height)) - incomingBlock.Status = blocktx_api.Status_STALE - - hasGreatestChainwork, err := p.hasGreatestChainwork(ctx, incomingBlock) + hasGreatestChainwork, err := p.hasGreatestChainwork(ctx, chainTip) if err != nil { - p.logger.Error("unable to get the chain tip to verify chainwork", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) + p.logger.Error("unable to get the chain tip to verify chainwork", slog.String("hash", blockHash.String()), slog.Uint64("height", blockHeight), slog.String("err", err.Error())) return err } if hasGreatestChainwork { - p.logger.Info("chain reorg detected", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height)) + p.logger.Info("chain reorg detected", slog.String("hash", blockHash.String()), slog.Uint64("height", blockHeight)) shouldPerformReorg = true } } - p.logger.Info("Upserting block", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("status", incomingBlock.Status.String())) - - blockID, err := p.store.UpsertBlock(ctx, incomingBlock) - if err != nil { - p.logger.Error("unable to upsert block at given height", slog.String("hash", blockHash.String()), slog.Uint64("height", msg.Height), slog.String("err", err.Error())) - return err - } - - calculatedMerkleTree := p.buildMerkleTreeStoreChainHash(ctx, msg.TransactionHashes) - - if !merkleRoot.IsEqual(calculatedMerkleTree[len(calculatedMerkleTree)-1]) { - p.logger.Error("merkle root 
mismatch", slog.String("hash", blockHash.String())) - return err - } - - if err = p.storeTransactions(ctx, blockId, incomingBlock, calculatedMerkleTree); err != nil { - p.logger.Error("unable to mark block as mined", slog.String("hash", blockHash.String()), slog.String("err", err.Error())) - return err - } - txsToPublish := make([]store.TransactionBlock, 0) if shouldPerformReorg { - txsToPublish, err = p.performReorg(ctx, incomingBlock) + txsToPublish, err = p.performReorg(ctx, chainTip) if err != nil { - p.logger.Error("unable to perform reorg", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) + p.logger.Error("unable to perform reorg", slog.String("hash", blockHash.String()), slog.Uint64("height", blockHeight), slog.String("err", err.Error())) return err } - } else if incomingBlock.Status == blocktx_api.Status_STALE { - txsToPublish, err = p.getStaleTxs(ctx, blockId) + } else if chainTip.Status == blocktx_api.Status_STALE { + txsToPublish, err = p.getStaleTxs(ctx, chain) if err != nil { - p.logger.Error("unable to get stale transactions", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) + p.logger.Error("unable to get stale transactions", slog.String("hash", blockHash.String()), slog.Uint64("height", blockHeight), slog.String("err", err.Error())) return err } - } else if incomingBlock.Status == blocktx_api.Status_LONGEST { - txsToPublish, err = p.store.GetRegisteredTransactions(ctx, blockId) + } else if chainTip.Status == blocktx_api.Status_LONGEST { + txsToPublish, err = p.store.GetRegisteredTxsByBlockHashes(ctx, chain.getHashes()) if err != nil { - p.logger.Error("unable to get registered transactions", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) + p.logger.Error("unable to get registered transactions", slog.String("hash", blockHash.String()), slog.Uint64("height", blockHeight), slog.String("err", err.Error())) return err } } @@ -555,15 +550,63 @@ func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { } } - if err = p.store.MarkBlockAsDone(ctx, &blockHash, msg.Size, uint64(len(msg.TransactionHashes))); err != nil { - p.logger.Error("unable to mark block as processed", slog.String("hash", blockHash.String()), slog.String("err", err.Error())) - return err + return nil +} + +func (p *Processor) verifyAndInsertBlock(ctx context.Context, msg *p2p.BlockMessage) (chain, bool, error) { + blockHash := msg.Header.BlockHash() + previousBlockHash := msg.Header.PrevBlock + + prevBlock, err := p.getPrevBlock(ctx, &previousBlockHash) + if err != nil { + p.logger.Error("unable to get previous block from db", slog.String("hash", blockHash.String()), slog.Uint64("height", msg.Height), slog.String("prevHash", previousBlockHash.String()), slog.String("err", err.Error())) + return nil, false, err + } + + longestTipExists := true + if prevBlock == nil { + // This check is only in case there's a fresh, empty database + // with no blocks, to mark the first block as the LONGEST chain + longestTipExists, err = p.longestTipExists(ctx) + if err != nil { + p.logger.Error("unable to verify the longest tip existance in db", slog.String("hash", blockHash.String()), slog.Uint64("height", msg.Height), slog.String("err", err.Error())) + return nil, false, err + } } - // add the total block processing time to the stats - p.logger.Info("Processed block", slog.String("hash", 
blockHash.String()), slog.Int("txs", len(msg.TransactionHashes)), slog.String("duration", time.Since(timeStart).String())) + incomingBlock := createBlock(msg, prevBlock, longestTipExists) - return nil + competing, err := p.competingChainsExist(ctx, incomingBlock) + if err != nil { + p.logger.Error("unable to check for competing chains", slog.String("hash", blockHash.String()), slog.Uint64("height", msg.Height), slog.String("err", err.Error())) + return nil, false, err + } + + if competing { + p.logger.Info("Competing blocks found", slog.String("incoming block hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height)) + incomingBlock.Status = blocktx_api.Status_STALE + } + + p.logger.Info("Inserting block", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("status", incomingBlock.Status.String())) + + err = p.insertBlockAndStoreTransactions(ctx, incomingBlock, msg.TransactionHashes, msg.Header.MerkleRoot) + if err != nil { + p.logger.Error("unable to insert block and store its transactions", slog.String("hash", blockHash.String()), slog.String("err", err.Error())) + return nil, false, err + } + + // if the block is ORPHANED, there's no need to process it any further + if incomingBlock.Status == blocktx_api.Status_ORPHANED { + return chain{incomingBlock}, false, nil + } + + chain, competing, err := p.updateOrphans(ctx, incomingBlock, competing) + if err != nil { + p.logger.Error("unable to check and update possible orphaned child blocks", slog.String("hash", blockHash.String()), slog.String("err", err.Error())) + return nil, false, err + } + + return chain, competing, nil } func (p *Processor) getPrevBlock(ctx context.Context, prevHash *chainhash.Hash) (*blocktx_api.Block, error) { @@ -610,97 +653,60 @@ func (p *Processor) competingChainsExist(ctx context.Context, block *blocktx_api return true, nil } -func (p *Processor) hasGreatestChainwork(ctx context.Context, incomingBlock *blocktx_api.Block) (bool, error) { - tip, err := p.store.GetChainTip(ctx) - if err != nil && !errors.Is(err, store.ErrBlockNotFound) { - return false, err - } - - // this can happen only in case the blocks table is empty - if tip == nil { - return true, nil - } - - tipChainWork := new(big.Int) - tipChainWork.SetString(tip.Chainwork, 10) - - incomingBlockChainwork := new(big.Int) - incomingBlockChainwork.SetString(incomingBlock.Chainwork, 10) - - return tipChainWork.Cmp(incomingBlockChainwork) < 0, nil -} - -func (p *Processor) performReorg(ctx context.Context, incomingBlock *blocktx_api.Block) ([]store.TransactionBlock, error) { - staleBlocks, err := p.store.GetStaleChainBackFromHash(ctx, incomingBlock.Hash) +func (p *Processor) hasGreatestChainwork(ctx context.Context, competingChainTip *blocktx_api.Block) (bool, error) { + staleBlocks, err := p.store.GetStaleChainBackFromHash(ctx, competingChainTip.Hash) if err != nil { - return nil, err + return false, err } - lowestHeight := incomingBlock.Height + lowestHeight := competingChainTip.Height if len(staleBlocks) > 0 { lowestHeight = getLowestHeight(staleBlocks) } longestBlocks, err := p.store.GetLongestChainFromHeight(ctx, lowestHeight) if err != nil { - return nil, err + return false, err } - staleHashes := make([][]byte, 0) - longestHashes := make([][]byte, len(longestBlocks)) - blockStatusUpdates := make([]store.BlockStatusUpdate, 0) + sumStaleChainwork := big.NewInt(0) + sumLongChainwork := big.NewInt(0) for _, b := range staleBlocks { - staleHashes = append(staleHashes, b.Hash) - update := 
store.BlockStatusUpdate{Hash: b.Hash, Status: blocktx_api.Status_LONGEST} - blockStatusUpdates = append(blockStatusUpdates, update) + chainwork := new(big.Int) + chainwork.SetString(b.Chainwork, 10) + sumStaleChainwork = sumStaleChainwork.Add(sumStaleChainwork, chainwork) } - for i, b := range longestBlocks { - longestHashes[i] = b.Hash - update := store.BlockStatusUpdate{Hash: b.Hash, Status: blocktx_api.Status_STALE} - blockStatusUpdates = append(blockStatusUpdates, update) + for _, b := range longestBlocks { + chainwork := new(big.Int) + chainwork.SetString(b.Chainwork, 10) + sumLongChainwork = sumLongChainwork.Add(sumLongChainwork, chainwork) } - registeredTxs, err := p.store.GetRegisteredTxsByBlockHashes(ctx, append(staleHashes, longestHashes...)) - if err != nil { - return nil, err - } + return sumLongChainwork.Cmp(sumStaleChainwork) < 0, nil +} - err = p.store.UpdateBlocksStatuses(ctx, blockStatusUpdates) +func (p *Processor) insertBlockAndStoreTransactions(ctx context.Context, incomingBlock *blocktx_api.Block, txHashes []*chainhash.Hash, merkleRoot chainhash.Hash) error { + blockId, err := p.store.InsertBlock(ctx, incomingBlock) if err != nil { - return nil, err - } - - p.logger.Info("reorg performed successfully") - - prevLongestTxs := make([]store.TransactionBlock, 0) - prevStaleTxs := make([]store.TransactionBlock, 0) - - for _, tx := range registeredTxs { - switch tx.BlockStatus { - case blocktx_api.Status_LONGEST: - prevLongestTxs = append(prevLongestTxs, tx) - case blocktx_api.Status_STALE: - prevStaleTxs = append(prevStaleTxs, tx) - default: - // do nothing - ignore ORPHANED and UNKNOWN blocks - } + p.logger.Error("unable to insert block at given height", slog.String("hash", getHashStringNoErr(incomingBlock.Hash)), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) + return err } - nowMinedTxs, nowStaleTxs := findMinedAndStaleTxs(prevStaleTxs, prevLongestTxs) + calculatedMerkleTree := buildMerkleTreeStoreChainHash(ctx, txHashes) - for i := range nowMinedTxs { - nowMinedTxs[i].BlockStatus = blocktx_api.Status_LONGEST + if !merkleRoot.IsEqual(calculatedMerkleTree[len(calculatedMerkleTree)-1]) { + p.logger.Error("merkle root mismatch", slog.String("hash", getHashStringNoErr(incomingBlock.Hash))) + return err } - for i := range nowStaleTxs { - nowStaleTxs[i].BlockStatus = blocktx_api.Status_STALE + if err = p.storeTransactions(ctx, blockId, incomingBlock, calculatedMerkleTree); err != nil { + p.logger.Error("unable to store transactions from block", slog.String("hash", getHashStringNoErr(incomingBlock.Hash)), slog.String("err", err.Error())) + return err } - txsToPublish := append(nowMinedTxs, nowStaleTxs...) - - return txsToPublish, nil + return nil } func (p *Processor) storeTransactions(ctx context.Context, blockId uint64, block *blocktx_api.Block, merkleTree []*chainhash.Hash) (err error) { @@ -779,13 +785,168 @@ func (p *Processor) storeTransactions(ctx context.Context, blockId uint64, block return nil } -// getStaleTxs returns all transactions from a given STALE block that are not in the longest chain -func (p *Processor) getStaleTxs(ctx context.Context, blockId uint64) ([]store.TransactionBlock, error) { - // 1. 
Find registered txs from the given STALE block +func (p *Processor) updateOrphans(ctx context.Context, incomingBlock *blocktx_api.Block, competing bool) (chain, bool, error) { + chain := []*blocktx_api.Block{incomingBlock} + + orphanedBlocks, err := p.store.GetOrphanedChainUpFromHash(ctx, incomingBlock.Hash) + if err != nil { + return nil, false, err + } + if len(orphanedBlocks) == 0 { + return chain, competing, nil + } + + blockStatusUpdates := make([]store.BlockStatusUpdate, len(orphanedBlocks)) + for i := range orphanedBlocks { + // We want to mark all orphaned blocks as STALE + // in case there already exists a block at any + // of their height with status LONGEST, which + // would cause constraint validation (height, is_longest). + // + // If they are part of the LONGEST chain, the reorg + // will happen and update their statuses accordingly. + orphanedBlocks[i].Status = blocktx_api.Status_STALE + + blockStatusUpdates[i] = store.BlockStatusUpdate{ + Hash: orphanedBlocks[i].Hash, + Status: blocktx_api.Status_STALE, + } + } + + err = p.store.UpdateBlocksStatuses(ctx, blockStatusUpdates) + if err != nil { + return nil, false, err + } + + p.logger.Info("orphans were found and updated", slog.Int("len", len(orphanedBlocks))) + + chain = append(chain, orphanedBlocks...) + + // if we found any orphans and marked them as STALE + // we need to find out if they are part of the longest + // or stale chain, so competing is returned as true + return chain, true, nil +} + +func (p *Processor) performReorg(ctx context.Context, staleChainTip *blocktx_api.Block) ([]store.TransactionBlock, error) { + tx, err := p.store.BeginTx(ctx) + if err != nil { + return nil, err + } + defer func() { + _ = tx.Rollback() + }() + + // Very important step, this will lock blocks + // table for writing but still allow reading. + err = tx.WriteLockBlocksTable(ctx) + if err != nil { + return nil, err + } + + staleBlocks, err := p.store.GetStaleChainBackFromHash(ctx, staleChainTip.Hash) + if err != nil { + return nil, err + } + + lowestHeight := staleChainTip.Height + if len(staleBlocks) > 0 { + lowestHeight = getLowestHeight(staleBlocks) + } + + longestBlocks, err := p.store.GetLongestChainFromHeight(ctx, lowestHeight) + if err != nil { + return nil, err + } + + staleHashes := make([][]byte, len(staleBlocks)) + longestHashes := make([][]byte, len(longestBlocks)) + + for i, b := range longestBlocks { + longestHashes[i] = b.Hash + } + + for i, b := range staleBlocks { + staleHashes[i] = b.Hash + } + + registeredTxs, err := p.store.GetRegisteredTxsByBlockHashes(ctx, append(staleHashes, longestHashes...)) + if err != nil { + return nil, err + } + + // Order of inserting into blockStatusUpdates is important here, we need to do: + // 1. LONGEST -> STALE + // 2. STALE -> LONGEST + // otherwise, a unique constraint on (height, is_longest) might be violated. + + // 1. LONGEST -> STALE + blockStatusUpdates := make([]store.BlockStatusUpdate, len(longestBlocks)) + for i, b := range longestBlocks { + update := store.BlockStatusUpdate{Hash: b.Hash, Status: blocktx_api.Status_STALE} + blockStatusUpdates[i] = update + } + + err = p.store.UpdateBlocksStatuses(ctx, blockStatusUpdates) + if err != nil { + return nil, err + } + + // 2. 
STALE -> LONGEST + blockStatusUpdates = make([]store.BlockStatusUpdate, 0, len(staleBlocks)) + for _, b := range staleBlocks { + update := store.BlockStatusUpdate{Hash: b.Hash, Status: blocktx_api.Status_LONGEST} + blockStatusUpdates = append(blockStatusUpdates, update) + } + + err = p.store.UpdateBlocksStatuses(ctx, blockStatusUpdates) + if err != nil { + return nil, err + } + + err = tx.Commit() + if err != nil { + return nil, err + } + + p.logger.Info("reorg performed successfully") + + prevLongestTxs := make([]store.TransactionBlock, 0) + prevStaleTxs := make([]store.TransactionBlock, 0) + + for _, tx := range registeredTxs { + switch tx.BlockStatus { + case blocktx_api.Status_LONGEST: + prevLongestTxs = append(prevLongestTxs, tx) + case blocktx_api.Status_STALE: + prevStaleTxs = append(prevStaleTxs, tx) + default: + // do nothing - ignore ORPHANED and UNKNOWN blocks + } + } + + nowMinedTxs, nowStaleTxs := findMinedAndStaleTxs(prevStaleTxs, prevLongestTxs) + + for i := range nowMinedTxs { + nowMinedTxs[i].BlockStatus = blocktx_api.Status_LONGEST + } + + for i := range nowStaleTxs { + nowStaleTxs[i].BlockStatus = blocktx_api.Status_STALE + } + + txsToPublish := append(nowMinedTxs, nowStaleTxs...) + + return txsToPublish, nil +} + +// getStaleTxs returns all transactions from given STALE blocks that are not in the longest chain +func (p *Processor) getStaleTxs(ctx context.Context, staleChain chain) ([]store.TransactionBlock, error) { + // 1. Find registered txs from given STALE blocks // 2. Check for those transactions in the longest chain - // 3. Return only those registered txs from the STALE block that are not found in the longest chain + // 3. Return only those registered txs from the STALE blocks that are not found in the longest chain - registeredTxs, err := p.store.GetRegisteredTransactions(ctx, blockId) + registeredTxs, err := p.store.GetRegisteredTxsByBlockHashes(ctx, staleChain.getHashes()) if err != nil { return nil, err } @@ -795,7 +956,7 @@ func (p *Processor) getStaleTxs(ctx context.Context, blockId uint64) ([]store.Tr registeredHashes[i] = tx.TxHash } - minedTxs, err := p.store.GetMinedTransactions(ctx, registeredHashes) + minedTxs, err := p.store.GetMinedTransactions(ctx, registeredHashes, true) if err != nil { return nil, err } diff --git a/internal/blocktx/processor_helpers_test.go b/internal/blocktx/processor_helpers_test.go index 2aa6c0e54..213f90148 100644 --- a/internal/blocktx/processor_helpers_test.go +++ b/internal/blocktx/processor_helpers_test.go @@ -6,6 +6,11 @@ import ( "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" "github.com/bitcoin-sv/arc/internal/blocktx/store" +<<<<<<< HEAD +======= + sdkTx "github.com/bitcoin-sv/go-sdk/transaction" + "github.com/stretchr/testify/assert" +>>>>>>> 35a3353a (feat: improved way of transactions publishing to metamorph, store methods refactor and test coverage) "github.com/stretchr/testify/require" ) diff --git a/internal/blocktx/processor_test.go b/internal/blocktx/processor_test.go index 2fb5e1117..94a0c0f74 100644 --- a/internal/blocktx/processor_test.go +++ b/internal/blocktx/processor_test.go @@ -9,20 +9,21 @@ import ( "testing" "time" + "github.com/bitcoin-sv/arc/internal/blocktx" + "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" + "github.com/bitcoin-sv/arc/internal/blocktx/mocks" + "github.com/bitcoin-sv/arc/internal/blocktx/store" + storeMocks "github.com/bitcoin-sv/arc/internal/blocktx/store/mocks" + testutils "github.com/bitcoin-sv/arc/internal/test_utils" +
"github.com/bitcoin-sv/arc/internal/testdata" "github.com/libsv/go-p2p/chaincfg/chainhash" "github.com/libsv/go-p2p/wire" "github.com/stretchr/testify/require" "google.golang.org/protobuf/reflect/protoreflect" - "github.com/bitcoin-sv/arc/internal/blocktx" blockchain "github.com/bitcoin-sv/arc/internal/blocktx/blockchain_communication" blocktx_p2p "github.com/bitcoin-sv/arc/internal/blocktx/blockchain_communication/p2p" - "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" - "github.com/bitcoin-sv/arc/internal/blocktx/mocks" - "github.com/bitcoin-sv/arc/internal/blocktx/store" - storeMocks "github.com/bitcoin-sv/arc/internal/blocktx/store/mocks" p2p_mocks "github.com/bitcoin-sv/arc/internal/p2p/mocks" - "github.com/bitcoin-sv/arc/internal/testdata" ) func TestHandleBlock(t *testing.T) { @@ -157,9 +158,23 @@ func TestHandleBlock(t *testing.T) { var actualInsertedBlockTransactions [][]byte + txMock := &storeMocks.DbTransactionMock{ + CommitFunc: func() error { + return nil + }, + RollbackFunc: func() error { + return nil + }, + WriteLockBlocksTableFunc: func(ctx context.Context) error { + return nil + }, + } storeMock := &storeMocks.BlocktxStoreMock{ - GetBlockFunc: func(_ context.Context, _ *chainhash.Hash) (*blocktx_api.Block, error) { - if tc.blockAlreadyProcessed { + BeginTxFunc: func(ctx context.Context) (store.DbTransaction, error) { + return txMock, nil + }, + GetBlockFunc: func(ctx context.Context, hash *chainhash.Hash) (*blocktx_api.Block, error) { + if tc.blockAlreadyExists { return &blocktx_api.Block{Processed: true}, nil } return nil, store.ErrBlockNotFound @@ -173,6 +188,18 @@ func TestHandleBlock(t *testing.T) { UpsertBlockFunc: func(_ context.Context, _ *blocktx_api.Block) (uint64, error) { return 0, nil }, + GetOrphanedChainUpFromHashFunc: func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { + return nil, nil + }, + GetMinedTransactionsFunc: func(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) { + return nil, nil + }, + GetRegisteredTransactionsFunc: func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { + return nil, nil + }, + GetRegisteredTxsByBlockHashesFunc: func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { + return nil, nil + }, MarkBlockAsDoneFunc: func(_ context.Context, _ *chainhash.Hash, _ uint64, _ uint64) error { return nil }, GetBlockHashesProcessingInProgressFunc: func(_ context.Context, _ string) ([]*chainhash.Hash, error) { return nil, nil }, } @@ -229,14 +256,28 @@ func TestHandleBlock(t *testing.T) { } } -func TestHandleBlockReorg(t *testing.T) { +func TestHandleBlockReorgAndOrphans(t *testing.T) { testCases := []struct { - name string - prevBlockStatus blocktx_api.Status - hasCompetingBlock bool - hasGreaterChainwork bool - expectedStatus blocktx_api.Status + name string + blockAlreadyExists bool + prevBlockStatus blocktx_api.Status + hasCompetingBlock bool + hasGreaterChainwork bool + expectedStatus blocktx_api.Status + shouldFindOrphanChain bool }{ + { + name: "block already exists - no orphans - should be ingored", + blockAlreadyExists: true, + shouldFindOrphanChain: false, + expectedStatus: blocktx_api.Status_UNKNOWN, + }, + { + name: "block already exists - orphans found - reorg", + blockAlreadyExists: true, + shouldFindOrphanChain: true, + expectedStatus: blocktx_api.Status_LONGEST, + }, { name: "previous block longest - no competing - no reorg", prevBlockStatus: blocktx_api.Status_LONGEST, @@ -285,25 +326,71 @@ func 
TestHandleBlockReorg(t *testing.T) { hasGreaterChainwork: false, expectedStatus: blocktx_api.Status_ORPHANED, }, + { + name: "previous block longest - orphaned chain - no competing - no reorg", + prevBlockStatus: blocktx_api.Status_LONGEST, + hasCompetingBlock: false, + hasGreaterChainwork: false, + expectedStatus: blocktx_api.Status_LONGEST, + shouldFindOrphanChain: true, + }, + { + name: "previous block longest - orphaned chain - competing - reorg", + prevBlockStatus: blocktx_api.Status_LONGEST, + hasCompetingBlock: true, + hasGreaterChainwork: false, // tip of orphan chain has greater chainwork + expectedStatus: blocktx_api.Status_LONGEST, + shouldFindOrphanChain: true, + }, + { + name: "previous block stale - orphaned chain - competing - reorg", + prevBlockStatus: blocktx_api.Status_STALE, + hasCompetingBlock: true, + hasGreaterChainwork: false, // tip of orphan chain has greater chainwork + expectedStatus: blocktx_api.Status_LONGEST, + shouldFindOrphanChain: true, + }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { // given var mtx sync.Mutex - var insertedBlock *blocktx_api.Block + insertedBlockStatus := blocktx_api.Status_UNKNOWN + orphanedChainTip := &blocktx_api.Block{ + Hash: testutils.RevChainhash(t, "0000000000000000025855b62f4c2e3732dad363a6f2ead94e4657ef96877067")[:], + Status: blocktx_api.Status_ORPHANED, + Chainwork: "34364008516618225545", // greatest chainwork - should cause reorg if found + } - shouldReturnNoBlock := true + shouldReturnNoBlock := !tc.blockAlreadyExists + shouldCheckUpdateStatuses := true + comparingChainwork := true + txMock := &storeMocks.DbTransactionMock{ + CommitFunc: func() error { + return nil + }, + RollbackFunc: func() error { + return nil + }, + WriteLockBlocksTableFunc: func(ctx context.Context) error { + return nil + }, + } storeMock := &storeMocks.BlocktxStoreMock{ - GetBlockFunc: func(_ context.Context, _ *chainhash.Hash) (*blocktx_api.Block, error) { + BeginTxFunc: func(ctx context.Context) (store.DbTransaction, error) { + return txMock, nil + }, + GetBlockFunc: func(ctx context.Context, hash *chainhash.Hash) (*blocktx_api.Block, error) { if shouldReturnNoBlock { shouldReturnNoBlock = false return nil, nil } return &blocktx_api.Block{ - Status: tc.prevBlockStatus, + Status: tc.prevBlockStatus, + Processed: true, }, nil }, GetBlockByHeightFunc: func(_ context.Context, _ uint64, _ blocktx_api.Status) (*blocktx_api.Block, error) { @@ -330,21 +417,84 @@ func TestHandleBlockReorg(t *testing.T) { }, InsertBlockFunc: func(ctx context.Context, block *blocktx_api.Block) (uint64, error) { mtx.Lock() - insertedBlock = &blocktx_api.Block{ - Hash: block.Hash, - Status: block.Status, - } + insertedBlockStatus = block.Status mtx.Unlock() - return 1, nil + return 1, errors.New("dummy error") // return error here so we don't have to override next db functions + }, + GetOrphanedChainUpFromHashFunc: func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { + if tc.shouldFindOrphanChain { + return []*blocktx_api.Block{ + { + Hash: []byte("123"), + Status: blocktx_api.Status_ORPHANED, + Chainwork: "123", + }, + orphanedChainTip, + }, nil + } + + return nil, nil + }, + UpdateBlocksStatusesFunc: func(ctx context.Context, blockStatusUpdates []store.BlockStatusUpdate) error { + if shouldCheckUpdateStatuses && tc.shouldFindOrphanChain { + mtx.Lock() + shouldCheckUpdateStatuses = false + tipStatusUpdate := blockStatusUpdates[len(blockStatusUpdates)-1] + require.Equal(t, orphanedChainTip.Hash, tipStatusUpdate.Hash) + require.Equal(t, 
blocktx_api.Status_STALE, tipStatusUpdate.Status) + mtx.Unlock() + } + return nil }, GetStaleChainBackFromHashFunc: func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { - // this function is called ONLY when performing reorg + if comparingChainwork { + if tc.shouldFindOrphanChain { + require.Equal(t, orphanedChainTip.Hash, hash) + return []*blocktx_api.Block{orphanedChainTip}, nil + } + if tc.hasGreaterChainwork { + return []*blocktx_api.Block{ + { + Chainwork: "62209952899966", + }, + { + Chainwork: "42069", + }, + { + Chainwork: "42069", + }, + }, nil + } else { + return []*blocktx_api.Block{ + { + Chainwork: "62209952899966", + }, + }, nil + } + } + + // if we get to this point, it means that reorg is happening mtx.Lock() - insertedBlock.Status = blocktx_api.Status_LONGEST + insertedBlockStatus = blocktx_api.Status_LONGEST + if tc.shouldFindOrphanChain { + require.Equal(t, orphanedChainTip.Hash[:], hash) + orphanedChainTip.Status = blocktx_api.Status_LONGEST + } mtx.Unlock() return nil, nil }, - GetLongestChainFromHeightFunc: func(_ context.Context, _ uint64) ([]*blocktx_api.Block, error) { + GetLongestChainFromHeightFunc: func(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) { + if comparingChainwork { + comparingChainwork = false + return []*blocktx_api.Block{ + { + Chainwork: "62209952899966", + }, + { + Chainwork: "42069", + }, + }, nil + } return nil, nil }, UpdateBlocksStatusesFunc: func(_ context.Context, _ []store.BlockStatusUpdate) error { @@ -353,13 +503,13 @@ func TestHandleBlockReorg(t *testing.T) { UpsertBlockTransactionsFunc: func(ctx context.Context, blockId uint64, txsWithMerklePaths []store.TxWithMerklePath) error { return nil }, - GetRegisteredTransactionsFunc: func(ctx context.Context, blockId uint64) ([]store.TransactionBlock, error) { + GetRegisteredTransactionsFunc: func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { return nil, nil }, GetRegisteredTxsByBlockHashesFunc: func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { return nil, nil }, - GetMinedTransactionsFunc: func(ctx context.Context, hashes [][]byte) ([]store.TransactionBlock, error) { + GetMinedTransactionsFunc: func(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) { return nil, nil }, MarkBlockAsDoneFunc: func(ctx context.Context, hash *chainhash.Hash, size, txCount uint64) error { @@ -400,7 +550,10 @@ func TestHandleBlockReorg(t *testing.T) { // then time.Sleep(20 * time.Millisecond) mtx.Lock() - require.Equal(t, tc.expectedStatus, insertedBlock.Status) + require.Equal(t, tc.expectedStatus, insertedBlockStatus) + if tc.shouldFindOrphanChain { + require.Equal(t, tc.expectedStatus, orphanedChainTip.Status) + } mtx.Unlock() }) } @@ -651,7 +804,7 @@ func TestStartProcessRequestTxs(t *testing.T) { t.Run(tc.name, func(t *testing.T) { // given storeMock := &storeMocks.BlocktxStoreMock{ - GetMinedTransactionsFunc: func(ctx context.Context, hashes [][]byte) ([]store.TransactionBlock, error) { + GetMinedTransactionsFunc: func(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) { for _, hash := range hashes { require.Equal(t, testdata.TX1Hash[:], hash) } diff --git a/internal/blocktx/store/mocks/blocktx_db_tx_mock.go b/internal/blocktx/store/mocks/blocktx_db_tx_mock.go new file mode 100644 index 000000000..6096aa9ba --- /dev/null +++ b/internal/blocktx/store/mocks/blocktx_db_tx_mock.go @@ -0,0 +1,150 @@ +// Code generated by moq; 
DO NOT EDIT. +// github.com/matryer/moq + +package mocks + +import ( + "context" + "github.com/bitcoin-sv/arc/internal/blocktx/store" + "sync" +) + +// Ensure, that DbTransactionMock does implement store.DbTransaction. +// If this is not the case, regenerate this file with moq. +var _ store.DbTransaction = &DbTransactionMock{} + +// DbTransactionMock is a mock implementation of store.DbTransaction. +// +// func TestSomethingThatUsesDbTransaction(t *testing.T) { +// +// // make and configure a mocked store.DbTransaction +// mockedDbTransaction := &DbTransactionMock{ +// CommitFunc: func() error { +// panic("mock out the Commit method") +// }, +// RollbackFunc: func() error { +// panic("mock out the Rollback method") +// }, +// WriteLockBlocksTableFunc: func(ctx context.Context) error { +// panic("mock out the WriteLockBlocksTable method") +// }, +// } +// +// // use mockedDbTransaction in code that requires store.DbTransaction +// // and then make assertions. +// +// } +type DbTransactionMock struct { + // CommitFunc mocks the Commit method. + CommitFunc func() error + + // RollbackFunc mocks the Rollback method. + RollbackFunc func() error + + // WriteLockBlocksTableFunc mocks the WriteLockBlocksTable method. + WriteLockBlocksTableFunc func(ctx context.Context) error + + // calls tracks calls to the methods. + calls struct { + // Commit holds details about calls to the Commit method. + Commit []struct { + } + // Rollback holds details about calls to the Rollback method. + Rollback []struct { + } + // WriteLockBlocksTable holds details about calls to the WriteLockBlocksTable method. + WriteLockBlocksTable []struct { + // Ctx is the ctx argument value. + Ctx context.Context + } + } + lockCommit sync.RWMutex + lockRollback sync.RWMutex + lockWriteLockBlocksTable sync.RWMutex +} + +// Commit calls CommitFunc. +func (mock *DbTransactionMock) Commit() error { + if mock.CommitFunc == nil { + panic("DbTransactionMock.CommitFunc: method is nil but DbTransaction.Commit was just called") + } + callInfo := struct { + }{} + mock.lockCommit.Lock() + mock.calls.Commit = append(mock.calls.Commit, callInfo) + mock.lockCommit.Unlock() + return mock.CommitFunc() +} + +// CommitCalls gets all the calls that were made to Commit. +// Check the length with: +// +// len(mockedDbTransaction.CommitCalls()) +func (mock *DbTransactionMock) CommitCalls() []struct { +} { + var calls []struct { + } + mock.lockCommit.RLock() + calls = mock.calls.Commit + mock.lockCommit.RUnlock() + return calls +} + +// Rollback calls RollbackFunc. +func (mock *DbTransactionMock) Rollback() error { + if mock.RollbackFunc == nil { + panic("DbTransactionMock.RollbackFunc: method is nil but DbTransaction.Rollback was just called") + } + callInfo := struct { + }{} + mock.lockRollback.Lock() + mock.calls.Rollback = append(mock.calls.Rollback, callInfo) + mock.lockRollback.Unlock() + return mock.RollbackFunc() +} + +// RollbackCalls gets all the calls that were made to Rollback. +// Check the length with: +// +// len(mockedDbTransaction.RollbackCalls()) +func (mock *DbTransactionMock) RollbackCalls() []struct { +} { + var calls []struct { + } + mock.lockRollback.RLock() + calls = mock.calls.Rollback + mock.lockRollback.RUnlock() + return calls +} + +// WriteLockBlocksTable calls WriteLockBlocksTableFunc. 
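// Editorial sketch, not part of this patch: only the DbTransaction interface and this
// mock are shown in the diff; the store-side implementation of WriteLockBlocksTable is
// not. Assuming the PostgreSQL store wraps a *sql.Tx, it could look roughly like the
// following (receiver, field and table names are assumptions):
//
//	func (t *dbTransaction) WriteLockBlocksTable(ctx context.Context) error {
//		// EXCLUSIVE mode blocks concurrent writers but still allows plain SELECTs,
//		// matching the "lock for writing but still allow reading" note in performReorg.
//		_, err := t.tx.ExecContext(ctx, "LOCK TABLE blocktx.blocks IN EXCLUSIVE MODE")
//		return err
//	}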
+func (mock *DbTransactionMock) WriteLockBlocksTable(ctx context.Context) error { + if mock.WriteLockBlocksTableFunc == nil { + panic("DbTransactionMock.WriteLockBlocksTableFunc: method is nil but DbTransaction.WriteLockBlocksTable was just called") + } + callInfo := struct { + Ctx context.Context + }{ + Ctx: ctx, + } + mock.lockWriteLockBlocksTable.Lock() + mock.calls.WriteLockBlocksTable = append(mock.calls.WriteLockBlocksTable, callInfo) + mock.lockWriteLockBlocksTable.Unlock() + return mock.WriteLockBlocksTableFunc(ctx) +} + +// WriteLockBlocksTableCalls gets all the calls that were made to WriteLockBlocksTable. +// Check the length with: +// +// len(mockedDbTransaction.WriteLockBlocksTableCalls()) +func (mock *DbTransactionMock) WriteLockBlocksTableCalls() []struct { + Ctx context.Context +} { + var calls []struct { + Ctx context.Context + } + mock.lockWriteLockBlocksTable.RLock() + calls = mock.calls.WriteLockBlocksTable + mock.lockWriteLockBlocksTable.RUnlock() + return calls +} diff --git a/internal/blocktx/store/mocks/blocktx_store_mock.go b/internal/blocktx/store/mocks/blocktx_store_mock.go index 8101fcb19..36ee6b459 100644 --- a/internal/blocktx/store/mocks/blocktx_store_mock.go +++ b/internal/blocktx/store/mocks/blocktx_store_mock.go @@ -21,6 +21,9 @@ var _ store.BlocktxStore = &BlocktxStoreMock{} // // // make and configure a mocked store.BlocktxStore // mockedBlocktxStore := &BlocktxStoreMock{ +// BeginTxFunc: func(ctx context.Context) (store.DbTransaction, error) { +// panic("mock out the BeginTx method") +// }, // ClearBlocktxTableFunc: func(ctx context.Context, retentionDays int32, table string) (*blocktx_api.RowsAffectedResponse, error) { // panic("mock out the ClearBlocktxTable method") // }, @@ -48,10 +51,13 @@ var _ store.BlocktxStore = &BlocktxStoreMock{} // GetLongestChainFromHeightFunc: func(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) { // panic("mock out the GetLongestChainFromHeight method") // }, -// GetMinedTransactionsFunc: func(ctx context.Context, hashes [][]byte) ([]store.TransactionBlock, error) { +// GetMinedTransactionsFunc: func(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) { // panic("mock out the GetMinedTransactions method") // }, -// GetRegisteredTransactionsFunc: func(ctx context.Context, blockId uint64) ([]store.TransactionBlock, error) { +// GetOrphanedChainUpFromHashFunc: func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { +// panic("mock out the GetOrphanedChainUpFromHash method") +// }, +// GetRegisteredTransactionsFunc: func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { // panic("mock out the GetRegisteredTransactions method") // }, // GetRegisteredTxsByBlockHashesFunc: func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { @@ -79,6 +85,7 @@ var _ store.BlocktxStore = &BlocktxStoreMock{} // panic("mock out the UpdateBlocksStatuses method") // }, <<<<<<< HEAD +<<<<<<< HEAD // UpsertBlockFunc: func(ctx context.Context, block *blocktx_api.Block) (uint64, error) { // panic("mock out the UpsertBlock method") // }, @@ -86,6 +93,9 @@ var _ store.BlocktxStore = &BlocktxStoreMock{} ======= // UpsertBlockTransactionsFunc: func(ctx context.Context, blockId uint64, txsWithMerklePaths []store.TxWithMerklePath) error { >>>>>>> 35a3353a (feat: improved way of transactions publishing to metamorph, store methods refactor and test coverage) +======= +// UpsertBlockTransactionsFunc: func(ctx context.Context, 
blockId uint64, txsWithMerklePaths []store.TxWithMerklePath) error { +>>>>>>> 35a3353a (feat: improved way of transactions publishing to metamorph, store methods refactor and test coverage) // panic("mock out the UpsertBlockTransactions method") // }, // VerifyMerkleRootsFunc: func(ctx context.Context, merkleRoots []*blocktx_api.MerkleRootVerificationRequest, maxAllowedBlockHeightMismatch int) (*blocktx_api.MerkleRootVerificationResponse, error) { @@ -98,6 +108,9 @@ var _ store.BlocktxStore = &BlocktxStoreMock{} // // } type BlocktxStoreMock struct { + // BeginTxFunc mocks the BeginTx method. + BeginTxFunc func(ctx context.Context) (store.DbTransaction, error) + // ClearBlocktxTableFunc mocks the ClearBlocktxTable method. ClearBlocktxTableFunc func(ctx context.Context, retentionDays int32, table string) (*blocktx_api.RowsAffectedResponse, error) @@ -126,10 +139,13 @@ type BlocktxStoreMock struct { GetLongestChainFromHeightFunc func(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) // GetMinedTransactionsFunc mocks the GetMinedTransactions method. - GetMinedTransactionsFunc func(ctx context.Context, hashes [][]byte) ([]store.TransactionBlock, error) + GetMinedTransactionsFunc func(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) + + // GetOrphanedChainUpFromHashFunc mocks the GetOrphanedChainUpFromHash method. + GetOrphanedChainUpFromHashFunc func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) // GetRegisteredTransactionsFunc mocks the GetRegisteredTransactions method. - GetRegisteredTransactionsFunc func(ctx context.Context, blockId uint64) ([]store.TransactionBlock, error) + GetRegisteredTransactionsFunc func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) // GetRegisteredTxsByBlockHashesFunc mocks the GetRegisteredTxsByBlockHashes method. GetRegisteredTxsByBlockHashesFunc func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) @@ -159,8 +175,12 @@ type BlocktxStoreMock struct { UpsertBlockFunc func(ctx context.Context, block *blocktx_api.Block) (uint64, error) // UpsertBlockTransactionsFunc mocks the UpsertBlockTransactions method. +<<<<<<< HEAD <<<<<<< HEAD UpsertBlockTransactionsFunc func(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) ([]store.TxWithMerklePath, error) +======= + UpsertBlockTransactionsFunc func(ctx context.Context, blockId uint64, txsWithMerklePaths []store.TxWithMerklePath) error +>>>>>>> 35a3353a (feat: improved way of transactions publishing to metamorph, store methods refactor and test coverage) ======= UpsertBlockTransactionsFunc func(ctx context.Context, blockId uint64, txsWithMerklePaths []store.TxWithMerklePath) error >>>>>>> 35a3353a (feat: improved way of transactions publishing to metamorph, store methods refactor and test coverage) @@ -170,6 +190,11 @@ type BlocktxStoreMock struct { // calls tracks calls to the methods. calls struct { + // BeginTx holds details about calls to the BeginTx method. + BeginTx []struct { + // Ctx is the ctx argument value. + Ctx context.Context + } // ClearBlocktxTable holds details about calls to the ClearBlocktxTable method. ClearBlocktxTable []struct { // Ctx is the ctx argument value. @@ -239,13 +264,22 @@ type BlocktxStoreMock struct { Ctx context.Context // Hashes is the hashes argument value. Hashes [][]byte + // OnlyLongestChain is the onlyLongestChain argument value. 
+ OnlyLongestChain bool + } + // GetOrphanedChainUpFromHash holds details about calls to the GetOrphanedChainUpFromHash method. + GetOrphanedChainUpFromHash []struct { + // Ctx is the ctx argument value. + Ctx context.Context + // Hash is the hash argument value. + Hash []byte } // GetRegisteredTransactions holds details about calls to the GetRegisteredTransactions method. GetRegisteredTransactions []struct { // Ctx is the ctx argument value. Ctx context.Context - // BlockId is the blockId argument value. - BlockId uint64 + // BlockHashes is the blockHashes argument value. + BlockHashes [][]byte } // GetRegisteredTxsByBlockHashes holds details about calls to the GetRegisteredTxsByBlockHashes method. GetRegisteredTxsByBlockHashes []struct { @@ -331,6 +365,7 @@ type BlocktxStoreMock struct { MaxAllowedBlockHeightMismatch int } } + lockBeginTx sync.RWMutex lockClearBlocktxTable sync.RWMutex lockClose sync.RWMutex lockDelBlockProcessing sync.RWMutex @@ -341,6 +376,7 @@ type BlocktxStoreMock struct { lockGetChainTip sync.RWMutex lockGetLongestChainFromHeight sync.RWMutex lockGetMinedTransactions sync.RWMutex + lockGetOrphanedChainUpFromHash sync.RWMutex lockGetRegisteredTransactions sync.RWMutex lockGetRegisteredTxsByBlockHashes sync.RWMutex lockGetStaleChainBackFromHash sync.RWMutex @@ -355,6 +391,38 @@ type BlocktxStoreMock struct { lockVerifyMerkleRoots sync.RWMutex } +// BeginTx calls BeginTxFunc. +func (mock *BlocktxStoreMock) BeginTx(ctx context.Context) (store.DbTransaction, error) { + if mock.BeginTxFunc == nil { + panic("BlocktxStoreMock.BeginTxFunc: method is nil but BlocktxStore.BeginTx was just called") + } + callInfo := struct { + Ctx context.Context + }{ + Ctx: ctx, + } + mock.lockBeginTx.Lock() + mock.calls.BeginTx = append(mock.calls.BeginTx, callInfo) + mock.lockBeginTx.Unlock() + return mock.BeginTxFunc(ctx) +} + +// BeginTxCalls gets all the calls that were made to BeginTx. +// Check the length with: +// +// len(mockedBlocktxStore.BeginTxCalls()) +func (mock *BlocktxStoreMock) BeginTxCalls() []struct { + Ctx context.Context +} { + var calls []struct { + Ctx context.Context + } + mock.lockBeginTx.RLock() + calls = mock.calls.BeginTx + mock.lockBeginTx.RUnlock() + return calls +} + // ClearBlocktxTable calls ClearBlocktxTableFunc. func (mock *BlocktxStoreMock) ClearBlocktxTable(ctx context.Context, retentionDays int32, table string) (*blocktx_api.RowsAffectedResponse, error) { if mock.ClearBlocktxTableFunc == nil { @@ -679,21 +747,23 @@ func (mock *BlocktxStoreMock) GetLongestChainFromHeightCalls() []struct { } // GetMinedTransactions calls GetMinedTransactionsFunc. 
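// Editorial note, not part of this patch: the mocked signature below gains an
// onlyLongestChain flag. Judging from the processor changes in this series, passing
// true restricts the lookup to transactions mined in blocks on the LONGEST chain
// (as getStaleTxs does), while false also returns transactions mined in STALE blocks
// (as publishMinedTxs does). A hedged usage sketch:
//
//	// treat a tx as mined only if its block is on the longest chain
//	minedTxs, err := store.GetMinedTransactions(ctx, registeredHashes, true)
//	if err != nil {
//		return nil, err
//	}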
-func (mock *BlocktxStoreMock) GetMinedTransactions(ctx context.Context, hashes [][]byte) ([]store.TransactionBlock, error) { +func (mock *BlocktxStoreMock) GetMinedTransactions(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) { if mock.GetMinedTransactionsFunc == nil { panic("BlocktxStoreMock.GetMinedTransactionsFunc: method is nil but BlocktxStore.GetMinedTransactions was just called") } callInfo := struct { - Ctx context.Context - Hashes [][]byte + Ctx context.Context + Hashes [][]byte + OnlyLongestChain bool }{ - Ctx: ctx, - Hashes: hashes, + Ctx: ctx, + Hashes: hashes, + OnlyLongestChain: onlyLongestChain, } mock.lockGetMinedTransactions.Lock() mock.calls.GetMinedTransactions = append(mock.calls.GetMinedTransactions, callInfo) mock.lockGetMinedTransactions.Unlock() - return mock.GetMinedTransactionsFunc(ctx, hashes) + return mock.GetMinedTransactionsFunc(ctx, hashes, onlyLongestChain) } // GetMinedTransactionsCalls gets all the calls that were made to GetMinedTransactions. @@ -701,12 +771,14 @@ func (mock *BlocktxStoreMock) GetMinedTransactions(ctx context.Context, hashes [ // // len(mockedBlocktxStore.GetMinedTransactionsCalls()) func (mock *BlocktxStoreMock) GetMinedTransactionsCalls() []struct { - Ctx context.Context - Hashes [][]byte + Ctx context.Context + Hashes [][]byte + OnlyLongestChain bool } { var calls []struct { - Ctx context.Context - Hashes [][]byte + Ctx context.Context + Hashes [][]byte + OnlyLongestChain bool } mock.lockGetMinedTransactions.RLock() calls = mock.calls.GetMinedTransactions @@ -714,22 +786,58 @@ func (mock *BlocktxStoreMock) GetMinedTransactionsCalls() []struct { return calls } +// GetOrphanedChainUpFromHash calls GetOrphanedChainUpFromHashFunc. +func (mock *BlocktxStoreMock) GetOrphanedChainUpFromHash(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { + if mock.GetOrphanedChainUpFromHashFunc == nil { + panic("BlocktxStoreMock.GetOrphanedChainUpFromHashFunc: method is nil but BlocktxStore.GetOrphanedChainUpFromHash was just called") + } + callInfo := struct { + Ctx context.Context + Hash []byte + }{ + Ctx: ctx, + Hash: hash, + } + mock.lockGetOrphanedChainUpFromHash.Lock() + mock.calls.GetOrphanedChainUpFromHash = append(mock.calls.GetOrphanedChainUpFromHash, callInfo) + mock.lockGetOrphanedChainUpFromHash.Unlock() + return mock.GetOrphanedChainUpFromHashFunc(ctx, hash) +} + +// GetOrphanedChainUpFromHashCalls gets all the calls that were made to GetOrphanedChainUpFromHash. +// Check the length with: +// +// len(mockedBlocktxStore.GetOrphanedChainUpFromHashCalls()) +func (mock *BlocktxStoreMock) GetOrphanedChainUpFromHashCalls() []struct { + Ctx context.Context + Hash []byte +} { + var calls []struct { + Ctx context.Context + Hash []byte + } + mock.lockGetOrphanedChainUpFromHash.RLock() + calls = mock.calls.GetOrphanedChainUpFromHash + mock.lockGetOrphanedChainUpFromHash.RUnlock() + return calls +} + // GetRegisteredTransactions calls GetRegisteredTransactionsFunc. 
-func (mock *BlocktxStoreMock) GetRegisteredTransactions(ctx context.Context, blockId uint64) ([]store.TransactionBlock, error) { +func (mock *BlocktxStoreMock) GetRegisteredTransactions(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { if mock.GetRegisteredTransactionsFunc == nil { panic("BlocktxStoreMock.GetRegisteredTransactionsFunc: method is nil but BlocktxStore.GetRegisteredTransactions was just called") } callInfo := struct { - Ctx context.Context - BlockId uint64 + Ctx context.Context + BlockHashes [][]byte }{ - Ctx: ctx, - BlockId: blockId, + Ctx: ctx, + BlockHashes: blockHashes, } mock.lockGetRegisteredTransactions.Lock() mock.calls.GetRegisteredTransactions = append(mock.calls.GetRegisteredTransactions, callInfo) mock.lockGetRegisteredTransactions.Unlock() - return mock.GetRegisteredTransactionsFunc(ctx, blockId) + return mock.GetRegisteredTransactionsFunc(ctx, blockHashes) } // GetRegisteredTransactionsCalls gets all the calls that were made to GetRegisteredTransactions. @@ -737,12 +845,12 @@ func (mock *BlocktxStoreMock) GetRegisteredTransactions(ctx context.Context, blo // // len(mockedBlocktxStore.GetRegisteredTransactionsCalls()) func (mock *BlocktxStoreMock) GetRegisteredTransactionsCalls() []struct { - Ctx context.Context - BlockId uint64 + Ctx context.Context + BlockHashes [][]byte } { var calls []struct { - Ctx context.Context - BlockId uint64 + Ctx context.Context + BlockHashes [][]byte } mock.lockGetRegisteredTransactions.RLock() calls = mock.calls.GetRegisteredTransactions @@ -1080,9 +1188,13 @@ func (mock *BlocktxStoreMock) UpsertBlockCalls() []struct { // UpsertBlockTransactions calls UpsertBlockTransactionsFunc. <<<<<<< HEAD +<<<<<<< HEAD func (mock *BlocktxStoreMock) UpsertBlockTransactions(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) ([]store.TxWithMerklePath, error) { ======= func (mock *BlocktxStoreMock) UpsertBlockTransactions(ctx context.Context, blockId uint64, txsWithMerklePaths []store.TxWithMerklePath) error { +>>>>>>> 35a3353a (feat: improved way of transactions publishing to metamorph, store methods refactor and test coverage) +======= +func (mock *BlocktxStoreMock) UpsertBlockTransactions(ctx context.Context, blockId uint64, txsWithMerklePaths []store.TxWithMerklePath) error { >>>>>>> 35a3353a (feat: improved way of transactions publishing to metamorph, store methods refactor and test coverage) if mock.UpsertBlockTransactionsFunc == nil { panic("BlocktxStoreMock.UpsertBlockTransactionsFunc: method is nil but BlocktxStore.UpsertBlockTransactions was just called") diff --git a/internal/blocktx/store/postgresql/fixtures/block_processing/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/block_processing/blocktx.blocks.yaml index b26a8f7e9..4e04686ca 100644 --- a/internal/blocktx/store/postgresql/fixtures/block_processing/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/block_processing/blocktx.blocks.yaml @@ -6,7 +6,6 @@ height: 822013 size: 86840000 tx_count: 23477 - orphanedyn: false - inserted_at: 2023-12-15 14:00:00 id: 2 hash: 0xb71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000 @@ -16,4 +15,3 @@ processed_at: 2023-12-15 14:10:00 size: 3030000 tx_count: 856 - orphanedyn: false diff --git a/internal/blocktx/store/postgresql/fixtures/clear_data/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/clear_data/blocktx.blocks.yaml index 9e00aa7da..976e16a18 100644 --- 
a/internal/blocktx/store/postgresql/fixtures/clear_data/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/clear_data/blocktx.blocks.yaml @@ -7,7 +7,6 @@ processed_at: 2023-12-10 14:10:00 size: 86840000 tx_count: 23477 - orphanedyn: false - inserted_at: 2023-12-15 14:00:00 id: 2 hash: 0x0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7 @@ -17,4 +16,3 @@ processed_at: 2023-12-15 14:10:00 size: 3030000 tx_count: 856 - orphanedyn: false diff --git a/internal/blocktx/store/postgresql/fixtures/get_block_by_height/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/get_block_by_height/blocktx.blocks.yaml index 44928c5d8..654530c00 100644 --- a/internal/blocktx/store/postgresql/fixtures/get_block_by_height/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/get_block_by_height/blocktx.blocks.yaml @@ -7,7 +7,6 @@ processed_at: 2023-12-15 14:10:00 size: 86840000 tx_count: 23477 - orphanedyn: false status: 10 chainwork: '123456' - inserted_at: 2023-12-15 14:30:00 @@ -19,7 +18,6 @@ processed_at: 2023-12-15 14:30:00 size: 20160000 tx_count: 6523 - orphanedyn: false status: 10 chainwork: '123456' - inserted_at: 2023-12-15 14:30:00 @@ -31,9 +29,9 @@ processed_at: 2023-12-15 14:30:00 size: 20160000 tx_count: 6523 - orphanedyn: false status: 20 # STALE - competing block chainwork: '123456' + is_longest: false - inserted_at: 2023-12-15 14:40:00 id: 3 hash: 0xe1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000 @@ -43,7 +41,6 @@ processed_at: 2023-12-15 14:40:00 size: 299650000 tx_count: 62162 - orphanedyn: false status: 10 chainwork: '123456' - inserted_at: 2023-12-15 14:50:00 @@ -54,6 +51,5 @@ height: 822020 size: 8630000 tx_count: 36724 - orphanedyn: false status: 10 chainwork: '123456' diff --git a/internal/blocktx/store/postgresql/fixtures/get_block_gaps/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/get_block_gaps/blocktx.blocks.yaml index 7951c92d8..d3641f5a3 100644 --- a/internal/blocktx/store/postgresql/fixtures/get_block_gaps/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/get_block_gaps/blocktx.blocks.yaml @@ -3,76 +3,132 @@ hash: 0xca415274325bac4ea3a39b0cef9356e75469d758ba4a57030000000000000000 prevhash: 0x3dc8a25238765c58a9fccbcf83fba59bf93894bcaaee8e040000000000000000 merkleroot: 0x4f3d1594808f485303ba8a1ed4f7c15f8087e5da1b7ebd25fa266630a8a6d880 - height: 812010 + height: 822008 # Out of range + status: 10 # LONGEST + is_longest: true size: 244000000 tx_count: 4437 - orphanedyn: false + +# Gap 822009 + - inserted_at: 2023-12-10 14:00:00 id: 1 hash: 0x72ad227eaaf73d36bc86f46347310c9b21a360b277c3000a0000000000000000 prevhash: 0x4ad773b1a464129a0ed8c7a8c71bb98175f0f01da1793f0e0000000000000000 merkleroot: 0x145b33264b4440278446f4cb5008dcf87e54e7827a215da9621b652eb17eef88 height: 822010 + status: 30 # ORPHANED + is_longest: false processed_at: 2023-12-10 14:10:00 size: 244000000 tx_count: 4437 - orphanedyn: false + +# block 822011 is being processed (in table block_processing) + - inserted_at: 2023-12-10 14:00:00 id: 2 hash: 0xb71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000 prevhash: 0x3a03313b727fa08c170fab2660c225d52b4d85516c92a0020000000000000000 merkleroot: 0x3eeee879a8a08fc537a04682178687bb0e58a5103938eafc349705a2acb06410 height: 822012 + status: 30 # ORPHANED + is_longest: false processed_at: 2023-12-10 14:10:00 size: 3030000 tx_count: 856 - orphanedyn: false - inserted_at: 2023-12-15 14:00:00 id: 3 hash: 
0xf97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000 prevhash: 0xb71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000 merkleroot: 0x7f4019eb006f5333cce752df387fa8443035c22291eb771ee5b16a02b81c8483 height: 822013 + status: 30 # ORPHANED + is_longest: false processed_at: 2023-12-15 14:10:00 size: 86840000 tx_count: 23477 - orphanedyn: false + +# Gap 822014 + - inserted_at: 2023-12-15 14:30:00 id: 4 hash: 0xc9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000 prevhash: 0x67708796ef57464ed9eaf2a663d3da32372e4c2fb65558020000000000000000 merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 height: 822015 + status: 30 # ORPHANED + is_longest: false processed_at: 2023-12-15 14:30:00 size: 20160000 tx_count: 6523 - orphanedyn: false - inserted_at: 2023-12-15 14:40:00 id: 5 hash: 0xe1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000 prevhash: 0xc9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000 merkleroot: 0x4b58b0402a84012269b124f78c91a78a814eb3c9caa03f1df1d33172b23082d1 height: 822016 + status: 30 # ORPHANED + is_longest: false processed_at: 2023-12-15 14:40:00 size: 299650000 tx_count: 62162 - orphanedyn: false + +# Gap 822017 - 822019 + - inserted_at: 2023-12-15 14:50:00 id: 6 hash: 0x76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000 prevhash: 0x5696fc6e504b6aa2ae5d9c46b9418192dc61bd1b2e3364030000000000000000 merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 height: 822020 + status: 30 # ORPHANED + is_longest: false size: 8630000 tx_count: 36724 - orphanedyn: false - inserted_at: 2023-12-15 15:00:00 id: 7 hash: 0x1d5fc8020fd68baea5c0cad654f04eb791a81100c51045090000000000000000 prevhash: 0x76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000 merkleroot: 0x3e15f823a7de25c26ce9001d4814a6f0ebc915a1ca4f1ba9cfac720bd941c39c height: 822021 + status: 30 # ORPHANED + is_longest: false processed_at: 2023-12-15 15:00:00 size: 1620000 tx_count: 5578 - orphanedyn: false + +# competing chain at height 822018 - 822020 +- inserted_at: 2023-12-15 14:50:00 + id: 8 + hash: 0x0000000000000000092497212ff3447c8b53c8777d83082049aa64998d1f04f0 + prevhash: 0x00000000000000000d840fb91c0df3b057db04a0250c6d88b2f25aadcfc8410b + merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 + height: 822018 + status: 30 # ORPHANED + is_longest: false + processed_at: 2023-12-15 15:00:00 + size: 8630000 + tx_count: 36724 +- inserted_at: 2023-12-15 14:50:00 + id: 9 + hash: 0x00000000000000000911795efb7fbbd73ecb589a63d6f6f658bab67b57ce9caa + prevhash: 0x0000000000000000092497212ff3447c8b53c8777d83082049aa64998d1f04f0 + merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 + height: 822019 + status: 30 # ORPHANED + is_longest: false + processed_at: 2023-12-15 15:00:00 + size: 8630000 + tx_count: 36724 +- inserted_at: 2023-12-15 14:50:00 + id: 10 + hash: 0x0000000000000000082e0b0d5924f2f190cb4a53eac364524d9388f0c39cc7bd + prevhash: 0x00000000000000000911795efb7fbbd73ecb589a63d6f6f658bab67b57ce9caa + merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 + height: 822020 + status: 30 # ORPHANED + is_longest: false + processed_at: 2023-12-15 15:00:00 + size: 8630000 + tx_count: 36724 + diff --git a/internal/blocktx/store/postgresql/fixtures/get_longest_chain/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/get_longest_chain/blocktx.blocks.yaml index ce21cb387..ec6976daf 100644 --- 
a/internal/blocktx/store/postgresql/fixtures/get_longest_chain/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/get_longest_chain/blocktx.blocks.yaml @@ -7,9 +7,9 @@ processed_at: 2023-12-15 14:10:00 size: 86840000 tx_count: 23477 - orphanedyn: false status: 10 # LONGEST chainwork: '123456' + is_longest: true - inserted_at: 2023-12-15 14:30:00 id: 1 hash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde @@ -19,9 +19,9 @@ processed_at: 2023-12-15 14:30:00 size: 20160000 tx_count: 6523 - orphanedyn: false status: 10 # LONGEST chainwork: '123456' + is_longest: true - inserted_at: 2023-12-15 14:30:00 id: 2 hash: 0x00000000000000000659df0d3cf98ebe46931b67117502168418f9dce4e1b4c9 @@ -31,9 +31,9 @@ processed_at: 2023-12-15 14:30:00 size: 20160000 tx_count: 6523 - orphanedyn: false status: 20 # STALE - competing block chainwork: '123456' + is_longest: false - inserted_at: 2023-12-15 14:40:00 id: 3 hash: 0x0000000000000000082ec88d757ddaeb0aa87a5d5408b5960f27e7e67312dfe1 @@ -43,9 +43,9 @@ processed_at: 2023-12-15 14:40:00 size: 299650000 tx_count: 62162 - orphanedyn: false status: 20 # STALE chainwork: '123456' + is_longest: false - inserted_at: 2023-12-15 14:50:00 id: 4 hash: 0x000000000000000004bf3e68405b31650559ff28d38a42b5e4f1440a865611ca @@ -54,6 +54,6 @@ height: 822017 size: 8630000 tx_count: 36724 - orphanedyn: false status: 20 # STALE chainwork: '123456' + is_longest: false diff --git a/internal/blocktx/store/postgresql/fixtures/get_orphaned_chain/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/get_orphaned_chain/blocktx.blocks.yaml new file mode 100644 index 000000000..b8f7b9e32 --- /dev/null +++ b/internal/blocktx/store/postgresql/fixtures/get_orphaned_chain/blocktx.blocks.yaml @@ -0,0 +1,59 @@ +- inserted_at: 2023-12-15 14:00:00 + id: 0 + hash: 0x0000000000000000025855b62f4c2e3732dad363a6f2ead94e4657ef96877067 + prevhash: 0xb71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000 + merkleroot: 0x7f4019eb006f5333cce752df387fa8443035c22291eb771ee5b16a02b81c8483 + height: 822015 + processed_at: 2023-12-15 14:10:00 + size: 86840000 + tx_count: 23477 + status: 10 # LONGEST + chainwork: '123456' + is_longest: true +- inserted_at: 2023-12-15 14:30:00 + id: 1 + hash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde + prevhash: 0x0000000000000000025855b62f4c2e3732dad363a6f2ead94e4657ef96877067 + merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 + height: 822016 + processed_at: 2023-12-15 14:30:00 + size: 20160000 + tx_count: 6523 + status: 30 # ORPHANED + chainwork: '123456' + is_longest: false +- inserted_at: 2023-12-15 14:30:00 + id: 2 + hash: 0x00000000000000000659df0d3cf98ebe46931b67117502168418f9dce4e1b4c9 + prevhash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde + merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 + height: 822017 + processed_at: 2023-12-15 14:30:00 + size: 20160000 + tx_count: 6523 + status: 30 # ORPHANED + chainwork: '123456' + is_longest: false +- inserted_at: 2023-12-15 14:40:00 + id: 3 + hash: 0x0000000000000000082ec88d757ddaeb0aa87a5d5408b5960f27e7e67312dfe1 + prevhash: 0x00000000000000000659df0d3cf98ebe46931b67117502168418f9dce4e1b4c9 + merkleroot: 0x4b58b0402a84012269b124f78c91a78a814eb3c9caa03f1df1d33172b23082d1 + height: 822018 + processed_at: 2023-12-15 14:40:00 + size: 299650000 + tx_count: 62162 + status: 30 # ORPHANED + chainwork: '123456' + is_longest: false +- inserted_at: 2023-12-15 14:50:00 + id: 4 + 
hash: 0x000000000000000004bf3e68405b31650559ff28d38a42b5e4f1440a865611ca + prevhash: 0x00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656 + merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 + height: 822020 # Another gap + size: 8630000 + tx_count: 36724 + status: 30 # ORPHANED + chainwork: '123456' + is_longest: false diff --git a/internal/blocktx/store/postgresql/fixtures/get_stale_chain/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/get_stale_chain/blocktx.blocks.yaml index ce21cb387..ec6976daf 100644 --- a/internal/blocktx/store/postgresql/fixtures/get_stale_chain/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/get_stale_chain/blocktx.blocks.yaml @@ -7,9 +7,9 @@ processed_at: 2023-12-15 14:10:00 size: 86840000 tx_count: 23477 - orphanedyn: false status: 10 # LONGEST chainwork: '123456' + is_longest: true - inserted_at: 2023-12-15 14:30:00 id: 1 hash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde @@ -19,9 +19,9 @@ processed_at: 2023-12-15 14:30:00 size: 20160000 tx_count: 6523 - orphanedyn: false status: 10 # LONGEST chainwork: '123456' + is_longest: true - inserted_at: 2023-12-15 14:30:00 id: 2 hash: 0x00000000000000000659df0d3cf98ebe46931b67117502168418f9dce4e1b4c9 @@ -31,9 +31,9 @@ processed_at: 2023-12-15 14:30:00 size: 20160000 tx_count: 6523 - orphanedyn: false status: 20 # STALE - competing block chainwork: '123456' + is_longest: false - inserted_at: 2023-12-15 14:40:00 id: 3 hash: 0x0000000000000000082ec88d757ddaeb0aa87a5d5408b5960f27e7e67312dfe1 @@ -43,9 +43,9 @@ processed_at: 2023-12-15 14:40:00 size: 299650000 tx_count: 62162 - orphanedyn: false status: 20 # STALE chainwork: '123456' + is_longest: false - inserted_at: 2023-12-15 14:50:00 id: 4 hash: 0x000000000000000004bf3e68405b31650559ff28d38a42b5e4f1440a865611ca @@ -54,6 +54,6 @@ height: 822017 size: 8630000 tx_count: 36724 - orphanedyn: false status: 20 # STALE chainwork: '123456' + is_longest: false diff --git a/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.block_transactions_map.yaml b/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.block_transactions_map.yaml index a405ab034..3bceb9ce5 100644 --- a/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.block_transactions_map.yaml +++ b/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.block_transactions_map.yaml @@ -38,3 +38,15 @@ txid: 10 merkle_path: merkle-path-10 inserted_at: 2023-12-15 14:00:00 +- blockid: 3 + txid: 11 + merkle_path: merkle-path-11 + inserted_at: 2023-12-15 14:00:00 +- blockid: 3 + txid: 12 + merkle_path: merkle-path-12 + inserted_at: 2023-12-15 14:00:00 +- blockid: 3 + txid: 13 + merkle_path: merkle-path-13 + inserted_at: 2023-12-15 14:00:00 diff --git a/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.blocks.yaml index f5a812428..06c1dba87 100644 --- a/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.blocks.yaml @@ -7,8 +7,8 @@ processed_at: 2023-12-10 14:10:00 size: 86840000 tx_count: 23477 - orphanedyn: false status: 10 # LONGEST + is_longest: true - inserted_at: 2023-12-15 14:00:00 id: 2 hash: 0x0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7 @@ -18,5 +18,16 @@ processed_at: 2023-12-15 14:10:00 size: 3030000 tx_count: 856 - orphanedyn: false status: 
20 # STALE + is_longest: false +- inserted_at: 2023-12-10 14:00:00 + id: 3 + hash: 0x0000000000000000025855b62f4c2e3732dad363a6f2ead94e4657ef96877067 + prevhash: 0x000000000000000005aa39a25e7e8bf440c270ec9a1bd30e99ab026f39207ef9 + merkleroot: 0x713e7713ebc6414420a418e87bbd42ecaf7e0e38c923c2cf9e72718d3c329acb + height: 822014 + processed_at: 2023-12-10 14:10:00 + size: 86840000 + tx_count: 23477 + status: 10 # LONGEST + is_longest: true diff --git a/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.transactions.yaml b/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.transactions.yaml index dc776a5a1..8ae03913e 100644 --- a/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.transactions.yaml +++ b/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.transactions.yaml @@ -41,3 +41,17 @@ hash: 0xa3d4e78a8e11e97c8faf34880da861412273948edf467f23590601a1057079d8 inserted_at: 2023-12-15 14:00:00 is_registered: false + +# txs from block 3 - LONGEST +- id: 11 + hash: 0x2eb9f15adaf9e7d1de19f3ebc6bf95b62871a4e053c30ac0d1b1df85a6163d8e + inserted_at: 2023-12-15 14:00:00 + is_registered: true +- id: 12 + hash: 0x45ad0e3de133e386faeff8ecf12b665875d527031b9aa75ca96d3fc2b7098fa5 + inserted_at: 2023-12-15 14:00:00 + is_registered: false +- id: 13 + hash: 0x4bac520c26dba4e24c3fb73bf4fd0d66e45ec39b976d1d052f8a4f499f4aa004 + inserted_at: 2023-12-15 14:00:00 + is_registered: false diff --git a/internal/blocktx/store/postgresql/fixtures/insert_block/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/insert_block/blocktx.blocks.yaml new file mode 100644 index 000000000..9cb7bc4c1 --- /dev/null +++ b/internal/blocktx/store/postgresql/fixtures/insert_block/blocktx.blocks.yaml @@ -0,0 +1,36 @@ +- inserted_at: 2023-12-15 14:30:00 + id: 1 + hash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde + prevhash: 0x0000000000000000025855b62f4c2e3732dad363a6f2ead94e4657ef96877067 + merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 + height: 822015 + processed_at: 2023-12-15 14:30:00 + size: 20160000 + tx_count: 6523 + status: 10 # LONGEST + chainwork: '123456' + is_longest: true +- inserted_at: 2023-12-15 14:30:00 + id: 2 + hash: 0x00000000000000000659df0d3cf98ebe46931b67117502168418f9dce4e1b4c9 + prevhash: 0x0000000000000000025855b62f4c2e3732dad363a6f2ead94e4657ef96877067 + merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 + height: 822015 + processed_at: 2023-12-15 14:30:00 + size: 20160000 + tx_count: 6523 + status: 20 # STALE + chainwork: '123456' + is_longest: false +- inserted_at: 2023-12-15 14:30:00 + id: 3 + hash: 0x0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7 + prevhash: 0x000000000000000002a0926c51854d2bd525c26026ab0f178ca07f723b31033a + merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 + height: 822015 + processed_at: 2023-12-15 14:30:00 + size: 20160000 + tx_count: 6523 + status: 30 # ORPHANED + chainwork: '123456' + is_longest: false diff --git a/internal/blocktx/store/postgresql/fixtures/mark_block_as_done/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/mark_block_as_done/blocktx.blocks.yaml index 7a9d599f3..e936d86f2 100644 --- a/internal/blocktx/store/postgresql/fixtures/mark_block_as_done/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/mark_block_as_done/blocktx.blocks.yaml @@ -4,6 +4,5 @@ prevhash: 0x3a03313b727fa08c170fab2660c225d52b4d85516c92a0020000000000000000 
merkleroot: 0x3eeee879a8a08fc537a04682178687bb0e58a5103938eafc349705a2acb06410 height: 822012 - orphanedyn: false status: 10 chainwork: '1234' diff --git a/internal/blocktx/store/postgresql/fixtures/register_transactions/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/register_transactions/blocktx.blocks.yaml index 2cf6008e8..a2f4e567a 100644 --- a/internal/blocktx/store/postgresql/fixtures/register_transactions/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/register_transactions/blocktx.blocks.yaml @@ -7,5 +7,4 @@ processed_at: 2024-01-10 13:06:06.122 size: 108689370 tx_count: 799 - orphanedyn: FALSE merkle_path: "" diff --git a/internal/blocktx/store/postgresql/fixtures/update_blocks_statuses/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/update_blocks_statuses/blocktx.blocks.yaml index ce21cb387..ec6976daf 100644 --- a/internal/blocktx/store/postgresql/fixtures/update_blocks_statuses/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/update_blocks_statuses/blocktx.blocks.yaml @@ -7,9 +7,9 @@ processed_at: 2023-12-15 14:10:00 size: 86840000 tx_count: 23477 - orphanedyn: false status: 10 # LONGEST chainwork: '123456' + is_longest: true - inserted_at: 2023-12-15 14:30:00 id: 1 hash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde @@ -19,9 +19,9 @@ processed_at: 2023-12-15 14:30:00 size: 20160000 tx_count: 6523 - orphanedyn: false status: 10 # LONGEST chainwork: '123456' + is_longest: true - inserted_at: 2023-12-15 14:30:00 id: 2 hash: 0x00000000000000000659df0d3cf98ebe46931b67117502168418f9dce4e1b4c9 @@ -31,9 +31,9 @@ processed_at: 2023-12-15 14:30:00 size: 20160000 tx_count: 6523 - orphanedyn: false status: 20 # STALE - competing block chainwork: '123456' + is_longest: false - inserted_at: 2023-12-15 14:40:00 id: 3 hash: 0x0000000000000000082ec88d757ddaeb0aa87a5d5408b5960f27e7e67312dfe1 @@ -43,9 +43,9 @@ processed_at: 2023-12-15 14:40:00 size: 299650000 tx_count: 62162 - orphanedyn: false status: 20 # STALE chainwork: '123456' + is_longest: false - inserted_at: 2023-12-15 14:50:00 id: 4 hash: 0x000000000000000004bf3e68405b31650559ff28d38a42b5e4f1440a865611ca @@ -54,6 +54,6 @@ height: 822017 size: 8630000 tx_count: 36724 - orphanedyn: false status: 20 # STALE chainwork: '123456' + is_longest: false diff --git a/internal/blocktx/store/postgresql/fixtures/upsert_block_transactions/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/upsert_block_transactions/blocktx.blocks.yaml index 3e007ea6c..59e5f2fbe 100644 --- a/internal/blocktx/store/postgresql/fixtures/upsert_block_transactions/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/upsert_block_transactions/blocktx.blocks.yaml @@ -4,17 +4,19 @@ prevhash: 0x000000000000000001a7aa3999410ca53fb645851531ec0a7a5cb9ce2d4ae313 merkleroot: 0x0d72bf92e7862df18d1935c171ca4dbb70d268b0f025e46716e913bc7e4f2bdb height: 826481 + status: 10 # STALE + is_longest: true processed_at: 2024-01-10 13:06:06.122 size: 108689370 tx_count: 799 - orphanedyn: FALSE - inserted_at: 2024-01-10 13:06:03.375 id: 9737 hash: 0x7258b02da70a3e367e4c993b049fa9b76ef8f090ef9fd2010000000000000000 prevhash: 0x000000000000000001a7aa3999410ca53fb645851531ec0a7a5cb9ce2d4ae313 merkleroot: 0x0d72bf92e7862df18d1935c171ca4dbb70d268b0f025e46716e913bc7e4f2bdb height: 826481 + status: 20 # STALE + is_longest: false processed_at: 2024-01-10 13:06:06.122 size: 108689370 tx_count: 799 - orphanedyn: FALSE diff --git 
a/internal/blocktx/store/postgresql/fixtures/verify_merkle_roots/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/verify_merkle_roots/blocktx.blocks.yaml index 7951c92d8..f8c707254 100644 --- a/internal/blocktx/store/postgresql/fixtures/verify_merkle_roots/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/verify_merkle_roots/blocktx.blocks.yaml @@ -6,7 +6,6 @@ height: 812010 size: 244000000 tx_count: 4437 - orphanedyn: false - inserted_at: 2023-12-10 14:00:00 id: 1 hash: 0x72ad227eaaf73d36bc86f46347310c9b21a360b277c3000a0000000000000000 @@ -16,7 +15,6 @@ processed_at: 2023-12-10 14:10:00 size: 244000000 tx_count: 4437 - orphanedyn: false - inserted_at: 2023-12-10 14:00:00 id: 2 hash: 0xb71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000 @@ -26,7 +24,6 @@ processed_at: 2023-12-10 14:10:00 size: 3030000 tx_count: 856 - orphanedyn: false - inserted_at: 2023-12-15 14:00:00 id: 3 hash: 0xf97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000 @@ -36,7 +33,6 @@ processed_at: 2023-12-15 14:10:00 size: 86840000 tx_count: 23477 - orphanedyn: false - inserted_at: 2023-12-15 14:30:00 id: 4 hash: 0xc9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000 @@ -46,7 +42,6 @@ processed_at: 2023-12-15 14:30:00 size: 20160000 tx_count: 6523 - orphanedyn: false - inserted_at: 2023-12-15 14:40:00 id: 5 hash: 0xe1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000 @@ -56,7 +51,6 @@ processed_at: 2023-12-15 14:40:00 size: 299650000 tx_count: 62162 - orphanedyn: false - inserted_at: 2023-12-15 14:50:00 id: 6 hash: 0x76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000 @@ -65,7 +59,6 @@ height: 822020 size: 8630000 tx_count: 36724 - orphanedyn: false - inserted_at: 2023-12-15 15:00:00 id: 7 hash: 0x1d5fc8020fd68baea5c0cad654f04eb791a81100c51045090000000000000000 @@ -75,4 +68,3 @@ processed_at: 2023-12-15 15:00:00 size: 1620000 tx_count: 5578 - orphanedyn: false diff --git a/internal/blocktx/store/postgresql/get_block.go b/internal/blocktx/store/postgresql/get_block.go index 4733ab91c..a520af83d 100644 --- a/internal/blocktx/store/postgresql/get_block.go +++ b/internal/blocktx/store/postgresql/get_block.go @@ -23,9 +23,9 @@ func (p *PostgreSQL) GetBlockByHeight(ctx context.Context, height uint64, status } func (p *PostgreSQL) GetChainTip(ctx context.Context) (*blocktx_api.Block, error) { - predicate := "WHERE height = (SELECT MAX(height) FROM blocktx.blocks blks WHERE blks.status = $1)" + predicate := "WHERE height = (SELECT MAX(height) FROM blocktx.blocks blks WHERE blks.is_longest = true)" - return p.queryBlockByPredicate(ctx, predicate, blocktx_api.Status_LONGEST) + return p.queryBlockByPredicate(ctx, predicate) } func (p *PostgreSQL) queryBlockByPredicate(ctx context.Context, predicate string, predicateParams ...any) (*blocktx_api.Block, error) { @@ -36,7 +36,6 @@ func (p *PostgreSQL) queryBlockByPredicate(ctx context.Context, predicate string ,merkleroot ,height ,processed_at - ,orphanedyn ,status ,chainwork FROM blocktx.blocks @@ -54,7 +53,6 @@ func (p *PostgreSQL) queryBlockByPredicate(ctx context.Context, predicate string &block.MerkleRoot, &block.Height, &processedAt, - &block.Orphaned, &block.Status, &block.Chainwork, ); err != nil { diff --git a/internal/blocktx/store/postgresql/get_block_gaps.go b/internal/blocktx/store/postgresql/get_block_gaps.go index bd0fffd6d..143c2d226 100644 --- a/internal/blocktx/store/postgresql/get_block_gaps.go +++ b/internal/blocktx/store/postgresql/get_block_gaps.go @@ -8,25 +8,34 @@ 
import ( ) func (p *PostgreSQL) GetBlockGaps(ctx context.Context, blockHeightRange int) ([]*store.BlockGap, error) { + // Flow of this query: + // + // 1. Get height - 1 and prevhash from blocks where there isn't a previous block + // and where height is greater than our height range parameter. + // + // 2. Add to result from 1. all blocks from the blocks table that are unprocessed yet. + // + // 3. Combine the result from 1. and 2. with block_processing table and remove all + // results that are being currently processed. + // + // 4. Sort by height. q := ` - SELECT DISTINCT all_missing.height, all_missing.hash FROM - (SELECT missing_blocks.missing_block_height AS height, blocktx.blocks.prevhash AS hash FROM blocktx.blocks - JOIN ( - SELECT bl.block_heights AS missing_block_height FROM ( - SELECT unnest(ARRAY( - SELECT a.n - FROM generate_series((SELECT max(height) - $1 AS block_height FROM blocktx.blocks b), (SELECT max(height) AS block_height FROM blocktx.blocks b)) AS a(n) - )) AS block_heights) AS bl - LEFT JOIN blocktx.blocks blks ON blks.height = bl.block_heights - WHERE blks.height IS NULL - ) AS missing_blocks ON blocktx.blocks.height = missing_blocks.missing_block_height + 1 - UNION - SELECT height, hash FROM blocktx.blocks WHERE processed_at IS NULL AND height < (SELECT max(height) AS block_height FROM blocktx.blocks b) - AND height > (SELECT max(height) - $1 AS block_height FROM blocktx.blocks b) - ) AS all_missing - LEFT JOIN blocktx.block_processing bp ON bp.block_hash = all_missing.hash - WHERE bp IS NULL ORDER BY all_missing.height DESC; - ` + SELECT DISTINCT all_missing.missing_height, all_missing.missing_hash + FROM ( + SELECT b.height - 1 AS missing_height, b.prevhash AS missing_hash + FROM blocktx.blocks b + WHERE b.height > (SELECT max(height) - $1 FROM blocktx.blocks) + AND NOT EXISTS (SELECT 1 FROM blocktx.blocks missing WHERE missing.hash = b.prevhash) + UNION + SELECT unprocessed.height AS missing_height, unprocessed.hash AS missing_hash + FROM blocktx.blocks unprocessed + WHERE unprocessed.processed_at IS NULL + AND unprocessed.height > (SELECT max(height) - $1 FROM blocktx.blocks) + ) AS all_missing + LEFT JOIN blocktx.block_processing bp ON bp.block_hash = all_missing.missing_hash + WHERE bp.block_hash IS NULL + ORDER BY all_missing.missing_height ASC; + ` rows, err := p.db.QueryContext(ctx, q, blockHeightRange) if err != nil { diff --git a/internal/blocktx/store/postgresql/get_longest_chain.go b/internal/blocktx/store/postgresql/get_longest_chain.go index 9e8da6bc3..cca3607a2 100644 --- a/internal/blocktx/store/postgresql/get_longest_chain.go +++ b/internal/blocktx/store/postgresql/get_longest_chain.go @@ -2,7 +2,6 @@ package postgresql import ( "context" - "database/sql" "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" ) @@ -15,43 +14,17 @@ func (p *PostgreSQL) GetLongestChainFromHeight(ctx context.Context, height uint6 ,merkleroot ,height ,processed_at - ,orphanedyn ,status ,chainwork FROM blocktx.blocks - WHERE height >= $1 AND status = $2 + WHERE height >= $1 AND is_longest = true ` - longestBlocks := make([]*blocktx_api.Block, 0) - - rows, err := p.db.QueryContext(ctx, q, height, blocktx_api.Status_LONGEST) + rows, err := p.db.QueryContext(ctx, q, height) if err != nil { return nil, err } defer rows.Close() - for rows.Next() { - var block blocktx_api.Block - var processedAt sql.NullString - - err := rows.Scan( - &block.Hash, - &block.PreviousHash, - &block.MerkleRoot, - &block.Height, - &processedAt, - &block.Orphaned, - &block.Status, - 
&block.Chainwork, - ) - if err != nil { - return nil, err - } - - block.Processed = processedAt.Valid - - longestBlocks = append(longestBlocks, &block) - } - - return longestBlocks, nil + return p.parseBlocks(rows) } diff --git a/internal/blocktx/store/postgresql/get_orphaned_chain.go b/internal/blocktx/store/postgresql/get_orphaned_chain.go new file mode 100644 index 000000000..14ce171b3 --- /dev/null +++ b/internal/blocktx/store/postgresql/get_orphaned_chain.go @@ -0,0 +1,64 @@ +package postgresql + +import ( + "context" + + "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" +) + +// GetOrphanedChainUpFromHash is a function that recursively searches for blocks marked +// as ORPHANED from the given hash - up to the tip of orphaned chain of blocks. +// +// It searches for the block whose prevhash matches the hash of the given block, +// and then repeats that recursively for each newly found orphaned block until +// it has the entire orphaned chain. +func (p *PostgreSQL) GetOrphanedChainUpFromHash(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { + // The way this query works, is that the result from the first SELECT + // will be stored in the `orphans` variable, which is later used + // for recursion in the second SELECT. + // + // Then entire recursion happens in the second SELECT, after UNION ALL, + // and the first SELECT is just to set up the `orphans` variable with + // the first, initial value. Then, the `orphans` variable is recursively + // updated with values returned from the second SELECT. + q := ` + WITH RECURSIVE orphans AS ( + SELECT + hash + ,prevhash + ,merkleroot + ,height + ,processed_at + ,status + ,chainwork + FROM blocktx.blocks WHERE prevhash = $1 AND status = $2 + UNION ALL + SELECT + b.hash + ,b.prevhash + ,b.merkleroot + ,b.height + ,b.processed_at + ,b.status + ,b.chainwork + FROM blocktx.blocks b JOIN orphans o ON b.prevhash = o.hash AND b.status = $2 + ) + SELECT + hash + ,prevhash + ,merkleroot + ,height + ,processed_at + ,status + ,chainwork + FROM orphans + ` + + rows, err := p.db.QueryContext(ctx, q, hash, blocktx_api.Status_ORPHANED) + if err != nil { + return nil, err + } + defer rows.Close() + + return p.parseBlocks(rows) +} diff --git a/internal/blocktx/store/postgresql/get_stale_chain.go b/internal/blocktx/store/postgresql/get_stale_chain.go index 31b000028..71c96aa63 100644 --- a/internal/blocktx/store/postgresql/get_stale_chain.go +++ b/internal/blocktx/store/postgresql/get_stale_chain.go @@ -2,7 +2,6 @@ package postgresql import ( "context" - "database/sql" "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" ) @@ -36,7 +35,6 @@ func (p *PostgreSQL) GetStaleChainBackFromHash(ctx context.Context, hash []byte) ,merkleroot ,height ,processed_at - ,orphanedyn ,status ,chainwork FROM blocktx.blocks WHERE hash = $1 @@ -47,7 +45,6 @@ func (p *PostgreSQL) GetStaleChainBackFromHash(ctx context.Context, hash []byte) ,b.merkleroot ,b.height ,b.processed_at - ,b.orphanedyn ,b.status ,b.chainwork FROM blocktx.blocks b JOIN prevBlocks p ON b.hash = p.prevhash AND b.status = $2 @@ -58,12 +55,10 @@ func (p *PostgreSQL) GetStaleChainBackFromHash(ctx context.Context, hash []byte) ,merkleroot ,height ,processed_at - ,orphanedyn ,status ,chainwork FROM prevBlocks ` - staleBlocks := make([]*blocktx_api.Block, 0) rows, err := p.db.QueryContext(ctx, q, hash, blocktx_api.Status_STALE) if err != nil { @@ -71,28 +66,5 @@ func (p *PostgreSQL) GetStaleChainBackFromHash(ctx context.Context, hash []byte) } defer rows.Close() - for rows.Next() { - var 
block blocktx_api.Block - var processedAt sql.NullString - - err := rows.Scan( - &block.Hash, - &block.PreviousHash, - &block.MerkleRoot, - &block.Height, - &processedAt, - &block.Orphaned, - &block.Status, - &block.Chainwork, - ) - if err != nil { - return nil, err - } - - block.Processed = processedAt.Valid - - staleBlocks = append(staleBlocks, &block) - } - - return staleBlocks, nil + return p.parseBlocks(rows) } diff --git a/internal/blocktx/store/postgresql/get_transactions.go b/internal/blocktx/store/postgresql/get_transactions.go index 48bb16482..7815eaecd 100644 --- a/internal/blocktx/store/postgresql/get_transactions.go +++ b/internal/blocktx/store/postgresql/get_transactions.go @@ -9,22 +9,25 @@ import ( "go.opentelemetry.io/otel/trace" ) -func (p *PostgreSQL) GetMinedTransactions(ctx context.Context, hashes [][]byte) ([]store.TransactionBlock, error) { +func (p *PostgreSQL) GetMinedTransactions(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) { if tracer != nil { var span trace.Span ctx, span = tracer.Start(ctx, "GetMinedTransactions") defer span.End() } - predicate := "WHERE t.hash = ANY($1) AND b.status = $2" - - return p.getTransactionBlocksByPredicate(ctx, predicate, pq.Array(hashes), blocktx_api.Status_LONGEST) -} + if onlyLongestChain { + predicate := "WHERE t.hash = ANY($1) AND b.is_longest = true" + return p.getTransactionBlocksByPredicate(ctx, predicate, pq.Array(hashes)) + } -func (p *PostgreSQL) GetRegisteredTransactions(ctx context.Context, blockId uint64) ([]store.TransactionBlock, error) { - predicate := "WHERE m.blockid = $1 AND t.is_registered = TRUE" + predicate := "WHERE t.hash = ANY($1) AND (b.status = $2 OR b.status = $3)" - return p.getTransactionBlocksByPredicate(ctx, predicate, blockId) + return p.getTransactionBlocksByPredicate(ctx, predicate, + pq.Array(hashes), + blocktx_api.Status_LONGEST, + blocktx_api.Status_STALE, + ) } func (p *PostgreSQL) GetRegisteredTxsByBlockHashes(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { diff --git a/internal/blocktx/store/postgresql/insert_block.go b/internal/blocktx/store/postgresql/insert_block.go index 1da99ea92..cab0188fb 100644 --- a/internal/blocktx/store/postgresql/insert_block.go +++ b/internal/blocktx/store/postgresql/insert_block.go @@ -15,10 +15,23 @@ func (p *PostgreSQL) UpsertBlock(ctx context.Context, block *blocktx_api.Block) tracing.EndTracing(span, err) }() + // This query will insert a block ONLY if one of the 3 conditions is met: + // 1. Block being inserted is `ORPHANED` and there's no previous block in the database + // 2. The block being inserted has the same status as its previous block + // 3. The block being inserted has status `STALE` but the previous block was `LONGEST` + // Any other situation would mean an error in block processing + // (probably because of another block being inserted by other blocktx instance at the same time) + // and requires the block to be received and processed again. 
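	// For illustration only (derived from the three conditions above, not an
	// exhaustive decision table):
	//   - incoming block has the same status as its previous block    -> inserted (condition 2)
	//   - incoming STALE block whose previous block is LONGEST        -> inserted (condition 3)
	//   - incoming ORPHANED block with no previous block in the table -> inserted (condition 1)
	//   - anything else (e.g. a LONGEST block on top of a STALE one) is not
	//     inserted and the block has to be received and processed again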
qInsert := `
- INSERT INTO blocktx.blocks (hash, prevhash, merkleroot, height, status, chainwork)
- VALUES ($1 ,$2 , $3, $4, $5, $6)
- ON CONFLICT (hash) DO UPDATE SET orphanedyn = FALSE
+ INSERT INTO blocktx.blocks (hash, prevhash, merkleroot, height, status, chainwork, is_longest)
+ SELECT v.hash, v.prevhash, v.merkleroot, v.height, v.status, v.chainwork, v.is_longest
+ FROM (VALUES ($1::BYTEA, $2::BYTEA, $3::BYTEA, $4::BIGINT, $5::INTEGER, $6::TEXT, $7::BOOLEAN))
+ AS v(hash, prevhash, merkleroot, height, status, chainwork, is_longest)
+ LEFT JOIN blocktx.blocks AS prevblock ON prevblock.hash = v.prevhash
+ WHERE ((v.status = $8 OR v.status = $9) AND prevblock.id IS NULL)
+ OR prevblock.status = $5
+ OR (prevblock.status = $9 AND $5 = $10)
+ ON CONFLICT (hash) DO UPDATE SET status = EXCLUDED.status
RETURNING id
`
@@ -29,6 +42,10 @@ func (p *PostgreSQL) UpsertBlock(ctx context.Context, block *blocktx_api.Block)
block.GetHeight(),
block.GetStatus(),
block.GetChainwork(),
+ block.GetStatus() == blocktx_api.Status_LONGEST,
+ blocktx_api.Status_ORPHANED,
+ blocktx_api.Status_LONGEST,
+ blocktx_api.Status_STALE,
)
err = row.Scan(&blockID)
diff --git a/internal/blocktx/store/postgresql/mark_block_as_done.go b/internal/blocktx/store/postgresql/mark_block_as_done.go
index 6cda2c67b..c17bf58b4 100644
--- a/internal/blocktx/store/postgresql/mark_block_as_done.go
+++ b/internal/blocktx/store/postgresql/mark_block_as_done.go
@@ -19,7 +19,7 @@ func (p *PostgreSQL) MarkBlockAsDone(ctx context.Context, hash *chainhash.Hash,
SET processed_at = $4,
size = $1,
tx_count = $2
- WHERE hash = $3
+ WHERE hash = $3 AND processed_at IS NULL
`
if _, err = p.db.ExecContext(ctx, q, size, txCount, hash[:], p.now()); err != nil {
diff --git a/internal/blocktx/store/postgresql/migrations/000018_remove_orphanedyn_field.down.sql b/internal/blocktx/store/postgresql/migrations/000018_remove_orphanedyn_field.down.sql
new file mode 100644
index 000000000..575f2acd6
--- /dev/null
+++ b/internal/blocktx/store/postgresql/migrations/000018_remove_orphanedyn_field.down.sql
@@ -0,0 +1,2 @@
+ALTER TABLE blocktx.blocks
+ADD COLUMN orphanedyn BOOLEAN NOT NULL DEFAULT FALSE;
diff --git a/internal/blocktx/store/postgresql/migrations/000018_remove_orphanedyn_field.up.sql b/internal/blocktx/store/postgresql/migrations/000018_remove_orphanedyn_field.up.sql
new file mode 100644
index 000000000..d3861e568
--- /dev/null
+++ b/internal/blocktx/store/postgresql/migrations/000018_remove_orphanedyn_field.up.sql
@@ -0,0 +1 @@
+ALTER TABLE blocktx.blocks DROP COLUMN orphanedyn;
diff --git a/internal/blocktx/store/postgresql/migrations/000019_add_is_longest.down.sql b/internal/blocktx/store/postgresql/migrations/000019_add_is_longest.down.sql
new file mode 100644
index 000000000..8fd5c89c6
--- /dev/null
+++ b/internal/blocktx/store/postgresql/migrations/000019_add_is_longest.down.sql
@@ -0,0 +1,4 @@
+DROP INDEX blocktx.pux_height_is_longest;
+DROP INDEX blocktx.ix_block_is_longest;
+ALTER TABLE blocktx.blocks
+DROP COLUMN is_longest;
diff --git a/internal/blocktx/store/postgresql/migrations/000019_add_is_longest.up.sql b/internal/blocktx/store/postgresql/migrations/000019_add_is_longest.up.sql
new file mode 100644
index 000000000..d3527fbea
--- /dev/null
+++ b/internal/blocktx/store/postgresql/migrations/000019_add_is_longest.up.sql
@@ -0,0 +1,13 @@
+-- field `is_longest` is an implementation detail that will help to
+-- make sure that there is only one longest chain at any given height
+-- and is also used as a helper when querying for the longest chain
+ALTER TABLE blocktx.blocks
+ADD COLUMN is_longest BOOLEAN NOT NULL DEFAULT TRUE;
+
+-- This will make it faster to search for blocks WHERE is_longest = true
+CREATE INDEX ix_block_is_longest ON blocktx.blocks(is_longest);
+
+-- This will make sure that there can only be ONE block at any
+-- given height that is considered part of the LONGEST chain.
+CREATE UNIQUE INDEX pux_height_is_longest ON blocktx.blocks(height)
+WHERE is_longest;
diff --git a/internal/blocktx/store/postgresql/postgres.go b/internal/blocktx/store/postgresql/postgres.go
index 53fbf7b50..19353cb5d 100644
--- a/internal/blocktx/store/postgresql/postgres.go
+++ b/internal/blocktx/store/postgresql/postgres.go
@@ -13,13 +13,26 @@ import (
"github.com/bitcoin-sv/arc/internal/blocktx/store"
)
+var ErrNoTransaction = errors.New("sql: transaction has already been committed or rolled back")
+
const (
postgresDriverName = "postgres"
maxPostgresBulkInsertRows = 8192
)
+type QueryAble interface {
+ ExecContext(ctx context.Context, query string, args ...interface{}) (sql.Result, error)
+ Prepare(query string) (*sql.Stmt, error)
+ Query(query string, args ...interface{}) (*sql.Rows, error)
+ QueryContext(ctx context.Context, query string, args ...interface{}) (*sql.Rows, error)
+ QueryRow(query string, args ...interface{}) *sql.Row
+ QueryRowContext(ctx context.Context, query string, args ...interface{}) *sql.Row
+}
+
type PostgreSQL struct {
- db *sql.DB
+ _db *sql.DB
+ _tx *sql.Tx
+ db QueryAble // points to either _db or _tx
now func() time.Time
maxPostgresBulkInsertRows int
tracingEnabled bool
@@ -58,11 +71,13 @@ func New(dbInfo string, idleConns int, maxOpenConns int, opts ...func(postgreSQL
db.SetMaxOpenConns(maxOpenConns)
p := &PostgreSQL{
- db: db,
+ _db: db,
now: time.Now,
maxPostgresBulkInsertRows: maxPostgresBulkInsertRows,
}
+ p.db = p._db
+
for _, opt := range opts {
opt(p)
}
@@ -70,15 +85,54 @@ func New(dbInfo string, idleConns int, maxOpenConns int, opts ...func(postgreSQL
return p, nil
}
+func (p *PostgreSQL) BeginTx(ctx context.Context) (store.DbTransaction, error) {
+ tx, err := p._db.BeginTx(ctx, nil)
+ if err != nil {
+ return nil, err
+ }
+
+ p._tx = tx
+ p.db = p._tx
+
+ return p, nil
+}
+
func (p *PostgreSQL) Close() error {
- return p.db.Close()
+ return p._db.Close()
}
func (p *PostgreSQL) Ping(ctx context.Context) error {
- r, err := p.db.QueryContext(ctx, "SELECT 1;")
+ r, err := p._db.QueryContext(ctx, "SELECT 1;")
if err != nil {
return err
}
return r.Close()
}
+
+func (p *PostgreSQL) Commit() error {
+ p.db = p._db
+ return p._tx.Commit()
+}
+
+func (p *PostgreSQL) Rollback() error {
+ p.db = p._db
+ return p._tx.Rollback()
+}
+
+func (p *PostgreSQL) WriteLockBlocksTable(ctx context.Context) error {
+ tx, ok := p.db.(*sql.Tx)
+ if !ok {
+ return ErrNoTransaction
+ }
+
+ // This will lock the `blocks` table for writing when performing a reorg.
+ // Any INSERT or UPDATE to the table will wait until the lock is released.
+ // Another instance wanting to acquire this lock at the same time will have
+ // to wait until the transaction holding the lock is completed and the lock
+ // is released.
+ //
+ // Reading from the table is still allowed.
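	// A minimal usage sketch (illustrative; error handling elided), mirroring
	// the "lock blocks table" test in postgres_test.go:
	//
	//	tx, _ := p.BeginTx(ctx)          // p is the *PostgreSQL store; returns store.DbTransaction
	//	_ = tx.WriteLockBlocksTable(ctx) // concurrent writers now wait
	//	// ... perform the reorg updates ...
	//	_ = tx.Commit()                  // or tx.Rollback() on failure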
+ _, err := tx.ExecContext(ctx, "LOCK TABLE blocktx.blocks IN EXCLUSIVE MODE") + return err +} diff --git a/internal/blocktx/store/postgresql/postgres_helpers.go b/internal/blocktx/store/postgresql/postgres_helpers.go new file mode 100644 index 000000000..5bd3e603e --- /dev/null +++ b/internal/blocktx/store/postgresql/postgres_helpers.go @@ -0,0 +1,35 @@ +package postgresql + +import ( + "database/sql" + + "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" +) + +func (p *PostgreSQL) parseBlocks(rows *sql.Rows) ([]*blocktx_api.Block, error) { + blocks := make([]*blocktx_api.Block, 0) + + for rows.Next() { + var block blocktx_api.Block + var processedAt sql.NullString + + err := rows.Scan( + &block.Hash, + &block.PreviousHash, + &block.MerkleRoot, + &block.Height, + &processedAt, + &block.Status, + &block.Chainwork, + ) + if err != nil { + return nil, err + } + + block.Processed = processedAt.Valid + + blocks = append(blocks, &block) + } + + return blocks, nil +} diff --git a/internal/blocktx/store/postgresql/postgres_test.go b/internal/blocktx/store/postgresql/postgres_test.go index a90fdef20..e2400bc8b 100644 --- a/internal/blocktx/store/postgresql/postgres_test.go +++ b/internal/blocktx/store/postgresql/postgres_test.go @@ -3,7 +3,7 @@ package postgresql import ( "bytes" "context" - "database/sql" + "errors" "log" "os" "testing" @@ -31,9 +31,9 @@ type Block struct { MerkleRoot string `db:"merkleroot"` MerklePath *string `db:"merkle_path"` Height int64 `db:"height"` - Orphaned bool `db:"orphanedyn"` Status int `db:"status"` Chainwork string `db:"chainwork"` + IsLongest bool `db:"is_longest"` Size *int64 `db:"size"` TxCount *int64 `db:"tx_count"` Processed bool `db:"processed"` @@ -89,17 +89,17 @@ func testmain(m *testing.M) int { return m.Run() } -func prepareDb(t *testing.T, db *sql.DB, fixture string) { +func prepareDb(t *testing.T, postgres *PostgreSQL, fixture string) { t.Helper() - testutils.PruneTables(t, db, + testutils.PruneTables(t, postgres._db, "blocktx.blocks", "blocktx.transactions", "blocktx.block_transactions_map", ) if fixture != "" { - testutils.LoadFixtures(t, db, fixture) + testutils.LoadFixtures(t, postgres._db, fixture) } } @@ -133,16 +133,32 @@ func TestPostgresDB(t *testing.T) { t.Run("insert block / get block", func(t *testing.T) { // given - prepareDb(t, postgresDB.db, "") + prepareDb(t, postgresDB, "") blockHash1 := testutils.RevChainhash(t, "000000000000000001b8adefc1eb98896c80e30e517b9e2655f1f929d9958a48") blockHash2 := testutils.RevChainhash(t, "00000000000000000a081a539601645abe977946f8f6466a3c9e0c34d50be4a8") + blockHashViolating := testutils.RevChainhash(t, "00000000b69bd8e4dc60580117617a466d5c76ada85fb7b87e9baea01f9d9984") merkleRoot := testutils.RevChainhash(t, "31e25c5ac7c143687f55fc49caf0f552ba6a16d4f785e4c9a9a842179a085f0c") expectedBlock := &blocktx_api.Block{ Hash: blockHash2[:], PreviousHash: blockHash1[:], MerkleRoot: merkleRoot[:], Height: 100, + Status: blocktx_api.Status_LONGEST, + } + expectedBlockViolatingUniqueIndex := &blocktx_api.Block{ + Hash: blockHashViolating[:], + PreviousHash: blockHash1[:], + MerkleRoot: merkleRoot[:], + Height: 100, + Status: blocktx_api.Status_LONGEST, + } + expectedBlockOverrideStatus := &blocktx_api.Block{ + Hash: blockHash2[:], + PreviousHash: blockHash1[:], + MerkleRoot: merkleRoot[:], + Height: 100, + Status: blocktx_api.Status_ORPHANED, } // when -> then @@ -153,11 +169,27 @@ func TestPostgresDB(t *testing.T) { actualBlockResp, err := postgresDB.GetBlock(ctx, blockHash2) require.NoError(t, err) 
require.Equal(t, expectedBlock, actualBlockResp) + + // when + id, err = postgresDB.InsertBlock(ctx, expectedBlockViolatingUniqueIndex) + + // then + require.True(t, errors.Is(err, store.ErrFailedToInsertBlock)) + + // when + id, err = postgresDB.InsertBlock(ctx, expectedBlockOverrideStatus) + require.NoError(t, err) + require.Equal(t, uint64(1), id) // this should only update the status and retain the same ID + + // then + actualBlockResp, err = postgresDB.GetBlock(ctx, blockHash2) + require.NoError(t, err) + require.Equal(t, expectedBlockOverrideStatus, actualBlockResp) }) t.Run("get block by height / get chain tip", func(t *testing.T) { // given - prepareDb(t, postgresDB.db, "fixtures/get_block_by_height") + prepareDb(t, postgresDB, "fixtures/get_block_by_height") height := uint64(822015) expectedHashAtHeightLongest := testutils.RevChainhash(t, "c9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000") @@ -189,30 +221,36 @@ func TestPostgresDB(t *testing.T) { t.Run("get block gaps", func(t *testing.T) { // given - prepareDb(t, postgresDB.db, "fixtures/get_block_gaps") + prepareDb(t, postgresDB, "fixtures/get_block_gaps") hash822014 := testutils.RevChainhash(t, "67708796ef57464ed9eaf2a663d3da32372e4c2fb65558020000000000000000") hash822019 := testutils.RevChainhash(t, "5696fc6e504b6aa2ae5d9c46b9418192dc61bd1b2e3364030000000000000000") hash822020 := testutils.RevChainhash(t, "76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000") hash822009 := testutils.RevChainhash(t, "4ad773b1a464129a0ed8c7a8c71bb98175f0f01da1793f0e0000000000000000") + hash822017competing := testutils.RevChainhash(t, "00000000000000000d840fb91c0df3b057db04a0250c6d88b2f25aadcfc8410b") expectedBlockGaps := []*store.BlockGap{ { // gap - Height: 822019, - Hash: hash822019, + Height: 822009, + Hash: hash822009, }, + // block 11 is being processed { // gap Height: 822014, Hash: hash822014, }, + { // gap from competing chain + Height: 822017, + Hash: hash822017competing, + }, + { // gap + Height: 822019, + Hash: hash822019, + }, { // processing not finished Height: 822020, Hash: hash822020, }, - { // gap - Height: 822009, - Hash: hash822009, - }, } // when @@ -220,7 +258,7 @@ func TestPostgresDB(t *testing.T) { // then require.NoError(t, err) - require.Equal(t, 4, len(actualBlockGaps)) + require.Equal(t, len(expectedBlockGaps), len(actualBlockGaps)) require.ElementsMatch(t, expectedBlockGaps, actualBlockGaps) }) @@ -239,7 +277,7 @@ func TestPostgresDB(t *testing.T) { t.Run("get longest chain from height", func(t *testing.T) { // given - prepareDb(t, postgresDB.db, "fixtures/get_longest_chain") + prepareDb(t, postgresDB, "fixtures/get_longest_chain") startingHeight := uint64(822014) hash0Longest := testutils.RevChainhash(t, "0000000000000000025855b62f4c2e3732dad363a6f2ead94e4657ef96877067") @@ -263,7 +301,7 @@ func TestPostgresDB(t *testing.T) { t.Run("get stale chain back from hash", func(t *testing.T) { // given - prepareDb(t, postgresDB.db, "fixtures/get_stale_chain") + prepareDb(t, postgresDB, "fixtures/get_stale_chain") hash2Stale := testutils.RevChainhash(t, "00000000000000000659df0d3cf98ebe46931b67117502168418f9dce4e1b4c9") hash3Stale := testutils.RevChainhash(t, "0000000000000000082ec88d757ddaeb0aa87a5d5408b5960f27e7e67312dfe1") @@ -286,9 +324,35 @@ func TestPostgresDB(t *testing.T) { } }) + t.Run("get orphaned chain up from hash", func(t *testing.T) { + // given + prepareDb(t, postgresDB, "fixtures/get_orphaned_chain") + + hashGapFiller := testutils.RevChainhash(t, 
"0000000000000000025855b62f4c2e3732dad363a6f2ead94e4657ef96877067") + hash2Orphaned := testutils.RevChainhash(t, "000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde") + hash3Orphaned := testutils.RevChainhash(t, "00000000000000000659df0d3cf98ebe46931b67117502168418f9dce4e1b4c9") + hash4Orphaned := testutils.RevChainhash(t, "0000000000000000082ec88d757ddaeb0aa87a5d5408b5960f27e7e67312dfe1") + + expectedOrphanedHashes := [][]byte{ + hash2Orphaned[:], + hash3Orphaned[:], + hash4Orphaned[:], + } + + // when + actualOrphanedBlocks, err := postgresDB.GetOrphanedChainUpFromHash(ctx, hashGapFiller[:]) + require.NoError(t, err) + + // then + require.Equal(t, len(expectedOrphanedHashes), len(actualOrphanedBlocks)) + for i, b := range actualOrphanedBlocks { + require.Equal(t, expectedOrphanedHashes[i], b.Hash) + } + }) + t.Run("update blocks statuses", func(t *testing.T) { // given - prepareDb(t, postgresDB.db, "fixtures/update_blocks_statuses") + prepareDb(t, postgresDB, "fixtures/update_blocks_statuses") hash1Longest := testutils.RevChainhash(t, "000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde") hash2Stale := testutils.RevChainhash(t, "00000000000000000659df0d3cf98ebe46931b67117502168418f9dce4e1b4c9") @@ -302,6 +366,11 @@ func TestPostgresDB(t *testing.T) { {Hash: hash4Stale[:], Status: blocktx_api.Status_LONGEST}, } + blockStatusUpdatesViolating := []store.BlockStatusUpdate{ + // there is already a LONGEST block at that height + {Hash: hash1Longest[:], Status: blocktx_api.Status_LONGEST}, + } + // when err := postgresDB.UpdateBlocksStatuses(ctx, blockStatusUpdates) require.NoError(t, err) @@ -322,17 +391,22 @@ func TestPostgresDB(t *testing.T) { stale4, err := postgresDB.GetBlock(ctx, hash4Stale) require.NoError(t, err) require.Equal(t, blocktx_api.Status_LONGEST, stale4.Status) + + // when + err = postgresDB.UpdateBlocksStatuses(ctx, blockStatusUpdatesViolating) + require.Equal(t, store.ErrFailedToUpdateBlockStatuses, err) }) t.Run("get mined txs", func(t *testing.T) { // given - prepareDb(t, postgresDB.db, "fixtures/get_transactions") + prepareDb(t, postgresDB, "fixtures/get_transactions") txHash1 := testutils.RevChainhash(t, "cd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853") txHash2 := testutils.RevChainhash(t, "21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e") - txHash3 := testutils.RevChainhash(t, "213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0") // should not be found - from STALE block + txHash3 := testutils.RevChainhash(t, "213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0") // from STALE block blockHash := testutils.RevChainhash(t, "000000000000000005aa39a25e7e8bf440c270ec9a1bd30e99ab026f39207ef9") + blockHash2 := testutils.RevChainhash(t, "0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7") expectedTxs := []store.TransactionBlock{ { @@ -349,10 +423,26 @@ func TestPostgresDB(t *testing.T) { MerklePath: "merkle-path-2", BlockStatus: blocktx_api.Status_LONGEST, }, + { + TxHash: txHash3[:], + BlockHash: blockHash2[:], + BlockHeight: 822012, + MerklePath: "merkle-path-6", + BlockStatus: blocktx_api.Status_STALE, + }, } // when - actualTxs, err := postgresDB.GetMinedTransactions(ctx, [][]byte{txHash1[:], txHash2[:], txHash3[:]}) + onlyLongestChain := true + actualTxs, err := postgresDB.GetMinedTransactions(ctx, [][]byte{txHash1[:], txHash2[:], txHash3[:]}, onlyLongestChain) + + // then + require.NoError(t, err) + require.ElementsMatch(t, expectedTxs[:2], actualTxs) + + // when + 
onlyLongestChain = false + actualTxs, err = postgresDB.GetMinedTransactions(ctx, [][]byte{txHash1[:], txHash2[:], txHash3[:]}, onlyLongestChain) // then require.NoError(t, err) @@ -361,22 +451,29 @@ func TestPostgresDB(t *testing.T) { t.Run("get registered txs", func(t *testing.T) { // given - prepareDb(t, postgresDB.db, "fixtures/get_transactions") + prepareDb(t, postgresDB, "fixtures/get_transactions") - blockId := uint64(2) - blockHash := testutils.RevChainhash(t, "0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7") + blockHash := testutils.RevChainhash(t, "000000000000000005aa39a25e7e8bf440c270ec9a1bd30e99ab026f39207ef9") + blockHash2 := testutils.RevChainhash(t, "0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7") expectedTxs := []store.TransactionBlock{ { - TxHash: testutils.RevChainhash(t, "213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0")[:], + TxHash: testutils.RevChainhash(t, "21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e")[:], BlockHash: blockHash[:], + BlockHeight: 822013, + MerklePath: "merkle-path-2", + BlockStatus: blocktx_api.Status_LONGEST, + }, + { + TxHash: testutils.RevChainhash(t, "213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0")[:], + BlockHash: blockHash2[:], BlockHeight: 822012, MerklePath: "merkle-path-6", BlockStatus: blocktx_api.Status_STALE, }, { TxHash: testutils.RevChainhash(t, "12c04cfc5643f1cd25639ad42d6f8f0489557699d92071d7e0a5b940438c4357")[:], - BlockHash: blockHash[:], + BlockHash: blockHash2[:], BlockHeight: 822012, MerklePath: "merkle-path-7", BlockStatus: blocktx_api.Status_STALE, @@ -384,7 +481,7 @@ func TestPostgresDB(t *testing.T) { } // when - actualTxs, err := postgresDB.GetRegisteredTransactions(ctx, blockId) + actualTxs, err := postgresDB.GetRegisteredTxsByBlockHashes(ctx, [][]byte{blockHash[:], blockHash2[:]}) // then require.NoError(t, err) @@ -393,7 +490,7 @@ func TestPostgresDB(t *testing.T) { t.Run("get registered txs by block hashes", func(t *testing.T) { // given - prepareDb(t, postgresDB.db, "fixtures/get_transactions") + prepareDb(t, postgresDB, "fixtures/get_transactions") blockHashLongest := testutils.RevChainhash(t, "000000000000000005aa39a25e7e8bf440c270ec9a1bd30e99ab026f39207ef9") blockHashStale := testutils.RevChainhash(t, "0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7") @@ -436,7 +533,7 @@ func TestPostgresDB(t *testing.T) { }) t.Run("clear data", func(t *testing.T) { - prepareDb(t, postgresDB.db, "fixtures/clear_data") + prepareDb(t, postgresDB, "fixtures/clear_data") resp, err := postgresDB.ClearBlocktxTable(context.Background(), 10, "blocks") require.NoError(t, err) @@ -470,7 +567,7 @@ func TestPostgresDB(t *testing.T) { }) t.Run("set/get/del block processing", func(t *testing.T) { - prepareDb(t, postgresDB.db, "fixtures/block_processing") + prepareDb(t, postgresDB, "fixtures/block_processing") bh1 := testutils.RevChainhash(t, "747468cf7e6639ba9aa277ade1cf27639b0f214cec5719020000000000000000") @@ -503,7 +600,7 @@ func TestPostgresDB(t *testing.T) { t.Run("mark block as done", func(t *testing.T) { // given - prepareDb(t, postgresDB.db, "fixtures/mark_block_as_done") + prepareDb(t, postgresDB, "fixtures/mark_block_as_done") bh1 := testutils.RevChainhash(t, "b71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000") @@ -528,7 +625,7 @@ func TestPostgresDB(t *testing.T) { t.Run("verify merkle roots", func(t *testing.T) { // given - prepareDb(t, postgresDB.db, "fixtures/verify_merkle_roots") + prepareDb(t, postgresDB, 
"fixtures/verify_merkle_roots") merkleRequests := []*blocktx_api.MerkleRootVerificationRequest{ { @@ -572,6 +669,24 @@ func TestPostgresDB(t *testing.T) { // then assert.Equal(t, expectedUnverifiedBlockHeights, res.UnverifiedBlockHeights) }) + + t.Run("lock blocks table", func(t *testing.T) { + err := postgresDB.WriteLockBlocksTable(context.Background()) + require.Error(t, err) + require.Equal(t, ErrNoTransaction, err) + + tx, err := postgresDB.BeginTx(context.Background()) + require.NoError(t, err) + + err = tx.WriteLockBlocksTable(context.Background()) + require.NoError(t, err) + + err = tx.Rollback() + require.NoError(t, err) + + err = tx.Commit() + require.Equal(t, ErrNoTransaction, err) + }) } func TestPostgresStore_UpsertBlockTransactions(t *testing.T) { @@ -661,9 +776,10 @@ func TestPostgresStore_UpsertBlockTransactions(t *testing.T) { for _, tc := range tcs { t.Run(tc.name, func(t *testing.T) { // given - prepareDb(t, sut.db, "fixtures/upsert_block_transactions") + prepareDb(t, sut, "fixtures/upsert_block_transactions") testBlockID := uint64(9736) + testBlockHash := testutils.RevChainhash(t, "6258b02da70a3e367e4c993b049fa9b76ef8f090ef9fd2010000000000000000") // when err := sut.UpsertBlockTransactions(ctx, testBlockID, tc.txsWithMerklePaths) @@ -675,7 +791,7 @@ func TestPostgresStore_UpsertBlockTransactions(t *testing.T) { // then require.NoError(t, err) - res, err := sut.GetRegisteredTransactions(ctx, testBlockID) + res, err := sut.GetRegisteredTxsByBlockHashes(ctx, [][]byte{testBlockHash[:]}) require.NoError(t, err) require.Equal(t, tc.expectedUpdatedResLen, len(res)) @@ -720,7 +836,7 @@ func TestPostgresStore_UpsertBlockTransactions_CompetingBlocks(t *testing.T) { defer sut.Close() sut.maxPostgresBulkInsertRows = 5 - prepareDb(t, sut.db, "fixtures/upsert_block_transactions") + prepareDb(t, sut, "fixtures/upsert_block_transactions") testBlockID := uint64(9736) competingBlockID := uint64(9737) @@ -749,13 +865,6 @@ func TestPostgresStore_UpsertBlockTransactions_CompetingBlocks(t *testing.T) { MerklePath: "merkle-path-1", BlockStatus: blocktx_api.Status_LONGEST, }, - { - TxHash: txHash[:], - BlockHash: testutils.RevChainhash(t, "7258b02da70a3e367e4c993b049fa9b76ef8f090ef9fd2010000000000000000")[:], - BlockHeight: uint64(826481), - MerklePath: "merkle-path-2", - BlockStatus: blocktx_api.Status_LONGEST, - }, } // when @@ -766,7 +875,7 @@ func TestPostgresStore_UpsertBlockTransactions_CompetingBlocks(t *testing.T) { require.NoError(t, err) // then - actual, err := sut.GetMinedTransactions(ctx, [][]byte{txHash[:]}) + actual, err := sut.GetMinedTransactions(ctx, [][]byte{txHash[:]}, true) require.NoError(t, err) require.ElementsMatch(t, expected, actual) @@ -817,7 +926,7 @@ func TestPostgresStore_RegisterTransactions(t *testing.T) { for _, tc := range tcs { t.Run(tc.name, func(t *testing.T) { // given - prepareDb(t, sut.db, "fixtures/register_transactions") + prepareDb(t, sut, "fixtures/register_transactions") // when result, err := sut.RegisterTransactions(ctx, tc.txs) @@ -855,3 +964,132 @@ func TestPostgresStore_RegisterTransactions(t *testing.T) { }) } } + +func TestInsertBlockConditions(t *testing.T) { + tt := []struct { + name string + blockStatus blocktx_api.Status + prevBlockExists bool + prevBlockStatus blocktx_api.Status + + shouldSucceed bool + }{ + { + name: "extend longest chain - success", + blockStatus: blocktx_api.Status_LONGEST, + prevBlockExists: true, + prevBlockStatus: blocktx_api.Status_LONGEST, + shouldSucceed: true, + }, + { + name: "extend stale chain - sucsess", + 
blockStatus: blocktx_api.Status_STALE, + prevBlockExists: true, + prevBlockStatus: blocktx_api.Status_STALE, + shouldSucceed: true, + }, + { + name: "extend orphaned chain - success", + blockStatus: blocktx_api.Status_ORPHANED, + prevBlockExists: true, + prevBlockStatus: blocktx_api.Status_ORPHANED, + shouldSucceed: true, + }, + { + name: "stale block extends longest - success", + blockStatus: blocktx_api.Status_STALE, + prevBlockExists: true, + prevBlockStatus: blocktx_api.Status_LONGEST, + shouldSucceed: true, + }, + { + name: "orphan block - success", + blockStatus: blocktx_api.Status_ORPHANED, + prevBlockExists: false, + shouldSucceed: true, + }, + { + name: "stale block with no prevBlock - fail", + blockStatus: blocktx_api.Status_STALE, + prevBlockExists: false, + shouldSucceed: false, + }, + { + name: "orphan block extending longest chain - fail", + blockStatus: blocktx_api.Status_ORPHANED, + prevBlockExists: true, + prevBlockStatus: blocktx_api.Status_LONGEST, + shouldSucceed: false, + }, + { + name: "orphan block extending stale chain - fail", + blockStatus: blocktx_api.Status_ORPHANED, + prevBlockExists: true, + prevBlockStatus: blocktx_api.Status_STALE, + shouldSucceed: false, + }, + { + name: "longest block extending stale chain - fail", + blockStatus: blocktx_api.Status_LONGEST, + prevBlockExists: true, + prevBlockStatus: blocktx_api.Status_STALE, + shouldSucceed: false, + }, + } + + // common setup for test cases + ctx, _, sut := setupPostgresTest(t) + defer sut.Close() + + for _, tc := range tt { + t.Run(tc.name, func(t *testing.T) { + // given + prepareDb(t, sut, "fixtures/insert_block") + + blockHashLongest := testutils.RevChainhash(t, "000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde") + blockHashStale := testutils.RevChainhash(t, "00000000000000000659df0d3cf98ebe46931b67117502168418f9dce4e1b4c9") + blockHashOrphaned := testutils.RevChainhash(t, "0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7") + randomPrevBlockHash := testutils.RevChainhash(t, "0000000000000000099da871f74c55a6305e6a37ef8bf955ad7d29ca4b44fda9") + + var prevBlockHash []byte + + if tc.prevBlockExists { + switch tc.prevBlockStatus { + case blocktx_api.Status_LONGEST: + prevBlockHash = blockHashLongest[:] + case blocktx_api.Status_STALE: + prevBlockHash = blockHashStale[:] + case blocktx_api.Status_ORPHANED: + prevBlockHash = blockHashOrphaned[:] + } + } else { + prevBlockHash = randomPrevBlockHash[:] + } + + blockHash := testutils.RevChainhash(t, "0000000000000000082ec88d757ddaeb0aa87a5d5408b5960f27e7e67312dfe1") + merkleRoot := testutils.RevChainhash(t, "7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257") + + block := &blocktx_api.Block{ + Hash: blockHash[:], + PreviousHash: prevBlockHash, + MerkleRoot: merkleRoot[:], + Height: 822016, + Processed: true, + Status: tc.blockStatus, + Chainwork: "123", + } + + // when + blockId, err := sut.InsertBlock(ctx, block) + + // then + if tc.shouldSucceed { + require.NotEqual(t, uint64(0), blockId) + require.NoError(t, err) + } else { + require.Equal(t, uint64(0), blockId) + require.True(t, errors.Is(err, store.ErrFailedToInsertBlock)) + } + }) + } +} diff --git a/internal/blocktx/store/postgresql/update_block_statuses.go b/internal/blocktx/store/postgresql/update_block_statuses.go index dc5e19dd3..ca88bf058 100644 --- a/internal/blocktx/store/postgresql/update_block_statuses.go +++ b/internal/blocktx/store/postgresql/update_block_statuses.go @@ -2,6 +2,7 @@ package postgresql import ( "context" + "errors" 
"github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" "github.com/bitcoin-sv/arc/internal/blocktx/store" @@ -11,22 +12,24 @@ import ( func (p *PostgreSQL) UpdateBlocksStatuses(ctx context.Context, blockStatusUpdates []store.BlockStatusUpdate) error { q := ` UPDATE blocktx.blocks b - SET status = updates.status - FROM (SELECT * FROM UNNEST($1::BYTEA[], $2::INTEGER[]) AS u(hash, status)) AS updates + SET status = updates.status, is_longest = updates.is_longest + FROM (SELECT * FROM UNNEST($1::BYTEA[], $2::INTEGER[], $3::BOOLEAN[]) AS u(hash, status, is_longest)) AS updates WHERE b.hash = updates.hash ` blockHashes := make([][]byte, len(blockStatusUpdates)) statuses := make([]blocktx_api.Status, len(blockStatusUpdates)) + is_longest := make([]bool, len(blockStatusUpdates)) for i, update := range blockStatusUpdates { blockHashes[i] = update.Hash statuses[i] = update.Status + is_longest[i] = update.Status == blocktx_api.Status_LONGEST } - _, err := p.db.ExecContext(ctx, q, pq.Array(blockHashes), pq.Array(statuses)) + _, err := p.db.ExecContext(ctx, q, pq.Array(blockHashes), pq.Array(statuses), pq.Array(is_longest)) if err != nil { - return err + return errors.Join(store.ErrFailedToUpdateBlockStatuses, err) } return nil diff --git a/internal/blocktx/store/postgresql/verify_merkle_roots.go b/internal/blocktx/store/postgresql/verify_merkle_roots.go index 9f303d719..a02d201ee 100644 --- a/internal/blocktx/store/postgresql/verify_merkle_roots.go +++ b/internal/blocktx/store/postgresql/verify_merkle_roots.go @@ -17,7 +17,7 @@ func (p *PostgreSQL) VerifyMerkleRoots( maxAllowedBlockHeightMismatch int, ) (*blocktx_api.MerkleRootVerificationResponse, error) { qTopHeight := ` - SELECT MAX(b.height), MIN(b.height) FROM blocktx.blocks b WHERE b.orphanedyn = false + SELECT MAX(b.height), MIN(b.height) FROM blocktx.blocks b WHERE b.is_longest = true ` var topHeight uint64 @@ -32,7 +32,7 @@ func (p *PostgreSQL) VerifyMerkleRoots( } qMerkleRoot := ` - SELECT b.height FROM blocktx.blocks b WHERE b.merkleroot = $1 AND b.height = $2 AND b.orphanedyn = false + SELECT b.height FROM blocktx.blocks b WHERE b.merkleroot = $1 AND b.height = $2 AND b.is_longest = true ` var unverifiedBlockHeights []uint64 diff --git a/internal/blocktx/store/store.go b/internal/blocktx/store/store.go index cee50ca62..ce1efb25a 100644 --- a/internal/blocktx/store/store.go +++ b/internal/blocktx/store/store.go @@ -15,6 +15,7 @@ var ( ErrUnableToPrepareStatement = errors.New("unable to prepare statement") ErrUnableToDeleteRows = errors.New("unable to delete rows") ErrFailedToInsertBlock = errors.New("failed to insert block") + ErrFailedToUpdateBlockStatuses = errors.New("failed to update block statuses") ErrFailedToOpenDB = errors.New("failed to open postgres database") ErrFailedToInsertTransactions = errors.New("failed to bulk insert transactions") ErrFailedToGetRows = errors.New("failed to get rows") @@ -32,15 +33,15 @@ type BlocktxStore interface { GetBlock(ctx context.Context, hash *chainhash.Hash) (*blocktx_api.Block, error) GetBlockByHeight(ctx context.Context, height uint64, status blocktx_api.Status) (*blocktx_api.Block, error) GetChainTip(ctx context.Context) (*blocktx_api.Block, error) - UpsertBlock(ctx context.Context, block *blocktx_api.Block) (uint64, error) + InsertBlock(ctx context.Context, block *blocktx_api.Block) (uint64, error) UpsertBlockTransactions(ctx context.Context, blockId uint64, txsWithMerklePaths []TxWithMerklePath) error MarkBlockAsDone(ctx context.Context, hash *chainhash.Hash, size uint64, txCount uint64) 
error GetBlockGaps(ctx context.Context, heightRange int) ([]*BlockGap, error) ClearBlocktxTable(ctx context.Context, retentionDays int32, table string) (*blocktx_api.RowsAffectedResponse, error) - GetMinedTransactions(ctx context.Context, hashes [][]byte) ([]TransactionBlock, error) + GetMinedTransactions(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]TransactionBlock, error) GetLongestChainFromHeight(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) GetStaleChainBackFromHash(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) - GetRegisteredTransactions(ctx context.Context, blockId uint64) ([]TransactionBlock, error) + GetOrphanedChainUpFromHash(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) GetRegisteredTxsByBlockHashes(ctx context.Context, blockHashes [][]byte) ([]TransactionBlock, error) UpdateBlocksStatuses(ctx context.Context, blockStatusUpdates []BlockStatusUpdate) error GetStats(ctx context.Context) (*Stats, error) @@ -50,6 +51,13 @@ type BlocktxStore interface { DelBlockProcessing(ctx context.Context, hash *chainhash.Hash, processedBy string) (int64, error) VerifyMerkleRoots(ctx context.Context, merkleRoots []*blocktx_api.MerkleRootVerificationRequest, maxAllowedBlockHeightMismatch int) (*blocktx_api.MerkleRootVerificationResponse, error) + BeginTx(ctx context.Context) (DbTransaction, error) Ping(ctx context.Context) error Close() error } + +type DbTransaction interface { + Commit() error + Rollback() error + WriteLockBlocksTable(ctx context.Context) error +} diff --git a/internal/blocktx/store/store_mocks.go b/internal/blocktx/store/store_mocks.go index a82b9ba66..10083b35b 100644 --- a/internal/blocktx/store/store_mocks.go +++ b/internal/blocktx/store/store_mocks.go @@ -1,3 +1,4 @@ package store //go:generate moq -pkg mocks -out ./mocks/blocktx_store_mock.go . BlocktxStore +//go:generate moq -pkg mocks -out ./mocks/blocktx_db_tx_mock.go . 
DbTransaction diff --git a/internal/metamorph/metamorph_api/metamorph_api.pb.go b/internal/metamorph/metamorph_api/metamorph_api.pb.go index b872ed9dc..62e4e97e4 100644 --- a/internal/metamorph/metamorph_api/metamorph_api.pb.go +++ b/internal/metamorph/metamorph_api/metamorph_api.pb.go @@ -1388,6 +1388,9 @@ var file_internal_metamorph_metamorph_api_metamorph_api_proto_rawDesc = []byte{ 0x61, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x11, 0x5a, 0x0f, 0x2e, 0x3b, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +<<<<<<< HEAD +>>>>>>> 03ac277e (feat: add new status MINED_IN_STALE_BLOCK and handle logic for that status) +======= >>>>>>> 03ac277e (feat: add new status MINED_IN_STALE_BLOCK and handle logic for that status) } From 0e84f195714b97510b38df66064e7f46b634e47a Mon Sep 17 00:00:00 2001 From: Kuba Date: Wed, 6 Nov 2024 17:43:56 +0100 Subject: [PATCH 15/36] feat(ARCO-199): update files after rebase --- cmd/arc/services/metamorph.go | 2 +- internal/blocktx/processor.go | 12 +- internal/blocktx/processor_helpers_test.go | 5 - internal/blocktx/processor_test.go | 32 +-- .../blocktx/store/mocks/blocktx_store_mock.go | 88 +----- .../store/postgresql/get_transactions.go | 9 +- .../blocktx/store/postgresql/postgres_test.go | 12 +- .../{insert_block.go => upsert_block.go} | 0 internal/blocktx/store/store.go | 2 +- .../metamorph_api/metamorph_api.pb.go | 254 ++++-------------- .../store/postgresql/postgres_test.go | 2 +- 11 files changed, 82 insertions(+), 336 deletions(-) rename internal/blocktx/store/postgresql/{insert_block.go => upsert_block.go} (100%) diff --git a/cmd/arc/services/metamorph.go b/cmd/arc/services/metamorph.go index cce7f6ad3..d14f2dfea 100644 --- a/cmd/arc/services/metamorph.go +++ b/cmd/arc/services/metamorph.go @@ -47,7 +47,7 @@ func StartMetamorph(logger *slog.Logger, arcConfig *config.ArcConfig, cacheStore metamorphStore store.MetamorphStore peerHandler *metamorph.PeerHandler pm metamorph.PeerManager - statusMessageCh chan *metamorph.PeerTxMessage + statusMessageCh chan *metamorph.TxStatusMessage mqClient metamorph.MessageQueueClient processor *metamorph.Processor server *metamorph.Server diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index 2040f2c3e..572a0445e 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -217,13 +217,13 @@ func (p *Processor) StartBlockProcessing() { blockHash := blockMsg.Header.BlockHash() timeStart := time.Now() - defer p.stopBlockProcessGuard(&blockhash) // release guardian at the end + defer p.stopBlockProcessGuard(&blockHash) // release guardian at the end - p.logger.Info("received block", slog.String("hash", blockhash.String())) + p.logger.Info("received block", slog.String("hash", blockHash.String())) err = p.processBlock(blockMsg) if err != nil { - p.logger.Error("block processing failed", slog.String("hash", blockhash.String()), slog.String("err", err.Error())) - p.unlockBlock(p.ctx, &blockhash) + p.logger.Error("block processing failed", slog.String("hash", blockHash.String()), slog.String("err", err.Error())) + p.unlockBlock(p.ctx, &blockHash) _, errDel := p.store.DelBlockProcessing(p.ctx, &blockHash, p.hostname) if errDel != nil { @@ -688,13 +688,13 @@ func (p *Processor) hasGreatestChainwork(ctx context.Context, competingChainTip } func (p *Processor) insertBlockAndStoreTransactions(ctx context.Context, incomingBlock *blocktx_api.Block, txHashes []*chainhash.Hash, 
merkleRoot chainhash.Hash) error { - blockId, err := p.store.InsertBlock(ctx, incomingBlock) + blockId, err := p.store.UpsertBlock(ctx, incomingBlock) if err != nil { p.logger.Error("unable to insert block at given height", slog.String("hash", getHashStringNoErr(incomingBlock.Hash)), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) return err } - calculatedMerkleTree := buildMerkleTreeStoreChainHash(ctx, txHashes) + calculatedMerkleTree := p.buildMerkleTreeStoreChainHash(ctx, txHashes) if !merkleRoot.IsEqual(calculatedMerkleTree[len(calculatedMerkleTree)-1]) { p.logger.Error("merkle root mismatch", slog.String("hash", getHashStringNoErr(incomingBlock.Hash))) diff --git a/internal/blocktx/processor_helpers_test.go b/internal/blocktx/processor_helpers_test.go index 213f90148..2aa6c0e54 100644 --- a/internal/blocktx/processor_helpers_test.go +++ b/internal/blocktx/processor_helpers_test.go @@ -6,11 +6,6 @@ import ( "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" "github.com/bitcoin-sv/arc/internal/blocktx/store" -<<<<<<< HEAD -======= - sdkTx "github.com/bitcoin-sv/go-sdk/transaction" - "github.com/stretchr/testify/assert" ->>>>>>> 35a3353a (feat: improved way of transactions publishing to metamorph, store methods refactor and test coverage) "github.com/stretchr/testify/require" ) diff --git a/internal/blocktx/processor_test.go b/internal/blocktx/processor_test.go index 94a0c0f74..065fbd7c9 100644 --- a/internal/blocktx/processor_test.go +++ b/internal/blocktx/processor_test.go @@ -174,7 +174,7 @@ func TestHandleBlock(t *testing.T) { return txMock, nil }, GetBlockFunc: func(ctx context.Context, hash *chainhash.Hash) (*blocktx_api.Block, error) { - if tc.blockAlreadyExists { + if tc.blockAlreadyProcessed { return &blocktx_api.Block{Processed: true}, nil } return nil, store.ErrBlockNotFound @@ -194,9 +194,6 @@ func TestHandleBlock(t *testing.T) { GetMinedTransactionsFunc: func(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) { return nil, nil }, - GetRegisteredTransactionsFunc: func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { - return nil, nil - }, GetRegisteredTxsByBlockHashesFunc: func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { return nil, nil }, @@ -379,10 +376,10 @@ func TestHandleBlockReorgAndOrphans(t *testing.T) { }, } storeMock := &storeMocks.BlocktxStoreMock{ - BeginTxFunc: func(ctx context.Context) (store.DbTransaction, error) { + BeginTxFunc: func(_ context.Context) (store.DbTransaction, error) { return txMock, nil }, - GetBlockFunc: func(ctx context.Context, hash *chainhash.Hash) (*blocktx_api.Block, error) { + GetBlockFunc: func(_ context.Context, _ *chainhash.Hash) (*blocktx_api.Block, error) { if shouldReturnNoBlock { shouldReturnNoBlock = false return nil, nil @@ -405,21 +402,13 @@ func TestHandleBlockReorgAndOrphans(t *testing.T) { return nil, store.ErrBlockNotFound }, GetChainTipFunc: func(_ context.Context) (*blocktx_api.Block, error) { - if tc.hasGreaterChainwork { - return &blocktx_api.Block{ - Chainwork: "42069", - }, nil - } - - return &blocktx_api.Block{ - Chainwork: "62209952899966", - }, nil + return &blocktx_api.Block{}, nil }, - InsertBlockFunc: func(ctx context.Context, block *blocktx_api.Block) (uint64, error) { + UpsertBlockFunc: func(ctx context.Context, block *blocktx_api.Block) (uint64, error) { mtx.Lock() insertedBlockStatus = block.Status mtx.Unlock() - return 1, errors.New("dummy error") // return error 
here so we don't have to override next db functions + return 1, nil }, GetOrphanedChainUpFromHashFunc: func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { if tc.shouldFindOrphanChain { @@ -497,15 +486,9 @@ func TestHandleBlockReorgAndOrphans(t *testing.T) { } return nil, nil }, - UpdateBlocksStatusesFunc: func(_ context.Context, _ []store.BlockStatusUpdate) error { - return nil - }, UpsertBlockTransactionsFunc: func(ctx context.Context, blockId uint64, txsWithMerklePaths []store.TxWithMerklePath) error { return nil }, - GetRegisteredTransactionsFunc: func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { - return nil, nil - }, GetRegisteredTxsByBlockHashesFunc: func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { return nil, nil }, @@ -515,6 +498,9 @@ func TestHandleBlockReorgAndOrphans(t *testing.T) { MarkBlockAsDoneFunc: func(ctx context.Context, hash *chainhash.Hash, size, txCount uint64) error { return nil }, + DelBlockProcessingFunc: func(ctx context.Context, hash *chainhash.Hash, processedBy string) (int64, error) { + return 0, nil + }, } // build peer manager and processor diff --git a/internal/blocktx/store/mocks/blocktx_store_mock.go b/internal/blocktx/store/mocks/blocktx_store_mock.go index 36ee6b459..ed3c26bb5 100644 --- a/internal/blocktx/store/mocks/blocktx_store_mock.go +++ b/internal/blocktx/store/mocks/blocktx_store_mock.go @@ -57,9 +57,6 @@ var _ store.BlocktxStore = &BlocktxStoreMock{} // GetOrphanedChainUpFromHashFunc: func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { // panic("mock out the GetOrphanedChainUpFromHash method") // }, -// GetRegisteredTransactionsFunc: func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { -// panic("mock out the GetRegisteredTransactions method") -// }, // GetRegisteredTxsByBlockHashesFunc: func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { // panic("mock out the GetRegisteredTxsByBlockHashes method") // }, @@ -84,18 +81,10 @@ var _ store.BlocktxStore = &BlocktxStoreMock{} // UpdateBlocksStatusesFunc: func(ctx context.Context, blockStatusUpdates []store.BlockStatusUpdate) error { // panic("mock out the UpdateBlocksStatuses method") // }, -<<<<<<< HEAD -<<<<<<< HEAD // UpsertBlockFunc: func(ctx context.Context, block *blocktx_api.Block) (uint64, error) { // panic("mock out the UpsertBlock method") // }, -// UpsertBlockTransactionsFunc: func(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) ([]store.TxWithMerklePath, error) { -======= -// UpsertBlockTransactionsFunc: func(ctx context.Context, blockId uint64, txsWithMerklePaths []store.TxWithMerklePath) error { ->>>>>>> 35a3353a (feat: improved way of transactions publishing to metamorph, store methods refactor and test coverage) -======= // UpsertBlockTransactionsFunc: func(ctx context.Context, blockId uint64, txsWithMerklePaths []store.TxWithMerklePath) error { ->>>>>>> 35a3353a (feat: improved way of transactions publishing to metamorph, store methods refactor and test coverage) // panic("mock out the UpsertBlockTransactions method") // }, // VerifyMerkleRootsFunc: func(ctx context.Context, merkleRoots []*blocktx_api.MerkleRootVerificationRequest, maxAllowedBlockHeightMismatch int) (*blocktx_api.MerkleRootVerificationResponse, error) { @@ -144,9 +133,6 @@ type BlocktxStoreMock struct { // GetOrphanedChainUpFromHashFunc mocks the GetOrphanedChainUpFromHash method. 
GetOrphanedChainUpFromHashFunc func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) - // GetRegisteredTransactionsFunc mocks the GetRegisteredTransactions method. - GetRegisteredTransactionsFunc func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) - // GetRegisteredTxsByBlockHashesFunc mocks the GetRegisteredTxsByBlockHashes method. GetRegisteredTxsByBlockHashesFunc func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) @@ -175,15 +161,7 @@ type BlocktxStoreMock struct { UpsertBlockFunc func(ctx context.Context, block *blocktx_api.Block) (uint64, error) // UpsertBlockTransactionsFunc mocks the UpsertBlockTransactions method. -<<<<<<< HEAD -<<<<<<< HEAD - UpsertBlockTransactionsFunc func(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) ([]store.TxWithMerklePath, error) -======= - UpsertBlockTransactionsFunc func(ctx context.Context, blockId uint64, txsWithMerklePaths []store.TxWithMerklePath) error ->>>>>>> 35a3353a (feat: improved way of transactions publishing to metamorph, store methods refactor and test coverage) -======= UpsertBlockTransactionsFunc func(ctx context.Context, blockId uint64, txsWithMerklePaths []store.TxWithMerklePath) error ->>>>>>> 35a3353a (feat: improved way of transactions publishing to metamorph, store methods refactor and test coverage) // VerifyMerkleRootsFunc mocks the VerifyMerkleRoots method. VerifyMerkleRootsFunc func(ctx context.Context, merkleRoots []*blocktx_api.MerkleRootVerificationRequest, maxAllowedBlockHeightMismatch int) (*blocktx_api.MerkleRootVerificationResponse, error) @@ -274,13 +252,6 @@ type BlocktxStoreMock struct { // Hash is the hash argument value. Hash []byte } - // GetRegisteredTransactions holds details about calls to the GetRegisteredTransactions method. - GetRegisteredTransactions []struct { - // Ctx is the ctx argument value. - Ctx context.Context - // BlockHashes is the blockHashes argument value. - BlockHashes [][]byte - } // GetRegisteredTxsByBlockHashes holds details about calls to the GetRegisteredTxsByBlockHashes method. GetRegisteredTxsByBlockHashes []struct { // Ctx is the ctx argument value. @@ -350,8 +321,8 @@ type BlocktxStoreMock struct { UpsertBlockTransactions []struct { // Ctx is the ctx argument value. Ctx context.Context - // BlockID is the blockID argument value. - BlockID uint64 + // BlockId is the blockId argument value. + BlockId uint64 // TxsWithMerklePaths is the txsWithMerklePaths argument value. TxsWithMerklePaths []store.TxWithMerklePath } @@ -377,7 +348,6 @@ type BlocktxStoreMock struct { lockGetLongestChainFromHeight sync.RWMutex lockGetMinedTransactions sync.RWMutex lockGetOrphanedChainUpFromHash sync.RWMutex - lockGetRegisteredTransactions sync.RWMutex lockGetRegisteredTxsByBlockHashes sync.RWMutex lockGetStaleChainBackFromHash sync.RWMutex lockGetStats sync.RWMutex @@ -822,42 +792,6 @@ func (mock *BlocktxStoreMock) GetOrphanedChainUpFromHashCalls() []struct { return calls } -// GetRegisteredTransactions calls GetRegisteredTransactionsFunc. 
-func (mock *BlocktxStoreMock) GetRegisteredTransactions(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { - if mock.GetRegisteredTransactionsFunc == nil { - panic("BlocktxStoreMock.GetRegisteredTransactionsFunc: method is nil but BlocktxStore.GetRegisteredTransactions was just called") - } - callInfo := struct { - Ctx context.Context - BlockHashes [][]byte - }{ - Ctx: ctx, - BlockHashes: blockHashes, - } - mock.lockGetRegisteredTransactions.Lock() - mock.calls.GetRegisteredTransactions = append(mock.calls.GetRegisteredTransactions, callInfo) - mock.lockGetRegisteredTransactions.Unlock() - return mock.GetRegisteredTransactionsFunc(ctx, blockHashes) -} - -// GetRegisteredTransactionsCalls gets all the calls that were made to GetRegisteredTransactions. -// Check the length with: -// -// len(mockedBlocktxStore.GetRegisteredTransactionsCalls()) -func (mock *BlocktxStoreMock) GetRegisteredTransactionsCalls() []struct { - Ctx context.Context - BlockHashes [][]byte -} { - var calls []struct { - Ctx context.Context - BlockHashes [][]byte - } - mock.lockGetRegisteredTransactions.RLock() - calls = mock.calls.GetRegisteredTransactions - mock.lockGetRegisteredTransactions.RUnlock() - return calls -} - // GetRegisteredTxsByBlockHashes calls GetRegisteredTxsByBlockHashesFunc. func (mock *BlocktxStoreMock) GetRegisteredTxsByBlockHashes(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { if mock.GetRegisteredTxsByBlockHashesFunc == nil { @@ -1187,31 +1121,23 @@ func (mock *BlocktxStoreMock) UpsertBlockCalls() []struct { } // UpsertBlockTransactions calls UpsertBlockTransactionsFunc. -<<<<<<< HEAD -<<<<<<< HEAD -func (mock *BlocktxStoreMock) UpsertBlockTransactions(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) ([]store.TxWithMerklePath, error) { -======= -func (mock *BlocktxStoreMock) UpsertBlockTransactions(ctx context.Context, blockId uint64, txsWithMerklePaths []store.TxWithMerklePath) error { ->>>>>>> 35a3353a (feat: improved way of transactions publishing to metamorph, store methods refactor and test coverage) -======= func (mock *BlocktxStoreMock) UpsertBlockTransactions(ctx context.Context, blockId uint64, txsWithMerklePaths []store.TxWithMerklePath) error { ->>>>>>> 35a3353a (feat: improved way of transactions publishing to metamorph, store methods refactor and test coverage) if mock.UpsertBlockTransactionsFunc == nil { panic("BlocktxStoreMock.UpsertBlockTransactionsFunc: method is nil but BlocktxStore.UpsertBlockTransactions was just called") } callInfo := struct { Ctx context.Context - BlockID uint64 + BlockId uint64 TxsWithMerklePaths []store.TxWithMerklePath }{ Ctx: ctx, - BlockID: blockID, + BlockId: blockId, TxsWithMerklePaths: txsWithMerklePaths, } mock.lockUpsertBlockTransactions.Lock() mock.calls.UpsertBlockTransactions = append(mock.calls.UpsertBlockTransactions, callInfo) mock.lockUpsertBlockTransactions.Unlock() - return mock.UpsertBlockTransactionsFunc(ctx, blockID, txsWithMerklePaths) + return mock.UpsertBlockTransactionsFunc(ctx, blockId, txsWithMerklePaths) } // UpsertBlockTransactionsCalls gets all the calls that were made to UpsertBlockTransactions. 
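For context, a hedged sketch of how the BeginTx / DbTransaction methods added to store.go earlier in this diff might be driven by a caller; only the interface methods come from the patch, while the helper name, the package, and the work done under the lock are assumptions.

// Illustrative sketch, not part of the patch series.
package example

import (
	"context"

	"github.com/bitcoin-sv/arc/internal/blocktx/store"
)

// withBlocksWriteLock is a hypothetical helper: it runs fn while holding the
// write lock on the blocks table inside a database transaction.
func withBlocksWriteLock(ctx context.Context, db store.BlocktxStore, fn func() error) error {
	tx, err := db.BeginTx(ctx)
	if err != nil {
		return err
	}
	// Hold the write lock on blocktx.blocks while fn runs, so concurrent
	// instances cannot insert or reorganise blocks at the same time.
	if err := tx.WriteLockBlocksTable(ctx); err != nil {
		_ = tx.Rollback()
		return err
	}
	if err := fn(); err != nil {
		_ = tx.Rollback()
		return err
	}
	// Committing ends the transaction and releases the lock; the postgres test
	// above expects ErrNoTransaction from calls made after the tx is closed.
	return tx.Commit()
}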
@@ -1220,12 +1146,12 @@ func (mock *BlocktxStoreMock) UpsertBlockTransactions(ctx context.Context, block // len(mockedBlocktxStore.UpsertBlockTransactionsCalls()) func (mock *BlocktxStoreMock) UpsertBlockTransactionsCalls() []struct { Ctx context.Context - BlockID uint64 + BlockId uint64 TxsWithMerklePaths []store.TxWithMerklePath } { var calls []struct { Ctx context.Context - BlockID uint64 + BlockId uint64 TxsWithMerklePaths []store.TxWithMerklePath } mock.lockUpsertBlockTransactions.RLock() diff --git a/internal/blocktx/store/postgresql/get_transactions.go b/internal/blocktx/store/postgresql/get_transactions.go index 7815eaecd..e78562bb3 100644 --- a/internal/blocktx/store/postgresql/get_transactions.go +++ b/internal/blocktx/store/postgresql/get_transactions.go @@ -5,16 +5,13 @@ import ( "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" "github.com/bitcoin-sv/arc/internal/blocktx/store" + "github.com/bitcoin-sv/arc/internal/tracing" "github.com/lib/pq" - "go.opentelemetry.io/otel/trace" ) func (p *PostgreSQL) GetMinedTransactions(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) { - if tracer != nil { - var span trace.Span - ctx, span = tracer.Start(ctx, "GetMinedTransactions") - defer span.End() - } + ctx, span := tracing.StartTracing(ctx, "GetMinedTransactions", p.tracingEnabled, p.tracingAttributes...) + defer tracing.EndTracing(span) if onlyLongestChain { predicate := "WHERE t.hash = ANY($1) AND b.is_longest = true" diff --git a/internal/blocktx/store/postgresql/postgres_test.go b/internal/blocktx/store/postgresql/postgres_test.go index e2400bc8b..28fc0e4df 100644 --- a/internal/blocktx/store/postgresql/postgres_test.go +++ b/internal/blocktx/store/postgresql/postgres_test.go @@ -131,7 +131,7 @@ func TestPostgresDB(t *testing.T) { var err error - t.Run("insert block / get block", func(t *testing.T) { + t.Run("upsert block / get block", func(t *testing.T) { // given prepareDb(t, postgresDB, "") @@ -171,13 +171,13 @@ func TestPostgresDB(t *testing.T) { require.Equal(t, expectedBlock, actualBlockResp) // when - id, err = postgresDB.InsertBlock(ctx, expectedBlockViolatingUniqueIndex) + id, err = postgresDB.UpsertBlock(ctx, expectedBlockViolatingUniqueIndex) // then require.True(t, errors.Is(err, store.ErrFailedToInsertBlock)) // when - id, err = postgresDB.InsertBlock(ctx, expectedBlockOverrideStatus) + id, err = postgresDB.UpsertBlock(ctx, expectedBlockOverrideStatus) require.NoError(t, err) require.Equal(t, uint64(1), id) // this should only update the status and retain the same ID @@ -394,7 +394,7 @@ func TestPostgresDB(t *testing.T) { // when err = postgresDB.UpdateBlocksStatuses(ctx, blockStatusUpdatesViolating) - require.Equal(t, store.ErrFailedToUpdateBlockStatuses, err) + require.True(t, errors.Is(err, store.ErrFailedToUpdateBlockStatuses)) }) t.Run("get mined txs", func(t *testing.T) { @@ -965,7 +965,7 @@ func TestPostgresStore_RegisterTransactions(t *testing.T) { } } -func TestInsertBlockConditions(t *testing.T) { +func TestUpsertBlockConditions(t *testing.T) { tt := []struct { name string blockStatus blocktx_api.Status @@ -1080,7 +1080,7 @@ func TestInsertBlockConditions(t *testing.T) { } // when - blockId, err := sut.InsertBlock(ctx, block) + blockId, err := sut.UpsertBlock(ctx, block) // then if tc.shouldSucceed { diff --git a/internal/blocktx/store/postgresql/insert_block.go b/internal/blocktx/store/postgresql/upsert_block.go similarity index 100% rename from internal/blocktx/store/postgresql/insert_block.go rename to 
internal/blocktx/store/postgresql/upsert_block.go diff --git a/internal/blocktx/store/store.go b/internal/blocktx/store/store.go index ce1efb25a..3eaea4015 100644 --- a/internal/blocktx/store/store.go +++ b/internal/blocktx/store/store.go @@ -33,7 +33,7 @@ type BlocktxStore interface { GetBlock(ctx context.Context, hash *chainhash.Hash) (*blocktx_api.Block, error) GetBlockByHeight(ctx context.Context, height uint64, status blocktx_api.Status) (*blocktx_api.Block, error) GetChainTip(ctx context.Context) (*blocktx_api.Block, error) - InsertBlock(ctx context.Context, block *blocktx_api.Block) (uint64, error) + UpsertBlock(ctx context.Context, block *blocktx_api.Block) (uint64, error) UpsertBlockTransactions(ctx context.Context, blockId uint64, txsWithMerklePaths []TxWithMerklePath) error MarkBlockAsDone(ctx context.Context, hash *chainhash.Hash, size uint64, txCount uint64) error GetBlockGaps(ctx context.Context, heightRange int) ([]*BlockGap, error) diff --git a/internal/metamorph/metamorph_api/metamorph_api.pb.go b/internal/metamorph/metamorph_api/metamorph_api.pb.go index 62e4e97e4..b665b3e69 100644 --- a/internal/metamorph/metamorph_api/metamorph_api.pb.go +++ b/internal/metamorph/metamorph_api/metamorph_api.pb.go @@ -1073,7 +1073,6 @@ var file_internal_metamorph_metamorph_api_metamorph_api_proto_rawDesc = []byte{ 0x0b, 0x20, 0x01, 0x28, 0x08, 0x52, 0x11, 0x66, 0x75, 0x6c, 0x6c, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x73, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x61, 0x78, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x18, 0x0c, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x6d, -<<<<<<< HEAD 0x61, 0x78, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x12, 0x19, 0x0a, 0x08, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x0d, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x22, 0x77, 0x0a, 0x13, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, @@ -1168,7 +1167,7 @@ var file_internal_metamorph_metamorph_api_metamorph_api_proto_rawDesc = []byte{ 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0c, 0x74, 0x72, 0x61, - 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2a, 0x83, 0x02, 0x0a, 0x06, 0x53, 0x74, + 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2a, 0x9d, 0x02, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x51, 0x55, 0x45, 0x55, 0x45, 0x44, 0x10, 0x0a, 0x12, 0x0c, 0x0a, 0x08, 0x52, 0x45, 0x43, 0x45, 0x49, 0x56, 0x45, 0x44, 0x10, 0x14, 0x12, 0x0a, 0x0a, 0x06, 0x53, @@ -1184,214 +1183,57 @@ var file_internal_metamorph_metamorph_api_metamorph_api_proto_rawDesc = []byte{ 0x5f, 0x4e, 0x45, 0x54, 0x57, 0x4f, 0x52, 0x4b, 0x10, 0x5a, 0x12, 0x1a, 0x0a, 0x16, 0x44, 0x4f, 0x55, 0x42, 0x4c, 0x45, 0x5f, 0x53, 0x50, 0x45, 0x4e, 0x44, 0x5f, 0x41, 0x54, 0x54, 0x45, 0x4d, 0x50, 0x54, 0x45, 0x44, 0x10, 0x64, 0x12, 0x0c, 0x0a, 0x08, 0x52, 0x45, 0x4a, 0x45, 0x43, 0x54, - 0x45, 0x44, 0x10, 0x6e, 0x12, 0x09, 0x0a, 0x05, 0x4d, 0x49, 0x4e, 0x45, 0x44, 0x10, 0x78, 0x32, - 0xdd, 0x05, 0x0a, 0x0c, 0x4d, 0x65, 0x74, 0x61, 0x4d, 0x6f, 0x72, 0x70, 0x68, 0x41, 0x50, 0x49, - 0x12, 0x41, 0x0a, 0x06, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 
0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, - 0x74, 0x79, 0x1a, 0x1d, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, - 0x70, 0x69, 0x2e, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x22, 0x00, 0x12, 0x57, 0x0a, 0x0e, 0x50, 0x75, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x21, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, - 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, - 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x00, 0x12, 0x5b, 0x0a, 0x0f, - 0x50, 0x75, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, - 0x22, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, + 0x45, 0x44, 0x10, 0x6e, 0x12, 0x18, 0x0a, 0x14, 0x4d, 0x49, 0x4e, 0x45, 0x44, 0x5f, 0x49, 0x4e, + 0x5f, 0x53, 0x54, 0x41, 0x4c, 0x45, 0x5f, 0x42, 0x4c, 0x4f, 0x43, 0x4b, 0x10, 0x73, 0x12, 0x09, + 0x0a, 0x05, 0x4d, 0x49, 0x4e, 0x45, 0x44, 0x10, 0x78, 0x32, 0xdd, 0x05, 0x0a, 0x0c, 0x4d, 0x65, + 0x74, 0x61, 0x4d, 0x6f, 0x72, 0x70, 0x68, 0x41, 0x50, 0x49, 0x12, 0x41, 0x0a, 0x06, 0x48, 0x65, + 0x61, 0x6c, 0x74, 0x68, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x1d, 0x2e, 0x6d, + 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x48, 0x65, 0x61, + 0x6c, 0x74, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x57, 0x0a, + 0x0e, 0x50, 0x75, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, + 0x21, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x73, 0x1a, 0x22, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, - 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, - 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x22, 0x00, 0x12, 0x57, 0x0a, 0x0e, 0x47, 0x65, 0x74, - 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x27, 0x2e, 0x6d, 0x65, - 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, - 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1a, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, - 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x22, 0x00, 0x12, 0x5a, 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x28, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, - 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x1b, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, - 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0x00, 0x12, 0x63, - 0x0a, 0x14, 0x47, 0x65, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 
0x6f, 0x6e, - 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x27, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, - 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x20, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, - 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, - 0x73, 0x22, 0x00, 0x12, 0x68, 0x0a, 0x11, 0x53, 0x65, 0x74, 0x55, 0x6e, 0x6c, 0x6f, 0x63, 0x6b, - 0x65, 0x64, 0x42, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x27, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, - 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x65, 0x74, 0x55, 0x6e, 0x6c, 0x6f, - 0x63, 0x6b, 0x65, 0x64, 0x42, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x28, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, - 0x69, 0x2e, 0x53, 0x65, 0x74, 0x55, 0x6e, 0x6c, 0x6f, 0x63, 0x6b, 0x65, 0x64, 0x42, 0x79, 0x4e, - 0x61, 0x6d, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x50, 0x0a, - 0x09, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x12, 0x1f, 0x2e, 0x6d, 0x65, 0x74, - 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x72, - 0x44, 0x61, 0x74, 0x61, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x6d, 0x65, - 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, - 0x72, 0x44, 0x61, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, - 0x11, 0x5a, 0x0f, 0x2e, 0x3b, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, - 0x70, 0x69, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -======= - 0x61, 0x78, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x22, 0x5c, 0x0a, 0x13, 0x54, 0x72, 0x61, - 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x73, - 0x12, 0x45, 0x0a, 0x0c, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, - 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, - 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x0c, 0x54, 0x72, 0x61, 0x6e, 0x73, - 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0xbe, 0x03, 0x0a, 0x0b, 0x54, 0x72, 0x61, 0x6e, - 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x64, - 0x5f, 0x6f, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x74, 0x69, 0x6d, 0x65, - 0x64, 0x4f, 0x75, 0x74, 0x12, 0x37, 0x0a, 0x09, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x64, 0x5f, 0x61, - 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, - 0x61, 0x6d, 0x70, 0x52, 0x08, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x64, 0x41, 0x74, 0x12, 0x3d, 0x0a, - 0x0c, 0x61, 0x6e, 0x6e, 0x6f, 0x75, 0x6e, 0x63, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, - 0x0b, 0x61, 0x6e, 0x6e, 0x6f, 0x75, 0x6e, 0x63, 0x65, 0x64, 0x41, 0x74, 0x12, 0x35, 0x0a, 0x08, - 0x6d, 0x69, 0x6e, 0x65, 0x64, 0x5f, 
0x61, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, - 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, - 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x07, 0x6d, 0x69, 0x6e, 0x65, - 0x64, 0x41, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x78, 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x04, 0x74, 0x78, 0x69, 0x64, 0x12, 0x2d, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, - 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, - 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, - 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x72, 0x65, 0x6a, 0x65, 0x63, 0x74, - 0x5f, 0x72, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x72, - 0x65, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x12, 0x22, 0x0a, 0x0c, 0x63, - 0x6f, 0x6d, 0x70, 0x65, 0x74, 0x69, 0x6e, 0x67, 0x54, 0x78, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, - 0x09, 0x52, 0x0c, 0x63, 0x6f, 0x6d, 0x70, 0x65, 0x74, 0x69, 0x6e, 0x67, 0x54, 0x78, 0x73, 0x12, - 0x21, 0x0a, 0x0c, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, - 0x09, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x69, 0x67, - 0x68, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x61, 0x73, 0x68, - 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x61, 0x73, - 0x68, 0x12, 0x15, 0x0a, 0x06, 0x72, 0x61, 0x77, 0x5f, 0x74, 0x78, 0x18, 0x0b, 0x20, 0x01, 0x28, - 0x0c, 0x52, 0x05, 0x72, 0x61, 0x77, 0x54, 0x78, 0x22, 0xd8, 0x02, 0x0a, 0x11, 0x54, 0x72, 0x61, - 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x1b, - 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x64, 0x5f, 0x6f, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x08, 0x52, 0x08, 0x74, 0x69, 0x6d, 0x65, 0x64, 0x4f, 0x75, 0x74, 0x12, 0x37, 0x0a, 0x09, 0x73, - 0x74, 0x6f, 0x72, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, - 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, - 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x08, 0x73, 0x74, 0x6f, 0x72, - 0x65, 0x64, 0x41, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x78, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x04, 0x74, 0x78, 0x69, 0x64, 0x12, 0x2d, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, - 0x75, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, - 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, - 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x23, 0x0a, 0x0d, 0x72, 0x65, 0x6a, 0x65, 0x63, - 0x74, 0x5f, 0x72, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, - 0x72, 0x65, 0x6a, 0x65, 0x63, 0x74, 0x52, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x12, 0x22, 0x0a, 0x0c, - 0x63, 0x6f, 0x6d, 0x70, 0x65, 0x74, 0x69, 0x6e, 0x67, 0x54, 0x78, 0x73, 0x18, 0x06, 0x20, 0x03, - 0x28, 0x09, 0x52, 0x0c, 0x63, 0x6f, 0x6d, 0x70, 0x65, 0x74, 0x69, 0x6e, 0x67, 0x54, 0x78, 0x73, - 0x12, 0x21, 0x0a, 0x0c, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x65, 0x69, 0x67, 0x68, 0x74, - 0x18, 0x07, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x65, 0x69, - 0x67, 0x68, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x68, 0x61, 0x73, - 0x68, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x62, 
0x6c, 0x6f, 0x63, 0x6b, 0x48, 0x61, - 0x73, 0x68, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, - 0x68, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x50, - 0x61, 0x74, 0x68, 0x22, 0x53, 0x0a, 0x13, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x12, 0x3c, 0x0a, 0x08, 0x53, 0x74, - 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x6d, - 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, - 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x08, - 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, 0x73, 0x22, 0x2e, 0x0a, 0x18, 0x54, 0x72, 0x61, 0x6e, - 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x78, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x04, 0x74, 0x78, 0x69, 0x64, 0x22, 0x2e, 0x0a, 0x18, 0x53, 0x65, 0x74, 0x55, - 0x6e, 0x6c, 0x6f, 0x63, 0x6b, 0x65, 0x64, 0x42, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x46, 0x0a, 0x19, 0x53, 0x65, 0x74, 0x55, - 0x6e, 0x6c, 0x6f, 0x63, 0x6b, 0x65, 0x64, 0x42, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x29, 0x0a, 0x10, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x73, - 0x5f, 0x61, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, - 0x0f, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x73, 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, - 0x22, 0x38, 0x0a, 0x10, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x0d, 0x72, 0x65, 0x74, 0x65, 0x6e, 0x74, 0x69, 0x6f, - 0x6e, 0x44, 0x61, 0x79, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0d, 0x72, 0x65, 0x74, - 0x65, 0x6e, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x61, 0x79, 0x73, 0x22, 0x3e, 0x0a, 0x11, 0x43, 0x6c, - 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, - 0x29, 0x0a, 0x10, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x73, 0x5f, 0x61, 0x66, 0x66, 0x65, 0x63, - 0x74, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0f, 0x72, 0x65, 0x63, 0x6f, 0x72, - 0x64, 0x73, 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x22, 0x31, 0x0a, 0x19, 0x54, 0x72, - 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x78, 0x49, 0x44, 0x73, - 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x74, 0x78, 0x49, 0x44, 0x73, 0x22, 0x4e, 0x0a, - 0x0c, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x3e, 0x0a, - 0x0c, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, - 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, - 0x0c, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2a, 0x9d, 0x02, - 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, - 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x51, 0x55, 0x45, 0x55, 0x45, 
0x44, 0x10, - 0x0a, 0x12, 0x0c, 0x0a, 0x08, 0x52, 0x45, 0x43, 0x45, 0x49, 0x56, 0x45, 0x44, 0x10, 0x14, 0x12, - 0x0a, 0x0a, 0x06, 0x53, 0x54, 0x4f, 0x52, 0x45, 0x44, 0x10, 0x1e, 0x12, 0x18, 0x0a, 0x14, 0x41, - 0x4e, 0x4e, 0x4f, 0x55, 0x4e, 0x43, 0x45, 0x44, 0x5f, 0x54, 0x4f, 0x5f, 0x4e, 0x45, 0x54, 0x57, - 0x4f, 0x52, 0x4b, 0x10, 0x28, 0x12, 0x18, 0x0a, 0x14, 0x52, 0x45, 0x51, 0x55, 0x45, 0x53, 0x54, - 0x45, 0x44, 0x5f, 0x42, 0x59, 0x5f, 0x4e, 0x45, 0x54, 0x57, 0x4f, 0x52, 0x4b, 0x10, 0x32, 0x12, - 0x13, 0x0a, 0x0f, 0x53, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x4f, 0x5f, 0x4e, 0x45, 0x54, 0x57, 0x4f, - 0x52, 0x4b, 0x10, 0x3c, 0x12, 0x17, 0x0a, 0x13, 0x41, 0x43, 0x43, 0x45, 0x50, 0x54, 0x45, 0x44, - 0x5f, 0x42, 0x59, 0x5f, 0x4e, 0x45, 0x54, 0x57, 0x4f, 0x52, 0x4b, 0x10, 0x46, 0x12, 0x1a, 0x0a, - 0x16, 0x53, 0x45, 0x45, 0x4e, 0x5f, 0x49, 0x4e, 0x5f, 0x4f, 0x52, 0x50, 0x48, 0x41, 0x4e, 0x5f, - 0x4d, 0x45, 0x4d, 0x50, 0x4f, 0x4f, 0x4c, 0x10, 0x50, 0x12, 0x13, 0x0a, 0x0f, 0x53, 0x45, 0x45, - 0x4e, 0x5f, 0x4f, 0x4e, 0x5f, 0x4e, 0x45, 0x54, 0x57, 0x4f, 0x52, 0x4b, 0x10, 0x5a, 0x12, 0x1a, - 0x0a, 0x16, 0x44, 0x4f, 0x55, 0x42, 0x4c, 0x45, 0x5f, 0x53, 0x50, 0x45, 0x4e, 0x44, 0x5f, 0x41, - 0x54, 0x54, 0x45, 0x4d, 0x50, 0x54, 0x45, 0x44, 0x10, 0x64, 0x12, 0x0c, 0x0a, 0x08, 0x52, 0x45, - 0x4a, 0x45, 0x43, 0x54, 0x45, 0x44, 0x10, 0x6e, 0x12, 0x18, 0x0a, 0x14, 0x4d, 0x49, 0x4e, 0x45, - 0x44, 0x5f, 0x49, 0x4e, 0x5f, 0x53, 0x54, 0x41, 0x4c, 0x45, 0x5f, 0x42, 0x4c, 0x4f, 0x43, 0x4b, - 0x10, 0x73, 0x12, 0x09, 0x0a, 0x05, 0x4d, 0x49, 0x4e, 0x45, 0x44, 0x10, 0x78, 0x32, 0xdd, 0x05, - 0x0a, 0x0c, 0x4d, 0x65, 0x74, 0x61, 0x4d, 0x6f, 0x72, 0x70, 0x68, 0x41, 0x50, 0x49, 0x12, 0x41, - 0x0a, 0x06, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, - 0x1a, 0x1d, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, - 0x2e, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, - 0x00, 0x12, 0x57, 0x0a, 0x0e, 0x50, 0x75, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x12, 0x21, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, - 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, - 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x00, 0x12, 0x5b, 0x0a, 0x0f, 0x50, 0x75, - 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x22, 0x2e, - 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, - 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x73, 0x1a, 0x22, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, - 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, - 0x74, 0x75, 0x73, 0x65, 0x73, 0x22, 0x00, 0x12, 0x57, 0x0a, 0x0e, 0x47, 0x65, 0x74, 0x54, 0x72, - 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x27, 0x2e, 0x6d, 0x65, 0x74, 0x61, + 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, + 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, + 0x61, 0x74, 
0x75, 0x73, 0x22, 0x00, 0x12, 0x5b, 0x0a, 0x0f, 0x50, 0x75, 0x74, 0x54, 0x72, 0x61, + 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x22, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x1a, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, - 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x00, - 0x12, 0x5a, 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x73, 0x12, 0x28, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, - 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, - 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x73, 0x1a, 0x22, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, - 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0x00, 0x12, 0x63, 0x0a, 0x14, - 0x47, 0x65, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, - 0x61, 0x74, 0x75, 0x73, 0x12, 0x27, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, - 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, - 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, - 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, - 0x00, 0x12, 0x68, 0x0a, 0x11, 0x53, 0x65, 0x74, 0x55, 0x6e, 0x6c, 0x6f, 0x63, 0x6b, 0x65, 0x64, - 0x42, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x27, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, - 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x65, 0x74, 0x55, 0x6e, 0x6c, 0x6f, 0x63, 0x6b, - 0x65, 0x64, 0x42, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x65, + 0x73, 0x22, 0x00, 0x12, 0x57, 0x0a, 0x0e, 0x47, 0x65, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x27, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, + 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1a, + 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, + 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x00, 0x12, 0x5a, 0x0a, 0x0f, + 0x47, 0x65, 0x74, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x28, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, - 0x53, 0x65, 0x74, 0x55, 0x6e, 0x6c, 0x6f, 0x63, 0x6b, 0x65, 0x64, 0x42, 0x79, 0x4e, 0x61, 0x6d, - 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x50, 0x0a, 0x09, 0x43, - 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x12, 0x1f, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, - 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, - 0x74, 0x61, 0x52, 0x65, 0x71, 0x75, 0x65, 
0x73, 0x74, 0x1a, 0x20, 0x2e, 0x6d, 0x65, 0x74, 0x61, - 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, - 0x61, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x11, 0x5a, - 0x0f, 0x2e, 0x3b, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, - 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -<<<<<<< HEAD ->>>>>>> 03ac277e (feat: add new status MINED_IN_STALE_BLOCK and handle logic for that status) -======= ->>>>>>> 03ac277e (feat: add new status MINED_IN_STALE_BLOCK and handle logic for that status) + 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x53, 0x74, 0x61, 0x74, + 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x6d, 0x65, 0x74, 0x61, + 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0x00, 0x12, 0x63, 0x0a, 0x14, 0x47, 0x65, 0x74, 0x54, + 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, + 0x12, 0x27, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, + 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, + 0x75, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x6d, 0x65, 0x74, 0x61, + 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x00, 0x12, 0x68, 0x0a, + 0x11, 0x53, 0x65, 0x74, 0x55, 0x6e, 0x6c, 0x6f, 0x63, 0x6b, 0x65, 0x64, 0x42, 0x79, 0x4e, 0x61, + 0x6d, 0x65, 0x12, 0x27, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, + 0x70, 0x69, 0x2e, 0x53, 0x65, 0x74, 0x55, 0x6e, 0x6c, 0x6f, 0x63, 0x6b, 0x65, 0x64, 0x42, 0x79, + 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x28, 0x2e, 0x6d, 0x65, + 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x65, 0x74, 0x55, + 0x6e, 0x6c, 0x6f, 0x63, 0x6b, 0x65, 0x64, 0x42, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x50, 0x0a, 0x09, 0x43, 0x6c, 0x65, 0x61, 0x72, + 0x44, 0x61, 0x74, 0x61, 0x12, 0x1f, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, + 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x6d, 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, + 0x68, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x11, 0x5a, 0x0f, 0x2e, 0x3b, 0x6d, + 0x65, 0x74, 0x61, 0x6d, 0x6f, 0x72, 0x70, 0x68, 0x5f, 0x61, 0x70, 0x69, 0x62, 0x06, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/internal/metamorph/store/postgresql/postgres_test.go b/internal/metamorph/store/postgresql/postgres_test.go index 1e554166d..3a29f1292 100644 --- a/internal/metamorph/store/postgresql/postgres_test.go +++ b/internal/metamorph/store/postgresql/postgres_test.go @@ -707,7 +707,7 @@ func TestPostgresDB(t *testing.T) { unmined.BlockHeight = 100 unmined.BlockHash = testdata.Block2Hash unmined.MerklePath = "merkle-path-1" - unmined.StatusHistory = append(unmined.StatusHistory, &store.StoreStatus{ + unmined.StatusHistory = append(unmined.StatusHistory, &store.Status{ Status: unmined.Status, Timestamp: unmined.LastModified, }) From 
774498272bb21e5ada9248bdb41b307514c46db4 Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Wed, 6 Nov 2024 19:22:36 +0100 Subject: [PATCH 16/36] chore(ARCO-199): cosmetic fixes --- .../reorg_integration_test.go | 4 +- internal/blocktx/processor.go | 40 +++++++------------ internal/blocktx/processor_test.go | 36 ++++++++--------- .../store/postgresql/get_transactions.go | 10 ++--- .../store/postgresql/mark_block_as_done.go | 2 +- .../blocktx/store/postgresql/postgres_test.go | 8 ++-- internal/blocktx/store/store.go | 2 +- .../metamorph/store/postgresql/postgres.go | 2 +- 8 files changed, 47 insertions(+), 57 deletions(-) diff --git a/internal/blocktx/integration_test/reorg_integration_test.go b/internal/blocktx/integration_test/reorg_integration_test.go index f9556f3b2..35e53803a 100644 --- a/internal/blocktx/integration_test/reorg_integration_test.go +++ b/internal/blocktx/integration_test/reorg_integration_test.go @@ -220,8 +220,8 @@ func testHandleBlockOnEmptyDatabase(t *testing.T, p2pMsgHandler *blocktx_p2p.Msg func testHandleStaleBlock(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store *postgresql.PostgreSQL) []*blocktx_api.TransactionBlock { prevBlockHash := testutils.RevChainhash(t, blockHash822014_startOfChain) - txHash := testutils.RevChainhash(t, "cd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853") - txHash2 := testutils.RevChainhash(t, "b16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430") // should not be published - is already in the longest chain + txHash := testutils.RevChainhash(t, txhash822015) + txHash2 := testutils.RevChainhash(t, txhash822015_2) // should not be published - is already in the longest chain treeStore := bc.BuildMerkleTreeStoreChainHash([]*chainhash.Hash{txHash, txHash2}) merkleRoot := treeStore[len(treeStore)-1] diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index 572a0445e..8eb2cec5a 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -217,29 +217,28 @@ func (p *Processor) StartBlockProcessing() { blockHash := blockMsg.Header.BlockHash() timeStart := time.Now() - defer p.stopBlockProcessGuard(&blockHash) // release guardian at the end - p.logger.Info("received block", slog.String("hash", blockHash.String())) + err = p.processBlock(blockMsg) if err != nil { p.logger.Error("block processing failed", slog.String("hash", blockHash.String()), slog.String("err", err.Error())) p.unlockBlock(p.ctx, &blockHash) - - _, errDel := p.store.DelBlockProcessing(p.ctx, &blockHash, p.hostname) - if errDel != nil { - p.logger.Error("failed to delete block processing", slog.String("hash", blockHash.String()), slog.String("err", errDel.Error())) - } + p.stopBlockProcessGuard(&blockHash) // release guardian continue + } - err = p.store.MarkBlockAsDone(p.ctx, &blockHash, blockMsg.Size, uint64(len(blockMsg.TransactionHashes))) - if err != nil { - p.logger.Error("unable to mark block as processed", slog.String("hash", blockHash.String()), slog.String("err", err.Error())) + storeErr := p.store.MarkBlockAsDone(p.ctx, &blockHash, blockMsg.Size, uint64(len(blockMsg.TransactionHashes))) + if storeErr != nil { + p.logger.Error("unable to mark block as processed", slog.String("hash", blockHash.String()), slog.String("err", storeErr.Error())) + p.unlockBlock(p.ctx, &blockHash) + p.stopBlockProcessGuard(&blockHash) // release guardian continue } // add the total block processing time to the stats p.logger.Info("Processed block", slog.String("hash", blockHash.String()), slog.Int("txs", 
len(blockMsg.TransactionHashes)), slog.String("duration", time.Since(timeStart).String())) + p.stopBlockProcessGuard(&blockHash) // release guardian } } }() @@ -249,7 +248,7 @@ func (p *Processor) startBlockProcessGuard(ctx context.Context, hash *chainhash. p.waitGroup.Add(1) execCtx, stopFn := context.WithCancel(ctx) - p.processGuardsMap.Store(hash, stopFn) + p.processGuardsMap.Store(*hash, stopFn) go func() { defer p.waitGroup.Done() @@ -688,7 +687,7 @@ func (p *Processor) hasGreatestChainwork(ctx context.Context, competingChainTip } func (p *Processor) insertBlockAndStoreTransactions(ctx context.Context, incomingBlock *blocktx_api.Block, txHashes []*chainhash.Hash, merkleRoot chainhash.Hash) error { - blockId, err := p.store.UpsertBlock(ctx, incomingBlock) + blockID, err := p.store.UpsertBlock(ctx, incomingBlock) if err != nil { p.logger.Error("unable to insert block at given height", slog.String("hash", getHashStringNoErr(incomingBlock.Hash)), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) return err @@ -701,7 +700,7 @@ func (p *Processor) insertBlockAndStoreTransactions(ctx context.Context, incomin return err } - if err = p.storeTransactions(ctx, blockId, incomingBlock, calculatedMerkleTree); err != nil { + if err = p.storeTransactions(ctx, blockID, incomingBlock, calculatedMerkleTree); err != nil { p.logger.Error("unable to store transactions from block", slog.String("hash", getHashStringNoErr(incomingBlock.Hash)), slog.String("err", err.Error())) return err } @@ -709,7 +708,7 @@ func (p *Processor) insertBlockAndStoreTransactions(ctx context.Context, incomin return nil } -func (p *Processor) storeTransactions(ctx context.Context, blockId uint64, block *blocktx_api.Block, merkleTree []*chainhash.Hash) (err error) { +func (p *Processor) storeTransactions(ctx context.Context, blockID uint64, block *blocktx_api.Block, merkleTree []*chainhash.Hash) (err error) { ctx, span := tracing.StartTracing(ctx, "markTransactionsAsMined", p.tracingEnabled, p.tracingAttributes...) defer func() { tracing.EndTracing(span, err) @@ -759,7 +758,7 @@ func (p *Processor) storeTransactions(ctx context.Context, blockId uint64, block }) if (txIndex+1)%p.transactionStorageBatchSize == 0 { - err := p.store.UpsertBlockTransactions(ctx, blockId, txs) + err := p.store.UpsertBlockTransactions(ctx, blockID, txs) if err != nil { return errors.Join(ErrFailedToInsertBlockTransactions, err) } @@ -777,7 +776,7 @@ func (p *Processor) storeTransactions(ctx context.Context, blockId uint64, block tracing.EndTracing(iterateMerkleTree, nil) // update all remaining transactions - err = p.store.UpsertBlockTransactions(ctx, blockId, txs) + err = p.store.UpsertBlockTransactions(ctx, blockID, txs) if err != nil { return errors.Join(ErrFailedToInsertBlockTransactions, fmt.Errorf("block height: %d", block.Height), err) } @@ -979,15 +978,6 @@ func (p *Processor) getStaleTxs(ctx context.Context, staleChain chain) ([]store. 
return staleTxs, nil } -const ( - hoursPerDay = 24 - blocksPerHour = 6 -) - -func (p *Processor) getRetentionHeightRange() int { - return p.dataRetentionDays * hoursPerDay * blocksPerHour -} - func (p *Processor) Shutdown() { p.cancelAll() p.waitGroup.Wait() diff --git a/internal/blocktx/processor_test.go b/internal/blocktx/processor_test.go index 065fbd7c9..1a8ed1469 100644 --- a/internal/blocktx/processor_test.go +++ b/internal/blocktx/processor_test.go @@ -165,15 +165,15 @@ func TestHandleBlock(t *testing.T) { RollbackFunc: func() error { return nil }, - WriteLockBlocksTableFunc: func(ctx context.Context) error { + WriteLockBlocksTableFunc: func(_ context.Context) error { return nil }, } storeMock := &storeMocks.BlocktxStoreMock{ - BeginTxFunc: func(ctx context.Context) (store.DbTransaction, error) { + BeginTxFunc: func(_ context.Context) (store.DbTransaction, error) { return txMock, nil }, - GetBlockFunc: func(ctx context.Context, hash *chainhash.Hash) (*blocktx_api.Block, error) { + GetBlockFunc: func(_ context.Context, _ *chainhash.Hash) (*blocktx_api.Block, error) { if tc.blockAlreadyProcessed { return &blocktx_api.Block{Processed: true}, nil } @@ -188,13 +188,13 @@ func TestHandleBlock(t *testing.T) { UpsertBlockFunc: func(_ context.Context, _ *blocktx_api.Block) (uint64, error) { return 0, nil }, - GetOrphanedChainUpFromHashFunc: func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { + GetOrphanedChainUpFromHashFunc: func(_ context.Context, _ []byte) ([]*blocktx_api.Block, error) { return nil, nil }, - GetMinedTransactionsFunc: func(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) { + GetMinedTransactionsFunc: func(_ context.Context, _ [][]byte, _ bool) ([]store.TransactionBlock, error) { return nil, nil }, - GetRegisteredTxsByBlockHashesFunc: func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { + GetRegisteredTxsByBlockHashesFunc: func(_ context.Context, _ [][]byte) ([]store.TransactionBlock, error) { return nil, nil }, MarkBlockAsDoneFunc: func(_ context.Context, _ *chainhash.Hash, _ uint64, _ uint64) error { return nil }, @@ -371,7 +371,7 @@ func TestHandleBlockReorgAndOrphans(t *testing.T) { RollbackFunc: func() error { return nil }, - WriteLockBlocksTableFunc: func(ctx context.Context) error { + WriteLockBlocksTableFunc: func(_ context.Context) error { return nil }, } @@ -404,13 +404,13 @@ func TestHandleBlockReorgAndOrphans(t *testing.T) { GetChainTipFunc: func(_ context.Context) (*blocktx_api.Block, error) { return &blocktx_api.Block{}, nil }, - UpsertBlockFunc: func(ctx context.Context, block *blocktx_api.Block) (uint64, error) { + UpsertBlockFunc: func(_ context.Context, block *blocktx_api.Block) (uint64, error) { mtx.Lock() insertedBlockStatus = block.Status mtx.Unlock() return 1, nil }, - GetOrphanedChainUpFromHashFunc: func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { + GetOrphanedChainUpFromHashFunc: func(_ context.Context, _ []byte) ([]*blocktx_api.Block, error) { if tc.shouldFindOrphanChain { return []*blocktx_api.Block{ { @@ -424,7 +424,7 @@ func TestHandleBlockReorgAndOrphans(t *testing.T) { return nil, nil }, - UpdateBlocksStatusesFunc: func(ctx context.Context, blockStatusUpdates []store.BlockStatusUpdate) error { + UpdateBlocksStatusesFunc: func(_ context.Context, blockStatusUpdates []store.BlockStatusUpdate) error { if shouldCheckUpdateStatuses && tc.shouldFindOrphanChain { mtx.Lock() shouldCheckUpdateStatuses = false @@ -435,7 +435,7 @@ func 
TestHandleBlockReorgAndOrphans(t *testing.T) { } return nil }, - GetStaleChainBackFromHashFunc: func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { + GetStaleChainBackFromHashFunc: func(_ context.Context, hash []byte) ([]*blocktx_api.Block, error) { if comparingChainwork { if tc.shouldFindOrphanChain { require.Equal(t, orphanedChainTip.Hash, hash) @@ -472,7 +472,7 @@ func TestHandleBlockReorgAndOrphans(t *testing.T) { mtx.Unlock() return nil, nil }, - GetLongestChainFromHeightFunc: func(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) { + GetLongestChainFromHeightFunc: func(_ context.Context, _ uint64) ([]*blocktx_api.Block, error) { if comparingChainwork { comparingChainwork = false return []*blocktx_api.Block{ @@ -486,19 +486,19 @@ func TestHandleBlockReorgAndOrphans(t *testing.T) { } return nil, nil }, - UpsertBlockTransactionsFunc: func(ctx context.Context, blockId uint64, txsWithMerklePaths []store.TxWithMerklePath) error { + UpsertBlockTransactionsFunc: func(_ context.Context, _ uint64, _ []store.TxWithMerklePath) error { return nil }, - GetRegisteredTxsByBlockHashesFunc: func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { + GetRegisteredTxsByBlockHashesFunc: func(_ context.Context, _ [][]byte) ([]store.TransactionBlock, error) { return nil, nil }, - GetMinedTransactionsFunc: func(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) { + GetMinedTransactionsFunc: func(_ context.Context, _ [][]byte, _ bool) ([]store.TransactionBlock, error) { return nil, nil }, - MarkBlockAsDoneFunc: func(ctx context.Context, hash *chainhash.Hash, size, txCount uint64) error { + MarkBlockAsDoneFunc: func(_ context.Context, _ *chainhash.Hash, _, _ uint64) error { return nil }, - DelBlockProcessingFunc: func(ctx context.Context, hash *chainhash.Hash, processedBy string) (int64, error) { + DelBlockProcessingFunc: func(_ context.Context, _ *chainhash.Hash, _ string) (int64, error) { return 0, nil }, } @@ -790,7 +790,7 @@ func TestStartProcessRequestTxs(t *testing.T) { t.Run(tc.name, func(t *testing.T) { // given storeMock := &storeMocks.BlocktxStoreMock{ - GetMinedTransactionsFunc: func(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) { + GetMinedTransactionsFunc: func(_ context.Context, hashes [][]byte, _ bool) ([]store.TransactionBlock, error) { for _, hash := range hashes { require.Equal(t, testdata.TX1Hash[:], hash) } diff --git a/internal/blocktx/store/postgresql/get_transactions.go b/internal/blocktx/store/postgresql/get_transactions.go index e78562bb3..885f5670c 100644 --- a/internal/blocktx/store/postgresql/get_transactions.go +++ b/internal/blocktx/store/postgresql/get_transactions.go @@ -40,12 +40,12 @@ func (p *PostgreSQL) getTransactionBlocksByPredicate(ctx context.Context, predic SELECT t.hash, b.hash, - b.height, - m.merkle_path, + b.height, + m.merkle_path, b.status - FROM blocktx.transactions AS t - JOIN blocktx.block_transactions_map AS m ON t.id = m.txid - JOIN blocktx.blocks AS b ON m.blockid = b.id + FROM blocktx.transactions AS t + JOIN blocktx.block_transactions_map AS m ON t.id = m.txid + JOIN blocktx.blocks AS b ON m.blockid = b.id ` q += " " + predicate diff --git a/internal/blocktx/store/postgresql/mark_block_as_done.go b/internal/blocktx/store/postgresql/mark_block_as_done.go index c17bf58b4..6cda2c67b 100644 --- a/internal/blocktx/store/postgresql/mark_block_as_done.go +++ 
b/internal/blocktx/store/postgresql/mark_block_as_done.go @@ -19,7 +19,7 @@ func (p *PostgreSQL) MarkBlockAsDone(ctx context.Context, hash *chainhash.Hash, SET processed_at = $4, size = $1, tx_count = $2 - WHERE hash = $3 AND processed_at IS NULL + WHERE hash = $3 ` if _, err = p.db.ExecContext(ctx, q, size, txCount, hash[:], p.now()); err != nil { diff --git a/internal/blocktx/store/postgresql/postgres_test.go b/internal/blocktx/store/postgresql/postgres_test.go index 28fc0e4df..f1bba129f 100644 --- a/internal/blocktx/store/postgresql/postgres_test.go +++ b/internal/blocktx/store/postgresql/postgres_test.go @@ -171,7 +171,7 @@ func TestPostgresDB(t *testing.T) { require.Equal(t, expectedBlock, actualBlockResp) // when - id, err = postgresDB.UpsertBlock(ctx, expectedBlockViolatingUniqueIndex) + _, err = postgresDB.UpsertBlock(ctx, expectedBlockViolatingUniqueIndex) // then require.True(t, errors.Is(err, store.ErrFailedToInsertBlock)) @@ -1080,14 +1080,14 @@ func TestUpsertBlockConditions(t *testing.T) { } // when - blockId, err := sut.UpsertBlock(ctx, block) + blockID, err := sut.UpsertBlock(ctx, block) // then if tc.shouldSucceed { - require.NotEqual(t, uint64(0), blockId) + require.NotEqual(t, uint64(0), blockID) require.NoError(t, err) } else { - require.Equal(t, uint64(0), blockId) + require.Equal(t, uint64(0), blockID) require.True(t, errors.Is(err, store.ErrFailedToInsertBlock)) } }) diff --git a/internal/blocktx/store/store.go b/internal/blocktx/store/store.go index 3eaea4015..29fa9aa9b 100644 --- a/internal/blocktx/store/store.go +++ b/internal/blocktx/store/store.go @@ -34,7 +34,7 @@ type BlocktxStore interface { GetBlockByHeight(ctx context.Context, height uint64, status blocktx_api.Status) (*blocktx_api.Block, error) GetChainTip(ctx context.Context) (*blocktx_api.Block, error) UpsertBlock(ctx context.Context, block *blocktx_api.Block) (uint64, error) - UpsertBlockTransactions(ctx context.Context, blockId uint64, txsWithMerklePaths []TxWithMerklePath) error + UpsertBlockTransactions(ctx context.Context, blockID uint64, txsWithMerklePaths []TxWithMerklePath) error MarkBlockAsDone(ctx context.Context, hash *chainhash.Hash, size uint64, txCount uint64) error GetBlockGaps(ctx context.Context, heightRange int) ([]*BlockGap, error) ClearBlocktxTable(ctx context.Context, retentionDays int32, table string) (*blocktx_api.RowsAffectedResponse, error) diff --git a/internal/metamorph/store/postgresql/postgres.go b/internal/metamorph/store/postgresql/postgres.go index 033997f34..46d66a600 100644 --- a/internal/metamorph/store/postgresql/postgres.go +++ b/internal/metamorph/store/postgresql/postgres.go @@ -853,7 +853,7 @@ func (p *PostgreSQL) UpdateMined(ctx context.Context, txsBlocks []*blocktx_api.T SELECT * FROM UNNEST($2::INT[], $3::BYTEA[], $4::BYTEA[], $5::BIGINT[], $6::TEXT[]) - AS t(mined_status, hash, block_hash, block_height, merkle_path) + AS t(mined_status, hash, block_hash, block_height, merkle_path) ) AS bulk_query WHERE t.hash=bulk_query.hash From de2336aec87ae8a45a152aecd7fa0f3dbb868ed5 Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Wed, 6 Nov 2024 19:29:34 +0100 Subject: [PATCH 17/36] chore(ARCO-199): satisfy linter and generate mocks --- .../reorg_integration_test.go | 88 +++++++++---------- internal/blocktx/processor.go | 1 - internal/blocktx/processor_test.go | 11 ++- .../blocktx/store/mocks/blocktx_store_mock.go | 20 ++--- .../store/postgresql/update_block_statuses.go | 6 +- 5 files changed, 62 insertions(+), 64 deletions(-) diff --git 
a/internal/blocktx/integration_test/reorg_integration_test.go b/internal/blocktx/integration_test/reorg_integration_test.go index 35e53803a..9f3f0dd1b 100644 --- a/internal/blocktx/integration_test/reorg_integration_test.go +++ b/internal/blocktx/integration_test/reorg_integration_test.go @@ -106,25 +106,25 @@ func testmain(m *testing.M) int { const ( blockHash822011 = "bf9be09b345cc2d904b59951cc8a2ed452d8d143e2e25cde64058270fb3a667a" - blockHash822014_startOfChain = "f97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000" - blockHash822015 = "c9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000" - blockHash822016 = "e1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000" - blockHash822017 = "76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000" - - blockHash822015_fork = "82471bbf045ab13825a245b37de71d77ec12513b37e2524ec11551d18c19f7c3" - blockHash822016_fork = "032c3688bc7536b2d787f3a196b1145a09bf33183cd1448ff6b1a9dfbb022db8" - - blockHash822018 = "212a7598a62295f1a520ef525a34f657bc636d9da9bda74acdf6f051cd84c353" - blockHash822019_orphan = "00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656" - blockHash822020_orphan = "00000000000000000a5c4d27edc0178e953a5bb0ab0081e66cb30c8890484076" - blockHash822021 = "743c7dc491ae5fddd37ebf63058f9574b4db9f6a89f483a4baec31820e5df61d" - blockHash822022_orphan = "0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c" - blockHash822023_orphan = "0000000000000000082131979a4e25a5101912a5f8461e18f306d23e158161cd" - - txhash822015 = "cd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853" - txhash822015_2 = "b16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430" - txhash822016 = "2ff4430eb883c6f6c0640a5d716b2d107bbc0efa5aeaa237aec796d4686b0a8f" - txhash822017 = "ece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6" + blockHash822014StartOfChain = "f97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000" + blockHash822015 = "c9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000" + blockHash822016 = "e1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000" + blockHash822017 = "76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000" + + blockHash822015Fork = "82471bbf045ab13825a245b37de71d77ec12513b37e2524ec11551d18c19f7c3" + blockHash822016Fork = "032c3688bc7536b2d787f3a196b1145a09bf33183cd1448ff6b1a9dfbb022db8" + + blockHash822018 = "212a7598a62295f1a520ef525a34f657bc636d9da9bda74acdf6f051cd84c353" + blockHash822019Orphan = "00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656" + blockHash822020Orphan = "00000000000000000a5c4d27edc0178e953a5bb0ab0081e66cb30c8890484076" + blockHash822021 = "743c7dc491ae5fddd37ebf63058f9574b4db9f6a89f483a4baec31820e5df61d" + blockHash822022Orphan = "0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c" + blockHash822023Orphan = "0000000000000000082131979a4e25a5101912a5f8461e18f306d23e158161cd" + + txhash822015 = "cd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853" + txhash822015Competing = "b16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430" + txhash822016 = "2ff4430eb883c6f6c0640a5d716b2d107bbc0efa5aeaa237aec796d4686b0a8f" + txhash822017 = "ece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6" ) func TestReorg(t *testing.T) { @@ -145,7 +145,7 @@ func TestReorg(t *testing.T) { publishedTxs := make([]*blocktx_api.TransactionBlock, 0) mockNatsConn := &nats_mock.NatsConnectionMock{ - PublishFunc: func(subj string, data []byte) 
error { + PublishFunc: func(_ string, data []byte) error { serialized := &blocktx_api.TransactionBlock{} err := proto.Unmarshal(data, serialized) require.NoError(t, err) @@ -219,9 +219,9 @@ func testHandleBlockOnEmptyDatabase(t *testing.T, p2pMsgHandler *blocktx_p2p.Msg } func testHandleStaleBlock(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store *postgresql.PostgreSQL) []*blocktx_api.TransactionBlock { - prevBlockHash := testutils.RevChainhash(t, blockHash822014_startOfChain) + prevBlockHash := testutils.RevChainhash(t, blockHash822014StartOfChain) txHash := testutils.RevChainhash(t, txhash822015) - txHash2 := testutils.RevChainhash(t, txhash822015_2) // should not be published - is already in the longest chain + txHash2 := testutils.RevChainhash(t, txhash822015Competing) // should not be published - is already in the longest chain treeStore := bc.BuildMerkleTreeStoreChainHash([]*chainhash.Hash{txHash, txHash2}) merkleRoot := treeStore[len(treeStore)-1] @@ -242,7 +242,7 @@ func testHandleStaleBlock(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, s // Allow DB to process the block time.Sleep(200 * time.Millisecond) - verifyBlock(t, store, blockHash822015_fork, 822015, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822015Fork, 822015, blocktx_api.Status_STALE) // transactions expected to be published to metamorph expectedTxs := []*blocktx_api.TransactionBlock{ @@ -262,7 +262,7 @@ func testHandleReorg(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store txHash2 := testutils.RevChainhash(t, "ee76f5b746893d3e6ae6a14a15e464704f4ebd601537820933789740acdcf6aa") treeStore := bc.BuildMerkleTreeStoreChainHash([]*chainhash.Hash{txHash, txHash2}) merkleRoot := treeStore[len(treeStore)-1] - prevhash := testutils.RevChainhash(t, blockHash822015_fork) + prevhash := testutils.RevChainhash(t, blockHash822015Fork) // should become LONGEST // reorg should happen @@ -283,15 +283,15 @@ func testHandleReorg(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store time.Sleep(1 * time.Second) // verify that reorg happened - verifyBlock(t, store, blockHash822016_fork, 822016, blocktx_api.Status_LONGEST) - verifyBlock(t, store, blockHash822015_fork, 822015, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822016Fork, 822016, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822015Fork, 822015, blocktx_api.Status_LONGEST) verifyBlock(t, store, blockHash822015, 822015, blocktx_api.Status_STALE) verifyBlock(t, store, blockHash822016, 822016, blocktx_api.Status_STALE) verifyBlock(t, store, blockHash822017, 822017, blocktx_api.Status_STALE) - verifyBlock(t, store, blockHash822014_startOfChain, 822014, blocktx_api.Status_LONGEST) - verifyBlock(t, store, blockHash822019_orphan, 822019, blocktx_api.Status_ORPHANED) + verifyBlock(t, store, blockHash822014StartOfChain, 822014, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822019Orphan, 822019, blocktx_api.Status_ORPHANED) previouslyLongestBlockHash := testutils.RevChainhash(t, blockHash822017) @@ -305,7 +305,7 @@ func testHandleReorg(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store { // previously in longest chain - also in stale - should have blockdata updated BlockHash: prevhash[:], BlockHeight: 822015, - TransactionHash: testutils.RevChainhash(t, txhash822015_2)[:], + TransactionHash: testutils.RevChainhash(t, txhash822015Competing)[:], BlockStatus: blocktx_api.Status_LONGEST, }, { // newly mined from stale block that became longest after reorg @@ -348,18 +348,18 @@ func 
testHandleStaleOrphans(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, // verify that the block and orphans have STALE status verifyBlock(t, store, blockHash822018, 822018, blocktx_api.Status_STALE) - verifyBlock(t, store, blockHash822019_orphan, 822019, blocktx_api.Status_STALE) - verifyBlock(t, store, blockHash822020_orphan, 822020, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822019Orphan, 822019, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822020Orphan, 822020, blocktx_api.Status_STALE) // verify that the blocks after the next gap are still orphans - verifyBlock(t, store, blockHash822022_orphan, 822022, blocktx_api.Status_ORPHANED) - verifyBlock(t, store, blockHash822023_orphan, 822023, blocktx_api.Status_ORPHANED) + verifyBlock(t, store, blockHash822022Orphan, 822022, blocktx_api.Status_ORPHANED) + verifyBlock(t, store, blockHash822023Orphan, 822023, blocktx_api.Status_ORPHANED) } func testHandleOrphansReorg(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store *postgresql.PostgreSQL) []*blocktx_api.TransactionBlock { txHash := testutils.RevChainhash(t, "3e15f823a7de25c26ce9001d4814a6f0ebc915a1ca4f1ba9cfac720bd941c39c") merkleRoot := testutils.RevChainhash(t, "3e15f823a7de25c26ce9001d4814a6f0ebc915a1ca4f1ba9cfac720bd941c39c") - prevhash := testutils.RevChainhash(t, blockHash822020_orphan) + prevhash := testutils.RevChainhash(t, blockHash822020Orphan) // should become LONGEST // reorg should happen @@ -380,23 +380,23 @@ func testHandleOrphansReorg(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, time.Sleep(2 * time.Second) // verify that the reorg happened - verifyBlock(t, store, blockHash822014_startOfChain, 822014, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822014StartOfChain, 822014, blocktx_api.Status_LONGEST) verifyBlock(t, store, blockHash822015, 822015, blocktx_api.Status_LONGEST) verifyBlock(t, store, blockHash822016, 822016, blocktx_api.Status_LONGEST) verifyBlock(t, store, blockHash822017, 822017, blocktx_api.Status_LONGEST) verifyBlock(t, store, blockHash822018, 822018, blocktx_api.Status_LONGEST) - verifyBlock(t, store, blockHash822019_orphan, 822019, blocktx_api.Status_LONGEST) - verifyBlock(t, store, blockHash822020_orphan, 822020, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822019Orphan, 822019, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822020Orphan, 822020, blocktx_api.Status_LONGEST) verifyBlock(t, store, blockHash822021, 822021, blocktx_api.Status_LONGEST) - verifyBlock(t, store, blockHash822022_orphan, 822022, blocktx_api.Status_LONGEST) - verifyBlock(t, store, blockHash822023_orphan, 822023, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822022Orphan, 822022, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822023Orphan, 822023, blocktx_api.Status_LONGEST) - verifyBlock(t, store, blockHash822015_fork, 822015, blocktx_api.Status_STALE) - verifyBlock(t, store, blockHash822016_fork, 822016, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822015Fork, 822015, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822016Fork, 822016, blocktx_api.Status_STALE) bh822015 := testutils.RevChainhash(t, blockHash822015) - bh822015_fork := testutils.RevChainhash(t, blockHash822015_fork) - bh822016_fork := testutils.RevChainhash(t, blockHash822016_fork) + bh822015_fork := testutils.RevChainhash(t, blockHash822015Fork) + bh822016_fork := testutils.RevChainhash(t, blockHash822016Fork) bh822017 := testutils.RevChainhash(t, blockHash822017) 
expectedTxs := []*blocktx_api.TransactionBlock{ @@ -409,7 +409,7 @@ func testHandleOrphansReorg(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, { // in both chains - should have blockdata updated BlockHash: bh822015[:], BlockHeight: 822015, - TransactionHash: testutils.RevChainhash(t, txhash822015_2)[:], + TransactionHash: testutils.RevChainhash(t, txhash822015Competing)[:], BlockStatus: blocktx_api.Status_LONGEST, }, { // in stale chain diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index 8eb2cec5a..d573f8735 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -225,7 +225,6 @@ func (p *Processor) StartBlockProcessing() { p.unlockBlock(p.ctx, &blockHash) p.stopBlockProcessGuard(&blockHash) // release guardian continue - } storeErr := p.store.MarkBlockAsDone(p.ctx, &blockHash, blockMsg.Size, uint64(len(blockMsg.TransactionHashes))) diff --git a/internal/blocktx/processor_test.go b/internal/blocktx/processor_test.go index 1a8ed1469..7e24288e2 100644 --- a/internal/blocktx/processor_test.go +++ b/internal/blocktx/processor_test.go @@ -453,13 +453,12 @@ func TestHandleBlockReorgAndOrphans(t *testing.T) { Chainwork: "42069", }, }, nil - } else { - return []*blocktx_api.Block{ - { - Chainwork: "62209952899966", - }, - }, nil } + return []*blocktx_api.Block{ + { + Chainwork: "62209952899966", + }, + }, nil } // if we get to this point, it means that reorg is happening diff --git a/internal/blocktx/store/mocks/blocktx_store_mock.go b/internal/blocktx/store/mocks/blocktx_store_mock.go index ed3c26bb5..264355b59 100644 --- a/internal/blocktx/store/mocks/blocktx_store_mock.go +++ b/internal/blocktx/store/mocks/blocktx_store_mock.go @@ -84,7 +84,7 @@ var _ store.BlocktxStore = &BlocktxStoreMock{} // UpsertBlockFunc: func(ctx context.Context, block *blocktx_api.Block) (uint64, error) { // panic("mock out the UpsertBlock method") // }, -// UpsertBlockTransactionsFunc: func(ctx context.Context, blockId uint64, txsWithMerklePaths []store.TxWithMerklePath) error { +// UpsertBlockTransactionsFunc: func(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) error { // panic("mock out the UpsertBlockTransactions method") // }, // VerifyMerkleRootsFunc: func(ctx context.Context, merkleRoots []*blocktx_api.MerkleRootVerificationRequest, maxAllowedBlockHeightMismatch int) (*blocktx_api.MerkleRootVerificationResponse, error) { @@ -161,7 +161,7 @@ type BlocktxStoreMock struct { UpsertBlockFunc func(ctx context.Context, block *blocktx_api.Block) (uint64, error) // UpsertBlockTransactionsFunc mocks the UpsertBlockTransactions method. - UpsertBlockTransactionsFunc func(ctx context.Context, blockId uint64, txsWithMerklePaths []store.TxWithMerklePath) error + UpsertBlockTransactionsFunc func(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) error // VerifyMerkleRootsFunc mocks the VerifyMerkleRoots method. VerifyMerkleRootsFunc func(ctx context.Context, merkleRoots []*blocktx_api.MerkleRootVerificationRequest, maxAllowedBlockHeightMismatch int) (*blocktx_api.MerkleRootVerificationResponse, error) @@ -321,8 +321,8 @@ type BlocktxStoreMock struct { UpsertBlockTransactions []struct { // Ctx is the ctx argument value. Ctx context.Context - // BlockId is the blockId argument value. - BlockId uint64 + // BlockID is the blockID argument value. + BlockID uint64 // TxsWithMerklePaths is the txsWithMerklePaths argument value. 
TxsWithMerklePaths []store.TxWithMerklePath } @@ -1121,23 +1121,23 @@ func (mock *BlocktxStoreMock) UpsertBlockCalls() []struct { } // UpsertBlockTransactions calls UpsertBlockTransactionsFunc. -func (mock *BlocktxStoreMock) UpsertBlockTransactions(ctx context.Context, blockId uint64, txsWithMerklePaths []store.TxWithMerklePath) error { +func (mock *BlocktxStoreMock) UpsertBlockTransactions(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) error { if mock.UpsertBlockTransactionsFunc == nil { panic("BlocktxStoreMock.UpsertBlockTransactionsFunc: method is nil but BlocktxStore.UpsertBlockTransactions was just called") } callInfo := struct { Ctx context.Context - BlockId uint64 + BlockID uint64 TxsWithMerklePaths []store.TxWithMerklePath }{ Ctx: ctx, - BlockId: blockId, + BlockID: blockID, TxsWithMerklePaths: txsWithMerklePaths, } mock.lockUpsertBlockTransactions.Lock() mock.calls.UpsertBlockTransactions = append(mock.calls.UpsertBlockTransactions, callInfo) mock.lockUpsertBlockTransactions.Unlock() - return mock.UpsertBlockTransactionsFunc(ctx, blockId, txsWithMerklePaths) + return mock.UpsertBlockTransactionsFunc(ctx, blockID, txsWithMerklePaths) } // UpsertBlockTransactionsCalls gets all the calls that were made to UpsertBlockTransactions. @@ -1146,12 +1146,12 @@ func (mock *BlocktxStoreMock) UpsertBlockTransactions(ctx context.Context, block // len(mockedBlocktxStore.UpsertBlockTransactionsCalls()) func (mock *BlocktxStoreMock) UpsertBlockTransactionsCalls() []struct { Ctx context.Context - BlockId uint64 + BlockID uint64 TxsWithMerklePaths []store.TxWithMerklePath } { var calls []struct { Ctx context.Context - BlockId uint64 + BlockID uint64 TxsWithMerklePaths []store.TxWithMerklePath } mock.lockUpsertBlockTransactions.RLock() diff --git a/internal/blocktx/store/postgresql/update_block_statuses.go b/internal/blocktx/store/postgresql/update_block_statuses.go index ca88bf058..3f5a56e7c 100644 --- a/internal/blocktx/store/postgresql/update_block_statuses.go +++ b/internal/blocktx/store/postgresql/update_block_statuses.go @@ -19,15 +19,15 @@ func (p *PostgreSQL) UpdateBlocksStatuses(ctx context.Context, blockStatusUpdate blockHashes := make([][]byte, len(blockStatusUpdates)) statuses := make([]blocktx_api.Status, len(blockStatusUpdates)) - is_longest := make([]bool, len(blockStatusUpdates)) + isLongest := make([]bool, len(blockStatusUpdates)) for i, update := range blockStatusUpdates { blockHashes[i] = update.Hash statuses[i] = update.Status - is_longest[i] = update.Status == blocktx_api.Status_LONGEST + isLongest[i] = update.Status == blocktx_api.Status_LONGEST } - _, err := p.db.ExecContext(ctx, q, pq.Array(blockHashes), pq.Array(statuses), pq.Array(is_longest)) + _, err := p.db.ExecContext(ctx, q, pq.Array(blockHashes), pq.Array(statuses), pq.Array(isLongest)) if err != nil { return errors.Join(store.ErrFailedToUpdateBlockStatuses, err) } From b64a9913e9e16425f8bf52b105a3ad8a0db7ac66 Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Wed, 6 Nov 2024 19:33:32 +0100 Subject: [PATCH 18/36] chore(ARCO-199): linter errors again --- .../blocktx/integration_test/reorg_integration_test.go | 8 ++++---- internal/blocktx/store/postgresql/get_transactions.go | 1 - 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/internal/blocktx/integration_test/reorg_integration_test.go b/internal/blocktx/integration_test/reorg_integration_test.go index 9f3f0dd1b..aa38e7c16 100644 --- a/internal/blocktx/integration_test/reorg_integration_test.go +++ 
b/internal/blocktx/integration_test/reorg_integration_test.go @@ -395,13 +395,13 @@ func testHandleOrphansReorg(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, verifyBlock(t, store, blockHash822016Fork, 822016, blocktx_api.Status_STALE) bh822015 := testutils.RevChainhash(t, blockHash822015) - bh822015_fork := testutils.RevChainhash(t, blockHash822015Fork) - bh822016_fork := testutils.RevChainhash(t, blockHash822016Fork) + bh822015Fork := testutils.RevChainhash(t, blockHash822015Fork) + bh822016Fork := testutils.RevChainhash(t, blockHash822016Fork) bh822017 := testutils.RevChainhash(t, blockHash822017) expectedTxs := []*blocktx_api.TransactionBlock{ { // in stale chain - BlockHash: bh822015_fork[:], + BlockHash: bh822015Fork[:], BlockHeight: 822015, TransactionHash: testutils.RevChainhash(t, txhash822015)[:], BlockStatus: blocktx_api.Status_STALE, @@ -413,7 +413,7 @@ func testHandleOrphansReorg(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, BlockStatus: blocktx_api.Status_LONGEST, }, { // in stale chain - BlockHash: bh822016_fork[:], + BlockHash: bh822016Fork[:], BlockHeight: 822016, TransactionHash: testutils.RevChainhash(t, txhash822016)[:], BlockStatus: blocktx_api.Status_STALE, diff --git a/internal/blocktx/store/postgresql/get_transactions.go b/internal/blocktx/store/postgresql/get_transactions.go index 885f5670c..b7e9902cd 100644 --- a/internal/blocktx/store/postgresql/get_transactions.go +++ b/internal/blocktx/store/postgresql/get_transactions.go @@ -80,7 +80,6 @@ func (p *PostgreSQL) getTransactionBlocksByPredicate(ctx context.Context, predic MerklePath: merklePath, BlockStatus: blockStatus, }) - } return transactionBlocks, nil From e96c972b3aa47210cb4b0df5a3a6de7b805c6e86 Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Wed, 6 Nov 2024 19:46:53 +0100 Subject: [PATCH 19/36] feat(ARCO-199): remove status from GetBlockByHeight --- internal/blocktx/processor.go | 4 ++-- internal/blocktx/processor_test.go | 4 ++-- internal/blocktx/store/mocks/blocktx_store_mock.go | 14 ++++---------- internal/blocktx/store/postgresql/get_block.go | 6 +++--- internal/blocktx/store/postgresql/postgres_test.go | 9 ++------- internal/blocktx/store/store.go | 2 +- 6 files changed, 14 insertions(+), 25 deletions(-) diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index d573f8735..311be5ef8 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -635,7 +635,7 @@ func (p *Processor) competingChainsExist(ctx context.Context, block *blocktx_api } if block.Status == blocktx_api.Status_LONGEST { - competingBlock, err := p.store.GetBlockByHeight(ctx, block.Height, blocktx_api.Status_LONGEST) + competingBlock, err := p.store.GetBlockByHeight(ctx, block.Height) if err != nil && !errors.Is(err, store.ErrBlockNotFound) { return false, err } @@ -741,7 +741,7 @@ func (p *Processor) storeTransactions(ctx context.Context, blockID uint64, block break } - bump, err := bc.NewBUMPFromMerkleTreeAndIndex(block.Height, merkleTree, uint64(txIndex)) + bump, err := bc.NewBUMPFromMerkleTreeAndIndex(block.Height, merkleTree, uint64(txIndex)) // NOSONAR if err != nil { return errors.Join(ErrFailedToCreateBUMP, fmt.Errorf("tx hash %s, block height: %d", hash.String(), block.Height), err) } diff --git a/internal/blocktx/processor_test.go b/internal/blocktx/processor_test.go index 7e24288e2..6d9d14a11 100644 --- a/internal/blocktx/processor_test.go +++ b/internal/blocktx/processor_test.go @@ -179,7 +179,7 @@ func TestHandleBlock(t *testing.T) { } return nil, 
store.ErrBlockNotFound }, - GetBlockByHeightFunc: func(_ context.Context, _ uint64, _ blocktx_api.Status) (*blocktx_api.Block, error) { + GetBlockByHeightFunc: func(_ context.Context, _ uint64) (*blocktx_api.Block, error) { return nil, store.ErrBlockNotFound }, GetChainTipFunc: func(_ context.Context) (*blocktx_api.Block, error) { @@ -390,7 +390,7 @@ func TestHandleBlockReorgAndOrphans(t *testing.T) { Processed: true, }, nil }, - GetBlockByHeightFunc: func(_ context.Context, _ uint64, _ blocktx_api.Status) (*blocktx_api.Block, error) { + GetBlockByHeightFunc: func(_ context.Context, _ uint64) (*blocktx_api.Block, error) { if tc.hasCompetingBlock { blockHash, err := chainhash.NewHashFromStr("0000000000000000087590e1ad6360c0c491556c9af75c0d22ce9324cb5713cf") require.NoError(t, err) diff --git a/internal/blocktx/store/mocks/blocktx_store_mock.go b/internal/blocktx/store/mocks/blocktx_store_mock.go index 264355b59..bda560134 100644 --- a/internal/blocktx/store/mocks/blocktx_store_mock.go +++ b/internal/blocktx/store/mocks/blocktx_store_mock.go @@ -36,7 +36,7 @@ var _ store.BlocktxStore = &BlocktxStoreMock{} // GetBlockFunc: func(ctx context.Context, hash *chainhash.Hash) (*blocktx_api.Block, error) { // panic("mock out the GetBlock method") // }, -// GetBlockByHeightFunc: func(ctx context.Context, height uint64, status blocktx_api.Status) (*blocktx_api.Block, error) { +// GetBlockByHeightFunc: func(ctx context.Context, height uint64) (*blocktx_api.Block, error) { // panic("mock out the GetBlockByHeight method") // }, // GetBlockGapsFunc: func(ctx context.Context, heightRange int) ([]*store.BlockGap, error) { @@ -113,7 +113,7 @@ type BlocktxStoreMock struct { GetBlockFunc func(ctx context.Context, hash *chainhash.Hash) (*blocktx_api.Block, error) // GetBlockByHeightFunc mocks the GetBlockByHeight method. - GetBlockByHeightFunc func(ctx context.Context, height uint64, status blocktx_api.Status) (*blocktx_api.Block, error) + GetBlockByHeightFunc func(ctx context.Context, height uint64) (*blocktx_api.Block, error) // GetBlockGapsFunc mocks the GetBlockGaps method. GetBlockGapsFunc func(ctx context.Context, heightRange int) ([]*store.BlockGap, error) @@ -207,8 +207,6 @@ type BlocktxStoreMock struct { Ctx context.Context // Height is the height argument value. Height uint64 - // Status is the status argument value. - Status blocktx_api.Status } // GetBlockGaps holds details about calls to the GetBlockGaps method. GetBlockGaps []struct { @@ -537,23 +535,21 @@ func (mock *BlocktxStoreMock) GetBlockCalls() []struct { } // GetBlockByHeight calls GetBlockByHeightFunc. -func (mock *BlocktxStoreMock) GetBlockByHeight(ctx context.Context, height uint64, status blocktx_api.Status) (*blocktx_api.Block, error) { +func (mock *BlocktxStoreMock) GetBlockByHeight(ctx context.Context, height uint64) (*blocktx_api.Block, error) { if mock.GetBlockByHeightFunc == nil { panic("BlocktxStoreMock.GetBlockByHeightFunc: method is nil but BlocktxStore.GetBlockByHeight was just called") } callInfo := struct { Ctx context.Context Height uint64 - Status blocktx_api.Status }{ Ctx: ctx, Height: height, - Status: status, } mock.lockGetBlockByHeight.Lock() mock.calls.GetBlockByHeight = append(mock.calls.GetBlockByHeight, callInfo) mock.lockGetBlockByHeight.Unlock() - return mock.GetBlockByHeightFunc(ctx, height, status) + return mock.GetBlockByHeightFunc(ctx, height) } // GetBlockByHeightCalls gets all the calls that were made to GetBlockByHeight. 
@@ -563,12 +559,10 @@ func (mock *BlocktxStoreMock) GetBlockByHeight(ctx context.Context, height uint6 func (mock *BlocktxStoreMock) GetBlockByHeightCalls() []struct { Ctx context.Context Height uint64 - Status blocktx_api.Status } { var calls []struct { Ctx context.Context Height uint64 - Status blocktx_api.Status } mock.lockGetBlockByHeight.RLock() calls = mock.calls.GetBlockByHeight diff --git a/internal/blocktx/store/postgresql/get_block.go b/internal/blocktx/store/postgresql/get_block.go index a520af83d..b8852d460 100644 --- a/internal/blocktx/store/postgresql/get_block.go +++ b/internal/blocktx/store/postgresql/get_block.go @@ -16,10 +16,10 @@ func (p *PostgreSQL) GetBlock(ctx context.Context, hash *chainhash.Hash) (*block return p.queryBlockByPredicate(ctx, predicate, hash[:]) } -func (p *PostgreSQL) GetBlockByHeight(ctx context.Context, height uint64, status blocktx_api.Status) (*blocktx_api.Block, error) { - predicate := "WHERE height = $1 AND status = $2" +func (p *PostgreSQL) GetBlockByHeight(ctx context.Context, height uint64) (*blocktx_api.Block, error) { + predicate := "WHERE height = $1 AND is_longest = true" - return p.queryBlockByPredicate(ctx, predicate, height, status) + return p.queryBlockByPredicate(ctx, predicate, height) } func (p *PostgreSQL) GetChainTip(ctx context.Context) (*blocktx_api.Block, error) { diff --git a/internal/blocktx/store/postgresql/postgres_test.go b/internal/blocktx/store/postgresql/postgres_test.go index f1bba129f..90e09ec19 100644 --- a/internal/blocktx/store/postgresql/postgres_test.go +++ b/internal/blocktx/store/postgresql/postgres_test.go @@ -193,7 +193,6 @@ func TestPostgresDB(t *testing.T) { height := uint64(822015) expectedHashAtHeightLongest := testutils.RevChainhash(t, "c9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000") - expectedHashAtHeightStale := testutils.RevChainhash(t, "00000000000000000659df0d3cf98ebe46931b67117502168418f9dce4e1b4c9") heightNotFound := uint64(812222) @@ -201,15 +200,11 @@ func TestPostgresDB(t *testing.T) { hashAtTip := testutils.RevChainhash(t, "76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000") // when -> then - actualBlock, err := postgresDB.GetBlockByHeight(context.Background(), height, blocktx_api.Status_LONGEST) + actualBlock, err := postgresDB.GetBlockByHeight(context.Background(), height) require.NoError(t, err) require.Equal(t, expectedHashAtHeightLongest[:], actualBlock.Hash) - actualBlock, err = postgresDB.GetBlockByHeight(context.Background(), height, blocktx_api.Status_STALE) - require.NoError(t, err) - require.Equal(t, expectedHashAtHeightStale[:], actualBlock.Hash) - - actualBlock, err = postgresDB.GetBlockByHeight(context.Background(), heightNotFound, blocktx_api.Status_LONGEST) + actualBlock, err = postgresDB.GetBlockByHeight(context.Background(), heightNotFound) require.Nil(t, actualBlock) require.Equal(t, store.ErrBlockNotFound, err) diff --git a/internal/blocktx/store/store.go b/internal/blocktx/store/store.go index 29fa9aa9b..042a29664 100644 --- a/internal/blocktx/store/store.go +++ b/internal/blocktx/store/store.go @@ -31,7 +31,7 @@ type Stats struct { type BlocktxStore interface { RegisterTransactions(ctx context.Context, txHashes [][]byte) (updatedTxs []*chainhash.Hash, err error) GetBlock(ctx context.Context, hash *chainhash.Hash) (*blocktx_api.Block, error) - GetBlockByHeight(ctx context.Context, height uint64, status blocktx_api.Status) (*blocktx_api.Block, error) + GetBlockByHeight(ctx context.Context, height uint64) (*blocktx_api.Block, error) 
GetChainTip(ctx context.Context) (*blocktx_api.Block, error) UpsertBlock(ctx context.Context, block *blocktx_api.Block) (uint64, error) UpsertBlockTransactions(ctx context.Context, blockID uint64, txsWithMerklePaths []TxWithMerklePath) error From 777979e74fac3686675b7f5388e5350fb31540f0 Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Thu, 7 Nov 2024 13:29:12 +0100 Subject: [PATCH 20/36] feat(ARCO-199): fix e2e tests --- internal/blocktx/store/postgresql/get_block_gaps.go | 6 ++++++ test/config/config.yaml | 2 +- test/init_test.go | 2 ++ 3 files changed, 9 insertions(+), 1 deletion(-) diff --git a/internal/blocktx/store/postgresql/get_block_gaps.go b/internal/blocktx/store/postgresql/get_block_gaps.go index 143c2d226..6c4a83a6c 100644 --- a/internal/blocktx/store/postgresql/get_block_gaps.go +++ b/internal/blocktx/store/postgresql/get_block_gaps.go @@ -52,6 +52,12 @@ func (p *PostgreSQL) GetBlockGaps(ctx context.Context, blockHeightRange int) ([] return nil, err } + // in e2e tests, peers will misbehave if we ask + // for a genesis block, so we need to ignore it + if height == uint64(0) { + continue + } + txHash, err := chainhash.NewHash(hash) if err != nil { return nil, err diff --git a/test/config/config.yaml b/test/config/config.yaml index 1c32339dd..773166aa7 100644 --- a/test/config/config.yaml +++ b/test/config/config.yaml @@ -94,7 +94,7 @@ blocktx: recordRetentionDays: 28 profilerAddr: localhost:9993 registerTxsInterval: 200ms - fillGapsInterval: 15m + fillGapsInterval: 1s # this is needed, because during init, peers don't send us some blocks maxAllowedBlockHeightMismatch: 3 api: diff --git a/test/init_test.go b/test/init_test.go index f41f69cd2..061bbab8c 100644 --- a/test/init_test.go +++ b/test/init_test.go @@ -71,5 +71,7 @@ func setupSut() { break } } + + time.Sleep(5 * time.Second) // wait for fillGaps to fill any remaining gaps } } From a72a80e49ea7b04849195399d035b17b5c70807f Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Thu, 7 Nov 2024 15:21:49 +0100 Subject: [PATCH 21/36] feat(ARCO-199): create a clone of blocktx store for UnitOfWork purposes --- internal/blocktx/processor.go | 28 +++++- internal/blocktx/processor_test.go | 13 ++- .../blocktx/store/mocks/blocktx_db_tx_mock.go | 40 ++++----- .../blocktx/store/mocks/blocktx_store_mock.go | 88 +++++++++---------- internal/blocktx/store/postgresql/postgres.go | 58 +++++++----- .../blocktx/store/postgresql/postgres_test.go | 8 +- internal/blocktx/store/store.go | 5 +- internal/blocktx/store/store_mocks.go | 2 +- 8 files changed, 140 insertions(+), 102 deletions(-) diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index 311be5ef8..43202a274 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -786,6 +786,21 @@ func (p *Processor) storeTransactions(ctx context.Context, blockID uint64, block func (p *Processor) updateOrphans(ctx context.Context, incomingBlock *blocktx_api.Block, competing bool) (chain, bool, error) { chain := []*blocktx_api.Block{incomingBlock} + uow, err := p.store.StartUnitOfWork(ctx) + if err != nil { + return nil, false, err + } + defer func() { + _ = uow.Rollback() + }() + + // Very important step, this will lock blocks + // table for writing but still allow reading.
+ err = uow.WriteLockBlocksTable(ctx) + if err != nil { + return nil, false, err + } + orphanedBlocks, err := p.store.GetOrphanedChainUpFromHash(ctx, incomingBlock.Hash) if err != nil { return nil, false, err @@ -816,6 +831,11 @@ func (p *Processor) updateOrphans(ctx context.Context, incomingBlock *blocktx_ap return nil, false, err } + err = uow.Commit() + if err != nil { + return nil, false, err + } + p.logger.Info("orphans were found and updated", slog.Int("len", len(orphanedBlocks))) chain = append(chain, orphanedBlocks...) @@ -827,17 +847,17 @@ func (p *Processor) updateOrphans(ctx context.Context, incomingBlock *blocktx_ap } func (p *Processor) performReorg(ctx context.Context, staleChainTip *blocktx_api.Block) ([]store.TransactionBlock, error) { - tx, err := p.store.BeginTx(ctx) + uow, err := p.store.StartUnitOfWork(ctx) if err != nil { return nil, err } defer func() { - _ = tx.Rollback() + _ = uow.Rollback() }() // Very important step, this will lock blocks // table for writing but still allow reading. - err = tx.WriteLockBlocksTable(ctx) + err = uow.WriteLockBlocksTable(ctx) if err != nil { return nil, err } @@ -902,7 +922,7 @@ func (p *Processor) performReorg(ctx context.Context, staleChainTip *blocktx_api return nil, err } - err = tx.Commit() + err = uow.Commit() if err != nil { return nil, err } diff --git a/internal/blocktx/processor_test.go b/internal/blocktx/processor_test.go index 6d9d14a11..3e4119175 100644 --- a/internal/blocktx/processor_test.go +++ b/internal/blocktx/processor_test.go @@ -157,8 +157,7 @@ func TestHandleBlock(t *testing.T) { } var actualInsertedBlockTransactions [][]byte - - txMock := &storeMocks.DbTransactionMock{ + uowMock := &storeMocks.DbTransactionMock{ CommitFunc: func() error { return nil }, @@ -170,8 +169,8 @@ func TestHandleBlock(t *testing.T) { }, } storeMock := &storeMocks.BlocktxStoreMock{ - BeginTxFunc: func(_ context.Context) (store.DbTransaction, error) { - return txMock, nil + StartUnitOfWorkFunc: func(_ context.Context) (store.UnitOfWork, error) { + return uowMock, nil }, GetBlockFunc: func(_ context.Context, _ *chainhash.Hash) (*blocktx_api.Block, error) { if tc.blockAlreadyProcessed { @@ -364,7 +363,7 @@ func TestHandleBlockReorgAndOrphans(t *testing.T) { shouldCheckUpdateStatuses := true comparingChainwork := true - txMock := &storeMocks.DbTransactionMock{ + uowMock := &storeMocks.DbTransactionMock{ CommitFunc: func() error { return nil }, @@ -376,8 +375,8 @@ func TestHandleBlockReorgAndOrphans(t *testing.T) { }, } storeMock := &storeMocks.BlocktxStoreMock{ - BeginTxFunc: func(_ context.Context) (store.DbTransaction, error) { - return txMock, nil + StartUnitOfWorkFunc: func(_ context.Context) (store.UnitOfWork, error) { + return uowMock, nil }, GetBlockFunc: func(_ context.Context, _ *chainhash.Hash) (*blocktx_api.Block, error) { if shouldReturnNoBlock { diff --git a/internal/blocktx/store/mocks/blocktx_db_tx_mock.go b/internal/blocktx/store/mocks/blocktx_db_tx_mock.go index 6096aa9ba..c68fcc7b3 100644 --- a/internal/blocktx/store/mocks/blocktx_db_tx_mock.go +++ b/internal/blocktx/store/mocks/blocktx_db_tx_mock.go @@ -9,16 +9,16 @@ import ( "sync" ) -// Ensure, that DbTransactionMock does implement store.DbTransaction. +// Ensure, that UnitOfWorkMock does implement store.UnitOfWork. // If this is not the case, regenerate this file with moq. -var _ store.DbTransaction = &DbTransactionMock{} +var _ store.UnitOfWork = &UnitOfWorkMock{} -// DbTransactionMock is a mock implementation of store.DbTransaction. 
+// UnitOfWorkMock is a mock implementation of store.UnitOfWork. // -// func TestSomethingThatUsesDbTransaction(t *testing.T) { +// func TestSomethingThatUsesUnitOfWork(t *testing.T) { // -// // make and configure a mocked store.DbTransaction -// mockedDbTransaction := &DbTransactionMock{ +// // make and configure a mocked store.UnitOfWork +// mockedUnitOfWork := &UnitOfWorkMock{ // CommitFunc: func() error { // panic("mock out the Commit method") // }, @@ -30,11 +30,11 @@ var _ store.DbTransaction = &DbTransactionMock{} // }, // } // -// // use mockedDbTransaction in code that requires store.DbTransaction +// // use mockedUnitOfWork in code that requires store.UnitOfWork // // and then make assertions. // // } -type DbTransactionMock struct { +type UnitOfWorkMock struct { // CommitFunc mocks the Commit method. CommitFunc func() error @@ -64,9 +64,9 @@ type DbTransactionMock struct { } // Commit calls CommitFunc. -func (mock *DbTransactionMock) Commit() error { +func (mock *UnitOfWorkMock) Commit() error { if mock.CommitFunc == nil { - panic("DbTransactionMock.CommitFunc: method is nil but DbTransaction.Commit was just called") + panic("UnitOfWorkMock.CommitFunc: method is nil but UnitOfWork.Commit was just called") } callInfo := struct { }{} @@ -79,8 +79,8 @@ func (mock *DbTransactionMock) Commit() error { // CommitCalls gets all the calls that were made to Commit. // Check the length with: // -// len(mockedDbTransaction.CommitCalls()) -func (mock *DbTransactionMock) CommitCalls() []struct { +// len(mockedUnitOfWork.CommitCalls()) +func (mock *UnitOfWorkMock) CommitCalls() []struct { } { var calls []struct { } @@ -91,9 +91,9 @@ func (mock *DbTransactionMock) CommitCalls() []struct { } // Rollback calls RollbackFunc. -func (mock *DbTransactionMock) Rollback() error { +func (mock *UnitOfWorkMock) Rollback() error { if mock.RollbackFunc == nil { - panic("DbTransactionMock.RollbackFunc: method is nil but DbTransaction.Rollback was just called") + panic("UnitOfWorkMock.RollbackFunc: method is nil but UnitOfWork.Rollback was just called") } callInfo := struct { }{} @@ -106,8 +106,8 @@ func (mock *DbTransactionMock) Rollback() error { // RollbackCalls gets all the calls that were made to Rollback. // Check the length with: // -// len(mockedDbTransaction.RollbackCalls()) -func (mock *DbTransactionMock) RollbackCalls() []struct { +// len(mockedUnitOfWork.RollbackCalls()) +func (mock *UnitOfWorkMock) RollbackCalls() []struct { } { var calls []struct { } @@ -118,9 +118,9 @@ func (mock *DbTransactionMock) RollbackCalls() []struct { } // WriteLockBlocksTable calls WriteLockBlocksTableFunc. -func (mock *DbTransactionMock) WriteLockBlocksTable(ctx context.Context) error { +func (mock *UnitOfWorkMock) WriteLockBlocksTable(ctx context.Context) error { if mock.WriteLockBlocksTableFunc == nil { - panic("DbTransactionMock.WriteLockBlocksTableFunc: method is nil but DbTransaction.WriteLockBlocksTable was just called") + panic("UnitOfWorkMock.WriteLockBlocksTableFunc: method is nil but UnitOfWork.WriteLockBlocksTable was just called") } callInfo := struct { Ctx context.Context @@ -136,8 +136,8 @@ func (mock *DbTransactionMock) WriteLockBlocksTable(ctx context.Context) error { // WriteLockBlocksTableCalls gets all the calls that were made to WriteLockBlocksTable. 
// Check the length with: // -// len(mockedDbTransaction.WriteLockBlocksTableCalls()) -func (mock *DbTransactionMock) WriteLockBlocksTableCalls() []struct { +// len(mockedUnitOfWork.WriteLockBlocksTableCalls()) +func (mock *UnitOfWorkMock) WriteLockBlocksTableCalls() []struct { Ctx context.Context } { var calls []struct { diff --git a/internal/blocktx/store/mocks/blocktx_store_mock.go b/internal/blocktx/store/mocks/blocktx_store_mock.go index bda560134..a4fe30b23 100644 --- a/internal/blocktx/store/mocks/blocktx_store_mock.go +++ b/internal/blocktx/store/mocks/blocktx_store_mock.go @@ -21,9 +21,6 @@ var _ store.BlocktxStore = &BlocktxStoreMock{} // // // make and configure a mocked store.BlocktxStore // mockedBlocktxStore := &BlocktxStoreMock{ -// BeginTxFunc: func(ctx context.Context) (store.DbTransaction, error) { -// panic("mock out the BeginTx method") -// }, // ClearBlocktxTableFunc: func(ctx context.Context, retentionDays int32, table string) (*blocktx_api.RowsAffectedResponse, error) { // panic("mock out the ClearBlocktxTable method") // }, @@ -78,6 +75,9 @@ var _ store.BlocktxStore = &BlocktxStoreMock{} // SetBlockProcessingFunc: func(ctx context.Context, hash *chainhash.Hash, processedBy string) (string, error) { // panic("mock out the SetBlockProcessing method") // }, +// StartUnitOfWorkFunc: func(ctx context.Context) (store.UnitOfWork, error) { +// panic("mock out the StartUnitOfWork method") +// }, // UpdateBlocksStatusesFunc: func(ctx context.Context, blockStatusUpdates []store.BlockStatusUpdate) error { // panic("mock out the UpdateBlocksStatuses method") // }, @@ -97,9 +97,6 @@ var _ store.BlocktxStore = &BlocktxStoreMock{} // // } type BlocktxStoreMock struct { - // BeginTxFunc mocks the BeginTx method. - BeginTxFunc func(ctx context.Context) (store.DbTransaction, error) - // ClearBlocktxTableFunc mocks the ClearBlocktxTable method. ClearBlocktxTableFunc func(ctx context.Context, retentionDays int32, table string) (*blocktx_api.RowsAffectedResponse, error) @@ -154,6 +151,9 @@ type BlocktxStoreMock struct { // SetBlockProcessingFunc mocks the SetBlockProcessing method. SetBlockProcessingFunc func(ctx context.Context, hash *chainhash.Hash, processedBy string) (string, error) + // StartUnitOfWorkFunc mocks the StartUnitOfWork method. + StartUnitOfWorkFunc func(ctx context.Context) (store.UnitOfWork, error) + // UpdateBlocksStatusesFunc mocks the UpdateBlocksStatuses method. UpdateBlocksStatusesFunc func(ctx context.Context, blockStatusUpdates []store.BlockStatusUpdate) error @@ -168,11 +168,6 @@ type BlocktxStoreMock struct { // calls tracks calls to the methods. calls struct { - // BeginTx holds details about calls to the BeginTx method. - BeginTx []struct { - // Ctx is the ctx argument value. - Ctx context.Context - } // ClearBlocktxTable holds details about calls to the ClearBlocktxTable method. ClearBlocktxTable []struct { // Ctx is the ctx argument value. @@ -301,6 +296,11 @@ type BlocktxStoreMock struct { // ProcessedBy is the processedBy argument value. ProcessedBy string } + // StartUnitOfWork holds details about calls to the StartUnitOfWork method. + StartUnitOfWork []struct { + // Ctx is the ctx argument value. + Ctx context.Context + } // UpdateBlocksStatuses holds details about calls to the UpdateBlocksStatuses method. UpdateBlocksStatuses []struct { // Ctx is the ctx argument value. 
@@ -334,7 +334,6 @@ type BlocktxStoreMock struct { MaxAllowedBlockHeightMismatch int } } - lockBeginTx sync.RWMutex lockClearBlocktxTable sync.RWMutex lockClose sync.RWMutex lockDelBlockProcessing sync.RWMutex @@ -353,44 +352,13 @@ type BlocktxStoreMock struct { lockPing sync.RWMutex lockRegisterTransactions sync.RWMutex lockSetBlockProcessing sync.RWMutex + lockStartUnitOfWork sync.RWMutex lockUpdateBlocksStatuses sync.RWMutex lockUpsertBlock sync.RWMutex lockUpsertBlockTransactions sync.RWMutex lockVerifyMerkleRoots sync.RWMutex } -// BeginTx calls BeginTxFunc. -func (mock *BlocktxStoreMock) BeginTx(ctx context.Context) (store.DbTransaction, error) { - if mock.BeginTxFunc == nil { - panic("BlocktxStoreMock.BeginTxFunc: method is nil but BlocktxStore.BeginTx was just called") - } - callInfo := struct { - Ctx context.Context - }{ - Ctx: ctx, - } - mock.lockBeginTx.Lock() - mock.calls.BeginTx = append(mock.calls.BeginTx, callInfo) - mock.lockBeginTx.Unlock() - return mock.BeginTxFunc(ctx) -} - -// BeginTxCalls gets all the calls that were made to BeginTx. -// Check the length with: -// -// len(mockedBlocktxStore.BeginTxCalls()) -func (mock *BlocktxStoreMock) BeginTxCalls() []struct { - Ctx context.Context -} { - var calls []struct { - Ctx context.Context - } - mock.lockBeginTx.RLock() - calls = mock.calls.BeginTx - mock.lockBeginTx.RUnlock() - return calls -} - // ClearBlocktxTable calls ClearBlocktxTableFunc. func (mock *BlocktxStoreMock) ClearBlocktxTable(ctx context.Context, retentionDays int32, table string) (*blocktx_api.RowsAffectedResponse, error) { if mock.ClearBlocktxTableFunc == nil { @@ -1042,6 +1010,38 @@ func (mock *BlocktxStoreMock) SetBlockProcessingCalls() []struct { return calls } +// StartUnitOfWork calls StartUnitOfWorkFunc. +func (mock *BlocktxStoreMock) StartUnitOfWork(ctx context.Context) (store.UnitOfWork, error) { + if mock.StartUnitOfWorkFunc == nil { + panic("BlocktxStoreMock.StartUnitOfWorkFunc: method is nil but BlocktxStore.StartUnitOfWork was just called") + } + callInfo := struct { + Ctx context.Context + }{ + Ctx: ctx, + } + mock.lockStartUnitOfWork.Lock() + mock.calls.StartUnitOfWork = append(mock.calls.StartUnitOfWork, callInfo) + mock.lockStartUnitOfWork.Unlock() + return mock.StartUnitOfWorkFunc(ctx) +} + +// StartUnitOfWorkCalls gets all the calls that were made to StartUnitOfWork. +// Check the length with: +// +// len(mockedBlocktxStore.StartUnitOfWorkCalls()) +func (mock *BlocktxStoreMock) StartUnitOfWorkCalls() []struct { + Ctx context.Context +} { + var calls []struct { + Ctx context.Context + } + mock.lockStartUnitOfWork.RLock() + calls = mock.calls.StartUnitOfWork + mock.lockStartUnitOfWork.RUnlock() + return calls +} + // UpdateBlocksStatuses calls UpdateBlocksStatusesFunc. 
func (mock *BlocktxStoreMock) UpdateBlocksStatuses(ctx context.Context, blockStatusUpdates []store.BlockStatusUpdate) error { if mock.UpdateBlocksStatusesFunc == nil { diff --git a/internal/blocktx/store/postgresql/postgres.go b/internal/blocktx/store/postgresql/postgres.go index 19353cb5d..1a320b300 100644 --- a/internal/blocktx/store/postgresql/postgres.go +++ b/internal/blocktx/store/postgresql/postgres.go @@ -37,6 +37,7 @@ type PostgreSQL struct { maxPostgresBulkInsertRows int tracingEnabled bool tracingAttributes []attribute.KeyValue + dbInfo string } func WithNow(nowFunc func() time.Time) func(*PostgreSQL) { @@ -74,6 +75,7 @@ func New(dbInfo string, idleConns int, maxOpenConns int, opts ...func(postgreSQL _db: db, now: time.Now, maxPostgresBulkInsertRows: maxPostgresBulkInsertRows, + dbInfo: dbInfo, } p.db = p._db @@ -85,18 +87,6 @@ func New(dbInfo string, idleConns int, maxOpenConns int, opts ...func(postgreSQL return p, nil } -func (p *PostgreSQL) BeginTx(ctx context.Context) (store.DbTransaction, error) { - tx, err := p._db.BeginTx(ctx, nil) - if err != nil { - return nil, err - } - - p._tx = tx - p.db = p._tx - - return p, nil -} - func (p *PostgreSQL) Close() error { return p._db.Close() } @@ -110,18 +100,46 @@ func (p *PostgreSQL) Ping(ctx context.Context) error { return r.Close() } -func (p *PostgreSQL) Commit() error { - p.db = p._db - return p._tx.Commit() +func (p *PostgreSQL) StartUnitOfWork(ctx context.Context) (store.UnitOfWork, error) { + // This will create a clone of the store and start a transaction + // to avoid messing with the state of the main singleton store + cloneDB, err := sql.Open(postgresDriverName, p.dbInfo) + if err != nil { + return nil, errors.Join(store.ErrFailedToOpenDB, err) + } + + cloneStore := &PostgreSQL{ + _db: cloneDB, + now: time.Now, + maxPostgresBulkInsertRows: maxPostgresBulkInsertRows, + tracingEnabled: p.tracingEnabled, + tracingAttributes: p.tracingAttributes, + } + + tx, err := cloneStore._db.BeginTx(ctx, nil) + if err != nil { + return nil, err + } + + cloneStore._tx = tx + cloneStore.db = cloneStore._tx + + return cloneStore, nil } -func (p *PostgreSQL) Rollback() error { - p.db = p._db - return p._tx.Rollback() +// UnitOfWork methods below +func (uow *PostgreSQL) Commit() error { + uow.db = uow._db + return uow._tx.Commit() +} + +func (uow *PostgreSQL) Rollback() error { + uow.db = uow._db + return uow._tx.Rollback() } -func (p *PostgreSQL) WriteLockBlocksTable(ctx context.Context) error { - tx, ok := p.db.(*sql.Tx) +func (uow *PostgreSQL) WriteLockBlocksTable(ctx context.Context) error { + tx, ok := uow.db.(*sql.Tx) if !ok { return ErrNoTransaction } diff --git a/internal/blocktx/store/postgresql/postgres_test.go b/internal/blocktx/store/postgresql/postgres_test.go index 90e09ec19..8281e5d8b 100644 --- a/internal/blocktx/store/postgresql/postgres_test.go +++ b/internal/blocktx/store/postgresql/postgres_test.go @@ -670,16 +670,16 @@ func TestPostgresDB(t *testing.T) { require.Error(t, err) require.Equal(t, ErrNoTransaction, err) - tx, err := postgresDB.BeginTx(context.Background()) + uow, err := postgresDB.StartUnitOfWork(context.Background()) require.NoError(t, err) - err = tx.WriteLockBlocksTable(context.Background()) + err = uow.WriteLockBlocksTable(context.Background()) require.NoError(t, err) - err = tx.Rollback() + err = uow.Rollback() require.NoError(t, err) - err = tx.Commit() + err = uow.Commit() require.Equal(t, ErrNoTransaction, err) }) } diff --git a/internal/blocktx/store/store.go b/internal/blocktx/store/store.go index 
042a29664..d6cfe1f72 100644 --- a/internal/blocktx/store/store.go +++ b/internal/blocktx/store/store.go @@ -51,12 +51,13 @@ type BlocktxStore interface { DelBlockProcessing(ctx context.Context, hash *chainhash.Hash, processedBy string) (int64, error) VerifyMerkleRoots(ctx context.Context, merkleRoots []*blocktx_api.MerkleRootVerificationRequest, maxAllowedBlockHeightMismatch int) (*blocktx_api.MerkleRootVerificationResponse, error) - BeginTx(ctx context.Context) (DbTransaction, error) Ping(ctx context.Context) error Close() error + + StartUnitOfWork(ctx context.Context) (UnitOfWork, error) } -type DbTransaction interface { +type UnitOfWork interface { Commit() error Rollback() error WriteLockBlocksTable(ctx context.Context) error diff --git a/internal/blocktx/store/store_mocks.go b/internal/blocktx/store/store_mocks.go index 10083b35b..746a61325 100644 --- a/internal/blocktx/store/store_mocks.go +++ b/internal/blocktx/store/store_mocks.go @@ -1,4 +1,4 @@ package store //go:generate moq -pkg mocks -out ./mocks/blocktx_store_mock.go . BlocktxStore -//go:generate moq -pkg mocks -out ./mocks/blocktx_db_tx_mock.go . DbTransaction +//go:generate moq -pkg mocks -out ./mocks/blocktx_db_tx_mock.go . UnitOfWork From c8c8fbdec09dbde24f02c446ce094d35e0b23acb Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Thu, 7 Nov 2024 15:26:20 +0100 Subject: [PATCH 22/36] chore(ARCO-199): rename mock in tests --- internal/blocktx/processor_test.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/internal/blocktx/processor_test.go b/internal/blocktx/processor_test.go index 3e4119175..8b1673d59 100644 --- a/internal/blocktx/processor_test.go +++ b/internal/blocktx/processor_test.go @@ -157,7 +157,7 @@ func TestHandleBlock(t *testing.T) { } var actualInsertedBlockTransactions [][]byte - uowMock := &storeMocks.DbTransactionMock{ + uowMock := &storeMocks.UnitOfWorkMock{ CommitFunc: func() error { return nil }, @@ -363,7 +363,7 @@ func TestHandleBlockReorgAndOrphans(t *testing.T) { shouldCheckUpdateStatuses := true comparingChainwork := true - uowMock := &storeMocks.DbTransactionMock{ + uowMock := &storeMocks.UnitOfWorkMock{ CommitFunc: func() error { return nil }, From 734b60abf59293775c384fab3bc287e7b9663426 Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Thu, 7 Nov 2024 16:10:27 +0100 Subject: [PATCH 23/36] feat(ARCO-199): improve UnitOfWork in blocktx store --- internal/blocktx/processor.go | 14 +- internal/blocktx/processor_test.go | 130 +- .../blocktx/store/mocks/blocktx_db_tx_mock.go | 1125 ++++++++++++++++- internal/blocktx/store/postgresql/postgres.go | 45 +- internal/blocktx/store/store.go | 1 + 5 files changed, 1215 insertions(+), 100 deletions(-) diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index 43202a274..122c8ceb8 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -801,7 +801,7 @@ func (p *Processor) updateOrphans(ctx context.Context, incomingBlock *blocktx_ap return nil, false, err } - orphanedBlocks, err := p.store.GetOrphanedChainUpFromHash(ctx, incomingBlock.Hash) + orphanedBlocks, err := uow.GetOrphanedChainUpFromHash(ctx, incomingBlock.Hash) if err != nil { return nil, false, err } @@ -826,7 +826,7 @@ func (p *Processor) updateOrphans(ctx context.Context, incomingBlock *blocktx_ap } } - err = p.store.UpdateBlocksStatuses(ctx, blockStatusUpdates) + err = uow.UpdateBlocksStatuses(ctx, blockStatusUpdates) if err != nil { return nil, false, err } @@ -862,7 +862,7 @@ func (p *Processor) performReorg(ctx context.Context, 
staleChainTip *blocktx_api return nil, err } - staleBlocks, err := p.store.GetStaleChainBackFromHash(ctx, staleChainTip.Hash) + staleBlocks, err := uow.GetStaleChainBackFromHash(ctx, staleChainTip.Hash) if err != nil { return nil, err } @@ -872,7 +872,7 @@ func (p *Processor) performReorg(ctx context.Context, staleChainTip *blocktx_api lowestHeight = getLowestHeight(staleBlocks) } - longestBlocks, err := p.store.GetLongestChainFromHeight(ctx, lowestHeight) + longestBlocks, err := uow.GetLongestChainFromHeight(ctx, lowestHeight) if err != nil { return nil, err } @@ -888,7 +888,7 @@ func (p *Processor) performReorg(ctx context.Context, staleChainTip *blocktx_api staleHashes[i] = b.Hash } - registeredTxs, err := p.store.GetRegisteredTxsByBlockHashes(ctx, append(staleHashes, longestHashes...)) + registeredTxs, err := uow.GetRegisteredTxsByBlockHashes(ctx, append(staleHashes, longestHashes...)) if err != nil { return nil, err } @@ -905,7 +905,7 @@ func (p *Processor) performReorg(ctx context.Context, staleChainTip *blocktx_api blockStatusUpdates[i] = update } - err = p.store.UpdateBlocksStatuses(ctx, blockStatusUpdates) + err = uow.UpdateBlocksStatuses(ctx, blockStatusUpdates) if err != nil { return nil, err } @@ -917,7 +917,7 @@ func (p *Processor) performReorg(ctx context.Context, staleChainTip *blocktx_api blockStatusUpdates = append(blockStatusUpdates, update) } - err = p.store.UpdateBlocksStatuses(ctx, blockStatusUpdates) + err = uow.UpdateBlocksStatuses(ctx, blockStatusUpdates) if err != nil { return nil, err } diff --git a/internal/blocktx/processor_test.go b/internal/blocktx/processor_test.go index 8b1673d59..e1ac6c51e 100644 --- a/internal/blocktx/processor_test.go +++ b/internal/blocktx/processor_test.go @@ -158,6 +158,9 @@ func TestHandleBlock(t *testing.T) { var actualInsertedBlockTransactions [][]byte uowMock := &storeMocks.UnitOfWorkMock{ + GetOrphanedChainUpFromHashFunc: func(_ context.Context, _ []byte) ([]*blocktx_api.Block, error) { + return nil, nil + }, CommitFunc: func() error { return nil }, @@ -187,9 +190,6 @@ func TestHandleBlock(t *testing.T) { UpsertBlockFunc: func(_ context.Context, _ *blocktx_api.Block) (uint64, error) { return 0, nil }, - GetOrphanedChainUpFromHashFunc: func(_ context.Context, _ []byte) ([]*blocktx_api.Block, error) { - return nil, nil - }, GetMinedTransactionsFunc: func(_ context.Context, _ [][]byte, _ bool) ([]store.TransactionBlock, error) { return nil, nil }, @@ -361,9 +361,50 @@ func TestHandleBlockReorgAndOrphans(t *testing.T) { shouldReturnNoBlock := !tc.blockAlreadyExists shouldCheckUpdateStatuses := true - comparingChainwork := true uowMock := &storeMocks.UnitOfWorkMock{ + GetOrphanedChainUpFromHashFunc: func(_ context.Context, _ []byte) ([]*blocktx_api.Block, error) { + if tc.shouldFindOrphanChain { + return []*blocktx_api.Block{ + { + Hash: []byte("123"), + Status: blocktx_api.Status_ORPHANED, + Chainwork: "123", + }, + orphanedChainTip, + }, nil + } + + return nil, nil + }, + UpdateBlocksStatusesFunc: func(_ context.Context, blockStatusUpdates []store.BlockStatusUpdate) error { + if shouldCheckUpdateStatuses && tc.shouldFindOrphanChain { + mtx.Lock() + shouldCheckUpdateStatuses = false + tipStatusUpdate := blockStatusUpdates[len(blockStatusUpdates)-1] + require.Equal(t, orphanedChainTip.Hash, tipStatusUpdate.Hash) + require.Equal(t, blocktx_api.Status_STALE, tipStatusUpdate.Status) + mtx.Unlock() + } + return nil + }, + GetStaleChainBackFromHashFunc: func(_ context.Context, hash []byte) ([]*blocktx_api.Block, error) { + // if this method 
is called from UnitOfwork, it means that reorg is happening + mtx.Lock() + insertedBlockStatus = blocktx_api.Status_LONGEST + if tc.shouldFindOrphanChain { + require.Equal(t, orphanedChainTip.Hash[:], hash) + orphanedChainTip.Status = blocktx_api.Status_LONGEST + } + mtx.Unlock() + return nil, nil + }, + GetLongestChainFromHeightFunc: func(_ context.Context, _ uint64) ([]*blocktx_api.Block, error) { + return nil, nil + }, + GetRegisteredTxsByBlockHashesFunc: func(_ context.Context, _ [][]byte) ([]store.TransactionBlock, error) { + return nil, nil + }, CommitFunc: func() error { return nil }, @@ -409,80 +450,39 @@ func TestHandleBlockReorgAndOrphans(t *testing.T) { mtx.Unlock() return 1, nil }, - GetOrphanedChainUpFromHashFunc: func(_ context.Context, _ []byte) ([]*blocktx_api.Block, error) { + GetStaleChainBackFromHashFunc: func(_ context.Context, hash []byte) ([]*blocktx_api.Block, error) { if tc.shouldFindOrphanChain { - return []*blocktx_api.Block{ - { - Hash: []byte("123"), - Status: blocktx_api.Status_ORPHANED, - Chainwork: "123", - }, - orphanedChainTip, - }, nil + require.Equal(t, orphanedChainTip.Hash, hash) + return []*blocktx_api.Block{orphanedChainTip}, nil } - - return nil, nil - }, - UpdateBlocksStatusesFunc: func(_ context.Context, blockStatusUpdates []store.BlockStatusUpdate) error { - if shouldCheckUpdateStatuses && tc.shouldFindOrphanChain { - mtx.Lock() - shouldCheckUpdateStatuses = false - tipStatusUpdate := blockStatusUpdates[len(blockStatusUpdates)-1] - require.Equal(t, orphanedChainTip.Hash, tipStatusUpdate.Hash) - require.Equal(t, blocktx_api.Status_STALE, tipStatusUpdate.Status) - mtx.Unlock() - } - return nil - }, - GetStaleChainBackFromHashFunc: func(_ context.Context, hash []byte) ([]*blocktx_api.Block, error) { - if comparingChainwork { - if tc.shouldFindOrphanChain { - require.Equal(t, orphanedChainTip.Hash, hash) - return []*blocktx_api.Block{orphanedChainTip}, nil - } - if tc.hasGreaterChainwork { - return []*blocktx_api.Block{ - { - Chainwork: "62209952899966", - }, - { - Chainwork: "42069", - }, - { - Chainwork: "42069", - }, - }, nil - } + if tc.hasGreaterChainwork { return []*blocktx_api.Block{ { Chainwork: "62209952899966", }, - }, nil - } - - // if we get to this point, it means that reorg is happening - mtx.Lock() - insertedBlockStatus = blocktx_api.Status_LONGEST - if tc.shouldFindOrphanChain { - require.Equal(t, orphanedChainTip.Hash[:], hash) - orphanedChainTip.Status = blocktx_api.Status_LONGEST - } - mtx.Unlock() - return nil, nil - }, - GetLongestChainFromHeightFunc: func(_ context.Context, _ uint64) ([]*blocktx_api.Block, error) { - if comparingChainwork { - comparingChainwork = false - return []*blocktx_api.Block{ { - Chainwork: "62209952899966", + Chainwork: "42069", }, { Chainwork: "42069", }, }, nil } - return nil, nil + return []*blocktx_api.Block{ + { + Chainwork: "62209952899966", + }, + }, nil + }, + GetLongestChainFromHeightFunc: func(_ context.Context, _ uint64) ([]*blocktx_api.Block, error) { + return []*blocktx_api.Block{ + { + Chainwork: "62209952899966", + }, + { + Chainwork: "42069", + }, + }, nil }, UpsertBlockTransactionsFunc: func(_ context.Context, _ uint64, _ []store.TxWithMerklePath) error { return nil diff --git a/internal/blocktx/store/mocks/blocktx_db_tx_mock.go b/internal/blocktx/store/mocks/blocktx_db_tx_mock.go index c68fcc7b3..f93c655c7 100644 --- a/internal/blocktx/store/mocks/blocktx_db_tx_mock.go +++ b/internal/blocktx/store/mocks/blocktx_db_tx_mock.go @@ -5,7 +5,9 @@ package mocks import ( "context" + 
"github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" "github.com/bitcoin-sv/arc/internal/blocktx/store" + "github.com/libsv/go-p2p/chaincfg/chainhash" "sync" ) @@ -19,12 +21,78 @@ var _ store.UnitOfWork = &UnitOfWorkMock{} // // // make and configure a mocked store.UnitOfWork // mockedUnitOfWork := &UnitOfWorkMock{ +// ClearBlocktxTableFunc: func(ctx context.Context, retentionDays int32, table string) (*blocktx_api.RowsAffectedResponse, error) { +// panic("mock out the ClearBlocktxTable method") +// }, +// CloseFunc: func() error { +// panic("mock out the Close method") +// }, // CommitFunc: func() error { // panic("mock out the Commit method") // }, +// DelBlockProcessingFunc: func(ctx context.Context, hash *chainhash.Hash, processedBy string) (int64, error) { +// panic("mock out the DelBlockProcessing method") +// }, +// GetBlockFunc: func(ctx context.Context, hash *chainhash.Hash) (*blocktx_api.Block, error) { +// panic("mock out the GetBlock method") +// }, +// GetBlockByHeightFunc: func(ctx context.Context, height uint64) (*blocktx_api.Block, error) { +// panic("mock out the GetBlockByHeight method") +// }, +// GetBlockGapsFunc: func(ctx context.Context, heightRange int) ([]*store.BlockGap, error) { +// panic("mock out the GetBlockGaps method") +// }, +// GetBlockHashesProcessingInProgressFunc: func(ctx context.Context, processedBy string) ([]*chainhash.Hash, error) { +// panic("mock out the GetBlockHashesProcessingInProgress method") +// }, +// GetChainTipFunc: func(ctx context.Context) (*blocktx_api.Block, error) { +// panic("mock out the GetChainTip method") +// }, +// GetLongestChainFromHeightFunc: func(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) { +// panic("mock out the GetLongestChainFromHeight method") +// }, +// GetMinedTransactionsFunc: func(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) { +// panic("mock out the GetMinedTransactions method") +// }, +// GetOrphanedChainUpFromHashFunc: func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { +// panic("mock out the GetOrphanedChainUpFromHash method") +// }, +// GetRegisteredTxsByBlockHashesFunc: func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { +// panic("mock out the GetRegisteredTxsByBlockHashes method") +// }, +// GetStaleChainBackFromHashFunc: func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { +// panic("mock out the GetStaleChainBackFromHash method") +// }, +// MarkBlockAsDoneFunc: func(ctx context.Context, hash *chainhash.Hash, size uint64, txCount uint64) error { +// panic("mock out the MarkBlockAsDone method") +// }, +// PingFunc: func(ctx context.Context) error { +// panic("mock out the Ping method") +// }, +// RegisterTransactionsFunc: func(ctx context.Context, txHashes [][]byte) ([]*chainhash.Hash, error) { +// panic("mock out the RegisterTransactions method") +// }, // RollbackFunc: func() error { // panic("mock out the Rollback method") // }, +// SetBlockProcessingFunc: func(ctx context.Context, hash *chainhash.Hash, processedBy string) (string, error) { +// panic("mock out the SetBlockProcessing method") +// }, +// StartUnitOfWorkFunc: func(ctx context.Context) (store.UnitOfWork, error) { +// panic("mock out the StartUnitOfWork method") +// }, +// UpdateBlocksStatusesFunc: func(ctx context.Context, blockStatusUpdates []store.BlockStatusUpdate) error { +// panic("mock out the UpdateBlocksStatuses method") +// }, +// UpsertBlockFunc: func(ctx context.Context, block 
*blocktx_api.Block) (uint64, error) { +// panic("mock out the UpsertBlock method") +// }, +// UpsertBlockTransactionsFunc: func(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) error { +// panic("mock out the UpsertBlockTransactions method") +// }, +// VerifyMerkleRootsFunc: func(ctx context.Context, merkleRoots []*blocktx_api.MerkleRootVerificationRequest, maxAllowedBlockHeightMismatch int) (*blocktx_api.MerkleRootVerificationResponse, error) { +// panic("mock out the VerifyMerkleRoots method") +// }, // WriteLockBlocksTableFunc: func(ctx context.Context) error { // panic("mock out the WriteLockBlocksTable method") // }, @@ -35,32 +103,347 @@ var _ store.UnitOfWork = &UnitOfWorkMock{} // // } type UnitOfWorkMock struct { + // ClearBlocktxTableFunc mocks the ClearBlocktxTable method. + ClearBlocktxTableFunc func(ctx context.Context, retentionDays int32, table string) (*blocktx_api.RowsAffectedResponse, error) + + // CloseFunc mocks the Close method. + CloseFunc func() error + // CommitFunc mocks the Commit method. CommitFunc func() error + // DelBlockProcessingFunc mocks the DelBlockProcessing method. + DelBlockProcessingFunc func(ctx context.Context, hash *chainhash.Hash, processedBy string) (int64, error) + + // GetBlockFunc mocks the GetBlock method. + GetBlockFunc func(ctx context.Context, hash *chainhash.Hash) (*blocktx_api.Block, error) + + // GetBlockByHeightFunc mocks the GetBlockByHeight method. + GetBlockByHeightFunc func(ctx context.Context, height uint64) (*blocktx_api.Block, error) + + // GetBlockGapsFunc mocks the GetBlockGaps method. + GetBlockGapsFunc func(ctx context.Context, heightRange int) ([]*store.BlockGap, error) + + // GetBlockHashesProcessingInProgressFunc mocks the GetBlockHashesProcessingInProgress method. + GetBlockHashesProcessingInProgressFunc func(ctx context.Context, processedBy string) ([]*chainhash.Hash, error) + + // GetChainTipFunc mocks the GetChainTip method. + GetChainTipFunc func(ctx context.Context) (*blocktx_api.Block, error) + + // GetLongestChainFromHeightFunc mocks the GetLongestChainFromHeight method. + GetLongestChainFromHeightFunc func(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) + + // GetMinedTransactionsFunc mocks the GetMinedTransactions method. + GetMinedTransactionsFunc func(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) + + // GetOrphanedChainUpFromHashFunc mocks the GetOrphanedChainUpFromHash method. + GetOrphanedChainUpFromHashFunc func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) + + // GetRegisteredTxsByBlockHashesFunc mocks the GetRegisteredTxsByBlockHashes method. + GetRegisteredTxsByBlockHashesFunc func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) + + // GetStaleChainBackFromHashFunc mocks the GetStaleChainBackFromHash method. + GetStaleChainBackFromHashFunc func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) + + // MarkBlockAsDoneFunc mocks the MarkBlockAsDone method. + MarkBlockAsDoneFunc func(ctx context.Context, hash *chainhash.Hash, size uint64, txCount uint64) error + + // PingFunc mocks the Ping method. + PingFunc func(ctx context.Context) error + + // RegisterTransactionsFunc mocks the RegisterTransactions method. + RegisterTransactionsFunc func(ctx context.Context, txHashes [][]byte) ([]*chainhash.Hash, error) + // RollbackFunc mocks the Rollback method. RollbackFunc func() error + // SetBlockProcessingFunc mocks the SetBlockProcessing method. 
+ SetBlockProcessingFunc func(ctx context.Context, hash *chainhash.Hash, processedBy string) (string, error) + + // StartUnitOfWorkFunc mocks the StartUnitOfWork method. + StartUnitOfWorkFunc func(ctx context.Context) (store.UnitOfWork, error) + + // UpdateBlocksStatusesFunc mocks the UpdateBlocksStatuses method. + UpdateBlocksStatusesFunc func(ctx context.Context, blockStatusUpdates []store.BlockStatusUpdate) error + + // UpsertBlockFunc mocks the UpsertBlock method. + UpsertBlockFunc func(ctx context.Context, block *blocktx_api.Block) (uint64, error) + + // UpsertBlockTransactionsFunc mocks the UpsertBlockTransactions method. + UpsertBlockTransactionsFunc func(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) error + + // VerifyMerkleRootsFunc mocks the VerifyMerkleRoots method. + VerifyMerkleRootsFunc func(ctx context.Context, merkleRoots []*blocktx_api.MerkleRootVerificationRequest, maxAllowedBlockHeightMismatch int) (*blocktx_api.MerkleRootVerificationResponse, error) + // WriteLockBlocksTableFunc mocks the WriteLockBlocksTable method. WriteLockBlocksTableFunc func(ctx context.Context) error // calls tracks calls to the methods. calls struct { + // ClearBlocktxTable holds details about calls to the ClearBlocktxTable method. + ClearBlocktxTable []struct { + // Ctx is the ctx argument value. + Ctx context.Context + // RetentionDays is the retentionDays argument value. + RetentionDays int32 + // Table is the table argument value. + Table string + } + // Close holds details about calls to the Close method. + Close []struct { + } // Commit holds details about calls to the Commit method. Commit []struct { } + // DelBlockProcessing holds details about calls to the DelBlockProcessing method. + DelBlockProcessing []struct { + // Ctx is the ctx argument value. + Ctx context.Context + // Hash is the hash argument value. + Hash *chainhash.Hash + // ProcessedBy is the processedBy argument value. + ProcessedBy string + } + // GetBlock holds details about calls to the GetBlock method. + GetBlock []struct { + // Ctx is the ctx argument value. + Ctx context.Context + // Hash is the hash argument value. + Hash *chainhash.Hash + } + // GetBlockByHeight holds details about calls to the GetBlockByHeight method. + GetBlockByHeight []struct { + // Ctx is the ctx argument value. + Ctx context.Context + // Height is the height argument value. + Height uint64 + } + // GetBlockGaps holds details about calls to the GetBlockGaps method. + GetBlockGaps []struct { + // Ctx is the ctx argument value. + Ctx context.Context + // HeightRange is the heightRange argument value. + HeightRange int + } + // GetBlockHashesProcessingInProgress holds details about calls to the GetBlockHashesProcessingInProgress method. + GetBlockHashesProcessingInProgress []struct { + // Ctx is the ctx argument value. + Ctx context.Context + // ProcessedBy is the processedBy argument value. + ProcessedBy string + } + // GetChainTip holds details about calls to the GetChainTip method. + GetChainTip []struct { + // Ctx is the ctx argument value. + Ctx context.Context + } + // GetLongestChainFromHeight holds details about calls to the GetLongestChainFromHeight method. + GetLongestChainFromHeight []struct { + // Ctx is the ctx argument value. + Ctx context.Context + // Height is the height argument value. + Height uint64 + } + // GetMinedTransactions holds details about calls to the GetMinedTransactions method. + GetMinedTransactions []struct { + // Ctx is the ctx argument value. 
+ Ctx context.Context + // Hashes is the hashes argument value. + Hashes [][]byte + // OnlyLongestChain is the onlyLongestChain argument value. + OnlyLongestChain bool + } + // GetOrphanedChainUpFromHash holds details about calls to the GetOrphanedChainUpFromHash method. + GetOrphanedChainUpFromHash []struct { + // Ctx is the ctx argument value. + Ctx context.Context + // Hash is the hash argument value. + Hash []byte + } + // GetRegisteredTxsByBlockHashes holds details about calls to the GetRegisteredTxsByBlockHashes method. + GetRegisteredTxsByBlockHashes []struct { + // Ctx is the ctx argument value. + Ctx context.Context + // BlockHashes is the blockHashes argument value. + BlockHashes [][]byte + } + // GetStaleChainBackFromHash holds details about calls to the GetStaleChainBackFromHash method. + GetStaleChainBackFromHash []struct { + // Ctx is the ctx argument value. + Ctx context.Context + // Hash is the hash argument value. + Hash []byte + } + // MarkBlockAsDone holds details about calls to the MarkBlockAsDone method. + MarkBlockAsDone []struct { + // Ctx is the ctx argument value. + Ctx context.Context + // Hash is the hash argument value. + Hash *chainhash.Hash + // Size is the size argument value. + Size uint64 + // TxCount is the txCount argument value. + TxCount uint64 + } + // Ping holds details about calls to the Ping method. + Ping []struct { + // Ctx is the ctx argument value. + Ctx context.Context + } + // RegisterTransactions holds details about calls to the RegisterTransactions method. + RegisterTransactions []struct { + // Ctx is the ctx argument value. + Ctx context.Context + // TxHashes is the txHashes argument value. + TxHashes [][]byte + } // Rollback holds details about calls to the Rollback method. Rollback []struct { } + // SetBlockProcessing holds details about calls to the SetBlockProcessing method. + SetBlockProcessing []struct { + // Ctx is the ctx argument value. + Ctx context.Context + // Hash is the hash argument value. + Hash *chainhash.Hash + // ProcessedBy is the processedBy argument value. + ProcessedBy string + } + // StartUnitOfWork holds details about calls to the StartUnitOfWork method. + StartUnitOfWork []struct { + // Ctx is the ctx argument value. + Ctx context.Context + } + // UpdateBlocksStatuses holds details about calls to the UpdateBlocksStatuses method. + UpdateBlocksStatuses []struct { + // Ctx is the ctx argument value. + Ctx context.Context + // BlockStatusUpdates is the blockStatusUpdates argument value. + BlockStatusUpdates []store.BlockStatusUpdate + } + // UpsertBlock holds details about calls to the UpsertBlock method. + UpsertBlock []struct { + // Ctx is the ctx argument value. + Ctx context.Context + // Block is the block argument value. + Block *blocktx_api.Block + } + // UpsertBlockTransactions holds details about calls to the UpsertBlockTransactions method. + UpsertBlockTransactions []struct { + // Ctx is the ctx argument value. + Ctx context.Context + // BlockID is the blockID argument value. + BlockID uint64 + // TxsWithMerklePaths is the txsWithMerklePaths argument value. + TxsWithMerklePaths []store.TxWithMerklePath + } + // VerifyMerkleRoots holds details about calls to the VerifyMerkleRoots method. + VerifyMerkleRoots []struct { + // Ctx is the ctx argument value. + Ctx context.Context + // MerkleRoots is the merkleRoots argument value. + MerkleRoots []*blocktx_api.MerkleRootVerificationRequest + // MaxAllowedBlockHeightMismatch is the maxAllowedBlockHeightMismatch argument value. 
+ MaxAllowedBlockHeightMismatch int + } // WriteLockBlocksTable holds details about calls to the WriteLockBlocksTable method. WriteLockBlocksTable []struct { // Ctx is the ctx argument value. Ctx context.Context } } - lockCommit sync.RWMutex - lockRollback sync.RWMutex - lockWriteLockBlocksTable sync.RWMutex + lockClearBlocktxTable sync.RWMutex + lockClose sync.RWMutex + lockCommit sync.RWMutex + lockDelBlockProcessing sync.RWMutex + lockGetBlock sync.RWMutex + lockGetBlockByHeight sync.RWMutex + lockGetBlockGaps sync.RWMutex + lockGetBlockHashesProcessingInProgress sync.RWMutex + lockGetChainTip sync.RWMutex + lockGetLongestChainFromHeight sync.RWMutex + lockGetMinedTransactions sync.RWMutex + lockGetOrphanedChainUpFromHash sync.RWMutex + lockGetRegisteredTxsByBlockHashes sync.RWMutex + lockGetStaleChainBackFromHash sync.RWMutex + lockMarkBlockAsDone sync.RWMutex + lockPing sync.RWMutex + lockRegisterTransactions sync.RWMutex + lockRollback sync.RWMutex + lockSetBlockProcessing sync.RWMutex + lockStartUnitOfWork sync.RWMutex + lockUpdateBlocksStatuses sync.RWMutex + lockUpsertBlock sync.RWMutex + lockUpsertBlockTransactions sync.RWMutex + lockVerifyMerkleRoots sync.RWMutex + lockWriteLockBlocksTable sync.RWMutex +} + +// ClearBlocktxTable calls ClearBlocktxTableFunc. +func (mock *UnitOfWorkMock) ClearBlocktxTable(ctx context.Context, retentionDays int32, table string) (*blocktx_api.RowsAffectedResponse, error) { + if mock.ClearBlocktxTableFunc == nil { + panic("UnitOfWorkMock.ClearBlocktxTableFunc: method is nil but UnitOfWork.ClearBlocktxTable was just called") + } + callInfo := struct { + Ctx context.Context + RetentionDays int32 + Table string + }{ + Ctx: ctx, + RetentionDays: retentionDays, + Table: table, + } + mock.lockClearBlocktxTable.Lock() + mock.calls.ClearBlocktxTable = append(mock.calls.ClearBlocktxTable, callInfo) + mock.lockClearBlocktxTable.Unlock() + return mock.ClearBlocktxTableFunc(ctx, retentionDays, table) +} + +// ClearBlocktxTableCalls gets all the calls that were made to ClearBlocktxTable. +// Check the length with: +// +// len(mockedUnitOfWork.ClearBlocktxTableCalls()) +func (mock *UnitOfWorkMock) ClearBlocktxTableCalls() []struct { + Ctx context.Context + RetentionDays int32 + Table string +} { + var calls []struct { + Ctx context.Context + RetentionDays int32 + Table string + } + mock.lockClearBlocktxTable.RLock() + calls = mock.calls.ClearBlocktxTable + mock.lockClearBlocktxTable.RUnlock() + return calls +} + +// Close calls CloseFunc. +func (mock *UnitOfWorkMock) Close() error { + if mock.CloseFunc == nil { + panic("UnitOfWorkMock.CloseFunc: method is nil but UnitOfWork.Close was just called") + } + callInfo := struct { + }{} + mock.lockClose.Lock() + mock.calls.Close = append(mock.calls.Close, callInfo) + mock.lockClose.Unlock() + return mock.CloseFunc() +} + +// CloseCalls gets all the calls that were made to Close. +// Check the length with: +// +// len(mockedUnitOfWork.CloseCalls()) +func (mock *UnitOfWorkMock) CloseCalls() []struct { +} { + var calls []struct { + } + mock.lockClose.RLock() + calls = mock.calls.Close + mock.lockClose.RUnlock() + return calls } // Commit calls CommitFunc. @@ -90,6 +473,518 @@ func (mock *UnitOfWorkMock) CommitCalls() []struct { return calls } +// DelBlockProcessing calls DelBlockProcessingFunc. 
+func (mock *UnitOfWorkMock) DelBlockProcessing(ctx context.Context, hash *chainhash.Hash, processedBy string) (int64, error) { + if mock.DelBlockProcessingFunc == nil { + panic("UnitOfWorkMock.DelBlockProcessingFunc: method is nil but UnitOfWork.DelBlockProcessing was just called") + } + callInfo := struct { + Ctx context.Context + Hash *chainhash.Hash + ProcessedBy string + }{ + Ctx: ctx, + Hash: hash, + ProcessedBy: processedBy, + } + mock.lockDelBlockProcessing.Lock() + mock.calls.DelBlockProcessing = append(mock.calls.DelBlockProcessing, callInfo) + mock.lockDelBlockProcessing.Unlock() + return mock.DelBlockProcessingFunc(ctx, hash, processedBy) +} + +// DelBlockProcessingCalls gets all the calls that were made to DelBlockProcessing. +// Check the length with: +// +// len(mockedUnitOfWork.DelBlockProcessingCalls()) +func (mock *UnitOfWorkMock) DelBlockProcessingCalls() []struct { + Ctx context.Context + Hash *chainhash.Hash + ProcessedBy string +} { + var calls []struct { + Ctx context.Context + Hash *chainhash.Hash + ProcessedBy string + } + mock.lockDelBlockProcessing.RLock() + calls = mock.calls.DelBlockProcessing + mock.lockDelBlockProcessing.RUnlock() + return calls +} + +// GetBlock calls GetBlockFunc. +func (mock *UnitOfWorkMock) GetBlock(ctx context.Context, hash *chainhash.Hash) (*blocktx_api.Block, error) { + if mock.GetBlockFunc == nil { + panic("UnitOfWorkMock.GetBlockFunc: method is nil but UnitOfWork.GetBlock was just called") + } + callInfo := struct { + Ctx context.Context + Hash *chainhash.Hash + }{ + Ctx: ctx, + Hash: hash, + } + mock.lockGetBlock.Lock() + mock.calls.GetBlock = append(mock.calls.GetBlock, callInfo) + mock.lockGetBlock.Unlock() + return mock.GetBlockFunc(ctx, hash) +} + +// GetBlockCalls gets all the calls that were made to GetBlock. +// Check the length with: +// +// len(mockedUnitOfWork.GetBlockCalls()) +func (mock *UnitOfWorkMock) GetBlockCalls() []struct { + Ctx context.Context + Hash *chainhash.Hash +} { + var calls []struct { + Ctx context.Context + Hash *chainhash.Hash + } + mock.lockGetBlock.RLock() + calls = mock.calls.GetBlock + mock.lockGetBlock.RUnlock() + return calls +} + +// GetBlockByHeight calls GetBlockByHeightFunc. +func (mock *UnitOfWorkMock) GetBlockByHeight(ctx context.Context, height uint64) (*blocktx_api.Block, error) { + if mock.GetBlockByHeightFunc == nil { + panic("UnitOfWorkMock.GetBlockByHeightFunc: method is nil but UnitOfWork.GetBlockByHeight was just called") + } + callInfo := struct { + Ctx context.Context + Height uint64 + }{ + Ctx: ctx, + Height: height, + } + mock.lockGetBlockByHeight.Lock() + mock.calls.GetBlockByHeight = append(mock.calls.GetBlockByHeight, callInfo) + mock.lockGetBlockByHeight.Unlock() + return mock.GetBlockByHeightFunc(ctx, height) +} + +// GetBlockByHeightCalls gets all the calls that were made to GetBlockByHeight. +// Check the length with: +// +// len(mockedUnitOfWork.GetBlockByHeightCalls()) +func (mock *UnitOfWorkMock) GetBlockByHeightCalls() []struct { + Ctx context.Context + Height uint64 +} { + var calls []struct { + Ctx context.Context + Height uint64 + } + mock.lockGetBlockByHeight.RLock() + calls = mock.calls.GetBlockByHeight + mock.lockGetBlockByHeight.RUnlock() + return calls +} + +// GetBlockGaps calls GetBlockGapsFunc. 
+func (mock *UnitOfWorkMock) GetBlockGaps(ctx context.Context, heightRange int) ([]*store.BlockGap, error) { + if mock.GetBlockGapsFunc == nil { + panic("UnitOfWorkMock.GetBlockGapsFunc: method is nil but UnitOfWork.GetBlockGaps was just called") + } + callInfo := struct { + Ctx context.Context + HeightRange int + }{ + Ctx: ctx, + HeightRange: heightRange, + } + mock.lockGetBlockGaps.Lock() + mock.calls.GetBlockGaps = append(mock.calls.GetBlockGaps, callInfo) + mock.lockGetBlockGaps.Unlock() + return mock.GetBlockGapsFunc(ctx, heightRange) +} + +// GetBlockGapsCalls gets all the calls that were made to GetBlockGaps. +// Check the length with: +// +// len(mockedUnitOfWork.GetBlockGapsCalls()) +func (mock *UnitOfWorkMock) GetBlockGapsCalls() []struct { + Ctx context.Context + HeightRange int +} { + var calls []struct { + Ctx context.Context + HeightRange int + } + mock.lockGetBlockGaps.RLock() + calls = mock.calls.GetBlockGaps + mock.lockGetBlockGaps.RUnlock() + return calls +} + +// GetBlockHashesProcessingInProgress calls GetBlockHashesProcessingInProgressFunc. +func (mock *UnitOfWorkMock) GetBlockHashesProcessingInProgress(ctx context.Context, processedBy string) ([]*chainhash.Hash, error) { + if mock.GetBlockHashesProcessingInProgressFunc == nil { + panic("UnitOfWorkMock.GetBlockHashesProcessingInProgressFunc: method is nil but UnitOfWork.GetBlockHashesProcessingInProgress was just called") + } + callInfo := struct { + Ctx context.Context + ProcessedBy string + }{ + Ctx: ctx, + ProcessedBy: processedBy, + } + mock.lockGetBlockHashesProcessingInProgress.Lock() + mock.calls.GetBlockHashesProcessingInProgress = append(mock.calls.GetBlockHashesProcessingInProgress, callInfo) + mock.lockGetBlockHashesProcessingInProgress.Unlock() + return mock.GetBlockHashesProcessingInProgressFunc(ctx, processedBy) +} + +// GetBlockHashesProcessingInProgressCalls gets all the calls that were made to GetBlockHashesProcessingInProgress. +// Check the length with: +// +// len(mockedUnitOfWork.GetBlockHashesProcessingInProgressCalls()) +func (mock *UnitOfWorkMock) GetBlockHashesProcessingInProgressCalls() []struct { + Ctx context.Context + ProcessedBy string +} { + var calls []struct { + Ctx context.Context + ProcessedBy string + } + mock.lockGetBlockHashesProcessingInProgress.RLock() + calls = mock.calls.GetBlockHashesProcessingInProgress + mock.lockGetBlockHashesProcessingInProgress.RUnlock() + return calls +} + +// GetChainTip calls GetChainTipFunc. +func (mock *UnitOfWorkMock) GetChainTip(ctx context.Context) (*blocktx_api.Block, error) { + if mock.GetChainTipFunc == nil { + panic("UnitOfWorkMock.GetChainTipFunc: method is nil but UnitOfWork.GetChainTip was just called") + } + callInfo := struct { + Ctx context.Context + }{ + Ctx: ctx, + } + mock.lockGetChainTip.Lock() + mock.calls.GetChainTip = append(mock.calls.GetChainTip, callInfo) + mock.lockGetChainTip.Unlock() + return mock.GetChainTipFunc(ctx) +} + +// GetChainTipCalls gets all the calls that were made to GetChainTip. +// Check the length with: +// +// len(mockedUnitOfWork.GetChainTipCalls()) +func (mock *UnitOfWorkMock) GetChainTipCalls() []struct { + Ctx context.Context +} { + var calls []struct { + Ctx context.Context + } + mock.lockGetChainTip.RLock() + calls = mock.calls.GetChainTip + mock.lockGetChainTip.RUnlock() + return calls +} + +// GetLongestChainFromHeight calls GetLongestChainFromHeightFunc. 
+func (mock *UnitOfWorkMock) GetLongestChainFromHeight(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) { + if mock.GetLongestChainFromHeightFunc == nil { + panic("UnitOfWorkMock.GetLongestChainFromHeightFunc: method is nil but UnitOfWork.GetLongestChainFromHeight was just called") + } + callInfo := struct { + Ctx context.Context + Height uint64 + }{ + Ctx: ctx, + Height: height, + } + mock.lockGetLongestChainFromHeight.Lock() + mock.calls.GetLongestChainFromHeight = append(mock.calls.GetLongestChainFromHeight, callInfo) + mock.lockGetLongestChainFromHeight.Unlock() + return mock.GetLongestChainFromHeightFunc(ctx, height) +} + +// GetLongestChainFromHeightCalls gets all the calls that were made to GetLongestChainFromHeight. +// Check the length with: +// +// len(mockedUnitOfWork.GetLongestChainFromHeightCalls()) +func (mock *UnitOfWorkMock) GetLongestChainFromHeightCalls() []struct { + Ctx context.Context + Height uint64 +} { + var calls []struct { + Ctx context.Context + Height uint64 + } + mock.lockGetLongestChainFromHeight.RLock() + calls = mock.calls.GetLongestChainFromHeight + mock.lockGetLongestChainFromHeight.RUnlock() + return calls +} + +// GetMinedTransactions calls GetMinedTransactionsFunc. +func (mock *UnitOfWorkMock) GetMinedTransactions(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) { + if mock.GetMinedTransactionsFunc == nil { + panic("UnitOfWorkMock.GetMinedTransactionsFunc: method is nil but UnitOfWork.GetMinedTransactions was just called") + } + callInfo := struct { + Ctx context.Context + Hashes [][]byte + OnlyLongestChain bool + }{ + Ctx: ctx, + Hashes: hashes, + OnlyLongestChain: onlyLongestChain, + } + mock.lockGetMinedTransactions.Lock() + mock.calls.GetMinedTransactions = append(mock.calls.GetMinedTransactions, callInfo) + mock.lockGetMinedTransactions.Unlock() + return mock.GetMinedTransactionsFunc(ctx, hashes, onlyLongestChain) +} + +// GetMinedTransactionsCalls gets all the calls that were made to GetMinedTransactions. +// Check the length with: +// +// len(mockedUnitOfWork.GetMinedTransactionsCalls()) +func (mock *UnitOfWorkMock) GetMinedTransactionsCalls() []struct { + Ctx context.Context + Hashes [][]byte + OnlyLongestChain bool +} { + var calls []struct { + Ctx context.Context + Hashes [][]byte + OnlyLongestChain bool + } + mock.lockGetMinedTransactions.RLock() + calls = mock.calls.GetMinedTransactions + mock.lockGetMinedTransactions.RUnlock() + return calls +} + +// GetOrphanedChainUpFromHash calls GetOrphanedChainUpFromHashFunc. +func (mock *UnitOfWorkMock) GetOrphanedChainUpFromHash(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { + if mock.GetOrphanedChainUpFromHashFunc == nil { + panic("UnitOfWorkMock.GetOrphanedChainUpFromHashFunc: method is nil but UnitOfWork.GetOrphanedChainUpFromHash was just called") + } + callInfo := struct { + Ctx context.Context + Hash []byte + }{ + Ctx: ctx, + Hash: hash, + } + mock.lockGetOrphanedChainUpFromHash.Lock() + mock.calls.GetOrphanedChainUpFromHash = append(mock.calls.GetOrphanedChainUpFromHash, callInfo) + mock.lockGetOrphanedChainUpFromHash.Unlock() + return mock.GetOrphanedChainUpFromHashFunc(ctx, hash) +} + +// GetOrphanedChainUpFromHashCalls gets all the calls that were made to GetOrphanedChainUpFromHash. 
+// Check the length with: +// +// len(mockedUnitOfWork.GetOrphanedChainUpFromHashCalls()) +func (mock *UnitOfWorkMock) GetOrphanedChainUpFromHashCalls() []struct { + Ctx context.Context + Hash []byte +} { + var calls []struct { + Ctx context.Context + Hash []byte + } + mock.lockGetOrphanedChainUpFromHash.RLock() + calls = mock.calls.GetOrphanedChainUpFromHash + mock.lockGetOrphanedChainUpFromHash.RUnlock() + return calls +} + +// GetRegisteredTxsByBlockHashes calls GetRegisteredTxsByBlockHashesFunc. +func (mock *UnitOfWorkMock) GetRegisteredTxsByBlockHashes(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { + if mock.GetRegisteredTxsByBlockHashesFunc == nil { + panic("UnitOfWorkMock.GetRegisteredTxsByBlockHashesFunc: method is nil but UnitOfWork.GetRegisteredTxsByBlockHashes was just called") + } + callInfo := struct { + Ctx context.Context + BlockHashes [][]byte + }{ + Ctx: ctx, + BlockHashes: blockHashes, + } + mock.lockGetRegisteredTxsByBlockHashes.Lock() + mock.calls.GetRegisteredTxsByBlockHashes = append(mock.calls.GetRegisteredTxsByBlockHashes, callInfo) + mock.lockGetRegisteredTxsByBlockHashes.Unlock() + return mock.GetRegisteredTxsByBlockHashesFunc(ctx, blockHashes) +} + +// GetRegisteredTxsByBlockHashesCalls gets all the calls that were made to GetRegisteredTxsByBlockHashes. +// Check the length with: +// +// len(mockedUnitOfWork.GetRegisteredTxsByBlockHashesCalls()) +func (mock *UnitOfWorkMock) GetRegisteredTxsByBlockHashesCalls() []struct { + Ctx context.Context + BlockHashes [][]byte +} { + var calls []struct { + Ctx context.Context + BlockHashes [][]byte + } + mock.lockGetRegisteredTxsByBlockHashes.RLock() + calls = mock.calls.GetRegisteredTxsByBlockHashes + mock.lockGetRegisteredTxsByBlockHashes.RUnlock() + return calls +} + +// GetStaleChainBackFromHash calls GetStaleChainBackFromHashFunc. +func (mock *UnitOfWorkMock) GetStaleChainBackFromHash(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { + if mock.GetStaleChainBackFromHashFunc == nil { + panic("UnitOfWorkMock.GetStaleChainBackFromHashFunc: method is nil but UnitOfWork.GetStaleChainBackFromHash was just called") + } + callInfo := struct { + Ctx context.Context + Hash []byte + }{ + Ctx: ctx, + Hash: hash, + } + mock.lockGetStaleChainBackFromHash.Lock() + mock.calls.GetStaleChainBackFromHash = append(mock.calls.GetStaleChainBackFromHash, callInfo) + mock.lockGetStaleChainBackFromHash.Unlock() + return mock.GetStaleChainBackFromHashFunc(ctx, hash) +} + +// GetStaleChainBackFromHashCalls gets all the calls that were made to GetStaleChainBackFromHash. +// Check the length with: +// +// len(mockedUnitOfWork.GetStaleChainBackFromHashCalls()) +func (mock *UnitOfWorkMock) GetStaleChainBackFromHashCalls() []struct { + Ctx context.Context + Hash []byte +} { + var calls []struct { + Ctx context.Context + Hash []byte + } + mock.lockGetStaleChainBackFromHash.RLock() + calls = mock.calls.GetStaleChainBackFromHash + mock.lockGetStaleChainBackFromHash.RUnlock() + return calls +} + +// MarkBlockAsDone calls MarkBlockAsDoneFunc. 
+func (mock *UnitOfWorkMock) MarkBlockAsDone(ctx context.Context, hash *chainhash.Hash, size uint64, txCount uint64) error { + if mock.MarkBlockAsDoneFunc == nil { + panic("UnitOfWorkMock.MarkBlockAsDoneFunc: method is nil but UnitOfWork.MarkBlockAsDone was just called") + } + callInfo := struct { + Ctx context.Context + Hash *chainhash.Hash + Size uint64 + TxCount uint64 + }{ + Ctx: ctx, + Hash: hash, + Size: size, + TxCount: txCount, + } + mock.lockMarkBlockAsDone.Lock() + mock.calls.MarkBlockAsDone = append(mock.calls.MarkBlockAsDone, callInfo) + mock.lockMarkBlockAsDone.Unlock() + return mock.MarkBlockAsDoneFunc(ctx, hash, size, txCount) +} + +// MarkBlockAsDoneCalls gets all the calls that were made to MarkBlockAsDone. +// Check the length with: +// +// len(mockedUnitOfWork.MarkBlockAsDoneCalls()) +func (mock *UnitOfWorkMock) MarkBlockAsDoneCalls() []struct { + Ctx context.Context + Hash *chainhash.Hash + Size uint64 + TxCount uint64 +} { + var calls []struct { + Ctx context.Context + Hash *chainhash.Hash + Size uint64 + TxCount uint64 + } + mock.lockMarkBlockAsDone.RLock() + calls = mock.calls.MarkBlockAsDone + mock.lockMarkBlockAsDone.RUnlock() + return calls +} + +// Ping calls PingFunc. +func (mock *UnitOfWorkMock) Ping(ctx context.Context) error { + if mock.PingFunc == nil { + panic("UnitOfWorkMock.PingFunc: method is nil but UnitOfWork.Ping was just called") + } + callInfo := struct { + Ctx context.Context + }{ + Ctx: ctx, + } + mock.lockPing.Lock() + mock.calls.Ping = append(mock.calls.Ping, callInfo) + mock.lockPing.Unlock() + return mock.PingFunc(ctx) +} + +// PingCalls gets all the calls that were made to Ping. +// Check the length with: +// +// len(mockedUnitOfWork.PingCalls()) +func (mock *UnitOfWorkMock) PingCalls() []struct { + Ctx context.Context +} { + var calls []struct { + Ctx context.Context + } + mock.lockPing.RLock() + calls = mock.calls.Ping + mock.lockPing.RUnlock() + return calls +} + +// RegisterTransactions calls RegisterTransactionsFunc. +func (mock *UnitOfWorkMock) RegisterTransactions(ctx context.Context, txHashes [][]byte) ([]*chainhash.Hash, error) { + if mock.RegisterTransactionsFunc == nil { + panic("UnitOfWorkMock.RegisterTransactionsFunc: method is nil but UnitOfWork.RegisterTransactions was just called") + } + callInfo := struct { + Ctx context.Context + TxHashes [][]byte + }{ + Ctx: ctx, + TxHashes: txHashes, + } + mock.lockRegisterTransactions.Lock() + mock.calls.RegisterTransactions = append(mock.calls.RegisterTransactions, callInfo) + mock.lockRegisterTransactions.Unlock() + return mock.RegisterTransactionsFunc(ctx, txHashes) +} + +// RegisterTransactionsCalls gets all the calls that were made to RegisterTransactions. +// Check the length with: +// +// len(mockedUnitOfWork.RegisterTransactionsCalls()) +func (mock *UnitOfWorkMock) RegisterTransactionsCalls() []struct { + Ctx context.Context + TxHashes [][]byte +} { + var calls []struct { + Ctx context.Context + TxHashes [][]byte + } + mock.lockRegisterTransactions.RLock() + calls = mock.calls.RegisterTransactions + mock.lockRegisterTransactions.RUnlock() + return calls +} + // Rollback calls RollbackFunc. func (mock *UnitOfWorkMock) Rollback() error { if mock.RollbackFunc == nil { @@ -117,6 +1012,230 @@ func (mock *UnitOfWorkMock) RollbackCalls() []struct { return calls } +// SetBlockProcessing calls SetBlockProcessingFunc. 
+func (mock *UnitOfWorkMock) SetBlockProcessing(ctx context.Context, hash *chainhash.Hash, processedBy string) (string, error) { + if mock.SetBlockProcessingFunc == nil { + panic("UnitOfWorkMock.SetBlockProcessingFunc: method is nil but UnitOfWork.SetBlockProcessing was just called") + } + callInfo := struct { + Ctx context.Context + Hash *chainhash.Hash + ProcessedBy string + }{ + Ctx: ctx, + Hash: hash, + ProcessedBy: processedBy, + } + mock.lockSetBlockProcessing.Lock() + mock.calls.SetBlockProcessing = append(mock.calls.SetBlockProcessing, callInfo) + mock.lockSetBlockProcessing.Unlock() + return mock.SetBlockProcessingFunc(ctx, hash, processedBy) +} + +// SetBlockProcessingCalls gets all the calls that were made to SetBlockProcessing. +// Check the length with: +// +// len(mockedUnitOfWork.SetBlockProcessingCalls()) +func (mock *UnitOfWorkMock) SetBlockProcessingCalls() []struct { + Ctx context.Context + Hash *chainhash.Hash + ProcessedBy string +} { + var calls []struct { + Ctx context.Context + Hash *chainhash.Hash + ProcessedBy string + } + mock.lockSetBlockProcessing.RLock() + calls = mock.calls.SetBlockProcessing + mock.lockSetBlockProcessing.RUnlock() + return calls +} + +// StartUnitOfWork calls StartUnitOfWorkFunc. +func (mock *UnitOfWorkMock) StartUnitOfWork(ctx context.Context) (store.UnitOfWork, error) { + if mock.StartUnitOfWorkFunc == nil { + panic("UnitOfWorkMock.StartUnitOfWorkFunc: method is nil but UnitOfWork.StartUnitOfWork was just called") + } + callInfo := struct { + Ctx context.Context + }{ + Ctx: ctx, + } + mock.lockStartUnitOfWork.Lock() + mock.calls.StartUnitOfWork = append(mock.calls.StartUnitOfWork, callInfo) + mock.lockStartUnitOfWork.Unlock() + return mock.StartUnitOfWorkFunc(ctx) +} + +// StartUnitOfWorkCalls gets all the calls that were made to StartUnitOfWork. +// Check the length with: +// +// len(mockedUnitOfWork.StartUnitOfWorkCalls()) +func (mock *UnitOfWorkMock) StartUnitOfWorkCalls() []struct { + Ctx context.Context +} { + var calls []struct { + Ctx context.Context + } + mock.lockStartUnitOfWork.RLock() + calls = mock.calls.StartUnitOfWork + mock.lockStartUnitOfWork.RUnlock() + return calls +} + +// UpdateBlocksStatuses calls UpdateBlocksStatusesFunc. +func (mock *UnitOfWorkMock) UpdateBlocksStatuses(ctx context.Context, blockStatusUpdates []store.BlockStatusUpdate) error { + if mock.UpdateBlocksStatusesFunc == nil { + panic("UnitOfWorkMock.UpdateBlocksStatusesFunc: method is nil but UnitOfWork.UpdateBlocksStatuses was just called") + } + callInfo := struct { + Ctx context.Context + BlockStatusUpdates []store.BlockStatusUpdate + }{ + Ctx: ctx, + BlockStatusUpdates: blockStatusUpdates, + } + mock.lockUpdateBlocksStatuses.Lock() + mock.calls.UpdateBlocksStatuses = append(mock.calls.UpdateBlocksStatuses, callInfo) + mock.lockUpdateBlocksStatuses.Unlock() + return mock.UpdateBlocksStatusesFunc(ctx, blockStatusUpdates) +} + +// UpdateBlocksStatusesCalls gets all the calls that were made to UpdateBlocksStatuses. +// Check the length with: +// +// len(mockedUnitOfWork.UpdateBlocksStatusesCalls()) +func (mock *UnitOfWorkMock) UpdateBlocksStatusesCalls() []struct { + Ctx context.Context + BlockStatusUpdates []store.BlockStatusUpdate +} { + var calls []struct { + Ctx context.Context + BlockStatusUpdates []store.BlockStatusUpdate + } + mock.lockUpdateBlocksStatuses.RLock() + calls = mock.calls.UpdateBlocksStatuses + mock.lockUpdateBlocksStatuses.RUnlock() + return calls +} + +// UpsertBlock calls UpsertBlockFunc. 
+func (mock *UnitOfWorkMock) UpsertBlock(ctx context.Context, block *blocktx_api.Block) (uint64, error) { + if mock.UpsertBlockFunc == nil { + panic("UnitOfWorkMock.UpsertBlockFunc: method is nil but UnitOfWork.UpsertBlock was just called") + } + callInfo := struct { + Ctx context.Context + Block *blocktx_api.Block + }{ + Ctx: ctx, + Block: block, + } + mock.lockUpsertBlock.Lock() + mock.calls.UpsertBlock = append(mock.calls.UpsertBlock, callInfo) + mock.lockUpsertBlock.Unlock() + return mock.UpsertBlockFunc(ctx, block) +} + +// UpsertBlockCalls gets all the calls that were made to UpsertBlock. +// Check the length with: +// +// len(mockedUnitOfWork.UpsertBlockCalls()) +func (mock *UnitOfWorkMock) UpsertBlockCalls() []struct { + Ctx context.Context + Block *blocktx_api.Block +} { + var calls []struct { + Ctx context.Context + Block *blocktx_api.Block + } + mock.lockUpsertBlock.RLock() + calls = mock.calls.UpsertBlock + mock.lockUpsertBlock.RUnlock() + return calls +} + +// UpsertBlockTransactions calls UpsertBlockTransactionsFunc. +func (mock *UnitOfWorkMock) UpsertBlockTransactions(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) error { + if mock.UpsertBlockTransactionsFunc == nil { + panic("UnitOfWorkMock.UpsertBlockTransactionsFunc: method is nil but UnitOfWork.UpsertBlockTransactions was just called") + } + callInfo := struct { + Ctx context.Context + BlockID uint64 + TxsWithMerklePaths []store.TxWithMerklePath + }{ + Ctx: ctx, + BlockID: blockID, + TxsWithMerklePaths: txsWithMerklePaths, + } + mock.lockUpsertBlockTransactions.Lock() + mock.calls.UpsertBlockTransactions = append(mock.calls.UpsertBlockTransactions, callInfo) + mock.lockUpsertBlockTransactions.Unlock() + return mock.UpsertBlockTransactionsFunc(ctx, blockID, txsWithMerklePaths) +} + +// UpsertBlockTransactionsCalls gets all the calls that were made to UpsertBlockTransactions. +// Check the length with: +// +// len(mockedUnitOfWork.UpsertBlockTransactionsCalls()) +func (mock *UnitOfWorkMock) UpsertBlockTransactionsCalls() []struct { + Ctx context.Context + BlockID uint64 + TxsWithMerklePaths []store.TxWithMerklePath +} { + var calls []struct { + Ctx context.Context + BlockID uint64 + TxsWithMerklePaths []store.TxWithMerklePath + } + mock.lockUpsertBlockTransactions.RLock() + calls = mock.calls.UpsertBlockTransactions + mock.lockUpsertBlockTransactions.RUnlock() + return calls +} + +// VerifyMerkleRoots calls VerifyMerkleRootsFunc. +func (mock *UnitOfWorkMock) VerifyMerkleRoots(ctx context.Context, merkleRoots []*blocktx_api.MerkleRootVerificationRequest, maxAllowedBlockHeightMismatch int) (*blocktx_api.MerkleRootVerificationResponse, error) { + if mock.VerifyMerkleRootsFunc == nil { + panic("UnitOfWorkMock.VerifyMerkleRootsFunc: method is nil but UnitOfWork.VerifyMerkleRoots was just called") + } + callInfo := struct { + Ctx context.Context + MerkleRoots []*blocktx_api.MerkleRootVerificationRequest + MaxAllowedBlockHeightMismatch int + }{ + Ctx: ctx, + MerkleRoots: merkleRoots, + MaxAllowedBlockHeightMismatch: maxAllowedBlockHeightMismatch, + } + mock.lockVerifyMerkleRoots.Lock() + mock.calls.VerifyMerkleRoots = append(mock.calls.VerifyMerkleRoots, callInfo) + mock.lockVerifyMerkleRoots.Unlock() + return mock.VerifyMerkleRootsFunc(ctx, merkleRoots, maxAllowedBlockHeightMismatch) +} + +// VerifyMerkleRootsCalls gets all the calls that were made to VerifyMerkleRoots. 
+// Check the length with: +// +// len(mockedUnitOfWork.VerifyMerkleRootsCalls()) +func (mock *UnitOfWorkMock) VerifyMerkleRootsCalls() []struct { + Ctx context.Context + MerkleRoots []*blocktx_api.MerkleRootVerificationRequest + MaxAllowedBlockHeightMismatch int +} { + var calls []struct { + Ctx context.Context + MerkleRoots []*blocktx_api.MerkleRootVerificationRequest + MaxAllowedBlockHeightMismatch int + } + mock.lockVerifyMerkleRoots.RLock() + calls = mock.calls.VerifyMerkleRoots + mock.lockVerifyMerkleRoots.RUnlock() + return calls +} + // WriteLockBlocksTable calls WriteLockBlocksTableFunc. func (mock *UnitOfWorkMock) WriteLockBlocksTable(ctx context.Context) error { if mock.WriteLockBlocksTableFunc == nil { diff --git a/internal/blocktx/store/postgresql/postgres.go b/internal/blocktx/store/postgresql/postgres.go index 1a320b300..e73d94b2a 100644 --- a/internal/blocktx/store/postgresql/postgres.go +++ b/internal/blocktx/store/postgresql/postgres.go @@ -73,13 +73,12 @@ func New(dbInfo string, idleConns int, maxOpenConns int, opts ...func(postgreSQL p := &PostgreSQL{ _db: db, + db: db, now: time.Now, maxPostgresBulkInsertRows: maxPostgresBulkInsertRows, dbInfo: dbInfo, } - p.db = p._db - for _, opt := range opts { opt(p) } @@ -101,46 +100,42 @@ func (p *PostgreSQL) Ping(ctx context.Context) error { } func (p *PostgreSQL) StartUnitOfWork(ctx context.Context) (store.UnitOfWork, error) { - // This will create a clone of the store and start a transaction - // to avoid messing with the state of the main singleton store - cloneDB, err := sql.Open(postgresDriverName, p.dbInfo) + tx, err := p._db.BeginTx(ctx, nil) if err != nil { - return nil, errors.Join(store.ErrFailedToOpenDB, err) + return nil, err } + // This will create a clone of the store and use the transaction created + // above to avoid messing with the state of the main singleton store cloneStore := &PostgreSQL{ - _db: cloneDB, + _tx: tx, + db: tx, now: time.Now, maxPostgresBulkInsertRows: maxPostgresBulkInsertRows, tracingEnabled: p.tracingEnabled, tracingAttributes: p.tracingAttributes, } - tx, err := cloneStore._db.BeginTx(ctx, nil) - if err != nil { - return nil, err - } - - cloneStore._tx = tx - cloneStore.db = cloneStore._tx - return cloneStore, nil } // UnitOfWork methods below -func (uow *PostgreSQL) Commit() error { - uow.db = uow._db - return uow._tx.Commit() +func (p *PostgreSQL) Commit() error { + if p._tx == nil { + return ErrNoTransaction + } + return p._tx.Commit() } -func (uow *PostgreSQL) Rollback() error { - uow.db = uow._db - return uow._tx.Rollback() +func (p *PostgreSQL) Rollback() error { + if p._tx == nil { + return ErrNoTransaction + } + return p._tx.Rollback() } -func (uow *PostgreSQL) WriteLockBlocksTable(ctx context.Context) error { - tx, ok := uow.db.(*sql.Tx) - if !ok { +func (p *PostgreSQL) WriteLockBlocksTable(ctx context.Context) error { + if p._tx == nil { return ErrNoTransaction } @@ -151,6 +146,6 @@ func (uow *PostgreSQL) WriteLockBlocksTable(ctx context.Context) error { // is released. // // Reading from the table is still allowed. 
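For context, a minimal sketch of the calling pattern this transaction-backed unit of work is designed for: begin, write-lock the blocks table, apply the updates, commit, with a deferred rollback as the failure path. The helper name below is illustrative, not part of the patch.

package blocktx // illustrative sketch, not part of the patch

import (
	"context"

	"github.com/bitcoin-sv/arc/internal/blocktx/store"
)

// updateStatusesAtomically shows the intended
// StartUnitOfWork -> WriteLockBlocksTable -> work -> Commit sequence.
func updateStatusesAtomically(ctx context.Context, s store.BlocktxStore, updates []store.BlockStatusUpdate) error {
	uow, err := s.StartUnitOfWork(ctx) // BEGIN on the shared *sql.DB
	if err != nil {
		return err
	}
	// Rolling back after a successful Commit only returns an error, which is ignored.
	defer func() { _ = uow.Rollback() }()

	// Blocks concurrent writers of blocktx.blocks while still allowing reads.
	if err = uow.WriteLockBlocksTable(ctx); err != nil {
		return err
	}

	if err = uow.UpdateBlocksStatuses(ctx, updates); err != nil {
		return err
	}

	return uow.Commit()
}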
- _, err := tx.ExecContext(ctx, "LOCK TABLE blocktx.blocks IN EXCLUSIVE MODE") + _, err := p._tx.ExecContext(ctx, "LOCK TABLE blocktx.blocks IN EXCLUSIVE MODE") return err } diff --git a/internal/blocktx/store/store.go b/internal/blocktx/store/store.go index d6cfe1f72..dd30b8f44 100644 --- a/internal/blocktx/store/store.go +++ b/internal/blocktx/store/store.go @@ -58,6 +58,7 @@ type BlocktxStore interface { } type UnitOfWork interface { + BlocktxStore Commit() error Rollback() error WriteLockBlocksTable(ctx context.Context) error From d76bb00061cf91608b207a8dbcb92577e9bddcd0 Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Thu, 7 Nov 2024 16:26:38 +0100 Subject: [PATCH 24/36] feat(ARCO-199): remove publishing to metamorh for incoming STALE blocks --- .../reorg_integration_test.go | 27 +++-------- internal/blocktx/processor.go | 45 ------------------- 2 files changed, 6 insertions(+), 66 deletions(-) diff --git a/internal/blocktx/integration_test/reorg_integration_test.go b/internal/blocktx/integration_test/reorg_integration_test.go index aa38e7c16..42bd38fe9 100644 --- a/internal/blocktx/integration_test/reorg_integration_test.go +++ b/internal/blocktx/integration_test/reorg_integration_test.go @@ -169,22 +169,20 @@ func TestReorg(t *testing.T) { processor.StartBlockProcessing() testHandleBlockOnEmptyDatabase(t, p2pMsgHandler, blocktxStore) - publishedTxs = publishedTxs[:0] // clear slice for the next test // only load fixtures at this point testutils.LoadFixtures(t, dbConn, "fixtures") - expectedTxs := testHandleStaleBlock(t, p2pMsgHandler, blocktxStore) - // verify the transaction was correctly published to metamorph - verifyTxs(t, expectedTxs, publishedTxs) - // clear slice for the next test - publishedTxs = publishedTxs[:0] + testHandleStaleBlock(t, p2pMsgHandler, blocktxStore) + // verify the no transaction was published to metamorph + require.Len(t, publishedTxs, 0) - expectedTxs = testHandleReorg(t, p2pMsgHandler, blocktxStore) + expectedTxs := testHandleReorg(t, p2pMsgHandler, blocktxStore) verifyTxs(t, expectedTxs, publishedTxs) publishedTxs = publishedTxs[:0] testHandleStaleOrphans(t, p2pMsgHandler, blocktxStore) + require.Len(t, publishedTxs, 0) expectedTxs = testHandleOrphansReorg(t, p2pMsgHandler, blocktxStore) verifyTxs(t, expectedTxs, publishedTxs) @@ -218,7 +216,7 @@ func testHandleBlockOnEmptyDatabase(t *testing.T, p2pMsgHandler *blocktx_p2p.Msg verifyBlock(t, store, blockHash822011, 822011, blocktx_api.Status_LONGEST) } -func testHandleStaleBlock(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store *postgresql.PostgreSQL) []*blocktx_api.TransactionBlock { +func testHandleStaleBlock(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store *postgresql.PostgreSQL) { prevBlockHash := testutils.RevChainhash(t, blockHash822014StartOfChain) txHash := testutils.RevChainhash(t, txhash822015) txHash2 := testutils.RevChainhash(t, txhash822015Competing) // should not be published - is already in the longest chain @@ -236,25 +234,12 @@ func testHandleStaleBlock(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, s Height: uint64(822015), // competing block already exists at this height TransactionHashes: []*chainhash.Hash{txHash, txHash2}, } - blockHash := blockMessage.Header.BlockHash() p2pMsgHandler.OnReceive(blockMessage, nil) // Allow DB to process the block time.Sleep(200 * time.Millisecond) verifyBlock(t, store, blockHash822015Fork, 822015, blocktx_api.Status_STALE) - - // transactions expected to be published to metamorph - expectedTxs := 
[]*blocktx_api.TransactionBlock{ - { - BlockHash: blockHash[:], - BlockHeight: 822015, - TransactionHash: txHash[:], - BlockStatus: blocktx_api.Status_STALE, - }, - } - - return expectedTxs } func testHandleReorg(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store *postgresql.PostgreSQL) []*blocktx_api.TransactionBlock { diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index 122c8ceb8..a13e3302a 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -517,12 +517,6 @@ func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { p.logger.Error("unable to perform reorg", slog.String("hash", blockHash.String()), slog.Uint64("height", blockHeight), slog.String("err", err.Error())) return err } - } else if chainTip.Status == blocktx_api.Status_STALE { - txsToPublish, err = p.getStaleTxs(ctx, chain) - if err != nil { - p.logger.Error("unable to get stale transactions", slog.String("hash", blockHash.String()), slog.Uint64("height", blockHeight), slog.String("err", err.Error())) - return err - } } else if chainTip.Status == blocktx_api.Status_LONGEST { txsToPublish, err = p.store.GetRegisteredTxsByBlockHashes(ctx, chain.getHashes()) if err != nil { @@ -958,45 +952,6 @@ func (p *Processor) performReorg(ctx context.Context, staleChainTip *blocktx_api return txsToPublish, nil } -// getStaleTxs returns all transactions from given STALE blocks that are not in the longest chain -func (p *Processor) getStaleTxs(ctx context.Context, staleChain chain) ([]store.TransactionBlock, error) { - // 1. Find registered txs from given STALE blocks - // 2. Check for those transactions in the longest chain - // 3. Return only those registered txs from the STALE blocks that are not found in the longest chain - - registeredTxs, err := p.store.GetRegisteredTxsByBlockHashes(ctx, staleChain.getHashes()) - if err != nil { - return nil, err - } - - registeredHashes := make([][]byte, len(registeredTxs)) - for i, tx := range registeredTxs { - registeredHashes[i] = tx.TxHash - } - - minedTxs, err := p.store.GetMinedTransactions(ctx, registeredHashes, true) - if err != nil { - return nil, err - } - - minedTxsMap := make(map[string]bool) - for _, tx := range minedTxs { - minedTxsMap[string(tx.TxHash)] = true - } - - staleTxs := make([]store.TransactionBlock, 0) - - for _, tx := range registeredTxs { - if minedTxsMap[string(tx.TxHash)] { - continue - } - - staleTxs = append(staleTxs, tx) - } - - return staleTxs, nil -} - func (p *Processor) Shutdown() { p.cancelAll() p.waitGroup.Wait() From 2d085c81cd9387ae885cd0aa84452e38dc899fe6 Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Fri, 8 Nov 2024 12:34:02 +0100 Subject: [PATCH 25/36] chore(ARCO-199): ignore a gosec warning intentionally --- internal/blocktx/processor.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index a13e3302a..dd2d809c9 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -735,7 +735,7 @@ func (p *Processor) storeTransactions(ctx context.Context, blockID uint64, block break } - bump, err := bc.NewBUMPFromMerkleTreeAndIndex(block.Height, merkleTree, uint64(txIndex)) // NOSONAR + bump, err := bc.NewBUMPFromMerkleTreeAndIndex(block.Height, merkleTree, uint64(txIndex)) // #nosec G115 if err != nil { return errors.Join(ErrFailedToCreateBUMP, fmt.Errorf("tx hash %s, block height: %d", hash.String(), block.Height), err) } From f591c12d2690b22c4ef756161534ec43424ce868 Mon Sep 17 
00:00:00 2001 From: Kuba <127198012+kuba-4chain@users.noreply.github.com> Date: Wed, 20 Nov 2024 16:41:18 +0100 Subject: [PATCH 26/36] feat(reorg): simplification (#651) --- internal/blocktx/chain.go | 29 - .../fixtures/blocktx.blocks.yaml | 24 +- .../reorg_integration_test.go | 45 +- internal/blocktx/processor.go | 471 +++--- internal/blocktx/processor_helpers.go | 69 +- internal/blocktx/processor_helpers_test.go | 43 +- internal/blocktx/processor_test.go | 204 +-- .../blocktx/store/mocks/blocktx_db_tx_mock.go | 1269 ----------------- .../blocktx/store/mocks/blocktx_store_mock.go | 186 +-- .../get_orphaned_chain/blocktx.blocks.yaml | 41 +- .../blocktx/store/postgresql/get_block.go | 2 +- .../store/postgresql/get_orphaned_chain.go | 38 +- .../store/postgresql/get_stale_chain.go | 1 + internal/blocktx/store/postgresql/postgres.go | 71 +- .../blocktx/store/postgresql/postgres_test.go | 55 +- .../store/postgresql/update_block_statuses.go | 27 +- internal/blocktx/store/store.go | 13 +- internal/blocktx/store/store_mocks.go | 1 - 18 files changed, 534 insertions(+), 2055 deletions(-) delete mode 100644 internal/blocktx/chain.go delete mode 100644 internal/blocktx/store/mocks/blocktx_db_tx_mock.go diff --git a/internal/blocktx/chain.go b/internal/blocktx/chain.go deleted file mode 100644 index 3bce4e301..000000000 --- a/internal/blocktx/chain.go +++ /dev/null @@ -1,29 +0,0 @@ -package blocktx - -import ( - "errors" - - "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" -) - -var ErrEmptyChain = errors.New("empty chain of blocks") - -type chain []*blocktx_api.Block - -func (c chain) getTip() (*blocktx_api.Block, error) { - if len(c) == 0 { - return nil, ErrEmptyChain - } - - return c[len(c)-1], nil -} - -func (c chain) getHashes() [][]byte { - hashes := make([][]byte, len(c)) - - for i, b := range c { - hashes[i] = b.Hash - } - - return hashes -} diff --git a/internal/blocktx/integration_test/fixtures/blocktx.blocks.yaml b/internal/blocktx/integration_test/fixtures/blocktx.blocks.yaml index 920da9cbc..1e913f285 100644 --- a/internal/blocktx/integration_test/fixtures/blocktx.blocks.yaml +++ b/internal/blocktx/integration_test/fixtures/blocktx.blocks.yaml @@ -41,20 +41,31 @@ merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 height: 822017 size: 8630000 + processed_at: 2023-12-15 14:40:00 tx_count: 36724 status: 10 is_longest: true chainwork: '62209952899966' - -# gap - +- inserted_at: 2023-12-15 14:50:00 + id: 10052 + hash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde + prevhash: 0x76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000 + merkleroot: 0xde0753d9ce6f92e340843cbfdd11e58beff8c578956ecdec4c461b018a26b8a9 + height: 822018 + size: 8630000 + processed_at: 2023-12-15 14:40:00 + tx_count: 36724 + status: 30 # ORPHANED + is_longest: false + chainwork: '62209952899966' - inserted_at: 2023-12-15 14:50:00 id: 1005 hash: 0x00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656 - prevhash: 0x212a7598a62295f1a520ef525a34f657bc636d9da9bda74acdf6f051cd84c353 + prevhash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde merkleroot: 0x51f33784f6d54f1d6414fa4d8b8d6904215cb16a3fa0a8b1fe02e456a90544d4 height: 822019 size: 8630000 + processed_at: 2023-12-15 14:40:00 tx_count: 36724 status: 30 # ORPHANED is_longest: false @@ -66,6 +77,7 @@ merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 height: 822020 size: 8630000 + processed_at: 2023-12-15 14:40:00 tx_count: 36724 status: 30 # 
ORPHANED is_longest: false @@ -76,10 +88,11 @@ - inserted_at: 2023-12-15 14:50:00 id: 1007 hash: 0x0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c - prevhash: 0x743c7dc491ae5fddd37ebf63058f9574b4db9f6a89f483a4baec31820e5df61d + prevhash: 0xd46bf0a189927b62c8ff785d393a545093ca01af159aed771a8d94749f06c060 merkleroot: 0xda71199f8ed9203d8a765595e6c030a22e5ed8330b1abb467a82c97d7d21d512 height: 822022 size: 8630000 + processed_at: 2023-12-15 14:40:00 tx_count: 36724 status: 30 # ORPHANED is_longest: false @@ -91,6 +104,7 @@ merkleroot: 0x8e3177a33d6a87785b7104f20ca345e1713ae11ec2723a41028efddabebb861b height: 822023 size: 8630000 + processed_at: 2023-12-15 14:40:00 tx_count: 36724 status: 30 # ORPHANED is_longest: false diff --git a/internal/blocktx/integration_test/reorg_integration_test.go b/internal/blocktx/integration_test/reorg_integration_test.go index 42bd38fe9..56cb848e2 100644 --- a/internal/blocktx/integration_test/reorg_integration_test.go +++ b/internal/blocktx/integration_test/reorg_integration_test.go @@ -7,10 +7,10 @@ package integrationtest // Message queue sending txs to metamorph - mocked // // Flow of this test: -// 1. Blocks at heights 822014-822017, 822019-822020 and 822022-822023 are added to db from fixtures +// 1. Blocks at heights 822014-822017 (LONGEST), 822018-822020 (ORPHANED) and 822022-822023 (ORPHANED) are added to db from fixtures // 2. A hardcoded msg with competing block at height 822015 is being sent through the mocked PeerHandler // 3. This block has a chainwork lower than the current tip of chain - becomes STALE -// 4. Registered transactions from this block that are not in the longest chain are published to metamorph message queue with blockstatus = STALE +// 4. Registered transactions from this block are ignored // 5. Next competing block, at height 822016 is being send through the mocked PeerHandler // 6. This block has a greater chainwork than the current tip of longest chain - it becomes LONGEST despite not being the highest // 7. Verification of reorg - checking if statuses are correctly switched @@ -18,13 +18,13 @@ package integrationtest // - transactions from the stale chain becoming the longest are published // - transactions that were previously in the longest chain are published with udpated block data // - transactions that were previously in the longest chain, but are not in the stale chain are published with blockstatus = STALE -// 9. A new block at height 822018 is being sent through the mocked PeerHandler -// 10. This block is extending the previously LONGEST but now STALE chain and finds orphaned chain at heights 822019, 822020 -// 11. The tip of the orphaned chain does not have a greater chainwork than the current longest chain - entire orphaned chain becomes STALE -// 12. A new block at height 822021 is being sent through the mocked PeerHandler -// 13. This block extends the STALE chain and finds orphaned chain at height 822022, 822023 -// 14. The tip of the orphaned chain has a greater chainwork than the current tip of longest chain -// - entire STALE chain at heights 822015 - 822023 becomes LONGEST +// 9. A new block at height 822021 is being sent through the mocked PeerHandler +// 10. This block is extending the orphaned chain and finds that it's connected to the stale chain - orphans get updated to STALE +// 11. The new stale chain does not have a greater chainwork than the current longest chain - entire orphaned chain becomes STALE +// 12. 
A new block at height 822024 is being sent through the mocked PeerHandler +// 13. This block extends the orphaned chain and finds that it's connected to the stale chain - orphans get updated to STALE +// 14. The new stale chain has a greater chainwork than the current longest chain +// - entire STALE chain at heights 822015 - 822024 becomes LONGEST // - entire LONGEST chain at height 822015 - 822016 becomes STALE // 15. Verification of reorg - checking if statuses are correctly switched (for blocks and for transactions) @@ -114,12 +114,13 @@ const ( blockHash822015Fork = "82471bbf045ab13825a245b37de71d77ec12513b37e2524ec11551d18c19f7c3" blockHash822016Fork = "032c3688bc7536b2d787f3a196b1145a09bf33183cd1448ff6b1a9dfbb022db8" - blockHash822018 = "212a7598a62295f1a520ef525a34f657bc636d9da9bda74acdf6f051cd84c353" + blockHash822018Orphan = "000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde" blockHash822019Orphan = "00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656" blockHash822020Orphan = "00000000000000000a5c4d27edc0178e953a5bb0ab0081e66cb30c8890484076" - blockHash822021 = "743c7dc491ae5fddd37ebf63058f9574b4db9f6a89f483a4baec31820e5df61d" + blockHash822021 = "d46bf0a189927b62c8ff785d393a545093ca01af159aed771a8d94749f06c060" blockHash822022Orphan = "0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c" blockHash822023Orphan = "0000000000000000082131979a4e25a5101912a5f8461e18f306d23e158161cd" + blockHash822024 = "5d60cfea9a7ef96554768150716788e9643eaafd5a1979636777a6a5835b07c6" txhash822015 = "cd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853" txhash822015Competing = "b16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430" @@ -313,17 +314,17 @@ func testHandleReorg(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store func testHandleStaleOrphans(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store *postgresql.PostgreSQL) { txHash := testutils.RevChainhash(t, "de0753d9ce6f92e340843cbfdd11e58beff8c578956ecdec4c461b018a26b8a9") merkleRoot := testutils.RevChainhash(t, "de0753d9ce6f92e340843cbfdd11e58beff8c578956ecdec4c461b018a26b8a9") - prevhash := testutils.RevChainhash(t, blockHash822017) + prevhash := testutils.RevChainhash(t, blockHash822020Orphan) // should become STALE blockMessage := &blockchain.BlockMessage{ Header: &wire.BlockHeader{ Version: 541065216, - PrevBlock: *prevhash, // block with status STALE at height 822017 + PrevBlock: *prevhash, // block with status ORPHANED at height 822020 - connected to STALE chain MerkleRoot: *merkleRoot, Bits: 0x1d00ffff, // chainwork: "4295032833" lower than the competing chain }, - Height: uint64(822018), + Height: uint64(822021), TransactionHashes: []*chainhash.Hash{txHash}, } @@ -332,9 +333,10 @@ func testHandleStaleOrphans(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, time.Sleep(1 * time.Second) // verify that the block and orphans have STALE status - verifyBlock(t, store, blockHash822018, 822018, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822018Orphan, 822018, blocktx_api.Status_STALE) verifyBlock(t, store, blockHash822019Orphan, 822019, blocktx_api.Status_STALE) verifyBlock(t, store, blockHash822020Orphan, 822020, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822021, 822021, blocktx_api.Status_STALE) // verify that the blocks after the next gap are still orphans verifyBlock(t, store, blockHash822022Orphan, 822022, blocktx_api.Status_ORPHANED) @@ -344,19 +346,19 @@ func testHandleStaleOrphans(t *testing.T, p2pMsgHandler 
*blocktx_p2p.MsgHandler, func testHandleOrphansReorg(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store *postgresql.PostgreSQL) []*blocktx_api.TransactionBlock { txHash := testutils.RevChainhash(t, "3e15f823a7de25c26ce9001d4814a6f0ebc915a1ca4f1ba9cfac720bd941c39c") merkleRoot := testutils.RevChainhash(t, "3e15f823a7de25c26ce9001d4814a6f0ebc915a1ca4f1ba9cfac720bd941c39c") - prevhash := testutils.RevChainhash(t, blockHash822020Orphan) + prevhash := testutils.RevChainhash(t, blockHash822023Orphan) // should become LONGEST // reorg should happen blockMessage := &blockchain.BlockMessage{ Header: &wire.BlockHeader{ Version: 541065216, - PrevBlock: *prevhash, // block with status STALE at height 822020 + PrevBlock: *prevhash, // block with status ORPHANED at height 822023 - connected to STALE chain MerkleRoot: *merkleRoot, - Bits: 0x1d00ffff, // chainwork: "4295032833" lower than the competing chain - // but the sum of orphan chain has a higher chainwork and should cause a reorg + Bits: 0x1d00ffff, // chainwork: "4295032833" + // the sum of orphan chain has a higher chainwork and should cause a reorg }, - Height: uint64(822021), + Height: uint64(822024), TransactionHashes: []*chainhash.Hash{txHash}, } @@ -369,12 +371,13 @@ func testHandleOrphansReorg(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, verifyBlock(t, store, blockHash822015, 822015, blocktx_api.Status_LONGEST) verifyBlock(t, store, blockHash822016, 822016, blocktx_api.Status_LONGEST) verifyBlock(t, store, blockHash822017, 822017, blocktx_api.Status_LONGEST) - verifyBlock(t, store, blockHash822018, 822018, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822018Orphan, 822018, blocktx_api.Status_LONGEST) verifyBlock(t, store, blockHash822019Orphan, 822019, blocktx_api.Status_LONGEST) verifyBlock(t, store, blockHash822020Orphan, 822020, blocktx_api.Status_LONGEST) verifyBlock(t, store, blockHash822021, 822021, blocktx_api.Status_LONGEST) verifyBlock(t, store, blockHash822022Orphan, 822022, blocktx_api.Status_LONGEST) verifyBlock(t, store, blockHash822023Orphan, 822023, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822024, 822024, blocktx_api.Status_LONGEST) verifyBlock(t, store, blockHash822015Fork, 822015, blocktx_api.Status_STALE) verifyBlock(t, store, blockHash822016Fork, 822016, blocktx_api.Status_STALE) diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index dd2d809c9..23b57a2e4 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -6,7 +6,6 @@ import ( "errors" "fmt" "log/slog" - "math/big" "os" "sync" "time" @@ -31,6 +30,8 @@ var ( ErrFailedToGetStringFromBUMPHex = errors.New("failed to get string from bump for tx hash") ErrFailedToParseBlockHash = errors.New("failed to parse block hash") ErrFailedToInsertBlockTransactions = errors.New("failed to insert block transactions") + ErrBlockAlreadyExists = errors.New("block already exists in the database") + ErrUnexpectedBlockStatus = errors.New("unexpected block status") ) const ( @@ -458,71 +459,37 @@ func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { blockHash = msg.Header.BlockHash() blockHeight := msg.Height - p.logger.Info("processing incoming block", slog.String("hash", blockHash.String())) - - var chain chain - var competing bool - var err error + p.logger.Info("processing incoming block", slog.String("hash", blockHash.String()), slog.Uint64("height", blockHeight)) // check if we've already processed that block existingBlock, _ := p.store.GetBlock(ctx, &blockHash) 
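The status rules that the new assignBlockStatus (introduced further down in this hunk) applies to an incoming block can be condensed as in the sketch below. The helper name is illustrative, and the ErrBlockAlreadyExists short-circuit (same hash already at that height) is omitted for brevity.

package blocktx // illustrative sketch, not part of the patch

import "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api"

// statusForIncomingBlock condenses the rules applied by assignBlockStatus:
// the incoming block inherits its fate from its parent, falling back to
// ORPHANED whenever the parent is unknown or not yet fully processed.
func statusForIncomingBlock(prev *blocktx_api.Block, longestTipExists, competingAtHeight bool) blocktx_api.Status {
	switch {
	case prev == nil && !longestTipExists:
		return blocktx_api.Status_LONGEST // empty database: first block starts the longest chain
	case prev == nil || !prev.Processed:
		return blocktx_api.Status_ORPHANED // parent unknown or still being processed elsewhere
	case prev.Status == blocktx_api.Status_LONGEST && competingAtHeight:
		return blocktx_api.Status_STALE // a LONGEST block already occupies this height
	case prev.Status == blocktx_api.Status_LONGEST:
		return blocktx_api.Status_LONGEST
	default:
		return prev.Status // STALE and ORPHANED chains are simply extended
	}
}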
if existingBlock != nil && existingBlock.Processed { - // if the block was already processed, check and update - // possible orphan children of that block - chain, competing, err = p.updateOrphans(ctx, existingBlock, competing) - if err != nil { - p.logger.Error("unable to check and update possible orphaned child blocks", slog.String("hash", blockHash.String()), slog.String("err", err.Error())) - return err - } - - if len(chain) == 1 { // this means that no orphans were found - p.logger.Warn("ignoring already existing block", slog.String("hash", blockHash.String())) - return nil - } - } else { - // if the block was not yet processed, proceed normally - chain, competing, err = p.verifyAndInsertBlock(ctx, msg) - if err != nil { - p.logger.Error("unable to verify the longest tip existence in db", slog.String("hash", blockHash.String()), slog.Uint64("height", msg.Height), slog.String("err", err.Error())) - return err - } + p.logger.Warn("ignoring already existing block", slog.String("hash", blockHash.String()), slog.Uint64("height", blockHeight)) + return nil } - chainTip, err := chain.getTip() + block, err := p.verifyAndInsertBlock(ctx, msg) if err != nil { - p.logger.Error("unable to get chain tip", slog.String("hash", blockHash.String()), slog.String("err", err.Error())) return err } - shouldPerformReorg := false - if competing { - hasGreatestChainwork, err := p.hasGreatestChainwork(ctx, chainTip) - if err != nil { - p.logger.Error("unable to get the chain tip to verify chainwork", slog.String("hash", blockHash.String()), slog.Uint64("height", blockHeight), slog.String("err", err.Error())) - return err - } + var txsToPublish []store.TransactionBlock - if hasGreatestChainwork { - p.logger.Info("chain reorg detected", slog.String("hash", blockHash.String()), slog.Uint64("height", blockHeight)) - shouldPerformReorg = true - } + switch block.Status { + case blocktx_api.Status_LONGEST: + txsToPublish, err = p.getRegisteredTransactions(ctx, []*blocktx_api.Block{block}) + case blocktx_api.Status_STALE: + txsToPublish, err = p.handleStaleBlock(ctx, block) + case blocktx_api.Status_ORPHANED: + txsToPublish, err = p.handleOrphans(ctx, block) + default: + return ErrUnexpectedBlockStatus } - txsToPublish := make([]store.TransactionBlock, 0) - - if shouldPerformReorg { - txsToPublish, err = p.performReorg(ctx, chainTip) - if err != nil { - p.logger.Error("unable to perform reorg", slog.String("hash", blockHash.String()), slog.Uint64("height", blockHeight), slog.String("err", err.Error())) - return err - } - } else if chainTip.Status == blocktx_api.Status_LONGEST { - txsToPublish, err = p.store.GetRegisteredTxsByBlockHashes(ctx, chain.getHashes()) - if err != nil { - p.logger.Error("unable to get registered transactions", slog.String("hash", blockHash.String()), slog.Uint64("height", blockHeight), slog.String("err", err.Error())) - return err - } + if err != nil { + // error is already logged in each method above + return err } for _, tx := range txsToPublish { @@ -545,69 +512,95 @@ func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { return nil } -func (p *Processor) verifyAndInsertBlock(ctx context.Context, msg *p2p.BlockMessage) (chain, bool, error) { +func (p *Processor) verifyAndInsertBlock(ctx context.Context, msg *p2p.BlockMessage) (*blocktx_api.Block, error) { blockHash := msg.Header.BlockHash() previousBlockHash := msg.Header.PrevBlock + merkleRoot := msg.Header.MerkleRoot + + incomingBlock := &blocktx_api.Block{ + Hash: blockHash[:], + PreviousHash: previousBlockHash[:], 
+ MerkleRoot: merkleRoot[:], + Height: msg.Height, + Chainwork: calculateChainwork(msg.Header.Bits).String(), + } + + err := p.assignBlockStatus(ctx, incomingBlock, previousBlockHash) + if err != nil { + p.logger.Error("unable to assign block status", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) + return nil, err + } + + p.logger.Info("Inserting block", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("status", incomingBlock.Status.String())) - prevBlock, err := p.getPrevBlock(ctx, &previousBlockHash) + err = p.insertBlockAndStoreTransactions(ctx, incomingBlock, msg.TransactionHashes, msg.Header.MerkleRoot) if err != nil { - p.logger.Error("unable to get previous block from db", slog.String("hash", blockHash.String()), slog.Uint64("height", msg.Height), slog.String("prevHash", previousBlockHash.String()), slog.String("err", err.Error())) - return nil, false, err + p.logger.Error("unable to insert block and store its transactions", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) + return nil, err } - longestTipExists := true + return incomingBlock, nil +} + +func (p *Processor) assignBlockStatus(ctx context.Context, block *blocktx_api.Block, prevBlockHash chainhash.Hash) error { + prevBlock, _ := p.store.GetBlock(ctx, &prevBlockHash) + if prevBlock == nil { // This check is only in case there's a fresh, empty database // with no blocks, to mark the first block as the LONGEST chain - longestTipExists, err = p.longestTipExists(ctx) + longestTipExists, err := p.longestTipExists(ctx) if err != nil { - p.logger.Error("unable to verify the longest tip existance in db", slog.String("hash", blockHash.String()), slog.Uint64("height", msg.Height), slog.String("err", err.Error())) - return nil, false, err + p.logger.Error("unable to verify the longest tip existance in db", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) + return err } - } - - incomingBlock := createBlock(msg, prevBlock, longestTipExists) - competing, err := p.competingChainsExist(ctx, incomingBlock) - if err != nil { - p.logger.Error("unable to check for competing chains", slog.String("hash", blockHash.String()), slog.Uint64("height", msg.Height), slog.String("err", err.Error())) - return nil, false, err + // if there's no longest block in the + // database - mark this block as LONGEST + // otherwise - it's an orphan + if !longestTipExists { + block.Status = blocktx_api.Status_LONGEST + } else { + block.Status = blocktx_api.Status_ORPHANED + } + return nil } - if competing { - p.logger.Info("Competing blocks found", slog.String("incoming block hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height)) - incomingBlock.Status = blocktx_api.Status_STALE + // if the previous block exists in the db but is currently being + // processed by another instance, we don't know what the final + // status of that parent block will be, so mark the incoming block + // as ORPHANED and wait for the next block to confirm the status + if !prevBlock.Processed { + block.Status = blocktx_api.Status_ORPHANED + return nil } - p.logger.Info("Inserting block", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("status", incomingBlock.Status.String())) + if prevBlock.Status == blocktx_api.Status_LONGEST { + competingBlock, err := 
p.store.GetLongestBlockByHeight(ctx, block.Height) + if err != nil && !errors.Is(err, store.ErrBlockNotFound) { + p.logger.Error("unable to get the competing block from db", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) + return err + } - err = p.insertBlockAndStoreTransactions(ctx, incomingBlock, msg.TransactionHashes, msg.Header.MerkleRoot) - if err != nil { - p.logger.Error("unable to insert block and store its transactions", slog.String("hash", blockHash.String()), slog.String("err", err.Error())) - return nil, false, err - } + if competingBlock == nil { + block.Status = blocktx_api.Status_LONGEST + return nil + } - // if the block is ORPHANED, there's no need to process it any further - if incomingBlock.Status == blocktx_api.Status_ORPHANED { - return chain{incomingBlock}, false, nil - } + if bytes.Equal(block.Hash, competingBlock.Hash) { + // this means that another instance is already processing + // or have processed this block that we're processing here + // so we can throw an error and finish processing + return ErrBlockAlreadyExists + } - chain, competing, err := p.updateOrphans(ctx, incomingBlock, competing) - if err != nil { - p.logger.Error("unable to check and update possible orphaned child blocks", slog.String("hash", blockHash.String()), slog.String("err", err.Error())) - return nil, false, err + block.Status = blocktx_api.Status_STALE + return nil } - return chain, competing, nil -} + // ORPHANED or STALE + block.Status = prevBlock.Status -func (p *Processor) getPrevBlock(ctx context.Context, prevHash *chainhash.Hash) (*blocktx_api.Block, error) { - prevBlock, err := p.store.GetBlock(ctx, prevHash) - if err != nil && !errors.Is(err, store.ErrBlockNotFound) { - return nil, err - } - - return prevBlock, nil + return nil } func (p *Processor) longestTipExists(ctx context.Context) (bool, error) { @@ -623,60 +616,20 @@ func (p *Processor) longestTipExists(ctx context.Context) (bool, error) { return true, nil } -func (p *Processor) competingChainsExist(ctx context.Context, block *blocktx_api.Block) (bool, error) { - if block.Status == blocktx_api.Status_ORPHANED { - return false, nil +func (p *Processor) getRegisteredTransactions(ctx context.Context, blocks []*blocktx_api.Block) ([]store.TransactionBlock, error) { + blockHashes := make([][]byte, len(blocks)) + for i, b := range blocks { + blockHashes[i] = b.Hash } - if block.Status == blocktx_api.Status_LONGEST { - competingBlock, err := p.store.GetBlockByHeight(ctx, block.Height) - if err != nil && !errors.Is(err, store.ErrBlockNotFound) { - return false, err - } - - if competingBlock != nil && !bytes.Equal(competingBlock.Hash, block.Hash) { - return true, nil - } - - return false, nil - } - - // If STALE status - return true, nil -} - -func (p *Processor) hasGreatestChainwork(ctx context.Context, competingChainTip *blocktx_api.Block) (bool, error) { - staleBlocks, err := p.store.GetStaleChainBackFromHash(ctx, competingChainTip.Hash) - if err != nil { - return false, err - } - - lowestHeight := competingChainTip.Height - if len(staleBlocks) > 0 { - lowestHeight = getLowestHeight(staleBlocks) - } - - longestBlocks, err := p.store.GetLongestChainFromHeight(ctx, lowestHeight) + txsToPublish, err := p.store.GetRegisteredTxsByBlockHashes(ctx, blockHashes) if err != nil { - return false, err - } - - sumStaleChainwork := big.NewInt(0) - sumLongChainwork := big.NewInt(0) - - for _, b := range staleBlocks { - chainwork := new(big.Int) - 
chainwork.SetString(b.Chainwork, 10) - sumStaleChainwork = sumStaleChainwork.Add(sumStaleChainwork, chainwork) - } - - for _, b := range longestBlocks { - chainwork := new(big.Int) - chainwork.SetString(b.Chainwork, 10) - sumLongChainwork = sumLongChainwork.Add(sumLongChainwork, chainwork) + block := blocks[len(blocks)-1] + p.logger.Error("unable to get registered transactions", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) + return nil, err } - return sumLongChainwork.Cmp(sumStaleChainwork) < 0, nil + return txsToPublish, nil } func (p *Processor) insertBlockAndStoreTransactions(ctx context.Context, incomingBlock *blocktx_api.Block, txHashes []*chainhash.Hash, merkleRoot chainhash.Hash) error { @@ -777,179 +730,175 @@ func (p *Processor) storeTransactions(ctx context.Context, blockID uint64, block return nil } -func (p *Processor) updateOrphans(ctx context.Context, incomingBlock *blocktx_api.Block, competing bool) (chain, bool, error) { - chain := []*blocktx_api.Block{incomingBlock} - - uow, err := p.store.StartUnitOfWork(ctx) +func (p *Processor) handleStaleBlock(ctx context.Context, block *blocktx_api.Block) ([]store.TransactionBlock, error) { + staleBlocks, err := p.store.GetStaleChainBackFromHash(ctx, block.Hash) if err != nil { - return nil, false, err + p.logger.Error("unable to get STALE blocks to verify chainwork", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) + return nil, err } - defer func() { - _ = uow.Rollback() - }() - // Very important step, this will lock blocks - // table for writing but still allow reading. - err = uow.WriteLockBlocksTable(ctx) - if err != nil { - return nil, false, err + lowestHeight := block.Height + if len(staleBlocks) > 0 { + lowestHeight = staleBlocks[0].Height } - orphanedBlocks, err := uow.GetOrphanedChainUpFromHash(ctx, incomingBlock.Hash) + longestBlocks, err := p.store.GetLongestChainFromHeight(ctx, lowestHeight) if err != nil { - return nil, false, err - } - if len(orphanedBlocks) == 0 { - return chain, competing, nil + p.logger.Error("unable to get LONGEST blocks to verify chainwork", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) + return nil, err } - blockStatusUpdates := make([]store.BlockStatusUpdate, len(orphanedBlocks)) - for i := range orphanedBlocks { - // We want to mark all orphaned blocks as STALE - // in case there already exists a block at any - // of their height with status LONGEST, which - // would cause constraint validation (height, is_longest). - // - // If they are part of the LONGEST chain, the reorg - // will happen and update their statuses accordingly. 
- orphanedBlocks[i].Status = blocktx_api.Status_STALE + staleChainwork := sumChainwork(staleBlocks) + longestChainwork := sumChainwork(longestBlocks) - blockStatusUpdates[i] = store.BlockStatusUpdate{ - Hash: orphanedBlocks[i].Hash, - Status: blocktx_api.Status_STALE, - } - } + if longestChainwork.Cmp(staleChainwork) < 0 { + p.logger.Info("chain reorg detected", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height)) - err = uow.UpdateBlocksStatuses(ctx, blockStatusUpdates) - if err != nil { - return nil, false, err + txsToPublish, err := p.performReorg(ctx, staleBlocks, longestBlocks) + if err != nil { + p.logger.Error("unable to perform reorg", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) + return nil, err + } + return txsToPublish, nil } - err = uow.Commit() - if err != nil { - return nil, false, err - } + return nil, nil +} - p.logger.Info("orphans were found and updated", slog.Int("len", len(orphanedBlocks))) +func (p *Processor) performReorg(ctx context.Context, staleBlocks []*blocktx_api.Block, longestBlocks []*blocktx_api.Block) ([]store.TransactionBlock, error) { + staleHashes := make([][]byte, len(staleBlocks)) + longestHashes := make([][]byte, len(longestBlocks)) - chain = append(chain, orphanedBlocks...) + blockStatusUpdates := make([]store.BlockStatusUpdate, len(longestBlocks)+len(staleBlocks)) - // if we found any orphans and marked them as STALE - // we need to find out if they are part of the longest - // or stale chain, so competing is returned as true - return chain, true, nil -} + for i, b := range longestBlocks { + longestHashes[i] = b.Hash -func (p *Processor) performReorg(ctx context.Context, staleChainTip *blocktx_api.Block) ([]store.TransactionBlock, error) { - uow, err := p.store.StartUnitOfWork(ctx) - if err != nil { - return nil, err + b.Status = blocktx_api.Status_STALE + update := store.BlockStatusUpdate{Hash: b.Hash, Status: b.Status} + blockStatusUpdates[i] = update } - defer func() { - _ = uow.Rollback() - }() - // Very important step, this will lock blocks - // table for writing but still allow reading. 
- err = uow.WriteLockBlocksTable(ctx) - if err != nil { - return nil, err + for i, b := range staleBlocks { + staleHashes[i] = b.Hash + + b.Status = blocktx_api.Status_LONGEST + update := store.BlockStatusUpdate{Hash: b.Hash, Status: b.Status} + blockStatusUpdates[i+len(longestBlocks)] = update } - staleBlocks, err := uow.GetStaleChainBackFromHash(ctx, staleChainTip.Hash) + err := p.store.UpdateBlocksStatuses(ctx, blockStatusUpdates) if err != nil { return nil, err } - lowestHeight := staleChainTip.Height - if len(staleBlocks) > 0 { - lowestHeight = getLowestHeight(staleBlocks) - } + p.logger.Info("reorg performed successfully") - longestBlocks, err := uow.GetLongestChainFromHeight(ctx, lowestHeight) + registeredTxs, err := p.store.GetRegisteredTxsByBlockHashes(ctx, append(staleHashes, longestHashes...)) if err != nil { return nil, err } - staleHashes := make([][]byte, len(staleBlocks)) - longestHashes := make([][]byte, len(longestBlocks)) + longestTxs := make([]store.TransactionBlock, 0) + staleTxs := make([]store.TransactionBlock, 0) - for i, b := range longestBlocks { - longestHashes[i] = b.Hash + for _, tx := range registeredTxs { + switch tx.BlockStatus { + case blocktx_api.Status_LONGEST: + longestTxs = append(longestTxs, tx) + case blocktx_api.Status_STALE: + staleTxs = append(staleTxs, tx) + default: + // do nothing - ignore txs from ORPHANED or UNKNOWN blocks + } } - for i, b := range staleBlocks { - staleHashes[i] = b.Hash - } + staleTxs = findDistinctStaleTxs(longestTxs, staleTxs) + + return append(longestTxs, staleTxs...), nil +} - registeredTxs, err := uow.GetRegisteredTxsByBlockHashes(ctx, append(staleHashes, longestHashes...)) +func (p *Processor) handleOrphans(ctx context.Context, block *blocktx_api.Block) ([]store.TransactionBlock, error) { + orphans, ancestor, err := p.store.GetOrphansBackToNonOrphanAncestor(ctx, block.Hash) if err != nil { + p.logger.Error("unable to get ORPHANED blocks", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) return nil, err } - // Order of inserting into blockStatusUpdates is important here, we need to do: - // 1. LONGEST -> STALE - // 2. STALE -> LONGEST - // otherwise, a unique constraint on (height, is_longest) might be violated. - - // 1. LONGEST -> STALE - blockStatusUpdates := make([]store.BlockStatusUpdate, len(longestBlocks)) - for i, b := range longestBlocks { - update := store.BlockStatusUpdate{Hash: b.Hash, Status: blocktx_api.Status_STALE} - blockStatusUpdates[i] = update + if ancestor == nil || !ancestor.Processed || len(orphans) == 0 { + return nil, nil } - err = uow.UpdateBlocksStatuses(ctx, blockStatusUpdates) - if err != nil { - return nil, err - } + p.logger.Info("orphaned chain found", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("status", block.Status.String())) - // 2. 
STALE -> LONGEST - blockStatusUpdates = make([]store.BlockStatusUpdate, len(staleBlocks)) - for _, b := range staleBlocks { - update := store.BlockStatusUpdate{Hash: b.Hash, Status: blocktx_api.Status_LONGEST} - blockStatusUpdates = append(blockStatusUpdates, update) - } + if ancestor.Status == blocktx_api.Status_STALE { + err = p.acceptIntoChain(ctx, orphans, ancestor.Status) + if err != nil { + return nil, err + } - err = uow.UpdateBlocksStatuses(ctx, blockStatusUpdates) - if err != nil { - return nil, err + block.Status = blocktx_api.Status_STALE + return p.handleStaleBlock(ctx, block) } - err = uow.Commit() - if err != nil { - return nil, err - } + if ancestor.Status == blocktx_api.Status_LONGEST { + // If there is competing block at the height of + // the first orphan, then we need to mark them + // all as stale and recheck for reorg. + // + // If there's no competing block at the height + // of the first orphan, then we can assume that + // there's no competing chain at all. - p.logger.Info("reorg performed successfully") + competingBlock, err := p.store.GetLongestBlockByHeight(ctx, orphans[0].Height) + if err != nil && !errors.Is(err, store.ErrBlockNotFound) { + p.logger.Error("unable to get competing block when handling orphans", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) + return nil, err + } - prevLongestTxs := make([]store.TransactionBlock, 0) - prevStaleTxs := make([]store.TransactionBlock, 0) + if competingBlock != nil && !bytes.Equal(competingBlock.Hash, orphans[0].Hash) { + err = p.acceptIntoChain(ctx, orphans, blocktx_api.Status_STALE) + if err != nil { + return nil, err + } - for _, tx := range registeredTxs { - switch tx.BlockStatus { - case blocktx_api.Status_LONGEST: - prevLongestTxs = append(prevLongestTxs, tx) - case blocktx_api.Status_STALE: - prevStaleTxs = append(prevStaleTxs, tx) - default: - // do nothing - ignore ORPHANED and UNKNOWN blocks + block.Status = blocktx_api.Status_STALE + return p.handleStaleBlock(ctx, block) } - } - nowMinedTxs, nowStaleTxs := findMinedAndStaleTxs(prevStaleTxs, prevLongestTxs) + err = p.acceptIntoChain(ctx, orphans, ancestor.Status) // LONGEST + if err != nil { + return nil, err + } - for i := range nowMinedTxs { - nowMinedTxs[i].BlockStatus = blocktx_api.Status_LONGEST + p.logger.Info("orphaned chain accepted into LONGEST chain", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height)) + return p.getRegisteredTransactions(ctx, orphans) } - for i := range nowStaleTxs { - nowStaleTxs[i].BlockStatus = blocktx_api.Status_STALE + return nil, nil +} + +func (p *Processor) acceptIntoChain(ctx context.Context, blocks []*blocktx_api.Block, chain blocktx_api.Status) error { + blockStatusUpdates := make([]store.BlockStatusUpdate, len(blocks)) + + for i, b := range blocks { + b.Status = chain + blockStatusUpdates[i] = store.BlockStatusUpdate{ + Hash: b.Hash, + Status: b.Status, + } } - txsToPublish := append(nowMinedTxs, nowStaleTxs...) 
+ tip := blocks[len(blocks)-1] - return txsToPublish, nil + err := p.store.UpdateBlocksStatuses(ctx, blockStatusUpdates) + if err != nil { + p.logger.Error("unable to accept blocks into chain", slog.String("hash", getHashStringNoErr(tip.Hash)), slog.Uint64("height", tip.Height), slog.String("chain", chain.String()), slog.String("err", err.Error())) + return err + } + + p.logger.Info("blocks successfully accepted into chain", slog.String("hash", getHashStringNoErr(tip.Hash)), slog.Uint64("height", tip.Height), slog.String("chain", chain.String())) + return nil } func (p *Processor) Shutdown() { diff --git a/internal/blocktx/processor_helpers.go b/internal/blocktx/processor_helpers.go index a7ab34d8b..a5fbcd53a 100644 --- a/internal/blocktx/processor_helpers.go +++ b/internal/blocktx/processor_helpers.go @@ -4,7 +4,6 @@ import ( "math" "math/big" - blockchain "github.com/bitcoin-sv/arc/internal/blocktx/blockchain_communication" "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" "github.com/bitcoin-sv/arc/internal/blocktx/store" "github.com/libsv/go-p2p/chaincfg/chainhash" @@ -18,69 +17,33 @@ func getHashStringNoErr(hash []byte) string { return chash.String() } -func createBlock(msg *blockchain.BlockMessage, prevBlock *blocktx_api.Block, longestTipExists bool) *blocktx_api.Block { - hash := msg.Header.BlockHash() - prevHash := msg.Header.PrevBlock - merkleRoot := msg.Header.MerkleRoot - chainwork := calculateChainwork(msg.Header.Bits) - - var status blocktx_api.Status - if prevBlock == nil { - if longestTipExists { - status = blocktx_api.Status_ORPHANED - } else { - status = blocktx_api.Status_LONGEST - } - } else { - status = prevBlock.Status - } - - return &blocktx_api.Block{ - Hash: hash[:], - PreviousHash: prevHash[:], - MerkleRoot: merkleRoot[:], - Height: msg.Height, - Status: status, - Chainwork: chainwork.String(), - } -} - -func getLowestHeight(blocks []*blocktx_api.Block) uint64 { - if len(blocks) == 0 { - return 0 - } - - lowest := blocks[0].Height +func sumChainwork(blocks []*blocktx_api.Block) *big.Int { + sum := big.NewInt(0) for _, b := range blocks { - if b.Height < lowest { - lowest = b.Height - } + chainwork := new(big.Int) + chainwork.SetString(b.Chainwork, 10) + + sum = sum.Add(sum, chainwork) } - return lowest + return sum } -func findMinedAndStaleTxs(prevStaleTxs, prevLongestTxs []store.TransactionBlock) (nowMinedTxs, nowStaleTxs []store.TransactionBlock) { - prevStaleMap := make(map[string]store.TransactionBlock) +func findDistinctStaleTxs(longestTxs, staleTxs []store.TransactionBlock) []store.TransactionBlock { + longestTxsMap := make(map[string]struct{}) - for _, tx := range prevStaleTxs { - prevStaleMap[string(tx.TxHash)] = tx - // every tx that was in previously stale blocks is to - // be mined regardless of whether it was also in the - // previously longest chain (update block info) - // or previously stale chain (new mined) - nowMinedTxs = append(nowMinedTxs, tx) + for _, tx := range longestTxs { + longestTxsMap[string(tx.TxHash)] = struct{}{} } - for _, longestTx := range prevLongestTxs { - if _, found := prevStaleMap[string(longestTx.TxHash)]; !found { - // if a transaction that was previously in a longest chain is - // not found in the previously stale blocks - it is now stale - nowStaleTxs = append(nowStaleTxs, longestTx) + distinctStaleTxs := make([]store.TransactionBlock, 0) + for _, tx := range staleTxs { + if _, found := longestTxsMap[string(tx.TxHash)]; !found { + distinctStaleTxs = append(distinctStaleTxs, tx) } } - return + return distinctStaleTxs 
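The reorg trigger in handleStaleBlock reduces to a big.Int comparison of the two chainwork sums produced by sumChainwork: a reorg is performed only when the stale chain has accumulated strictly more work. A small standalone illustration with hypothetical totals (the fixtures above use decimal strings such as "62209952899966"):

package main // standalone illustration, not part of the patch

import (
	"fmt"
	"math/big"
)

func main() {
	// Hypothetical chainwork sums, stored as decimal strings like the blocks table does.
	longest := new(big.Int)
	longest.SetString("124419905799932", 10) // sum over the longest chain
	stale := new(big.Int)
	stale.SetString("124419905799967", 10) // sum over the stale chain

	// Same comparison as in handleStaleBlock.
	if longest.Cmp(stale) < 0 {
		fmt.Println("stale chain is heavier -> perform reorg")
	} else {
		fmt.Println("longest chain keeps the tip -> no reorg")
	}
}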
} // calculateChainwork calculates chainwork from the given difficulty bits diff --git a/internal/blocktx/processor_helpers_test.go b/internal/blocktx/processor_helpers_test.go index 2aa6c0e54..b7ad49fd3 100644 --- a/internal/blocktx/processor_helpers_test.go +++ b/internal/blocktx/processor_helpers_test.go @@ -4,41 +4,13 @@ import ( "fmt" "testing" - "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" "github.com/bitcoin-sv/arc/internal/blocktx/store" "github.com/stretchr/testify/require" ) -func TestGetLowestHeight(t *testing.T) { - // given - blocks := []*blocktx_api.Block{ - { - Height: 123, - }, - { - Height: 250, - }, - { - Height: 83340, - }, - { - Height: 4, - }, - { - Height: 40, - }, - } - - // when - lowestHeight := getLowestHeight(blocks) - - // then - require.Equal(t, uint64(4), lowestHeight) -} - func TestFindMinedAndStaleTxs(t *testing.T) { // given - prevStaleTxs := []store.TransactionBlock{ + longestTxs := []store.TransactionBlock{ { TxHash: []byte("1"), }, @@ -46,7 +18,7 @@ func TestFindMinedAndStaleTxs(t *testing.T) { TxHash: []byte("2"), }, } - prevLongestTxs := []store.TransactionBlock{ + staleTxs := []store.TransactionBlock{ { TxHash: []byte("A"), }, @@ -58,14 +30,6 @@ func TestFindMinedAndStaleTxs(t *testing.T) { }, } - expectedMinedTxs := []store.TransactionBlock{ - { - TxHash: []byte("1"), - }, - { - TxHash: []byte("2"), - }, - } expectedStaleTxs := []store.TransactionBlock{ { TxHash: []byte("A"), @@ -76,10 +40,9 @@ func TestFindMinedAndStaleTxs(t *testing.T) { } // when - actualMinedTxs, actualStaleTxs := findMinedAndStaleTxs(prevStaleTxs, prevLongestTxs) + actualStaleTxs := findDistinctStaleTxs(longestTxs, staleTxs) // then - require.Equal(t, expectedMinedTxs, actualMinedTxs) require.Equal(t, expectedStaleTxs, actualStaleTxs) } diff --git a/internal/blocktx/processor_test.go b/internal/blocktx/processor_test.go index e1ac6c51e..28115d28e 100644 --- a/internal/blocktx/processor_test.go +++ b/internal/blocktx/processor_test.go @@ -14,7 +14,6 @@ import ( "github.com/bitcoin-sv/arc/internal/blocktx/mocks" "github.com/bitcoin-sv/arc/internal/blocktx/store" storeMocks "github.com/bitcoin-sv/arc/internal/blocktx/store/mocks" - testutils "github.com/bitcoin-sv/arc/internal/test_utils" "github.com/bitcoin-sv/arc/internal/testdata" "github.com/libsv/go-p2p/chaincfg/chainhash" "github.com/libsv/go-p2p/wire" @@ -157,31 +156,14 @@ func TestHandleBlock(t *testing.T) { } var actualInsertedBlockTransactions [][]byte - uowMock := &storeMocks.UnitOfWorkMock{ - GetOrphanedChainUpFromHashFunc: func(_ context.Context, _ []byte) ([]*blocktx_api.Block, error) { - return nil, nil - }, - CommitFunc: func() error { - return nil - }, - RollbackFunc: func() error { - return nil - }, - WriteLockBlocksTableFunc: func(_ context.Context) error { - return nil - }, - } storeMock := &storeMocks.BlocktxStoreMock{ - StartUnitOfWorkFunc: func(_ context.Context) (store.UnitOfWork, error) { - return uowMock, nil - }, GetBlockFunc: func(_ context.Context, _ *chainhash.Hash) (*blocktx_api.Block, error) { if tc.blockAlreadyProcessed { return &blocktx_api.Block{Processed: true}, nil } return nil, store.ErrBlockNotFound }, - GetBlockByHeightFunc: func(_ context.Context, _ uint64) (*blocktx_api.Block, error) { + GetLongestBlockByHeightFunc: func(_ context.Context, _ uint64) (*blocktx_api.Block, error) { return nil, store.ErrBlockNotFound }, GetChainTipFunc: func(_ context.Context) (*blocktx_api.Block, error) { @@ -254,25 +236,19 @@ func TestHandleBlock(t *testing.T) { func 
TestHandleBlockReorgAndOrphans(t *testing.T) { testCases := []struct { - name string - blockAlreadyExists bool - prevBlockStatus blocktx_api.Status - hasCompetingBlock bool - hasGreaterChainwork bool - expectedStatus blocktx_api.Status - shouldFindOrphanChain bool + name string + blockAlreadyExists bool + prevBlockStatus blocktx_api.Status + hasCompetingBlock bool + hasGreaterChainwork bool + shouldFindOrphanAncestor bool + ancestorStatus blocktx_api.Status + expectedStatus blocktx_api.Status }{ { - name: "block already exists - no orphans - should be ingored", - blockAlreadyExists: true, - shouldFindOrphanChain: false, - expectedStatus: blocktx_api.Status_UNKNOWN, - }, - { - name: "block already exists - orphans found - reorg", - blockAlreadyExists: true, - shouldFindOrphanChain: true, - expectedStatus: blocktx_api.Status_LONGEST, + name: "block already exists - should be ingored", + blockAlreadyExists: true, + expectedStatus: blocktx_api.Status_UNKNOWN, }, { name: "previous block longest - no competing - no reorg", @@ -295,56 +271,55 @@ func TestHandleBlockReorgAndOrphans(t *testing.T) { expectedStatus: blocktx_api.Status_LONGEST, }, { - name: "previous block stale - competing - no reorg", - prevBlockStatus: blocktx_api.Status_STALE, - hasCompetingBlock: true, - hasGreaterChainwork: false, - expectedStatus: blocktx_api.Status_STALE, - }, - { - name: "previous block stale - no competing - no reorg", + name: "previous block stale - no reorg", prevBlockStatus: blocktx_api.Status_STALE, - hasCompetingBlock: false, hasGreaterChainwork: false, expectedStatus: blocktx_api.Status_STALE, }, { - name: "previous block stale - no competing - reorg", + name: "previous block stale - reorg", prevBlockStatus: blocktx_api.Status_STALE, - hasCompetingBlock: false, hasGreaterChainwork: true, expectedStatus: blocktx_api.Status_LONGEST, }, { - name: "previous block orphaned - no competing - no reorg", - prevBlockStatus: blocktx_api.Status_ORPHANED, - hasCompetingBlock: false, - hasGreaterChainwork: false, - expectedStatus: blocktx_api.Status_ORPHANED, + name: "previous block orphaned - no ancestor", + prevBlockStatus: blocktx_api.Status_ORPHANED, + shouldFindOrphanAncestor: false, + expectedStatus: blocktx_api.Status_ORPHANED, }, { - name: "previous block longest - orphaned chain - no competing - no reorg", - prevBlockStatus: blocktx_api.Status_LONGEST, - hasCompetingBlock: false, - hasGreaterChainwork: false, - expectedStatus: blocktx_api.Status_LONGEST, - shouldFindOrphanChain: true, + name: "previous block orphaned - stale ancestor", + prevBlockStatus: blocktx_api.Status_ORPHANED, + shouldFindOrphanAncestor: true, + ancestorStatus: blocktx_api.Status_STALE, + expectedStatus: blocktx_api.Status_STALE, }, { - name: "previous block longest - orphaned chain - competing - reorg", - prevBlockStatus: blocktx_api.Status_LONGEST, - hasCompetingBlock: true, - hasGreaterChainwork: false, // tip of orphan chain has greater chainwork - expectedStatus: blocktx_api.Status_LONGEST, - shouldFindOrphanChain: true, + name: "previous block orphaned - longest ancestor - no competing", + prevBlockStatus: blocktx_api.Status_ORPHANED, + shouldFindOrphanAncestor: true, + ancestorStatus: blocktx_api.Status_LONGEST, + hasCompetingBlock: false, + expectedStatus: blocktx_api.Status_LONGEST, }, { - name: "previous block stale - orphaned chain - competing - reorg", - prevBlockStatus: blocktx_api.Status_STALE, - hasCompetingBlock: true, - hasGreaterChainwork: false, // tip of orphan chain has greater chainwork - expectedStatus: 
blocktx_api.Status_LONGEST, - shouldFindOrphanChain: true, + name: "previous block orphaned - longest ancestor - competing - no reorg", + prevBlockStatus: blocktx_api.Status_ORPHANED, + shouldFindOrphanAncestor: true, + ancestorStatus: blocktx_api.Status_LONGEST, + hasCompetingBlock: true, + hasGreaterChainwork: false, + expectedStatus: blocktx_api.Status_STALE, + }, + { + name: "previous block orphaned - longest ancestor - competing - reorg", + prevBlockStatus: blocktx_api.Status_ORPHANED, + shouldFindOrphanAncestor: true, + ancestorStatus: blocktx_api.Status_LONGEST, + hasCompetingBlock: true, + hasGreaterChainwork: true, + expectedStatus: blocktx_api.Status_LONGEST, }, } @@ -353,72 +328,9 @@ func TestHandleBlockReorgAndOrphans(t *testing.T) { // given var mtx sync.Mutex insertedBlockStatus := blocktx_api.Status_UNKNOWN - orphanedChainTip := &blocktx_api.Block{ - Hash: testutils.RevChainhash(t, "0000000000000000025855b62f4c2e3732dad363a6f2ead94e4657ef96877067")[:], - Status: blocktx_api.Status_ORPHANED, - Chainwork: "34364008516618225545", // greatest chainwork - should cause reorg if found - } - shouldReturnNoBlock := !tc.blockAlreadyExists - shouldCheckUpdateStatuses := true - uowMock := &storeMocks.UnitOfWorkMock{ - GetOrphanedChainUpFromHashFunc: func(_ context.Context, _ []byte) ([]*blocktx_api.Block, error) { - if tc.shouldFindOrphanChain { - return []*blocktx_api.Block{ - { - Hash: []byte("123"), - Status: blocktx_api.Status_ORPHANED, - Chainwork: "123", - }, - orphanedChainTip, - }, nil - } - - return nil, nil - }, - UpdateBlocksStatusesFunc: func(_ context.Context, blockStatusUpdates []store.BlockStatusUpdate) error { - if shouldCheckUpdateStatuses && tc.shouldFindOrphanChain { - mtx.Lock() - shouldCheckUpdateStatuses = false - tipStatusUpdate := blockStatusUpdates[len(blockStatusUpdates)-1] - require.Equal(t, orphanedChainTip.Hash, tipStatusUpdate.Hash) - require.Equal(t, blocktx_api.Status_STALE, tipStatusUpdate.Status) - mtx.Unlock() - } - return nil - }, - GetStaleChainBackFromHashFunc: func(_ context.Context, hash []byte) ([]*blocktx_api.Block, error) { - // if this method is called from UnitOfwork, it means that reorg is happening - mtx.Lock() - insertedBlockStatus = blocktx_api.Status_LONGEST - if tc.shouldFindOrphanChain { - require.Equal(t, orphanedChainTip.Hash[:], hash) - orphanedChainTip.Status = blocktx_api.Status_LONGEST - } - mtx.Unlock() - return nil, nil - }, - GetLongestChainFromHeightFunc: func(_ context.Context, _ uint64) ([]*blocktx_api.Block, error) { - return nil, nil - }, - GetRegisteredTxsByBlockHashesFunc: func(_ context.Context, _ [][]byte) ([]store.TransactionBlock, error) { - return nil, nil - }, - CommitFunc: func() error { - return nil - }, - RollbackFunc: func() error { - return nil - }, - WriteLockBlocksTableFunc: func(_ context.Context) error { - return nil - }, - } storeMock := &storeMocks.BlocktxStoreMock{ - StartUnitOfWorkFunc: func(_ context.Context) (store.UnitOfWork, error) { - return uowMock, nil - }, GetBlockFunc: func(_ context.Context, _ *chainhash.Hash) (*blocktx_api.Block, error) { if shouldReturnNoBlock { shouldReturnNoBlock = false @@ -430,7 +342,7 @@ func TestHandleBlockReorgAndOrphans(t *testing.T) { Processed: true, }, nil }, - GetBlockByHeightFunc: func(_ context.Context, _ uint64) (*blocktx_api.Block, error) { + GetLongestBlockByHeightFunc: func(_ context.Context, _ uint64) (*blocktx_api.Block, error) { if tc.hasCompetingBlock { blockHash, err := 
chainhash.NewHashFromStr("0000000000000000087590e1ad6360c0c491556c9af75c0d22ce9324cb5713cf") require.NoError(t, err) @@ -450,11 +362,7 @@ func TestHandleBlockReorgAndOrphans(t *testing.T) { mtx.Unlock() return 1, nil }, - GetStaleChainBackFromHashFunc: func(_ context.Context, hash []byte) ([]*blocktx_api.Block, error) { - if tc.shouldFindOrphanChain { - require.Equal(t, orphanedChainTip.Hash, hash) - return []*blocktx_api.Block{orphanedChainTip}, nil - } + GetStaleChainBackFromHashFunc: func(_ context.Context, _ []byte) ([]*blocktx_api.Block, error) { if tc.hasGreaterChainwork { return []*blocktx_api.Block{ { @@ -484,6 +392,21 @@ func TestHandleBlockReorgAndOrphans(t *testing.T) { }, }, nil }, + UpdateBlocksStatusesFunc: func(_ context.Context, blockStatusUpdates []store.BlockStatusUpdate) error { + mtx.Lock() + tipStatusUpdate := blockStatusUpdates[len(blockStatusUpdates)-1] + insertedBlockStatus = tipStatusUpdate.Status + mtx.Unlock() + return nil + }, + GetOrphansBackToNonOrphanAncestorFunc: func(_ context.Context, hash []byte) ([]*blocktx_api.Block, *blocktx_api.Block, error) { + if tc.shouldFindOrphanAncestor { + orphans := []*blocktx_api.Block{{Hash: hash}} + ancestor := &blocktx_api.Block{Hash: []byte("123"), Status: tc.ancestorStatus, Processed: true} + return orphans, ancestor, nil + } + return nil, nil, nil + }, UpsertBlockTransactionsFunc: func(_ context.Context, _ uint64, _ []store.TxWithMerklePath) error { return nil }, @@ -535,9 +458,6 @@ func TestHandleBlockReorgAndOrphans(t *testing.T) { time.Sleep(20 * time.Millisecond) mtx.Lock() require.Equal(t, tc.expectedStatus, insertedBlockStatus) - if tc.shouldFindOrphanChain { - require.Equal(t, tc.expectedStatus, orphanedChainTip.Status) - } mtx.Unlock() }) } diff --git a/internal/blocktx/store/mocks/blocktx_db_tx_mock.go b/internal/blocktx/store/mocks/blocktx_db_tx_mock.go deleted file mode 100644 index f93c655c7..000000000 --- a/internal/blocktx/store/mocks/blocktx_db_tx_mock.go +++ /dev/null @@ -1,1269 +0,0 @@ -// Code generated by moq; DO NOT EDIT. -// github.com/matryer/moq - -package mocks - -import ( - "context" - "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" - "github.com/bitcoin-sv/arc/internal/blocktx/store" - "github.com/libsv/go-p2p/chaincfg/chainhash" - "sync" -) - -// Ensure, that UnitOfWorkMock does implement store.UnitOfWork. -// If this is not the case, regenerate this file with moq. -var _ store.UnitOfWork = &UnitOfWorkMock{} - -// UnitOfWorkMock is a mock implementation of store.UnitOfWork. 
-// -// func TestSomethingThatUsesUnitOfWork(t *testing.T) { -// -// // make and configure a mocked store.UnitOfWork -// mockedUnitOfWork := &UnitOfWorkMock{ -// ClearBlocktxTableFunc: func(ctx context.Context, retentionDays int32, table string) (*blocktx_api.RowsAffectedResponse, error) { -// panic("mock out the ClearBlocktxTable method") -// }, -// CloseFunc: func() error { -// panic("mock out the Close method") -// }, -// CommitFunc: func() error { -// panic("mock out the Commit method") -// }, -// DelBlockProcessingFunc: func(ctx context.Context, hash *chainhash.Hash, processedBy string) (int64, error) { -// panic("mock out the DelBlockProcessing method") -// }, -// GetBlockFunc: func(ctx context.Context, hash *chainhash.Hash) (*blocktx_api.Block, error) { -// panic("mock out the GetBlock method") -// }, -// GetBlockByHeightFunc: func(ctx context.Context, height uint64) (*blocktx_api.Block, error) { -// panic("mock out the GetBlockByHeight method") -// }, -// GetBlockGapsFunc: func(ctx context.Context, heightRange int) ([]*store.BlockGap, error) { -// panic("mock out the GetBlockGaps method") -// }, -// GetBlockHashesProcessingInProgressFunc: func(ctx context.Context, processedBy string) ([]*chainhash.Hash, error) { -// panic("mock out the GetBlockHashesProcessingInProgress method") -// }, -// GetChainTipFunc: func(ctx context.Context) (*blocktx_api.Block, error) { -// panic("mock out the GetChainTip method") -// }, -// GetLongestChainFromHeightFunc: func(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) { -// panic("mock out the GetLongestChainFromHeight method") -// }, -// GetMinedTransactionsFunc: func(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) { -// panic("mock out the GetMinedTransactions method") -// }, -// GetOrphanedChainUpFromHashFunc: func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { -// panic("mock out the GetOrphanedChainUpFromHash method") -// }, -// GetRegisteredTxsByBlockHashesFunc: func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { -// panic("mock out the GetRegisteredTxsByBlockHashes method") -// }, -// GetStaleChainBackFromHashFunc: func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { -// panic("mock out the GetStaleChainBackFromHash method") -// }, -// MarkBlockAsDoneFunc: func(ctx context.Context, hash *chainhash.Hash, size uint64, txCount uint64) error { -// panic("mock out the MarkBlockAsDone method") -// }, -// PingFunc: func(ctx context.Context) error { -// panic("mock out the Ping method") -// }, -// RegisterTransactionsFunc: func(ctx context.Context, txHashes [][]byte) ([]*chainhash.Hash, error) { -// panic("mock out the RegisterTransactions method") -// }, -// RollbackFunc: func() error { -// panic("mock out the Rollback method") -// }, -// SetBlockProcessingFunc: func(ctx context.Context, hash *chainhash.Hash, processedBy string) (string, error) { -// panic("mock out the SetBlockProcessing method") -// }, -// StartUnitOfWorkFunc: func(ctx context.Context) (store.UnitOfWork, error) { -// panic("mock out the StartUnitOfWork method") -// }, -// UpdateBlocksStatusesFunc: func(ctx context.Context, blockStatusUpdates []store.BlockStatusUpdate) error { -// panic("mock out the UpdateBlocksStatuses method") -// }, -// UpsertBlockFunc: func(ctx context.Context, block *blocktx_api.Block) (uint64, error) { -// panic("mock out the UpsertBlock method") -// }, -// UpsertBlockTransactionsFunc: func(ctx context.Context, blockID uint64, 
txsWithMerklePaths []store.TxWithMerklePath) error { -// panic("mock out the UpsertBlockTransactions method") -// }, -// VerifyMerkleRootsFunc: func(ctx context.Context, merkleRoots []*blocktx_api.MerkleRootVerificationRequest, maxAllowedBlockHeightMismatch int) (*blocktx_api.MerkleRootVerificationResponse, error) { -// panic("mock out the VerifyMerkleRoots method") -// }, -// WriteLockBlocksTableFunc: func(ctx context.Context) error { -// panic("mock out the WriteLockBlocksTable method") -// }, -// } -// -// // use mockedUnitOfWork in code that requires store.UnitOfWork -// // and then make assertions. -// -// } -type UnitOfWorkMock struct { - // ClearBlocktxTableFunc mocks the ClearBlocktxTable method. - ClearBlocktxTableFunc func(ctx context.Context, retentionDays int32, table string) (*blocktx_api.RowsAffectedResponse, error) - - // CloseFunc mocks the Close method. - CloseFunc func() error - - // CommitFunc mocks the Commit method. - CommitFunc func() error - - // DelBlockProcessingFunc mocks the DelBlockProcessing method. - DelBlockProcessingFunc func(ctx context.Context, hash *chainhash.Hash, processedBy string) (int64, error) - - // GetBlockFunc mocks the GetBlock method. - GetBlockFunc func(ctx context.Context, hash *chainhash.Hash) (*blocktx_api.Block, error) - - // GetBlockByHeightFunc mocks the GetBlockByHeight method. - GetBlockByHeightFunc func(ctx context.Context, height uint64) (*blocktx_api.Block, error) - - // GetBlockGapsFunc mocks the GetBlockGaps method. - GetBlockGapsFunc func(ctx context.Context, heightRange int) ([]*store.BlockGap, error) - - // GetBlockHashesProcessingInProgressFunc mocks the GetBlockHashesProcessingInProgress method. - GetBlockHashesProcessingInProgressFunc func(ctx context.Context, processedBy string) ([]*chainhash.Hash, error) - - // GetChainTipFunc mocks the GetChainTip method. - GetChainTipFunc func(ctx context.Context) (*blocktx_api.Block, error) - - // GetLongestChainFromHeightFunc mocks the GetLongestChainFromHeight method. - GetLongestChainFromHeightFunc func(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) - - // GetMinedTransactionsFunc mocks the GetMinedTransactions method. - GetMinedTransactionsFunc func(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) - - // GetOrphanedChainUpFromHashFunc mocks the GetOrphanedChainUpFromHash method. - GetOrphanedChainUpFromHashFunc func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) - - // GetRegisteredTxsByBlockHashesFunc mocks the GetRegisteredTxsByBlockHashes method. - GetRegisteredTxsByBlockHashesFunc func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) - - // GetStaleChainBackFromHashFunc mocks the GetStaleChainBackFromHash method. - GetStaleChainBackFromHashFunc func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) - - // MarkBlockAsDoneFunc mocks the MarkBlockAsDone method. - MarkBlockAsDoneFunc func(ctx context.Context, hash *chainhash.Hash, size uint64, txCount uint64) error - - // PingFunc mocks the Ping method. - PingFunc func(ctx context.Context) error - - // RegisterTransactionsFunc mocks the RegisterTransactions method. - RegisterTransactionsFunc func(ctx context.Context, txHashes [][]byte) ([]*chainhash.Hash, error) - - // RollbackFunc mocks the Rollback method. - RollbackFunc func() error - - // SetBlockProcessingFunc mocks the SetBlockProcessing method. 
- SetBlockProcessingFunc func(ctx context.Context, hash *chainhash.Hash, processedBy string) (string, error) - - // StartUnitOfWorkFunc mocks the StartUnitOfWork method. - StartUnitOfWorkFunc func(ctx context.Context) (store.UnitOfWork, error) - - // UpdateBlocksStatusesFunc mocks the UpdateBlocksStatuses method. - UpdateBlocksStatusesFunc func(ctx context.Context, blockStatusUpdates []store.BlockStatusUpdate) error - - // UpsertBlockFunc mocks the UpsertBlock method. - UpsertBlockFunc func(ctx context.Context, block *blocktx_api.Block) (uint64, error) - - // UpsertBlockTransactionsFunc mocks the UpsertBlockTransactions method. - UpsertBlockTransactionsFunc func(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) error - - // VerifyMerkleRootsFunc mocks the VerifyMerkleRoots method. - VerifyMerkleRootsFunc func(ctx context.Context, merkleRoots []*blocktx_api.MerkleRootVerificationRequest, maxAllowedBlockHeightMismatch int) (*blocktx_api.MerkleRootVerificationResponse, error) - - // WriteLockBlocksTableFunc mocks the WriteLockBlocksTable method. - WriteLockBlocksTableFunc func(ctx context.Context) error - - // calls tracks calls to the methods. - calls struct { - // ClearBlocktxTable holds details about calls to the ClearBlocktxTable method. - ClearBlocktxTable []struct { - // Ctx is the ctx argument value. - Ctx context.Context - // RetentionDays is the retentionDays argument value. - RetentionDays int32 - // Table is the table argument value. - Table string - } - // Close holds details about calls to the Close method. - Close []struct { - } - // Commit holds details about calls to the Commit method. - Commit []struct { - } - // DelBlockProcessing holds details about calls to the DelBlockProcessing method. - DelBlockProcessing []struct { - // Ctx is the ctx argument value. - Ctx context.Context - // Hash is the hash argument value. - Hash *chainhash.Hash - // ProcessedBy is the processedBy argument value. - ProcessedBy string - } - // GetBlock holds details about calls to the GetBlock method. - GetBlock []struct { - // Ctx is the ctx argument value. - Ctx context.Context - // Hash is the hash argument value. - Hash *chainhash.Hash - } - // GetBlockByHeight holds details about calls to the GetBlockByHeight method. - GetBlockByHeight []struct { - // Ctx is the ctx argument value. - Ctx context.Context - // Height is the height argument value. - Height uint64 - } - // GetBlockGaps holds details about calls to the GetBlockGaps method. - GetBlockGaps []struct { - // Ctx is the ctx argument value. - Ctx context.Context - // HeightRange is the heightRange argument value. - HeightRange int - } - // GetBlockHashesProcessingInProgress holds details about calls to the GetBlockHashesProcessingInProgress method. - GetBlockHashesProcessingInProgress []struct { - // Ctx is the ctx argument value. - Ctx context.Context - // ProcessedBy is the processedBy argument value. - ProcessedBy string - } - // GetChainTip holds details about calls to the GetChainTip method. - GetChainTip []struct { - // Ctx is the ctx argument value. - Ctx context.Context - } - // GetLongestChainFromHeight holds details about calls to the GetLongestChainFromHeight method. - GetLongestChainFromHeight []struct { - // Ctx is the ctx argument value. - Ctx context.Context - // Height is the height argument value. - Height uint64 - } - // GetMinedTransactions holds details about calls to the GetMinedTransactions method. - GetMinedTransactions []struct { - // Ctx is the ctx argument value. 
- Ctx context.Context - // Hashes is the hashes argument value. - Hashes [][]byte - // OnlyLongestChain is the onlyLongestChain argument value. - OnlyLongestChain bool - } - // GetOrphanedChainUpFromHash holds details about calls to the GetOrphanedChainUpFromHash method. - GetOrphanedChainUpFromHash []struct { - // Ctx is the ctx argument value. - Ctx context.Context - // Hash is the hash argument value. - Hash []byte - } - // GetRegisteredTxsByBlockHashes holds details about calls to the GetRegisteredTxsByBlockHashes method. - GetRegisteredTxsByBlockHashes []struct { - // Ctx is the ctx argument value. - Ctx context.Context - // BlockHashes is the blockHashes argument value. - BlockHashes [][]byte - } - // GetStaleChainBackFromHash holds details about calls to the GetStaleChainBackFromHash method. - GetStaleChainBackFromHash []struct { - // Ctx is the ctx argument value. - Ctx context.Context - // Hash is the hash argument value. - Hash []byte - } - // MarkBlockAsDone holds details about calls to the MarkBlockAsDone method. - MarkBlockAsDone []struct { - // Ctx is the ctx argument value. - Ctx context.Context - // Hash is the hash argument value. - Hash *chainhash.Hash - // Size is the size argument value. - Size uint64 - // TxCount is the txCount argument value. - TxCount uint64 - } - // Ping holds details about calls to the Ping method. - Ping []struct { - // Ctx is the ctx argument value. - Ctx context.Context - } - // RegisterTransactions holds details about calls to the RegisterTransactions method. - RegisterTransactions []struct { - // Ctx is the ctx argument value. - Ctx context.Context - // TxHashes is the txHashes argument value. - TxHashes [][]byte - } - // Rollback holds details about calls to the Rollback method. - Rollback []struct { - } - // SetBlockProcessing holds details about calls to the SetBlockProcessing method. - SetBlockProcessing []struct { - // Ctx is the ctx argument value. - Ctx context.Context - // Hash is the hash argument value. - Hash *chainhash.Hash - // ProcessedBy is the processedBy argument value. - ProcessedBy string - } - // StartUnitOfWork holds details about calls to the StartUnitOfWork method. - StartUnitOfWork []struct { - // Ctx is the ctx argument value. - Ctx context.Context - } - // UpdateBlocksStatuses holds details about calls to the UpdateBlocksStatuses method. - UpdateBlocksStatuses []struct { - // Ctx is the ctx argument value. - Ctx context.Context - // BlockStatusUpdates is the blockStatusUpdates argument value. - BlockStatusUpdates []store.BlockStatusUpdate - } - // UpsertBlock holds details about calls to the UpsertBlock method. - UpsertBlock []struct { - // Ctx is the ctx argument value. - Ctx context.Context - // Block is the block argument value. - Block *blocktx_api.Block - } - // UpsertBlockTransactions holds details about calls to the UpsertBlockTransactions method. - UpsertBlockTransactions []struct { - // Ctx is the ctx argument value. - Ctx context.Context - // BlockID is the blockID argument value. - BlockID uint64 - // TxsWithMerklePaths is the txsWithMerklePaths argument value. - TxsWithMerklePaths []store.TxWithMerklePath - } - // VerifyMerkleRoots holds details about calls to the VerifyMerkleRoots method. - VerifyMerkleRoots []struct { - // Ctx is the ctx argument value. - Ctx context.Context - // MerkleRoots is the merkleRoots argument value. - MerkleRoots []*blocktx_api.MerkleRootVerificationRequest - // MaxAllowedBlockHeightMismatch is the maxAllowedBlockHeightMismatch argument value. 
- MaxAllowedBlockHeightMismatch int - } - // WriteLockBlocksTable holds details about calls to the WriteLockBlocksTable method. - WriteLockBlocksTable []struct { - // Ctx is the ctx argument value. - Ctx context.Context - } - } - lockClearBlocktxTable sync.RWMutex - lockClose sync.RWMutex - lockCommit sync.RWMutex - lockDelBlockProcessing sync.RWMutex - lockGetBlock sync.RWMutex - lockGetBlockByHeight sync.RWMutex - lockGetBlockGaps sync.RWMutex - lockGetBlockHashesProcessingInProgress sync.RWMutex - lockGetChainTip sync.RWMutex - lockGetLongestChainFromHeight sync.RWMutex - lockGetMinedTransactions sync.RWMutex - lockGetOrphanedChainUpFromHash sync.RWMutex - lockGetRegisteredTxsByBlockHashes sync.RWMutex - lockGetStaleChainBackFromHash sync.RWMutex - lockMarkBlockAsDone sync.RWMutex - lockPing sync.RWMutex - lockRegisterTransactions sync.RWMutex - lockRollback sync.RWMutex - lockSetBlockProcessing sync.RWMutex - lockStartUnitOfWork sync.RWMutex - lockUpdateBlocksStatuses sync.RWMutex - lockUpsertBlock sync.RWMutex - lockUpsertBlockTransactions sync.RWMutex - lockVerifyMerkleRoots sync.RWMutex - lockWriteLockBlocksTable sync.RWMutex -} - -// ClearBlocktxTable calls ClearBlocktxTableFunc. -func (mock *UnitOfWorkMock) ClearBlocktxTable(ctx context.Context, retentionDays int32, table string) (*blocktx_api.RowsAffectedResponse, error) { - if mock.ClearBlocktxTableFunc == nil { - panic("UnitOfWorkMock.ClearBlocktxTableFunc: method is nil but UnitOfWork.ClearBlocktxTable was just called") - } - callInfo := struct { - Ctx context.Context - RetentionDays int32 - Table string - }{ - Ctx: ctx, - RetentionDays: retentionDays, - Table: table, - } - mock.lockClearBlocktxTable.Lock() - mock.calls.ClearBlocktxTable = append(mock.calls.ClearBlocktxTable, callInfo) - mock.lockClearBlocktxTable.Unlock() - return mock.ClearBlocktxTableFunc(ctx, retentionDays, table) -} - -// ClearBlocktxTableCalls gets all the calls that were made to ClearBlocktxTable. -// Check the length with: -// -// len(mockedUnitOfWork.ClearBlocktxTableCalls()) -func (mock *UnitOfWorkMock) ClearBlocktxTableCalls() []struct { - Ctx context.Context - RetentionDays int32 - Table string -} { - var calls []struct { - Ctx context.Context - RetentionDays int32 - Table string - } - mock.lockClearBlocktxTable.RLock() - calls = mock.calls.ClearBlocktxTable - mock.lockClearBlocktxTable.RUnlock() - return calls -} - -// Close calls CloseFunc. -func (mock *UnitOfWorkMock) Close() error { - if mock.CloseFunc == nil { - panic("UnitOfWorkMock.CloseFunc: method is nil but UnitOfWork.Close was just called") - } - callInfo := struct { - }{} - mock.lockClose.Lock() - mock.calls.Close = append(mock.calls.Close, callInfo) - mock.lockClose.Unlock() - return mock.CloseFunc() -} - -// CloseCalls gets all the calls that were made to Close. -// Check the length with: -// -// len(mockedUnitOfWork.CloseCalls()) -func (mock *UnitOfWorkMock) CloseCalls() []struct { -} { - var calls []struct { - } - mock.lockClose.RLock() - calls = mock.calls.Close - mock.lockClose.RUnlock() - return calls -} - -// Commit calls CommitFunc. -func (mock *UnitOfWorkMock) Commit() error { - if mock.CommitFunc == nil { - panic("UnitOfWorkMock.CommitFunc: method is nil but UnitOfWork.Commit was just called") - } - callInfo := struct { - }{} - mock.lockCommit.Lock() - mock.calls.Commit = append(mock.calls.Commit, callInfo) - mock.lockCommit.Unlock() - return mock.CommitFunc() -} - -// CommitCalls gets all the calls that were made to Commit. 
-// Check the length with: -// -// len(mockedUnitOfWork.CommitCalls()) -func (mock *UnitOfWorkMock) CommitCalls() []struct { -} { - var calls []struct { - } - mock.lockCommit.RLock() - calls = mock.calls.Commit - mock.lockCommit.RUnlock() - return calls -} - -// DelBlockProcessing calls DelBlockProcessingFunc. -func (mock *UnitOfWorkMock) DelBlockProcessing(ctx context.Context, hash *chainhash.Hash, processedBy string) (int64, error) { - if mock.DelBlockProcessingFunc == nil { - panic("UnitOfWorkMock.DelBlockProcessingFunc: method is nil but UnitOfWork.DelBlockProcessing was just called") - } - callInfo := struct { - Ctx context.Context - Hash *chainhash.Hash - ProcessedBy string - }{ - Ctx: ctx, - Hash: hash, - ProcessedBy: processedBy, - } - mock.lockDelBlockProcessing.Lock() - mock.calls.DelBlockProcessing = append(mock.calls.DelBlockProcessing, callInfo) - mock.lockDelBlockProcessing.Unlock() - return mock.DelBlockProcessingFunc(ctx, hash, processedBy) -} - -// DelBlockProcessingCalls gets all the calls that were made to DelBlockProcessing. -// Check the length with: -// -// len(mockedUnitOfWork.DelBlockProcessingCalls()) -func (mock *UnitOfWorkMock) DelBlockProcessingCalls() []struct { - Ctx context.Context - Hash *chainhash.Hash - ProcessedBy string -} { - var calls []struct { - Ctx context.Context - Hash *chainhash.Hash - ProcessedBy string - } - mock.lockDelBlockProcessing.RLock() - calls = mock.calls.DelBlockProcessing - mock.lockDelBlockProcessing.RUnlock() - return calls -} - -// GetBlock calls GetBlockFunc. -func (mock *UnitOfWorkMock) GetBlock(ctx context.Context, hash *chainhash.Hash) (*blocktx_api.Block, error) { - if mock.GetBlockFunc == nil { - panic("UnitOfWorkMock.GetBlockFunc: method is nil but UnitOfWork.GetBlock was just called") - } - callInfo := struct { - Ctx context.Context - Hash *chainhash.Hash - }{ - Ctx: ctx, - Hash: hash, - } - mock.lockGetBlock.Lock() - mock.calls.GetBlock = append(mock.calls.GetBlock, callInfo) - mock.lockGetBlock.Unlock() - return mock.GetBlockFunc(ctx, hash) -} - -// GetBlockCalls gets all the calls that were made to GetBlock. -// Check the length with: -// -// len(mockedUnitOfWork.GetBlockCalls()) -func (mock *UnitOfWorkMock) GetBlockCalls() []struct { - Ctx context.Context - Hash *chainhash.Hash -} { - var calls []struct { - Ctx context.Context - Hash *chainhash.Hash - } - mock.lockGetBlock.RLock() - calls = mock.calls.GetBlock - mock.lockGetBlock.RUnlock() - return calls -} - -// GetBlockByHeight calls GetBlockByHeightFunc. -func (mock *UnitOfWorkMock) GetBlockByHeight(ctx context.Context, height uint64) (*blocktx_api.Block, error) { - if mock.GetBlockByHeightFunc == nil { - panic("UnitOfWorkMock.GetBlockByHeightFunc: method is nil but UnitOfWork.GetBlockByHeight was just called") - } - callInfo := struct { - Ctx context.Context - Height uint64 - }{ - Ctx: ctx, - Height: height, - } - mock.lockGetBlockByHeight.Lock() - mock.calls.GetBlockByHeight = append(mock.calls.GetBlockByHeight, callInfo) - mock.lockGetBlockByHeight.Unlock() - return mock.GetBlockByHeightFunc(ctx, height) -} - -// GetBlockByHeightCalls gets all the calls that were made to GetBlockByHeight. 
-// Check the length with: -// -// len(mockedUnitOfWork.GetBlockByHeightCalls()) -func (mock *UnitOfWorkMock) GetBlockByHeightCalls() []struct { - Ctx context.Context - Height uint64 -} { - var calls []struct { - Ctx context.Context - Height uint64 - } - mock.lockGetBlockByHeight.RLock() - calls = mock.calls.GetBlockByHeight - mock.lockGetBlockByHeight.RUnlock() - return calls -} - -// GetBlockGaps calls GetBlockGapsFunc. -func (mock *UnitOfWorkMock) GetBlockGaps(ctx context.Context, heightRange int) ([]*store.BlockGap, error) { - if mock.GetBlockGapsFunc == nil { - panic("UnitOfWorkMock.GetBlockGapsFunc: method is nil but UnitOfWork.GetBlockGaps was just called") - } - callInfo := struct { - Ctx context.Context - HeightRange int - }{ - Ctx: ctx, - HeightRange: heightRange, - } - mock.lockGetBlockGaps.Lock() - mock.calls.GetBlockGaps = append(mock.calls.GetBlockGaps, callInfo) - mock.lockGetBlockGaps.Unlock() - return mock.GetBlockGapsFunc(ctx, heightRange) -} - -// GetBlockGapsCalls gets all the calls that were made to GetBlockGaps. -// Check the length with: -// -// len(mockedUnitOfWork.GetBlockGapsCalls()) -func (mock *UnitOfWorkMock) GetBlockGapsCalls() []struct { - Ctx context.Context - HeightRange int -} { - var calls []struct { - Ctx context.Context - HeightRange int - } - mock.lockGetBlockGaps.RLock() - calls = mock.calls.GetBlockGaps - mock.lockGetBlockGaps.RUnlock() - return calls -} - -// GetBlockHashesProcessingInProgress calls GetBlockHashesProcessingInProgressFunc. -func (mock *UnitOfWorkMock) GetBlockHashesProcessingInProgress(ctx context.Context, processedBy string) ([]*chainhash.Hash, error) { - if mock.GetBlockHashesProcessingInProgressFunc == nil { - panic("UnitOfWorkMock.GetBlockHashesProcessingInProgressFunc: method is nil but UnitOfWork.GetBlockHashesProcessingInProgress was just called") - } - callInfo := struct { - Ctx context.Context - ProcessedBy string - }{ - Ctx: ctx, - ProcessedBy: processedBy, - } - mock.lockGetBlockHashesProcessingInProgress.Lock() - mock.calls.GetBlockHashesProcessingInProgress = append(mock.calls.GetBlockHashesProcessingInProgress, callInfo) - mock.lockGetBlockHashesProcessingInProgress.Unlock() - return mock.GetBlockHashesProcessingInProgressFunc(ctx, processedBy) -} - -// GetBlockHashesProcessingInProgressCalls gets all the calls that were made to GetBlockHashesProcessingInProgress. -// Check the length with: -// -// len(mockedUnitOfWork.GetBlockHashesProcessingInProgressCalls()) -func (mock *UnitOfWorkMock) GetBlockHashesProcessingInProgressCalls() []struct { - Ctx context.Context - ProcessedBy string -} { - var calls []struct { - Ctx context.Context - ProcessedBy string - } - mock.lockGetBlockHashesProcessingInProgress.RLock() - calls = mock.calls.GetBlockHashesProcessingInProgress - mock.lockGetBlockHashesProcessingInProgress.RUnlock() - return calls -} - -// GetChainTip calls GetChainTipFunc. -func (mock *UnitOfWorkMock) GetChainTip(ctx context.Context) (*blocktx_api.Block, error) { - if mock.GetChainTipFunc == nil { - panic("UnitOfWorkMock.GetChainTipFunc: method is nil but UnitOfWork.GetChainTip was just called") - } - callInfo := struct { - Ctx context.Context - }{ - Ctx: ctx, - } - mock.lockGetChainTip.Lock() - mock.calls.GetChainTip = append(mock.calls.GetChainTip, callInfo) - mock.lockGetChainTip.Unlock() - return mock.GetChainTipFunc(ctx) -} - -// GetChainTipCalls gets all the calls that were made to GetChainTip. 
-// Check the length with: -// -// len(mockedUnitOfWork.GetChainTipCalls()) -func (mock *UnitOfWorkMock) GetChainTipCalls() []struct { - Ctx context.Context -} { - var calls []struct { - Ctx context.Context - } - mock.lockGetChainTip.RLock() - calls = mock.calls.GetChainTip - mock.lockGetChainTip.RUnlock() - return calls -} - -// GetLongestChainFromHeight calls GetLongestChainFromHeightFunc. -func (mock *UnitOfWorkMock) GetLongestChainFromHeight(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) { - if mock.GetLongestChainFromHeightFunc == nil { - panic("UnitOfWorkMock.GetLongestChainFromHeightFunc: method is nil but UnitOfWork.GetLongestChainFromHeight was just called") - } - callInfo := struct { - Ctx context.Context - Height uint64 - }{ - Ctx: ctx, - Height: height, - } - mock.lockGetLongestChainFromHeight.Lock() - mock.calls.GetLongestChainFromHeight = append(mock.calls.GetLongestChainFromHeight, callInfo) - mock.lockGetLongestChainFromHeight.Unlock() - return mock.GetLongestChainFromHeightFunc(ctx, height) -} - -// GetLongestChainFromHeightCalls gets all the calls that were made to GetLongestChainFromHeight. -// Check the length with: -// -// len(mockedUnitOfWork.GetLongestChainFromHeightCalls()) -func (mock *UnitOfWorkMock) GetLongestChainFromHeightCalls() []struct { - Ctx context.Context - Height uint64 -} { - var calls []struct { - Ctx context.Context - Height uint64 - } - mock.lockGetLongestChainFromHeight.RLock() - calls = mock.calls.GetLongestChainFromHeight - mock.lockGetLongestChainFromHeight.RUnlock() - return calls -} - -// GetMinedTransactions calls GetMinedTransactionsFunc. -func (mock *UnitOfWorkMock) GetMinedTransactions(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) { - if mock.GetMinedTransactionsFunc == nil { - panic("UnitOfWorkMock.GetMinedTransactionsFunc: method is nil but UnitOfWork.GetMinedTransactions was just called") - } - callInfo := struct { - Ctx context.Context - Hashes [][]byte - OnlyLongestChain bool - }{ - Ctx: ctx, - Hashes: hashes, - OnlyLongestChain: onlyLongestChain, - } - mock.lockGetMinedTransactions.Lock() - mock.calls.GetMinedTransactions = append(mock.calls.GetMinedTransactions, callInfo) - mock.lockGetMinedTransactions.Unlock() - return mock.GetMinedTransactionsFunc(ctx, hashes, onlyLongestChain) -} - -// GetMinedTransactionsCalls gets all the calls that were made to GetMinedTransactions. -// Check the length with: -// -// len(mockedUnitOfWork.GetMinedTransactionsCalls()) -func (mock *UnitOfWorkMock) GetMinedTransactionsCalls() []struct { - Ctx context.Context - Hashes [][]byte - OnlyLongestChain bool -} { - var calls []struct { - Ctx context.Context - Hashes [][]byte - OnlyLongestChain bool - } - mock.lockGetMinedTransactions.RLock() - calls = mock.calls.GetMinedTransactions - mock.lockGetMinedTransactions.RUnlock() - return calls -} - -// GetOrphanedChainUpFromHash calls GetOrphanedChainUpFromHashFunc. 
-func (mock *UnitOfWorkMock) GetOrphanedChainUpFromHash(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { - if mock.GetOrphanedChainUpFromHashFunc == nil { - panic("UnitOfWorkMock.GetOrphanedChainUpFromHashFunc: method is nil but UnitOfWork.GetOrphanedChainUpFromHash was just called") - } - callInfo := struct { - Ctx context.Context - Hash []byte - }{ - Ctx: ctx, - Hash: hash, - } - mock.lockGetOrphanedChainUpFromHash.Lock() - mock.calls.GetOrphanedChainUpFromHash = append(mock.calls.GetOrphanedChainUpFromHash, callInfo) - mock.lockGetOrphanedChainUpFromHash.Unlock() - return mock.GetOrphanedChainUpFromHashFunc(ctx, hash) -} - -// GetOrphanedChainUpFromHashCalls gets all the calls that were made to GetOrphanedChainUpFromHash. -// Check the length with: -// -// len(mockedUnitOfWork.GetOrphanedChainUpFromHashCalls()) -func (mock *UnitOfWorkMock) GetOrphanedChainUpFromHashCalls() []struct { - Ctx context.Context - Hash []byte -} { - var calls []struct { - Ctx context.Context - Hash []byte - } - mock.lockGetOrphanedChainUpFromHash.RLock() - calls = mock.calls.GetOrphanedChainUpFromHash - mock.lockGetOrphanedChainUpFromHash.RUnlock() - return calls -} - -// GetRegisteredTxsByBlockHashes calls GetRegisteredTxsByBlockHashesFunc. -func (mock *UnitOfWorkMock) GetRegisteredTxsByBlockHashes(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { - if mock.GetRegisteredTxsByBlockHashesFunc == nil { - panic("UnitOfWorkMock.GetRegisteredTxsByBlockHashesFunc: method is nil but UnitOfWork.GetRegisteredTxsByBlockHashes was just called") - } - callInfo := struct { - Ctx context.Context - BlockHashes [][]byte - }{ - Ctx: ctx, - BlockHashes: blockHashes, - } - mock.lockGetRegisteredTxsByBlockHashes.Lock() - mock.calls.GetRegisteredTxsByBlockHashes = append(mock.calls.GetRegisteredTxsByBlockHashes, callInfo) - mock.lockGetRegisteredTxsByBlockHashes.Unlock() - return mock.GetRegisteredTxsByBlockHashesFunc(ctx, blockHashes) -} - -// GetRegisteredTxsByBlockHashesCalls gets all the calls that were made to GetRegisteredTxsByBlockHashes. -// Check the length with: -// -// len(mockedUnitOfWork.GetRegisteredTxsByBlockHashesCalls()) -func (mock *UnitOfWorkMock) GetRegisteredTxsByBlockHashesCalls() []struct { - Ctx context.Context - BlockHashes [][]byte -} { - var calls []struct { - Ctx context.Context - BlockHashes [][]byte - } - mock.lockGetRegisteredTxsByBlockHashes.RLock() - calls = mock.calls.GetRegisteredTxsByBlockHashes - mock.lockGetRegisteredTxsByBlockHashes.RUnlock() - return calls -} - -// GetStaleChainBackFromHash calls GetStaleChainBackFromHashFunc. -func (mock *UnitOfWorkMock) GetStaleChainBackFromHash(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { - if mock.GetStaleChainBackFromHashFunc == nil { - panic("UnitOfWorkMock.GetStaleChainBackFromHashFunc: method is nil but UnitOfWork.GetStaleChainBackFromHash was just called") - } - callInfo := struct { - Ctx context.Context - Hash []byte - }{ - Ctx: ctx, - Hash: hash, - } - mock.lockGetStaleChainBackFromHash.Lock() - mock.calls.GetStaleChainBackFromHash = append(mock.calls.GetStaleChainBackFromHash, callInfo) - mock.lockGetStaleChainBackFromHash.Unlock() - return mock.GetStaleChainBackFromHashFunc(ctx, hash) -} - -// GetStaleChainBackFromHashCalls gets all the calls that were made to GetStaleChainBackFromHash. 
-// Check the length with: -// -// len(mockedUnitOfWork.GetStaleChainBackFromHashCalls()) -func (mock *UnitOfWorkMock) GetStaleChainBackFromHashCalls() []struct { - Ctx context.Context - Hash []byte -} { - var calls []struct { - Ctx context.Context - Hash []byte - } - mock.lockGetStaleChainBackFromHash.RLock() - calls = mock.calls.GetStaleChainBackFromHash - mock.lockGetStaleChainBackFromHash.RUnlock() - return calls -} - -// MarkBlockAsDone calls MarkBlockAsDoneFunc. -func (mock *UnitOfWorkMock) MarkBlockAsDone(ctx context.Context, hash *chainhash.Hash, size uint64, txCount uint64) error { - if mock.MarkBlockAsDoneFunc == nil { - panic("UnitOfWorkMock.MarkBlockAsDoneFunc: method is nil but UnitOfWork.MarkBlockAsDone was just called") - } - callInfo := struct { - Ctx context.Context - Hash *chainhash.Hash - Size uint64 - TxCount uint64 - }{ - Ctx: ctx, - Hash: hash, - Size: size, - TxCount: txCount, - } - mock.lockMarkBlockAsDone.Lock() - mock.calls.MarkBlockAsDone = append(mock.calls.MarkBlockAsDone, callInfo) - mock.lockMarkBlockAsDone.Unlock() - return mock.MarkBlockAsDoneFunc(ctx, hash, size, txCount) -} - -// MarkBlockAsDoneCalls gets all the calls that were made to MarkBlockAsDone. -// Check the length with: -// -// len(mockedUnitOfWork.MarkBlockAsDoneCalls()) -func (mock *UnitOfWorkMock) MarkBlockAsDoneCalls() []struct { - Ctx context.Context - Hash *chainhash.Hash - Size uint64 - TxCount uint64 -} { - var calls []struct { - Ctx context.Context - Hash *chainhash.Hash - Size uint64 - TxCount uint64 - } - mock.lockMarkBlockAsDone.RLock() - calls = mock.calls.MarkBlockAsDone - mock.lockMarkBlockAsDone.RUnlock() - return calls -} - -// Ping calls PingFunc. -func (mock *UnitOfWorkMock) Ping(ctx context.Context) error { - if mock.PingFunc == nil { - panic("UnitOfWorkMock.PingFunc: method is nil but UnitOfWork.Ping was just called") - } - callInfo := struct { - Ctx context.Context - }{ - Ctx: ctx, - } - mock.lockPing.Lock() - mock.calls.Ping = append(mock.calls.Ping, callInfo) - mock.lockPing.Unlock() - return mock.PingFunc(ctx) -} - -// PingCalls gets all the calls that were made to Ping. -// Check the length with: -// -// len(mockedUnitOfWork.PingCalls()) -func (mock *UnitOfWorkMock) PingCalls() []struct { - Ctx context.Context -} { - var calls []struct { - Ctx context.Context - } - mock.lockPing.RLock() - calls = mock.calls.Ping - mock.lockPing.RUnlock() - return calls -} - -// RegisterTransactions calls RegisterTransactionsFunc. -func (mock *UnitOfWorkMock) RegisterTransactions(ctx context.Context, txHashes [][]byte) ([]*chainhash.Hash, error) { - if mock.RegisterTransactionsFunc == nil { - panic("UnitOfWorkMock.RegisterTransactionsFunc: method is nil but UnitOfWork.RegisterTransactions was just called") - } - callInfo := struct { - Ctx context.Context - TxHashes [][]byte - }{ - Ctx: ctx, - TxHashes: txHashes, - } - mock.lockRegisterTransactions.Lock() - mock.calls.RegisterTransactions = append(mock.calls.RegisterTransactions, callInfo) - mock.lockRegisterTransactions.Unlock() - return mock.RegisterTransactionsFunc(ctx, txHashes) -} - -// RegisterTransactionsCalls gets all the calls that were made to RegisterTransactions. 
-// Check the length with: -// -// len(mockedUnitOfWork.RegisterTransactionsCalls()) -func (mock *UnitOfWorkMock) RegisterTransactionsCalls() []struct { - Ctx context.Context - TxHashes [][]byte -} { - var calls []struct { - Ctx context.Context - TxHashes [][]byte - } - mock.lockRegisterTransactions.RLock() - calls = mock.calls.RegisterTransactions - mock.lockRegisterTransactions.RUnlock() - return calls -} - -// Rollback calls RollbackFunc. -func (mock *UnitOfWorkMock) Rollback() error { - if mock.RollbackFunc == nil { - panic("UnitOfWorkMock.RollbackFunc: method is nil but UnitOfWork.Rollback was just called") - } - callInfo := struct { - }{} - mock.lockRollback.Lock() - mock.calls.Rollback = append(mock.calls.Rollback, callInfo) - mock.lockRollback.Unlock() - return mock.RollbackFunc() -} - -// RollbackCalls gets all the calls that were made to Rollback. -// Check the length with: -// -// len(mockedUnitOfWork.RollbackCalls()) -func (mock *UnitOfWorkMock) RollbackCalls() []struct { -} { - var calls []struct { - } - mock.lockRollback.RLock() - calls = mock.calls.Rollback - mock.lockRollback.RUnlock() - return calls -} - -// SetBlockProcessing calls SetBlockProcessingFunc. -func (mock *UnitOfWorkMock) SetBlockProcessing(ctx context.Context, hash *chainhash.Hash, processedBy string) (string, error) { - if mock.SetBlockProcessingFunc == nil { - panic("UnitOfWorkMock.SetBlockProcessingFunc: method is nil but UnitOfWork.SetBlockProcessing was just called") - } - callInfo := struct { - Ctx context.Context - Hash *chainhash.Hash - ProcessedBy string - }{ - Ctx: ctx, - Hash: hash, - ProcessedBy: processedBy, - } - mock.lockSetBlockProcessing.Lock() - mock.calls.SetBlockProcessing = append(mock.calls.SetBlockProcessing, callInfo) - mock.lockSetBlockProcessing.Unlock() - return mock.SetBlockProcessingFunc(ctx, hash, processedBy) -} - -// SetBlockProcessingCalls gets all the calls that were made to SetBlockProcessing. -// Check the length with: -// -// len(mockedUnitOfWork.SetBlockProcessingCalls()) -func (mock *UnitOfWorkMock) SetBlockProcessingCalls() []struct { - Ctx context.Context - Hash *chainhash.Hash - ProcessedBy string -} { - var calls []struct { - Ctx context.Context - Hash *chainhash.Hash - ProcessedBy string - } - mock.lockSetBlockProcessing.RLock() - calls = mock.calls.SetBlockProcessing - mock.lockSetBlockProcessing.RUnlock() - return calls -} - -// StartUnitOfWork calls StartUnitOfWorkFunc. -func (mock *UnitOfWorkMock) StartUnitOfWork(ctx context.Context) (store.UnitOfWork, error) { - if mock.StartUnitOfWorkFunc == nil { - panic("UnitOfWorkMock.StartUnitOfWorkFunc: method is nil but UnitOfWork.StartUnitOfWork was just called") - } - callInfo := struct { - Ctx context.Context - }{ - Ctx: ctx, - } - mock.lockStartUnitOfWork.Lock() - mock.calls.StartUnitOfWork = append(mock.calls.StartUnitOfWork, callInfo) - mock.lockStartUnitOfWork.Unlock() - return mock.StartUnitOfWorkFunc(ctx) -} - -// StartUnitOfWorkCalls gets all the calls that were made to StartUnitOfWork. -// Check the length with: -// -// len(mockedUnitOfWork.StartUnitOfWorkCalls()) -func (mock *UnitOfWorkMock) StartUnitOfWorkCalls() []struct { - Ctx context.Context -} { - var calls []struct { - Ctx context.Context - } - mock.lockStartUnitOfWork.RLock() - calls = mock.calls.StartUnitOfWork - mock.lockStartUnitOfWork.RUnlock() - return calls -} - -// UpdateBlocksStatuses calls UpdateBlocksStatusesFunc. 
-func (mock *UnitOfWorkMock) UpdateBlocksStatuses(ctx context.Context, blockStatusUpdates []store.BlockStatusUpdate) error { - if mock.UpdateBlocksStatusesFunc == nil { - panic("UnitOfWorkMock.UpdateBlocksStatusesFunc: method is nil but UnitOfWork.UpdateBlocksStatuses was just called") - } - callInfo := struct { - Ctx context.Context - BlockStatusUpdates []store.BlockStatusUpdate - }{ - Ctx: ctx, - BlockStatusUpdates: blockStatusUpdates, - } - mock.lockUpdateBlocksStatuses.Lock() - mock.calls.UpdateBlocksStatuses = append(mock.calls.UpdateBlocksStatuses, callInfo) - mock.lockUpdateBlocksStatuses.Unlock() - return mock.UpdateBlocksStatusesFunc(ctx, blockStatusUpdates) -} - -// UpdateBlocksStatusesCalls gets all the calls that were made to UpdateBlocksStatuses. -// Check the length with: -// -// len(mockedUnitOfWork.UpdateBlocksStatusesCalls()) -func (mock *UnitOfWorkMock) UpdateBlocksStatusesCalls() []struct { - Ctx context.Context - BlockStatusUpdates []store.BlockStatusUpdate -} { - var calls []struct { - Ctx context.Context - BlockStatusUpdates []store.BlockStatusUpdate - } - mock.lockUpdateBlocksStatuses.RLock() - calls = mock.calls.UpdateBlocksStatuses - mock.lockUpdateBlocksStatuses.RUnlock() - return calls -} - -// UpsertBlock calls UpsertBlockFunc. -func (mock *UnitOfWorkMock) UpsertBlock(ctx context.Context, block *blocktx_api.Block) (uint64, error) { - if mock.UpsertBlockFunc == nil { - panic("UnitOfWorkMock.UpsertBlockFunc: method is nil but UnitOfWork.UpsertBlock was just called") - } - callInfo := struct { - Ctx context.Context - Block *blocktx_api.Block - }{ - Ctx: ctx, - Block: block, - } - mock.lockUpsertBlock.Lock() - mock.calls.UpsertBlock = append(mock.calls.UpsertBlock, callInfo) - mock.lockUpsertBlock.Unlock() - return mock.UpsertBlockFunc(ctx, block) -} - -// UpsertBlockCalls gets all the calls that were made to UpsertBlock. -// Check the length with: -// -// len(mockedUnitOfWork.UpsertBlockCalls()) -func (mock *UnitOfWorkMock) UpsertBlockCalls() []struct { - Ctx context.Context - Block *blocktx_api.Block -} { - var calls []struct { - Ctx context.Context - Block *blocktx_api.Block - } - mock.lockUpsertBlock.RLock() - calls = mock.calls.UpsertBlock - mock.lockUpsertBlock.RUnlock() - return calls -} - -// UpsertBlockTransactions calls UpsertBlockTransactionsFunc. -func (mock *UnitOfWorkMock) UpsertBlockTransactions(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) error { - if mock.UpsertBlockTransactionsFunc == nil { - panic("UnitOfWorkMock.UpsertBlockTransactionsFunc: method is nil but UnitOfWork.UpsertBlockTransactions was just called") - } - callInfo := struct { - Ctx context.Context - BlockID uint64 - TxsWithMerklePaths []store.TxWithMerklePath - }{ - Ctx: ctx, - BlockID: blockID, - TxsWithMerklePaths: txsWithMerklePaths, - } - mock.lockUpsertBlockTransactions.Lock() - mock.calls.UpsertBlockTransactions = append(mock.calls.UpsertBlockTransactions, callInfo) - mock.lockUpsertBlockTransactions.Unlock() - return mock.UpsertBlockTransactionsFunc(ctx, blockID, txsWithMerklePaths) -} - -// UpsertBlockTransactionsCalls gets all the calls that were made to UpsertBlockTransactions. 
-// Check the length with: -// -// len(mockedUnitOfWork.UpsertBlockTransactionsCalls()) -func (mock *UnitOfWorkMock) UpsertBlockTransactionsCalls() []struct { - Ctx context.Context - BlockID uint64 - TxsWithMerklePaths []store.TxWithMerklePath -} { - var calls []struct { - Ctx context.Context - BlockID uint64 - TxsWithMerklePaths []store.TxWithMerklePath - } - mock.lockUpsertBlockTransactions.RLock() - calls = mock.calls.UpsertBlockTransactions - mock.lockUpsertBlockTransactions.RUnlock() - return calls -} - -// VerifyMerkleRoots calls VerifyMerkleRootsFunc. -func (mock *UnitOfWorkMock) VerifyMerkleRoots(ctx context.Context, merkleRoots []*blocktx_api.MerkleRootVerificationRequest, maxAllowedBlockHeightMismatch int) (*blocktx_api.MerkleRootVerificationResponse, error) { - if mock.VerifyMerkleRootsFunc == nil { - panic("UnitOfWorkMock.VerifyMerkleRootsFunc: method is nil but UnitOfWork.VerifyMerkleRoots was just called") - } - callInfo := struct { - Ctx context.Context - MerkleRoots []*blocktx_api.MerkleRootVerificationRequest - MaxAllowedBlockHeightMismatch int - }{ - Ctx: ctx, - MerkleRoots: merkleRoots, - MaxAllowedBlockHeightMismatch: maxAllowedBlockHeightMismatch, - } - mock.lockVerifyMerkleRoots.Lock() - mock.calls.VerifyMerkleRoots = append(mock.calls.VerifyMerkleRoots, callInfo) - mock.lockVerifyMerkleRoots.Unlock() - return mock.VerifyMerkleRootsFunc(ctx, merkleRoots, maxAllowedBlockHeightMismatch) -} - -// VerifyMerkleRootsCalls gets all the calls that were made to VerifyMerkleRoots. -// Check the length with: -// -// len(mockedUnitOfWork.VerifyMerkleRootsCalls()) -func (mock *UnitOfWorkMock) VerifyMerkleRootsCalls() []struct { - Ctx context.Context - MerkleRoots []*blocktx_api.MerkleRootVerificationRequest - MaxAllowedBlockHeightMismatch int -} { - var calls []struct { - Ctx context.Context - MerkleRoots []*blocktx_api.MerkleRootVerificationRequest - MaxAllowedBlockHeightMismatch int - } - mock.lockVerifyMerkleRoots.RLock() - calls = mock.calls.VerifyMerkleRoots - mock.lockVerifyMerkleRoots.RUnlock() - return calls -} - -// WriteLockBlocksTable calls WriteLockBlocksTableFunc. -func (mock *UnitOfWorkMock) WriteLockBlocksTable(ctx context.Context) error { - if mock.WriteLockBlocksTableFunc == nil { - panic("UnitOfWorkMock.WriteLockBlocksTableFunc: method is nil but UnitOfWork.WriteLockBlocksTable was just called") - } - callInfo := struct { - Ctx context.Context - }{ - Ctx: ctx, - } - mock.lockWriteLockBlocksTable.Lock() - mock.calls.WriteLockBlocksTable = append(mock.calls.WriteLockBlocksTable, callInfo) - mock.lockWriteLockBlocksTable.Unlock() - return mock.WriteLockBlocksTableFunc(ctx) -} - -// WriteLockBlocksTableCalls gets all the calls that were made to WriteLockBlocksTable. 
-// Check the length with: -// -// len(mockedUnitOfWork.WriteLockBlocksTableCalls()) -func (mock *UnitOfWorkMock) WriteLockBlocksTableCalls() []struct { - Ctx context.Context -} { - var calls []struct { - Ctx context.Context - } - mock.lockWriteLockBlocksTable.RLock() - calls = mock.calls.WriteLockBlocksTable - mock.lockWriteLockBlocksTable.RUnlock() - return calls -} diff --git a/internal/blocktx/store/mocks/blocktx_store_mock.go b/internal/blocktx/store/mocks/blocktx_store_mock.go index a4fe30b23..d7acfb51d 100644 --- a/internal/blocktx/store/mocks/blocktx_store_mock.go +++ b/internal/blocktx/store/mocks/blocktx_store_mock.go @@ -33,9 +33,6 @@ var _ store.BlocktxStore = &BlocktxStoreMock{} // GetBlockFunc: func(ctx context.Context, hash *chainhash.Hash) (*blocktx_api.Block, error) { // panic("mock out the GetBlock method") // }, -// GetBlockByHeightFunc: func(ctx context.Context, height uint64) (*blocktx_api.Block, error) { -// panic("mock out the GetBlockByHeight method") -// }, // GetBlockGapsFunc: func(ctx context.Context, heightRange int) ([]*store.BlockGap, error) { // panic("mock out the GetBlockGaps method") // }, @@ -45,14 +42,17 @@ var _ store.BlocktxStore = &BlocktxStoreMock{} // GetChainTipFunc: func(ctx context.Context) (*blocktx_api.Block, error) { // panic("mock out the GetChainTip method") // }, +// GetLongestBlockByHeightFunc: func(ctx context.Context, height uint64) (*blocktx_api.Block, error) { +// panic("mock out the GetLongestBlockByHeight method") +// }, // GetLongestChainFromHeightFunc: func(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) { // panic("mock out the GetLongestChainFromHeight method") // }, // GetMinedTransactionsFunc: func(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) { // panic("mock out the GetMinedTransactions method") // }, -// GetOrphanedChainUpFromHashFunc: func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { -// panic("mock out the GetOrphanedChainUpFromHash method") +// GetOrphansBackToNonOrphanAncestorFunc: func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, *blocktx_api.Block, error) { +// panic("mock out the GetOrphansBackToNonOrphanAncestor method") // }, // GetRegisteredTxsByBlockHashesFunc: func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { // panic("mock out the GetRegisteredTxsByBlockHashes method") @@ -75,9 +75,6 @@ var _ store.BlocktxStore = &BlocktxStoreMock{} // SetBlockProcessingFunc: func(ctx context.Context, hash *chainhash.Hash, processedBy string) (string, error) { // panic("mock out the SetBlockProcessing method") // }, -// StartUnitOfWorkFunc: func(ctx context.Context) (store.UnitOfWork, error) { -// panic("mock out the StartUnitOfWork method") -// }, // UpdateBlocksStatusesFunc: func(ctx context.Context, blockStatusUpdates []store.BlockStatusUpdate) error { // panic("mock out the UpdateBlocksStatuses method") // }, @@ -109,9 +106,6 @@ type BlocktxStoreMock struct { // GetBlockFunc mocks the GetBlock method. GetBlockFunc func(ctx context.Context, hash *chainhash.Hash) (*blocktx_api.Block, error) - // GetBlockByHeightFunc mocks the GetBlockByHeight method. - GetBlockByHeightFunc func(ctx context.Context, height uint64) (*blocktx_api.Block, error) - // GetBlockGapsFunc mocks the GetBlockGaps method. GetBlockGapsFunc func(ctx context.Context, heightRange int) ([]*store.BlockGap, error) @@ -121,14 +115,17 @@ type BlocktxStoreMock struct { // GetChainTipFunc mocks the GetChainTip method. 
GetChainTipFunc func(ctx context.Context) (*blocktx_api.Block, error) + // GetLongestBlockByHeightFunc mocks the GetLongestBlockByHeight method. + GetLongestBlockByHeightFunc func(ctx context.Context, height uint64) (*blocktx_api.Block, error) + // GetLongestChainFromHeightFunc mocks the GetLongestChainFromHeight method. GetLongestChainFromHeightFunc func(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) // GetMinedTransactionsFunc mocks the GetMinedTransactions method. GetMinedTransactionsFunc func(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) - // GetOrphanedChainUpFromHashFunc mocks the GetOrphanedChainUpFromHash method. - GetOrphanedChainUpFromHashFunc func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) + // GetOrphansBackToNonOrphanAncestorFunc mocks the GetOrphansBackToNonOrphanAncestor method. + GetOrphansBackToNonOrphanAncestorFunc func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, *blocktx_api.Block, error) // GetRegisteredTxsByBlockHashesFunc mocks the GetRegisteredTxsByBlockHashes method. GetRegisteredTxsByBlockHashesFunc func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) @@ -151,9 +148,6 @@ type BlocktxStoreMock struct { // SetBlockProcessingFunc mocks the SetBlockProcessing method. SetBlockProcessingFunc func(ctx context.Context, hash *chainhash.Hash, processedBy string) (string, error) - // StartUnitOfWorkFunc mocks the StartUnitOfWork method. - StartUnitOfWorkFunc func(ctx context.Context) (store.UnitOfWork, error) - // UpdateBlocksStatusesFunc mocks the UpdateBlocksStatuses method. UpdateBlocksStatusesFunc func(ctx context.Context, blockStatusUpdates []store.BlockStatusUpdate) error @@ -196,13 +190,6 @@ type BlocktxStoreMock struct { // Hash is the hash argument value. Hash *chainhash.Hash } - // GetBlockByHeight holds details about calls to the GetBlockByHeight method. - GetBlockByHeight []struct { - // Ctx is the ctx argument value. - Ctx context.Context - // Height is the height argument value. - Height uint64 - } // GetBlockGaps holds details about calls to the GetBlockGaps method. GetBlockGaps []struct { // Ctx is the ctx argument value. @@ -222,6 +209,13 @@ type BlocktxStoreMock struct { // Ctx is the ctx argument value. Ctx context.Context } + // GetLongestBlockByHeight holds details about calls to the GetLongestBlockByHeight method. + GetLongestBlockByHeight []struct { + // Ctx is the ctx argument value. + Ctx context.Context + // Height is the height argument value. + Height uint64 + } // GetLongestChainFromHeight holds details about calls to the GetLongestChainFromHeight method. GetLongestChainFromHeight []struct { // Ctx is the ctx argument value. @@ -238,8 +232,8 @@ type BlocktxStoreMock struct { // OnlyLongestChain is the onlyLongestChain argument value. OnlyLongestChain bool } - // GetOrphanedChainUpFromHash holds details about calls to the GetOrphanedChainUpFromHash method. - GetOrphanedChainUpFromHash []struct { + // GetOrphansBackToNonOrphanAncestor holds details about calls to the GetOrphansBackToNonOrphanAncestor method. + GetOrphansBackToNonOrphanAncestor []struct { // Ctx is the ctx argument value. Ctx context.Context // Hash is the hash argument value. @@ -296,11 +290,6 @@ type BlocktxStoreMock struct { // ProcessedBy is the processedBy argument value. ProcessedBy string } - // StartUnitOfWork holds details about calls to the StartUnitOfWork method. - StartUnitOfWork []struct { - // Ctx is the ctx argument value. 
- Ctx context.Context - } // UpdateBlocksStatuses holds details about calls to the UpdateBlocksStatuses method. UpdateBlocksStatuses []struct { // Ctx is the ctx argument value. @@ -338,13 +327,13 @@ type BlocktxStoreMock struct { lockClose sync.RWMutex lockDelBlockProcessing sync.RWMutex lockGetBlock sync.RWMutex - lockGetBlockByHeight sync.RWMutex lockGetBlockGaps sync.RWMutex lockGetBlockHashesProcessingInProgress sync.RWMutex lockGetChainTip sync.RWMutex + lockGetLongestBlockByHeight sync.RWMutex lockGetLongestChainFromHeight sync.RWMutex lockGetMinedTransactions sync.RWMutex - lockGetOrphanedChainUpFromHash sync.RWMutex + lockGetOrphansBackToNonOrphanAncestor sync.RWMutex lockGetRegisteredTxsByBlockHashes sync.RWMutex lockGetStaleChainBackFromHash sync.RWMutex lockGetStats sync.RWMutex @@ -352,7 +341,6 @@ type BlocktxStoreMock struct { lockPing sync.RWMutex lockRegisterTransactions sync.RWMutex lockSetBlockProcessing sync.RWMutex - lockStartUnitOfWork sync.RWMutex lockUpdateBlocksStatuses sync.RWMutex lockUpsertBlock sync.RWMutex lockUpsertBlockTransactions sync.RWMutex @@ -502,42 +490,6 @@ func (mock *BlocktxStoreMock) GetBlockCalls() []struct { return calls } -// GetBlockByHeight calls GetBlockByHeightFunc. -func (mock *BlocktxStoreMock) GetBlockByHeight(ctx context.Context, height uint64) (*blocktx_api.Block, error) { - if mock.GetBlockByHeightFunc == nil { - panic("BlocktxStoreMock.GetBlockByHeightFunc: method is nil but BlocktxStore.GetBlockByHeight was just called") - } - callInfo := struct { - Ctx context.Context - Height uint64 - }{ - Ctx: ctx, - Height: height, - } - mock.lockGetBlockByHeight.Lock() - mock.calls.GetBlockByHeight = append(mock.calls.GetBlockByHeight, callInfo) - mock.lockGetBlockByHeight.Unlock() - return mock.GetBlockByHeightFunc(ctx, height) -} - -// GetBlockByHeightCalls gets all the calls that were made to GetBlockByHeight. -// Check the length with: -// -// len(mockedBlocktxStore.GetBlockByHeightCalls()) -func (mock *BlocktxStoreMock) GetBlockByHeightCalls() []struct { - Ctx context.Context - Height uint64 -} { - var calls []struct { - Ctx context.Context - Height uint64 - } - mock.lockGetBlockByHeight.RLock() - calls = mock.calls.GetBlockByHeight - mock.lockGetBlockByHeight.RUnlock() - return calls -} - // GetBlockGaps calls GetBlockGapsFunc. func (mock *BlocktxStoreMock) GetBlockGaps(ctx context.Context, heightRange int) ([]*store.BlockGap, error) { if mock.GetBlockGapsFunc == nil { @@ -642,6 +594,42 @@ func (mock *BlocktxStoreMock) GetChainTipCalls() []struct { return calls } +// GetLongestBlockByHeight calls GetLongestBlockByHeightFunc. +func (mock *BlocktxStoreMock) GetLongestBlockByHeight(ctx context.Context, height uint64) (*blocktx_api.Block, error) { + if mock.GetLongestBlockByHeightFunc == nil { + panic("BlocktxStoreMock.GetLongestBlockByHeightFunc: method is nil but BlocktxStore.GetLongestBlockByHeight was just called") + } + callInfo := struct { + Ctx context.Context + Height uint64 + }{ + Ctx: ctx, + Height: height, + } + mock.lockGetLongestBlockByHeight.Lock() + mock.calls.GetLongestBlockByHeight = append(mock.calls.GetLongestBlockByHeight, callInfo) + mock.lockGetLongestBlockByHeight.Unlock() + return mock.GetLongestBlockByHeightFunc(ctx, height) +} + +// GetLongestBlockByHeightCalls gets all the calls that were made to GetLongestBlockByHeight. 
+// Check the length with: +// +// len(mockedBlocktxStore.GetLongestBlockByHeightCalls()) +func (mock *BlocktxStoreMock) GetLongestBlockByHeightCalls() []struct { + Ctx context.Context + Height uint64 +} { + var calls []struct { + Ctx context.Context + Height uint64 + } + mock.lockGetLongestBlockByHeight.RLock() + calls = mock.calls.GetLongestBlockByHeight + mock.lockGetLongestBlockByHeight.RUnlock() + return calls +} + // GetLongestChainFromHeight calls GetLongestChainFromHeightFunc. func (mock *BlocktxStoreMock) GetLongestChainFromHeight(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) { if mock.GetLongestChainFromHeightFunc == nil { @@ -718,10 +706,10 @@ func (mock *BlocktxStoreMock) GetMinedTransactionsCalls() []struct { return calls } -// GetOrphanedChainUpFromHash calls GetOrphanedChainUpFromHashFunc. -func (mock *BlocktxStoreMock) GetOrphanedChainUpFromHash(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { - if mock.GetOrphanedChainUpFromHashFunc == nil { - panic("BlocktxStoreMock.GetOrphanedChainUpFromHashFunc: method is nil but BlocktxStore.GetOrphanedChainUpFromHash was just called") +// GetOrphansBackToNonOrphanAncestor calls GetOrphansBackToNonOrphanAncestorFunc. +func (mock *BlocktxStoreMock) GetOrphansBackToNonOrphanAncestor(ctx context.Context, hash []byte) ([]*blocktx_api.Block, *blocktx_api.Block, error) { + if mock.GetOrphansBackToNonOrphanAncestorFunc == nil { + panic("BlocktxStoreMock.GetOrphansBackToNonOrphanAncestorFunc: method is nil but BlocktxStore.GetOrphansBackToNonOrphanAncestor was just called") } callInfo := struct { Ctx context.Context @@ -730,17 +718,17 @@ func (mock *BlocktxStoreMock) GetOrphanedChainUpFromHash(ctx context.Context, ha Ctx: ctx, Hash: hash, } - mock.lockGetOrphanedChainUpFromHash.Lock() - mock.calls.GetOrphanedChainUpFromHash = append(mock.calls.GetOrphanedChainUpFromHash, callInfo) - mock.lockGetOrphanedChainUpFromHash.Unlock() - return mock.GetOrphanedChainUpFromHashFunc(ctx, hash) + mock.lockGetOrphansBackToNonOrphanAncestor.Lock() + mock.calls.GetOrphansBackToNonOrphanAncestor = append(mock.calls.GetOrphansBackToNonOrphanAncestor, callInfo) + mock.lockGetOrphansBackToNonOrphanAncestor.Unlock() + return mock.GetOrphansBackToNonOrphanAncestorFunc(ctx, hash) } -// GetOrphanedChainUpFromHashCalls gets all the calls that were made to GetOrphanedChainUpFromHash. +// GetOrphansBackToNonOrphanAncestorCalls gets all the calls that were made to GetOrphansBackToNonOrphanAncestor. // Check the length with: // -// len(mockedBlocktxStore.GetOrphanedChainUpFromHashCalls()) -func (mock *BlocktxStoreMock) GetOrphanedChainUpFromHashCalls() []struct { +// len(mockedBlocktxStore.GetOrphansBackToNonOrphanAncestorCalls()) +func (mock *BlocktxStoreMock) GetOrphansBackToNonOrphanAncestorCalls() []struct { Ctx context.Context Hash []byte } { @@ -748,9 +736,9 @@ func (mock *BlocktxStoreMock) GetOrphanedChainUpFromHashCalls() []struct { Ctx context.Context Hash []byte } - mock.lockGetOrphanedChainUpFromHash.RLock() - calls = mock.calls.GetOrphanedChainUpFromHash - mock.lockGetOrphanedChainUpFromHash.RUnlock() + mock.lockGetOrphansBackToNonOrphanAncestor.RLock() + calls = mock.calls.GetOrphansBackToNonOrphanAncestor + mock.lockGetOrphansBackToNonOrphanAncestor.RUnlock() return calls } @@ -1010,38 +998,6 @@ func (mock *BlocktxStoreMock) SetBlockProcessingCalls() []struct { return calls } -// StartUnitOfWork calls StartUnitOfWorkFunc. 
-func (mock *BlocktxStoreMock) StartUnitOfWork(ctx context.Context) (store.UnitOfWork, error) { - if mock.StartUnitOfWorkFunc == nil { - panic("BlocktxStoreMock.StartUnitOfWorkFunc: method is nil but BlocktxStore.StartUnitOfWork was just called") - } - callInfo := struct { - Ctx context.Context - }{ - Ctx: ctx, - } - mock.lockStartUnitOfWork.Lock() - mock.calls.StartUnitOfWork = append(mock.calls.StartUnitOfWork, callInfo) - mock.lockStartUnitOfWork.Unlock() - return mock.StartUnitOfWorkFunc(ctx) -} - -// StartUnitOfWorkCalls gets all the calls that were made to StartUnitOfWork. -// Check the length with: -// -// len(mockedBlocktxStore.StartUnitOfWorkCalls()) -func (mock *BlocktxStoreMock) StartUnitOfWorkCalls() []struct { - Ctx context.Context -} { - var calls []struct { - Ctx context.Context - } - mock.lockStartUnitOfWork.RLock() - calls = mock.calls.StartUnitOfWork - mock.lockStartUnitOfWork.RUnlock() - return calls -} - // UpdateBlocksStatuses calls UpdateBlocksStatusesFunc. func (mock *BlocktxStoreMock) UpdateBlocksStatuses(ctx context.Context, blockStatusUpdates []store.BlockStatusUpdate) error { if mock.UpdateBlocksStatusesFunc == nil { diff --git a/internal/blocktx/store/postgresql/fixtures/get_orphaned_chain/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/get_orphaned_chain/blocktx.blocks.yaml index b8f7b9e32..dd193b81d 100644 --- a/internal/blocktx/store/postgresql/fixtures/get_orphaned_chain/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/get_orphaned_chain/blocktx.blocks.yaml @@ -46,12 +46,49 @@ status: 30 # ORPHANED chainwork: '123456' is_longest: false -- inserted_at: 2023-12-15 14:50:00 +- inserted_at: 2023-12-15 14:40:00 id: 4 + hash: 0x00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656 + prevhash: 0x0000000000000000082ec88d757ddaeb0aa87a5d5408b5960f27e7e67312dfe1 + merkleroot: 0x4b58b0402a84012269b124f78c91a78a814eb3c9caa03f1df1d33172b23082d1 + height: 822019 + processed_at: 2023-12-15 14:40:00 + size: 299650000 + tx_count: 62162 + status: 30 # ORPHANED + chainwork: '123456' + is_longest: false +- inserted_at: 2023-12-15 14:50:00 + id: 5 hash: 0x000000000000000004bf3e68405b31650559ff28d38a42b5e4f1440a865611ca prevhash: 0x00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656 merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 - height: 822020 # Another gap + height: 822020 + size: 8630000 + tx_count: 36724 + status: 30 # ORPHANED + chainwork: '123456' + is_longest: false + +# GAP + +- inserted_at: 2023-12-15 14:50:00 + id: 6 + hash: 0x0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c + prevhash: 0x0000000000000000094510c50011a891b74ef054d6cac0a5ae8bd60f02c85f1d + merkleroot: 0xda71199f8ed9203d8a765595e6c030a22e5ed8330b1abb467a82c97d7d21d512 + height: 822022 + size: 8630000 + tx_count: 36724 + status: 30 # ORPHANED + chainwork: '123456' + is_longest: false +- inserted_at: 2023-12-15 14:50:00 + id: 7 + hash: 0x0000000000000000082131979a4e25a5101912a5f8461e18f306d23e158161cd + prevhash: 0x0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c + merkleroot: 0xda71199f8ed9203d8a765595e6c030a22e5ed8330b1abb467a82c97d7d21d512 + height: 822023 size: 8630000 tx_count: 36724 status: 30 # ORPHANED diff --git a/internal/blocktx/store/postgresql/get_block.go b/internal/blocktx/store/postgresql/get_block.go index b8852d460..55b9646f3 100644 --- a/internal/blocktx/store/postgresql/get_block.go +++ b/internal/blocktx/store/postgresql/get_block.go @@ -16,7 +16,7 @@ func (p 
*PostgreSQL) GetBlock(ctx context.Context, hash *chainhash.Hash) (*block return p.queryBlockByPredicate(ctx, predicate, hash[:]) } -func (p *PostgreSQL) GetBlockByHeight(ctx context.Context, height uint64) (*blocktx_api.Block, error) { +func (p *PostgreSQL) GetLongestBlockByHeight(ctx context.Context, height uint64) (*blocktx_api.Block, error) { predicate := "WHERE height = $1 AND is_longest = true" return p.queryBlockByPredicate(ctx, predicate, height) diff --git a/internal/blocktx/store/postgresql/get_orphaned_chain.go b/internal/blocktx/store/postgresql/get_orphaned_chain.go index 14ce171b3..441034a43 100644 --- a/internal/blocktx/store/postgresql/get_orphaned_chain.go +++ b/internal/blocktx/store/postgresql/get_orphaned_chain.go @@ -4,15 +4,17 @@ import ( "context" "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" + "github.com/libsv/go-p2p/chaincfg/chainhash" ) -// GetOrphanedChainUpFromHash is a function that recursively searches for blocks marked -// as ORPHANED from the given hash - up to the tip of orphaned chain of blocks. +// GetOrphansBackToNonOrphanAncestor recursively searches for blocks marked +// as ORPHANED from the given hash - back to the first ORPHANED block. Then, it +// tries to get the first non-orphaned ancestor of that orphan chain. // -// It searches for the block whose prevhash matches the hash of the given block, +// It searches for the block whose hash matches the prevhash of the given block, // and then repeats that recursively for each newly found orphaned block until // it has the entire orphaned chain. -func (p *PostgreSQL) GetOrphanedChainUpFromHash(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { +func (p *PostgreSQL) GetOrphansBackToNonOrphanAncestor(ctx context.Context, hash []byte) (orphans []*blocktx_api.Block, nonOrphanAncestor *blocktx_api.Block, err error) { // The way this query works, is that the result from the first SELECT // will be stored in the `orphans` variable, which is later used // for recursion in the second SELECT. 
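For intuition, the recursive query described in the comment above amounts to walking back through prevhash links while the blocks are still ORPHANED, then looking up the block just before the earliest orphan. The following rough Go sketch mirrors that walk in memory; Block, statusOrphaned and orphansBackToAncestor are simplified stand-ins invented for illustration, not the blocktx_api or store types, and this is not the store implementation itself.

package main

import "fmt"

type Block struct {
	Hash     string
	PrevHash string
	Height   uint64
	Status   int
}

const statusOrphaned = 30

// orphansBackToAncestor walks from the given hash back through PrevHash links,
// collecting ORPHANED blocks (lowest height first, mirroring ORDER BY height),
// and returns the first non-ORPHANED ancestor if one is known.
func orphansBackToAncestor(byHash map[string]Block, start string) ([]Block, *Block) {
	var orphans []Block
	cur, ok := byHash[start]
	for ok && cur.Status == statusOrphaned {
		orphans = append([]Block{cur}, orphans...) // prepend to keep ascending height order
		cur, ok = byHash[cur.PrevHash]
	}
	if !ok {
		return orphans, nil // chain ends in a gap: no known non-orphan ancestor
	}
	return orphans, &cur
}

func main() {
	byHash := map[string]Block{
		"c": {Hash: "c", PrevHash: "b", Height: 3, Status: statusOrphaned},
		"b": {Hash: "b", PrevHash: "a", Height: 2, Status: statusOrphaned},
		"a": {Hash: "a", PrevHash: "0", Height: 1, Status: 10}, // non-orphan ancestor
	}
	orphans, ancestor := orphansBackToAncestor(byHash, "c")
	fmt.Println(len(orphans), ancestor != nil) // 2 true
}

In the SQL version below, the seed SELECT plays the role of the first map lookup and the recursive JOIN plays the role of the loop; the separate GetBlock call at the end corresponds to the final ancestor lookup.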
@@ -31,7 +33,7 @@ func (p *PostgreSQL) GetOrphanedChainUpFromHash(ctx context.Context, hash []byte ,processed_at ,status ,chainwork - FROM blocktx.blocks WHERE prevhash = $1 AND status = $2 + FROM blocktx.blocks WHERE hash = $1 AND status = $2 UNION ALL SELECT b.hash @@ -41,7 +43,7 @@ func (p *PostgreSQL) GetOrphanedChainUpFromHash(ctx context.Context, hash []byte ,b.processed_at ,b.status ,b.chainwork - FROM blocktx.blocks b JOIN orphans o ON b.prevhash = o.hash AND b.status = $2 + FROM blocktx.blocks b JOIN orphans o ON o.prevhash = b.hash AND b.status = $2 ) SELECT hash @@ -52,13 +54,33 @@ func (p *PostgreSQL) GetOrphanedChainUpFromHash(ctx context.Context, hash []byte ,status ,chainwork FROM orphans + ORDER BY height ` rows, err := p.db.QueryContext(ctx, q, hash, blocktx_api.Status_ORPHANED) if err != nil { - return nil, err + return } defer rows.Close() - return p.parseBlocks(rows) + orphans, err = p.parseBlocks(rows) + if err != nil { + return + } + + // first element in orphans + // will be the given block + if len(orphans) < 2 { + return + } + + // try to get first non-orphan ancestor + nonOrphanHash, err := chainhash.NewHash(orphans[0].PreviousHash) + if err != nil { + return + } + + nonOrphanAncestor, _ = p.GetBlock(ctx, nonOrphanHash) + + return } diff --git a/internal/blocktx/store/postgresql/get_stale_chain.go b/internal/blocktx/store/postgresql/get_stale_chain.go index 71c96aa63..66aadb75b 100644 --- a/internal/blocktx/store/postgresql/get_stale_chain.go +++ b/internal/blocktx/store/postgresql/get_stale_chain.go @@ -58,6 +58,7 @@ func (p *PostgreSQL) GetStaleChainBackFromHash(ctx context.Context, hash []byte) ,status ,chainwork FROM prevBlocks + ORDER BY height ` rows, err := p.db.QueryContext(ctx, q, hash, blocktx_api.Status_STALE) diff --git a/internal/blocktx/store/postgresql/postgres.go b/internal/blocktx/store/postgresql/postgres.go index e73d94b2a..f3b6ec321 100644 --- a/internal/blocktx/store/postgresql/postgres.go +++ b/internal/blocktx/store/postgresql/postgres.go @@ -20,24 +20,12 @@ const ( maxPostgresBulkInsertRows = 8192 ) -type QueryAble interface { - ExecContext(ctx context.Context, query string, args ...interface{}) (sql.Result, error) - Prepare(query string) (*sql.Stmt, error) - Query(query string, args ...interface{}) (*sql.Rows, error) - QueryContext(ctx context.Context, query string, args ...interface{}) (*sql.Rows, error) - QueryRow(query string, args ...interface{}) *sql.Row - QueryRowContext(ctx context.Context, query string, args ...interface{}) *sql.Row -} - type PostgreSQL struct { - _db *sql.DB - _tx *sql.Tx - db QueryAble // this would be pointing either to _db or _tx + db *sql.DB now func() time.Time maxPostgresBulkInsertRows int tracingEnabled bool tracingAttributes []attribute.KeyValue - dbInfo string } func WithNow(nowFunc func() time.Time) func(*PostgreSQL) { @@ -72,11 +60,9 @@ func New(dbInfo string, idleConns int, maxOpenConns int, opts ...func(postgreSQL db.SetMaxOpenConns(maxOpenConns) p := &PostgreSQL{ - _db: db, db: db, now: time.Now, maxPostgresBulkInsertRows: maxPostgresBulkInsertRows, - dbInfo: dbInfo, } for _, opt := range opts { @@ -87,65 +73,14 @@ func New(dbInfo string, idleConns int, maxOpenConns int, opts ...func(postgreSQL } func (p *PostgreSQL) Close() error { - return p._db.Close() + return p.db.Close() } func (p *PostgreSQL) Ping(ctx context.Context) error { - r, err := p._db.QueryContext(ctx, "SELECT 1;") + r, err := p.db.QueryContext(ctx, "SELECT 1;") if err != nil { return err } return r.Close() } - -func (p *PostgreSQL) 
StartUnitOfWork(ctx context.Context) (store.UnitOfWork, error) { - tx, err := p._db.BeginTx(ctx, nil) - if err != nil { - return nil, err - } - - // This will create a clone of the store and use the transaction created - // above to avoid messing with the state of the main singleton store - cloneStore := &PostgreSQL{ - _tx: tx, - db: tx, - now: time.Now, - maxPostgresBulkInsertRows: maxPostgresBulkInsertRows, - tracingEnabled: p.tracingEnabled, - tracingAttributes: p.tracingAttributes, - } - - return cloneStore, nil -} - -// UnitOfWork methods below -func (p *PostgreSQL) Commit() error { - if p._tx == nil { - return ErrNoTransaction - } - return p._tx.Commit() -} - -func (p *PostgreSQL) Rollback() error { - if p._tx == nil { - return ErrNoTransaction - } - return p._tx.Rollback() -} - -func (p *PostgreSQL) WriteLockBlocksTable(ctx context.Context) error { - if p._tx == nil { - return ErrNoTransaction - } - - // This will lock `blocks` table for writing, when performing reorg. - // Any INSERT or UPDATE to the table will wait until the lock is released. - // Another instance wanting to acquire this lock at the same time will have - // to wait until the transaction holding the lock is completed and the lock - // is released. - // - // Reading from the table is still allowed. - _, err := p._tx.ExecContext(ctx, "LOCK TABLE blocktx.blocks IN EXCLUSIVE MODE") - return err -} diff --git a/internal/blocktx/store/postgresql/postgres_test.go b/internal/blocktx/store/postgresql/postgres_test.go index 8281e5d8b..ce9ba3ac1 100644 --- a/internal/blocktx/store/postgresql/postgres_test.go +++ b/internal/blocktx/store/postgresql/postgres_test.go @@ -92,14 +92,14 @@ func testmain(m *testing.M) int { func prepareDb(t *testing.T, postgres *PostgreSQL, fixture string) { t.Helper() - testutils.PruneTables(t, postgres._db, + testutils.PruneTables(t, postgres.db, "blocktx.blocks", "blocktx.transactions", "blocktx.block_transactions_map", ) if fixture != "" { - testutils.LoadFixtures(t, postgres._db, fixture) + testutils.LoadFixtures(t, postgres.db, fixture) } } @@ -200,11 +200,11 @@ func TestPostgresDB(t *testing.T) { hashAtTip := testutils.RevChainhash(t, "76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000") // when -> then - actualBlock, err := postgresDB.GetBlockByHeight(context.Background(), height) + actualBlock, err := postgresDB.GetLongestBlockByHeight(context.Background(), height) require.NoError(t, err) require.Equal(t, expectedHashAtHeightLongest[:], actualBlock.Hash) - actualBlock, err = postgresDB.GetBlockByHeight(context.Background(), heightNotFound) + actualBlock, err = postgresDB.GetLongestBlockByHeight(context.Background(), heightNotFound) require.Nil(t, actualBlock) require.Equal(t, store.ErrBlockNotFound, err) @@ -303,9 +303,9 @@ func TestPostgresDB(t *testing.T) { hash4Stale := testutils.RevChainhash(t, "000000000000000004bf3e68405b31650559ff28d38a42b5e4f1440a865611ca") expectedStaleHashes := [][]byte{ - hash4Stale[:], - hash3Stale[:], hash2Stale[:], + hash3Stale[:], + hash4Stale[:], } // when @@ -319,23 +319,33 @@ func TestPostgresDB(t *testing.T) { } }) - t.Run("get orphaned chain up from hash", func(t *testing.T) { + t.Run("get orphans back to non-orphaned ancestor", func(t *testing.T) { // given prepareDb(t, postgresDB, "fixtures/get_orphaned_chain") - hashGapFiller := testutils.RevChainhash(t, "0000000000000000025855b62f4c2e3732dad363a6f2ead94e4657ef96877067") + newHash := testutils.RevChainhash(t, "00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656") 
hash2Orphaned := testutils.RevChainhash(t, "000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde") hash3Orphaned := testutils.RevChainhash(t, "00000000000000000659df0d3cf98ebe46931b67117502168418f9dce4e1b4c9") hash4Orphaned := testutils.RevChainhash(t, "0000000000000000082ec88d757ddaeb0aa87a5d5408b5960f27e7e67312dfe1") + noAncestorHash := testutils.RevChainhash(t, "0000000000000000082131979a4e25a5101912a5f8461e18f306d23e158161cd") + hash6Orphaned := testutils.RevChainhash(t, "0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c") + expectedOrphanedHashes := [][]byte{ hash2Orphaned[:], hash3Orphaned[:], hash4Orphaned[:], + newHash[:], + } + expectedAncestorHash := testutils.RevChainhash(t, "0000000000000000025855b62f4c2e3732dad363a6f2ead94e4657ef96877067") + + expectedNoAncestorOrphanedHashes := [][]byte{ + noAncestorHash[:], + hash6Orphaned[:], } // when - actualOrphanedBlocks, err := postgresDB.GetOrphanedChainUpFromHash(ctx, hashGapFiller[:]) + actualOrphanedBlocks, actualAncestor, err := postgresDB.GetOrphansBackToNonOrphanAncestor(ctx, newHash[:]) require.NoError(t, err) // then @@ -343,6 +353,15 @@ func TestPostgresDB(t *testing.T) { for i, b := range actualOrphanedBlocks { require.Equal(t, expectedOrphanedHashes[i], b.Hash) } + require.Equal(t, expectedAncestorHash[:], actualAncestor.Hash) + + // when + actualOrphanedBlocks, actualAncestor, err = postgresDB.GetOrphansBackToNonOrphanAncestor(ctx, noAncestorHash[:]) + require.NoError(t, err) + + // then + require.Equal(t, len(expectedNoAncestorOrphanedHashes), len(actualOrphanedBlocks)) + require.Nil(t, actualAncestor) }) t.Run("update blocks statuses", func(t *testing.T) { @@ -664,24 +683,6 @@ func TestPostgresDB(t *testing.T) { // then assert.Equal(t, expectedUnverifiedBlockHeights, res.UnverifiedBlockHeights) }) - - t.Run("lock blocks table", func(t *testing.T) { - err := postgresDB.WriteLockBlocksTable(context.Background()) - require.Error(t, err) - require.Equal(t, ErrNoTransaction, err) - - uow, err := postgresDB.StartUnitOfWork(context.Background()) - require.NoError(t, err) - - err = uow.WriteLockBlocksTable(context.Background()) - require.NoError(t, err) - - err = uow.Rollback() - require.NoError(t, err) - - err = uow.Commit() - require.Equal(t, ErrNoTransaction, err) - }) } func TestPostgresStore_UpsertBlockTransactions(t *testing.T) { diff --git a/internal/blocktx/store/postgresql/update_block_statuses.go b/internal/blocktx/store/postgresql/update_block_statuses.go index 3f5a56e7c..249fe4295 100644 --- a/internal/blocktx/store/postgresql/update_block_statuses.go +++ b/internal/blocktx/store/postgresql/update_block_statuses.go @@ -13,7 +13,10 @@ func (p *PostgreSQL) UpdateBlocksStatuses(ctx context.Context, blockStatusUpdate q := ` UPDATE blocktx.blocks b SET status = updates.status, is_longest = updates.is_longest - FROM (SELECT * FROM UNNEST($1::BYTEA[], $2::INTEGER[], $3::BOOLEAN[]) AS u(hash, status, is_longest)) AS updates + FROM ( + SELECT * FROM UNNEST($1::BYTEA[], $2::INTEGER[], $3::BOOLEAN[]) AS u(hash, status, is_longest) + WHERE is_longest = $4 + ) AS updates WHERE b.hash = updates.hash ` @@ -27,7 +30,27 @@ func (p *PostgreSQL) UpdateBlocksStatuses(ctx context.Context, blockStatusUpdate isLongest[i] = update.Status == blocktx_api.Status_LONGEST } - _, err := p.db.ExecContext(ctx, q, pq.Array(blockHashes), pq.Array(statuses), pq.Array(isLongest)) + tx, err := p.db.Begin() + if err != nil { + return errors.Join(store.ErrFailedToUpdateBlockStatuses, err) + } + defer func() { + _ = tx.Rollback() 
+ }() + + // first update blocks that are changing statuses to non-LONGEST + _, err = tx.ExecContext(ctx, q, pq.Array(blockHashes), pq.Array(statuses), pq.Array(isLongest), false) + if err != nil { + return errors.Join(store.ErrFailedToUpdateBlockStatuses, err) + } + + // then update blocks that are changing statuses to LONGEST + _, err = tx.ExecContext(ctx, q, pq.Array(blockHashes), pq.Array(statuses), pq.Array(isLongest), true) + if err != nil { + return errors.Join(store.ErrFailedToUpdateBlockStatuses, err) + } + + err = tx.Commit() if err != nil { return errors.Join(store.ErrFailedToUpdateBlockStatuses, err) } diff --git a/internal/blocktx/store/store.go b/internal/blocktx/store/store.go index dd30b8f44..198bedac1 100644 --- a/internal/blocktx/store/store.go +++ b/internal/blocktx/store/store.go @@ -31,7 +31,7 @@ type Stats struct { type BlocktxStore interface { RegisterTransactions(ctx context.Context, txHashes [][]byte) (updatedTxs []*chainhash.Hash, err error) GetBlock(ctx context.Context, hash *chainhash.Hash) (*blocktx_api.Block, error) - GetBlockByHeight(ctx context.Context, height uint64) (*blocktx_api.Block, error) + GetLongestBlockByHeight(ctx context.Context, height uint64) (*blocktx_api.Block, error) GetChainTip(ctx context.Context) (*blocktx_api.Block, error) UpsertBlock(ctx context.Context, block *blocktx_api.Block) (uint64, error) UpsertBlockTransactions(ctx context.Context, blockID uint64, txsWithMerklePaths []TxWithMerklePath) error @@ -41,7 +41,7 @@ type BlocktxStore interface { GetMinedTransactions(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]TransactionBlock, error) GetLongestChainFromHeight(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) GetStaleChainBackFromHash(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) - GetOrphanedChainUpFromHash(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) + GetOrphansBackToNonOrphanAncestor(ctx context.Context, hash []byte) (orphans []*blocktx_api.Block, nonOrphanAncestor *blocktx_api.Block, err error) GetRegisteredTxsByBlockHashes(ctx context.Context, blockHashes [][]byte) ([]TransactionBlock, error) UpdateBlocksStatuses(ctx context.Context, blockStatusUpdates []BlockStatusUpdate) error GetStats(ctx context.Context) (*Stats, error) @@ -53,13 +53,4 @@ type BlocktxStore interface { Ping(ctx context.Context) error Close() error - - StartUnitOfWork(ctx context.Context) (UnitOfWork, error) -} - -type UnitOfWork interface { - BlocktxStore - Commit() error - Rollback() error - WriteLockBlocksTable(ctx context.Context) error } diff --git a/internal/blocktx/store/store_mocks.go b/internal/blocktx/store/store_mocks.go index 746a61325..a82b9ba66 100644 --- a/internal/blocktx/store/store_mocks.go +++ b/internal/blocktx/store/store_mocks.go @@ -1,4 +1,3 @@ package store //go:generate moq -pkg mocks -out ./mocks/blocktx_store_mock.go . BlocktxStore -//go:generate moq -pkg mocks -out ./mocks/blocktx_db_tx_mock.go . 
UnitOfWork From db428695adabf46bbbfb2640dbd5600fed40896b Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Wed, 20 Nov 2024 18:38:39 +0100 Subject: [PATCH 27/36] chore: removing unused variables, using different testing methods --- .../reorg_integration_test.go | 12 ++--- internal/blocktx/processor.go | 26 ++++++++++- .../blocktx/store/postgresql/postgres_test.go | 46 ++----------------- 3 files changed, 31 insertions(+), 53 deletions(-) diff --git a/internal/blocktx/integration_test/reorg_integration_test.go b/internal/blocktx/integration_test/reorg_integration_test.go index 56cb848e2..be4a66848 100644 --- a/internal/blocktx/integration_test/reorg_integration_test.go +++ b/internal/blocktx/integration_test/reorg_integration_test.go @@ -11,12 +11,12 @@ package integrationtest // 2. A hardcoded msg with competing block at height 822015 is being sent through the mocked PeerHandler // 3. This block has a chainwork lower than the current tip of chain - becomes STALE // 4. Registered transactions from this block are ignored -// 5. Next competing block, at height 822016 is being send through the mocked PeerHandler +// 5. Next competing block, at height 822016 is being sent through the mocked PeerHandler // 6. This block has a greater chainwork than the current tip of longest chain - it becomes LONGEST despite not being the highest // 7. Verification of reorg - checking if statuses are correctly switched // 8. Verification of transactions // - transactions from the stale chain becoming the longest are published -// - transactions that were previously in the longest chain are published with udpated block data +// - transactions that were previously in the longest chain are published with updated block data // - transactions that were previously in the longest chain, but are not in the stale chain are published with blockstatus = STALE // 9. A new block at height 822021 is being sent through the mocked PeerHandler // 10. This block is extending the orphaned chain and finds that it's connected to the stale chain - orphans get updated to STALE @@ -55,13 +55,7 @@ import ( "google.golang.org/protobuf/proto" ) -const ( - postgresPort = "5432" - migrationsPath = "file://../store/postgresql/migrations" - dbName = "main_test" - dbUsername = "arcuser" - dbPassword = "arcpass" -) +const migrationsPath = "file://../store/postgresql/migrations" var ( dbInfo string diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index 23b57a2e4..528a99cb5 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -513,6 +513,9 @@ func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { } func (p *Processor) verifyAndInsertBlock(ctx context.Context, msg *p2p.BlockMessage) (*blocktx_api.Block, error) { + ctx, span := tracing.StartTracing(ctx, "verifyAndInsertBlock", p.tracingEnabled, p.tracingAttributes...) + defer tracing.EndTracing(span) + blockHash := msg.Header.BlockHash() previousBlockHash := msg.Header.PrevBlock merkleRoot := msg.Header.MerkleRoot @@ -543,6 +546,9 @@ func (p *Processor) verifyAndInsertBlock(ctx context.Context, msg *p2p.BlockMess } func (p *Processor) assignBlockStatus(ctx context.Context, block *blocktx_api.Block, prevBlockHash chainhash.Hash) error { + ctx, span := tracing.StartTracing(ctx, "assignBlockStatus", p.tracingEnabled, p.tracingAttributes...) 
+ defer tracing.EndTracing(span) + prevBlock, _ := p.store.GetBlock(ctx, &prevBlockHash) if prevBlock == nil { @@ -617,6 +623,9 @@ func (p *Processor) longestTipExists(ctx context.Context) (bool, error) { } func (p *Processor) getRegisteredTransactions(ctx context.Context, blocks []*blocktx_api.Block) ([]store.TransactionBlock, error) { + ctx, span := tracing.StartTracing(ctx, "getRegisteredTransactions", p.tracingEnabled, p.tracingAttributes...) + defer tracing.EndTracing(span) + blockHashes := make([][]byte, len(blocks)) for i, b := range blocks { blockHashes[i] = b.Hash @@ -633,6 +642,9 @@ func (p *Processor) getRegisteredTransactions(ctx context.Context, blocks []*blo } func (p *Processor) insertBlockAndStoreTransactions(ctx context.Context, incomingBlock *blocktx_api.Block, txHashes []*chainhash.Hash, merkleRoot chainhash.Hash) error { + ctx, span := tracing.StartTracing(ctx, "insertBlockAndStoreTransactions", p.tracingEnabled, p.tracingAttributes...) + defer tracing.EndTracing(span) + blockID, err := p.store.UpsertBlock(ctx, incomingBlock) if err != nil { p.logger.Error("unable to insert block at given height", slog.String("hash", getHashStringNoErr(incomingBlock.Hash)), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) @@ -655,7 +667,7 @@ func (p *Processor) insertBlockAndStoreTransactions(ctx context.Context, incomin } func (p *Processor) storeTransactions(ctx context.Context, blockID uint64, block *blocktx_api.Block, merkleTree []*chainhash.Hash) (err error) { - ctx, span := tracing.StartTracing(ctx, "markTransactionsAsMined", p.tracingEnabled, p.tracingAttributes...) + ctx, span := tracing.StartTracing(ctx, "storeTransactions", p.tracingEnabled, p.tracingAttributes...) defer func() { tracing.EndTracing(span, err) }() @@ -731,6 +743,9 @@ func (p *Processor) storeTransactions(ctx context.Context, blockID uint64, block } func (p *Processor) handleStaleBlock(ctx context.Context, block *blocktx_api.Block) ([]store.TransactionBlock, error) { + ctx, span := tracing.StartTracing(ctx, "handleStaleBlock", p.tracingEnabled, p.tracingAttributes...) + defer tracing.EndTracing(span) + staleBlocks, err := p.store.GetStaleChainBackFromHash(ctx, block.Hash) if err != nil { p.logger.Error("unable to get STALE blocks to verify chainwork", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) @@ -766,6 +781,9 @@ func (p *Processor) handleStaleBlock(ctx context.Context, block *blocktx_api.Blo } func (p *Processor) performReorg(ctx context.Context, staleBlocks []*blocktx_api.Block, longestBlocks []*blocktx_api.Block) ([]store.TransactionBlock, error) { + ctx, span := tracing.StartTracing(ctx, "performReorg", p.tracingEnabled, p.tracingAttributes...) + defer tracing.EndTracing(span) + staleHashes := make([][]byte, len(staleBlocks)) longestHashes := make([][]byte, len(longestBlocks)) @@ -819,6 +837,9 @@ func (p *Processor) performReorg(ctx context.Context, staleBlocks []*blocktx_api } func (p *Processor) handleOrphans(ctx context.Context, block *blocktx_api.Block) ([]store.TransactionBlock, error) { + ctx, span := tracing.StartTracing(ctx, "handleOrphans", p.tracingEnabled, p.tracingAttributes...) 
+ defer tracing.EndTracing(span) + orphans, ancestor, err := p.store.GetOrphansBackToNonOrphanAncestor(ctx, block.Hash) if err != nil { p.logger.Error("unable to get ORPHANED blocks", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) @@ -879,6 +900,9 @@ func (p *Processor) handleOrphans(ctx context.Context, block *blocktx_api.Block) } func (p *Processor) acceptIntoChain(ctx context.Context, blocks []*blocktx_api.Block, chain blocktx_api.Status) error { + ctx, span := tracing.StartTracing(ctx, "acceptIntoChain", p.tracingEnabled, p.tracingAttributes...) + defer tracing.EndTracing(span) + blockStatusUpdates := make([]store.BlockStatusUpdate, len(blocks)) for i, b := range blocks { diff --git a/internal/blocktx/store/postgresql/postgres_test.go b/internal/blocktx/store/postgresql/postgres_test.go index ce9ba3ac1..84572f781 100644 --- a/internal/blocktx/store/postgresql/postgres_test.go +++ b/internal/blocktx/store/postgresql/postgres_test.go @@ -3,7 +3,6 @@ package postgresql import ( "bytes" "context" - "errors" "log" "os" "testing" @@ -174,7 +173,7 @@ func TestPostgresDB(t *testing.T) { _, err = postgresDB.UpsertBlock(ctx, expectedBlockViolatingUniqueIndex) // then - require.True(t, errors.Is(err, store.ErrFailedToInsertBlock)) + require.ErrorIs(t, err, store.ErrFailedToInsertBlock) // when id, err = postgresDB.UpsertBlock(ctx, expectedBlockOverrideStatus) @@ -408,7 +407,7 @@ func TestPostgresDB(t *testing.T) { // when err = postgresDB.UpdateBlocksStatuses(ctx, blockStatusUpdatesViolating) - require.True(t, errors.Is(err, store.ErrFailedToUpdateBlockStatuses)) + require.ErrorIs(t, err, store.ErrFailedToUpdateBlockStatuses) }) t.Run("get mined txs", func(t *testing.T) { @@ -463,45 +462,6 @@ func TestPostgresDB(t *testing.T) { require.Equal(t, expectedTxs, actualTxs) }) - t.Run("get registered txs", func(t *testing.T) { - // given - prepareDb(t, postgresDB, "fixtures/get_transactions") - - blockHash := testutils.RevChainhash(t, "000000000000000005aa39a25e7e8bf440c270ec9a1bd30e99ab026f39207ef9") - blockHash2 := testutils.RevChainhash(t, "0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7") - - expectedTxs := []store.TransactionBlock{ - { - TxHash: testutils.RevChainhash(t, "21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e")[:], - BlockHash: blockHash[:], - BlockHeight: 822013, - MerklePath: "merkle-path-2", - BlockStatus: blocktx_api.Status_LONGEST, - }, - { - TxHash: testutils.RevChainhash(t, "213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0")[:], - BlockHash: blockHash2[:], - BlockHeight: 822012, - MerklePath: "merkle-path-6", - BlockStatus: blocktx_api.Status_STALE, - }, - { - TxHash: testutils.RevChainhash(t, "12c04cfc5643f1cd25639ad42d6f8f0489557699d92071d7e0a5b940438c4357")[:], - BlockHash: blockHash2[:], - BlockHeight: 822012, - MerklePath: "merkle-path-7", - BlockStatus: blocktx_api.Status_STALE, - }, - } - - // when - actualTxs, err := postgresDB.GetRegisteredTxsByBlockHashes(ctx, [][]byte{blockHash[:], blockHash2[:]}) - - // then - require.NoError(t, err) - require.Equal(t, expectedTxs, actualTxs) - }) - t.Run("get registered txs by block hashes", func(t *testing.T) { // given prepareDb(t, postgresDB, "fixtures/get_transactions") @@ -1084,7 +1044,7 @@ func TestUpsertBlockConditions(t *testing.T) { require.NoError(t, err) } else { require.Equal(t, uint64(0), blockID) - require.True(t, errors.Is(err, store.ErrFailedToInsertBlock)) + require.ErrorIs(t, err, 
store.ErrFailedToInsertBlock) } }) } From 255f28a639229635f3630c1680d15a9d2b15764d Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Fri, 22 Nov 2024 13:46:45 +0100 Subject: [PATCH 28/36] feat: adapt to new error traces --- internal/blocktx/processor.go | 74 ++++++++++++------- .../store/postgresql/get_transactions.go | 13 +++- 2 files changed, 57 insertions(+), 30 deletions(-) diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index 528a99cb5..003340124 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -503,7 +503,7 @@ func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { p.logger.Info("publishing tx", slog.String("txHash", getHashStringNoErr(tx.TxHash))) - err = p.mqClient.PublishMarshal(MinedTxsTopic, txBlock) + err = p.mqClient.PublishMarshal(ctx, MinedTxsTopic, txBlock) if err != nil { p.logger.Error("failed to publish mined txs", slog.String("blockHash", getHashStringNoErr(tx.BlockHash)), slog.Uint64("height", tx.BlockHeight), slog.String("txHash", getHashStringNoErr(tx.TxHash)), slog.String("err", err.Error())) } @@ -512,15 +512,17 @@ func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { return nil } -func (p *Processor) verifyAndInsertBlock(ctx context.Context, msg *p2p.BlockMessage) (*blocktx_api.Block, error) { +func (p *Processor) verifyAndInsertBlock(ctx context.Context, msg *p2p.BlockMessage) (incomingBlock *blocktx_api.Block, err error) { ctx, span := tracing.StartTracing(ctx, "verifyAndInsertBlock", p.tracingEnabled, p.tracingAttributes...) - defer tracing.EndTracing(span) + defer func() { + tracing.EndTracing(span, err) + }() blockHash := msg.Header.BlockHash() previousBlockHash := msg.Header.PrevBlock merkleRoot := msg.Header.MerkleRoot - incomingBlock := &blocktx_api.Block{ + incomingBlock = &blocktx_api.Block{ Hash: blockHash[:], PreviousHash: previousBlockHash[:], MerkleRoot: merkleRoot[:], @@ -528,7 +530,7 @@ func (p *Processor) verifyAndInsertBlock(ctx context.Context, msg *p2p.BlockMess Chainwork: calculateChainwork(msg.Header.Bits).String(), } - err := p.assignBlockStatus(ctx, incomingBlock, previousBlockHash) + err = p.assignBlockStatus(ctx, incomingBlock, previousBlockHash) if err != nil { p.logger.Error("unable to assign block status", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) return nil, err @@ -545,16 +547,19 @@ func (p *Processor) verifyAndInsertBlock(ctx context.Context, msg *p2p.BlockMess return incomingBlock, nil } -func (p *Processor) assignBlockStatus(ctx context.Context, block *blocktx_api.Block, prevBlockHash chainhash.Hash) error { +func (p *Processor) assignBlockStatus(ctx context.Context, block *blocktx_api.Block, prevBlockHash chainhash.Hash) (err error) { ctx, span := tracing.StartTracing(ctx, "assignBlockStatus", p.tracingEnabled, p.tracingAttributes...) 
- defer tracing.EndTracing(span) + defer func() { + tracing.EndTracing(span, err) + }() prevBlock, _ := p.store.GetBlock(ctx, &prevBlockHash) if prevBlock == nil { // This check is only in case there's a fresh, empty database // with no blocks, to mark the first block as the LONGEST chain - longestTipExists, err := p.longestTipExists(ctx) + var longestTipExists bool + longestTipExists, err = p.longestTipExists(ctx) if err != nil { p.logger.Error("unable to verify the longest tip existance in db", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) return err @@ -581,7 +586,8 @@ func (p *Processor) assignBlockStatus(ctx context.Context, block *blocktx_api.Bl } if prevBlock.Status == blocktx_api.Status_LONGEST { - competingBlock, err := p.store.GetLongestBlockByHeight(ctx, block.Height) + var competingBlock *blocktx_api.Block + competingBlock, err = p.store.GetLongestBlockByHeight(ctx, block.Height) if err != nil && !errors.Is(err, store.ErrBlockNotFound) { p.logger.Error("unable to get the competing block from db", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) return err @@ -596,7 +602,8 @@ func (p *Processor) assignBlockStatus(ctx context.Context, block *blocktx_api.Bl // this means that another instance is already processing // or have processed this block that we're processing here // so we can throw an error and finish processing - return ErrBlockAlreadyExists + err = ErrBlockAlreadyExists + return err } block.Status = blocktx_api.Status_STALE @@ -622,16 +629,18 @@ func (p *Processor) longestTipExists(ctx context.Context) (bool, error) { return true, nil } -func (p *Processor) getRegisteredTransactions(ctx context.Context, blocks []*blocktx_api.Block) ([]store.TransactionBlock, error) { +func (p *Processor) getRegisteredTransactions(ctx context.Context, blocks []*blocktx_api.Block) (txsToPublish []store.TransactionBlock, err error) { ctx, span := tracing.StartTracing(ctx, "getRegisteredTransactions", p.tracingEnabled, p.tracingAttributes...) - defer tracing.EndTracing(span) + defer func() { + tracing.EndTracing(span, err) + }() blockHashes := make([][]byte, len(blocks)) for i, b := range blocks { blockHashes[i] = b.Hash } - txsToPublish, err := p.store.GetRegisteredTxsByBlockHashes(ctx, blockHashes) + txsToPublish, err = p.store.GetRegisteredTxsByBlockHashes(ctx, blockHashes) if err != nil { block := blocks[len(blocks)-1] p.logger.Error("unable to get registered transactions", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) @@ -641,9 +650,11 @@ func (p *Processor) getRegisteredTransactions(ctx context.Context, blocks []*blo return txsToPublish, nil } -func (p *Processor) insertBlockAndStoreTransactions(ctx context.Context, incomingBlock *blocktx_api.Block, txHashes []*chainhash.Hash, merkleRoot chainhash.Hash) error { +func (p *Processor) insertBlockAndStoreTransactions(ctx context.Context, incomingBlock *blocktx_api.Block, txHashes []*chainhash.Hash, merkleRoot chainhash.Hash) (err error) { ctx, span := tracing.StartTracing(ctx, "insertBlockAndStoreTransactions", p.tracingEnabled, p.tracingAttributes...) 
- defer tracing.EndTracing(span) + defer func() { + tracing.EndTracing(span, err) + }() blockID, err := p.store.UpsertBlock(ctx, incomingBlock) if err != nil { @@ -742,9 +753,11 @@ func (p *Processor) storeTransactions(ctx context.Context, blockID uint64, block return nil } -func (p *Processor) handleStaleBlock(ctx context.Context, block *blocktx_api.Block) ([]store.TransactionBlock, error) { +func (p *Processor) handleStaleBlock(ctx context.Context, block *blocktx_api.Block) (txsToPublish []store.TransactionBlock, err error) { ctx, span := tracing.StartTracing(ctx, "handleStaleBlock", p.tracingEnabled, p.tracingAttributes...) - defer tracing.EndTracing(span) + defer func() { + tracing.EndTracing(span, err) + }() staleBlocks, err := p.store.GetStaleChainBackFromHash(ctx, block.Hash) if err != nil { @@ -769,7 +782,7 @@ func (p *Processor) handleStaleBlock(ctx context.Context, block *blocktx_api.Blo if longestChainwork.Cmp(staleChainwork) < 0 { p.logger.Info("chain reorg detected", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height)) - txsToPublish, err := p.performReorg(ctx, staleBlocks, longestBlocks) + txsToPublish, err = p.performReorg(ctx, staleBlocks, longestBlocks) if err != nil { p.logger.Error("unable to perform reorg", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) return nil, err @@ -780,9 +793,11 @@ func (p *Processor) handleStaleBlock(ctx context.Context, block *blocktx_api.Blo return nil, nil } -func (p *Processor) performReorg(ctx context.Context, staleBlocks []*blocktx_api.Block, longestBlocks []*blocktx_api.Block) ([]store.TransactionBlock, error) { +func (p *Processor) performReorg(ctx context.Context, staleBlocks []*blocktx_api.Block, longestBlocks []*blocktx_api.Block) (txsToPublish []store.TransactionBlock, err error) { ctx, span := tracing.StartTracing(ctx, "performReorg", p.tracingEnabled, p.tracingAttributes...) - defer tracing.EndTracing(span) + defer func() { + tracing.EndTracing(span, err) + }() staleHashes := make([][]byte, len(staleBlocks)) longestHashes := make([][]byte, len(longestBlocks)) @@ -805,7 +820,7 @@ func (p *Processor) performReorg(ctx context.Context, staleBlocks []*blocktx_api blockStatusUpdates[i+len(longestBlocks)] = update } - err := p.store.UpdateBlocksStatuses(ctx, blockStatusUpdates) + err = p.store.UpdateBlocksStatuses(ctx, blockStatusUpdates) if err != nil { return nil, err } @@ -836,9 +851,11 @@ func (p *Processor) performReorg(ctx context.Context, staleBlocks []*blocktx_api return append(longestTxs, staleTxs...), nil } -func (p *Processor) handleOrphans(ctx context.Context, block *blocktx_api.Block) ([]store.TransactionBlock, error) { +func (p *Processor) handleOrphans(ctx context.Context, block *blocktx_api.Block) (txsToPublis []store.TransactionBlock, err error) { ctx, span := tracing.StartTracing(ctx, "handleOrphans", p.tracingEnabled, p.tracingAttributes...) - defer tracing.EndTracing(span) + defer func() { + tracing.EndTracing(span, err) + }() orphans, ancestor, err := p.store.GetOrphansBackToNonOrphanAncestor(ctx, block.Hash) if err != nil { @@ -871,7 +888,8 @@ func (p *Processor) handleOrphans(ctx context.Context, block *blocktx_api.Block) // of the first orphan, then we can assume that // there's no competing chain at all. 
- competingBlock, err := p.store.GetLongestBlockByHeight(ctx, orphans[0].Height) + var competingBlock *blocktx_api.Block + competingBlock, err = p.store.GetLongestBlockByHeight(ctx, orphans[0].Height) if err != nil && !errors.Is(err, store.ErrBlockNotFound) { p.logger.Error("unable to get competing block when handling orphans", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) return nil, err @@ -899,9 +917,11 @@ func (p *Processor) handleOrphans(ctx context.Context, block *blocktx_api.Block) return nil, nil } -func (p *Processor) acceptIntoChain(ctx context.Context, blocks []*blocktx_api.Block, chain blocktx_api.Status) error { +func (p *Processor) acceptIntoChain(ctx context.Context, blocks []*blocktx_api.Block, chain blocktx_api.Status) (err error) { ctx, span := tracing.StartTracing(ctx, "acceptIntoChain", p.tracingEnabled, p.tracingAttributes...) - defer tracing.EndTracing(span) + defer func() { + tracing.EndTracing(span, err) + }() blockStatusUpdates := make([]store.BlockStatusUpdate, len(blocks)) @@ -915,7 +935,7 @@ func (p *Processor) acceptIntoChain(ctx context.Context, blocks []*blocktx_api.B tip := blocks[len(blocks)-1] - err := p.store.UpdateBlocksStatuses(ctx, blockStatusUpdates) + err = p.store.UpdateBlocksStatuses(ctx, blockStatusUpdates) if err != nil { p.logger.Error("unable to accept blocks into chain", slog.String("hash", getHashStringNoErr(tip.Hash)), slog.Uint64("height", tip.Height), slog.String("chain", chain.String()), slog.String("err", err.Error())) return err diff --git a/internal/blocktx/store/postgresql/get_transactions.go b/internal/blocktx/store/postgresql/get_transactions.go index b7e9902cd..ed7f2ee1c 100644 --- a/internal/blocktx/store/postgresql/get_transactions.go +++ b/internal/blocktx/store/postgresql/get_transactions.go @@ -9,9 +9,11 @@ import ( "github.com/lib/pq" ) -func (p *PostgreSQL) GetMinedTransactions(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) { +func (p *PostgreSQL) GetMinedTransactions(ctx context.Context, hashes [][]byte, onlyLongestChain bool) (minedTransactions []store.TransactionBlock, err error) { ctx, span := tracing.StartTracing(ctx, "GetMinedTransactions", p.tracingEnabled, p.tracingAttributes...) - defer tracing.EndTracing(span) + defer func() { + tracing.EndTracing(span, err) + }() if onlyLongestChain { predicate := "WHERE t.hash = ANY($1) AND b.is_longest = true" @@ -27,7 +29,12 @@ func (p *PostgreSQL) GetMinedTransactions(ctx context.Context, hashes [][]byte, ) } -func (p *PostgreSQL) GetRegisteredTxsByBlockHashes(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { +func (p *PostgreSQL) GetRegisteredTxsByBlockHashes(ctx context.Context, blockHashes [][]byte) (registeredTxs []store.TransactionBlock, err error) { + ctx, span := tracing.StartTracing(ctx, "GetMinedTransactions", p.tracingEnabled, p.tracingAttributes...) 
+ defer func() { + tracing.EndTracing(span, err) + }() + predicate := "WHERE b.hash = ANY($1) AND t.is_registered = TRUE" return p.getTransactionBlocksByPredicate(ctx, predicate, pq.Array(blockHashes)) From 16f572274b35e92ec2dbed9d93d15fe0a74f5943 Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Thu, 28 Nov 2024 11:49:07 +0100 Subject: [PATCH 29/36] refactor: move blockhash higher --- internal/blocktx/processor.go | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index 003340124..e57aa84a7 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -446,7 +446,9 @@ func (p *Processor) buildMerkleTreeStoreChainHash(ctx context.Context, txids []* func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { ctx := p.ctx - var blockHash chainhash.Hash + + blockHash := msg.Header.BlockHash() + ctx, span := tracing.StartTracing(ctx, "processBlock", p.tracingEnabled, p.tracingAttributes...) defer func() { if span != nil { @@ -456,16 +458,13 @@ func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { tracing.EndTracing(span, err) }() - blockHash = msg.Header.BlockHash() - blockHeight := msg.Height - - p.logger.Info("processing incoming block", slog.String("hash", blockHash.String()), slog.Uint64("height", blockHeight)) + p.logger.Info("processing incoming block", slog.String("hash", blockHash.String()), slog.Uint64("height", msg.Height)) // check if we've already processed that block existingBlock, _ := p.store.GetBlock(ctx, &blockHash) if existingBlock != nil && existingBlock.Processed { - p.logger.Warn("ignoring already existing block", slog.String("hash", blockHash.String()), slog.Uint64("height", blockHeight)) + p.logger.Warn("ignoring already existing block", slog.String("hash", blockHash.String()), slog.Uint64("height", msg.Height)) return nil } From ed0e3783a8a3ab216b0219db6c17e196e73d1bb7 Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Tue, 3 Dec 2024 09:10:00 +0100 Subject: [PATCH 30/36] feat: checking for processed blocks in blocktx store queries and small improvements --- internal/blocktx/processor.go | 186 +++++++++--------- internal/blocktx/processor_helpers.go | 18 +- internal/blocktx/processor_helpers_test.go | 8 +- .../get_block_by_height/blocktx.blocks.yaml | 1 + .../get_orphaned_chain/blocktx.blocks.yaml | 3 + .../blocktx.blocks.yaml | 1 + .../verify_merkle_roots/blocktx.blocks.yaml | 2 + .../blocktx/store/postgresql/get_block.go | 2 +- .../store/postgresql/get_longest_chain.go | 2 +- .../store/postgresql/get_orphaned_chain.go | 1 + .../store/postgresql/get_stale_chain.go | 1 + .../store/postgresql/get_transactions.go | 2 +- .../blocktx/store/postgresql/postgres_test.go | 8 +- .../store/postgresql/verify_merkle_roots.go | 4 +- 14 files changed, 129 insertions(+), 110 deletions(-) diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index e57aa84a7..6a9c9dcf8 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -32,6 +32,7 @@ var ( ErrFailedToInsertBlockTransactions = errors.New("failed to insert block transactions") ErrBlockAlreadyExists = errors.New("block already exists in the database") ErrUnexpectedBlockStatus = errors.New("unexpected block status") + ErrFailedToProcessBlock = errors.New("failed to process block") ) const ( @@ -224,7 +225,6 @@ func (p *Processor) StartBlockProcessing() { if err != nil { p.logger.Error("block processing failed", slog.String("hash", 
blockHash.String()), slog.String("err", err.Error())) p.unlockBlock(p.ctx, &blockHash) - p.stopBlockProcessGuard(&blockHash) // release guardian continue } @@ -232,13 +232,11 @@ func (p *Processor) StartBlockProcessing() { if storeErr != nil { p.logger.Error("unable to mark block as processed", slog.String("hash", blockHash.String()), slog.String("err", storeErr.Error())) p.unlockBlock(p.ctx, &blockHash) - p.stopBlockProcessGuard(&blockHash) // release guardian continue } // add the total block processing time to the stats p.logger.Info("Processed block", slog.String("hash", blockHash.String()), slog.Int("txs", len(blockMsg.TransactionHashes)), slog.String("duration", time.Since(timeStart).String())) - p.stopBlockProcessGuard(&blockHash) // release guardian } } }() @@ -265,7 +263,7 @@ func (p *Processor) startBlockProcessGuard(ctx context.Context, hash *chainhash. // check if block was processed successfully block, _ := p.store.GetBlock(execCtx, hash) - if block != nil && block.Processed { + if block != nil { return // success } @@ -447,12 +445,17 @@ func (p *Processor) buildMerkleTreeStoreChainHash(ctx context.Context, txids []* func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { ctx := p.ctx + var block *blocktx_api.Block blockHash := msg.Header.BlockHash() + // release guardian + defer p.stopBlockProcessGuard(&blockHash) + ctx, span := tracing.StartTracing(ctx, "processBlock", p.tracingEnabled, p.tracingAttributes...) defer func() { if span != nil { span.SetAttributes(attribute.String("hash", blockHash.String())) + span.SetAttributes(attribute.String("status", block.Status.String())) } tracing.EndTracing(span, err) @@ -463,50 +466,37 @@ func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { // check if we've already processed that block existingBlock, _ := p.store.GetBlock(ctx, &blockHash) - if existingBlock != nil && existingBlock.Processed { + if existingBlock != nil { p.logger.Warn("ignoring already existing block", slog.String("hash", blockHash.String()), slog.Uint64("height", msg.Height)) return nil } - block, err := p.verifyAndInsertBlock(ctx, msg) + block, err = p.verifyAndInsertBlock(ctx, msg) if err != nil { return err } - var txsToPublish []store.TransactionBlock + var longestTxs, staleTxs []store.TransactionBlock + var ok bool switch block.Status { case blocktx_api.Status_LONGEST: - txsToPublish, err = p.getRegisteredTransactions(ctx, []*blocktx_api.Block{block}) + longestTxs, ok = p.getRegisteredTransactions(ctx, []*blocktx_api.Block{block}) case blocktx_api.Status_STALE: - txsToPublish, err = p.handleStaleBlock(ctx, block) + longestTxs, staleTxs, ok = p.handleStaleBlock(ctx, block) case blocktx_api.Status_ORPHANED: - txsToPublish, err = p.handleOrphans(ctx, block) + longestTxs, staleTxs, ok = p.handleOrphans(ctx, block) default: return ErrUnexpectedBlockStatus } - if err != nil { + if !ok { // error is already logged in each method above - return err + return ErrFailedToProcessBlock } - for _, tx := range txsToPublish { - txBlock := &blocktx_api.TransactionBlock{ - BlockHash: tx.BlockHash, - BlockHeight: tx.BlockHeight, - TransactionHash: tx.TxHash, - MerklePath: tx.MerklePath, - BlockStatus: tx.BlockStatus, - } - - p.logger.Info("publishing tx", slog.String("txHash", getHashStringNoErr(tx.TxHash))) - - err = p.mqClient.PublishMarshal(ctx, MinedTxsTopic, txBlock) - if err != nil { - p.logger.Error("failed to publish mined txs", slog.String("blockHash", getHashStringNoErr(tx.BlockHash)), slog.Uint64("height", tx.BlockHeight), 
slog.String("txHash", getHashStringNoErr(tx.TxHash)), slog.String("err", err.Error())) - } - } + p.publishTxsToMetamorph(ctx, longestTxs) + p.publishTxsToMetamorph(ctx, staleTxs) return nil } @@ -575,15 +565,6 @@ func (p *Processor) assignBlockStatus(ctx context.Context, block *blocktx_api.Bl return nil } - // if the previous block exists in the db but is currently being - // processed by another instance, we don't know what the final - // status of that parent block will be, so mark the incoming block - // as ORPHANED and wait for the next block to confirm the status - if !prevBlock.Processed { - block.Status = blocktx_api.Status_ORPHANED - return nil - } - if prevBlock.Status == blocktx_api.Status_LONGEST { var competingBlock *blocktx_api.Block competingBlock, err = p.store.GetLongestBlockByHeight(ctx, block.Height) @@ -628,7 +609,8 @@ func (p *Processor) longestTipExists(ctx context.Context) (bool, error) { return true, nil } -func (p *Processor) getRegisteredTransactions(ctx context.Context, blocks []*blocktx_api.Block) (txsToPublish []store.TransactionBlock, err error) { +func (p *Processor) getRegisteredTransactions(ctx context.Context, blocks []*blocktx_api.Block) (txsToPublish []store.TransactionBlock, ok bool) { + var err error ctx, span := tracing.StartTracing(ctx, "getRegisteredTransactions", p.tracingEnabled, p.tracingAttributes...) defer func() { tracing.EndTracing(span, err) @@ -643,10 +625,10 @@ func (p *Processor) getRegisteredTransactions(ctx context.Context, blocks []*blo if err != nil { block := blocks[len(blocks)-1] p.logger.Error("unable to get registered transactions", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) - return nil, err + return nil, false } - return txsToPublish, nil + return txsToPublish, true } func (p *Processor) insertBlockAndStoreTransactions(ctx context.Context, incomingBlock *blocktx_api.Block, txHashes []*chainhash.Hash, merkleRoot chainhash.Hash) (err error) { @@ -655,19 +637,18 @@ func (p *Processor) insertBlockAndStoreTransactions(ctx context.Context, incomin tracing.EndTracing(span, err) }() - blockID, err := p.store.UpsertBlock(ctx, incomingBlock) - if err != nil { - p.logger.Error("unable to insert block at given height", slog.String("hash", getHashStringNoErr(incomingBlock.Hash)), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) - return err - } - calculatedMerkleTree := p.buildMerkleTreeStoreChainHash(ctx, txHashes) - if !merkleRoot.IsEqual(calculatedMerkleTree[len(calculatedMerkleTree)-1]) { p.logger.Error("merkle root mismatch", slog.String("hash", getHashStringNoErr(incomingBlock.Hash))) return err } + blockID, err := p.store.UpsertBlock(ctx, incomingBlock) + if err != nil { + p.logger.Error("unable to insert block at given height", slog.String("hash", getHashStringNoErr(incomingBlock.Hash)), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) + return err + } + if err = p.storeTransactions(ctx, blockID, incomingBlock, calculatedMerkleTree); err != nil { p.logger.Error("unable to store transactions from block", slog.String("hash", getHashStringNoErr(incomingBlock.Hash)), slog.String("err", err.Error())) return err @@ -752,7 +733,8 @@ func (p *Processor) storeTransactions(ctx context.Context, blockID uint64, block return nil } -func (p *Processor) handleStaleBlock(ctx context.Context, block *blocktx_api.Block) (txsToPublish []store.TransactionBlock, err error) { +func (p *Processor) handleStaleBlock(ctx 
context.Context, block *blocktx_api.Block) (longestTxs, staleTxs []store.TransactionBlock, ok bool) { + var err error ctx, span := tracing.StartTracing(ctx, "handleStaleBlock", p.tracingEnabled, p.tracingAttributes...) defer func() { tracing.EndTracing(span, err) @@ -761,7 +743,7 @@ func (p *Processor) handleStaleBlock(ctx context.Context, block *blocktx_api.Blo staleBlocks, err := p.store.GetStaleChainBackFromHash(ctx, block.Hash) if err != nil { p.logger.Error("unable to get STALE blocks to verify chainwork", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) - return nil, err + return nil, nil, false } lowestHeight := block.Height @@ -772,7 +754,7 @@ func (p *Processor) handleStaleBlock(ctx context.Context, block *blocktx_api.Blo longestBlocks, err := p.store.GetLongestChainFromHeight(ctx, lowestHeight) if err != nil { p.logger.Error("unable to get LONGEST blocks to verify chainwork", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) - return nil, err + return nil, nil, false } staleChainwork := sumChainwork(staleBlocks) @@ -781,18 +763,18 @@ func (p *Processor) handleStaleBlock(ctx context.Context, block *blocktx_api.Blo if longestChainwork.Cmp(staleChainwork) < 0 { p.logger.Info("chain reorg detected", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height)) - txsToPublish, err = p.performReorg(ctx, staleBlocks, longestBlocks) + longestTxs, staleTxs, err = p.performReorg(ctx, staleBlocks, longestBlocks) if err != nil { p.logger.Error("unable to perform reorg", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) - return nil, err + return nil, nil, false } - return txsToPublish, nil + return longestTxs, staleTxs, true } - return nil, nil + return nil, nil, true } -func (p *Processor) performReorg(ctx context.Context, staleBlocks []*blocktx_api.Block, longestBlocks []*blocktx_api.Block) (txsToPublish []store.TransactionBlock, err error) { +func (p *Processor) performReorg(ctx context.Context, staleBlocks []*blocktx_api.Block, longestBlocks []*blocktx_api.Block) (longestTxs, staleTxs []store.TransactionBlock, err error) { ctx, span := tracing.StartTracing(ctx, "performReorg", p.tracingEnabled, p.tracingAttributes...) 
defer func() { tracing.EndTracing(span, err) @@ -821,36 +803,32 @@ func (p *Processor) performReorg(ctx context.Context, staleBlocks []*blocktx_api err = p.store.UpdateBlocksStatuses(ctx, blockStatusUpdates) if err != nil { - return nil, err + return nil, nil, err } p.logger.Info("reorg performed successfully") - registeredTxs, err := p.store.GetRegisteredTxsByBlockHashes(ctx, append(staleHashes, longestHashes...)) + // now the previously stale chain is the longest, + // so longestTxs are from previously stale block hashes + longestTxs, err = p.store.GetRegisteredTxsByBlockHashes(ctx, staleHashes) if err != nil { - return nil, err + return nil, nil, err } - longestTxs := make([]store.TransactionBlock, 0) - staleTxs := make([]store.TransactionBlock, 0) - - for _, tx := range registeredTxs { - switch tx.BlockStatus { - case blocktx_api.Status_LONGEST: - longestTxs = append(longestTxs, tx) - case blocktx_api.Status_STALE: - staleTxs = append(staleTxs, tx) - default: - // do nothing - ignore txs from ORPHANED or UNKNOWN blocks - } + // now the previously longest chain is stale, + // so staleTxs are from previously longest block hashes + staleTxs, err = p.store.GetRegisteredTxsByBlockHashes(ctx, longestHashes) + if err != nil { + return nil, nil, err } - staleTxs = findDistinctStaleTxs(longestTxs, staleTxs) + staleTxs = exclusiveRightTxs(longestTxs, staleTxs) - return append(longestTxs, staleTxs...), nil + return longestTxs, staleTxs, nil } -func (p *Processor) handleOrphans(ctx context.Context, block *blocktx_api.Block) (txsToPublis []store.TransactionBlock, err error) { +func (p *Processor) handleOrphans(ctx context.Context, block *blocktx_api.Block) (longestTxs, staleTxs []store.TransactionBlock, ok bool) { + var err error ctx, span := tracing.StartTracing(ctx, "handleOrphans", p.tracingEnabled, p.tracingAttributes...) 
defer func() { tracing.EndTracing(span, err) @@ -859,19 +837,19 @@ func (p *Processor) handleOrphans(ctx context.Context, block *blocktx_api.Block) orphans, ancestor, err := p.store.GetOrphansBackToNonOrphanAncestor(ctx, block.Hash) if err != nil { p.logger.Error("unable to get ORPHANED blocks", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) - return nil, err + return nil, nil, false } - if ancestor == nil || !ancestor.Processed || len(orphans) == 0 { - return nil, nil + if ancestor == nil || len(orphans) == 0 { + return nil, nil, true } p.logger.Info("orphaned chain found", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("status", block.Status.String())) if ancestor.Status == blocktx_api.Status_STALE { - err = p.acceptIntoChain(ctx, orphans, ancestor.Status) - if err != nil { - return nil, err + ok = p.acceptIntoChain(ctx, orphans, ancestor.Status) + if !ok { + return nil, nil, false } block.Status = blocktx_api.Status_STALE @@ -891,32 +869,34 @@ func (p *Processor) handleOrphans(ctx context.Context, block *blocktx_api.Block) competingBlock, err = p.store.GetLongestBlockByHeight(ctx, orphans[0].Height) if err != nil && !errors.Is(err, store.ErrBlockNotFound) { p.logger.Error("unable to get competing block when handling orphans", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) - return nil, err + return nil, nil, false } if competingBlock != nil && !bytes.Equal(competingBlock.Hash, orphans[0].Hash) { - err = p.acceptIntoChain(ctx, orphans, blocktx_api.Status_STALE) - if err != nil { - return nil, err + ok = p.acceptIntoChain(ctx, orphans, blocktx_api.Status_STALE) + if !ok { + return nil, nil, false } block.Status = blocktx_api.Status_STALE return p.handleStaleBlock(ctx, block) } - err = p.acceptIntoChain(ctx, orphans, ancestor.Status) // LONGEST - if err != nil { - return nil, err + ok = p.acceptIntoChain(ctx, orphans, ancestor.Status) // LONGEST + if !ok { + return nil, nil, false } p.logger.Info("orphaned chain accepted into LONGEST chain", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height)) - return p.getRegisteredTransactions(ctx, orphans) + longestTxs, ok = p.getRegisteredTransactions(ctx, orphans) + return longestTxs, nil, ok } - return nil, nil + return nil, nil, true } -func (p *Processor) acceptIntoChain(ctx context.Context, blocks []*blocktx_api.Block, chain blocktx_api.Status) (err error) { +func (p *Processor) acceptIntoChain(ctx context.Context, blocks []*blocktx_api.Block, chain blocktx_api.Status) (ok bool) { + var err error ctx, span := tracing.StartTracing(ctx, "acceptIntoChain", p.tracingEnabled, p.tracingAttributes...) 
defer func() { tracing.EndTracing(span, err) @@ -937,11 +917,35 @@ func (p *Processor) acceptIntoChain(ctx context.Context, blocks []*blocktx_api.B err = p.store.UpdateBlocksStatuses(ctx, blockStatusUpdates) if err != nil { p.logger.Error("unable to accept blocks into chain", slog.String("hash", getHashStringNoErr(tip.Hash)), slog.Uint64("height", tip.Height), slog.String("chain", chain.String()), slog.String("err", err.Error())) - return err + return false } p.logger.Info("blocks successfully accepted into chain", slog.String("hash", getHashStringNoErr(tip.Hash)), slog.Uint64("height", tip.Height), slog.String("chain", chain.String())) - return nil + return true +} + +func (p *Processor) publishTxsToMetamorph(ctx context.Context, txs []store.TransactionBlock) { + var publishErr error + ctx, span := tracing.StartTracing(ctx, "publish transactions", p.tracingEnabled, p.tracingAttributes...) + defer func() { + tracing.EndTracing(span, publishErr) + }() + + for _, tx := range txs { + txBlock := &blocktx_api.TransactionBlock{ + BlockHash: tx.BlockHash, + BlockHeight: tx.BlockHeight, + TransactionHash: tx.TxHash, + MerklePath: tx.MerklePath, + BlockStatus: tx.BlockStatus, + } + + err := p.mqClient.PublishMarshal(ctx, MinedTxsTopic, txBlock) + if err != nil { + p.logger.Error("failed to publish mined txs", slog.String("blockHash", getHashStringNoErr(tx.BlockHash)), slog.Uint64("height", tx.BlockHeight), slog.String("txHash", getHashStringNoErr(tx.TxHash)), slog.String("err", err.Error())) + publishErr = err + } + } } func (p *Processor) Shutdown() { diff --git a/internal/blocktx/processor_helpers.go b/internal/blocktx/processor_helpers.go index a5fbcd53a..bb339fd8c 100644 --- a/internal/blocktx/processor_helpers.go +++ b/internal/blocktx/processor_helpers.go @@ -29,21 +29,21 @@ func sumChainwork(blocks []*blocktx_api.Block) *big.Int { return sum } -func findDistinctStaleTxs(longestTxs, staleTxs []store.TransactionBlock) []store.TransactionBlock { - longestTxsMap := make(map[string]struct{}) +func exclusiveRightTxs(leftTxs, rightTxs []store.TransactionBlock) []store.TransactionBlock { + leftTxsMap := make(map[string]struct{}) - for _, tx := range longestTxs { - longestTxsMap[string(tx.TxHash)] = struct{}{} + for _, tx := range leftTxs { + leftTxsMap[string(tx.TxHash)] = struct{}{} } - distinctStaleTxs := make([]store.TransactionBlock, 0) - for _, tx := range staleTxs { - if _, found := longestTxsMap[string(tx.TxHash)]; !found { - distinctStaleTxs = append(distinctStaleTxs, tx) + exclusiveRightTxs := make([]store.TransactionBlock, 0) + for _, tx := range rightTxs { + if _, found := leftTxsMap[string(tx.TxHash)]; !found { + exclusiveRightTxs = append(exclusiveRightTxs, tx) } } - return distinctStaleTxs + return exclusiveRightTxs } // calculateChainwork calculates chainwork from the given difficulty bits diff --git a/internal/blocktx/processor_helpers_test.go b/internal/blocktx/processor_helpers_test.go index b7ad49fd3..794ebef95 100644 --- a/internal/blocktx/processor_helpers_test.go +++ b/internal/blocktx/processor_helpers_test.go @@ -8,9 +8,9 @@ import ( "github.com/stretchr/testify/require" ) -func TestFindMinedAndStaleTxs(t *testing.T) { +func TestExlusiveRightTxs(t *testing.T) { // given - longestTxs := []store.TransactionBlock{ + leftTxs := []store.TransactionBlock{ { TxHash: []byte("1"), }, @@ -18,7 +18,7 @@ func TestFindMinedAndStaleTxs(t *testing.T) { TxHash: []byte("2"), }, } - staleTxs := []store.TransactionBlock{ + rightTxs := []store.TransactionBlock{ { TxHash: []byte("A"), }, @@ -40,7 
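The renamed helper above is a plain set difference over transaction hashes: it keeps only the transactions that appear on the right-hand side and not on the left. A minimal, self-contained sketch of the same idea follows; the transactionBlock type here is a stripped-down stand-in for store.TransactionBlock, keeping only the field the helper actually keys on.

package main

import "fmt"

// transactionBlock is an illustrative stand-in for store.TransactionBlock.
type transactionBlock struct {
	TxHash []byte
}

// exclusiveRightTxs returns the entries of rightTxs whose TxHash does not
// occur in leftTxs, i.e. the right-hand side of the set difference.
func exclusiveRightTxs(leftTxs, rightTxs []transactionBlock) []transactionBlock {
	leftTxsMap := make(map[string]struct{}, len(leftTxs))
	for _, tx := range leftTxs {
		leftTxsMap[string(tx.TxHash)] = struct{}{}
	}

	out := make([]transactionBlock, 0, len(rightTxs))
	for _, tx := range rightTxs {
		if _, found := leftTxsMap[string(tx.TxHash)]; !found {
			out = append(out, tx)
		}
	}
	return out
}

func main() {
	longest := []transactionBlock{{TxHash: []byte("1")}, {TxHash: []byte("2")}}
	stale := []transactionBlock{{TxHash: []byte("2")}, {TxHash: []byte("A")}}

	// Only "A" is exclusive to the stale side, so only it would be
	// re-published with a STALE block status after a reorg.
	fmt.Println(len(exclusiveRightTxs(longest, stale))) // prints 1
}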
+40,7 @@ func TestFindMinedAndStaleTxs(t *testing.T) { } // when - actualStaleTxs := findDistinctStaleTxs(longestTxs, staleTxs) + actualStaleTxs := exclusiveRightTxs(leftTxs, rightTxs) // then require.Equal(t, expectedStaleTxs, actualStaleTxs) diff --git a/internal/blocktx/store/postgresql/fixtures/get_block_by_height/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/get_block_by_height/blocktx.blocks.yaml index 654530c00..c1a8f0b30 100644 --- a/internal/blocktx/store/postgresql/fixtures/get_block_by_height/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/get_block_by_height/blocktx.blocks.yaml @@ -49,6 +49,7 @@ prevhash: 0x5696fc6e504b6aa2ae5d9c46b9418192dc61bd1b2e3364030000000000000000 merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 height: 822020 + processed_at: 2023-12-15 14:40:00 size: 8630000 tx_count: 36724 status: 10 diff --git a/internal/blocktx/store/postgresql/fixtures/get_orphaned_chain/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/get_orphaned_chain/blocktx.blocks.yaml index dd193b81d..0c06f92ae 100644 --- a/internal/blocktx/store/postgresql/fixtures/get_orphaned_chain/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/get_orphaned_chain/blocktx.blocks.yaml @@ -64,6 +64,7 @@ prevhash: 0x00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656 merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 height: 822020 + processed_at: 2023-12-15 14:10:00 size: 8630000 tx_count: 36724 status: 30 # ORPHANED @@ -78,6 +79,7 @@ prevhash: 0x0000000000000000094510c50011a891b74ef054d6cac0a5ae8bd60f02c85f1d merkleroot: 0xda71199f8ed9203d8a765595e6c030a22e5ed8330b1abb467a82c97d7d21d512 height: 822022 + processed_at: 2023-12-15 14:10:00 size: 8630000 tx_count: 36724 status: 30 # ORPHANED @@ -89,6 +91,7 @@ prevhash: 0x0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c merkleroot: 0xda71199f8ed9203d8a765595e6c030a22e5ed8330b1abb467a82c97d7d21d512 height: 822023 + processed_at: 2023-12-15 14:10:00 size: 8630000 tx_count: 36724 status: 30 # ORPHANED diff --git a/internal/blocktx/store/postgresql/fixtures/update_blocks_statuses/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/update_blocks_statuses/blocktx.blocks.yaml index ec6976daf..bb97faed5 100644 --- a/internal/blocktx/store/postgresql/fixtures/update_blocks_statuses/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/update_blocks_statuses/blocktx.blocks.yaml @@ -52,6 +52,7 @@ prevhash: 0x0000000000000000082ec88d757ddaeb0aa87a5d5408b5960f27e7e67312dfe1 merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 height: 822017 + processed_at: 2023-12-15 14:10:00 size: 8630000 tx_count: 36724 status: 20 # STALE diff --git a/internal/blocktx/store/postgresql/fixtures/verify_merkle_roots/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/verify_merkle_roots/blocktx.blocks.yaml index f8c707254..9a623765e 100644 --- a/internal/blocktx/store/postgresql/fixtures/verify_merkle_roots/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/verify_merkle_roots/blocktx.blocks.yaml @@ -4,6 +4,7 @@ prevhash: 0x3dc8a25238765c58a9fccbcf83fba59bf93894bcaaee8e040000000000000000 merkleroot: 0x4f3d1594808f485303ba8a1ed4f7c15f8087e5da1b7ebd25fa266630a8a6d880 height: 812010 + processed_at: 2023-12-10 14:10:00 size: 244000000 tx_count: 4437 - inserted_at: 2023-12-10 14:00:00 @@ -57,6 +58,7 @@ prevhash: 
0x5696fc6e504b6aa2ae5d9c46b9418192dc61bd1b2e3364030000000000000000 merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 height: 822020 + processed_at: 2023-12-10 14:10:00 size: 8630000 tx_count: 36724 - inserted_at: 2023-12-15 15:00:00 diff --git a/internal/blocktx/store/postgresql/get_block.go b/internal/blocktx/store/postgresql/get_block.go index 55b9646f3..adfaadb39 100644 --- a/internal/blocktx/store/postgresql/get_block.go +++ b/internal/blocktx/store/postgresql/get_block.go @@ -41,7 +41,7 @@ func (p *PostgreSQL) queryBlockByPredicate(ctx context.Context, predicate string FROM blocktx.blocks ` - q += " " + predicate + q += " " + predicate + " AND processed_at IS NOT NULL" var block blocktx_api.Block diff --git a/internal/blocktx/store/postgresql/get_longest_chain.go b/internal/blocktx/store/postgresql/get_longest_chain.go index cca3607a2..10bff0265 100644 --- a/internal/blocktx/store/postgresql/get_longest_chain.go +++ b/internal/blocktx/store/postgresql/get_longest_chain.go @@ -17,7 +17,7 @@ func (p *PostgreSQL) GetLongestChainFromHeight(ctx context.Context, height uint6 ,status ,chainwork FROM blocktx.blocks - WHERE height >= $1 AND is_longest = true + WHERE height >= $1 AND is_longest = true AND processed_at IS NOT NULL ` rows, err := p.db.QueryContext(ctx, q, height) diff --git a/internal/blocktx/store/postgresql/get_orphaned_chain.go b/internal/blocktx/store/postgresql/get_orphaned_chain.go index 441034a43..79f28b59b 100644 --- a/internal/blocktx/store/postgresql/get_orphaned_chain.go +++ b/internal/blocktx/store/postgresql/get_orphaned_chain.go @@ -44,6 +44,7 @@ func (p *PostgreSQL) GetOrphansBackToNonOrphanAncestor(ctx context.Context, hash ,b.status ,b.chainwork FROM blocktx.blocks b JOIN orphans o ON o.prevhash = b.hash AND b.status = $2 + WHERE b.processed_at IS NOT NULL ) SELECT hash diff --git a/internal/blocktx/store/postgresql/get_stale_chain.go b/internal/blocktx/store/postgresql/get_stale_chain.go index 66aadb75b..435e70909 100644 --- a/internal/blocktx/store/postgresql/get_stale_chain.go +++ b/internal/blocktx/store/postgresql/get_stale_chain.go @@ -48,6 +48,7 @@ func (p *PostgreSQL) GetStaleChainBackFromHash(ctx context.Context, hash []byte) ,b.status ,b.chainwork FROM blocktx.blocks b JOIN prevBlocks p ON b.hash = p.prevhash AND b.status = $2 + WHERE b.processed_at IS NOT NULL ) SELECT hash diff --git a/internal/blocktx/store/postgresql/get_transactions.go b/internal/blocktx/store/postgresql/get_transactions.go index ed7f2ee1c..5900cf308 100644 --- a/internal/blocktx/store/postgresql/get_transactions.go +++ b/internal/blocktx/store/postgresql/get_transactions.go @@ -20,7 +20,7 @@ func (p *PostgreSQL) GetMinedTransactions(ctx context.Context, hashes [][]byte, return p.getTransactionBlocksByPredicate(ctx, predicate, pq.Array(hashes)) } - predicate := "WHERE t.hash = ANY($1) AND (b.status = $2 OR b.status = $3)" + predicate := "WHERE t.hash = ANY($1) AND (b.status = $2 OR b.status = $3) AND b.processed_at IS NOT NULL" return p.getTransactionBlocksByPredicate(ctx, predicate, pq.Array(hashes), diff --git a/internal/blocktx/store/postgresql/postgres_test.go b/internal/blocktx/store/postgresql/postgres_test.go index 84572f781..774ac37e9 100644 --- a/internal/blocktx/store/postgresql/postgres_test.go +++ b/internal/blocktx/store/postgresql/postgres_test.go @@ -144,6 +144,7 @@ func TestPostgresDB(t *testing.T) { MerkleRoot: merkleRoot[:], Height: 100, Status: blocktx_api.Status_LONGEST, + Processed: true, } expectedBlockViolatingUniqueIndex := 
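The query changes above all add the same guard: a block only becomes visible to readers once processed_at has been set, so a block that was upserted but whose transactions are not yet stored can never influence chain selection or mined-transaction lookups. A minimal sketch of the pattern, assuming only database/sql and the blocktx.blocks columns shown in the fixtures; the function name is illustrative, not part of the store API.

package blocktxsketch

import (
	"context"
	"database/sql"
)

// getLongestFromHeight mirrors the predicate style used above: the
// processed_at IS NOT NULL clause hides blocks that are still being processed.
func getLongestFromHeight(ctx context.Context, db *sql.DB, height uint64) (*sql.Rows, error) {
	const q = `
		SELECT hash, prevhash, merkleroot, height, status, chainwork
		FROM blocktx.blocks
		WHERE height >= $1
		  AND is_longest = true
		  AND processed_at IS NOT NULL
	`
	return db.QueryContext(ctx, q, height)
}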
&blocktx_api.Block{ Hash: blockHashViolating[:], @@ -158,13 +159,18 @@ func TestPostgresDB(t *testing.T) { MerkleRoot: merkleRoot[:], Height: 100, Status: blocktx_api.Status_ORPHANED, + Processed: true, } - // when -> then + // when id, err := postgresDB.UpsertBlock(ctx, expectedBlock) require.NoError(t, err) require.Equal(t, uint64(1), id) + err = postgresDB.MarkBlockAsDone(ctx, blockHash2, uint64(1000), uint64(1)) + require.NoError(t, err) + + // then actualBlockResp, err := postgresDB.GetBlock(ctx, blockHash2) require.NoError(t, err) require.Equal(t, expectedBlock, actualBlockResp) diff --git a/internal/blocktx/store/postgresql/verify_merkle_roots.go b/internal/blocktx/store/postgresql/verify_merkle_roots.go index a02d201ee..0da1d452b 100644 --- a/internal/blocktx/store/postgresql/verify_merkle_roots.go +++ b/internal/blocktx/store/postgresql/verify_merkle_roots.go @@ -17,7 +17,7 @@ func (p *PostgreSQL) VerifyMerkleRoots( maxAllowedBlockHeightMismatch int, ) (*blocktx_api.MerkleRootVerificationResponse, error) { qTopHeight := ` - SELECT MAX(b.height), MIN(b.height) FROM blocktx.blocks b WHERE b.is_longest = true + SELECT MAX(b.height), MIN(b.height) FROM blocktx.blocks b WHERE b.is_longest = true AND b.processed_at IS NOT NULL ` var topHeight uint64 @@ -32,7 +32,7 @@ func (p *PostgreSQL) VerifyMerkleRoots( } qMerkleRoot := ` - SELECT b.height FROM blocktx.blocks b WHERE b.merkleroot = $1 AND b.height = $2 AND b.is_longest = true + SELECT b.height FROM blocktx.blocks b WHERE b.merkleroot = $1 AND b.height = $2 AND b.is_longest = true AND b.processed_at IS NOT NULL ` var unverifiedBlockHeights []uint64 From 26321688ebd73a62a908d71522c60b2b600ce738 Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Tue, 3 Dec 2024 10:59:41 +0100 Subject: [PATCH 31/36] feat: add hash to blockmessage --- .../blockchain_communication/block_message.go | 1 + .../blockchain_communication/p2p/init.go | 2 ++ internal/blocktx/processor.go | 36 +++++++++---------- .../blocktx/store/postgresql/postgres_test.go | 2 +- 4 files changed, 21 insertions(+), 20 deletions(-) diff --git a/internal/blocktx/blockchain_communication/block_message.go b/internal/blocktx/blockchain_communication/block_message.go index 70902f4b8..1df43f095 100644 --- a/internal/blocktx/blockchain_communication/block_message.go +++ b/internal/blocktx/blockchain_communication/block_message.go @@ -9,6 +9,7 @@ import ( // BlockMessage only stores the transaction IDs of the block, not the full transactions type BlockMessage struct { + Hash *chainhash.Hash Header *wire.BlockHeader Height uint64 TransactionHashes []*chainhash.Hash diff --git a/internal/blocktx/blockchain_communication/p2p/init.go b/internal/blocktx/blockchain_communication/p2p/init.go index f7741b0fd..11c7f8490 100644 --- a/internal/blocktx/blockchain_communication/p2p/init.go +++ b/internal/blocktx/blockchain_communication/p2p/init.go @@ -60,6 +60,8 @@ func init() { } blockMessage.Size = uint64(bytesRead) + blockHash := blockMessage.Header.BlockHash() + blockMessage.Hash = &blockHash return bytesRead, blockMessage, nil, nil }) diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index 6a9c9dcf8..f9bf98496 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -442,36 +442,35 @@ func (p *Processor) buildMerkleTreeStoreChainHash(ctx context.Context, txids []* return bc.BuildMerkleTreeStoreChainHash(txids) } -func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { +func (p *Processor) processBlock(blockMsg 
*blockchain.BlockMessage) (err error) { ctx := p.ctx var block *blocktx_api.Block - blockHash := msg.Header.BlockHash() // release guardian - defer p.stopBlockProcessGuard(&blockHash) + defer p.stopBlockProcessGuard(blockMsg.Hash) ctx, span := tracing.StartTracing(ctx, "processBlock", p.tracingEnabled, p.tracingAttributes...) defer func() { if span != nil { - span.SetAttributes(attribute.String("hash", blockHash.String())) + span.SetAttributes(attribute.String("hash", blockMsg.Hash.String())) span.SetAttributes(attribute.String("status", block.Status.String())) } tracing.EndTracing(span, err) }() - p.logger.Info("processing incoming block", slog.String("hash", blockHash.String()), slog.Uint64("height", msg.Height)) + p.logger.Info("processing incoming block", slog.String("hash", blockMsg.Hash.String()), slog.Uint64("height", blockMsg.Height)) // check if we've already processed that block - existingBlock, _ := p.store.GetBlock(ctx, &blockHash) + existingBlock, _ := p.store.GetBlock(ctx, blockMsg.Hash) if existingBlock != nil { - p.logger.Warn("ignoring already existing block", slog.String("hash", blockHash.String()), slog.Uint64("height", msg.Height)) + p.logger.Warn("ignoring already existing block", slog.String("hash", blockMsg.Hash.String()), slog.Uint64("height", blockMsg.Height)) return nil } - block, err = p.verifyAndInsertBlock(ctx, msg) + block, err = p.verifyAndInsertBlock(ctx, blockMsg) if err != nil { return err } @@ -501,35 +500,34 @@ func (p *Processor) processBlock(msg *blockchain.BlockMessage) (err error) { return nil } -func (p *Processor) verifyAndInsertBlock(ctx context.Context, msg *p2p.BlockMessage) (incomingBlock *blocktx_api.Block, err error) { +func (p *Processor) verifyAndInsertBlock(ctx context.Context, blockMsg *blockchain.BlockMessage) (incomingBlock *blocktx_api.Block, err error) { ctx, span := tracing.StartTracing(ctx, "verifyAndInsertBlock", p.tracingEnabled, p.tracingAttributes...) 
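Carrying the hash on the message itself means the 80-byte header is hashed exactly once, at decode time, and every consumer (guard release, tracing, store lookups, logging) reuses the same pointer. A small sketch of that idea, with the struct reduced to the relevant fields and import paths taken from the ones already used in this patch.

package main

import (
	"fmt"

	"github.com/libsv/go-p2p/chaincfg/chainhash"
	"github.com/libsv/go-p2p/wire"
)

// blockMessage mirrors the relevant part of blockchain.BlockMessage:
// the hash is stored next to the header so consumers do not recompute it.
type blockMessage struct {
	Hash   *chainhash.Hash
	Header *wire.BlockHeader
	Height uint64
}

// newBlockMessage hashes the header a single time, at decode time.
func newBlockMessage(header *wire.BlockHeader, height uint64) *blockMessage {
	blockHash := header.BlockHash()
	return &blockMessage{
		Hash:   &blockHash,
		Header: header,
		Height: height,
	}
}

func main() {
	msg := newBlockMessage(&wire.BlockHeader{Version: 541065216}, 822011)
	fmt.Println(msg.Hash.String(), msg.Height)
}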
defer func() { tracing.EndTracing(span, err) }() - blockHash := msg.Header.BlockHash() - previousBlockHash := msg.Header.PrevBlock - merkleRoot := msg.Header.MerkleRoot + previousBlockHash := blockMsg.Header.PrevBlock + merkleRoot := blockMsg.Header.MerkleRoot incomingBlock = &blocktx_api.Block{ - Hash: blockHash[:], + Hash: blockMsg.Hash[:], PreviousHash: previousBlockHash[:], MerkleRoot: merkleRoot[:], - Height: msg.Height, - Chainwork: calculateChainwork(msg.Header.Bits).String(), + Height: blockMsg.Height, + Chainwork: calculateChainwork(blockMsg.Header.Bits).String(), } err = p.assignBlockStatus(ctx, incomingBlock, previousBlockHash) if err != nil { - p.logger.Error("unable to assign block status", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) + p.logger.Error("unable to assign block status", slog.String("hash", blockMsg.Hash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) return nil, err } - p.logger.Info("Inserting block", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("status", incomingBlock.Status.String())) + p.logger.Info("Inserting block", slog.String("hash", blockMsg.Hash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("status", incomingBlock.Status.String())) - err = p.insertBlockAndStoreTransactions(ctx, incomingBlock, msg.TransactionHashes, msg.Header.MerkleRoot) + err = p.insertBlockAndStoreTransactions(ctx, incomingBlock, blockMsg.TransactionHashes, blockMsg.Header.MerkleRoot) if err != nil { - p.logger.Error("unable to insert block and store its transactions", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) + p.logger.Error("unable to insert block and store its transactions", slog.String("hash", blockMsg.Hash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) return nil, err } diff --git a/internal/blocktx/store/postgresql/postgres_test.go b/internal/blocktx/store/postgresql/postgres_test.go index 774ac37e9..2c47ef8bf 100644 --- a/internal/blocktx/store/postgresql/postgres_test.go +++ b/internal/blocktx/store/postgresql/postgres_test.go @@ -264,7 +264,7 @@ func TestPostgresDB(t *testing.T) { t.Run("get stats for block geps", func(t *testing.T) { // given - prepareDb(t, postgresDB.db, "fixtures/get_block_gaps") + prepareDb(t, postgresDB, "fixtures/get_block_gaps") // when actualBlockGaps, err := postgresDB.GetStats(ctx) From 1b1112948f785a961f895c485f2b47fe6cc926e0 Mon Sep 17 00:00:00 2001 From: Kuba <127198012+kuba-4chain@users.noreply.github.com> Date: Wed, 4 Dec 2024 17:52:39 +0100 Subject: [PATCH 32/36] refactor(reorg-support): improved integration test (#689) --- .../reorg/blocktx.block_transactions_map.yaml | 16 + .../fixtures/reorg/blocktx.blocks.yaml | 72 ++ .../{ => reorg}/blocktx.transactions.yaml | 0 .../blocktx.block_transactions_map.yaml | 20 + .../reorg_orphans/blocktx.blocks.yaml | 138 ++++ .../reorg_orphans/blocktx.transactions.yaml | 20 + .../blocktx.block_transactions_map.yaml | 4 + .../fixtures/stale_block/blocktx.blocks.yaml | 24 + .../stale_block/blocktx.transactions.yaml | 12 + .../blocktx.block_transactions_map.yaml | 2 +- .../{ => stale_orphans}/blocktx.blocks.yaml | 40 +- .../stale_orphans/blocktx.transactions.yaml | 12 + internal/blocktx/integration_test/helpers.go | 90 +++ .../reorg_integration_test.go | 665 +++++++++--------- internal/blocktx/processor.go 
| 15 +- 15 files changed, 772 insertions(+), 358 deletions(-) create mode 100644 internal/blocktx/integration_test/fixtures/reorg/blocktx.block_transactions_map.yaml create mode 100644 internal/blocktx/integration_test/fixtures/reorg/blocktx.blocks.yaml rename internal/blocktx/integration_test/fixtures/{ => reorg}/blocktx.transactions.yaml (100%) create mode 100644 internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.block_transactions_map.yaml create mode 100644 internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.blocks.yaml create mode 100644 internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.transactions.yaml create mode 100644 internal/blocktx/integration_test/fixtures/stale_block/blocktx.block_transactions_map.yaml create mode 100644 internal/blocktx/integration_test/fixtures/stale_block/blocktx.blocks.yaml create mode 100644 internal/blocktx/integration_test/fixtures/stale_block/blocktx.transactions.yaml rename internal/blocktx/integration_test/fixtures/{ => stale_orphans}/blocktx.block_transactions_map.yaml (91%) rename internal/blocktx/integration_test/fixtures/{ => stale_orphans}/blocktx.blocks.yaml (88%) create mode 100644 internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.transactions.yaml diff --git a/internal/blocktx/integration_test/fixtures/reorg/blocktx.block_transactions_map.yaml b/internal/blocktx/integration_test/fixtures/reorg/blocktx.block_transactions_map.yaml new file mode 100644 index 000000000..54294aa74 --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/reorg/blocktx.block_transactions_map.yaml @@ -0,0 +1,16 @@ +- blockid: 1002 + txid: 1000003 + merkle_path: merkle-path-3 + inserted_at: 2023-12-10 14:00:00 +- blockid: 1999 # the same tx also in stale block + txid: 1000003 + merkle_path: merkle-path-999 + inserted_at: 2023-12-10 14:00:00 +- blockid: 1999 # the same tx also in stale block + txid: 1000001 + merkle_path: merkle-path-999 + inserted_at: 2023-12-10 14:00:00 +- blockid: 1004 + txid: 1000005 + merkle_path: merkle-path-5 + inserted_at: 2023-12-10 14:00:00 diff --git a/internal/blocktx/integration_test/fixtures/reorg/blocktx.blocks.yaml b/internal/blocktx/integration_test/fixtures/reorg/blocktx.blocks.yaml new file mode 100644 index 000000000..58b3fddeb --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/reorg/blocktx.blocks.yaml @@ -0,0 +1,72 @@ +- inserted_at: 2023-12-15 14:00:00 + id: 1001 + hash: 0xf97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000 + prevhash: 0xb71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000 + merkleroot: 0x7f4019eb006f5333cce752df387fa8443035c22291eb771ee5b16a02b81c8483 + height: 822014 + processed_at: 2023-12-15 14:10:00 + size: 86840000 + tx_count: 23477 + status: 10 + is_longest: true + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:30:00 + id: 1002 + hash: 0xc9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000 + prevhash: 0x67708796ef57464ed9eaf2a663d3da32372e4c2fb65558020000000000000000 + merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 + height: 822015 + processed_at: 2023-12-15 14:30:00 + size: 20160000 + tx_count: 6523 + status: 10 + is_longest: true + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:30:00 + id: 1999 + hash: 0x82471bbf045ab13825a245b37de71d77ec12513b37e2524ec11551d18c19f7c3 + prevhash: 0x67708796ef57464ed9eaf2a663d3da32372e4c2fb65558020000000000000000 + merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 + height: 
822015 + processed_at: 2023-12-15 14:30:00 + size: 20160000 + tx_count: 6523 + status: 20 # STALE - competing block + is_longest: false + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:40:00 + id: 1003 + hash: 0xe1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000 + prevhash: 0xc9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000 + merkleroot: 0x4b58b0402a84012269b124f78c91a78a814eb3c9caa03f1df1d33172b23082d1 + height: 822016 + processed_at: 2023-12-15 14:40:00 + size: 299650000 + tx_count: 62162 + status: 10 + is_longest: true + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:50:00 + id: 1004 + hash: 0x76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000 + prevhash: 0xe1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000 + merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 + height: 822017 + size: 8630000 + processed_at: 2023-12-15 14:40:00 + tx_count: 36724 + status: 10 + is_longest: true + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:50:00 + id: 10052 + hash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde + prevhash: 0x76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000 + merkleroot: 0xde0753d9ce6f92e340843cbfdd11e58beff8c578956ecdec4c461b018a26b8a9 + height: 822018 + size: 8630000 + processed_at: 2023-12-15 14:40:00 + tx_count: 36724 + status: 30 # ORPHANED + is_longest: false + chainwork: '62209952899966' diff --git a/internal/blocktx/integration_test/fixtures/blocktx.transactions.yaml b/internal/blocktx/integration_test/fixtures/reorg/blocktx.transactions.yaml similarity index 100% rename from internal/blocktx/integration_test/fixtures/blocktx.transactions.yaml rename to internal/blocktx/integration_test/fixtures/reorg/blocktx.transactions.yaml diff --git a/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.block_transactions_map.yaml b/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.block_transactions_map.yaml new file mode 100644 index 000000000..e02912cfd --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.block_transactions_map.yaml @@ -0,0 +1,20 @@ +- blockid: 1002 + txid: 1000001 + merkle_path: merkle-path-1 + inserted_at: 2023-12-10 14:00:00 +- blockid: 1002 + txid: 1000003 + merkle_path: merkle-path-3 + inserted_at: 2023-12-10 14:00:00 +- blockid: 1004 + txid: 1000003 + merkle_path: merkle-path-3-competing + inserted_at: 2023-12-10 14:00:00 +- blockid: 1003 + txid: 1000004 + merkle_path: merkle-path-4 + inserted_at: 2023-12-10 14:00:00 +- blockid: 1006 + txid: 1000005 + merkle_path: merkle-path-5 + inserted_at: 2023-12-10 14:00:00 diff --git a/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.blocks.yaml b/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.blocks.yaml new file mode 100644 index 000000000..3e18f16d3 --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.blocks.yaml @@ -0,0 +1,138 @@ +- inserted_at: 2023-12-15 14:00:00 + id: 1001 + hash: 0xf97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000 + prevhash: 0xb71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000 + merkleroot: 0x7f4019eb006f5333cce752df387fa8443035c22291eb771ee5b16a02b81c8483 + height: 822014 + processed_at: 2023-12-15 14:10:00 + size: 86840000 + tx_count: 23477 + status: 10 + is_longest: true + chainwork: '62209952899966' + +- inserted_at: 2023-12-15 14:30:00 + id: 1002 + hash: 
0x82471bbf045ab13825a245b37de71d77ec12513b37e2524ec11551d18c19f7c3 + prevhash: 0x67708796ef57464ed9eaf2a663d3da32372e4c2fb65558020000000000000000 + merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 + height: 822015 + processed_at: 2023-12-15 14:30:00 + size: 20160000 + tx_count: 6523 + status: 10 + is_longest: true + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:30:00 + id: 1003 + hash: 0x032c3688bc7536b2d787f3a196b1145a09bf33183cd1448ff6b1a9dfbb022db8 + prevhash: 0x82471bbf045ab13825a245b37de71d77ec12513b37e2524ec11551d18c19f7c3 + merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 + height: 822016 + processed_at: 2023-12-15 14:30:00 + size: 20160000 + tx_count: 6523 + status: 10 + is_longest: true + chainwork: '12301577519373468' + +- inserted_at: 2023-12-15 14:30:00 + id: 1004 + hash: 0xc9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000 + prevhash: 0x67708796ef57464ed9eaf2a663d3da32372e4c2fb65558020000000000000000 + merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 + height: 822015 + processed_at: 2023-12-15 14:30:00 + size: 20160000 + tx_count: 6523 + status: 20 # STALE + is_longest: false + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:40:00 + id: 1005 + hash: 0xe1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000 + prevhash: 0xc9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000 + merkleroot: 0x4b58b0402a84012269b124f78c91a78a814eb3c9caa03f1df1d33172b23082d1 + height: 822016 + processed_at: 2023-12-15 14:40:00 + size: 299650000 + tx_count: 62162 + status: 20 # STALE + is_longest: false + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:50:00 + id: 1006 + hash: 0x76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000 + prevhash: 0xe1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000 + merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 + height: 822017 + size: 8630000 + processed_at: 2023-12-15 14:40:00 + tx_count: 36724 + status: 20 # STALE + is_longest: false + chainwork: '62209952899966' + +- inserted_at: 2023-12-15 14:50:00 + id: 1007 + hash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde + prevhash: 0x76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000 + merkleroot: 0xde0753d9ce6f92e340843cbfdd11e58beff8c578956ecdec4c461b018a26b8a9 + height: 822018 + size: 8630000 + processed_at: 2023-12-15 14:40:00 + tx_count: 36724 + status: 30 # ORPHANED + is_longest: false + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:50:00 + id: 1008 + hash: 0x00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656 + prevhash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde + merkleroot: 0x51f33784f6d54f1d6414fa4d8b8d6904215cb16a3fa0a8b1fe02e456a90544d4 + height: 822019 + size: 8630000 + processed_at: 2023-12-15 14:40:00 + tx_count: 36724 + status: 30 # ORPHANED + is_longest: false + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:50:00 + id: 1009 + hash: 0x00000000000000000a5c4d27edc0178e953a5bb0ab0081e66cb30c8890484076 + prevhash: 0x00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656 + merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 + height: 822020 + size: 8630000 + processed_at: 2023-12-15 14:40:00 + tx_count: 36724 + status: 30 # ORPHANED + is_longest: false + chainwork: '12301481384759134' + +# gap + +- inserted_at: 2023-12-15 14:50:00 + id: 1010 + hash: 
0x0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c + prevhash: 0xd46bf0a189927b62c8ff785d393a545093ca01af159aed771a8d94749f06c060 + merkleroot: 0xda71199f8ed9203d8a765595e6c030a22e5ed8330b1abb467a82c97d7d21d512 + height: 822022 + size: 8630000 + processed_at: 2023-12-15 14:40:00 + tx_count: 36724 + status: 30 # ORPHANED + is_longest: false + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:50:00 + id: 1011 + hash: 0x0000000000000000082131979a4e25a5101912a5f8461e18f306d23e158161cd + prevhash: 0x0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c + merkleroot: 0x8e3177a33d6a87785b7104f20ca345e1713ae11ec2723a41028efddabebb861b + height: 822023 + size: 8630000 + processed_at: 2023-12-15 14:40:00 + tx_count: 36724 + status: 30 # ORPHANED + is_longest: false + chainwork: '12301481384759134' diff --git a/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.transactions.yaml b/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.transactions.yaml new file mode 100644 index 000000000..0b42d6d15 --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.transactions.yaml @@ -0,0 +1,20 @@ +- id: 1000001 + hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853 + inserted_at: 2023-12-10 14:00:00 + is_registered: true +- id: 1000002 + hash: 0x21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e + inserted_at: 2023-12-10 14:00:00 + is_registered: false +- id: 1000003 + hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430 + inserted_at: 2023-12-10 14:00:00 + is_registered: true +- id: 1000004 + hash: 0x2ff4430eb883c6f6c0640a5d716b2d107bbc0efa5aeaa237aec796d4686b0a8f + inserted_at: 2023-12-10 14:00:00 + is_registered: true +- id: 1000005 + hash: 0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6 + inserted_at: 2023-12-10 14:00:00 + is_registered: true diff --git a/internal/blocktx/integration_test/fixtures/stale_block/blocktx.block_transactions_map.yaml b/internal/blocktx/integration_test/fixtures/stale_block/blocktx.block_transactions_map.yaml new file mode 100644 index 000000000..317aaf634 --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/stale_block/blocktx.block_transactions_map.yaml @@ -0,0 +1,4 @@ +- blockid: 1002 + txid: 1000003 + merkle_path: merkle-path-3 + inserted_at: 2023-12-10 14:00:00 diff --git a/internal/blocktx/integration_test/fixtures/stale_block/blocktx.blocks.yaml b/internal/blocktx/integration_test/fixtures/stale_block/blocktx.blocks.yaml new file mode 100644 index 000000000..4336826d4 --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/stale_block/blocktx.blocks.yaml @@ -0,0 +1,24 @@ +- inserted_at: 2023-12-15 14:00:00 + id: 1001 + hash: 0xf97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000 + prevhash: 0xb71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000 + merkleroot: 0x7f4019eb006f5333cce752df387fa8443035c22291eb771ee5b16a02b81c8483 + height: 822014 + processed_at: 2023-12-15 14:10:00 + size: 86840000 + tx_count: 23477 + status: 10 + is_longest: true + chainwork: '62209952899966' +- inserted_at: 2023-12-15 14:30:00 + id: 1002 + hash: 0xc9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000 + prevhash: 0x67708796ef57464ed9eaf2a663d3da32372e4c2fb65558020000000000000000 + merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 + height: 822015 + processed_at: 2023-12-15 14:30:00 + size: 20160000 + tx_count: 6523 + status: 10 + is_longest: true + 
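These fixtures exercise the chainwork rule used by handleStaleBlock: a stale chain only triggers a reorg when the summed chainwork of its blocks exceeds that of the competing longest chain. A minimal sketch of that comparison, keeping chainwork as decimal strings exactly as stored in the fixtures; the slice-of-strings signature is illustrative only.

package main

import (
	"fmt"
	"math/big"
)

// sumChainwork adds up per-block chainwork values stored as decimal strings.
func sumChainwork(chainworks []string) *big.Int {
	sum := new(big.Int)
	for _, cw := range chainworks {
		v, ok := new(big.Int).SetString(cw, 10)
		if !ok {
			continue // illustrative only: skip malformed values
		}
		sum.Add(sum, v)
	}
	return sum
}

func main() {
	stale := sumChainwork([]string{"62209952899966", "12301577519373468"})
	longest := sumChainwork([]string{"62209952899966", "62209952899966"})

	// The reorg branch runs only when the stale side has more accumulated work.
	if longest.Cmp(stale) < 0 {
		fmt.Println("stale chain has more work: perform reorg")
	}
}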
chainwork: '62209952899966' diff --git a/internal/blocktx/integration_test/fixtures/stale_block/blocktx.transactions.yaml b/internal/blocktx/integration_test/fixtures/stale_block/blocktx.transactions.yaml new file mode 100644 index 000000000..423b9b5b2 --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/stale_block/blocktx.transactions.yaml @@ -0,0 +1,12 @@ +- id: 1000001 + hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853 + inserted_at: 2023-12-10 14:00:00 + is_registered: true +- id: 1000002 + hash: 0x21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e + inserted_at: 2023-12-10 14:00:00 + is_registered: false +- id: 1000003 + hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430 + inserted_at: 2023-12-10 14:00:00 + is_registered: true diff --git a/internal/blocktx/integration_test/fixtures/blocktx.block_transactions_map.yaml b/internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.block_transactions_map.yaml similarity index 91% rename from internal/blocktx/integration_test/fixtures/blocktx.block_transactions_map.yaml rename to internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.block_transactions_map.yaml index d59dea517..5ed5b4e2e 100644 --- a/internal/blocktx/integration_test/fixtures/blocktx.block_transactions_map.yaml +++ b/internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.block_transactions_map.yaml @@ -2,7 +2,7 @@ txid: 1000003 merkle_path: merkle-path-3 inserted_at: 2023-12-10 14:00:00 -- blockid: 1004 +- blockid: 1005 txid: 1000005 merkle_path: merkle-path-5 inserted_at: 2023-12-10 14:00:00 diff --git a/internal/blocktx/integration_test/fixtures/blocktx.blocks.yaml b/internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.blocks.yaml similarity index 88% rename from internal/blocktx/integration_test/fixtures/blocktx.blocks.yaml rename to internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.blocks.yaml index 1e913f285..f3021dd39 100644 --- a/internal/blocktx/integration_test/fixtures/blocktx.blocks.yaml +++ b/internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.blocks.yaml @@ -1,15 +1,3 @@ -- inserted_at: 2023-12-15 14:00:00 - id: 1001 - hash: 0xf97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000 - prevhash: 0xb71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000 - merkleroot: 0x7f4019eb006f5333cce752df387fa8443035c22291eb771ee5b16a02b81c8483 - height: 822014 - processed_at: 2023-12-15 14:10:00 - size: 86840000 - tx_count: 23477 - status: 10 - is_longest: true - chainwork: '62209952899966' - inserted_at: 2023-12-15 14:30:00 id: 1002 hash: 0xc9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000 @@ -34,8 +22,20 @@ status: 10 is_longest: true chainwork: '62209952899966' -- inserted_at: 2023-12-15 14:50:00 +- inserted_at: 2023-12-15 14:40:00 id: 1004 + hash: 0x00000000000000000643d48201cf609b8cc50befe804194f19a7ec61cf046239 + prevhash: 0xe1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000 + merkleroot: 0x4b58b0402a84012269b124f78c91a78a814eb3c9caa03f1df1d33172b23082d1 + height: 822017 + processed_at: 2023-12-15 14:40:00 + size: 299650000 + tx_count: 62162 + status: 10 + is_longest: true + chainwork: '12301577519373468' # Higher chainwork +- inserted_at: 2023-12-15 14:50:00 + id: 1005 hash: 0x76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000 prevhash: 0xe1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000 merkleroot: 
0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 @@ -43,11 +43,11 @@ size: 8630000 processed_at: 2023-12-15 14:40:00 tx_count: 36724 - status: 10 - is_longest: true + status: 20 # STALE + is_longest: false chainwork: '62209952899966' - inserted_at: 2023-12-15 14:50:00 - id: 10052 + id: 1006 hash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde prevhash: 0x76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000 merkleroot: 0xde0753d9ce6f92e340843cbfdd11e58beff8c578956ecdec4c461b018a26b8a9 @@ -59,7 +59,7 @@ is_longest: false chainwork: '62209952899966' - inserted_at: 2023-12-15 14:50:00 - id: 1005 + id: 1007 hash: 0x00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656 prevhash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde merkleroot: 0x51f33784f6d54f1d6414fa4d8b8d6904215cb16a3fa0a8b1fe02e456a90544d4 @@ -71,7 +71,7 @@ is_longest: false chainwork: '62209952899966' - inserted_at: 2023-12-15 14:50:00 - id: 1006 + id: 1008 hash: 0x00000000000000000a5c4d27edc0178e953a5bb0ab0081e66cb30c8890484076 prevhash: 0x00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656 merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 @@ -86,7 +86,7 @@ # gap - inserted_at: 2023-12-15 14:50:00 - id: 1007 + id: 1009 hash: 0x0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c prevhash: 0xd46bf0a189927b62c8ff785d393a545093ca01af159aed771a8d94749f06c060 merkleroot: 0xda71199f8ed9203d8a765595e6c030a22e5ed8330b1abb467a82c97d7d21d512 @@ -98,7 +98,7 @@ is_longest: false chainwork: '62209952899966' - inserted_at: 2023-12-15 14:50:00 - id: 1008 + id: 1010 hash: 0x0000000000000000082131979a4e25a5101912a5f8461e18f306d23e158161cd prevhash: 0x0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c merkleroot: 0x8e3177a33d6a87785b7104f20ca345e1713ae11ec2723a41028efddabebb861b diff --git a/internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.transactions.yaml b/internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.transactions.yaml new file mode 100644 index 000000000..9989bb806 --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.transactions.yaml @@ -0,0 +1,12 @@ +- id: 1000001 + hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853 + inserted_at: 2023-12-10 14:00:00 + is_registered: true +- id: 1000003 + hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430 + inserted_at: 2023-12-10 14:00:00 + is_registered: true +- id: 1000005 + hash: 0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6 + inserted_at: 2023-12-10 14:00:00 + is_registered: true diff --git a/internal/blocktx/integration_test/helpers.go b/internal/blocktx/integration_test/helpers.go index c151f09af..9ce3fff08 100644 --- a/internal/blocktx/integration_test/helpers.go +++ b/internal/blocktx/integration_test/helpers.go @@ -1,12 +1,102 @@ package integrationtest import ( + "context" "database/sql" + "log/slog" + "os" "testing" + "github.com/bitcoin-sv/arc/internal/blocktx" + blockchain "github.com/bitcoin-sv/arc/internal/blocktx/blockchain_communication" + blocktx_p2p "github.com/bitcoin-sv/arc/internal/blocktx/blockchain_communication/p2p" + "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" + "github.com/bitcoin-sv/arc/internal/blocktx/store/postgresql" + "github.com/bitcoin-sv/arc/internal/message_queue/nats/client/nats_core" + nats_mock "github.com/bitcoin-sv/arc/internal/message_queue/nats/client/nats_core/mocks" 
testutils "github.com/bitcoin-sv/arc/internal/test_utils" + "github.com/stretchr/testify/require" + "google.golang.org/protobuf/proto" ) +func setupSut(t *testing.T, dbInfo string) (*blocktx.Processor, *blocktx_p2p.MsgHandler, *postgresql.PostgreSQL, chan *blocktx_api.TransactionBlock) { + t.Helper() + + logger := slog.New(slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{Level: slog.LevelDebug})) + + blockProcessCh := make(chan *blockchain.BlockMessage, 10) + + publishedTxsCh := make(chan *blocktx_api.TransactionBlock, 10) + + store, err := postgresql.New(dbInfo, 10, 80) + require.NoError(t, err) + + mockNatsConn := &nats_mock.NatsConnectionMock{ + PublishFunc: func(_ string, data []byte) error { + serialized := &blocktx_api.TransactionBlock{} + err := proto.Unmarshal(data, serialized) + require.NoError(t, err) + + publishedTxsCh <- serialized + return nil + }, + } + mqClient := nats_core.New(mockNatsConn, nats_core.WithLogger(logger)) + + p2pMsgHandler := blocktx_p2p.NewMsgHandler(logger, nil, blockProcessCh) + processor, err := blocktx.NewProcessor( + logger, + store, + nil, + blockProcessCh, + blocktx.WithMessageQueueClient(mqClient), + ) + require.NoError(t, err) + + return processor, p2pMsgHandler, store, publishedTxsCh +} + +func getPublishedTxs(publishedTxsCh chan *blocktx_api.TransactionBlock) []*blocktx_api.TransactionBlock { + publishedTxs := make([]*blocktx_api.TransactionBlock, 0) + + for { + select { + case tx := <-publishedTxsCh: + publishedTxs = append(publishedTxs, tx) + default: + return publishedTxs + } + } +} + func pruneTables(t *testing.T, db *sql.DB) { + t.Helper() testutils.PruneTables(t, db, "blocktx.blocks") + testutils.PruneTables(t, db, "blocktx.transactions") + testutils.PruneTables(t, db, "blocktx.block_transactions_map") +} + +func verifyBlock(t *testing.T, store *postgresql.PostgreSQL, hashStr string, height uint64, status blocktx_api.Status) { + t.Helper() + hash := testutils.RevChainhash(t, hashStr) + block, err := store.GetBlock(context.Background(), hash) + require.NoError(t, err) + require.Equal(t, height, block.Height) + require.Equal(t, status, block.Status) +} + +func verifyTxs(t *testing.T, expectedTxs []*blocktx_api.TransactionBlock, publishedTxs []*blocktx_api.TransactionBlock) { + t.Helper() + + strippedTxs := make([]*blocktx_api.TransactionBlock, len(publishedTxs)) + for i, tx := range publishedTxs { + strippedTxs[i] = &blocktx_api.TransactionBlock{ + BlockHash: tx.BlockHash, + BlockHeight: tx.BlockHeight, + TransactionHash: tx.TransactionHash, + BlockStatus: tx.BlockStatus, + } + } + + require.ElementsMatch(t, expectedTxs, strippedTxs) } diff --git a/internal/blocktx/integration_test/reorg_integration_test.go b/internal/blocktx/integration_test/reorg_integration_test.go index be4a66848..425547e8a 100644 --- a/internal/blocktx/integration_test/reorg_integration_test.go +++ b/internal/blocktx/integration_test/reorg_integration_test.go @@ -29,21 +29,14 @@ package integrationtest // 15. 
Verification of reorg - checking if statuses are correctly switched (for blocks and for transactions) import ( - "context" "database/sql" "log" - "log/slog" "os" "testing" "time" - "github.com/bitcoin-sv/arc/internal/blocktx" blockchain "github.com/bitcoin-sv/arc/internal/blocktx/blockchain_communication" - blocktx_p2p "github.com/bitcoin-sv/arc/internal/blocktx/blockchain_communication/p2p" "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" - "github.com/bitcoin-sv/arc/internal/blocktx/store/postgresql" - "github.com/bitcoin-sv/arc/internal/message_queue/nats/client/nats_core" - nats_mock "github.com/bitcoin-sv/arc/internal/message_queue/nats/client/nats_core/mocks" testutils "github.com/bitcoin-sv/arc/internal/test_utils" _ "github.com/golang-migrate/migrate/v4/source/file" _ "github.com/lib/pq" @@ -52,7 +45,6 @@ import ( "github.com/libsv/go-p2p/wire" "github.com/ory/dockertest/v3" "github.com/stretchr/testify/require" - "google.golang.org/protobuf/proto" ) const migrationsPath = "file://../store/postgresql/migrations" @@ -97,338 +89,353 @@ func testmain(m *testing.M) int { return m.Run() } -const ( - blockHash822011 = "bf9be09b345cc2d904b59951cc8a2ed452d8d143e2e25cde64058270fb3a667a" - - blockHash822014StartOfChain = "f97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000" - blockHash822015 = "c9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000" - blockHash822016 = "e1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000" - blockHash822017 = "76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000" - - blockHash822015Fork = "82471bbf045ab13825a245b37de71d77ec12513b37e2524ec11551d18c19f7c3" - blockHash822016Fork = "032c3688bc7536b2d787f3a196b1145a09bf33183cd1448ff6b1a9dfbb022db8" - - blockHash822018Orphan = "000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde" - blockHash822019Orphan = "00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656" - blockHash822020Orphan = "00000000000000000a5c4d27edc0178e953a5bb0ab0081e66cb30c8890484076" - blockHash822021 = "d46bf0a189927b62c8ff785d393a545093ca01af159aed771a8d94749f06c060" - blockHash822022Orphan = "0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c" - blockHash822023Orphan = "0000000000000000082131979a4e25a5101912a5f8461e18f306d23e158161cd" - blockHash822024 = "5d60cfea9a7ef96554768150716788e9643eaafd5a1979636777a6a5835b07c6" - - txhash822015 = "cd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853" - txhash822015Competing = "b16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430" - txhash822016 = "2ff4430eb883c6f6c0640a5d716b2d107bbc0efa5aeaa237aec796d4686b0a8f" - txhash822017 = "ece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6" -) - func TestReorg(t *testing.T) { if testing.Short() { t.Skip("skipping integration test") } - defer pruneTables(t, dbConn) - - logger := slog.New(slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{Level: slog.LevelDebug})) - - var blockRequestCh chan blocktx_p2p.BlockRequest = nil // nolint: revive - blockProcessCh := make(chan *blockchain.BlockMessage, 10) - - blocktxStore, err := postgresql.New(dbInfo, 10, 80) - require.NoError(t, err) - - publishedTxs := make([]*blocktx_api.TransactionBlock, 0) - - mockNatsConn := &nats_mock.NatsConnectionMock{ - PublishFunc: func(_ string, data []byte) error { - serialized := &blocktx_api.TransactionBlock{} - err := proto.Unmarshal(data, serialized) - require.NoError(t, err) - - publishedTxs = append(publishedTxs, serialized) - return nil - }, - } - 
mqClient := nats_core.New(mockNatsConn, nats_core.WithLogger(logger)) - - p2pMsgHandler := blocktx_p2p.NewMsgHandler(logger, blockRequestCh, blockProcessCh) - processor, err := blocktx.NewProcessor( - logger, - blocktxStore, - blockRequestCh, - blockProcessCh, - blocktx.WithMessageQueueClient(mqClient), - ) - require.NoError(t, err) - - processor.StartBlockProcessing() - - testHandleBlockOnEmptyDatabase(t, p2pMsgHandler, blocktxStore) - - // only load fixtures at this point - testutils.LoadFixtures(t, dbConn, "fixtures") - - testHandleStaleBlock(t, p2pMsgHandler, blocktxStore) - // verify the no transaction was published to metamorph - require.Len(t, publishedTxs, 0) - - expectedTxs := testHandleReorg(t, p2pMsgHandler, blocktxStore) - verifyTxs(t, expectedTxs, publishedTxs) - publishedTxs = publishedTxs[:0] - - testHandleStaleOrphans(t, p2pMsgHandler, blocktxStore) - require.Len(t, publishedTxs, 0) - - expectedTxs = testHandleOrphansReorg(t, p2pMsgHandler, blocktxStore) - verifyTxs(t, expectedTxs, publishedTxs) -} - -func testHandleBlockOnEmptyDatabase(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store *postgresql.PostgreSQL) { - // test for empty database edge case before inserting fixtures - prevBlockHash := testutils.RevChainhash(t, "00000000000000000a00c377b260a3219b0c314763f486bc363df7aa7e22ad72") - txHash, err := chainhash.NewHashFromStr("be181e91217d5f802f695e52144078f8dfbe51b8a815c3d6fb48c0d853ec683b") - require.NoError(t, err) - merkleRoot, err := chainhash.NewHashFromStr("be181e91217d5f802f695e52144078f8dfbe51b8a815c3d6fb48c0d853ec683b") - require.NoError(t, err) - - // should become LONGEST - blockMessage := &blockchain.BlockMessage{ - Header: &wire.BlockHeader{ - Version: 541065216, - PrevBlock: *prevBlockHash, // NON-existent in the db - MerkleRoot: *merkleRoot, - Bits: 0x1d00ffff, - }, - Height: uint64(822011), - TransactionHashes: []*chainhash.Hash{txHash}, - } - - p2pMsgHandler.OnReceive(blockMessage, nil) - - // Allow DB to process the block - time.Sleep(200 * time.Millisecond) - - verifyBlock(t, store, blockHash822011, 822011, blocktx_api.Status_LONGEST) -} - -func testHandleStaleBlock(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store *postgresql.PostgreSQL) { - prevBlockHash := testutils.RevChainhash(t, blockHash822014StartOfChain) - txHash := testutils.RevChainhash(t, txhash822015) - txHash2 := testutils.RevChainhash(t, txhash822015Competing) // should not be published - is already in the longest chain - treeStore := bc.BuildMerkleTreeStoreChainHash([]*chainhash.Hash{txHash, txHash2}) - merkleRoot := treeStore[len(treeStore)-1] - - // should become STALE - blockMessage := &blockchain.BlockMessage{ - Header: &wire.BlockHeader{ - Version: 541065216, - PrevBlock: *prevBlockHash, // block with status LONGEST at height 822014 - MerkleRoot: *merkleRoot, - Bits: 0x1d00ffff, // chainwork: "4295032833" lower than the competing block - }, - Height: uint64(822015), // competing block already exists at this height - TransactionHashes: []*chainhash.Hash{txHash, txHash2}, - } - - p2pMsgHandler.OnReceive(blockMessage, nil) - // Allow DB to process the block - time.Sleep(200 * time.Millisecond) - - verifyBlock(t, store, blockHash822015Fork, 822015, blocktx_api.Status_STALE) -} - -func testHandleReorg(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store *postgresql.PostgreSQL) []*blocktx_api.TransactionBlock { - txHash := testutils.RevChainhash(t, txhash822016) - txHash2 := testutils.RevChainhash(t, 
"ee76f5b746893d3e6ae6a14a15e464704f4ebd601537820933789740acdcf6aa") - treeStore := bc.BuildMerkleTreeStoreChainHash([]*chainhash.Hash{txHash, txHash2}) - merkleRoot := treeStore[len(treeStore)-1] - prevhash := testutils.RevChainhash(t, blockHash822015Fork) - - // should become LONGEST - // reorg should happen - blockMessage := &blockchain.BlockMessage{ - Header: &wire.BlockHeader{ - Version: 541065216, - PrevBlock: *prevhash, // block with status STALE at height 822015 - MerkleRoot: *merkleRoot, - Bits: 0x1a05db8b, // chainwork: "12301577519373468" higher than the competing chain - }, - Height: uint64(822016), // competing block already exists at this height - TransactionHashes: []*chainhash.Hash{txHash, txHash2}, - } - blockHash := blockMessage.Header.BlockHash() - - p2pMsgHandler.OnReceive(blockMessage, nil) - // Allow DB to process the block and perform reorg - time.Sleep(1 * time.Second) - - // verify that reorg happened - verifyBlock(t, store, blockHash822016Fork, 822016, blocktx_api.Status_LONGEST) - verifyBlock(t, store, blockHash822015Fork, 822015, blocktx_api.Status_LONGEST) - - verifyBlock(t, store, blockHash822015, 822015, blocktx_api.Status_STALE) - verifyBlock(t, store, blockHash822016, 822016, blocktx_api.Status_STALE) - verifyBlock(t, store, blockHash822017, 822017, blocktx_api.Status_STALE) - - verifyBlock(t, store, blockHash822014StartOfChain, 822014, blocktx_api.Status_LONGEST) - verifyBlock(t, store, blockHash822019Orphan, 822019, blocktx_api.Status_ORPHANED) - - previouslyLongestBlockHash := testutils.RevChainhash(t, blockHash822017) - - expectedTxs := []*blocktx_api.TransactionBlock{ - { // previously in stale chain - BlockHash: prevhash[:], - BlockHeight: 822015, - TransactionHash: testutils.RevChainhash(t, txhash822015)[:], - BlockStatus: blocktx_api.Status_LONGEST, - }, - { // previously in longest chain - also in stale - should have blockdata updated - BlockHash: prevhash[:], - BlockHeight: 822015, - TransactionHash: testutils.RevChainhash(t, txhash822015Competing)[:], - BlockStatus: blocktx_api.Status_LONGEST, - }, - { // newly mined from stale block that became longest after reorg - BlockHash: blockHash[:], - BlockHeight: 822016, - TransactionHash: txHash[:], - BlockStatus: blocktx_api.Status_LONGEST, - }, - { // previously longest chain - not found in the new longest chain - BlockHash: previouslyLongestBlockHash[:], - BlockHeight: 822017, - TransactionHash: testutils.RevChainhash(t, txhash822017)[:], - BlockStatus: blocktx_api.Status_STALE, - }, - } - - return expectedTxs -} - -func testHandleStaleOrphans(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store *postgresql.PostgreSQL) { - txHash := testutils.RevChainhash(t, "de0753d9ce6f92e340843cbfdd11e58beff8c578956ecdec4c461b018a26b8a9") - merkleRoot := testutils.RevChainhash(t, "de0753d9ce6f92e340843cbfdd11e58beff8c578956ecdec4c461b018a26b8a9") - prevhash := testutils.RevChainhash(t, blockHash822020Orphan) - - // should become STALE - blockMessage := &blockchain.BlockMessage{ - Header: &wire.BlockHeader{ - Version: 541065216, - PrevBlock: *prevhash, // block with status ORPHANED at height 822020 - connected to STALE chain - MerkleRoot: *merkleRoot, - Bits: 0x1d00ffff, // chainwork: "4295032833" lower than the competing chain - }, - Height: uint64(822021), - TransactionHashes: []*chainhash.Hash{txHash}, - } - - p2pMsgHandler.OnReceive(blockMessage, nil) - // Allow DB to process the block and find orphans - time.Sleep(1 * time.Second) - - // verify that the block and orphans have STALE status - 
verifyBlock(t, store, blockHash822018Orphan, 822018, blocktx_api.Status_STALE) - verifyBlock(t, store, blockHash822019Orphan, 822019, blocktx_api.Status_STALE) - verifyBlock(t, store, blockHash822020Orphan, 822020, blocktx_api.Status_STALE) - verifyBlock(t, store, blockHash822021, 822021, blocktx_api.Status_STALE) + t.Run("block on empty database", func(t *testing.T) { + defer pruneTables(t, dbConn) + + processor, p2pMsgHandler, store, _ := setupSut(t, dbInfo) + + const blockHash822011 = "bf9be09b345cc2d904b59951cc8a2ed452d8d143e2e25cde64058270fb3a667a" + + blockHash := testutils.RevChainhash(t, blockHash822011) + prevBlockHash := testutils.RevChainhash(t, "00000000000000000a00c377b260a3219b0c314763f486bc363df7aa7e22ad72") + txHash, err := chainhash.NewHashFromStr("be181e91217d5f802f695e52144078f8dfbe51b8a815c3d6fb48c0d853ec683b") + require.NoError(t, err) + merkleRoot, err := chainhash.NewHashFromStr("be181e91217d5f802f695e52144078f8dfbe51b8a815c3d6fb48c0d853ec683b") + require.NoError(t, err) + + // should become LONGEST + blockMessage := &blockchain.BlockMessage{ + Hash: blockHash, + Header: &wire.BlockHeader{ + Version: 541065216, + PrevBlock: *prevBlockHash, // NON-existent in the db + MerkleRoot: *merkleRoot, + Bits: 0x1d00ffff, + }, + Height: uint64(822011), + TransactionHashes: []*chainhash.Hash{txHash}, + } - // verify that the blocks after the next gap are still orphans - verifyBlock(t, store, blockHash822022Orphan, 822022, blocktx_api.Status_ORPHANED) - verifyBlock(t, store, blockHash822023Orphan, 822023, blocktx_api.Status_ORPHANED) -} + processor.StartBlockProcessing() + p2pMsgHandler.OnReceive(blockMessage, nil) + + // Allow DB to process the block + time.Sleep(200 * time.Millisecond) + + verifyBlock(t, store, blockHash822011, 822011, blocktx_api.Status_LONGEST) + }) + + t.Run("stale block", func(t *testing.T) { + defer pruneTables(t, dbConn) + testutils.LoadFixtures(t, dbConn, "fixtures/stale_block") + + processor, p2pMsgHandler, store, publishedTxsCh := setupSut(t, dbInfo) + + const ( + blockHash822014StartOfChain = "f97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000" + blockHash822015 = "c9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000" + blockHash822015Fork = "82471bbf045ab13825a245b37de71d77ec12513b37e2524ec11551d18c19f7c3" + txhash822015 = "cd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853" + txhash822015Competing = "b16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430" + ) + + blockHash := testutils.RevChainhash(t, blockHash822015Fork) + prevBlockHash := testutils.RevChainhash(t, blockHash822014StartOfChain) + txHash := testutils.RevChainhash(t, txhash822015) + txHash2 := testutils.RevChainhash(t, txhash822015Competing) // should not be published - is already in the longest chain + treeStore := bc.BuildMerkleTreeStoreChainHash([]*chainhash.Hash{txHash, txHash2}) + merkleRoot := treeStore[len(treeStore)-1] + + // should become STALE + blockMessage := &blockchain.BlockMessage{ + Hash: blockHash, + Header: &wire.BlockHeader{ + Version: 541065216, + PrevBlock: *prevBlockHash, // block with status LONGEST at height 822014 + MerkleRoot: *merkleRoot, + Bits: 0x1d00ffff, // chainwork: "4295032833" lower than the competing block + }, + Height: uint64(822015), // competing block already exists at this height + TransactionHashes: []*chainhash.Hash{txHash, txHash2}, + } -func testHandleOrphansReorg(t *testing.T, p2pMsgHandler *blocktx_p2p.MsgHandler, store *postgresql.PostgreSQL) []*blocktx_api.TransactionBlock { - txHash := 
testutils.RevChainhash(t, "3e15f823a7de25c26ce9001d4814a6f0ebc915a1ca4f1ba9cfac720bd941c39c") - merkleRoot := testutils.RevChainhash(t, "3e15f823a7de25c26ce9001d4814a6f0ebc915a1ca4f1ba9cfac720bd941c39c") - prevhash := testutils.RevChainhash(t, blockHash822023Orphan) - - // should become LONGEST - // reorg should happen - blockMessage := &blockchain.BlockMessage{ - Header: &wire.BlockHeader{ - Version: 541065216, - PrevBlock: *prevhash, // block with status ORPHANED at height 822023 - connected to STALE chain - MerkleRoot: *merkleRoot, - Bits: 0x1d00ffff, // chainwork: "4295032833" - // the sum of orphan chain has a higher chainwork and should cause a reorg - }, - Height: uint64(822024), - TransactionHashes: []*chainhash.Hash{txHash}, - } + processor.StartBlockProcessing() + p2pMsgHandler.OnReceive(blockMessage, nil) + + // Allow DB to process the block + time.Sleep(200 * time.Millisecond) + + verifyBlock(t, store, blockHash822015Fork, 822015, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822015, 822015, blocktx_api.Status_LONGEST) + + publishedTxs := getPublishedTxs(publishedTxsCh) + + // verify the no transaction was published to metamorph + require.Len(t, publishedTxs, 0) + }) + + t.Run("reorg", func(t *testing.T) { + defer pruneTables(t, dbConn) + testutils.LoadFixtures(t, dbConn, "fixtures/reorg") + + processor, p2pMsgHandler, store, publishedTxsCh := setupSut(t, dbInfo) + + const ( + blockHash822015Fork = "82471bbf045ab13825a245b37de71d77ec12513b37e2524ec11551d18c19f7c3" + blockHash822016Fork = "032c3688bc7536b2d787f3a196b1145a09bf33183cd1448ff6b1a9dfbb022db8" + + blockHash822014StartOfChain = "f97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000" + blockHash822015 = "c9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000" + blockHash822016 = "e1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000" + blockHash822017 = "76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000" + blockHash822018Orphan = "000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde" + + txhash822015 = "cd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853" + txhash822015Competing = "b16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430" + txhash822016 = "2ff4430eb883c6f6c0640a5d716b2d107bbc0efa5aeaa237aec796d4686b0a8f" + txhash822017 = "ece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6" + ) + + blockHash := testutils.RevChainhash(t, blockHash822016Fork) + txHash := testutils.RevChainhash(t, txhash822016) + txHash2 := testutils.RevChainhash(t, "ee76f5b746893d3e6ae6a14a15e464704f4ebd601537820933789740acdcf6aa") + treeStore := bc.BuildMerkleTreeStoreChainHash([]*chainhash.Hash{txHash, txHash2}) + merkleRoot := treeStore[len(treeStore)-1] + prevhash := testutils.RevChainhash(t, blockHash822015Fork) + + // should become LONGEST + // reorg should happen + blockMessage := &blockchain.BlockMessage{ + Hash: blockHash, + Header: &wire.BlockHeader{ + Version: 541065216, + PrevBlock: *prevhash, // block with status STALE at height 822015 + MerkleRoot: *merkleRoot, + Bits: 0x1a05db8b, // chainwork: "12301577519373468" higher than the competing chain + }, + Height: uint64(822016), // competing block already exists at this height + TransactionHashes: []*chainhash.Hash{txHash, txHash2}, + } - p2pMsgHandler.OnReceive(blockMessage, nil) - // Allow DB to process the block, find orphans and perform reorg - time.Sleep(2 * time.Second) - - // verify that the reorg happened - verifyBlock(t, store, blockHash822014StartOfChain, 822014, 
blocktx_api.Status_LONGEST) - verifyBlock(t, store, blockHash822015, 822015, blocktx_api.Status_LONGEST) - verifyBlock(t, store, blockHash822016, 822016, blocktx_api.Status_LONGEST) - verifyBlock(t, store, blockHash822017, 822017, blocktx_api.Status_LONGEST) - verifyBlock(t, store, blockHash822018Orphan, 822018, blocktx_api.Status_LONGEST) - verifyBlock(t, store, blockHash822019Orphan, 822019, blocktx_api.Status_LONGEST) - verifyBlock(t, store, blockHash822020Orphan, 822020, blocktx_api.Status_LONGEST) - verifyBlock(t, store, blockHash822021, 822021, blocktx_api.Status_LONGEST) - verifyBlock(t, store, blockHash822022Orphan, 822022, blocktx_api.Status_LONGEST) - verifyBlock(t, store, blockHash822023Orphan, 822023, blocktx_api.Status_LONGEST) - verifyBlock(t, store, blockHash822024, 822024, blocktx_api.Status_LONGEST) - - verifyBlock(t, store, blockHash822015Fork, 822015, blocktx_api.Status_STALE) - verifyBlock(t, store, blockHash822016Fork, 822016, blocktx_api.Status_STALE) - - bh822015 := testutils.RevChainhash(t, blockHash822015) - bh822015Fork := testutils.RevChainhash(t, blockHash822015Fork) - bh822016Fork := testutils.RevChainhash(t, blockHash822016Fork) - bh822017 := testutils.RevChainhash(t, blockHash822017) - - expectedTxs := []*blocktx_api.TransactionBlock{ - { // in stale chain - BlockHash: bh822015Fork[:], - BlockHeight: 822015, - TransactionHash: testutils.RevChainhash(t, txhash822015)[:], - BlockStatus: blocktx_api.Status_STALE, - }, - { // in both chains - should have blockdata updated - BlockHash: bh822015[:], - BlockHeight: 822015, - TransactionHash: testutils.RevChainhash(t, txhash822015Competing)[:], - BlockStatus: blocktx_api.Status_LONGEST, - }, - { // in stale chain - BlockHash: bh822016Fork[:], - BlockHeight: 822016, - TransactionHash: testutils.RevChainhash(t, txhash822016)[:], - BlockStatus: blocktx_api.Status_STALE, - }, - { // in now longest chain - BlockHash: bh822017[:], - BlockHeight: 822017, - TransactionHash: testutils.RevChainhash(t, txhash822017)[:], - BlockStatus: blocktx_api.Status_LONGEST, - }, - } + processor.StartBlockProcessing() + p2pMsgHandler.OnReceive(blockMessage, nil) + + // Allow DB to process the block and perform reorg + time.Sleep(1 * time.Second) + + // verify that reorg happened + verifyBlock(t, store, blockHash822016Fork, 822016, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822015Fork, 822015, blocktx_api.Status_LONGEST) + + verifyBlock(t, store, blockHash822014StartOfChain, 822014, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822015, 822015, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822016, 822016, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822017, 822017, blocktx_api.Status_STALE) + + verifyBlock(t, store, blockHash822018Orphan, 822018, blocktx_api.Status_ORPHANED) + + previouslyLongestBlockHash := testutils.RevChainhash(t, blockHash822017) + + expectedTxs := []*blocktx_api.TransactionBlock{ + { // previously in stale chain + BlockHash: prevhash[:], + BlockHeight: 822015, + TransactionHash: testutils.RevChainhash(t, txhash822015)[:], + BlockStatus: blocktx_api.Status_LONGEST, + }, + { // previously in longest chain - also in stale - should have blockdata updated + BlockHash: prevhash[:], + BlockHeight: 822015, + TransactionHash: testutils.RevChainhash(t, txhash822015Competing)[:], + BlockStatus: blocktx_api.Status_LONGEST, + }, + { // newly mined from stale block that became longest after reorg + BlockHash: blockHash[:], + BlockHeight: 822016, + TransactionHash: txHash[:], 
+ BlockStatus: blocktx_api.Status_LONGEST, + }, + { // previously longest chain - not found in the new longest chain + BlockHash: previouslyLongestBlockHash[:], + BlockHeight: 822017, + TransactionHash: testutils.RevChainhash(t, txhash822017)[:], + BlockStatus: blocktx_api.Status_STALE, + }, + } - return expectedTxs -} + publishedTxs := getPublishedTxs(publishedTxsCh) + + verifyTxs(t, expectedTxs, publishedTxs) + }) + + t.Run("stale orphans", func(t *testing.T) { + defer pruneTables(t, dbConn) + testutils.LoadFixtures(t, dbConn, "fixtures/stale_orphans") + + processor, p2pMsgHandler, store, publishedTxsCh := setupSut(t, dbInfo) + + const ( + blockHash822017Longest = "00000000000000000643d48201cf609b8cc50befe804194f19a7ec61cf046239" + blockHash822017Stale = "76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000" + blockHash822018Orphan = "000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde" + blockHash822019Orphan = "00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656" + blockHash822020Orphan = "00000000000000000a5c4d27edc0178e953a5bb0ab0081e66cb30c8890484076" + blockHash822021 = "d46bf0a189927b62c8ff785d393a545093ca01af159aed771a8d94749f06c060" + blockHash822022Orphan = "0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c" + blockHash822023Orphan = "0000000000000000082131979a4e25a5101912a5f8461e18f306d23e158161cd" + ) + + blockHash := testutils.RevChainhash(t, blockHash822021) + txHash := testutils.RevChainhash(t, "de0753d9ce6f92e340843cbfdd11e58beff8c578956ecdec4c461b018a26b8a9") + merkleRoot := testutils.RevChainhash(t, "de0753d9ce6f92e340843cbfdd11e58beff8c578956ecdec4c461b018a26b8a9") + prevhash := testutils.RevChainhash(t, blockHash822020Orphan) + + // should become STALE + blockMessage := &blockchain.BlockMessage{ + Hash: blockHash, + Header: &wire.BlockHeader{ + Version: 541065216, + PrevBlock: *prevhash, // block with status ORPHANED at height 822020 - connected to STALE chain + MerkleRoot: *merkleRoot, + Bits: 0x1d00ffff, // chainwork: "4295032833" lower than the competing chain + }, + Height: uint64(822021), + TransactionHashes: []*chainhash.Hash{txHash}, + } -func verifyBlock(t *testing.T, store *postgresql.PostgreSQL, hashStr string, height uint64, status blocktx_api.Status) { - hash := testutils.RevChainhash(t, hashStr) - block, err := store.GetBlock(context.Background(), hash) - require.NoError(t, err) - require.Equal(t, height, block.Height) - require.Equal(t, status, block.Status) -} + processor.StartBlockProcessing() + p2pMsgHandler.OnReceive(blockMessage, nil) + // Allow DB to process the block and find orphans + time.Sleep(1 * time.Second) + + // verify that the block and orphans have STALE status + verifyBlock(t, store, blockHash822017Stale, 822017, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822018Orphan, 822018, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822019Orphan, 822019, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822020Orphan, 822020, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822021, 822021, blocktx_api.Status_STALE) + + // verify that the longest chain is still the same + verifyBlock(t, store, blockHash822017Longest, 822017, blocktx_api.Status_LONGEST) + + // verify that the blocks after the next gap are still orphans + verifyBlock(t, store, blockHash822022Orphan, 822022, blocktx_api.Status_ORPHANED) + verifyBlock(t, store, blockHash822023Orphan, 822023, blocktx_api.Status_ORPHANED) + + publishedTxs := getPublishedTxs(publishedTxsCh) + + // verify no 
transaction was published + require.Len(t, publishedTxs, 0) + }) + + t.Run("reorg orphans", func(t *testing.T) { + defer pruneTables(t, dbConn) + testutils.LoadFixtures(t, dbConn, "fixtures/reorg_orphans") + + processor, p2pMsgHandler, store, publishedTxsCh := setupSut(t, dbInfo) + + const ( + blockHash822014StartOfChain = "f97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000" + blockHash822015 = "c9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000" + blockHash822016 = "e1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000" + blockHash822017 = "76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000" + + blockHash822015Fork = "82471bbf045ab13825a245b37de71d77ec12513b37e2524ec11551d18c19f7c3" + blockHash822016Fork = "032c3688bc7536b2d787f3a196b1145a09bf33183cd1448ff6b1a9dfbb022db8" + + blockHash822018Orphan = "000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde" + blockHash822019Orphan = "00000000000000000364332e1bbd61dc928141b9469c5daea26a4b506efc9656" + blockHash822020Orphan = "00000000000000000a5c4d27edc0178e953a5bb0ab0081e66cb30c8890484076" + blockHash822021 = "d46bf0a189927b62c8ff785d393a545093ca01af159aed771a8d94749f06c060" + blockHash822022Orphan = "0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c" + blockHash822023Orphan = "0000000000000000082131979a4e25a5101912a5f8461e18f306d23e158161cd" + + txhash822015 = "cd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853" + txhash822015Competing = "b16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430" + txhash822016 = "2ff4430eb883c6f6c0640a5d716b2d107bbc0efa5aeaa237aec796d4686b0a8f" + txhash822017 = "ece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6" + ) + + blockHash := testutils.RevChainhash(t, blockHash822021) + prevhash := testutils.RevChainhash(t, blockHash822020Orphan) + txHash := testutils.RevChainhash(t, "3e15f823a7de25c26ce9001d4814a6f0ebc915a1ca4f1ba9cfac720bd941c39c") + merkleRoot := testutils.RevChainhash(t, "3e15f823a7de25c26ce9001d4814a6f0ebc915a1ca4f1ba9cfac720bd941c39c") + + // should become LONGEST + // reorg should happen + blockMessage := &blockchain.BlockMessage{ + Hash: blockHash, + Header: &wire.BlockHeader{ + Version: 541065216, + PrevBlock: *prevhash, // block with status ORPHANED at height 822020 - connected to STALE chain + MerkleRoot: *merkleRoot, + Bits: 0x1d00ffff, // chainwork: "4295032833" lower than the competing chain + // the sum of orphan chain has a higher chainwork and should cause a reorg + }, + Height: uint64(822021), + TransactionHashes: []*chainhash.Hash{txHash}, + } -func verifyTxs(t *testing.T, expectedTxs []*blocktx_api.TransactionBlock, publishedTxs []*blocktx_api.TransactionBlock) { - strippedTxs := make([]*blocktx_api.TransactionBlock, len(publishedTxs)) - for i, tx := range publishedTxs { - strippedTxs[i] = &blocktx_api.TransactionBlock{ - BlockHash: tx.BlockHash, - BlockHeight: tx.BlockHeight, - TransactionHash: tx.TransactionHash, - BlockStatus: tx.BlockStatus, + processor.StartBlockProcessing() + p2pMsgHandler.OnReceive(blockMessage, nil) + // Allow DB to process the block, find orphans and perform reorg + time.Sleep(2 * time.Second) + + // verify that the reorg happened + verifyBlock(t, store, blockHash822014StartOfChain, 822014, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822015, 822015, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822016, 822016, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822017, 822017, 
blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822018Orphan, 822018, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822019Orphan, 822019, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822020Orphan, 822020, blocktx_api.Status_LONGEST) + verifyBlock(t, store, blockHash822021, 822021, blocktx_api.Status_LONGEST) + + verifyBlock(t, store, blockHash822015Fork, 822015, blocktx_api.Status_STALE) + verifyBlock(t, store, blockHash822016Fork, 822016, blocktx_api.Status_STALE) + + verifyBlock(t, store, blockHash822022Orphan, 822022, blocktx_api.Status_ORPHANED) + verifyBlock(t, store, blockHash822023Orphan, 822023, blocktx_api.Status_ORPHANED) + + bh822015 := testutils.RevChainhash(t, blockHash822015) + bh822015Fork := testutils.RevChainhash(t, blockHash822015Fork) + bh822016Fork := testutils.RevChainhash(t, blockHash822016Fork) + bh822017 := testutils.RevChainhash(t, blockHash822017) + + expectedTxs := []*blocktx_api.TransactionBlock{ + { // in stale chain + BlockHash: bh822015Fork[:], + BlockHeight: 822015, + TransactionHash: testutils.RevChainhash(t, txhash822015)[:], + BlockStatus: blocktx_api.Status_STALE, + }, + { // in both chains - should have blockdata updated + BlockHash: bh822015[:], + BlockHeight: 822015, + TransactionHash: testutils.RevChainhash(t, txhash822015Competing)[:], + BlockStatus: blocktx_api.Status_LONGEST, + }, + { // in stale chain + BlockHash: bh822016Fork[:], + BlockHeight: 822016, + TransactionHash: testutils.RevChainhash(t, txhash822016)[:], + BlockStatus: blocktx_api.Status_STALE, + }, + { // in now longest chain + BlockHash: bh822017[:], + BlockHeight: 822017, + TransactionHash: testutils.RevChainhash(t, txhash822017)[:], + BlockStatus: blocktx_api.Status_LONGEST, + }, } - } - require.ElementsMatch(t, expectedTxs, strippedTxs) + publishedTxs := getPublishedTxs(publishedTxsCh) + + verifyTxs(t, expectedTxs, publishedTxs) + }) } diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index f9bf98496..3d529750f 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -216,27 +216,26 @@ func (p *Processor) StartBlockProcessing() { return case blockMsg := <-p.blockProcessCh: var err error - blockHash := blockMsg.Header.BlockHash() timeStart := time.Now() - p.logger.Info("received block", slog.String("hash", blockHash.String())) + p.logger.Info("received block", slog.String("hash", blockMsg.Hash.String())) err = p.processBlock(blockMsg) if err != nil { - p.logger.Error("block processing failed", slog.String("hash", blockHash.String()), slog.String("err", err.Error())) - p.unlockBlock(p.ctx, &blockHash) + p.logger.Error("block processing failed", slog.String("hash", blockMsg.Hash.String()), slog.String("err", err.Error())) + p.unlockBlock(p.ctx, blockMsg.Hash) continue } - storeErr := p.store.MarkBlockAsDone(p.ctx, &blockHash, blockMsg.Size, uint64(len(blockMsg.TransactionHashes))) + storeErr := p.store.MarkBlockAsDone(p.ctx, blockMsg.Hash, blockMsg.Size, uint64(len(blockMsg.TransactionHashes))) if storeErr != nil { - p.logger.Error("unable to mark block as processed", slog.String("hash", blockHash.String()), slog.String("err", storeErr.Error())) - p.unlockBlock(p.ctx, &blockHash) + p.logger.Error("unable to mark block as processed", slog.String("hash", blockMsg.Hash.String()), slog.String("err", storeErr.Error())) + p.unlockBlock(p.ctx, blockMsg.Hash) continue } // add the total block processing time to the stats - p.logger.Info("Processed block", slog.String("hash", blockHash.String()), 
slog.Int("txs", len(blockMsg.TransactionHashes)), slog.String("duration", time.Since(timeStart).String())) + p.logger.Info("Processed block", slog.String("hash", blockMsg.Hash.String()), slog.Int("txs", len(blockMsg.TransactionHashes)), slog.String("duration", time.Since(timeStart).String())) } } }() From 8600b2deb167e7e50b255dd841b8ba7773188ad6 Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Wed, 4 Dec 2024 18:14:45 +0100 Subject: [PATCH 33/36] fix: fix tests --- internal/blocktx/processor.go | 2 +- internal/blocktx/processor_test.go | 2 ++ internal/blocktx/store/postgresql/postgres_test.go | 3 ++- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index 3d529750f..5a85ee90c 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -547,7 +547,7 @@ func (p *Processor) assignBlockStatus(ctx context.Context, block *blocktx_api.Bl var longestTipExists bool longestTipExists, err = p.longestTipExists(ctx) if err != nil { - p.logger.Error("unable to verify the longest tip existance in db", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) + p.logger.Error("unable to verify the longest tip existence in db", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) return err } diff --git a/internal/blocktx/processor_test.go b/internal/blocktx/processor_test.go index 28115d28e..dc531767b 100644 --- a/internal/blocktx/processor_test.go +++ b/internal/blocktx/processor_test.go @@ -207,6 +207,7 @@ func TestHandleBlock(t *testing.T) { require.NoError(t, err) blockMessage := &blockchain.BlockMessage{ + Hash: testdata.Block1Hash, Header: &wire.BlockHeader{ Version: 541065216, PrevBlock: tc.prevBlockHash, @@ -439,6 +440,7 @@ func TestHandleBlockReorgAndOrphans(t *testing.T) { require.NoError(t, err) blockMessage := &blockchain.BlockMessage{ + Hash: testdata.Block1Hash, Header: &wire.BlockHeader{ Version: 541065216, MerkleRoot: *merkleRoot, diff --git a/internal/blocktx/store/postgresql/postgres_test.go b/internal/blocktx/store/postgresql/postgres_test.go index 2c47ef8bf..13d686959 100644 --- a/internal/blocktx/store/postgresql/postgres_test.go +++ b/internal/blocktx/store/postgresql/postgres_test.go @@ -265,6 +265,7 @@ func TestPostgresDB(t *testing.T) { t.Run("get stats for block geps", func(t *testing.T) { // given prepareDb(t, postgresDB, "fixtures/get_block_gaps") + const expectedGaps = int64(2007) // range (2016) - already existing blocks (9) = gaps (2007) // when actualBlockGaps, err := postgresDB.GetStats(ctx) @@ -272,7 +273,7 @@ func TestPostgresDB(t *testing.T) { // then require.NoError(t, err) // as we have 6 blocks in the range only it should return remaining 2006 (BlockDistance) - 6 - require.Equal(t, int64(2010), actualBlockGaps.CurrentNumOfBlockGaps) + require.Equal(t, expectedGaps, actualBlockGaps.CurrentNumOfBlockGaps) }) t.Run("get longest chain from height", func(t *testing.T) { From bd1fb819ade4e8b5d711d7afbf0ba437e1cf2d9c Mon Sep 17 00:00:00 2001 From: kuba-4chain Date: Wed, 4 Dec 2024 19:52:20 +0100 Subject: [PATCH 34/36] feat: add status MINED_IN_STALE_BLOCK to callbacker --- .../blocktx/blocktx_api/blocktx_api.pb.go | 2 +- .../blocktx_api/blocktx_api_grpc.pb.go | 2 +- .../callbacker_api/callbacker_api.pb.go | 37 +++++++++++-------- .../callbacker_api/callbacker_api.proto | 1 + .../callbacker_api/callbacker_api_grpc.pb.go | 2 +- 
.../metamorph_api/metamorph_api.pb.go | 2 +- .../metamorph_api/metamorph_api_grpc.pb.go | 2 +- 7 files changed, 27 insertions(+), 21 deletions(-) diff --git a/internal/blocktx/blocktx_api/blocktx_api.pb.go b/internal/blocktx/blocktx_api/blocktx_api.pb.go index 3b0d4038d..553aef540 100644 --- a/internal/blocktx/blocktx_api/blocktx_api.pb.go +++ b/internal/blocktx/blocktx_api/blocktx_api.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.34.2 -// protoc v5.28.2 +// protoc v5.28.3 // source: internal/blocktx/blocktx_api/blocktx_api.proto package blocktx_api diff --git a/internal/blocktx/blocktx_api/blocktx_api_grpc.pb.go b/internal/blocktx/blocktx_api/blocktx_api_grpc.pb.go index 8efd20a68..fdc58f99e 100644 --- a/internal/blocktx/blocktx_api/blocktx_api_grpc.pb.go +++ b/internal/blocktx/blocktx_api/blocktx_api_grpc.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go-grpc. DO NOT EDIT. // versions: // - protoc-gen-go-grpc v1.5.1 -// - protoc v5.28.2 +// - protoc v5.28.3 // source: internal/blocktx/blocktx_api/blocktx_api.proto package blocktx_api diff --git a/internal/callbacker/callbacker_api/callbacker_api.pb.go b/internal/callbacker/callbacker_api/callbacker_api.pb.go index 7d5595532..51705d010 100644 --- a/internal/callbacker/callbacker_api/callbacker_api.pb.go +++ b/internal/callbacker/callbacker_api/callbacker_api.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.34.2 -// protoc v5.28.2 +// protoc v5.28.3 // source: internal/callbacker/callbacker_api/callbacker_api.proto package callbacker_api @@ -40,6 +40,7 @@ const ( Status_SEEN_ON_NETWORK Status = 90 Status_DOUBLE_SPEND_ATTEMPTED Status = 100 Status_REJECTED Status = 110 + Status_MINED_IN_STALE_BLOCK Status = 115 Status_MINED Status = 120 ) @@ -58,6 +59,7 @@ var ( 90: "SEEN_ON_NETWORK", 100: "DOUBLE_SPEND_ATTEMPTED", 110: "REJECTED", + 115: "MINED_IN_STALE_BLOCK", 120: "MINED", } Status_value = map[string]int32{ @@ -73,6 +75,7 @@ var ( "SEEN_ON_NETWORK": 90, "DOUBLE_SPEND_ATTEMPTED": 100, "REJECTED": 110, + "MINED_IN_STALE_BLOCK": 115, "MINED": 120, } ) @@ -363,7 +366,7 @@ var file_internal_callbacker_callbacker_api_callbacker_api_proto_rawDesc = []byt 0x65, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x1f, 0x0a, 0x0b, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x5f, 0x62, 0x61, 0x74, 0x63, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x42, 0x61, 0x74, 0x63, 0x68, - 0x2a, 0x83, 0x02, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0b, 0x0a, 0x07, 0x55, + 0x2a, 0x9d, 0x02, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x51, 0x55, 0x45, 0x55, 0x45, 0x44, 0x10, 0x0a, 0x12, 0x0c, 0x0a, 0x08, 0x52, 0x45, 0x43, 0x45, 0x49, 0x56, 0x45, 0x44, 0x10, 0x14, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x54, 0x4f, 0x52, 0x45, 0x44, 0x10, 0x1e, 0x12, 0x18, @@ -378,20 +381,22 @@ var file_internal_callbacker_callbacker_api_callbacker_api_proto_rawDesc = []byt 0x53, 0x45, 0x45, 0x4e, 0x5f, 0x4f, 0x4e, 0x5f, 0x4e, 0x45, 0x54, 0x57, 0x4f, 0x52, 0x4b, 0x10, 0x5a, 0x12, 0x1a, 0x0a, 0x16, 0x44, 0x4f, 0x55, 0x42, 0x4c, 0x45, 0x5f, 0x53, 0x50, 0x45, 0x4e, 0x44, 0x5f, 0x41, 0x54, 0x54, 0x45, 0x4d, 0x50, 0x54, 0x45, 0x44, 0x10, 0x64, 0x12, 0x0c, 0x0a, - 0x08, 0x52, 0x45, 0x4a, 0x45, 0x43, 0x54, 0x45, 0x44, 0x10, 0x6e, 0x12, 0x09, 0x0a, 0x05, 0x4d, - 0x49, 0x4e, 0x45, 0x44, 0x10, 
0x78, 0x32, 0xa2, 0x01, 0x0a, 0x0d, 0x43, 0x61, 0x6c, 0x6c, 0x62, - 0x61, 0x63, 0x6b, 0x65, 0x72, 0x41, 0x50, 0x49, 0x12, 0x42, 0x0a, 0x06, 0x48, 0x65, 0x61, 0x6c, - 0x74, 0x68, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x1e, 0x2e, 0x63, 0x61, 0x6c, - 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x72, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x48, 0x65, 0x61, 0x6c, - 0x74, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x4d, 0x0a, 0x0c, - 0x53, 0x65, 0x6e, 0x64, 0x43, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x12, 0x23, 0x2e, 0x63, - 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x72, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x65, - 0x6e, 0x64, 0x43, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x00, 0x42, 0x12, 0x5a, 0x10, 0x2e, - 0x3b, 0x63, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x72, 0x5f, 0x61, 0x70, 0x69, 0x62, - 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x08, 0x52, 0x45, 0x4a, 0x45, 0x43, 0x54, 0x45, 0x44, 0x10, 0x6e, 0x12, 0x18, 0x0a, 0x14, 0x4d, + 0x49, 0x4e, 0x45, 0x44, 0x5f, 0x49, 0x4e, 0x5f, 0x53, 0x54, 0x41, 0x4c, 0x45, 0x5f, 0x42, 0x4c, + 0x4f, 0x43, 0x4b, 0x10, 0x73, 0x12, 0x09, 0x0a, 0x05, 0x4d, 0x49, 0x4e, 0x45, 0x44, 0x10, 0x78, + 0x32, 0xa2, 0x01, 0x0a, 0x0d, 0x43, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x65, 0x72, 0x41, + 0x50, 0x49, 0x12, 0x42, 0x0a, 0x06, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x12, 0x16, 0x2e, 0x67, + 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, + 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x1e, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x65, + 0x72, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x4d, 0x0a, 0x0c, 0x53, 0x65, 0x6e, 0x64, 0x43, 0x61, + 0x6c, 0x6c, 0x62, 0x61, 0x63, 0x6b, 0x12, 0x23, 0x2e, 0x63, 0x61, 0x6c, 0x6c, 0x62, 0x61, 0x63, + 0x6b, 0x65, 0x72, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x53, 0x65, 0x6e, 0x64, 0x43, 0x61, 0x6c, 0x6c, + 0x62, 0x61, 0x63, 0x6b, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, + 0x70, 0x74, 0x79, 0x22, 0x00, 0x42, 0x12, 0x5a, 0x10, 0x2e, 0x3b, 0x63, 0x61, 0x6c, 0x6c, 0x62, + 0x61, 0x63, 0x6b, 0x65, 0x72, 0x5f, 0x61, 0x70, 0x69, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x33, } var ( diff --git a/internal/callbacker/callbacker_api/callbacker_api.proto b/internal/callbacker/callbacker_api/callbacker_api.proto index a0c4bb38b..3a93795c2 100644 --- a/internal/callbacker/callbacker_api/callbacker_api.proto +++ b/internal/callbacker/callbacker_api/callbacker_api.proto @@ -28,6 +28,7 @@ enum Status { SEEN_ON_NETWORK = 90; DOUBLE_SPEND_ATTEMPTED = 100; REJECTED = 110; + MINED_IN_STALE_BLOCK = 115; MINED = 120; } diff --git a/internal/callbacker/callbacker_api/callbacker_api_grpc.pb.go b/internal/callbacker/callbacker_api/callbacker_api_grpc.pb.go index e9bedb75d..c2c597bd8 100644 --- a/internal/callbacker/callbacker_api/callbacker_api_grpc.pb.go +++ b/internal/callbacker/callbacker_api/callbacker_api_grpc.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go-grpc. DO NOT EDIT. 
// versions: // - protoc-gen-go-grpc v1.5.1 -// - protoc v5.28.2 +// - protoc v5.28.3 // source: internal/callbacker/callbacker_api/callbacker_api.proto package callbacker_api diff --git a/internal/metamorph/metamorph_api/metamorph_api.pb.go b/internal/metamorph/metamorph_api/metamorph_api.pb.go index b665b3e69..a842d084b 100644 --- a/internal/metamorph/metamorph_api/metamorph_api.pb.go +++ b/internal/metamorph/metamorph_api/metamorph_api.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.34.2 -// protoc v5.28.2 +// protoc v5.28.3 // source: internal/metamorph/metamorph_api/metamorph_api.proto package metamorph_api diff --git a/internal/metamorph/metamorph_api/metamorph_api_grpc.pb.go b/internal/metamorph/metamorph_api/metamorph_api_grpc.pb.go index f9257b91d..63ab512ee 100644 --- a/internal/metamorph/metamorph_api/metamorph_api_grpc.pb.go +++ b/internal/metamorph/metamorph_api/metamorph_api_grpc.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go-grpc. DO NOT EDIT. // versions: // - protoc-gen-go-grpc v1.5.1 -// - protoc v5.28.2 +// - protoc v5.28.3 // source: internal/metamorph/metamorph_api/metamorph_api.proto package metamorph_api From 6689e3ae18f3c32e732ddb622ab1e64337d70f8e Mon Sep 17 00:00:00 2001 From: Kuba <127198012+kuba-4chain@users.noreply.github.com> Date: Thu, 5 Dec 2024 14:57:36 +0100 Subject: [PATCH 35/36] feat(ARCO-180): reorg e2e tests (#666) --- .github/workflows/go.yaml | 2 +- internal/node_client/node_client.go | 1 - internal/node_client/test_utils.go | 85 +++++++++++++ test/submit_05_reorg_test.go | 178 ++++++++++++++++++++++++++++ test/utils.go | 1 + 5 files changed, 265 insertions(+), 2 deletions(-) create mode 100644 test/submit_05_reorg_test.go diff --git a/.github/workflows/go.yaml b/.github/workflows/go.yaml index 3fa5a8cf0..061c6d3a8 100644 --- a/.github/workflows/go.yaml +++ b/.github/workflows/go.yaml @@ -7,7 +7,7 @@ on: push: branches: ["main"] pull_request: - branches: ["main"] + branches: ["main", "reorg-support"] # TODO: to be removed before merge to main jobs: build: diff --git a/internal/node_client/node_client.go b/internal/node_client/node_client.go index 730965646..f450fc4ab 100644 --- a/internal/node_client/node_client.go +++ b/internal/node_client/node_client.go @@ -106,7 +106,6 @@ func (n NodeClient) GetRawTransaction(ctx context.Context, id string) (rt *sdkTx }() nTx, err := n.bitcoinClient.GetRawTransaction(id) - if err != nil { return nil, errors.Join(ErrFailedToGetRawTransaction, err) } diff --git a/internal/node_client/test_utils.go b/internal/node_client/test_utils.go index 1fb2e4833..cb7becf97 100644 --- a/internal/node_client/test_utils.go +++ b/internal/node_client/test_utils.go @@ -1,7 +1,12 @@ package node_client import ( + "bytes" + "encoding/json" + "errors" "fmt" + "io" + "net/http" "testing" "time" @@ -37,6 +42,19 @@ type BlockData struct { MerkleRoot string `json:"merkleroot"` } +type RPCRequest struct { + Method string `json:"method"` + Params interface{} `json:"params"` + ID int64 `json:"id"` + JSONRpc string `json:"jsonrpc"` +} + +type RPCResponse struct { + ID int64 `json:"id"` + Result json.RawMessage `json:"result"` + Err interface{} `json:"error"` +} + func GetNewWalletAddress(t *testing.T, bitcoind *bitcoin.Bitcoind) (address, privateKey string) { address, err := bitcoind.GetNewAddress() require.NoError(t, err) @@ -287,3 +305,70 @@ func CreateTxFrom(privateKey string, address string, utxos []UnspentOutput, fee return tx, nil } + +func CustomRPCCall(method string, 
params []interface{}, nodeHost string, nodePort int, nodeUser, nodePassword string) error { + c := http.Client{} + + rpcRequest := RPCRequest{method, params, time.Now().UnixNano(), "1.0"} + payloadBuffer := &bytes.Buffer{} + jsonEncoder := json.NewEncoder(payloadBuffer) + + err := jsonEncoder.Encode(rpcRequest) + if err != nil { + return err + } + + req, err := http.NewRequest( + "POST", + fmt.Sprintf("%s://%s:%d", "http", nodeHost, nodePort), + payloadBuffer, + ) + if err != nil { + return err + } + + req.SetBasicAuth(nodeUser, nodePassword) + req.Header.Add("Content-Type", "application/json;charset=utf-8") + req.Header.Add("Accept", "application/json") + + resp, err := c.Do(req) + if err != nil { + return err + } + defer resp.Body.Close() + + data, err := io.ReadAll(resp.Body) + if err != nil { + return err + } + + var rpcResponse RPCResponse + + if resp.StatusCode != 200 { + _ = json.Unmarshal(data, &rpcResponse) + v, ok := rpcResponse.Err.(map[string]interface{}) + if ok { + err = errors.New(v["message"].(string)) + } else { + err = errors.New("HTTP error: " + resp.Status) + } + if err != nil { + return err + } + } + + err = json.Unmarshal(data, &rpcResponse) + if err != nil { + return err + } + + if rpcResponse.Err != nil { + e, ok := rpcResponse.Err.(error) + if ok { + return e + } + return errors.New("unknown error returned from node in rpc response") + } + + return nil +} diff --git a/test/submit_05_reorg_test.go b/test/submit_05_reorg_test.go new file mode 100644 index 000000000..03e0ac9ec --- /dev/null +++ b/test/submit_05_reorg_test.go @@ -0,0 +1,178 @@ +//go:build e2e + +package test + +import ( + "fmt" + "net/http" + "testing" + "time" + + "github.com/bitcoin-sv/arc/internal/node_client" + "github.com/stretchr/testify/require" +) + +func TestReorg(t *testing.T) { + address, privateKey := node_client.FundNewWallet(t, bitcoind) + + utxos := node_client.GetUtxos(t, bitcoind, address) + require.True(t, len(utxos) > 0, "No UTXOs available for the address") + + tx1, err := node_client.CreateTx(privateKey, address, utxos[0]) + require.NoError(t, err) + + // submit tx1 + rawTx, err := tx1.EFHex() + require.NoError(t, err) + resp := postRequest[TransactionResponse](t, arcEndpointV1Tx, createPayload(t, TransactionRequest{RawTx: rawTx}), map[string]string{"X-WaitFor": StatusSeenOnNetwork}, http.StatusOK) + require.Equal(t, StatusSeenOnNetwork, resp.TxStatus) + + // mine tx1 + invHash := node_client.Generate(t, bitcoind, 1) + + // verify tx1 = MINED + statusURL := fmt.Sprintf("%s/%s", arcEndpointV1Tx, tx1.TxID()) + statusResp := getRequest[TransactionResponse](t, statusURL) + require.Equal(t, StatusMined, statusResp.TxStatus) + require.Equal(t, invHash, *statusResp.BlockHash) + + // get new UTXO for tx2 + txID := node_client.SendToAddress(t, bitcoind, address, float64(0.002)) + utxos = node_client.GetUtxos(t, bitcoind, address) + require.True(t, len(utxos) > 0, "No UTXOs available for the address") + + // make sure to pick the correct UTXO + var utxo node_client.UnspentOutput + for _, u := range utxos { + if u.Txid == txID { + utxo = u + } + } + + tx2, err := node_client.CreateTx(privateKey, address, utxo) + require.NoError(t, err) + + // prepare a callback server for tx2 + callbackReceivedChan := make(chan *TransactionResponse) + callbackErrChan := make(chan error) + callbackURL, token, shutdown := startCallbackSrv(t, callbackReceivedChan, callbackErrChan, nil) + defer shutdown() + + // submit tx2 + rawTx, err = tx2.EFHex() + require.NoError(t, err) + resp = 
postRequest[TransactionResponse](t, arcEndpointV1Tx, createPayload(t, TransactionRequest{RawTx: rawTx}), + map[string]string{ + "X-WaitFor": StatusSeenOnNetwork, + "X-CallbackUrl": callbackURL, + "X-CallbackToken": token, + }, http.StatusOK) + require.Equal(t, StatusSeenOnNetwork, resp.TxStatus) + + // mine tx2 + tx2BlockHash := node_client.Generate(t, bitcoind, 1) + + // verify tx2 = MINED + statusURL = fmt.Sprintf("%s/%s", arcEndpointV1Tx, tx2.TxID()) + statusResp = getRequest[TransactionResponse](t, statusURL) + require.Equal(t, StatusMined, statusResp.TxStatus) + require.Equal(t, tx2BlockHash, *statusResp.BlockHash) + + select { + case status := <-callbackReceivedChan: + require.Equal(t, tx2.TxID(), status.Txid) + require.Equal(t, StatusMined, status.TxStatus) + case err := <-callbackErrChan: + t.Fatalf("callback error: %v", err) + case <-time.After(1 * time.Second): + t.Fatal("callback exceeded timeout") + } + + // invalidate the chain with tx1 and tx2 + call(t, "invalidateblock", []interface{}{invHash}) + + // prepare txStale + txID = node_client.SendToAddress(t, bitcoind, address, float64(0.003)) + utxos = node_client.GetUtxos(t, bitcoind, address) + require.True(t, len(utxos) > 0, "No UTXOs available for the address") + + // make sure to pick the correct UTXO + for _, u := range utxos { + if u.Txid == txID { + utxo = u + } + } + + txStale, err := node_client.CreateTx(privateKey, address, utxo) + require.NoError(t, err) + + // post a tx to the STALE chain + rawTx, err = txStale.EFHex() + require.NoError(t, err) + resp = postRequest[TransactionResponse](t, arcEndpointV1Tx, createPayload(t, TransactionRequest{RawTx: rawTx}), map[string]string{"X-WaitFor": StatusSeenOnNetwork}, http.StatusOK) + require.Equal(t, StatusSeenOnNetwork, resp.TxStatus) + + // post the previously mined tx1 to a STALE chain + rawTx, err = tx1.EFHex() + require.NoError(t, err) + _ = postRequest[TransactionResponse](t, arcEndpointV1Tx, createPayload(t, TransactionRequest{RawTx: rawTx}), map[string]string{"X-WaitFor": StatusSeenOnNetwork}, http.StatusOK) + + // generate new block that will create a stale chain that includes the txStale and tx1 + staleHash := node_client.Generate(t, bitcoind, 1) + + // verify that stale tx is still SEEN_ON_NETWORK + statusURL = fmt.Sprintf("%s/%s", arcEndpointV1Tx, txStale.TxID()) + statusResp = getRequest[TransactionResponse](t, statusURL) + require.Equal(t, StatusSeenOnNetwork, statusResp.TxStatus) + + // verify that nothing changed so far with previous mined txs + statusURL = fmt.Sprintf("%s/%s", arcEndpointV1Tx, tx1.TxID()) + statusResp = getRequest[TransactionResponse](t, statusURL) + require.Equal(t, StatusMined, statusResp.TxStatus) + require.Equal(t, invHash, *statusResp.BlockHash) + + statusURL = fmt.Sprintf("%s/%s", arcEndpointV1Tx, tx2.TxID()) + statusResp = getRequest[TransactionResponse](t, statusURL) + require.Equal(t, StatusMined, statusResp.TxStatus) + require.Equal(t, tx2BlockHash, *statusResp.BlockHash) + + // make the STALE chain LONGEST by adding 2 new blocks + node_client.Generate(t, bitcoind, 1) + node_client.Generate(t, bitcoind, 1) + + // verify that stale tx is now MINED + statusURL = fmt.Sprintf("%s/%s", arcEndpointV1Tx, txStale.TxID()) + statusResp = getRequest[TransactionResponse](t, statusURL) + require.Equal(t, StatusMined, statusResp.TxStatus) + require.Equal(t, staleHash, *statusResp.BlockHash) + + // verify that previous mined tx1 have updated block info + statusURL = fmt.Sprintf("%s/%s", arcEndpointV1Tx, tx1.TxID()) + statusResp = 
getRequest[TransactionResponse](t, statusURL) + require.Equal(t, StatusMined, statusResp.TxStatus) + require.Equal(t, staleHash, *statusResp.BlockHash) + + // verify that tx2 is now MINED_IN_STALE_BLOCK + statusURL = fmt.Sprintf("%s/%s", arcEndpointV1Tx, tx2.TxID()) + statusResp = getRequest[TransactionResponse](t, statusURL) + require.Equal(t, StatusMinedInStaleBlock, statusResp.TxStatus) + require.Equal(t, tx2BlockHash, *statusResp.BlockHash) + + // verify that callback for tx2 was received with status MINED_IN_STALE_BLOCK + select { + case status := <-callbackReceivedChan: + require.Equal(t, tx2.TxID(), status.Txid) + require.Equal(t, StatusMinedInStaleBlock, status.TxStatus) + case err := <-callbackErrChan: + t.Fatalf("callback error: %v", err) + case <-time.After(1 * time.Second): + t.Fatal("callback exceeded timeout") + } +} + +func call(t *testing.T, method string, params []interface{}) { + err := node_client.CustomRPCCall(method, params, nodeHost, nodePort, nodeUser, nodePassword) + require.NoError(t, err) + + time.Sleep(5 * time.Second) +} diff --git a/test/utils.go b/test/utils.go index 5f652db07..76da39afa 100644 --- a/test/utils.go +++ b/test/utils.go @@ -30,6 +30,7 @@ const ( StatusDoubleSpendAttempted = "DOUBLE_SPEND_ATTEMPTED" StatusRejected = "REJECTED" StatusMined = "MINED" + StatusMinedInStaleBlock = "MINED_IN_STALE_BLOCK" ) type TransactionResponseBatch []TransactionResponse From 86d2ec6480e9a1f5775d87855327a533defd1d80 Mon Sep 17 00:00:00 2001 From: Kuba <127198012+kuba-4chain@users.noreply.github.com> Date: Thu, 5 Dec 2024 14:57:50 +0100 Subject: [PATCH 36/36] feat: documentation for reorg (#682) --- CHANGELOG.md | 6 ++++++ README.md | 1 + doc/README.md | 33 ++++++++++++++++++++++++++------- 3 files changed, 33 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1b725bce6..3eef40867 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,7 @@ All notable changes to this project will be documented in this file. The format ## Table of Contents - [Unreleased](#unreleased) +- [1.3.13](#1313---2024-12-04) - [1.3.12](#1312---2024-12-05) - [1.3.2](#132---2024-10-30) - [1.3.0](#130---2024-08-21) @@ -20,6 +21,11 @@ All notable changes to this project will be documented in this file. The format ## [Unreleased] +## [1.3.13] - 2024-12-04 + +### Added +- [Reorg Support](https://bitcoin-sv.github.io/arc/#/?id=chain-reorg) - adapting ARC to handle chain reorganisations. Whenever a reorg happens, ARC will update the block info for each transaction affected and rebroadcast those that are not seen in the newest longest chain. + ## [1.3.12] - 2024-12-05 ### Changed diff --git a/README.md b/README.md index 1ee820436..0075a89b9 100644 --- a/README.md +++ b/README.md @@ -185,6 +185,7 @@ The following statuses are available: | `SEEN_IN_ORPHAN_MEMPOOL` | The transaction has been sent to at least 1 Bitcoin node but parent transaction was not found. | | `SEEN_ON_NETWORK` | The transaction has been seen on the Bitcoin network and propagated to other nodes. This status is set when metamorph receives an INV message for the transaction from another node than it was sent to. | | `DOUBLE_SPEND_ATTEMPTED` | The transaction is a double spend attempt. Competing transaction(s) will be returned with this status. | +| `MINED_IN_STALE_BLOCK` | The transaction has been mined into a block that became stale after a chain reorganisation (reorg). | | `REJECTED` | The transaction has been rejected by the Bitcoin network. 
| | `MINED` | The transaction has been mined into a block by a mining node. | diff --git a/doc/README.md b/doc/README.md index 002cb94dc..125b1541e 100644 --- a/doc/README.md +++ b/doc/README.md @@ -41,6 +41,7 @@ stateDiagram-v2 state SEEN_ON_NETWORK state REJECTED state MINED + state MINED_IN_STALE_BLOCK [*] --> UNKNOWN UNKNOWN --> ERROR: Transaction validation failed @@ -61,6 +62,7 @@ stateDiagram-v2 SEEN_ON_NETWORK --> DOUBLE_SPEND_ATTEMPTED: A competing transactions entered the mempool DOUBLE_SPEND_ATTEMPTED --> MINED: This transaction was accepted and mined DOUBLE_SPEND_ATTEMPTED --> REJECTED: This transaction was rejected in favor\n of one of the competing transactions + MINED --> MINED_IN_STALE_BLOCK: This transaction was mined in a block that became stale after reorg MINED --> [*] ``` @@ -113,7 +115,7 @@ The Callbacker handles request retries and treats any HTTP status code outside t ### BlockTx -BlockTx is a microservice that is responsible for processing blocks mined on the Bitcoin network, and for propagating +BlockTx is a microservice that is responsible for processing blocks mined on the Bitcoin network, for handling chain reorganisations ([reorgs](#chain-reorg)), and for propagating the status of transactions to each Metamorph that has subscribed to this service. The main purpose of BlockTx is to de-duplicate processing of (large) blocks. As an incoming block is processed by BlockTx, Metamorph is notified about mined transactions by means of a message queue. BlockTx does not store the transaction data, but instead stores only the transaction IDs and the block height in which they were mined. Metamorph is responsible for storing the transaction data. @@ -514,15 +516,32 @@ Expected outcome * ARC responds with status `ANNOUNCED_TO_NETWORK` or `SEEN_ON_NETWORK` * At latest a couple of minutes later the status will switch to `SEEN_ON_NETWORK` -### Block reorg +### Chain reorg -A block reorg happens and two different blocks get announced at the same block height +A chain reorganization (chain reorg) occurs when the blockchain switches its primary chain of blocks to a different branch, typically because a previously stale chain (fork) has accumulated more chainwork than the current longest chain. Chainwork, which represents the cumulative computational effort used to build a chain, is the deciding factor in determining the longest and most valid chain. This process can impact transaction statuses based on their presence in the affected chains. -Expected outcome: -* ARC does not update the transaction statuses according to the block in the longest chain. -* Information and Merkle path of the block received first will be persisted in the transaction record and not overwritten +#### Transaction in both chains +This is the most common scenario. If a transaction is included in both the original longest chain and the stale chain (which becomes the longest), the transition to the new longest chain does not disrupt the transaction's confirmation status. + +A new `MINED` callback will be sent for that transaction with updated block data (block hash, block height, Merkle path). + +#### Transaction in the previously longest chain, but not in the stale chain that becomes longest +When a transaction exists in the original longest chain but not in the stale chain that subsequently becomes the longest, the transaction is effectively removed from the confirmed state of the blockchain. 
+ +A callback with status `MINED_IN_STALE_BLOCK` will be sent for that transaction and ARC will rebroadcast that transaction. Because of that process, the transaction may cycle through statuses, such as `SEEN_ON_NETWORK`, again. Users should keep in mind that the status `MINED_IN_STALE_BLOCK` **is not final**. + +#### Transaction in the stale chain only (without reorg) +When a transaction is present only in the stale chain and not in the original longest chain, it remains unconfirmed, in the `MINED_IN_STALE_BLOCK` state, until the stale chain becomes the longest or the transaction is found in the longest chain. + +A callback with status `MINED_IN_STALE_BLOCK` will be sent for that transaction and ARC will rebroadcast that transaction. Because of that process, the transaction may cycle through statuses, such as `SEEN_ON_NETWORK`, again. Users should keep in mind that the status `MINED_IN_STALE_BLOCK` **is not final**. + +#### Summary table +| Transaction Scenario | Callback status after reorg | Extra info / Action | +|--------------------------------------|-----------------------------|------------------------------------------------------------------------------------------------------------| +| In both chains | `MINED` | Block data (hash, height, Merkle path) will be updated in the callback | +| In longest chain, not in stale chain | `MINED_IN_STALE_BLOCK` | Transaction will be rebroadcasted and cycle through statuses again until it is found in the longest chain | +| In stale chain only (no reorg) | `MINED_IN_STALE_BLOCK` | Transaction will be rebroadcasted and cycle through statuses again until it is found in the longest chain | -The planned feature [Update of transactions in case of block reorgs](https://github.com/bitcoin-sv/arc/blob/main/ROADMAP.md#update-of-transactions-in-case-of-block-reorgs) will ensure that ARC updates the statuses of transactions. Transactions which are not in the block of the longest chain will be updated to `REJECTED` status and transactions which are included in the block of the longest chain are updated to `MINED` status. ## Cumulative fees validation
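The reorg behaviour documented above can be handled on the integrator side with a small callback receiver. The following sketch is illustrative only: the JSON field names (`txid`, `txStatus`, `blockHash`) and the listen address are assumptions that mirror the fields used in the e2e reorg test, not a confirmed ARC callback schema. It simply treats `MINED_IN_STALE_BLOCK` as a non-final status and waits for a subsequent `MINED` callback.

```go
// Minimal sketch of a callback receiver. Field names are assumptions,
// not the authoritative ARC callback payload.
package main

import (
	"encoding/json"
	"log"
	"net/http"
)

type callback struct {
	TxID      string `json:"txid"`      // assumed field name
	TxStatus  string `json:"txStatus"`  // assumed field name
	BlockHash string `json:"blockHash"` // assumed field name
}

func main() {
	http.HandleFunc("/callback", func(w http.ResponseWriter, r *http.Request) {
		var cb callback
		if err := json.NewDecoder(r.Body).Decode(&cb); err != nil {
			w.WriteHeader(http.StatusBadRequest)
			return
		}

		switch cb.TxStatus {
		case "MINED":
			// Block data (hash, height, Merkle path) may still be replaced by a
			// later MINED callback if a reorg moves the tx into a different block.
			log.Printf("tx %s mined in block %s", cb.TxID, cb.BlockHash)
		case "MINED_IN_STALE_BLOCK":
			// Not final: ARC rebroadcasts the tx, so expect it to cycle through
			// statuses (e.g. SEEN_ON_NETWORK) and eventually be MINED again.
			log.Printf("tx %s mined in stale block %s, awaiting new MINED", cb.TxID, cb.BlockHash)
		default:
			log.Printf("tx %s status %s", cb.TxID, cb.TxStatus)
		}

		w.WriteHeader(http.StatusOK) // a 2xx response acknowledges the callback
	})

	log.Fatal(http.ListenAndServe(":9000", nil))
}
```

Responding with a 2xx status acknowledges delivery; any other response may cause the Callbacker to retry the callback, as described in the Callbacker section above.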