diff --git a/Makefile b/Makefile
index 4020840c1..f8af4f502 100644
--- a/Makefile
+++ b/Makefile
@@ -25,18 +25,18 @@ build_docker:

 .PHONY: run
 run:
-	docker compose down --remove-orphans
+	docker compose down -v --remove-orphans
 	docker compose up --build blocktx callbacker metamorph api

 .PHONY: run_e2e_tests
 run_e2e_tests:
-	docker compose down --remove-orphans
+	docker compose down -v --remove-orphans
 	docker compose up --build blocktx callbacker metamorph api tests --scale blocktx=4 --scale metamorph=2 --exit-code-from tests
 	docker compose down

 .PHONY: run_e2e_tests_with_tracing
 run_e2e_tests_with_tracing:
-	docker compose down --remove-orphans
+	docker compose down -v --remove-orphans
 	ARC_TRACING_ENABLED=TRUE docker compose up --build blocktx callbacker metamorph api tests jaeger --scale blocktx=4 --scale metamorph=2 --no-attach jaeger

 .PHONY: test
diff --git a/cmd/arc/services/blocktx.go b/cmd/arc/services/blocktx.go
index cf6c44389..3202a703f 100644
--- a/cmd/arc/services/blocktx.go
+++ b/cmd/arc/services/blocktx.go
@@ -8,12 +8,13 @@ import (
 	"go.opentelemetry.io/otel/attribute"

+	"github.com/libsv/go-p2p"
+
 	"github.com/bitcoin-sv/arc/internal/grpc_opts"
 	"github.com/bitcoin-sv/arc/internal/message_queue/nats/client/nats_core"
 	"github.com/bitcoin-sv/arc/internal/message_queue/nats/client/nats_jetstream"
 	"github.com/bitcoin-sv/arc/internal/message_queue/nats/nats_connection"
 	"github.com/bitcoin-sv/arc/internal/tracing"
-	"github.com/libsv/go-p2p"

 	"github.com/bitcoin-sv/arc/config"
 	"github.com/bitcoin-sv/arc/internal/blocktx"
@@ -127,6 +128,7 @@ func StartBlockTx(logger *slog.Logger, arcConfig *config.ArcConfig) (func(), err
 		blocktx.WithRegisterTxsInterval(btxConfig.RegisterTxsInterval),
 		blocktx.WithMessageQueueClient(mqClient),
 		blocktx.WithMaxBlockProcessingDuration(btxConfig.MaxBlockProcessingDuration),
+		blocktx.WithIncomingIsLongest(btxConfig.IncomingIsLongest),
 	)

 	blockRequestCh := make(chan blocktx.BlockRequest, blockProcessingBuffer)
diff --git a/config/config.go b/config/config.go
index 793bfe117..eb715c08b 100644
--- a/config/config.go
+++ b/config/config.go
@@ -128,6 +128,7 @@ type BlocktxConfig struct {
 	MaxAllowedBlockHeightMismatch int `mapstructure:"maxAllowedBlockHeightMismatch"`
 	MessageQueue *MessageQueueConfig `mapstructure:"mq"`
 	P2pReadBufferSize int `mapstructure:"p2pReadBufferSize"`
+	IncomingIsLongest bool `mapstructure:"incomingIsLongest"`
 }

 type DbConfig struct {
diff --git a/config/defaults.go b/config/defaults.go
index 869d44492..e46783a04 100644
--- a/config/defaults.go
+++ b/config/defaults.go
@@ -129,6 +129,7 @@ func getBlocktxConfig() *BlocktxConfig {
 		MaxBlockProcessingDuration: 5 * time.Minute,
 		MessageQueue: &MessageQueueConfig{},
 		P2pReadBufferSize: 8 * 1024 * 1024,
+		IncomingIsLongest: false,
 	}
 }

diff --git a/config/example_config.yaml b/config/example_config.yaml
index 6f6f20c6f..69cf1415d 100644
--- a/config/example_config.yaml
+++ b/config/example_config.yaml
@@ -106,6 +106,7 @@ blocktx:
   registerTxsInterval: 10s # time interval to read from the channel registered transactions
   maxBlockProcessingDuration: 5m # maximum time a blocktx can spend on processing a block before unlocking it to be requested again
   monitorPeers: false # if enabled, peers which do not receive alive signal from nodes will be restarted
+  incomingIsLongest: false # whether each new block received is considered to be from the longest blockchain. If there are a lot of block gaps in the blocktx database, it is advisable to set this to true
   fillGaps:
     enabled: true
     interval: 15m # time interval to check and fill gaps in processed blocks
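Note: the hunks above thread the new `incomingIsLongest` setting from the YAML config through `BlocktxConfig` into the processor via `blocktx.WithIncomingIsLongest`. The option's definition is not part of the hunks shown in this diff; the sketch below only illustrates how such a functional option is commonly wired, and the `Processor` field and `Option` type names are assumptions, not the actual implementation.

```go
package blocktx

// Illustrative sketch only – the real option and Processor fields are defined
// elsewhere in this PR; the names below are assumptions.

// Processor is reduced here to the single (assumed) field this sketch needs.
type Processor struct {
	incomingIsLongest bool
}

// Option configures optional Processor behaviour.
type Option func(p *Processor)

// WithIncomingIsLongest makes the processor treat every incoming block as part
// of the longest chain, which is useful when the blocktx database contains
// many block gaps.
func WithIncomingIsLongest(enabled bool) Option {
	return func(p *Processor) {
		p.incomingIsLongest = enabled
	}
}
```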
diff --git a/doc/README.md b/doc/README.md
index 125b1541e..65e55627c 100644
--- a/doc/README.md
+++ b/doc/README.md
@@ -58,12 +58,13 @@ stateDiagram-v2
     ACCEPTED_BY_NETWORK --> SEEN_ON_NETWORK: ARC has received Transaction ID\n announcement from another peer
     ACCEPTED_BY_NETWORK --> SEEN_IN_ORPHAN_MEMPOOL: Peer has sent a 'missing inputs' message
     SEEN_IN_ORPHAN_MEMPOOL --> SEEN_ON_NETWORK: All parent transactions\n have been received by peer
-    SEEN_ON_NETWORK --> MINED: Transaction ID was included in a BLOCK message
     SEEN_ON_NETWORK --> DOUBLE_SPEND_ATTEMPTED: A competing transactions entered the mempool
-    DOUBLE_SPEND_ATTEMPTED --> MINED: This transaction was accepted and mined
     DOUBLE_SPEND_ATTEMPTED --> REJECTED: This transaction was rejected in favor\n of one of the competing transactions
-    MINED --> MINED_IN_STALE_BLOCK: This transaction was mined in a block that became stale after reorg
+    DOUBLE_SPEND_ATTEMPTED --> MINED: This transaction was accepted and mined
+    SEEN_ON_NETWORK --> MINED: Transaction ID was included in a BLOCK message
+    MINED_IN_STALE_BLOCK --> SEEN_ON_NETWORK: Transaction gets re-broadcasted
     MINED --> [*]
+    MINED --> MINED_IN_STALE_BLOCK: This transaction was mined in a block that became stale after reorg
 ```

 ## Microservices
diff --git a/go.mod b/go.mod
index b07e8e0e6..8e8d332eb 100644
--- a/go.mod
+++ b/go.mod
@@ -17,6 +17,7 @@ require (
 	github.com/google/uuid v1.6.0
 	github.com/grpc-ecosystem/go-grpc-middleware/providers/prometheus v1.0.1
 	github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.1.0
+	github.com/jackc/pgx/v5 v5.3.1
 	github.com/jedib0t/go-pretty/v6 v6.5.9
 	github.com/jmoiron/sqlx v1.3.5
 	github.com/labstack/echo-contrib v0.17.1
@@ -99,6 +100,8 @@ require (
 	github.com/hashicorp/hcl v1.0.0 // indirect
 	github.com/inconshreveable/mousetrap v1.1.0 // indirect
 	github.com/invopop/yaml v0.3.1 // indirect
+	github.com/jackc/pgpassfile v1.0.0 // indirect
+	github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect
 	github.com/josharian/intern v1.0.0 // indirect
 	github.com/json-iterator/go v1.1.12 // indirect
 	github.com/klauspost/compress v1.17.9 // indirect
diff --git a/go.sum b/go.sum
index 017010041..ee2756a92 100644
--- a/go.sum
+++ b/go.sum
@@ -171,6 +171,8 @@ github.com/jackc/pgtype v1.14.0 h1:y+xUdabmyMkJLyApYuPj38mW+aAIqCe5uuBB51rH3Vw=
 github.com/jackc/pgtype v1.14.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4=
 github.com/jackc/pgx/v4 v4.18.1 h1:YP7G1KABtKpB5IHrO9vYwSrCOhs7p3uqhvhhQBptya0=
 github.com/jackc/pgx/v4 v4.18.1/go.mod h1:FydWkUyadDmdNH/mHnGob881GawxeEm7TcMCzkb+qQE=
+github.com/jackc/pgx/v5 v5.3.1 h1:Fcr8QJ1ZeLi5zsPZqQeUZhNhxfkkKBOgJuYkJHoBOtU=
+github.com/jackc/pgx/v5 v5.3.1/go.mod h1:t3JDKnCBlYIc0ewLF0Q7B8MXmoIaBOZj/ic7iHozM/8=
 github.com/jedib0t/go-pretty/v6 v6.5.9 h1:ACteMBRrrmm1gMsXe9PSTOClQ63IXDUt03H5U+UV8OU=
 github.com/jedib0t/go-pretty/v6 v6.5.9/go.mod h1:zbn98qrYlh95FIhwwsbIip0LYpwSG8SUOScs+v9/t0E=
 github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g=
diff --git a/internal/blocktx/background_workers_test.go b/internal/blocktx/background_workers_test.go
index 89e1262e1..edcdd5c90 100644
--- a/internal/blocktx/background_workers_test.go
+++ b/internal/blocktx/background_workers_test.go
@@ -8,14 +8,16 @@ import (
 	"testing"
 	"time"

+	"github.com/libsv/go-p2p"
+
 	"github.com/bitcoin-sv/arc/internal/blocktx"
"github.com/bitcoin-sv/arc/internal/blocktx/mocks" "github.com/bitcoin-sv/arc/internal/blocktx/store" storeMocks "github.com/bitcoin-sv/arc/internal/blocktx/store/mocks" - "github.com/libsv/go-p2p" - "github.com/bitcoin-sv/arc/internal/testdata" "github.com/stretchr/testify/require" + + "github.com/bitcoin-sv/arc/internal/testdata" ) func TestStartFillGaps(t *testing.T) { diff --git a/internal/blocktx/blocktx_api/blocktx_api.pb.go b/internal/blocktx/blocktx_api/blocktx_api.pb.go index 8ea1e161b..d0f317be2 100644 --- a/internal/blocktx/blocktx_api/blocktx_api.pb.go +++ b/internal/blocktx/blocktx_api/blocktx_api.pb.go @@ -852,23 +852,18 @@ var file_internal_blocktx_blocktx_api_blocktx_api_proto_rawDesc = []byte{ 0x73, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x0b, 0x0a, 0x07, 0x4c, 0x4f, 0x4e, 0x47, 0x45, 0x53, 0x54, 0x10, 0x0a, 0x12, 0x09, 0x0a, 0x05, 0x53, 0x54, 0x41, 0x4c, 0x45, 0x10, 0x14, 0x12, 0x0c, 0x0a, 0x08, 0x4f, 0x52, 0x50, 0x48, 0x41, 0x4e, - 0x45, 0x44, 0x10, 0x1e, 0x32, 0xad, 0x04, 0x0a, 0x0a, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x54, 0x78, + 0x45, 0x44, 0x10, 0x1e, 0x32, 0xdd, 0x03, 0x0a, 0x0a, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x54, 0x78, 0x41, 0x50, 0x49, 0x12, 0x3f, 0x0a, 0x06, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x1b, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x22, 0x00, 0x12, 0x50, 0x0a, 0x11, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x54, 0x72, 0x61, - 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x16, 0x2e, 0x62, 0x6c, 0x6f, 0x63, - 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, - 0x61, 0x1a, 0x21, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, - 0x52, 0x6f, 0x77, 0x73, 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x4a, 0x0a, 0x0b, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x42, - 0x6c, 0x6f, 0x63, 0x6b, 0x73, 0x12, 0x16, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, - 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x1a, 0x21, 0x2e, - 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x6f, 0x77, 0x73, - 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x22, 0x00, 0x12, 0x58, 0x0a, 0x19, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x42, 0x6c, 0x6f, 0x63, 0x6b, - 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x4d, 0x61, 0x70, 0x12, + 0x73, 0x65, 0x22, 0x00, 0x12, 0x4a, 0x0a, 0x0b, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x42, 0x6c, 0x6f, + 0x63, 0x6b, 0x73, 0x12, 0x16, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, + 0x69, 0x2e, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x1a, 0x21, 0x2e, 0x62, 0x6c, + 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x6f, 0x77, 0x73, 0x41, 0x66, + 0x66, 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, + 0x12, 0x5a, 0x0a, 0x1b, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, + 0x72, 0x65, 0x64, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x61, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x16, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x43, 0x6c, 0x65, 
0x61, 0x72, 0x44, 0x61, 0x74, 0x61, 0x1a, 0x21, 0x2e, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x74, 0x78, 0x5f, 0x61, 0x70, 0x69, 0x2e, 0x52, 0x6f, 0x77, 0x73, 0x41, 0x66, 0x66, 0x65, 0x63, 0x74, @@ -930,19 +925,17 @@ var file_internal_blocktx_blocktx_api_blocktx_api_proto_depIdxs = []int32{ 4, // 4: blocktx_api.TransactionBlocks.transaction_blocks:type_name -> blocktx_api.TransactionBlock 10, // 5: blocktx_api.MerkleRootsVerificationRequest.merkle_roots:type_name -> blocktx_api.MerkleRootVerificationRequest 14, // 6: blocktx_api.BlockTxAPI.Health:input_type -> google.protobuf.Empty - 7, // 7: blocktx_api.BlockTxAPI.ClearTransactions:input_type -> blocktx_api.ClearData - 7, // 8: blocktx_api.BlockTxAPI.ClearBlocks:input_type -> blocktx_api.ClearData - 7, // 9: blocktx_api.BlockTxAPI.ClearBlockTransactionsMap:input_type -> blocktx_api.ClearData - 9, // 10: blocktx_api.BlockTxAPI.DelUnfinishedBlockProcessing:input_type -> blocktx_api.DelUnfinishedBlockProcessingRequest - 11, // 11: blocktx_api.BlockTxAPI.VerifyMerkleRoots:input_type -> blocktx_api.MerkleRootsVerificationRequest - 1, // 12: blocktx_api.BlockTxAPI.Health:output_type -> blocktx_api.HealthResponse - 8, // 13: blocktx_api.BlockTxAPI.ClearTransactions:output_type -> blocktx_api.RowsAffectedResponse - 8, // 14: blocktx_api.BlockTxAPI.ClearBlocks:output_type -> blocktx_api.RowsAffectedResponse - 8, // 15: blocktx_api.BlockTxAPI.ClearBlockTransactionsMap:output_type -> blocktx_api.RowsAffectedResponse - 8, // 16: blocktx_api.BlockTxAPI.DelUnfinishedBlockProcessing:output_type -> blocktx_api.RowsAffectedResponse - 12, // 17: blocktx_api.BlockTxAPI.VerifyMerkleRoots:output_type -> blocktx_api.MerkleRootVerificationResponse - 12, // [12:18] is the sub-list for method output_type - 6, // [6:12] is the sub-list for method input_type + 7, // 7: blocktx_api.BlockTxAPI.ClearBlocks:input_type -> blocktx_api.ClearData + 7, // 8: blocktx_api.BlockTxAPI.ClearRegisteredTransactions:input_type -> blocktx_api.ClearData + 9, // 9: blocktx_api.BlockTxAPI.DelUnfinishedBlockProcessing:input_type -> blocktx_api.DelUnfinishedBlockProcessingRequest + 11, // 10: blocktx_api.BlockTxAPI.VerifyMerkleRoots:input_type -> blocktx_api.MerkleRootsVerificationRequest + 1, // 11: blocktx_api.BlockTxAPI.Health:output_type -> blocktx_api.HealthResponse + 8, // 12: blocktx_api.BlockTxAPI.ClearBlocks:output_type -> blocktx_api.RowsAffectedResponse + 8, // 13: blocktx_api.BlockTxAPI.ClearRegisteredTransactions:output_type -> blocktx_api.RowsAffectedResponse + 8, // 14: blocktx_api.BlockTxAPI.DelUnfinishedBlockProcessing:output_type -> blocktx_api.RowsAffectedResponse + 12, // 15: blocktx_api.BlockTxAPI.VerifyMerkleRoots:output_type -> blocktx_api.MerkleRootVerificationResponse + 11, // [11:16] is the sub-list for method output_type + 6, // [6:11] is the sub-list for method input_type 6, // [6:6] is the sub-list for extension type_name 6, // [6:6] is the sub-list for extension extendee 0, // [0:6] is the sub-list for field type_name diff --git a/internal/blocktx/blocktx_api/blocktx_api.proto b/internal/blocktx/blocktx_api/blocktx_api.proto index ab7ca5364..78e5ed0f7 100644 --- a/internal/blocktx/blocktx_api/blocktx_api.proto +++ b/internal/blocktx/blocktx_api/blocktx_api.proto @@ -11,14 +11,11 @@ service BlockTxAPI { // Health returns the health of the API. 
rpc Health (google.protobuf.Empty) returns (HealthResponse) {} - // ClearTransactions clears transaction data - rpc ClearTransactions(ClearData) returns (RowsAffectedResponse) {} - // ClearBlocks clears block data rpc ClearBlocks(ClearData) returns (RowsAffectedResponse) {} - // ClearBlockTransactionsMap clears block-transaction-map data - rpc ClearBlockTransactionsMap(ClearData) returns (RowsAffectedResponse) {} + // ClearRegisteredTransactions clears registered transactions + rpc ClearRegisteredTransactions(ClearData) returns (RowsAffectedResponse) {} // DelUnfinishedBlockProcessing deletes unfinished block processing rpc DelUnfinishedBlockProcessing(DelUnfinishedBlockProcessingRequest) returns (RowsAffectedResponse) {} diff --git a/internal/blocktx/blocktx_api/blocktx_api_grpc.pb.go b/internal/blocktx/blocktx_api/blocktx_api_grpc.pb.go index 69b2a2c60..74bd4aeeb 100644 --- a/internal/blocktx/blocktx_api/blocktx_api_grpc.pb.go +++ b/internal/blocktx/blocktx_api/blocktx_api_grpc.pb.go @@ -21,9 +21,8 @@ const _ = grpc.SupportPackageIsVersion9 const ( BlockTxAPI_Health_FullMethodName = "/blocktx_api.BlockTxAPI/Health" - BlockTxAPI_ClearTransactions_FullMethodName = "/blocktx_api.BlockTxAPI/ClearTransactions" BlockTxAPI_ClearBlocks_FullMethodName = "/blocktx_api.BlockTxAPI/ClearBlocks" - BlockTxAPI_ClearBlockTransactionsMap_FullMethodName = "/blocktx_api.BlockTxAPI/ClearBlockTransactionsMap" + BlockTxAPI_ClearRegisteredTransactions_FullMethodName = "/blocktx_api.BlockTxAPI/ClearRegisteredTransactions" BlockTxAPI_DelUnfinishedBlockProcessing_FullMethodName = "/blocktx_api.BlockTxAPI/DelUnfinishedBlockProcessing" BlockTxAPI_VerifyMerkleRoots_FullMethodName = "/blocktx_api.BlockTxAPI/VerifyMerkleRoots" ) @@ -34,12 +33,10 @@ const ( type BlockTxAPIClient interface { // Health returns the health of the API. Health(ctx context.Context, in *emptypb.Empty, opts ...grpc.CallOption) (*HealthResponse, error) - // ClearTransactions clears transaction data - ClearTransactions(ctx context.Context, in *ClearData, opts ...grpc.CallOption) (*RowsAffectedResponse, error) // ClearBlocks clears block data ClearBlocks(ctx context.Context, in *ClearData, opts ...grpc.CallOption) (*RowsAffectedResponse, error) - // ClearBlockTransactionsMap clears block-transaction-map data - ClearBlockTransactionsMap(ctx context.Context, in *ClearData, opts ...grpc.CallOption) (*RowsAffectedResponse, error) + // ClearRegisteredTransactions clears registered transactions + ClearRegisteredTransactions(ctx context.Context, in *ClearData, opts ...grpc.CallOption) (*RowsAffectedResponse, error) // DelUnfinishedBlockProcessing deletes unfinished block processing DelUnfinishedBlockProcessing(ctx context.Context, in *DelUnfinishedBlockProcessingRequest, opts ...grpc.CallOption) (*RowsAffectedResponse, error) // VerifyMerkleRoots verifies the merkle roots existance in blocktx db and returns unverified block heights @@ -64,16 +61,6 @@ func (c *blockTxAPIClient) Health(ctx context.Context, in *emptypb.Empty, opts . return out, nil } -func (c *blockTxAPIClient) ClearTransactions(ctx context.Context, in *ClearData, opts ...grpc.CallOption) (*RowsAffectedResponse, error) { - cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) - out := new(RowsAffectedResponse) - err := c.cc.Invoke(ctx, BlockTxAPI_ClearTransactions_FullMethodName, in, out, cOpts...) 
- if err != nil { - return nil, err - } - return out, nil -} - func (c *blockTxAPIClient) ClearBlocks(ctx context.Context, in *ClearData, opts ...grpc.CallOption) (*RowsAffectedResponse, error) { cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) out := new(RowsAffectedResponse) @@ -84,10 +71,10 @@ func (c *blockTxAPIClient) ClearBlocks(ctx context.Context, in *ClearData, opts return out, nil } -func (c *blockTxAPIClient) ClearBlockTransactionsMap(ctx context.Context, in *ClearData, opts ...grpc.CallOption) (*RowsAffectedResponse, error) { +func (c *blockTxAPIClient) ClearRegisteredTransactions(ctx context.Context, in *ClearData, opts ...grpc.CallOption) (*RowsAffectedResponse, error) { cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...) out := new(RowsAffectedResponse) - err := c.cc.Invoke(ctx, BlockTxAPI_ClearBlockTransactionsMap_FullMethodName, in, out, cOpts...) + err := c.cc.Invoke(ctx, BlockTxAPI_ClearRegisteredTransactions_FullMethodName, in, out, cOpts...) if err != nil { return nil, err } @@ -120,12 +107,10 @@ func (c *blockTxAPIClient) VerifyMerkleRoots(ctx context.Context, in *MerkleRoot type BlockTxAPIServer interface { // Health returns the health of the API. Health(context.Context, *emptypb.Empty) (*HealthResponse, error) - // ClearTransactions clears transaction data - ClearTransactions(context.Context, *ClearData) (*RowsAffectedResponse, error) // ClearBlocks clears block data ClearBlocks(context.Context, *ClearData) (*RowsAffectedResponse, error) - // ClearBlockTransactionsMap clears block-transaction-map data - ClearBlockTransactionsMap(context.Context, *ClearData) (*RowsAffectedResponse, error) + // ClearRegisteredTransactions clears registered transactions + ClearRegisteredTransactions(context.Context, *ClearData) (*RowsAffectedResponse, error) // DelUnfinishedBlockProcessing deletes unfinished block processing DelUnfinishedBlockProcessing(context.Context, *DelUnfinishedBlockProcessingRequest) (*RowsAffectedResponse, error) // VerifyMerkleRoots verifies the merkle roots existance in blocktx db and returns unverified block heights @@ -143,14 +128,11 @@ type UnimplementedBlockTxAPIServer struct{} func (UnimplementedBlockTxAPIServer) Health(context.Context, *emptypb.Empty) (*HealthResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method Health not implemented") } -func (UnimplementedBlockTxAPIServer) ClearTransactions(context.Context, *ClearData) (*RowsAffectedResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method ClearTransactions not implemented") -} func (UnimplementedBlockTxAPIServer) ClearBlocks(context.Context, *ClearData) (*RowsAffectedResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method ClearBlocks not implemented") } -func (UnimplementedBlockTxAPIServer) ClearBlockTransactionsMap(context.Context, *ClearData) (*RowsAffectedResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method ClearBlockTransactionsMap not implemented") +func (UnimplementedBlockTxAPIServer) ClearRegisteredTransactions(context.Context, *ClearData) (*RowsAffectedResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ClearRegisteredTransactions not implemented") } func (UnimplementedBlockTxAPIServer) DelUnfinishedBlockProcessing(context.Context, *DelUnfinishedBlockProcessingRequest) (*RowsAffectedResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method DelUnfinishedBlockProcessing not implemented") @@ -197,24 +179,6 @@ func _BlockTxAPI_Health_Handler(srv 
interface{}, ctx context.Context, dec func(i return interceptor(ctx, in, info, handler) } -func _BlockTxAPI_ClearTransactions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(ClearData) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(BlockTxAPIServer).ClearTransactions(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: BlockTxAPI_ClearTransactions_FullMethodName, - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(BlockTxAPIServer).ClearTransactions(ctx, req.(*ClearData)) - } - return interceptor(ctx, in, info, handler) -} - func _BlockTxAPI_ClearBlocks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(ClearData) if err := dec(in); err != nil { @@ -233,20 +197,20 @@ func _BlockTxAPI_ClearBlocks_Handler(srv interface{}, ctx context.Context, dec f return interceptor(ctx, in, info, handler) } -func _BlockTxAPI_ClearBlockTransactionsMap_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { +func _BlockTxAPI_ClearRegisteredTransactions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(ClearData) if err := dec(in); err != nil { return nil, err } if interceptor == nil { - return srv.(BlockTxAPIServer).ClearBlockTransactionsMap(ctx, in) + return srv.(BlockTxAPIServer).ClearRegisteredTransactions(ctx, in) } info := &grpc.UnaryServerInfo{ Server: srv, - FullMethod: BlockTxAPI_ClearBlockTransactionsMap_FullMethodName, + FullMethod: BlockTxAPI_ClearRegisteredTransactions_FullMethodName, } handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(BlockTxAPIServer).ClearBlockTransactionsMap(ctx, req.(*ClearData)) + return srv.(BlockTxAPIServer).ClearRegisteredTransactions(ctx, req.(*ClearData)) } return interceptor(ctx, in, info, handler) } @@ -298,17 +262,13 @@ var BlockTxAPI_ServiceDesc = grpc.ServiceDesc{ MethodName: "Health", Handler: _BlockTxAPI_Health_Handler, }, - { - MethodName: "ClearTransactions", - Handler: _BlockTxAPI_ClearTransactions_Handler, - }, { MethodName: "ClearBlocks", Handler: _BlockTxAPI_ClearBlocks_Handler, }, { - MethodName: "ClearBlockTransactionsMap", - Handler: _BlockTxAPI_ClearBlockTransactionsMap_Handler, + MethodName: "ClearRegisteredTransactions", + Handler: _BlockTxAPI_ClearRegisteredTransactions_Handler, }, { MethodName: "DelUnfinishedBlockProcessing", diff --git a/internal/blocktx/client.go b/internal/blocktx/client.go index 7073947b4..1dc1b410e 100644 --- a/internal/blocktx/client.go +++ b/internal/blocktx/client.go @@ -17,9 +17,7 @@ var _ MerkleRootsVerifier = &Client{} type Watcher interface { Health(ctx context.Context) error - ClearTransactions(ctx context.Context, retentionDays int32) (int64, error) ClearBlocks(ctx context.Context, retentionDays int32) (int64, error) - ClearBlockTransactionsMap(ctx context.Context, retentionDays int32) (int64, error) DelUnfinishedBlockProcessing(ctx context.Context, processedBy string) (int64, error) } @@ -63,14 +61,6 @@ func (btc *Client) DelUnfinishedBlockProcessing(ctx context.Context, processedBy return resp.Rows, nil } -func (btc *Client) ClearTransactions(ctx context.Context, retentionDays int32) 
(int64, error) { - resp, err := btc.client.ClearTransactions(ctx, &blocktx_api.ClearData{RetentionDays: retentionDays}) - if err != nil { - return 0, err - } - return resp.Rows, nil -} - func (btc *Client) ClearBlocks(ctx context.Context, retentionDays int32) (int64, error) { resp, err := btc.client.ClearBlocks(ctx, &blocktx_api.ClearData{RetentionDays: retentionDays}) if err != nil { @@ -79,14 +69,6 @@ func (btc *Client) ClearBlocks(ctx context.Context, retentionDays int32) (int64, return resp.Rows, nil } -func (btc *Client) ClearBlockTransactionsMap(ctx context.Context, retentionDays int32) (int64, error) { - resp, err := btc.client.ClearBlockTransactionsMap(ctx, &blocktx_api.ClearData{RetentionDays: retentionDays}) - if err != nil { - return 0, err - } - return resp.Rows, nil -} - func (btc *Client) VerifyMerkleRoots(ctx context.Context, merkleRootVerificationRequest []MerkleRootVerificationRequest) ([]uint64, error) { merkleRoots := make([]*blocktx_api.MerkleRootVerificationRequest, 0) diff --git a/internal/blocktx/client_test.go b/internal/blocktx/client_test.go index cc6f44777..9ed51f76f 100644 --- a/internal/blocktx/client_test.go +++ b/internal/blocktx/client_test.go @@ -3,13 +3,15 @@ package blocktx_test import ( "context" "errors" - "github.com/bitcoin-sv/arc/internal/blocktx" "testing" - "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" - "github.com/bitcoin-sv/arc/internal/blocktx/mocks" + "github.com/bitcoin-sv/arc/internal/blocktx" + "github.com/stretchr/testify/require" "google.golang.org/grpc" + + "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" + "github.com/bitcoin-sv/arc/internal/blocktx/mocks" ) func TestClient_DelUnfinishedBlockProcessing(t *testing.T) { @@ -89,84 +91,6 @@ func TestClient_ClearBlocks(t *testing.T) { } } -func TestClient_ClearTransactions(t *testing.T) { - tt := []struct { - name string - clearErr error - - expectedErrorStr string - }{ - { - name: "success", - }, - { - name: "err", - clearErr: errors.New("failed to clear data"), - - expectedErrorStr: "failed to clear data", - }, - } - - for _, tc := range tt { - t.Run(tc.name, func(t *testing.T) { - apiClient := &mocks.BlockTxAPIClientMock{ - ClearTransactionsFunc: func(_ context.Context, _ *blocktx_api.ClearData, _ ...grpc.CallOption) (*blocktx_api.RowsAffectedResponse, error) { - return &blocktx_api.RowsAffectedResponse{Rows: 5}, tc.clearErr - }, - } - client := blocktx.NewClient(apiClient) - - res, err := client.ClearTransactions(context.Background(), 1) - if tc.expectedErrorStr != "" { - require.ErrorContains(t, err, tc.expectedErrorStr) - return - } - - require.NoError(t, err) - require.Equal(t, int64(5), res) - }) - } -} - -func TestClient_ClearBlockTransactionsMap(t *testing.T) { - tt := []struct { - name string - clearErr error - - expectedErrorStr string - }{ - { - name: "success", - }, - { - name: "err", - clearErr: errors.New("failed to clear data"), - - expectedErrorStr: "failed to clear data", - }, - } - - for _, tc := range tt { - t.Run(tc.name, func(t *testing.T) { - apiClient := &mocks.BlockTxAPIClientMock{ - ClearBlockTransactionsMapFunc: func(_ context.Context, _ *blocktx_api.ClearData, _ ...grpc.CallOption) (*blocktx_api.RowsAffectedResponse, error) { - return &blocktx_api.RowsAffectedResponse{Rows: 5}, tc.clearErr - }, - } - client := blocktx.NewClient(apiClient) - - res, err := client.ClearBlockTransactionsMap(context.Background(), 1) - if tc.expectedErrorStr != "" { - require.ErrorContains(t, err, tc.expectedErrorStr) - return - } - - require.NoError(t, err) - 
require.Equal(t, int64(5), res) - }) - } -} - func TestClient_VerifyMerkleRoots(t *testing.T) { tt := []struct { name string diff --git a/internal/blocktx/integration_test/fixtures/reorg/blocktx.transactions.yaml b/internal/blocktx/integration_test/fixtures/merkle_paths/blocktx.block_transactions.yaml similarity index 51% rename from internal/blocktx/integration_test/fixtures/reorg/blocktx.transactions.yaml rename to internal/blocktx/integration_test/fixtures/merkle_paths/blocktx.block_transactions.yaml index 0b42d6d15..4593920a0 100644 --- a/internal/blocktx/integration_test/fixtures/reorg/blocktx.transactions.yaml +++ b/internal/blocktx/integration_test/fixtures/merkle_paths/blocktx.block_transactions.yaml @@ -1,20 +1,15 @@ -- id: 1000001 +- block_id: 1001 hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853 - inserted_at: 2023-12-10 14:00:00 - is_registered: true -- id: 1000002 + merkle_tree_index: 0 +- block_id: 1001 hash: 0x21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e - inserted_at: 2023-12-10 14:00:00 - is_registered: false -- id: 1000003 + merkle_tree_index: 1 +- block_id: 1001 hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430 - inserted_at: 2023-12-10 14:00:00 - is_registered: true -- id: 1000004 + merkle_tree_index: 3 +- block_id: 1001 hash: 0x2ff4430eb883c6f6c0640a5d716b2d107bbc0efa5aeaa237aec796d4686b0a8f - inserted_at: 2023-12-10 14:00:00 - is_registered: true -- id: 1000005 + merkle_tree_index: 4 +- block_id: 1001 hash: 0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6 - inserted_at: 2023-12-10 14:00:00 - is_registered: true + merkle_tree_index: 5 diff --git a/internal/blocktx/integration_test/fixtures/merkle_paths/blocktx.blocks.yaml b/internal/blocktx/integration_test/fixtures/merkle_paths/blocktx.blocks.yaml new file mode 100644 index 000000000..961eb95ab --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/merkle_paths/blocktx.blocks.yaml @@ -0,0 +1,12 @@ +- inserted_at: 2023-12-15 14:00:00 + id: 1001 + hash: 0xf97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000 + prevhash: 0xb71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000 + merkleroot: 0x7f4019eb006f5333cce752df387fa8443035c22291eb771ee5b16a02b81c8483 + height: 822014 + processed_at: 2023-12-15 14:10:00 + size: 86840000 + tx_count: 23477 + status: 10 + is_longest: true + chainwork: '62209952899966' diff --git a/internal/blocktx/integration_test/fixtures/reorg/blocktx.block_transactions.yaml b/internal/blocktx/integration_test/fixtures/reorg/blocktx.block_transactions.yaml new file mode 100644 index 000000000..cd25464fb --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/reorg/blocktx.block_transactions.yaml @@ -0,0 +1,12 @@ +- block_id: 1002 + hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430 + merkle_tree_index: 0 +- block_id: 1999 # the same tx also in stale block + hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430 + merkle_tree_index: 0 +- block_id: 1999 # the same tx also in stale block + hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853 + merkle_tree_index: 1 +- block_id: 1004 + hash: 0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6 + merkle_tree_index: 0 diff --git a/internal/blocktx/integration_test/fixtures/reorg/blocktx.block_transactions_map.yaml b/internal/blocktx/integration_test/fixtures/reorg/blocktx.block_transactions_map.yaml deleted file mode 100644 index 54294aa74..000000000 --- 
a/internal/blocktx/integration_test/fixtures/reorg/blocktx.block_transactions_map.yaml +++ /dev/null @@ -1,16 +0,0 @@ -- blockid: 1002 - txid: 1000003 - merkle_path: merkle-path-3 - inserted_at: 2023-12-10 14:00:00 -- blockid: 1999 # the same tx also in stale block - txid: 1000003 - merkle_path: merkle-path-999 - inserted_at: 2023-12-10 14:00:00 -- blockid: 1999 # the same tx also in stale block - txid: 1000001 - merkle_path: merkle-path-999 - inserted_at: 2023-12-10 14:00:00 -- blockid: 1004 - txid: 1000005 - merkle_path: merkle-path-5 - inserted_at: 2023-12-10 14:00:00 diff --git a/internal/blocktx/integration_test/fixtures/reorg/blocktx.blocks.yaml b/internal/blocktx/integration_test/fixtures/reorg/blocktx.blocks.yaml index 58b3fddeb..af089a49b 100644 --- a/internal/blocktx/integration_test/fixtures/reorg/blocktx.blocks.yaml +++ b/internal/blocktx/integration_test/fixtures/reorg/blocktx.blocks.yaml @@ -1,6 +1,6 @@ - inserted_at: 2023-12-15 14:00:00 id: 1001 - hash: 0xf97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000 + hash: 0x67708796ef57464ed9eaf2a663d3da32372e4c2fb65558020000000000000000 prevhash: 0xb71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000 merkleroot: 0x7f4019eb006f5333cce752df387fa8443035c22291eb771ee5b16a02b81c8483 height: 822014 @@ -22,18 +22,6 @@ status: 10 is_longest: true chainwork: '62209952899966' -- inserted_at: 2023-12-15 14:30:00 - id: 1999 - hash: 0x82471bbf045ab13825a245b37de71d77ec12513b37e2524ec11551d18c19f7c3 - prevhash: 0x67708796ef57464ed9eaf2a663d3da32372e4c2fb65558020000000000000000 - merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 - height: 822015 - processed_at: 2023-12-15 14:30:00 - size: 20160000 - tx_count: 6523 - status: 20 # STALE - competing block - is_longest: false - chainwork: '62209952899966' - inserted_at: 2023-12-15 14:40:00 id: 1003 hash: 0xe1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000 @@ -58,6 +46,23 @@ status: 10 is_longest: true chainwork: '62209952899966' + +# Stale +- inserted_at: 2023-12-15 14:30:00 + id: 1999 + hash: 0x82471bbf045ab13825a245b37de71d77ec12513b37e2524ec11551d18c19f7c3 + prevhash: 0x67708796ef57464ed9eaf2a663d3da32372e4c2fb65558020000000000000000 + merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257 + height: 822015 + processed_at: 2023-12-15 14:30:00 + size: 20160000 + tx_count: 6523 + status: 20 # STALE - competing block + is_longest: false + chainwork: '62209952899966' + + + - inserted_at: 2023-12-15 14:50:00 id: 10052 hash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde diff --git a/internal/blocktx/integration_test/fixtures/reorg/blocktx.registered_transactions.yaml b/internal/blocktx/integration_test/fixtures/reorg/blocktx.registered_transactions.yaml new file mode 100644 index 000000000..2dab66118 --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/reorg/blocktx.registered_transactions.yaml @@ -0,0 +1,8 @@ +- hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853 + inserted_at: 2023-12-15 14:00:00 +- hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430 + inserted_at: 2023-12-15 14:00:00 +- hash: 0x2ff4430eb883c6f6c0640a5d716b2d107bbc0efa5aeaa237aec796d4686b0a8f + inserted_at: 2023-12-15 14:00:00 +- hash: 0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6 + inserted_at: 2023-12-15 14:00:00 diff --git a/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.block_transactions.yaml 
b/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.block_transactions.yaml new file mode 100644 index 000000000..0d010ab7f --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.block_transactions.yaml @@ -0,0 +1,15 @@ +- block_id: 1002 + hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853 + merkle_tree_index: 0 +- block_id: 1002 + hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430 + merkle_tree_index: 1 +- block_id: 1004 + hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430 + merkle_tree_index: 0 +- block_id: 1003 + hash: 0x2ff4430eb883c6f6c0640a5d716b2d107bbc0efa5aeaa237aec796d4686b0a8f + merkle_tree_index: 0 +- block_id: 1006 + hash: 0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6 + merkle_tree_index: 0 diff --git a/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.block_transactions_map.yaml b/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.block_transactions_map.yaml deleted file mode 100644 index e02912cfd..000000000 --- a/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.block_transactions_map.yaml +++ /dev/null @@ -1,20 +0,0 @@ -- blockid: 1002 - txid: 1000001 - merkle_path: merkle-path-1 - inserted_at: 2023-12-10 14:00:00 -- blockid: 1002 - txid: 1000003 - merkle_path: merkle-path-3 - inserted_at: 2023-12-10 14:00:00 -- blockid: 1004 - txid: 1000003 - merkle_path: merkle-path-3-competing - inserted_at: 2023-12-10 14:00:00 -- blockid: 1003 - txid: 1000004 - merkle_path: merkle-path-4 - inserted_at: 2023-12-10 14:00:00 -- blockid: 1006 - txid: 1000005 - merkle_path: merkle-path-5 - inserted_at: 2023-12-10 14:00:00 diff --git a/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.registered_transactions.yaml b/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.registered_transactions.yaml new file mode 100644 index 000000000..2dab66118 --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.registered_transactions.yaml @@ -0,0 +1,8 @@ +- hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853 + inserted_at: 2023-12-15 14:00:00 +- hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430 + inserted_at: 2023-12-15 14:00:00 +- hash: 0x2ff4430eb883c6f6c0640a5d716b2d107bbc0efa5aeaa237aec796d4686b0a8f + inserted_at: 2023-12-15 14:00:00 +- hash: 0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6 + inserted_at: 2023-12-15 14:00:00 diff --git a/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.transactions.yaml b/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.transactions.yaml deleted file mode 100644 index 0b42d6d15..000000000 --- a/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.transactions.yaml +++ /dev/null @@ -1,20 +0,0 @@ -- id: 1000001 - hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853 - inserted_at: 2023-12-10 14:00:00 - is_registered: true -- id: 1000002 - hash: 0x21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e - inserted_at: 2023-12-10 14:00:00 - is_registered: false -- id: 1000003 - hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430 - inserted_at: 2023-12-10 14:00:00 - is_registered: true -- id: 1000004 - hash: 0x2ff4430eb883c6f6c0640a5d716b2d107bbc0efa5aeaa237aec796d4686b0a8f - inserted_at: 2023-12-10 14:00:00 - is_registered: true -- id: 1000005 - hash: 
0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6 - inserted_at: 2023-12-10 14:00:00 - is_registered: true diff --git a/internal/blocktx/integration_test/fixtures/stale_block/blocktx.block_transactions.yaml b/internal/blocktx/integration_test/fixtures/stale_block/blocktx.block_transactions.yaml new file mode 100644 index 000000000..9e7ae0ee0 --- /dev/null +++ b/internal/blocktx/integration_test/fixtures/stale_block/blocktx.block_transactions.yaml @@ -0,0 +1,3 @@ +- block_id: 1002 + hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430 + merkle_tree_index: 3 diff --git a/internal/blocktx/integration_test/fixtures/stale_block/blocktx.block_transactions_map.yaml b/internal/blocktx/integration_test/fixtures/stale_block/blocktx.block_transactions_map.yaml deleted file mode 100644 index 317aaf634..000000000 --- a/internal/blocktx/integration_test/fixtures/stale_block/blocktx.block_transactions_map.yaml +++ /dev/null @@ -1,4 +0,0 @@ -- blockid: 1002 - txid: 1000003 - merkle_path: merkle-path-3 - inserted_at: 2023-12-10 14:00:00 diff --git a/internal/blocktx/integration_test/fixtures/stale_block/blocktx.transactions.yaml b/internal/blocktx/integration_test/fixtures/stale_block/blocktx.transactions.yaml deleted file mode 100644 index 423b9b5b2..000000000 --- a/internal/blocktx/integration_test/fixtures/stale_block/blocktx.transactions.yaml +++ /dev/null @@ -1,12 +0,0 @@ -- id: 1000001 - hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853 - inserted_at: 2023-12-10 14:00:00 - is_registered: true -- id: 1000002 - hash: 0x21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e - inserted_at: 2023-12-10 14:00:00 - is_registered: false -- id: 1000003 - hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430 - inserted_at: 2023-12-10 14:00:00 - is_registered: true diff --git a/internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.block_transactions_map.yaml b/internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.block_transactions_map.yaml deleted file mode 100644 index 5ed5b4e2e..000000000 --- a/internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.block_transactions_map.yaml +++ /dev/null @@ -1,8 +0,0 @@ -- blockid: 1002 - txid: 1000003 - merkle_path: merkle-path-3 - inserted_at: 2023-12-10 14:00:00 -- blockid: 1005 - txid: 1000005 - merkle_path: merkle-path-5 - inserted_at: 2023-12-10 14:00:00 diff --git a/internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.blocks.yaml b/internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.blocks.yaml index f3021dd39..5ac837362 100644 --- a/internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.blocks.yaml +++ b/internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.blocks.yaml @@ -34,6 +34,7 @@ status: 10 is_longest: true chainwork: '12301577519373468' # Higher chainwork +# Competing - inserted_at: 2023-12-15 14:50:00 id: 1005 hash: 0x76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000 @@ -46,6 +47,7 @@ status: 20 # STALE is_longest: false chainwork: '62209952899966' +# Orphans - inserted_at: 2023-12-15 14:50:00 id: 1006 hash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde diff --git a/internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.transactions.yaml b/internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.transactions.yaml deleted file mode 100644 index 9989bb806..000000000 --- 
a/internal/blocktx/integration_test/fixtures/stale_orphans/blocktx.transactions.yaml +++ /dev/null @@ -1,12 +0,0 @@ -- id: 1000001 - hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853 - inserted_at: 2023-12-10 14:00:00 - is_registered: true -- id: 1000003 - hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430 - inserted_at: 2023-12-10 14:00:00 - is_registered: true -- id: 1000005 - hash: 0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6 - inserted_at: 2023-12-10 14:00:00 - is_registered: true diff --git a/internal/blocktx/integration_test/helpers.go b/internal/blocktx/integration_test/helpers.go index 079a2d53d..b00fe0e7c 100644 --- a/internal/blocktx/integration_test/helpers.go +++ b/internal/blocktx/integration_test/helpers.go @@ -7,24 +7,26 @@ import ( "os" "testing" + "github.com/libsv/go-p2p" + "github.com/stretchr/testify/require" + "google.golang.org/protobuf/proto" + "github.com/bitcoin-sv/arc/internal/blocktx" "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" "github.com/bitcoin-sv/arc/internal/blocktx/store/postgresql" "github.com/bitcoin-sv/arc/internal/message_queue/nats/client/nats_core" nats_mock "github.com/bitcoin-sv/arc/internal/message_queue/nats/client/nats_core/mocks" testutils "github.com/bitcoin-sv/arc/internal/test_utils" - "github.com/libsv/go-p2p" - "github.com/stretchr/testify/require" - "google.golang.org/protobuf/proto" ) -func setupSut(t *testing.T, dbInfo string) (*blocktx.Processor, *blocktx.PeerHandler, *postgresql.PostgreSQL, chan *blocktx_api.TransactionBlock) { +func setupSut(t *testing.T, dbInfo string) (*blocktx.Processor, *blocktx.PeerHandler, *postgresql.PostgreSQL, chan []byte, chan *blocktx_api.TransactionBlock) { t.Helper() logger := slog.New(slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{Level: slog.LevelDebug})) blockProcessCh := make(chan *p2p.BlockMessage, 10) + requestTxChannel := make(chan []byte, 10) publishedTxsCh := make(chan *blocktx_api.TransactionBlock, 10) store, err := postgresql.New(dbInfo, 10, 80) @@ -42,6 +44,17 @@ func setupSut(t *testing.T, dbInfo string) (*blocktx.Processor, *blocktx.PeerHan } mqClient := nats_core.New(mockNatsConn, nats_core.WithLogger(logger)) + //mqClient := &mocks.MessageQueueClientMock{ + // PublishMarshalFunc: func(ctx context.Context, topic string, m proto.Message) error { + // serialized := &blocktx_api.TransactionBlock{} + // err := proto.Unmarshal(m, serialized) + // require.NoError(t, err) + // + // publishedTxsCh <- m + // return nil + // }, + //} + p2pMsgHandler := blocktx.NewPeerHandler(logger, nil, blockProcessCh) processor, err := blocktx.NewProcessor( logger, @@ -49,10 +62,12 @@ func setupSut(t *testing.T, dbInfo string) (*blocktx.Processor, *blocktx.PeerHan nil, blockProcessCh, blocktx.WithMessageQueueClient(mqClient), + blocktx.WithRequestTxChan(requestTxChannel), + blocktx.WithRegisterRequestTxsBatchSize(1), // process transaction immediately ) require.NoError(t, err) - return processor, p2pMsgHandler, store, publishedTxsCh + return processor, p2pMsgHandler, store, requestTxChannel, publishedTxsCh } func getPublishedTxs(publishedTxsCh chan *blocktx_api.TransactionBlock) []*blocktx_api.TransactionBlock { @@ -70,9 +85,11 @@ func getPublishedTxs(publishedTxsCh chan *blocktx_api.TransactionBlock) []*block func pruneTables(t *testing.T, db *sql.DB) { t.Helper() - testutils.PruneTables(t, db, "blocktx.blocks") - testutils.PruneTables(t, db, "blocktx.transactions") - testutils.PruneTables(t, db, "blocktx.block_transactions_map") 
+ + _, err := db.Exec("DELETE FROM blocktx.blocks WHERE hash IS NOT NULL") + if err != nil { + t.Fatal(err) + } } func verifyBlock(t *testing.T, store *postgresql.PostgreSQL, hashStr string, height uint64, status blocktx_api.Status) { diff --git a/internal/blocktx/integration_test/merkle_paths_test.go b/internal/blocktx/integration_test/merkle_paths_test.go new file mode 100644 index 000000000..778920279 --- /dev/null +++ b/internal/blocktx/integration_test/merkle_paths_test.go @@ -0,0 +1,39 @@ +package integrationtest + +import ( + "testing" + "time" + + testutils "github.com/bitcoin-sv/arc/internal/test_utils" + "github.com/stretchr/testify/require" +) + +func TestMerklePaths(t *testing.T) { + if testing.Short() { + t.Skip("skipping integration test") + } + + t.Run("request unregistered tx", func(t *testing.T) { + // given + defer pruneTables(t, dbConn) + testutils.LoadFixtures(t, dbConn, "fixtures/merkle_paths") + + processor, _, _, requestTxChannel, publishedTxsCh := setupSut(t, dbInfo) + + txWithoutMerklePath := testutils.RevChainhash(t, "cd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853") + expectedMerklePath := "fefe8a0c0003020002cd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853010021132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e010100f883cc2d3bb5d4485accaa3502cf834934420616d8556b204da5658456b48b21010100e2277e52528e1a5e6117e45300e3f5f169b1712292399d065bc5167c54b8e0b5" + + // when + requestTxChannel <- txWithoutMerklePath[:] + processor.StartProcessRequestTxs() + + // give blocktx time to pull all transactions from block and calculate the merkle path + time.Sleep(200 * time.Millisecond) + + // then + publishedTxs := getPublishedTxs(publishedTxsCh) + tx := publishedTxs[0] + require.Equal(t, txWithoutMerklePath[:], tx.GetTransactionHash()) + require.Equal(t, expectedMerklePath, tx.GetMerklePath()) + }) +} diff --git a/internal/blocktx/integration_test/reorg_integration_test.go b/internal/blocktx/integration_test/reorg_integration_test.go index f347ec26c..0df04bb08 100644 --- a/internal/blocktx/integration_test/reorg_integration_test.go +++ b/internal/blocktx/integration_test/reorg_integration_test.go @@ -10,7 +10,7 @@ package integrationtest // 1. Blocks at heights 822014-822017 (LONGEST), 822018-822020 (ORPHANED) and 822022-822023 (ORPHANED) are added to db from fixtures // 2. A hardcoded msg with competing block at height 822015 is being sent through the mocked PeerHandler // 3. This block has a chainwork lower than the current tip of chain - becomes STALE -// 4. Registered transactions from this block are ignored +// 4. Registered transactions from this block that are not in the longest chain are published with blockstatus = STALE // 5. Next competing block, at height 822016 is being sent through the mocked PeerHandler // 6. This block has a greater chainwork than the current tip of longest chain - it becomes LONGEST despite not being the highest // 7. Verification of reorg - checking if statuses are correctly switched @@ -29,71 +29,23 @@ package integrationtest // 15. 
Verification of reorg - checking if statuses are correctly switched (for blocks and for transactions) import ( - "database/sql" - "log" - "os" "testing" "time" "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" - testutils "github.com/bitcoin-sv/arc/internal/test_utils" _ "github.com/golang-migrate/migrate/v4/source/file" _ "github.com/lib/pq" "github.com/libsv/go-bc" "github.com/libsv/go-p2p" "github.com/libsv/go-p2p/chaincfg/chainhash" "github.com/libsv/go-p2p/wire" - "github.com/ory/dockertest/v3" "github.com/stretchr/testify/require" -) - -const migrationsPath = "file://../store/postgresql/migrations" -var ( - dbInfo string - dbConn *sql.DB + testutils "github.com/bitcoin-sv/arc/internal/test_utils" ) -func TestMain(m *testing.M) { - os.Exit(testmain(m)) -} - -func testmain(m *testing.M) int { - pool, err := dockertest.NewPool("") - if err != nil { - log.Fatalf("failed to create pool: %v", err) - return 1 - } - - port := "5437" - resource, connStr, err := testutils.RunAndMigratePostgresql(pool, port, "blocktx", migrationsPath) - if err != nil { - log.Print(err) - return 1 - } - defer func() { - err = pool.Purge(resource) - if err != nil { - log.Fatalf("failed to purge pool: %v", err) - } - }() - - dbInfo = connStr - - dbConn, err = sql.Open("postgres", dbInfo) - if err != nil { - log.Fatalf("failed to create db connection: %v", err) - return 1 - } - - return m.Run() -} - func TestReorg(t *testing.T) { - // TODO: remove the skip when gaps are filling quickly again - t.Skip("Skipping until gaps are being processed quickly again") - if testing.Short() { t.Skip("skipping integration test") } @@ -101,11 +53,11 @@ func TestReorg(t *testing.T) { t.Run("block on empty database", func(t *testing.T) { defer pruneTables(t, dbConn) - processor, p2pMsgHandler, store, _ := setupSut(t, dbInfo) + processor, p2pMsgHandler, store, _, _ := setupSut(t, dbInfo) const blockHash822011 = "bf9be09b345cc2d904b59951cc8a2ed452d8d143e2e25cde64058270fb3a667a" - //blockHash := testutils.RevChainhash(t, blockHash822011) + // blockHash := testutils.RevChainhash(t, blockHash822011) prevBlockHash := testutils.RevChainhash(t, "00000000000000000a00c377b260a3219b0c314763f486bc363df7aa7e22ad72") txHash, err := chainhash.NewHashFromStr("be181e91217d5f802f695e52144078f8dfbe51b8a815c3d6fb48c0d853ec683b") require.NoError(t, err) @@ -114,7 +66,7 @@ func TestReorg(t *testing.T) { // should become LONGEST blockMessage := &p2p.BlockMessage{ - //Hash: blockHash, + // Hash: blockHash, Header: &wire.BlockHeader{ Version: 541065216, PrevBlock: *prevBlockHash, // NON-existent in the db @@ -139,7 +91,7 @@ func TestReorg(t *testing.T) { defer pruneTables(t, dbConn) testutils.LoadFixtures(t, dbConn, "fixtures/stale_block") - processor, p2pMsgHandler, store, publishedTxsCh := setupSut(t, dbInfo) + processor, p2pMsgHandler, store, _, publishedTxsCh := setupSut(t, dbInfo) const ( blockHash822014StartOfChain = "f97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000" @@ -149,7 +101,7 @@ func TestReorg(t *testing.T) { txhash822015Competing = "b16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430" ) - // blockHash := testutils.RevChainhash(t, blockHash822015Fork) + blockHash := testutils.RevChainhash(t, blockHash822015Fork) prevBlockHash := testutils.RevChainhash(t, blockHash822014StartOfChain) txHash := testutils.RevChainhash(t, txhash822015) txHash2 := testutils.RevChainhash(t, txhash822015Competing) // should not be published - is already in the longest chain @@ -158,7 +110,7 @@ func TestReorg(t *testing.T) { // 
should become STALE blockMessage := &p2p.BlockMessage{ - //Hash: blockHash, + // Hash: blockHash, Header: &wire.BlockHeader{ Version: 541065216, PrevBlock: *prevBlockHash, // block with status LONGEST at height 822014 @@ -179,23 +131,31 @@ func TestReorg(t *testing.T) { verifyBlock(t, store, blockHash822015Fork, 822015, blocktx_api.Status_STALE) verifyBlock(t, store, blockHash822015, 822015, blocktx_api.Status_LONGEST) + expectedTxs := []*blocktx_api.TransactionBlock{ + { + BlockHash: blockHash[:], + BlockHeight: 822015, + TransactionHash: testutils.RevChainhash(t, txhash822015)[:], + BlockStatus: blocktx_api.Status_STALE, + }, + } + publishedTxs := getPublishedTxs(publishedTxsCh) - // verify the no transaction was published to metamorph - require.Len(t, publishedTxs, 0) + verifyTxs(t, expectedTxs, publishedTxs) }) t.Run("reorg", func(t *testing.T) { defer pruneTables(t, dbConn) testutils.LoadFixtures(t, dbConn, "fixtures/reorg") - processor, p2pMsgHandler, store, publishedTxsCh := setupSut(t, dbInfo) + processor, p2pMsgHandler, store, _, publishedTxsCh := setupSut(t, dbInfo) const ( blockHash822015Fork = "82471bbf045ab13825a245b37de71d77ec12513b37e2524ec11551d18c19f7c3" blockHash822016Fork = "032c3688bc7536b2d787f3a196b1145a09bf33183cd1448ff6b1a9dfbb022db8" - blockHash822014StartOfChain = "f97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000" + blockHash822014StartOfChain = "67708796ef57464ed9eaf2a663d3da32372e4c2fb65558020000000000000000" blockHash822015 = "c9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000" blockHash822016 = "e1df1273e6e7270f96b508545d7aa80aebda7d758dc82e080000000000000000" blockHash822017 = "76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000" @@ -217,7 +177,7 @@ func TestReorg(t *testing.T) { // should become LONGEST // reorg should happen blockMessage := &p2p.BlockMessage{ - //Hash: blockHash, + // Hash: blockHash, Header: &wire.BlockHeader{ Version: 541065216, PrevBlock: *prevhash, // block with status STALE at height 822015 @@ -284,7 +244,7 @@ func TestReorg(t *testing.T) { defer pruneTables(t, dbConn) testutils.LoadFixtures(t, dbConn, "fixtures/stale_orphans") - processor, p2pMsgHandler, store, publishedTxsCh := setupSut(t, dbInfo) + processor, p2pMsgHandler, store, _, publishedTxsCh := setupSut(t, dbInfo) const ( blockHash822017Longest = "00000000000000000643d48201cf609b8cc50befe804194f19a7ec61cf046239" @@ -295,16 +255,18 @@ func TestReorg(t *testing.T) { blockHash822021 = "d46bf0a189927b62c8ff785d393a545093ca01af159aed771a8d94749f06c060" blockHash822022Orphan = "0000000000000000059d6add76e3ddb8ec4f5ffd6efecd4c8b8c577bd32aed6c" blockHash822023Orphan = "0000000000000000082131979a4e25a5101912a5f8461e18f306d23e158161cd" + + txhash822019 = "71fbb8fb5c0f978e3c221bc6ac235587f3c26fa10e231b54fce972d4a5c30e5e" ) - //blockHash := testutils.RevChainhash(t, blockHash822021) + // blockHash := testutils.RevChainhash(t, blockHash822021) txHash := testutils.RevChainhash(t, "de0753d9ce6f92e340843cbfdd11e58beff8c578956ecdec4c461b018a26b8a9") merkleRoot := testutils.RevChainhash(t, "de0753d9ce6f92e340843cbfdd11e58beff8c578956ecdec4c461b018a26b8a9") prevhash := testutils.RevChainhash(t, blockHash822020Orphan) // should become STALE blockMessage := &p2p.BlockMessage{ - //Hash: blockHash, + // Hash: blockHash, Header: &wire.BlockHeader{ Version: 541065216, PrevBlock: *prevhash, // block with status ORPHANED at height 822020 - connected to STALE chain @@ -336,17 +298,26 @@ func TestReorg(t *testing.T) { verifyBlock(t, store, blockHash822022Orphan, 
822022, blocktx_api.Status_ORPHANED) verifyBlock(t, store, blockHash822023Orphan, 822023, blocktx_api.Status_ORPHANED) + bh := testutils.RevChainhash(t, blockHash822019Orphan) + expectedTxs := []*blocktx_api.TransactionBlock{ + { + BlockHash: bh[:], + BlockHeight: 822019, + TransactionHash: testutils.RevChainhash(t, txhash822019)[:], + BlockStatus: blocktx_api.Status_STALE, + }, + } + publishedTxs := getPublishedTxs(publishedTxsCh) - // verify no transaction was published - require.Len(t, publishedTxs, 0) + verifyTxs(t, expectedTxs, publishedTxs) }) t.Run("reorg orphans", func(t *testing.T) { defer pruneTables(t, dbConn) testutils.LoadFixtures(t, dbConn, "fixtures/reorg_orphans") - processor, p2pMsgHandler, store, publishedTxsCh := setupSut(t, dbInfo) + processor, p2pMsgHandler, store, _, publishedTxsCh := setupSut(t, dbInfo) const ( blockHash822014StartOfChain = "f97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000" @@ -370,7 +341,7 @@ func TestReorg(t *testing.T) { txhash822017 = "ece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6" ) - //blockHash := testutils.RevChainhash(t, blockHash822021) + // blockHash := testutils.RevChainhash(t, blockHash822021) prevhash := testutils.RevChainhash(t, blockHash822020Orphan) txHash := testutils.RevChainhash(t, "3e15f823a7de25c26ce9001d4814a6f0ebc915a1ca4f1ba9cfac720bd941c39c") merkleRoot := testutils.RevChainhash(t, "3e15f823a7de25c26ce9001d4814a6f0ebc915a1ca4f1ba9cfac720bd941c39c") @@ -378,7 +349,7 @@ func TestReorg(t *testing.T) { // should become LONGEST // reorg should happen blockMessage := &p2p.BlockMessage{ - //Hash: blockHash, + // Hash: blockHash, Header: &wire.BlockHeader{ Version: 541065216, PrevBlock: *prevhash, // block with status ORPHANED at height 822020 - connected to STALE chain diff --git a/internal/blocktx/integration_test/setup_test.go b/internal/blocktx/integration_test/setup_test.go new file mode 100644 index 000000000..22787873d --- /dev/null +++ b/internal/blocktx/integration_test/setup_test.go @@ -0,0 +1,55 @@ +package integrationtest + +import ( + "database/sql" + "log" + "os" + "testing" + + testutils "github.com/bitcoin-sv/arc/internal/test_utils" + _ "github.com/golang-migrate/migrate/v4/source/file" + _ "github.com/lib/pq" + "github.com/ory/dockertest/v3" +) + +const migrationsPath = "file://../store/postgresql/migrations" + +var ( + dbInfo string + dbConn *sql.DB +) + +func TestMain(m *testing.M) { + os.Exit(testmain(m)) +} + +func testmain(m *testing.M) int { + pool, err := dockertest.NewPool("") + if err != nil { + log.Fatalf("failed to create pool: %v", err) + return 1 + } + + port := "5437" + resource, connStr, err := testutils.RunAndMigratePostgresql(pool, port, "blocktx", migrationsPath) + if err != nil { + log.Print(err) + return 1 + } + defer func() { + err = pool.Purge(resource) + if err != nil { + log.Fatalf("failed to purge pool: %v", err) + } + }() + + dbInfo = connStr + + dbConn, err = sql.Open("postgres", dbInfo) + if err != nil { + log.Fatalf("failed to create db connection: %v", err) + return 1 + } + + return m.Run() +} diff --git a/internal/blocktx/mocks/blocktx_api_mock.go b/internal/blocktx/mocks/blocktx_api_mock.go index be9c6247e..3bc561e9b 100644 --- a/internal/blocktx/mocks/blocktx_api_mock.go +++ b/internal/blocktx/mocks/blocktx_api_mock.go @@ -21,14 +21,11 @@ var _ blocktx_api.BlockTxAPIClient = &BlockTxAPIClientMock{} // // // make and configure a mocked blocktx_api.BlockTxAPIClient // mockedBlockTxAPIClient := &BlockTxAPIClientMock{ -// ClearBlockTransactionsMapFunc: 
func(ctx context.Context, in *blocktx_api.ClearData, opts ...grpc.CallOption) (*blocktx_api.RowsAffectedResponse, error) { -// panic("mock out the ClearBlockTransactionsMap method") -// }, // ClearBlocksFunc: func(ctx context.Context, in *blocktx_api.ClearData, opts ...grpc.CallOption) (*blocktx_api.RowsAffectedResponse, error) { // panic("mock out the ClearBlocks method") // }, -// ClearTransactionsFunc: func(ctx context.Context, in *blocktx_api.ClearData, opts ...grpc.CallOption) (*blocktx_api.RowsAffectedResponse, error) { -// panic("mock out the ClearTransactions method") +// ClearRegisteredTransactionsFunc: func(ctx context.Context, in *blocktx_api.ClearData, opts ...grpc.CallOption) (*blocktx_api.RowsAffectedResponse, error) { +// panic("mock out the ClearRegisteredTransactions method") // }, // DelUnfinishedBlockProcessingFunc: func(ctx context.Context, in *blocktx_api.DelUnfinishedBlockProcessingRequest, opts ...grpc.CallOption) (*blocktx_api.RowsAffectedResponse, error) { // panic("mock out the DelUnfinishedBlockProcessing method") @@ -46,14 +43,11 @@ var _ blocktx_api.BlockTxAPIClient = &BlockTxAPIClientMock{} // // } type BlockTxAPIClientMock struct { - // ClearBlockTransactionsMapFunc mocks the ClearBlockTransactionsMap method. - ClearBlockTransactionsMapFunc func(ctx context.Context, in *blocktx_api.ClearData, opts ...grpc.CallOption) (*blocktx_api.RowsAffectedResponse, error) - // ClearBlocksFunc mocks the ClearBlocks method. ClearBlocksFunc func(ctx context.Context, in *blocktx_api.ClearData, opts ...grpc.CallOption) (*blocktx_api.RowsAffectedResponse, error) - // ClearTransactionsFunc mocks the ClearTransactions method. - ClearTransactionsFunc func(ctx context.Context, in *blocktx_api.ClearData, opts ...grpc.CallOption) (*blocktx_api.RowsAffectedResponse, error) + // ClearRegisteredTransactionsFunc mocks the ClearRegisteredTransactions method. + ClearRegisteredTransactionsFunc func(ctx context.Context, in *blocktx_api.ClearData, opts ...grpc.CallOption) (*blocktx_api.RowsAffectedResponse, error) // DelUnfinishedBlockProcessingFunc mocks the DelUnfinishedBlockProcessing method. DelUnfinishedBlockProcessingFunc func(ctx context.Context, in *blocktx_api.DelUnfinishedBlockProcessingRequest, opts ...grpc.CallOption) (*blocktx_api.RowsAffectedResponse, error) @@ -66,15 +60,6 @@ type BlockTxAPIClientMock struct { // calls tracks calls to the methods. calls struct { - // ClearBlockTransactionsMap holds details about calls to the ClearBlockTransactionsMap method. - ClearBlockTransactionsMap []struct { - // Ctx is the ctx argument value. - Ctx context.Context - // In is the in argument value. - In *blocktx_api.ClearData - // Opts is the opts argument value. - Opts []grpc.CallOption - } // ClearBlocks holds details about calls to the ClearBlocks method. ClearBlocks []struct { // Ctx is the ctx argument value. @@ -84,8 +69,8 @@ type BlockTxAPIClientMock struct { // Opts is the opts argument value. Opts []grpc.CallOption } - // ClearTransactions holds details about calls to the ClearTransactions method. - ClearTransactions []struct { + // ClearRegisteredTransactions holds details about calls to the ClearRegisteredTransactions method. + ClearRegisteredTransactions []struct { // Ctx is the ctx argument value. Ctx context.Context // In is the in argument value. 
@@ -121,54 +106,13 @@ type BlockTxAPIClientMock struct { Opts []grpc.CallOption } } - lockClearBlockTransactionsMap sync.RWMutex lockClearBlocks sync.RWMutex - lockClearTransactions sync.RWMutex + lockClearRegisteredTransactions sync.RWMutex lockDelUnfinishedBlockProcessing sync.RWMutex lockHealth sync.RWMutex lockVerifyMerkleRoots sync.RWMutex } -// ClearBlockTransactionsMap calls ClearBlockTransactionsMapFunc. -func (mock *BlockTxAPIClientMock) ClearBlockTransactionsMap(ctx context.Context, in *blocktx_api.ClearData, opts ...grpc.CallOption) (*blocktx_api.RowsAffectedResponse, error) { - if mock.ClearBlockTransactionsMapFunc == nil { - panic("BlockTxAPIClientMock.ClearBlockTransactionsMapFunc: method is nil but BlockTxAPIClient.ClearBlockTransactionsMap was just called") - } - callInfo := struct { - Ctx context.Context - In *blocktx_api.ClearData - Opts []grpc.CallOption - }{ - Ctx: ctx, - In: in, - Opts: opts, - } - mock.lockClearBlockTransactionsMap.Lock() - mock.calls.ClearBlockTransactionsMap = append(mock.calls.ClearBlockTransactionsMap, callInfo) - mock.lockClearBlockTransactionsMap.Unlock() - return mock.ClearBlockTransactionsMapFunc(ctx, in, opts...) -} - -// ClearBlockTransactionsMapCalls gets all the calls that were made to ClearBlockTransactionsMap. -// Check the length with: -// -// len(mockedBlockTxAPIClient.ClearBlockTransactionsMapCalls()) -func (mock *BlockTxAPIClientMock) ClearBlockTransactionsMapCalls() []struct { - Ctx context.Context - In *blocktx_api.ClearData - Opts []grpc.CallOption -} { - var calls []struct { - Ctx context.Context - In *blocktx_api.ClearData - Opts []grpc.CallOption - } - mock.lockClearBlockTransactionsMap.RLock() - calls = mock.calls.ClearBlockTransactionsMap - mock.lockClearBlockTransactionsMap.RUnlock() - return calls -} - // ClearBlocks calls ClearBlocksFunc. func (mock *BlockTxAPIClientMock) ClearBlocks(ctx context.Context, in *blocktx_api.ClearData, opts ...grpc.CallOption) (*blocktx_api.RowsAffectedResponse, error) { if mock.ClearBlocksFunc == nil { @@ -209,10 +153,10 @@ func (mock *BlockTxAPIClientMock) ClearBlocksCalls() []struct { return calls } -// ClearTransactions calls ClearTransactionsFunc. -func (mock *BlockTxAPIClientMock) ClearTransactions(ctx context.Context, in *blocktx_api.ClearData, opts ...grpc.CallOption) (*blocktx_api.RowsAffectedResponse, error) { - if mock.ClearTransactionsFunc == nil { - panic("BlockTxAPIClientMock.ClearTransactionsFunc: method is nil but BlockTxAPIClient.ClearTransactions was just called") +// ClearRegisteredTransactions calls ClearRegisteredTransactionsFunc. +func (mock *BlockTxAPIClientMock) ClearRegisteredTransactions(ctx context.Context, in *blocktx_api.ClearData, opts ...grpc.CallOption) (*blocktx_api.RowsAffectedResponse, error) { + if mock.ClearRegisteredTransactionsFunc == nil { + panic("BlockTxAPIClientMock.ClearRegisteredTransactionsFunc: method is nil but BlockTxAPIClient.ClearRegisteredTransactions was just called") } callInfo := struct { Ctx context.Context @@ -223,17 +167,17 @@ func (mock *BlockTxAPIClientMock) ClearTransactions(ctx context.Context, in *blo In: in, Opts: opts, } - mock.lockClearTransactions.Lock() - mock.calls.ClearTransactions = append(mock.calls.ClearTransactions, callInfo) - mock.lockClearTransactions.Unlock() - return mock.ClearTransactionsFunc(ctx, in, opts...) 
+ mock.lockClearRegisteredTransactions.Lock() + mock.calls.ClearRegisteredTransactions = append(mock.calls.ClearRegisteredTransactions, callInfo) + mock.lockClearRegisteredTransactions.Unlock() + return mock.ClearRegisteredTransactionsFunc(ctx, in, opts...) } -// ClearTransactionsCalls gets all the calls that were made to ClearTransactions. +// ClearRegisteredTransactionsCalls gets all the calls that were made to ClearRegisteredTransactions. // Check the length with: // -// len(mockedBlockTxAPIClient.ClearTransactionsCalls()) -func (mock *BlockTxAPIClientMock) ClearTransactionsCalls() []struct { +// len(mockedBlockTxAPIClient.ClearRegisteredTransactionsCalls()) +func (mock *BlockTxAPIClientMock) ClearRegisteredTransactionsCalls() []struct { Ctx context.Context In *blocktx_api.ClearData Opts []grpc.CallOption @@ -243,9 +187,9 @@ func (mock *BlockTxAPIClientMock) ClearTransactionsCalls() []struct { In *blocktx_api.ClearData Opts []grpc.CallOption } - mock.lockClearTransactions.RLock() - calls = mock.calls.ClearTransactions - mock.lockClearTransactions.RUnlock() + mock.lockClearRegisteredTransactions.RLock() + calls = mock.calls.ClearRegisteredTransactions + mock.lockClearRegisteredTransactions.RUnlock() return calls } diff --git a/internal/blocktx/mocks/blocktx_client_mock.go b/internal/blocktx/mocks/blocktx_client_mock.go index fae8d96cb..2eed4073e 100644 --- a/internal/blocktx/mocks/blocktx_client_mock.go +++ b/internal/blocktx/mocks/blocktx_client_mock.go @@ -19,15 +19,9 @@ var _ blocktx.Watcher = &WatcherMock{} // // // make and configure a mocked blocktx.Watcher // mockedWatcher := &WatcherMock{ -// ClearBlockTransactionsMapFunc: func(ctx context.Context, retentionDays int32) (int64, error) { -// panic("mock out the ClearBlockTransactionsMap method") -// }, // ClearBlocksFunc: func(ctx context.Context, retentionDays int32) (int64, error) { // panic("mock out the ClearBlocks method") // }, -// ClearTransactionsFunc: func(ctx context.Context, retentionDays int32) (int64, error) { -// panic("mock out the ClearTransactions method") -// }, // DelUnfinishedBlockProcessingFunc: func(ctx context.Context, processedBy string) (int64, error) { // panic("mock out the DelUnfinishedBlockProcessing method") // }, @@ -41,15 +35,9 @@ var _ blocktx.Watcher = &WatcherMock{} // // } type WatcherMock struct { - // ClearBlockTransactionsMapFunc mocks the ClearBlockTransactionsMap method. - ClearBlockTransactionsMapFunc func(ctx context.Context, retentionDays int32) (int64, error) - // ClearBlocksFunc mocks the ClearBlocks method. ClearBlocksFunc func(ctx context.Context, retentionDays int32) (int64, error) - // ClearTransactionsFunc mocks the ClearTransactions method. - ClearTransactionsFunc func(ctx context.Context, retentionDays int32) (int64, error) - // DelUnfinishedBlockProcessingFunc mocks the DelUnfinishedBlockProcessing method. DelUnfinishedBlockProcessingFunc func(ctx context.Context, processedBy string) (int64, error) @@ -58,13 +46,6 @@ type WatcherMock struct { // calls tracks calls to the methods. calls struct { - // ClearBlockTransactionsMap holds details about calls to the ClearBlockTransactionsMap method. - ClearBlockTransactionsMap []struct { - // Ctx is the ctx argument value. - Ctx context.Context - // RetentionDays is the retentionDays argument value. - RetentionDays int32 - } // ClearBlocks holds details about calls to the ClearBlocks method. ClearBlocks []struct { // Ctx is the ctx argument value. 
@@ -72,13 +53,6 @@ type WatcherMock struct { // RetentionDays is the retentionDays argument value. RetentionDays int32 } - // ClearTransactions holds details about calls to the ClearTransactions method. - ClearTransactions []struct { - // Ctx is the ctx argument value. - Ctx context.Context - // RetentionDays is the retentionDays argument value. - RetentionDays int32 - } // DelUnfinishedBlockProcessing holds details about calls to the DelUnfinishedBlockProcessing method. DelUnfinishedBlockProcessing []struct { // Ctx is the ctx argument value. @@ -92,49 +66,11 @@ type WatcherMock struct { Ctx context.Context } } - lockClearBlockTransactionsMap sync.RWMutex lockClearBlocks sync.RWMutex - lockClearTransactions sync.RWMutex lockDelUnfinishedBlockProcessing sync.RWMutex lockHealth sync.RWMutex } -// ClearBlockTransactionsMap calls ClearBlockTransactionsMapFunc. -func (mock *WatcherMock) ClearBlockTransactionsMap(ctx context.Context, retentionDays int32) (int64, error) { - if mock.ClearBlockTransactionsMapFunc == nil { - panic("WatcherMock.ClearBlockTransactionsMapFunc: method is nil but Watcher.ClearBlockTransactionsMap was just called") - } - callInfo := struct { - Ctx context.Context - RetentionDays int32 - }{ - Ctx: ctx, - RetentionDays: retentionDays, - } - mock.lockClearBlockTransactionsMap.Lock() - mock.calls.ClearBlockTransactionsMap = append(mock.calls.ClearBlockTransactionsMap, callInfo) - mock.lockClearBlockTransactionsMap.Unlock() - return mock.ClearBlockTransactionsMapFunc(ctx, retentionDays) -} - -// ClearBlockTransactionsMapCalls gets all the calls that were made to ClearBlockTransactionsMap. -// Check the length with: -// -// len(mockedWatcher.ClearBlockTransactionsMapCalls()) -func (mock *WatcherMock) ClearBlockTransactionsMapCalls() []struct { - Ctx context.Context - RetentionDays int32 -} { - var calls []struct { - Ctx context.Context - RetentionDays int32 - } - mock.lockClearBlockTransactionsMap.RLock() - calls = mock.calls.ClearBlockTransactionsMap - mock.lockClearBlockTransactionsMap.RUnlock() - return calls -} - // ClearBlocks calls ClearBlocksFunc. func (mock *WatcherMock) ClearBlocks(ctx context.Context, retentionDays int32) (int64, error) { if mock.ClearBlocksFunc == nil { @@ -171,42 +107,6 @@ func (mock *WatcherMock) ClearBlocksCalls() []struct { return calls } -// ClearTransactions calls ClearTransactionsFunc. -func (mock *WatcherMock) ClearTransactions(ctx context.Context, retentionDays int32) (int64, error) { - if mock.ClearTransactionsFunc == nil { - panic("WatcherMock.ClearTransactionsFunc: method is nil but Watcher.ClearTransactions was just called") - } - callInfo := struct { - Ctx context.Context - RetentionDays int32 - }{ - Ctx: ctx, - RetentionDays: retentionDays, - } - mock.lockClearTransactions.Lock() - mock.calls.ClearTransactions = append(mock.calls.ClearTransactions, callInfo) - mock.lockClearTransactions.Unlock() - return mock.ClearTransactionsFunc(ctx, retentionDays) -} - -// ClearTransactionsCalls gets all the calls that were made to ClearTransactions. -// Check the length with: -// -// len(mockedWatcher.ClearTransactionsCalls()) -func (mock *WatcherMock) ClearTransactionsCalls() []struct { - Ctx context.Context - RetentionDays int32 -} { - var calls []struct { - Ctx context.Context - RetentionDays int32 - } - mock.lockClearTransactions.RLock() - calls = mock.calls.ClearTransactions - mock.lockClearTransactions.RUnlock() - return calls -} - // DelUnfinishedBlockProcessing calls DelUnfinishedBlockProcessingFunc. 
func (mock *WatcherMock) DelUnfinishedBlockProcessing(ctx context.Context, processedBy string) (int64, error) { if mock.DelUnfinishedBlockProcessingFunc == nil { diff --git a/internal/blocktx/processor.go b/internal/blocktx/processor.go index 10644067f..8e0109997 100644 --- a/internal/blocktx/processor.go +++ b/internal/blocktx/processor.go @@ -3,11 +3,14 @@ package blocktx import ( "bytes" "context" + "encoding/hex" "errors" "fmt" "log/slog" + "math" "os" "sync" + "sync/atomic" "time" "github.com/cenkalti/backoff/v4" @@ -16,7 +19,7 @@ import ( "github.com/libsv/go-p2p/chaincfg/chainhash" "github.com/libsv/go-p2p/wire" "go.opentelemetry.io/otel/attribute" - "go.opentelemetry.io/otel/trace" + "golang.org/x/sync/errgroup" "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" "github.com/bitcoin-sv/arc/internal/blocktx/store" @@ -24,15 +27,17 @@ import ( ) var ( - ErrFailedToSubscribeToTopic = errors.New("failed to subscribe to register topic") - ErrFailedToCreateBUMP = errors.New("failed to create new bump for tx hash from merkle tree and index") - ErrFailedToGetStringFromBUMPHex = errors.New("failed to get string from bump for tx hash") - ErrFailedToParseBlockHash = errors.New("failed to parse block hash") - ErrFailedToInsertBlockTransactions = errors.New("failed to insert block transactions") - ErrBlockAlreadyExists = errors.New("block already exists in the database") - ErrUnexpectedBlockStatus = errors.New("unexpected block status") - ErrFailedToProcessBlock = errors.New("failed to process block") - ErrFailedToStartCollectingStats = errors.New("failed to start collecting stats") + ErrFailedToSubscribeToTopic = errors.New("failed to subscribe to register topic") + ErrFailedToGetStringFromBump = errors.New("failed to get string from bump for tx hash") + ErrFailedToGetBump = errors.New("failed to get bump for tx hash") + ErrFailedToGetBlockTransactions = errors.New("failed to get block transactions") + ErrFailedToParseBlockHash = errors.New("failed to parse block hash") + ErrFailedToInsertBlockTransactions = errors.New("failed to insert block transactions") + ErrBlockAlreadyExists = errors.New("block already exists in the database") + ErrUnexpectedBlockStatus = errors.New("unexpected block status") + ErrFailedToProcessBlock = errors.New("failed to process block") + ErrFailedToStartCollectingStats = errors.New("failed to start collecting stats") + ErrFailedToCalculateMissingMerklePaths = errors.New("failed to calculate missing merkle paths") ) const ( @@ -44,6 +49,7 @@ const ( registerTxsBatchSizeDefault = 100 registerRequestTxBatchSizeDefault = 100 waitForBlockProcessing = 5 * time.Minute + parallellism = 5 ) type Processor struct { @@ -66,6 +72,7 @@ type Processor struct { processGuardsMap sync.Map stats *processorStats statCollectionInterval time.Duration + incomingIsLongest bool now func() time.Time maxBlockProcessingDuration time.Duration @@ -244,8 +251,16 @@ func (p *Processor) StartBlockProcessing() { continue } + timeElapsed := time.Since(timeStart) + nTxs := len(blockMsg.TransactionHashes) + // add the total block processing time to the stats - p.logger.Info("Processed block", slog.String("hash", hash.String()), slog.Int("txs", len(blockMsg.TransactionHashes)), slog.String("duration", time.Since(timeStart).String())) + p.logger.Info("Processed block", slog.String("hash", hash.String()), + slog.Uint64("height", blockMsg.Height), + slog.Int("txs", nTxs), + slog.String("duration", timeElapsed.String()), + slog.Float64("txs/s", float64(nTxs)/timeElapsed.Seconds()), + ) } } }() @@ 
-349,7 +364,7 @@ func (p *Processor) StartProcessRegisterTxs() { func (p *Processor) StartProcessRequestTxs() { p.waitGroup.Add(1) - txHashes := make([]*chainhash.Hash, 0, p.registerRequestTxsBatchSize) + txHashes := make([][]byte, 0, p.registerRequestTxsBatchSize) ticker := time.NewTicker(p.registerRequestTxsInterval) @@ -361,13 +376,13 @@ func (p *Processor) StartProcessRequestTxs() { case <-p.ctx.Done(): return case txHash := <-p.requestTxChannel: - tx, err := chainhash.NewHash(txHash) + _, err := chainhash.NewHash(txHash) if err != nil { p.logger.Error("Failed to create hash from byte array", slog.String("err", err.Error())) continue } - txHashes = append(txHashes, tx) + txHashes = append(txHashes, txHash) if len(txHashes) < p.registerRequestTxsBatchSize || len(txHashes) == 0 { continue @@ -379,7 +394,7 @@ func (p *Processor) StartProcessRequestTxs() { continue // retry, don't clear the txHashes slice } - txHashes = make([]*chainhash.Hash, 0, p.registerRequestTxsBatchSize) + txHashes = make([][]byte, 0, p.registerRequestTxsBatchSize) ticker.Reset(p.registerRequestTxsInterval) case <-ticker.C: @@ -394,25 +409,29 @@ func (p *Processor) StartProcessRequestTxs() { continue // retry, don't clear the txHashes slice } - txHashes = make([]*chainhash.Hash, 0, p.registerRequestTxsBatchSize) + txHashes = make([][]byte, 0, p.registerRequestTxsBatchSize) ticker.Reset(p.registerRequestTxsInterval) } } }() } -func (p *Processor) publishMinedTxs(txHashes []*chainhash.Hash) error { - hashesBytes := make([][]byte, len(txHashes)) - for i, h := range txHashes { - hashesBytes[i] = h[:] +func (p *Processor) publishMinedTxs(txHashes [][]byte) error { + minedTxs, err := p.store.GetMinedTransactions(p.ctx, txHashes) + if err != nil { + return fmt.Errorf("failed to get mined transactions: %v", err) + } + + if len(minedTxs) == 0 { + return nil } - minedTxs, err := p.store.GetMinedTransactions(p.ctx, hashesBytes, false) + minedTxsIncludingMP, err := p.calculateMerklePaths(p.ctx, minedTxs) if err != nil { - return fmt.Errorf("failed to get mined transactions: %v", err) + return errors.Join(ErrFailedToCalculateMissingMerklePaths, err) } - for _, minedTx := range minedTxs { + for _, minedTx := range minedTxsIncludingMP { txBlock := &blocktx_api.TransactionBlock{ TransactionHash: minedTx.TxHash, BlockHash: minedTx.BlockHash, @@ -431,26 +450,21 @@ func (p *Processor) publishMinedTxs(txHashes []*chainhash.Hash) error { } func (p *Processor) registerTransactions(txHashes [][]byte) { - updatedTxs, err := p.store.RegisterTransactions(p.ctx, txHashes) + if len(txHashes) == 0 { + return + } + + err := p.store.RegisterTransactions(p.ctx, txHashes) if err != nil { p.logger.Error("failed to register transactions", slog.String("err", err.Error())) } - if len(updatedTxs) > 0 { - err = p.publishMinedTxs(updatedTxs) - if err != nil { - p.logger.Error("failed to publish mined txs", slog.String("err", err.Error())) - } + err = p.publishMinedTxs(txHashes) + if err != nil { + p.logger.Error("failed to publish mined txs", slog.String("err", err.Error())) } } -func (p *Processor) buildMerkleTreeStoreChainHash(ctx context.Context, txids []*chainhash.Hash) []*chainhash.Hash { - _, span := tracing.StartTracing(ctx, "buildMerkleTreeStoreChainHash", p.tracingEnabled, p.tracingAttributes...) 
- defer tracing.EndTracing(span, nil) - - return bc.BuildMerkleTreeStoreChainHash(txids) -} - func (p *Processor) processBlock(blockMsg *p2p.BlockMessage) (err error) { ctx := p.ctx @@ -487,7 +501,7 @@ func (p *Processor) processBlock(blockMsg *p2p.BlockMessage) (err error) { return err } - var longestTxs, staleTxs []store.TransactionBlock + var longestTxs, staleTxs []store.BlockTransaction var ok bool switch block.Status { @@ -506,8 +520,14 @@ func (p *Processor) processBlock(blockMsg *p2p.BlockMessage) (err error) { return ErrFailedToProcessBlock } - p.publishTxsToMetamorph(ctx, longestTxs) - p.publishTxsToMetamorph(ctx, staleTxs) + allTxs := append(longestTxs, staleTxs...) + + txsToPublish, err := p.calculateMerklePaths(ctx, allTxs) + if err != nil { + return ErrFailedToCalculateMissingMerklePaths + } + + p.publishTxsToMetamorph(ctx, txsToPublish) return nil } @@ -529,15 +549,17 @@ func (p *Processor) verifyAndInsertBlock(ctx context.Context, blockMsg *p2p.Bloc MerkleRoot: merkleRoot[:], Height: blockMsg.Height, Chainwork: calculateChainwork(blockMsg.Header.Bits).String(), - Status: blocktx_api.Status_LONGEST, // temporary fix (!), TODO: remove this when gaps are filling quickly again } - // TODO: uncomment when gaps are filling quickly again - // err = p.assignBlockStatus(ctx, incomingBlock, previousBlockHash) - // if err != nil { - // p.logger.Error("unable to assign block status", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) - // return nil, err - // } + if p.incomingIsLongest { + incomingBlock.Status = blocktx_api.Status_LONGEST + } else { + err = p.assignBlockStatus(ctx, incomingBlock, previousBlockHash) + if err != nil { + p.logger.Error("unable to assign block status", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("err", err.Error())) + return nil, err + } + } p.logger.Info("Inserting block", slog.String("hash", blockHash.String()), slog.Uint64("height", incomingBlock.Height), slog.String("status", incomingBlock.Status.String())) @@ -550,7 +572,6 @@ func (p *Processor) verifyAndInsertBlock(ctx context.Context, blockMsg *p2p.Bloc return incomingBlock, nil } -//lint:ignore U1000 Ignored until gaps are filling quickly again TODO: remove this ignore func (p *Processor) assignBlockStatus(ctx context.Context, block *blocktx_api.Block, prevBlockHash chainhash.Hash) (err error) { ctx, span := tracing.StartTracing(ctx, "assignBlockStatus", p.tracingEnabled, p.tracingAttributes...) 
defer func() { @@ -611,9 +632,14 @@ func (p *Processor) assignBlockStatus(ctx context.Context, block *blocktx_api.Bl return nil } -//lint:ignore U1000 Ignored until gaps are filling quickly again TODO: remove this ignore func (p *Processor) longestTipExists(ctx context.Context) (bool, error) { - _, err := p.store.GetChainTip(ctx) + const ( + hoursPerDay = 24 + blocksPerHour = 6 + ) + heightRange := p.dataRetentionDays * hoursPerDay * blocksPerHour + + _, err := p.store.GetChainTip(ctx, heightRange) if err != nil && !errors.Is(err, store.ErrBlockNotFound) { return false, err } @@ -625,7 +651,7 @@ func (p *Processor) longestTipExists(ctx context.Context) (bool, error) { return true, nil } -func (p *Processor) getRegisteredTransactions(ctx context.Context, blocks []*blocktx_api.Block) (txsToPublish []store.TransactionBlock, ok bool) { +func (p *Processor) getRegisteredTransactions(ctx context.Context, blocks []*blocktx_api.Block) (txs []store.BlockTransaction, ok bool) { var err error ctx, span := tracing.StartTracing(ctx, "getRegisteredTransactions", p.tracingEnabled, p.tracingAttributes...) defer func() { @@ -637,14 +663,14 @@ func (p *Processor) getRegisteredTransactions(ctx context.Context, blocks []*blo blockHashes[i] = b.Hash } - txsToPublish, err = p.store.GetRegisteredTxsByBlockHashes(ctx, blockHashes) + txs, err = p.store.GetRegisteredTxsByBlockHashes(ctx, blockHashes) if err != nil { block := blocks[len(blocks)-1] p.logger.Error("unable to get registered transactions", slog.String("hash", getHashStringNoErr(block.Hash)), slog.Uint64("height", block.Height), slog.String("err", err.Error())) return nil, false } - return txsToPublish, true + return txs, true } func (p *Processor) insertBlockAndStoreTransactions(ctx context.Context, incomingBlock *blocktx_api.Block, txHashes []*chainhash.Hash, merkleRoot chainhash.Hash) (err error) { @@ -653,7 +679,9 @@ func (p *Processor) insertBlockAndStoreTransactions(ctx context.Context, incomin tracing.EndTracing(span, err) }() - calculatedMerkleTree := p.buildMerkleTreeStoreChainHash(ctx, txHashes) + _, buildMerkleSpan := tracing.StartTracing(ctx, "BuildMerkleTreeStoreChainHash", p.tracingEnabled, p.tracingAttributes...) + calculatedMerkleTree := bc.BuildMerkleTreeStoreChainHash(txHashes) + tracing.EndTracing(buildMerkleSpan, nil) if !merkleRoot.IsEqual(calculatedMerkleTree[len(calculatedMerkleTree)-1]) { p.logger.Error("merkle root mismatch", slog.String("hash", getHashStringNoErr(incomingBlock.Hash))) return err @@ -665,7 +693,7 @@ func (p *Processor) insertBlockAndStoreTransactions(ctx context.Context, incomin return err } - if err = p.storeTransactions(ctx, blockID, incomingBlock, calculatedMerkleTree); err != nil { + if err = p.storeTransactions(ctx, blockID, incomingBlock, txHashes); err != nil { p.logger.Error("unable to store transactions from block", slog.String("hash", getHashStringNoErr(incomingBlock.Hash)), slog.String("err", err.Error())) return err } @@ -673,75 +701,91 @@ func (p *Processor) insertBlockAndStoreTransactions(ctx context.Context, incomin return nil } -func (p *Processor) storeTransactions(ctx context.Context, blockID uint64, block *blocktx_api.Block, merkleTree []*chainhash.Hash) (err error) { +func (p *Processor) storeTransactions(ctx context.Context, blockID uint64, block *blocktx_api.Block, txHashes []*chainhash.Hash) (err error) { ctx, span := tracing.StartTracing(ctx, "storeTransactions", p.tracingEnabled, p.tracingAttributes...) 
defer func() { tracing.EndTracing(span, err) }() - txs := make([]store.TxWithMerklePath, 0, p.transactionStorageBatchSize) - leaves := merkleTree[:(len(merkleTree)+1)/2] + txs := make([]store.TxHashWithMerkleTreeIndex, 0, p.transactionStorageBatchSize) + + for txIndex, hash := range txHashes { + tx := store.TxHashWithMerkleTreeIndex{ + Hash: hash[:], + MerkleTreeIndex: int64(txIndex), + } + + txs = append(txs, tx) + } blockhash, err := chainhash.NewHash(block.Hash) if err != nil { return errors.Join(ErrFailedToParseBlockHash, fmt.Errorf("block height: %d", block.Height), err) } - var totalSize int - for totalSize = 1; totalSize < len(leaves); totalSize++ { - if leaves[totalSize] == nil { - // Everything to the right of the first nil will also be nil, as this is just padding upto the next PoT. - break - } - } + totalSize := len(txHashes) - progress := progressIndices(totalSize, 5) now := time.Now() - var iterateMerkleTree trace.Span - ctx, iterateMerkleTree = tracing.StartTracing(ctx, "iterateMerkleTree", p.tracingEnabled, p.tracingAttributes...) + batchSize := p.transactionStorageBatchSize - for txIndex, hash := range leaves { - // Everything to the right of the first nil will also be nil, as this is just padding upto the next PoT. - if hash == nil { - break - } + batches := math.Ceil(float64(len(txs)) / float64(batchSize)) + g, ctx := errgroup.WithContext(ctx) - bump, err := bc.NewBUMPFromMerkleTreeAndIndex(block.Height, merkleTree, uint64(txIndex)) // #nosec G115 - if err != nil { - return errors.Join(ErrFailedToCreateBUMP, fmt.Errorf("tx hash %s, block height: %d", hash.String(), block.Height), err) - } + g.SetLimit(parallellism) - bumpHex, err := bump.String() - if err != nil { - return errors.Join(ErrFailedToGetStringFromBUMPHex, err) - } + var txsInserted int64 - txs = append(txs, store.TxWithMerklePath{ - Hash: hash[:], - MerklePath: bumpHex, - }) + finished := make(chan struct{}) + defer func() { + finished <- struct{}{} + }() + go func() { + step := int64(math.Ceil(float64(len(txs)) / 5)) - if (txIndex+1)%p.transactionStorageBatchSize == 0 { - err := p.store.UpsertBlockTransactions(ctx, blockID, txs) - if err != nil { - return errors.Join(ErrFailedToInsertBlockTransactions, err) + showProgress := step + ticker := time.NewTicker(1 * time.Second) + for { + select { + case <-ticker.C: + inserted := atomic.LoadInt64(&txsInserted) + if inserted > showProgress { + percentage := int64(math.Floor(100 * float64(inserted) / float64(totalSize))) + p.logger.Info( + fmt.Sprintf("%d txs out of %d stored", inserted, totalSize), + slog.Int64("percentage", percentage), + slog.String("hash", blockhash.String()), + slog.Uint64("height", block.Height), + slog.String("duration", time.Since(now).String()), + ) + showProgress += step + } + case <-finished: + ticker.Stop() + return } - // free up memory - txs = txs[:0] } + }() - if percentage, found := progress[txIndex+1]; found { - if totalSize > 0 { - p.logger.Info(fmt.Sprintf("%d txs out of %d stored", txIndex+1, totalSize), slog.Int("percentage", percentage), slog.String("hash", blockhash.String()), slog.Uint64("height", block.Height), slog.String("duration", time.Since(now).String())) - } + for i := 0; i < int(batches); i++ { + batch := make([]store.TxHashWithMerkleTreeIndex, 0, batchSize) + if (i+1)*batchSize > len(txs) { + batch = txs[i*batchSize:] + } else { + batch = txs[i*batchSize : (i+1)*batchSize] } - } + g.Go(func() error { + insertErr := p.store.InsertBlockTransactions(ctx, blockID, batch) + if insertErr != nil { + return 
errors.Join(ErrFailedToInsertBlockTransactions, insertErr) + } - tracing.EndTracing(iterateMerkleTree, nil) + atomic.AddInt64(&txsInserted, int64(len(batch))) + return nil + }) + } - // update all remaining transactions - err = p.store.UpsertBlockTransactions(ctx, blockID, txs) + err = g.Wait() if err != nil { return errors.Join(ErrFailedToInsertBlockTransactions, fmt.Errorf("block height: %d", block.Height), err) } @@ -749,7 +793,7 @@ func (p *Processor) storeTransactions(ctx context.Context, blockID uint64, block return nil } -func (p *Processor) handleStaleBlock(ctx context.Context, block *blocktx_api.Block) (longestTxs, staleTxs []store.TransactionBlock, ok bool) { +func (p *Processor) handleStaleBlock(ctx context.Context, block *blocktx_api.Block, orphans ...*blocktx_api.Block) (longestTxs, staleTxs []store.BlockTransaction, ok bool) { var err error ctx, span := tracing.StartTracing(ctx, "handleStaleBlock", p.tracingEnabled, p.tracingAttributes...) defer func() { @@ -787,10 +831,24 @@ func (p *Processor) handleStaleBlock(ctx context.Context, block *blocktx_api.Blo return longestTxs, staleTxs, true } - return nil, nil, true + if len(orphans) > 0 { + staleTxs, ok = p.getRegisteredTransactions(ctx, orphans) + } else { + staleTxs, ok = p.getRegisteredTransactions(ctx, staleBlocks) + } + if !ok { + return nil, nil, false + } + + longestTxs, ok = p.getRegisteredTransactions(ctx, longestBlocks) + if !ok { + return nil, nil, false + } + + return nil, exclusiveRightTxs(longestTxs, staleTxs), true } -func (p *Processor) performReorg(ctx context.Context, staleBlocks []*blocktx_api.Block, longestBlocks []*blocktx_api.Block) (longestTxs, staleTxs []store.TransactionBlock, err error) { +func (p *Processor) performReorg(ctx context.Context, staleBlocks []*blocktx_api.Block, longestBlocks []*blocktx_api.Block) (longestTxs, staleTxs []store.BlockTransaction, err error) { ctx, span := tracing.StartTracing(ctx, "performReorg", p.tracingEnabled, p.tracingAttributes...) defer func() { tracing.EndTracing(span, err) @@ -843,7 +901,7 @@ func (p *Processor) performReorg(ctx context.Context, staleBlocks []*blocktx_api return longestTxs, staleTxs, nil } -func (p *Processor) handleOrphans(ctx context.Context, block *blocktx_api.Block) (longestTxs, staleTxs []store.TransactionBlock, ok bool) { +func (p *Processor) handleOrphans(ctx context.Context, block *blocktx_api.Block) (longestTxs, staleTxs []store.BlockTransaction, ok bool) { var err error ctx, span := tracing.StartTracing(ctx, "handleOrphans", p.tracingEnabled, p.tracingAttributes...) defer func() { @@ -869,7 +927,7 @@ func (p *Processor) handleOrphans(ctx context.Context, block *blocktx_api.Block) } block.Status = blocktx_api.Status_STALE - return p.handleStaleBlock(ctx, block) + return p.handleStaleBlock(ctx, block, orphans...) } if ancestor.Status == blocktx_api.Status_LONGEST { @@ -940,7 +998,7 @@ func (p *Processor) acceptIntoChain(ctx context.Context, blocks []*blocktx_api.B return true } -func (p *Processor) publishTxsToMetamorph(ctx context.Context, txs []store.TransactionBlock) { +func (p *Processor) publishTxsToMetamorph(ctx context.Context, txs []store.BlockTransactionWithMerklePath) { var publishErr error ctx, span := tracing.StartTracing(ctx, "publish transactions", p.tracingEnabled, p.tracingAttributes...) 
 	defer func() {
@@ -964,6 +1022,71 @@ func (p *Processor) publishTxsToMetamorph(ctx context.Context, txs []store.Trans
 		}
 	}
 }
+func (p *Processor) calculateMerklePaths(ctx context.Context, txs []store.BlockTransaction) (updatedTxs []store.BlockTransactionWithMerklePath, err error) {
+	ctx, span := tracing.StartTracing(ctx, "calculateMerklePaths", p.tracingEnabled, p.tracingAttributes...)
+	defer func() {
+		tracing.EndTracing(span, err)
+	}()
+
+	// gather all transactions with missing merkle paths for each block in a map
+	// to avoid getting all transactions from the same block multiple times
+	blockTxsMap := make(map[string][]store.BlockTransactionWithMerklePath)
+
+	for _, tx := range txs {
+		blockTransactionWithMerklePath := store.BlockTransactionWithMerklePath{
+			BlockTransaction: store.BlockTransaction{
+				TxHash:          tx.TxHash,
+				BlockHash:       tx.BlockHash,
+				BlockHeight:     tx.BlockHeight,
+				MerkleTreeIndex: tx.MerkleTreeIndex,
+				BlockStatus:     tx.BlockStatus,
+			},
+		}
+
+		blockTxsMap[hex.EncodeToString(tx.BlockHash)] = append(blockTxsMap[hex.EncodeToString(tx.BlockHash)], blockTransactionWithMerklePath)
+	}
+
+	for bh, blockTxs := range blockTxsMap {
+		blockHash, err := hex.DecodeString(bh)
+		if err != nil {
+			return nil, err
+		}
+
+		txHashes, err := p.store.GetBlockTransactionsHashes(ctx, blockHash)
+		if err != nil {
+			return nil, errors.Join(ErrFailedToGetBlockTransactions, fmt.Errorf("block hash %s", getHashStringNoErr(blockHash)), err)
+		}
+
+		if len(txHashes) == 0 {
+			continue
+		}
+
+		merkleTree := bc.BuildMerkleTreeStoreChainHash(txHashes)
+
+		for _, tx := range blockTxs {
+			if tx.MerkleTreeIndex == -1 {
+				p.logger.Warn("missing merkle tree index for transaction", slog.String("hash", getHashStringNoErr(tx.TxHash)))
+				continue
+			}
+
+			bump, err := bc.NewBUMPFromMerkleTreeAndIndex(tx.BlockHeight, merkleTree, uint64(tx.MerkleTreeIndex)) // #nosec G115
+			if err != nil {
+				return nil, errors.Join(ErrFailedToGetBump, fmt.Errorf("block hash %s", getHashStringNoErr(blockHash)), err)
+			}
+
+			bumpHex, err := bump.String()
+			if err != nil {
+				return nil, errors.Join(ErrFailedToGetStringFromBump, fmt.Errorf("block hash %s", getHashStringNoErr(blockHash)), err)
+			}
+
+			tx.MerklePath = bumpHex
+			updatedTxs = append(updatedTxs, tx)
+		}
+	}
+
+	return updatedTxs, nil
+}
+
 func (p *Processor) Shutdown() {
 	p.cancelAll()
 	p.waitGroup.Wait()
diff --git a/internal/blocktx/processor_helpers.go b/internal/blocktx/processor_helpers.go
index bb339fd8c..c4174f889 100644
--- a/internal/blocktx/processor_helpers.go
+++ b/internal/blocktx/processor_helpers.go
@@ -1,12 +1,12 @@
 package blocktx
 
 import (
-	"math"
 	"math/big"
 
+	"github.com/libsv/go-p2p/chaincfg/chainhash"
+
 	"github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api"
 	"github.com/bitcoin-sv/arc/internal/blocktx/store"
-	"github.com/libsv/go-p2p/chaincfg/chainhash"
 )
 
 func getHashStringNoErr(hash []byte) string {
@@ -29,14 +29,14 @@ func sumChainwork(blocks []*blocktx_api.Block) *big.Int {
 	return sum
 }
 
-func exclusiveRightTxs(leftTxs, rightTxs []store.TransactionBlock) []store.TransactionBlock {
+func exclusiveRightTxs(leftTxs, rightTxs []store.BlockTransaction) []store.BlockTransaction {
 	leftTxsMap := make(map[string]struct{})
 	for _, tx := range leftTxs {
 		leftTxsMap[string(tx.TxHash)] = struct{}{}
 	}
 
-	exclusiveRightTxs := make([]store.TransactionBlock, 0)
+	exclusiveRightTxs := make([]store.BlockTransaction, 0)
 	for _, tx := range rightTxs {
 		if _, found := leftTxsMap[string(tx.TxHash)]; !found {
 			exclusiveRightTxs = append(exclusiveRightTxs, tx)
@@ -97,19 +97,3 @@ 
func compactToBig(compact uint32) *big.Int { return bn } - -func progressIndices(total, steps int) map[int]int { - totalF := float64(total) - stepsF := float64(steps) - - step := int(math.Max(math.Round(totalF/stepsF), 1)) - stepF := float64(step) - - progress := make(map[int]int) - for i := float64(1); i < stepsF; i++ { - progress[step*int(i)] = int(stepF * i / totalF * 100) - } - - progress[total] = 100 - return progress -} diff --git a/internal/blocktx/processor_helpers_test.go b/internal/blocktx/processor_helpers_test.go index 794ebef95..b883e217a 100644 --- a/internal/blocktx/processor_helpers_test.go +++ b/internal/blocktx/processor_helpers_test.go @@ -4,13 +4,14 @@ import ( "fmt" "testing" - "github.com/bitcoin-sv/arc/internal/blocktx/store" "github.com/stretchr/testify/require" + + "github.com/bitcoin-sv/arc/internal/blocktx/store" ) func TestExlusiveRightTxs(t *testing.T) { // given - leftTxs := []store.TransactionBlock{ + leftTxs := []store.BlockTransaction{ { TxHash: []byte("1"), }, @@ -18,7 +19,7 @@ func TestExlusiveRightTxs(t *testing.T) { TxHash: []byte("2"), }, } - rightTxs := []store.TransactionBlock{ + rightTxs := []store.BlockTransaction{ { TxHash: []byte("A"), }, @@ -30,7 +31,7 @@ func TestExlusiveRightTxs(t *testing.T) { }, } - expectedStaleTxs := []store.TransactionBlock{ + expectedStaleTxs := []store.BlockTransaction{ { TxHash: []byte("A"), }, diff --git a/internal/blocktx/processor_opts.go b/internal/blocktx/processor_opts.go index fe0ac49ed..3cb5b9401 100644 --- a/internal/blocktx/processor_opts.go +++ b/internal/blocktx/processor_opts.go @@ -7,61 +7,61 @@ import ( "go.opentelemetry.io/otel/attribute" ) -func WithMessageQueueClient(mqClient MessageQueueClient) func(handler *Processor) { +func WithMessageQueueClient(mqClient MessageQueueClient) func(*Processor) { return func(p *Processor) { p.mqClient = mqClient } } -func WithTransactionBatchSize(size int) func(handler *Processor) { +func WithTransactionBatchSize(size int) func(*Processor) { return func(p *Processor) { p.transactionStorageBatchSize = size } } -func WithRetentionDays(dataRetentionDays int) func(handler *Processor) { +func WithRetentionDays(dataRetentionDays int) func(*Processor) { return func(p *Processor) { p.dataRetentionDays = dataRetentionDays } } -func WithRegisterTxsInterval(d time.Duration) func(handler *Processor) { +func WithRegisterTxsInterval(d time.Duration) func(*Processor) { return func(p *Processor) { p.registerTxsInterval = d } } -func WithRegisterRequestTxsInterval(d time.Duration) func(handler *Processor) { +func WithRegisterRequestTxsInterval(d time.Duration) func(*Processor) { return func(p *Processor) { p.registerRequestTxsInterval = d } } -func WithRegisterTxsChan(registerTxsChan chan []byte) func(handler *Processor) { - return func(handler *Processor) { - handler.registerTxsChan = registerTxsChan +func WithRegisterTxsChan(registerTxsChan chan []byte) func(*Processor) { + return func(processor *Processor) { + processor.registerTxsChan = registerTxsChan } } -func WithRequestTxChan(requestTxChannel chan []byte) func(handler *Processor) { - return func(handler *Processor) { - handler.requestTxChannel = requestTxChannel +func WithRequestTxChan(requestTxChannel chan []byte) func(*Processor) { + return func(processor *Processor) { + processor.requestTxChannel = requestTxChannel } } -func WithRegisterTxsBatchSize(size int) func(handler *Processor) { - return func(handler *Processor) { - handler.registerTxsBatchSize = size +func WithRegisterTxsBatchSize(size int) func(*Processor) { + 
return func(processor *Processor) { + processor.registerTxsBatchSize = size } } -func WithRegisterRequestTxsBatchSize(size int) func(handler *Processor) { - return func(handler *Processor) { - handler.registerRequestTxsBatchSize = size +func WithRegisterRequestTxsBatchSize(size int) func(*Processor) { + return func(processor *Processor) { + processor.registerRequestTxsBatchSize = size } } -func WithTracer(attr ...attribute.KeyValue) func(s *Processor) { +func WithTracer(attr ...attribute.KeyValue) func(*Processor) { return func(p *Processor) { p.tracingEnabled = true if len(attr) > 0 { @@ -74,8 +74,14 @@ func WithTracer(attr ...attribute.KeyValue) func(s *Processor) { } } -func WithMaxBlockProcessingDuration(d time.Duration) func(handler *Processor) { - return func(handler *Processor) { - handler.maxBlockProcessingDuration = d +func WithMaxBlockProcessingDuration(d time.Duration) func(*Processor) { + return func(processor *Processor) { + processor.maxBlockProcessingDuration = d + } +} + +func WithIncomingIsLongest(enabled bool) func(*Processor) { + return func(processor *Processor) { + processor.incomingIsLongest = enabled } } diff --git a/internal/blocktx/processor_test.go b/internal/blocktx/processor_test.go index f9917867f..81ab8dac5 100644 --- a/internal/blocktx/processor_test.go +++ b/internal/blocktx/processor_test.go @@ -143,17 +143,15 @@ func TestHandleBlock(t *testing.T) { // given const batchSize = 4 - var expectedInsertedTransactions [][]byte transactionHashes := make([]*chainhash.Hash, len(tc.txHashes)) for i, hash := range tc.txHashes { txHash, err := chainhash.NewHashFromStr(hash) require.NoError(t, err) transactionHashes[i] = txHash - - expectedInsertedTransactions = append(expectedInsertedTransactions, txHash[:]) } - var actualInsertedBlockTransactions [][]byte + actualInsertedBlockTransactionsCh := make(chan string, 100) + storeMock := &storeMocks.BlocktxStoreMock{ GetBlockFunc: func(_ context.Context, _ *chainhash.Hash) (*blocktx_api.Block, error) { if tc.blockAlreadyProcessed { @@ -164,30 +162,33 @@ func TestHandleBlock(t *testing.T) { GetLongestBlockByHeightFunc: func(_ context.Context, _ uint64) (*blocktx_api.Block, error) { return nil, store.ErrBlockNotFound }, - GetChainTipFunc: func(_ context.Context) (*blocktx_api.Block, error) { + GetChainTipFunc: func(_ context.Context, _ int) (*blocktx_api.Block, error) { return nil, store.ErrBlockNotFound }, UpsertBlockFunc: func(_ context.Context, _ *blocktx_api.Block) (uint64, error) { return 0, nil }, - GetMinedTransactionsFunc: func(_ context.Context, _ [][]byte, _ bool) ([]store.TransactionBlock, error) { + GetMinedTransactionsFunc: func(_ context.Context, _ [][]byte) ([]store.BlockTransaction, error) { + return nil, nil + }, + GetRegisteredTxsByBlockHashesFunc: func(_ context.Context, _ [][]byte) ([]store.BlockTransaction, error) { return nil, nil }, - GetRegisteredTxsByBlockHashesFunc: func(_ context.Context, _ [][]byte) ([]store.TransactionBlock, error) { + GetBlockTransactionsHashesFunc: func(_ context.Context, _ []byte) ([]*chainhash.Hash, error) { return nil, nil }, MarkBlockAsDoneFunc: func(_ context.Context, _ *chainhash.Hash, _ uint64, _ uint64) error { return nil }, GetBlockHashesProcessingInProgressFunc: func(_ context.Context, _ string) ([]*chainhash.Hash, error) { return nil, nil }, } - storeMock.UpsertBlockTransactionsFunc = func(_ context.Context, _ uint64, txsWithMerklePaths []store.TxWithMerklePath) error { + storeMock.InsertBlockTransactionsFunc = func(_ context.Context, _ uint64, txsWithMerklePaths 
[]store.TxHashWithMerkleTreeIndex) error { require.LessOrEqual(t, len(txsWithMerklePaths), batchSize) for _, txWithMr := range txsWithMerklePaths { tx, err := chainhash.NewHash(txWithMr.Hash) require.NoError(t, err) - actualInsertedBlockTransactions = append(actualInsertedBlockTransactions, tx[:]) + actualInsertedBlockTransactionsCh <- tx.String() } return nil @@ -205,7 +206,7 @@ func TestHandleBlock(t *testing.T) { require.NoError(t, err) blockMessage := &p2p.BlockMessage{ - // Hash: testdata.Block1Hash, + //Hash: testdata.Block1Hash, Header: &wire.BlockHeader{ Version: 541065216, PrevBlock: tc.prevBlockHash, @@ -225,9 +226,21 @@ func TestHandleBlock(t *testing.T) { err = p2pMsgHandler.HandleBlock(blockMessage, &mocks.PeerMock{StringFunc: func() string { return "peer" }}) require.NoError(t, err) + var actualInsertedBlockTransactions []string time.Sleep(20 * time.Millisecond) sut.Shutdown() + loop: + for { + select { + case inserted := <-actualInsertedBlockTransactionsCh: + actualInsertedBlockTransactions = append(actualInsertedBlockTransactions, inserted) + default: + break loop + } + } + + expectedInsertedTransactions := tc.txHashes // then require.ElementsMatch(t, expectedInsertedTransactions, actualInsertedBlockTransactions) }) @@ -235,9 +248,6 @@ func TestHandleBlock(t *testing.T) { } func TestHandleBlockReorgAndOrphans(t *testing.T) { - // TODO: remove the skip when gaps are filling quickly again - t.Skip("Skipping until gaps are being processed quickly again") - testCases := []struct { name string blockAlreadyExists bool @@ -356,7 +366,7 @@ func TestHandleBlockReorgAndOrphans(t *testing.T) { } return nil, store.ErrBlockNotFound }, - GetChainTipFunc: func(_ context.Context) (*blocktx_api.Block, error) { + GetChainTipFunc: func(_ context.Context, _ int) (*blocktx_api.Block, error) { return &blocktx_api.Block{}, nil }, UpsertBlockFunc: func(_ context.Context, block *blocktx_api.Block) (uint64, error) { @@ -410,13 +420,16 @@ func TestHandleBlockReorgAndOrphans(t *testing.T) { } return nil, nil, nil }, - UpsertBlockTransactionsFunc: func(_ context.Context, _ uint64, _ []store.TxWithMerklePath) error { + InsertBlockTransactionsFunc: func(_ context.Context, _ uint64, _ []store.TxHashWithMerkleTreeIndex) error { return nil }, - GetRegisteredTxsByBlockHashesFunc: func(_ context.Context, _ [][]byte) ([]store.TransactionBlock, error) { + GetRegisteredTxsByBlockHashesFunc: func(_ context.Context, _ [][]byte) ([]store.BlockTransaction, error) { return nil, nil }, - GetMinedTransactionsFunc: func(_ context.Context, _ [][]byte, _ bool) ([]store.TransactionBlock, error) { + GetMinedTransactionsFunc: func(_ context.Context, _ [][]byte) ([]store.BlockTransaction, error) { + return nil, nil + }, + GetBlockTransactionsHashesFunc: func(_ context.Context, _ []byte) ([]*chainhash.Hash, error) { return nil, nil }, MarkBlockAsDoneFunc: func(_ context.Context, _ *chainhash.Hash, _, _ uint64) error { @@ -442,7 +455,7 @@ func TestHandleBlockReorgAndOrphans(t *testing.T) { require.NoError(t, err) blockMessage := &p2p.BlockMessage{ - // Hash: testdata.Block1Hash, + //Hash: testdata.Block1Hash, Header: &wire.BlockHeader{ Version: 541065216, MerkleRoot: *merkleRoot, @@ -493,12 +506,18 @@ func TestStartProcessRegisterTxs(t *testing.T) { // given registerErrTest := tc.registerErr storeMock := &storeMocks.BlocktxStoreMock{ - RegisterTransactionsFunc: func(_ context.Context, _ [][]byte) ([]*chainhash.Hash, error) { - return nil, registerErrTest + RegisterTransactionsFunc: func(_ context.Context, _ [][]byte) error { + 
return registerErrTest }, GetBlockHashesProcessingInProgressFunc: func(_ context.Context, _ string) ([]*chainhash.Hash, error) { return nil, nil }, + GetBlockTransactionsHashesFunc: func(_ context.Context, _ []byte) ([]*chainhash.Hash, error) { + return nil, nil + }, + GetMinedTransactionsFunc: func(_ context.Context, _ [][]byte) ([]store.BlockTransaction, error) { + return nil, nil + }, } txChan := make(chan []byte, 10) @@ -714,12 +733,12 @@ func TestStartProcessRequestTxs(t *testing.T) { t.Run(tc.name, func(t *testing.T) { // given storeMock := &storeMocks.BlocktxStoreMock{ - GetMinedTransactionsFunc: func(_ context.Context, hashes [][]byte, _ bool) ([]store.TransactionBlock, error) { + GetMinedTransactionsFunc: func(_ context.Context, hashes [][]byte) ([]store.BlockTransaction, error) { for _, hash := range hashes { require.Equal(t, testdata.TX1Hash[:], hash) } - return []store.TransactionBlock{{ + return []store.BlockTransaction{{ TxHash: testdata.TX1Hash[:], BlockHash: testdata.Block1Hash[:], BlockHeight: 1, @@ -728,6 +747,9 @@ func TestStartProcessRequestTxs(t *testing.T) { GetBlockHashesProcessingInProgressFunc: func(_ context.Context, _ string) ([]*chainhash.Hash, error) { return nil, nil }, + GetBlockTransactionsHashesFunc: func(_ context.Context, _ []byte) ([]*chainhash.Hash, error) { + return []*chainhash.Hash{testdata.TX1Hash}, nil + }, } mq := &mocks.MessageQueueClientMock{ diff --git a/internal/blocktx/server.go b/internal/blocktx/server.go index 7eb0542eb..e1ae3b3d9 100644 --- a/internal/blocktx/server.go +++ b/internal/blocktx/server.go @@ -9,11 +9,12 @@ import ( "google.golang.org/protobuf/types/known/emptypb" "google.golang.org/protobuf/types/known/timestamppb" + "github.com/libsv/go-p2p" + "github.com/bitcoin-sv/arc/config" "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" "github.com/bitcoin-sv/arc/internal/blocktx/store" "github.com/bitcoin-sv/arc/internal/grpc_opts" - "github.com/libsv/go-p2p" ) // Server type carries the logger within it. 
@@ -58,16 +59,12 @@ func (s *Server) Health(_ context.Context, _ *emptypb.Empty) (*blocktx_api.Healt }, nil } -func (s *Server) ClearTransactions(ctx context.Context, clearData *blocktx_api.ClearData) (*blocktx_api.RowsAffectedResponse, error) { - return s.store.ClearBlocktxTable(ctx, clearData.GetRetentionDays(), "transactions") -} - func (s *Server) ClearBlocks(ctx context.Context, clearData *blocktx_api.ClearData) (*blocktx_api.RowsAffectedResponse, error) { return s.store.ClearBlocktxTable(ctx, clearData.GetRetentionDays(), "blocks") } -func (s *Server) ClearBlockTransactionsMap(ctx context.Context, clearData *blocktx_api.ClearData) (*blocktx_api.RowsAffectedResponse, error) { - return s.store.ClearBlocktxTable(ctx, clearData.GetRetentionDays(), "block_transactions_map") +func (s *Server) ClearRegisteredTransactions(ctx context.Context, clearData *blocktx_api.ClearData) (*blocktx_api.RowsAffectedResponse, error) { + return s.store.ClearBlocktxTable(ctx, clearData.GetRetentionDays(), "registered_transactions") } func (s *Server) DelUnfinishedBlockProcessing(ctx context.Context, req *blocktx_api.DelUnfinishedBlockProcessingRequest) (*blocktx_api.RowsAffectedResponse, error) { diff --git a/internal/blocktx/store/mocks/blocktx_store_mock.go b/internal/blocktx/store/mocks/blocktx_store_mock.go index d7acfb51d..07d23807f 100644 --- a/internal/blocktx/store/mocks/blocktx_store_mock.go +++ b/internal/blocktx/store/mocks/blocktx_store_mock.go @@ -39,7 +39,10 @@ var _ store.BlocktxStore = &BlocktxStoreMock{} // GetBlockHashesProcessingInProgressFunc: func(ctx context.Context, processedBy string) ([]*chainhash.Hash, error) { // panic("mock out the GetBlockHashesProcessingInProgress method") // }, -// GetChainTipFunc: func(ctx context.Context) (*blocktx_api.Block, error) { +// GetBlockTransactionsHashesFunc: func(ctx context.Context, blockHash []byte) ([]*chainhash.Hash, error) { +// panic("mock out the GetBlockTransactionsHashes method") +// }, +// GetChainTipFunc: func(ctx context.Context, heightRange int) (*blocktx_api.Block, error) { // panic("mock out the GetChainTip method") // }, // GetLongestBlockByHeightFunc: func(ctx context.Context, height uint64) (*blocktx_api.Block, error) { @@ -48,13 +51,13 @@ var _ store.BlocktxStore = &BlocktxStoreMock{} // GetLongestChainFromHeightFunc: func(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) { // panic("mock out the GetLongestChainFromHeight method") // }, -// GetMinedTransactionsFunc: func(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) { +// GetMinedTransactionsFunc: func(ctx context.Context, hashes [][]byte) ([]store.BlockTransaction, error) { // panic("mock out the GetMinedTransactions method") // }, // GetOrphansBackToNonOrphanAncestorFunc: func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, *blocktx_api.Block, error) { // panic("mock out the GetOrphansBackToNonOrphanAncestor method") // }, -// GetRegisteredTxsByBlockHashesFunc: func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { +// GetRegisteredTxsByBlockHashesFunc: func(ctx context.Context, blockHashes [][]byte) ([]store.BlockTransaction, error) { // panic("mock out the GetRegisteredTxsByBlockHashes method") // }, // GetStaleChainBackFromHashFunc: func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) { @@ -63,13 +66,16 @@ var _ store.BlocktxStore = &BlocktxStoreMock{} // GetStatsFunc: func(ctx context.Context) (*store.Stats, error) { // panic("mock out the GetStats 
method") // }, +// InsertBlockTransactionsFunc: func(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxHashWithMerkleTreeIndex) error { +// panic("mock out the InsertBlockTransactions method") +// }, // MarkBlockAsDoneFunc: func(ctx context.Context, hash *chainhash.Hash, size uint64, txCount uint64) error { // panic("mock out the MarkBlockAsDone method") // }, // PingFunc: func(ctx context.Context) error { // panic("mock out the Ping method") // }, -// RegisterTransactionsFunc: func(ctx context.Context, txHashes [][]byte) ([]*chainhash.Hash, error) { +// RegisterTransactionsFunc: func(ctx context.Context, txHashes [][]byte) error { // panic("mock out the RegisterTransactions method") // }, // SetBlockProcessingFunc: func(ctx context.Context, hash *chainhash.Hash, processedBy string) (string, error) { @@ -81,9 +87,6 @@ var _ store.BlocktxStore = &BlocktxStoreMock{} // UpsertBlockFunc: func(ctx context.Context, block *blocktx_api.Block) (uint64, error) { // panic("mock out the UpsertBlock method") // }, -// UpsertBlockTransactionsFunc: func(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) error { -// panic("mock out the UpsertBlockTransactions method") -// }, // VerifyMerkleRootsFunc: func(ctx context.Context, merkleRoots []*blocktx_api.MerkleRootVerificationRequest, maxAllowedBlockHeightMismatch int) (*blocktx_api.MerkleRootVerificationResponse, error) { // panic("mock out the VerifyMerkleRoots method") // }, @@ -112,8 +115,11 @@ type BlocktxStoreMock struct { // GetBlockHashesProcessingInProgressFunc mocks the GetBlockHashesProcessingInProgress method. GetBlockHashesProcessingInProgressFunc func(ctx context.Context, processedBy string) ([]*chainhash.Hash, error) + // GetBlockTransactionsHashesFunc mocks the GetBlockTransactionsHashes method. + GetBlockTransactionsHashesFunc func(ctx context.Context, blockHash []byte) ([]*chainhash.Hash, error) + // GetChainTipFunc mocks the GetChainTip method. - GetChainTipFunc func(ctx context.Context) (*blocktx_api.Block, error) + GetChainTipFunc func(ctx context.Context, heightRange int) (*blocktx_api.Block, error) // GetLongestBlockByHeightFunc mocks the GetLongestBlockByHeight method. GetLongestBlockByHeightFunc func(ctx context.Context, height uint64) (*blocktx_api.Block, error) @@ -122,13 +128,13 @@ type BlocktxStoreMock struct { GetLongestChainFromHeightFunc func(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) // GetMinedTransactionsFunc mocks the GetMinedTransactions method. - GetMinedTransactionsFunc func(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) + GetMinedTransactionsFunc func(ctx context.Context, hashes [][]byte) ([]store.BlockTransaction, error) // GetOrphansBackToNonOrphanAncestorFunc mocks the GetOrphansBackToNonOrphanAncestor method. GetOrphansBackToNonOrphanAncestorFunc func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, *blocktx_api.Block, error) // GetRegisteredTxsByBlockHashesFunc mocks the GetRegisteredTxsByBlockHashes method. - GetRegisteredTxsByBlockHashesFunc func(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) + GetRegisteredTxsByBlockHashesFunc func(ctx context.Context, blockHashes [][]byte) ([]store.BlockTransaction, error) // GetStaleChainBackFromHashFunc mocks the GetStaleChainBackFromHash method. 
GetStaleChainBackFromHashFunc func(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) @@ -136,6 +142,9 @@ type BlocktxStoreMock struct { // GetStatsFunc mocks the GetStats method. GetStatsFunc func(ctx context.Context) (*store.Stats, error) + // InsertBlockTransactionsFunc mocks the InsertBlockTransactions method. + InsertBlockTransactionsFunc func(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxHashWithMerkleTreeIndex) error + // MarkBlockAsDoneFunc mocks the MarkBlockAsDone method. MarkBlockAsDoneFunc func(ctx context.Context, hash *chainhash.Hash, size uint64, txCount uint64) error @@ -143,7 +152,7 @@ type BlocktxStoreMock struct { PingFunc func(ctx context.Context) error // RegisterTransactionsFunc mocks the RegisterTransactions method. - RegisterTransactionsFunc func(ctx context.Context, txHashes [][]byte) ([]*chainhash.Hash, error) + RegisterTransactionsFunc func(ctx context.Context, txHashes [][]byte) error // SetBlockProcessingFunc mocks the SetBlockProcessing method. SetBlockProcessingFunc func(ctx context.Context, hash *chainhash.Hash, processedBy string) (string, error) @@ -154,9 +163,6 @@ type BlocktxStoreMock struct { // UpsertBlockFunc mocks the UpsertBlock method. UpsertBlockFunc func(ctx context.Context, block *blocktx_api.Block) (uint64, error) - // UpsertBlockTransactionsFunc mocks the UpsertBlockTransactions method. - UpsertBlockTransactionsFunc func(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) error - // VerifyMerkleRootsFunc mocks the VerifyMerkleRoots method. VerifyMerkleRootsFunc func(ctx context.Context, merkleRoots []*blocktx_api.MerkleRootVerificationRequest, maxAllowedBlockHeightMismatch int) (*blocktx_api.MerkleRootVerificationResponse, error) @@ -204,10 +210,19 @@ type BlocktxStoreMock struct { // ProcessedBy is the processedBy argument value. ProcessedBy string } + // GetBlockTransactionsHashes holds details about calls to the GetBlockTransactionsHashes method. + GetBlockTransactionsHashes []struct { + // Ctx is the ctx argument value. + Ctx context.Context + // BlockHash is the blockHash argument value. + BlockHash []byte + } // GetChainTip holds details about calls to the GetChainTip method. GetChainTip []struct { // Ctx is the ctx argument value. Ctx context.Context + // HeightRange is the heightRange argument value. + HeightRange int } // GetLongestBlockByHeight holds details about calls to the GetLongestBlockByHeight method. GetLongestBlockByHeight []struct { @@ -229,8 +244,6 @@ type BlocktxStoreMock struct { Ctx context.Context // Hashes is the hashes argument value. Hashes [][]byte - // OnlyLongestChain is the onlyLongestChain argument value. - OnlyLongestChain bool } // GetOrphansBackToNonOrphanAncestor holds details about calls to the GetOrphansBackToNonOrphanAncestor method. GetOrphansBackToNonOrphanAncestor []struct { @@ -258,6 +271,15 @@ type BlocktxStoreMock struct { // Ctx is the ctx argument value. Ctx context.Context } + // InsertBlockTransactions holds details about calls to the InsertBlockTransactions method. + InsertBlockTransactions []struct { + // Ctx is the ctx argument value. + Ctx context.Context + // BlockID is the blockID argument value. + BlockID uint64 + // TxsWithMerklePaths is the txsWithMerklePaths argument value. + TxsWithMerklePaths []store.TxHashWithMerkleTreeIndex + } // MarkBlockAsDone holds details about calls to the MarkBlockAsDone method. MarkBlockAsDone []struct { // Ctx is the ctx argument value. 
@@ -304,15 +326,6 @@ type BlocktxStoreMock struct { // Block is the block argument value. Block *blocktx_api.Block } - // UpsertBlockTransactions holds details about calls to the UpsertBlockTransactions method. - UpsertBlockTransactions []struct { - // Ctx is the ctx argument value. - Ctx context.Context - // BlockID is the blockID argument value. - BlockID uint64 - // TxsWithMerklePaths is the txsWithMerklePaths argument value. - TxsWithMerklePaths []store.TxWithMerklePath - } // VerifyMerkleRoots holds details about calls to the VerifyMerkleRoots method. VerifyMerkleRoots []struct { // Ctx is the ctx argument value. @@ -329,6 +342,7 @@ type BlocktxStoreMock struct { lockGetBlock sync.RWMutex lockGetBlockGaps sync.RWMutex lockGetBlockHashesProcessingInProgress sync.RWMutex + lockGetBlockTransactionsHashes sync.RWMutex lockGetChainTip sync.RWMutex lockGetLongestBlockByHeight sync.RWMutex lockGetLongestChainFromHeight sync.RWMutex @@ -337,13 +351,13 @@ type BlocktxStoreMock struct { lockGetRegisteredTxsByBlockHashes sync.RWMutex lockGetStaleChainBackFromHash sync.RWMutex lockGetStats sync.RWMutex + lockInsertBlockTransactions sync.RWMutex lockMarkBlockAsDone sync.RWMutex lockPing sync.RWMutex lockRegisterTransactions sync.RWMutex lockSetBlockProcessing sync.RWMutex lockUpdateBlocksStatuses sync.RWMutex lockUpsertBlock sync.RWMutex - lockUpsertBlockTransactions sync.RWMutex lockVerifyMerkleRoots sync.RWMutex } @@ -562,20 +576,58 @@ func (mock *BlocktxStoreMock) GetBlockHashesProcessingInProgressCalls() []struct return calls } +// GetBlockTransactionsHashes calls GetBlockTransactionsHashesFunc. +func (mock *BlocktxStoreMock) GetBlockTransactionsHashes(ctx context.Context, blockHash []byte) ([]*chainhash.Hash, error) { + if mock.GetBlockTransactionsHashesFunc == nil { + panic("BlocktxStoreMock.GetBlockTransactionsHashesFunc: method is nil but BlocktxStore.GetBlockTransactionsHashes was just called") + } + callInfo := struct { + Ctx context.Context + BlockHash []byte + }{ + Ctx: ctx, + BlockHash: blockHash, + } + mock.lockGetBlockTransactionsHashes.Lock() + mock.calls.GetBlockTransactionsHashes = append(mock.calls.GetBlockTransactionsHashes, callInfo) + mock.lockGetBlockTransactionsHashes.Unlock() + return mock.GetBlockTransactionsHashesFunc(ctx, blockHash) +} + +// GetBlockTransactionsHashesCalls gets all the calls that were made to GetBlockTransactionsHashes. +// Check the length with: +// +// len(mockedBlocktxStore.GetBlockTransactionsHashesCalls()) +func (mock *BlocktxStoreMock) GetBlockTransactionsHashesCalls() []struct { + Ctx context.Context + BlockHash []byte +} { + var calls []struct { + Ctx context.Context + BlockHash []byte + } + mock.lockGetBlockTransactionsHashes.RLock() + calls = mock.calls.GetBlockTransactionsHashes + mock.lockGetBlockTransactionsHashes.RUnlock() + return calls +} + // GetChainTip calls GetChainTipFunc. 
-func (mock *BlocktxStoreMock) GetChainTip(ctx context.Context) (*blocktx_api.Block, error) { +func (mock *BlocktxStoreMock) GetChainTip(ctx context.Context, heightRange int) (*blocktx_api.Block, error) { if mock.GetChainTipFunc == nil { panic("BlocktxStoreMock.GetChainTipFunc: method is nil but BlocktxStore.GetChainTip was just called") } callInfo := struct { - Ctx context.Context + Ctx context.Context + HeightRange int }{ - Ctx: ctx, + Ctx: ctx, + HeightRange: heightRange, } mock.lockGetChainTip.Lock() mock.calls.GetChainTip = append(mock.calls.GetChainTip, callInfo) mock.lockGetChainTip.Unlock() - return mock.GetChainTipFunc(ctx) + return mock.GetChainTipFunc(ctx, heightRange) } // GetChainTipCalls gets all the calls that were made to GetChainTip. @@ -583,10 +635,12 @@ func (mock *BlocktxStoreMock) GetChainTip(ctx context.Context) (*blocktx_api.Blo // // len(mockedBlocktxStore.GetChainTipCalls()) func (mock *BlocktxStoreMock) GetChainTipCalls() []struct { - Ctx context.Context + Ctx context.Context + HeightRange int } { var calls []struct { - Ctx context.Context + Ctx context.Context + HeightRange int } mock.lockGetChainTip.RLock() calls = mock.calls.GetChainTip @@ -667,23 +721,21 @@ func (mock *BlocktxStoreMock) GetLongestChainFromHeightCalls() []struct { } // GetMinedTransactions calls GetMinedTransactionsFunc. -func (mock *BlocktxStoreMock) GetMinedTransactions(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]store.TransactionBlock, error) { +func (mock *BlocktxStoreMock) GetMinedTransactions(ctx context.Context, hashes [][]byte) ([]store.BlockTransaction, error) { if mock.GetMinedTransactionsFunc == nil { panic("BlocktxStoreMock.GetMinedTransactionsFunc: method is nil but BlocktxStore.GetMinedTransactions was just called") } callInfo := struct { - Ctx context.Context - Hashes [][]byte - OnlyLongestChain bool + Ctx context.Context + Hashes [][]byte }{ - Ctx: ctx, - Hashes: hashes, - OnlyLongestChain: onlyLongestChain, + Ctx: ctx, + Hashes: hashes, } mock.lockGetMinedTransactions.Lock() mock.calls.GetMinedTransactions = append(mock.calls.GetMinedTransactions, callInfo) mock.lockGetMinedTransactions.Unlock() - return mock.GetMinedTransactionsFunc(ctx, hashes, onlyLongestChain) + return mock.GetMinedTransactionsFunc(ctx, hashes) } // GetMinedTransactionsCalls gets all the calls that were made to GetMinedTransactions. @@ -691,14 +743,12 @@ func (mock *BlocktxStoreMock) GetMinedTransactions(ctx context.Context, hashes [ // // len(mockedBlocktxStore.GetMinedTransactionsCalls()) func (mock *BlocktxStoreMock) GetMinedTransactionsCalls() []struct { - Ctx context.Context - Hashes [][]byte - OnlyLongestChain bool + Ctx context.Context + Hashes [][]byte } { var calls []struct { - Ctx context.Context - Hashes [][]byte - OnlyLongestChain bool + Ctx context.Context + Hashes [][]byte } mock.lockGetMinedTransactions.RLock() calls = mock.calls.GetMinedTransactions @@ -743,7 +793,7 @@ func (mock *BlocktxStoreMock) GetOrphansBackToNonOrphanAncestorCalls() []struct } // GetRegisteredTxsByBlockHashes calls GetRegisteredTxsByBlockHashesFunc. 
-func (mock *BlocktxStoreMock) GetRegisteredTxsByBlockHashes(ctx context.Context, blockHashes [][]byte) ([]store.TransactionBlock, error) { +func (mock *BlocktxStoreMock) GetRegisteredTxsByBlockHashes(ctx context.Context, blockHashes [][]byte) ([]store.BlockTransaction, error) { if mock.GetRegisteredTxsByBlockHashesFunc == nil { panic("BlocktxStoreMock.GetRegisteredTxsByBlockHashesFunc: method is nil but BlocktxStore.GetRegisteredTxsByBlockHashes was just called") } @@ -846,6 +896,46 @@ func (mock *BlocktxStoreMock) GetStatsCalls() []struct { return calls } +// InsertBlockTransactions calls InsertBlockTransactionsFunc. +func (mock *BlocktxStoreMock) InsertBlockTransactions(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxHashWithMerkleTreeIndex) error { + if mock.InsertBlockTransactionsFunc == nil { + panic("BlocktxStoreMock.InsertBlockTransactionsFunc: method is nil but BlocktxStore.InsertBlockTransactions was just called") + } + callInfo := struct { + Ctx context.Context + BlockID uint64 + TxsWithMerklePaths []store.TxHashWithMerkleTreeIndex + }{ + Ctx: ctx, + BlockID: blockID, + TxsWithMerklePaths: txsWithMerklePaths, + } + mock.lockInsertBlockTransactions.Lock() + mock.calls.InsertBlockTransactions = append(mock.calls.InsertBlockTransactions, callInfo) + mock.lockInsertBlockTransactions.Unlock() + return mock.InsertBlockTransactionsFunc(ctx, blockID, txsWithMerklePaths) +} + +// InsertBlockTransactionsCalls gets all the calls that were made to InsertBlockTransactions. +// Check the length with: +// +// len(mockedBlocktxStore.InsertBlockTransactionsCalls()) +func (mock *BlocktxStoreMock) InsertBlockTransactionsCalls() []struct { + Ctx context.Context + BlockID uint64 + TxsWithMerklePaths []store.TxHashWithMerkleTreeIndex +} { + var calls []struct { + Ctx context.Context + BlockID uint64 + TxsWithMerklePaths []store.TxHashWithMerkleTreeIndex + } + mock.lockInsertBlockTransactions.RLock() + calls = mock.calls.InsertBlockTransactions + mock.lockInsertBlockTransactions.RUnlock() + return calls +} + // MarkBlockAsDone calls MarkBlockAsDoneFunc. func (mock *BlocktxStoreMock) MarkBlockAsDone(ctx context.Context, hash *chainhash.Hash, size uint64, txCount uint64) error { if mock.MarkBlockAsDoneFunc == nil { @@ -923,7 +1013,7 @@ func (mock *BlocktxStoreMock) PingCalls() []struct { } // RegisterTransactions calls RegisterTransactionsFunc. -func (mock *BlocktxStoreMock) RegisterTransactions(ctx context.Context, txHashes [][]byte) ([]*chainhash.Hash, error) { +func (mock *BlocktxStoreMock) RegisterTransactions(ctx context.Context, txHashes [][]byte) error { if mock.RegisterTransactionsFunc == nil { panic("BlocktxStoreMock.RegisterTransactionsFunc: method is nil but BlocktxStore.RegisterTransactions was just called") } @@ -1070,46 +1160,6 @@ func (mock *BlocktxStoreMock) UpsertBlockCalls() []struct { return calls } -// UpsertBlockTransactions calls UpsertBlockTransactionsFunc. 
-func (mock *BlocktxStoreMock) UpsertBlockTransactions(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) error { - if mock.UpsertBlockTransactionsFunc == nil { - panic("BlocktxStoreMock.UpsertBlockTransactionsFunc: method is nil but BlocktxStore.UpsertBlockTransactions was just called") - } - callInfo := struct { - Ctx context.Context - BlockID uint64 - TxsWithMerklePaths []store.TxWithMerklePath - }{ - Ctx: ctx, - BlockID: blockID, - TxsWithMerklePaths: txsWithMerklePaths, - } - mock.lockUpsertBlockTransactions.Lock() - mock.calls.UpsertBlockTransactions = append(mock.calls.UpsertBlockTransactions, callInfo) - mock.lockUpsertBlockTransactions.Unlock() - return mock.UpsertBlockTransactionsFunc(ctx, blockID, txsWithMerklePaths) -} - -// UpsertBlockTransactionsCalls gets all the calls that were made to UpsertBlockTransactions. -// Check the length with: -// -// len(mockedBlocktxStore.UpsertBlockTransactionsCalls()) -func (mock *BlocktxStoreMock) UpsertBlockTransactionsCalls() []struct { - Ctx context.Context - BlockID uint64 - TxsWithMerklePaths []store.TxWithMerklePath -} { - var calls []struct { - Ctx context.Context - BlockID uint64 - TxsWithMerklePaths []store.TxWithMerklePath - } - mock.lockUpsertBlockTransactions.RLock() - calls = mock.calls.UpsertBlockTransactions - mock.lockUpsertBlockTransactions.RUnlock() - return calls -} - // VerifyMerkleRoots calls VerifyMerkleRootsFunc. func (mock *BlocktxStoreMock) VerifyMerkleRoots(ctx context.Context, merkleRoots []*blocktx_api.MerkleRootVerificationRequest, maxAllowedBlockHeightMismatch int) (*blocktx_api.MerkleRootVerificationResponse, error) { if mock.VerifyMerkleRootsFunc == nil { diff --git a/internal/blocktx/store/model.go b/internal/blocktx/store/model.go index e4aaaab57..b3183c907 100644 --- a/internal/blocktx/store/model.go +++ b/internal/blocktx/store/model.go @@ -1,8 +1,9 @@ package store import ( - "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" "github.com/libsv/go-p2p/chaincfg/chainhash" + + "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" ) type BlockGap struct { @@ -10,17 +11,22 @@ type BlockGap struct { Hash *chainhash.Hash } -type TxWithMerklePath struct { - Hash []byte +type TxHashWithMerkleTreeIndex struct { + Hash []byte + MerkleTreeIndex int64 +} + +type BlockTransactionWithMerklePath struct { + BlockTransaction MerklePath string } -type TransactionBlock struct { - TxHash []byte - BlockHash []byte - BlockHeight uint64 - MerklePath string - BlockStatus blocktx_api.Status +type BlockTransaction struct { + TxHash []byte + BlockHash []byte + BlockHeight uint64 + MerkleTreeIndex int64 + BlockStatus blocktx_api.Status } type BlockStatusUpdate struct { diff --git a/internal/blocktx/store/postgresql/fixtures/clear_data/blocktx.transactions.yaml b/internal/blocktx/store/postgresql/fixtures/clear_data/blocktx.block_transactions.yaml similarity index 63% rename from internal/blocktx/store/postgresql/fixtures/clear_data/blocktx.transactions.yaml rename to internal/blocktx/store/postgresql/fixtures/clear_data/blocktx.block_transactions.yaml index 8b36a806e..862a59db7 100644 --- a/internal/blocktx/store/postgresql/fixtures/clear_data/blocktx.transactions.yaml +++ b/internal/blocktx/store/postgresql/fixtures/clear_data/blocktx.block_transactions.yaml @@ -1,30 +1,30 @@ -- id: 1 +- block_id: 1 hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853 - inserted_at: 2023-12-10 14:00:00 -- id: 2 + merkle_tree_index: 1 +- block_id: 1 hash: 
0x21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e - inserted_at: 2023-12-10 14:00:00 -- id: 3 + merkle_tree_index: 2 +- block_id: 1 hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430 - inserted_at: 2023-12-10 14:00:00 -- id: 4 + merkle_tree_index: 3 +- block_id: 1 hash: 0xee76f5b746893d3e6ae6a14a15e464704f4ebd601537820933789740acdcf6aa - inserted_at: 2023-12-10 14:00:00 -- id: 5 + merkle_tree_index: 4 +- block_id: 1 hash: 0x3e0b5b218c344110f09bf485bc58de4ea5378e55744185edf9c1dafa40068ecd - inserted_at: 2023-12-10 14:00:00 -- id: 6 + merkle_tree_index: 5 +- block_id: 2 hash: 0x213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0 - inserted_at: 2023-12-15 14:00:00 -- id: 7 + merkle_tree_index: 6 +- block_id: 2 hash: 0x12c04cfc5643f1cd25639ad42d6f8f0489557699d92071d7e0a5b940438c4357 - inserted_at: 2023-12-15 14:00:00 -- id: 8 + merkle_tree_index: 7 +- block_id: 2 hash: 0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6 - inserted_at: 2023-12-15 14:00:00 -- id: 9 + merkle_tree_index: 8 +- block_id: 2 hash: 0x5c5b621b81fb63d9df4595ee2e6b3c50cce1f5f0e1b83510aac504931ed22799 - inserted_at: 2023-12-15 14:00:00 -- id: 10 + merkle_tree_index: 9 +- block_id: 2 hash: 0xa3d4e78a8e11e97c8faf34880da861412273948edf467f23590601a1057079d8 - inserted_at: 2023-12-15 14:00:00 + merkle_tree_index: 10 diff --git a/internal/blocktx/store/postgresql/fixtures/clear_data/blocktx.block_transactions_map.yaml b/internal/blocktx/store/postgresql/fixtures/clear_data/blocktx.block_transactions_map.yaml deleted file mode 100644 index a405ab034..000000000 --- a/internal/blocktx/store/postgresql/fixtures/clear_data/blocktx.block_transactions_map.yaml +++ /dev/null @@ -1,40 +0,0 @@ -- blockid: 1 - txid: 1 - merkle_path: merkle-path-1 - inserted_at: 2023-12-10 14:00:00 -- blockid: 1 - txid: 2 - merkle_path: merkle-path-2 - inserted_at: 2023-12-10 14:00:00 -- blockid: 1 - txid: 3 - merkle_path: merkle-path-3 - inserted_at: 2023-12-10 14:00:00 -- blockid: 1 - txid: 4 - merkle_path: merkle-path-4 - inserted_at: 2023-12-10 14:00:00 -- blockid: 1 - txid: 5 - merkle_path: merkle-path-5 - inserted_at: 2023-12-10 14:00:00 -- blockid: 2 - txid: 6 - merkle_path: merkle-path-6 - inserted_at: 2023-12-15 14:00:00 -- blockid: 2 - txid: 7 - merkle_path: merkle-path-7 - inserted_at: 2023-12-15 14:00:00 -- blockid: 2 - txid: 8 - merkle_path: merkle-path-8 - inserted_at: 2023-12-15 14:00:00 -- blockid: 2 - txid: 9 - merkle_path: merkle-path-9 - inserted_at: 2023-12-15 14:00:00 -- blockid: 2 - txid: 10 - merkle_path: merkle-path-10 - inserted_at: 2023-12-15 14:00:00 diff --git a/internal/blocktx/store/postgresql/fixtures/get_block_by_height/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/get_block_by_height/blocktx.blocks.yaml index c1a8f0b30..f6018101b 100644 --- a/internal/blocktx/store/postgresql/fixtures/get_block_by_height/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/get_block_by_height/blocktx.blocks.yaml @@ -54,3 +54,15 @@ tx_count: 36724 status: 10 chainwork: '123456' +- inserted_at: 2023-12-15 14:50:00 + id: 5 + hash: 0x000000000000000005167c951069b0e3c803753b8ebeaa2ddaca85b89526b297 + prevhash: 0x76404890880cb36ce68100abb05b3a958e17c0ed274d5c0a0000000000000000 + merkleroot: 0xc458aa382364e216c9c0533175ec8579a544c750ca181b18296e784d1dc53085 + height: 822025 + processed_at: 2023-12-15 14:40:00 + size: 8630000 + tx_count: 36724 + status: 30 # ORPHANED + is_longest: false + chainwork: '123456' diff --git 
a/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.transactions.yaml b/internal/blocktx/store/postgresql/fixtures/get_block_transactions/blocktx.block_transactions.yaml similarity index 50% rename from internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.transactions.yaml rename to internal/blocktx/store/postgresql/fixtures/get_block_transactions/blocktx.block_transactions.yaml index 8ae03913e..446cd479e 100644 --- a/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.transactions.yaml +++ b/internal/blocktx/store/postgresql/fixtures/get_block_transactions/blocktx.block_transactions.yaml @@ -1,57 +1,39 @@ -# txs from block 1 - LONGEST -- id: 1 +- block_id: 1 hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853 - inserted_at: 2023-12-10 14:00:00 - is_registered: false -- id: 2 + merkle_tree_index: 1 +- block_id: 1 hash: 0x21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e - inserted_at: 2023-12-10 14:00:00 - is_registered: true -- id: 3 + merkle_tree_index: 2 +- block_id: 1 hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430 - inserted_at: 2023-12-10 14:00:00 - is_registered: false -- id: 4 + merkle_tree_index: 3 +- block_id: 1 hash: 0xee76f5b746893d3e6ae6a14a15e464704f4ebd601537820933789740acdcf6aa - inserted_at: 2023-12-10 14:00:00 - is_registered: false -- id: 5 + merkle_tree_index: 4 +- block_id: 1 hash: 0x3e0b5b218c344110f09bf485bc58de4ea5378e55744185edf9c1dafa40068ecd - inserted_at: 2023-12-10 14:00:00 - is_registered: false - -# txs from block 2 - STALE -- id: 6 + merkle_tree_index: 5 +- block_id: 2 hash: 0x213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0 - inserted_at: 2023-12-15 14:00:00 - is_registered: true -- id: 7 + merkle_tree_index: 1 +- block_id: 2 hash: 0x12c04cfc5643f1cd25639ad42d6f8f0489557699d92071d7e0a5b940438c4357 - inserted_at: 2023-12-15 14:00:00 - is_registered: true -- id: 8 + merkle_tree_index: 2 +- block_id: 2 hash: 0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6 - inserted_at: 2023-12-15 14:00:00 - is_registered: false -- id: 9 + merkle_tree_index: 3 +- block_id: 2 hash: 0x5c5b621b81fb63d9df4595ee2e6b3c50cce1f5f0e1b83510aac504931ed22799 - inserted_at: 2023-12-15 14:00:00 - is_registered: false -- id: 10 + merkle_tree_index: 4 +- block_id: 2 hash: 0xa3d4e78a8e11e97c8faf34880da861412273948edf467f23590601a1057079d8 - inserted_at: 2023-12-15 14:00:00 - is_registered: false - -# txs from block 3 - LONGEST -- id: 11 + merkle_tree_index: 5 +- block_id: 3 hash: 0x2eb9f15adaf9e7d1de19f3ebc6bf95b62871a4e053c30ac0d1b1df85a6163d8e - inserted_at: 2023-12-15 14:00:00 - is_registered: true -- id: 12 + merkle_tree_index: 6 +- block_id: 3 hash: 0x45ad0e3de133e386faeff8ecf12b665875d527031b9aa75ca96d3fc2b7098fa5 - inserted_at: 2023-12-15 14:00:00 - is_registered: false -- id: 13 + merkle_tree_index: 7 +- block_id: 3 hash: 0x4bac520c26dba4e24c3fb73bf4fd0d66e45ec39b976d1d052f8a4f499f4aa004 - inserted_at: 2023-12-15 14:00:00 - is_registered: false + merkle_tree_index: 8 diff --git a/internal/blocktx/store/postgresql/fixtures/get_block_transactions/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/get_block_transactions/blocktx.blocks.yaml new file mode 100644 index 000000000..06c1dba87 --- /dev/null +++ b/internal/blocktx/store/postgresql/fixtures/get_block_transactions/blocktx.blocks.yaml @@ -0,0 +1,33 @@ +- inserted_at: 2023-12-10 14:00:00 + id: 1 + hash: 0x000000000000000005aa39a25e7e8bf440c270ec9a1bd30e99ab026f39207ef9 + 
prevhash: 0x0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7 + merkleroot: 0x7f4019eb006f5333cce752df387fa8443035c22291eb771ee5b16a02b81c8483 + height: 822013 + processed_at: 2023-12-10 14:10:00 + size: 86840000 + tx_count: 23477 + status: 10 # LONGEST + is_longest: true +- inserted_at: 2023-12-15 14:00:00 + id: 2 + hash: 0x0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7 + prevhash: 0x000000000000000002a0926c51854d2bd525c26026ab0f178ca07f723b31033a + merkleroot: 0x3eeee879a8a08fc537a04682178687bb0e58a5103938eafc349705a2acb06410 + height: 822012 + processed_at: 2023-12-15 14:10:00 + size: 3030000 + tx_count: 856 + status: 20 # STALE + is_longest: false +- inserted_at: 2023-12-10 14:00:00 + id: 3 + hash: 0x0000000000000000025855b62f4c2e3732dad363a6f2ead94e4657ef96877067 + prevhash: 0x000000000000000005aa39a25e7e8bf440c270ec9a1bd30e99ab026f39207ef9 + merkleroot: 0x713e7713ebc6414420a418e87bbd42ecaf7e0e38c923c2cf9e72718d3c329acb + height: 822014 + processed_at: 2023-12-10 14:10:00 + size: 86840000 + tx_count: 23477 + status: 10 # LONGEST + is_longest: true diff --git a/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.block_transactions.yaml b/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.block_transactions.yaml new file mode 100644 index 000000000..a3f664271 --- /dev/null +++ b/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.block_transactions.yaml @@ -0,0 +1,39 @@ +- block_id: 1 + hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853 + merkle_tree_index: 1 +- block_id: 1 + hash: 0x21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e + merkle_tree_index: 2 +- block_id: 1 + hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430 + merkle_tree_index: 3 +- block_id: 1 + hash: 0xee76f5b746893d3e6ae6a14a15e464704f4ebd601537820933789740acdcf6aa + merkle_tree_index: 4 +- block_id: 1 + hash: 0x3e0b5b218c344110f09bf485bc58de4ea5378e55744185edf9c1dafa40068ecd + merkle_tree_index: 5 +- block_id: 2 + hash: 0x213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0 + merkle_tree_index: 6 +- block_id: 2 + hash: 0x12c04cfc5643f1cd25639ad42d6f8f0489557699d92071d7e0a5b940438c4357 + merkle_tree_index: 7 +- block_id: 2 + hash: 0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6 + merkle_tree_index: 8 +- block_id: 2 + hash: 0x5c5b621b81fb63d9df4595ee2e6b3c50cce1f5f0e1b83510aac504931ed22799 + merkle_tree_index: 9 +- block_id: 2 + hash: 0xa3d4e78a8e11e97c8faf34880da861412273948edf467f23590601a1057079d8 + merkle_tree_index: 10 +- block_id: 3 + hash: 0x2eb9f15adaf9e7d1de19f3ebc6bf95b62871a4e053c30ac0d1b1df85a6163d8e + merkle_tree_index: 11 +- block_id: 3 + hash: 0x45ad0e3de133e386faeff8ecf12b665875d527031b9aa75ca96d3fc2b7098fa5 + merkle_tree_index: 12 +- block_id: 3 + hash: 0x4bac520c26dba4e24c3fb73bf4fd0d66e45ec39b976d1d052f8a4f499f4aa004 + merkle_tree_index: 13 diff --git a/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.block_transactions_map.yaml b/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.block_transactions_map.yaml deleted file mode 100644 index 3bceb9ce5..000000000 --- a/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.block_transactions_map.yaml +++ /dev/null @@ -1,52 +0,0 @@ -- blockid: 1 - txid: 1 - merkle_path: merkle-path-1 - inserted_at: 2023-12-10 14:00:00 -- blockid: 1 - txid: 2 - merkle_path: merkle-path-2 - inserted_at: 2023-12-10 14:00:00 -- blockid: 1 - txid: 3 - 
merkle_path: merkle-path-3 - inserted_at: 2023-12-10 14:00:00 -- blockid: 1 - txid: 4 - merkle_path: merkle-path-4 - inserted_at: 2023-12-10 14:00:00 -- blockid: 1 - txid: 5 - merkle_path: merkle-path-5 - inserted_at: 2023-12-10 14:00:00 -- blockid: 2 - txid: 6 - merkle_path: merkle-path-6 - inserted_at: 2023-12-15 14:00:00 -- blockid: 2 - txid: 7 - merkle_path: merkle-path-7 - inserted_at: 2023-12-15 14:00:00 -- blockid: 2 - txid: 8 - merkle_path: merkle-path-8 - inserted_at: 2023-12-15 14:00:00 -- blockid: 2 - txid: 9 - merkle_path: merkle-path-9 - inserted_at: 2023-12-15 14:00:00 -- blockid: 2 - txid: 10 - merkle_path: merkle-path-10 - inserted_at: 2023-12-15 14:00:00 -- blockid: 3 - txid: 11 - merkle_path: merkle-path-11 - inserted_at: 2023-12-15 14:00:00 -- blockid: 3 - txid: 12 - merkle_path: merkle-path-12 - inserted_at: 2023-12-15 14:00:00 -- blockid: 3 - txid: 13 - merkle_path: merkle-path-13 - inserted_at: 2023-12-15 14:00:00 diff --git a/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.registered_transactions.yaml b/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.registered_transactions.yaml new file mode 100644 index 000000000..d1b96a254 --- /dev/null +++ b/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.registered_transactions.yaml @@ -0,0 +1,8 @@ +- hash: 0x21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e + inserted_at: 2023-12-10 14:00:00 +- hash: 0x213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0 + inserted_at: 2023-12-15 14:00:00 +- hash: 0x12c04cfc5643f1cd25639ad42d6f8f0489557699d92071d7e0a5b940438c4357 + inserted_at: 2023-12-15 14:00:00 +- hash: 0x2eb9f15adaf9e7d1de19f3ebc6bf95b62871a4e053c30ac0d1b1df85a6163d8e + inserted_at: 2023-12-15 14:00:00 diff --git a/internal/blocktx/store/postgresql/fixtures/upsert_block_transactions/blocktx.transactions.yaml b/internal/blocktx/store/postgresql/fixtures/insert_block_transactions/blocktx.block_transactions.yaml similarity index 50% rename from internal/blocktx/store/postgresql/fixtures/upsert_block_transactions/blocktx.transactions.yaml rename to internal/blocktx/store/postgresql/fixtures/insert_block_transactions/blocktx.block_transactions.yaml index 944dd462a..c15db843e 100644 --- a/internal/blocktx/store/postgresql/fixtures/upsert_block_transactions/blocktx.transactions.yaml +++ b/internal/blocktx/store/postgresql/fixtures/insert_block_transactions/blocktx.block_transactions.yaml @@ -1,32 +1,24 @@ -- id: 110383995 +- block_id: 9736 hash: 0x76732b80598326a18d3bf0a86518adbdf95d0ddc6ff6693004440f4776168c3b - inserted_at: 2024-01-09 13:00:00 - is_registered: TRUE -- id: 115361489 + merkle_tree_index: 1 +- block_id: 9736 hash: 0x164e85a5d5bc2b2372e8feaa266e5e4b7d0808f8d2b784fb1f7349c4726392b0 - inserted_at: 2024-01-10 12:00:00 - is_registered: TRUE -- id: 115361490 + merkle_tree_index: 2 +- block_id: 9736 hash: 0xb4201cc6fc5768abff14adf75042ace6061da9176ee5bb943291b9ba7d7f5743 - inserted_at: 2024-01-10 12:00:00 - is_registered: TRUE -- id: 115361491 + merkle_tree_index: 3 +- block_id: 9736 hash: 0x37bd6c87927e75faeb3b3c939f64721cda48e1bb98742676eebe83aceee1a669 - inserted_at: 2024-01-10 12:00:00 - is_registered: TRUE -- id: 115361492 + merkle_tree_index: 4 +- block_id: 9736 hash: 0x952f80e20a0330f3b9c2dfd1586960064e797218b5c5df665cada221452c17eb - inserted_at: 2024-01-10 12:00:00 - is_registered: TRUE -- id: 115361493 + merkle_tree_index: 5 +- block_id: 9736 hash: 0x861a281b27de016e50887288de87eab5ca56a1bb172cdff6dba965474ce0f608 - 
inserted_at: 2024-01-10 12:00:00 - is_registered: TRUE -- id: 115361494 + merkle_tree_index: 6 +- block_id: 9736 hash: 0x9421cc760c5405af950a76dc3e4345eaefd4e7322f172a3aee5e0ddc7b4f8313 - inserted_at: 2024-01-10 12:00:00 - is_registered: TRUE -- id: 115361495 + merkle_tree_index: 7 +- block_id: 9736 hash: 0x8b7d038db4518ac4c665abfc5aeaacbd2124ad8ca70daa8465ed2c4427c41b9b - inserted_at: 2024-01-10 12:00:00 - is_registered: TRUE + merkle_tree_index: 8 diff --git a/internal/blocktx/store/postgresql/fixtures/upsert_block_transactions/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/insert_block_transactions/blocktx.blocks.yaml similarity index 97% rename from internal/blocktx/store/postgresql/fixtures/upsert_block_transactions/blocktx.blocks.yaml rename to internal/blocktx/store/postgresql/fixtures/insert_block_transactions/blocktx.blocks.yaml index 59e5f2fbe..8c4d7ec35 100644 --- a/internal/blocktx/store/postgresql/fixtures/upsert_block_transactions/blocktx.blocks.yaml +++ b/internal/blocktx/store/postgresql/fixtures/insert_block_transactions/blocktx.blocks.yaml @@ -4,7 +4,7 @@ prevhash: 0x000000000000000001a7aa3999410ca53fb645851531ec0a7a5cb9ce2d4ae313 merkleroot: 0x0d72bf92e7862df18d1935c171ca4dbb70d268b0f025e46716e913bc7e4f2bdb height: 826481 - status: 10 # STALE + status: 10 is_longest: true processed_at: 2024-01-10 13:06:06.122 size: 108689370 diff --git a/internal/blocktx/store/postgresql/fixtures/insert_block_transactions/blocktx.registered_transactions.yaml b/internal/blocktx/store/postgresql/fixtures/insert_block_transactions/blocktx.registered_transactions.yaml new file mode 100644 index 000000000..a1fb0d6fc --- /dev/null +++ b/internal/blocktx/store/postgresql/fixtures/insert_block_transactions/blocktx.registered_transactions.yaml @@ -0,0 +1,16 @@ +- hash: 0x76732b80598326a18d3bf0a86518adbdf95d0ddc6ff6693004440f4776168c3b + inserted_at: 2023-12-15 14:00:00 +- hash: 0x164e85a5d5bc2b2372e8feaa266e5e4b7d0808f8d2b784fb1f7349c4726392b0 + inserted_at: 2023-12-15 14:00:00 +- hash: 0xb4201cc6fc5768abff14adf75042ace6061da9176ee5bb943291b9ba7d7f5743 + inserted_at: 2023-12-15 14:00:00 +- hash: 0x37bd6c87927e75faeb3b3c939f64721cda48e1bb98742676eebe83aceee1a669 + inserted_at: 2023-12-15 14:00:00 +- hash: 0x952f80e20a0330f3b9c2dfd1586960064e797218b5c5df665cada221452c17eb + inserted_at: 2023-12-15 14:00:00 +- hash: 0x861a281b27de016e50887288de87eab5ca56a1bb172cdff6dba965474ce0f608 + inserted_at: 2023-12-15 14:00:00 +- hash: 0x9421cc760c5405af950a76dc3e4345eaefd4e7322f172a3aee5e0ddc7b4f8313 + inserted_at: 2023-12-15 14:00:00 +- hash: 0x8b7d038db4518ac4c665abfc5aeaacbd2124ad8ca70daa8465ed2c4427c41b9b + inserted_at: 2023-12-15 14:00:00 diff --git a/internal/blocktx/store/postgresql/fixtures/register_transactions/blocktx.blocks.yaml b/internal/blocktx/store/postgresql/fixtures/register_transactions/blocktx.blocks.yaml deleted file mode 100644 index a2f4e567a..000000000 --- a/internal/blocktx/store/postgresql/fixtures/register_transactions/blocktx.blocks.yaml +++ /dev/null @@ -1,10 +0,0 @@ -- inserted_at: 2024-01-10 13:06:03.375 - id: 9736 - hash: 0x6258b02da70a3e367e4c993b049fa9b76ef8f090ef9fd2010000000000000000 - prevhash: 0x000000000000000001a7aa3999410ca53fb645851531ec0a7a5cb9ce2d4ae313 - merkleroot: 0x0d72bf92e7862df18d1935c171ca4dbb70d268b0f025e46716e913bc7e4f2bdb - height: 826481 - processed_at: 2024-01-10 13:06:06.122 - size: 108689370 - tx_count: 799 - merkle_path: "" diff --git 
a/internal/blocktx/store/postgresql/fixtures/register_transactions/blocktx.registered_transactions.yaml b/internal/blocktx/store/postgresql/fixtures/register_transactions/blocktx.registered_transactions.yaml new file mode 100644 index 000000000..a1fb0d6fc --- /dev/null +++ b/internal/blocktx/store/postgresql/fixtures/register_transactions/blocktx.registered_transactions.yaml @@ -0,0 +1,16 @@ +- hash: 0x76732b80598326a18d3bf0a86518adbdf95d0ddc6ff6693004440f4776168c3b + inserted_at: 2023-12-15 14:00:00 +- hash: 0x164e85a5d5bc2b2372e8feaa266e5e4b7d0808f8d2b784fb1f7349c4726392b0 + inserted_at: 2023-12-15 14:00:00 +- hash: 0xb4201cc6fc5768abff14adf75042ace6061da9176ee5bb943291b9ba7d7f5743 + inserted_at: 2023-12-15 14:00:00 +- hash: 0x37bd6c87927e75faeb3b3c939f64721cda48e1bb98742676eebe83aceee1a669 + inserted_at: 2023-12-15 14:00:00 +- hash: 0x952f80e20a0330f3b9c2dfd1586960064e797218b5c5df665cada221452c17eb + inserted_at: 2023-12-15 14:00:00 +- hash: 0x861a281b27de016e50887288de87eab5ca56a1bb172cdff6dba965474ce0f608 + inserted_at: 2023-12-15 14:00:00 +- hash: 0x9421cc760c5405af950a76dc3e4345eaefd4e7322f172a3aee5e0ddc7b4f8313 + inserted_at: 2023-12-15 14:00:00 +- hash: 0x8b7d038db4518ac4c665abfc5aeaacbd2124ad8ca70daa8465ed2c4427c41b9b + inserted_at: 2023-12-15 14:00:00 diff --git a/internal/blocktx/store/postgresql/fixtures/register_transactions/blocktx.transactions.yaml b/internal/blocktx/store/postgresql/fixtures/register_transactions/blocktx.transactions.yaml deleted file mode 100644 index de3289b32..000000000 --- a/internal/blocktx/store/postgresql/fixtures/register_transactions/blocktx.transactions.yaml +++ /dev/null @@ -1,32 +0,0 @@ -- id: 110383995 - hash: 0x76732b80598326a18d3bf0a86518adbdf95d0ddc6ff6693004440f4776168c3b - inserted_at: 2024-01-09 13:00:00 - is_registered: FALSE -- id: 115361489 - hash: 0x164e85a5d5bc2b2372e8feaa266e5e4b7d0808f8d2b784fb1f7349c4726392b0 - inserted_at: 2024-01-10 12:00:00 - is_registered: FALSE -- id: 115361490 - hash: 0xb4201cc6fc5768abff14adf75042ace6061da9176ee5bb943291b9ba7d7f5743 - inserted_at: 2024-01-10 12:00:00 - is_registered: TRUE -- id: 115361491 - hash: 0x37bd6c87927e75faeb3b3c939f64721cda48e1bb98742676eebe83aceee1a669 - inserted_at: 2024-01-10 12:00:00 - is_registered: TRUE -- id: 115361492 - hash: 0x952f80e20a0330f3b9c2dfd1586960064e797218b5c5df665cada221452c17eb - inserted_at: 2024-01-10 12:00:00 - is_registered: TRUE -- id: 115361493 - hash: 0x861a281b27de016e50887288de87eab5ca56a1bb172cdff6dba965474ce0f608 - inserted_at: 2024-01-10 12:00:00 - is_registered: TRUE -- id: 115361494 - hash: 0x9421cc760c5405af950a76dc3e4345eaefd4e7322f172a3aee5e0ddc7b4f8313 - inserted_at: 2024-01-10 12:00:00 - is_registered: FALSE -- id: 115361495 - hash: 0x8b7d038db4518ac4c665abfc5aeaacbd2124ad8ca70daa8465ed2c4427c41b9b - inserted_at: 2024-01-10 12:00:00 - is_registered: FALSE diff --git a/internal/blocktx/store/postgresql/get_block.go b/internal/blocktx/store/postgresql/get_block.go index adfaadb39..edac0b449 100644 --- a/internal/blocktx/store/postgresql/get_block.go +++ b/internal/blocktx/store/postgresql/get_block.go @@ -22,10 +22,14 @@ func (p *PostgreSQL) GetLongestBlockByHeight(ctx context.Context, height uint64) return p.queryBlockByPredicate(ctx, predicate, height) } -func (p *PostgreSQL) GetChainTip(ctx context.Context) (*blocktx_api.Block, error) { - predicate := "WHERE height = (SELECT MAX(height) FROM blocktx.blocks blks WHERE blks.is_longest = true)" +func (p *PostgreSQL) GetChainTip(ctx context.Context, heightRange int) (*blocktx_api.Block, error) 
{ + predicate := `WHERE height = (SELECT MAX(height) from blocktx.blocks WHERE is_longest = true) + AND is_longest = true + AND height > (SELECT MAX(height) - $1 from blocktx.blocks) + AND processed_at IS NOT NULL + ` - return p.queryBlockByPredicate(ctx, predicate) + return p.queryBlockByPredicate(ctx, predicate, heightRange) } func (p *PostgreSQL) queryBlockByPredicate(ctx context.Context, predicate string, predicateParams ...any) (*blocktx_api.Block, error) { diff --git a/internal/blocktx/store/postgresql/get_block_transactions_hashes.go b/internal/blocktx/store/postgresql/get_block_transactions_hashes.go new file mode 100644 index 000000000..b4c6adbde --- /dev/null +++ b/internal/blocktx/store/postgresql/get_block_transactions_hashes.go @@ -0,0 +1,49 @@ +package postgresql + +import ( + "context" + "errors" + + "github.com/libsv/go-p2p/chaincfg/chainhash" + + "github.com/bitcoin-sv/arc/internal/blocktx/store" + "github.com/bitcoin-sv/arc/internal/tracing" +) + +func (p *PostgreSQL) GetBlockTransactionsHashes(ctx context.Context, blockHash []byte) (txHashes []*chainhash.Hash, err error) { + ctx, span := tracing.StartTracing(ctx, "GetBlockTransactionsHashes", p.tracingEnabled, p.tracingAttributes...) + defer func() { + tracing.EndTracing(span, err) + }() + + q := ` + SELECT + bt.hash + FROM blocktx.block_transactions AS bt + JOIN blocktx.blocks AS b ON bt.block_id = b.id + WHERE b.hash = $1 + ` + + rows, err := p.db.QueryContext(ctx, q, blockHash) + if err != nil { + return nil, err + } + defer rows.Close() + + for rows.Next() { + var txHash []byte + err = rows.Scan(&txHash) + if err != nil { + return nil, errors.Join(store.ErrFailedToGetRows, err) + } + + cHash, err := chainhash.NewHash(txHash) + if err != nil { + return nil, errors.Join(store.ErrFailedToParseHash, err) + } + + txHashes = append(txHashes, cHash) + } + + return txHashes, nil +} diff --git a/internal/blocktx/store/postgresql/get_transactions.go b/internal/blocktx/store/postgresql/get_transactions.go index 5900cf308..8f1e38783 100644 --- a/internal/blocktx/store/postgresql/get_transactions.go +++ b/internal/blocktx/store/postgresql/get_transactions.go @@ -2,24 +2,21 @@ package postgresql import ( "context" + "database/sql" + + "github.com/lib/pq" "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api" "github.com/bitcoin-sv/arc/internal/blocktx/store" "github.com/bitcoin-sv/arc/internal/tracing" - "github.com/lib/pq" ) -func (p *PostgreSQL) GetMinedTransactions(ctx context.Context, hashes [][]byte, onlyLongestChain bool) (minedTransactions []store.TransactionBlock, err error) { +func (p *PostgreSQL) GetMinedTransactions(ctx context.Context, hashes [][]byte) (minedTransactions []store.BlockTransaction, err error) { ctx, span := tracing.StartTracing(ctx, "GetMinedTransactions", p.tracingEnabled, p.tracingAttributes...) 
defer func() { tracing.EndTracing(span, err) }() - if onlyLongestChain { - predicate := "WHERE t.hash = ANY($1) AND b.is_longest = true" - return p.getTransactionBlocksByPredicate(ctx, predicate, pq.Array(hashes)) - } - predicate := "WHERE t.hash = ANY($1) AND (b.status = $2 OR b.status = $3) AND b.processed_at IS NOT NULL" return p.getTransactionBlocksByPredicate(ctx, predicate, @@ -29,30 +26,16 @@ func (p *PostgreSQL) GetMinedTransactions(ctx context.Context, hashes [][]byte, ) } -func (p *PostgreSQL) GetRegisteredTxsByBlockHashes(ctx context.Context, blockHashes [][]byte) (registeredTxs []store.TransactionBlock, err error) { - ctx, span := tracing.StartTracing(ctx, "GetMinedTransactions", p.tracingEnabled, p.tracingAttributes...) - defer func() { - tracing.EndTracing(span, err) - }() - - predicate := "WHERE b.hash = ANY($1) AND t.is_registered = TRUE" - - return p.getTransactionBlocksByPredicate(ctx, predicate, pq.Array(blockHashes)) -} - -func (p *PostgreSQL) getTransactionBlocksByPredicate(ctx context.Context, predicate string, predicateParams ...any) ([]store.TransactionBlock, error) { - transactionBlocks := make([]store.TransactionBlock, 0) - +func (p *PostgreSQL) getTransactionBlocksByPredicate(ctx context.Context, predicate string, predicateParams ...any) ([]store.BlockTransaction, error) { q := ` SELECT - t.hash, + bt.hash, b.hash, b.height, - m.merkle_path, + bt.merkle_tree_index, b.status - FROM blocktx.transactions AS t - JOIN blocktx.block_transactions_map AS m ON t.id = m.txid - JOIN blocktx.blocks AS b ON m.blockid = b.id + FROM blocktx.block_transactions AS bt + JOIN blocktx.blocks AS b ON bt.block_id = b.id ` q += " " + predicate @@ -62,32 +45,65 @@ func (p *PostgreSQL) getTransactionBlocksByPredicate(ctx context.Context, predic } defer rows.Close() + return p.getBlockTransactions(rows) +} + +func (p *PostgreSQL) getBlockTransactions(rows *sql.Rows) ([]store.BlockTransaction, error) { + transactionBlocks := make([]store.BlockTransaction, 0) for rows.Next() { var txHash []byte var blockHash []byte var blockHeight uint64 - var merklePath string + var merkleTreeIndex int64 var blockStatus blocktx_api.Status - err = rows.Scan( + err := rows.Scan( &txHash, &blockHash, &blockHeight, - &merklePath, + &merkleTreeIndex, &blockStatus, ) if err != nil { return nil, err } - transactionBlocks = append(transactionBlocks, store.TransactionBlock{ - TxHash: txHash, - BlockHash: blockHash, - BlockHeight: blockHeight, - MerklePath: merklePath, - BlockStatus: blockStatus, + transactionBlocks = append(transactionBlocks, store.BlockTransaction{ + TxHash: txHash, + BlockHash: blockHash, + BlockHeight: blockHeight, + MerkleTreeIndex: merkleTreeIndex, + BlockStatus: blockStatus, }) } return transactionBlocks, nil } + +func (p *PostgreSQL) GetRegisteredTxsByBlockHashes(ctx context.Context, blockHashes [][]byte) (registeredTxs []store.BlockTransaction, err error) { + ctx, span := tracing.StartTracing(ctx, "GetRegisteredTxsByBlockHashes", p.tracingEnabled, p.tracingAttributes...) 
+ defer func() { + tracing.EndTracing(span, err) + }() + + q := ` + SELECT + bt.hash, + b.hash, + b.height, + bt.merkle_tree_index, + b.status + FROM blocktx.registered_transactions AS r + JOIN blocktx.block_transactions AS bt ON r.hash = bt.hash + JOIN blocktx.blocks AS b ON bt.block_id = b.id + WHERE b.hash = ANY($1) + ` + + rows, err := p.db.QueryContext(ctx, q, pq.Array(blockHashes)) + if err != nil { + return nil, err + } + defer rows.Close() + + return p.getBlockTransactions(rows) +} diff --git a/internal/blocktx/store/postgresql/insert_block_transactions.go b/internal/blocktx/store/postgresql/insert_block_transactions.go new file mode 100644 index 000000000..37eddca10 --- /dev/null +++ b/internal/blocktx/store/postgresql/insert_block_transactions.go @@ -0,0 +1,54 @@ +package postgresql + +import ( + "context" + "errors" + + "go.opentelemetry.io/otel/attribute" + + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgconn" + "github.com/jackc/pgx/v5/stdlib" + + "github.com/bitcoin-sv/arc/internal/blocktx/store" + "github.com/bitcoin-sv/arc/internal/tracing" +) + +// InsertBlockTransactions inserts the transaction hashes for a given block hash +func (p *PostgreSQL) InsertBlockTransactions(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxHashWithMerkleTreeIndex) (err error) { + ctx, span := tracing.StartTracing(ctx, "InsertBlockTransactions", p.tracingEnabled, append(p.tracingAttributes, attribute.Int("updates", len(txsWithMerklePaths)))...) + defer func() { + tracing.EndTracing(span, err) + }() + + copyRows := make([][]any, len(txsWithMerklePaths)) + + for pos, tx := range txsWithMerklePaths { + copyRows[pos] = []any{blockID, tx.Hash, tx.MerkleTreeIndex} + } + + err = p.conn.Raw(func(driverConn any) error { + conn := driverConn.(*stdlib.Conn).Conn() // conn is a *pgx.Conn + var pqErr *pgconn.PgError + + _, err = conn.CopyFrom( + ctx, + pgx.Identifier{"blocktx", "block_transactions"}, + []string{"block_id", "hash", "merkle_tree_index"}, + pgx.CopyFromRows(copyRows), + ) + + // Error 23505 is: "duplicate key violates unique constraint" + if errors.As(err, &pqErr) && pqErr.Code == "23505" { + // ON CONFLICT DO NOTHING + err = nil + } + if err != nil { + return err + } + + return nil + }) + + return nil +} diff --git a/internal/blocktx/store/postgresql/migrations/000021_block_transactions.down.sql b/internal/blocktx/store/postgresql/migrations/000021_block_transactions.down.sql new file mode 100644 index 000000000..12892fecd --- /dev/null +++ b/internal/blocktx/store/postgresql/migrations/000021_block_transactions.down.sql @@ -0,0 +1,25 @@ + +DROP TABLE blocktx.block_transactions; +DROP TABLE blocktx.registered_transactions; + +DROP INDEX ix_registered_transactions_inserted_at; + +CREATE TABLE blocktx.block_transactions_map ( + blockid int8 NOT NULL, + txid int8 NOT NULL, + inserted_at timestamptz DEFAULT CURRENT_TIMESTAMP NOT NULL, + merkle_path text DEFAULT ''::text NULL, + CONSTRAINT block_transactions_map_pkey PRIMARY KEY (blockid, txid) +); + +CREATE INDEX ix_block_transactions_map_inserted_at ON blocktx.block_transactions_map USING btree (inserted_at); + +CREATE TABLE blocktx.transactions ( + id bigserial NOT NULL, + hash bytea NOT NULL, + is_registered bool DEFAULT false NOT NULL, + inserted_at timestamptz DEFAULT CURRENT_TIMESTAMP NOT NULL, +CONSTRAINT transactions_pkey PRIMARY KEY (id) +); +CREATE INDEX ix_transactions_inserted_at ON blocktx.transactions USING btree (inserted_at); +CREATE UNIQUE INDEX ux_transactions_hash ON blocktx.transactions USING btree 
(hash); diff --git a/internal/blocktx/store/postgresql/migrations/000021_block_transactions.up.sql b/internal/blocktx/store/postgresql/migrations/000021_block_transactions.up.sql new file mode 100644 index 000000000..8ad6bd846 --- /dev/null +++ b/internal/blocktx/store/postgresql/migrations/000021_block_transactions.up.sql @@ -0,0 +1,26 @@ +CREATE TABLE IF NOT EXISTS blocktx.block_transactions ( + block_id BIGINT, + hash BYTEA NOT NULL, + merkle_tree_index BIGINT DEFAULT -1, -- this means no merkle_tree_index + PRIMARY KEY (hash, block_id), + CONSTRAINT fk_block + FOREIGN KEY(block_id) + REFERENCES blocktx.blocks(id) + ON DELETE CASCADE +); + +CREATE TABLE IF NOT EXISTS blocktx.registered_transactions ( + hash BYTEA PRIMARY KEY, + inserted_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP +); + +CREATE INDEX IF NOT EXISTS ix_registered_transactions_inserted_at ON blocktx.registered_transactions USING btree (inserted_at); + +INSERT INTO blocktx.registered_transactions +SELECT t.hash AS hash FROM blocktx.transactions t WHERE t.is_registered = TRUE; + +DROP INDEX ix_block_transactions_map_inserted_at; +DROP TABLE blocktx.block_transactions_map; +DROP INDEX ix_transactions_inserted_at; +DROP INDEX ux_transactions_hash; +DROP TABLE blocktx.transactions; diff --git a/internal/blocktx/store/postgresql/postgres.go b/internal/blocktx/store/postgresql/postgres.go index f3b6ec321..c2d3a93f4 100644 --- a/internal/blocktx/store/postgresql/postgres.go +++ b/internal/blocktx/store/postgresql/postgres.go @@ -16,12 +16,13 @@ import ( var ErrNoTransaction = errors.New("sql: transaction has already been committed or rolled back") const ( - postgresDriverName = "postgres" + postgresDriverName = "pgx" maxPostgresBulkInsertRows = 8192 ) type PostgreSQL struct { db *sql.DB + conn *sql.Conn now func() time.Time maxPostgresBulkInsertRows int tracingEnabled bool @@ -50,28 +51,30 @@ func WithTracer(attr ...attribute.KeyValue) func(s *PostgreSQL) { func New(dbInfo string, idleConns int, maxOpenConns int, opts ...func(postgreSQL *PostgreSQL)) (*PostgreSQL, error) { var db *sql.DB var err error - db, err = sql.Open(postgresDriverName, dbInfo) if err != nil { return nil, errors.Join(store.ErrFailedToOpenDB, err) } - db.SetMaxIdleConns(idleConns) db.SetMaxOpenConns(maxOpenConns) + // get an existing connection from the pool instead of creating a new one + conn, err := db.Conn(context.TODO()) + if err != nil { + return nil, errors.Join(store.ErrUnableToGetSQLConnection, err) + } + p := &PostgreSQL{ db: db, + conn: conn, now: time.Now, maxPostgresBulkInsertRows: maxPostgresBulkInsertRows, } - for _, opt := range opts { opt(p) } - return p, nil } - func (p *PostgreSQL) Close() error { return p.db.Close() } diff --git a/internal/blocktx/store/postgresql/postgres_test.go b/internal/blocktx/store/postgresql/postgres_test.go index bb4ca5236..3510eeaf8 100644 --- a/internal/blocktx/store/postgresql/postgres_test.go +++ b/internal/blocktx/store/postgresql/postgres_test.go @@ -1,7 +1,6 @@ package postgresql import ( - "bytes" "context" "crypto/rand" "encoding/hex" @@ -10,11 +9,12 @@ import ( "testing" "time" + "github.com/lib/pq" + "github.com/bitcoin-sv/arc/internal/testdata" _ "github.com/golang-migrate/migrate/v4/source/file" "github.com/jmoiron/sqlx" - _ "github.com/lib/pq" "github.com/libsv/go-p2p/chaincfg/chainhash" "github.com/ory/dockertest/v3" "github.com/stretchr/testify/assert" @@ -43,18 +43,17 @@ type Block struct { InsertedAt time.Time `db:"inserted_at"` } -type Transaction struct { - ID int64 `db:"id"` - Hash []byte 
`db:"hash"` - IsRegistered bool `db:"is_registered"` - InsertedAt time.Time `db:"inserted_at"` +type RegisteredTransaction struct { + Hash []byte `db:"hash"` + InsertedAt time.Time `db:"inserted_at"` } -type BlockTransactionMap struct { - BlockID int64 `db:"blockid"` - TransactionID int64 `db:"txid"` - MerklePath string `db:"merkle_path"` - InsertedAt time.Time `db:"inserted_at"` +type BlockTransaction struct { + ID int64 `db:"id"` + BlockID int64 `db:"block_id"` + MerkleTreeIndex int64 `db:"merkle_tree_index"` + Hash []byte `db:"hash"` + InsertedAt time.Time `db:"inserted_at"` } const ( @@ -94,11 +93,10 @@ func testmain(m *testing.M) int { func prepareDb(t testing.TB, postgres *PostgreSQL, fixture string) { t.Helper() - testutils.PruneTables(t, postgres.db, - "blocktx.blocks", - "blocktx.transactions", - "blocktx.block_transactions_map", - ) + _, err := postgres.db.Exec("DELETE FROM blocktx.blocks WHERE hash IS NOT NULL") + if err != nil { + t.Fatal(err) + } if fixture != "" { testutils.LoadFixtures(t, postgres.db, fixture) @@ -218,10 +216,14 @@ func TestPostgresDB(t *testing.T) { require.Nil(t, actualBlock) require.Equal(t, store.ErrBlockNotFound, err) - actualBlock, err = postgresDB.GetChainTip(context.Background()) + actualBlock, err = postgresDB.GetChainTip(context.Background(), 10) require.NoError(t, err) require.Equal(t, hashAtTip[:], actualBlock.Hash) require.Equal(t, expectedTipHeight, actualBlock.Height) + + actualBlock, err = postgresDB.GetChainTip(context.Background(), 2) + require.Nil(t, actualBlock) + require.Equal(t, store.ErrBlockNotFound, err) }) t.Run("get block gaps", func(t *testing.T) { @@ -435,41 +437,32 @@ func TestPostgresDB(t *testing.T) { blockHash := testutils.RevChainhash(t, "000000000000000005aa39a25e7e8bf440c270ec9a1bd30e99ab026f39207ef9") blockHash2 := testutils.RevChainhash(t, "0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7") - expectedTxs := []store.TransactionBlock{ + expectedTxs := []store.BlockTransaction{ { - TxHash: txHash1[:], - BlockHash: blockHash[:], - BlockHeight: 822013, - MerklePath: "merkle-path-1", - BlockStatus: blocktx_api.Status_LONGEST, + TxHash: txHash1[:], + BlockHash: blockHash[:], + BlockHeight: 822013, + MerkleTreeIndex: int64(1), + BlockStatus: blocktx_api.Status_LONGEST, }, { - TxHash: txHash2[:], - BlockHash: blockHash[:], - BlockHeight: 822013, - MerklePath: "merkle-path-2", - BlockStatus: blocktx_api.Status_LONGEST, + TxHash: txHash2[:], + BlockHash: blockHash[:], + BlockHeight: 822013, + MerkleTreeIndex: int64(2), + BlockStatus: blocktx_api.Status_LONGEST, }, { - TxHash: txHash3[:], - BlockHash: blockHash2[:], - BlockHeight: 822012, - MerklePath: "merkle-path-6", - BlockStatus: blocktx_api.Status_STALE, + TxHash: txHash3[:], + BlockHash: blockHash2[:], + BlockHeight: 822012, + MerkleTreeIndex: int64(6), + BlockStatus: blocktx_api.Status_STALE, }, } // when - onlyLongestChain := true - actualTxs, err := postgresDB.GetMinedTransactions(ctx, [][]byte{txHash1[:], txHash2[:], txHash3[:]}, onlyLongestChain) - - // then - require.NoError(t, err) - require.ElementsMatch(t, expectedTxs[:2], actualTxs) - - // when - onlyLongestChain = false - actualTxs, err = postgresDB.GetMinedTransactions(ctx, [][]byte{txHash1[:], txHash2[:], txHash3[:]}, onlyLongestChain) + actualTxs, err := postgresDB.GetMinedTransactions(ctx, [][]byte{txHash1[:], txHash2[:], txHash3[:]}) // then require.NoError(t, err) @@ -488,27 +481,27 @@ func TestPostgresDB(t *testing.T) { blockHashStale[:], } - expectedTxs := []store.TransactionBlock{ + 
expectedTxs := []store.BlockTransaction{ { - TxHash: testutils.RevChainhash(t, "21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e")[:], - BlockHash: blockHashLongest[:], - BlockHeight: 822013, - MerklePath: "merkle-path-2", - BlockStatus: blocktx_api.Status_LONGEST, + TxHash: testutils.RevChainhash(t, "21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e")[:], + BlockHash: blockHashLongest[:], + BlockHeight: 822013, + MerkleTreeIndex: int64(2), + BlockStatus: blocktx_api.Status_LONGEST, }, { - TxHash: testutils.RevChainhash(t, "213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0")[:], - BlockHash: blockHashStale[:], - BlockHeight: 822012, - MerklePath: "merkle-path-6", - BlockStatus: blocktx_api.Status_STALE, + TxHash: testutils.RevChainhash(t, "213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0")[:], + BlockHash: blockHashStale[:], + BlockHeight: 822012, + MerkleTreeIndex: int64(6), + BlockStatus: blocktx_api.Status_STALE, }, { - TxHash: testutils.RevChainhash(t, "12c04cfc5643f1cd25639ad42d6f8f0489557699d92071d7e0a5b940438c4357")[:], - BlockHash: blockHashStale[:], - BlockHeight: 822012, - MerklePath: "merkle-path-7", - BlockStatus: blocktx_api.Status_STALE, + TxHash: testutils.RevChainhash(t, "12c04cfc5643f1cd25639ad42d6f8f0489557699d92071d7e0a5b940438c4357")[:], + BlockHash: blockHashStale[:], + BlockHeight: 822012, + MerkleTreeIndex: int64(7), + BlockStatus: blocktx_api.Status_STALE, }, } @@ -520,6 +513,28 @@ func TestPostgresDB(t *testing.T) { require.Equal(t, expectedTxs, actualTxs) }) + t.Run("get block transactions hashes", func(t *testing.T) { + // given + prepareDb(t, postgresDB, "fixtures/get_block_transactions") + + blockHash := testutils.RevChainhash(t, "000000000000000005aa39a25e7e8bf440c270ec9a1bd30e99ab026f39207ef9")[:] + + expectedTxsHashes := []*chainhash.Hash{ + testutils.RevChainhash(t, "cd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853"), + testutils.RevChainhash(t, "21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e"), + testutils.RevChainhash(t, "b16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430"), + testutils.RevChainhash(t, "ee76f5b746893d3e6ae6a14a15e464704f4ebd601537820933789740acdcf6aa"), + testutils.RevChainhash(t, "3e0b5b218c344110f09bf485bc58de4ea5378e55744185edf9c1dafa40068ecd"), + } + + // when + actualTxsHashes, err := postgresDB.GetBlockTransactionsHashes(ctx, blockHash) + require.NoError(t, err) + + // then + require.ElementsMatch(t, expectedTxsHashes, actualTxsHashes) + }) + t.Run("clear data", func(t *testing.T) { prepareDb(t, postgresDB, "fixtures/clear_data") @@ -535,23 +550,10 @@ func TestPostgresDB(t *testing.T) { require.Len(t, blocks, 1) - resp, err = postgresDB.ClearBlocktxTable(context.Background(), 10, "block_transactions_map") - require.NoError(t, err) - require.Equal(t, int64(5), resp.Rows) - - var mps []BlockTransactionMap - require.NoError(t, d.Select(&mps, "SELECT blockid FROM blocktx.block_transactions_map")) - - require.Len(t, mps, 5) + var bts []BlockTransaction + require.NoError(t, d.Select(&bts, "SELECT block_id FROM blocktx.block_transactions")) - resp, err = postgresDB.ClearBlocktxTable(context.Background(), 10, "transactions") - require.NoError(t, err) - require.Equal(t, int64(5), resp.Rows) - - var txs []Transaction - require.NoError(t, d.Select(&txs, "SELECT hash FROM blocktx.transactions")) - - require.Len(t, txs, 5) + require.Len(t, bts, 5) }) t.Run("set/get/del block processing", func(t *testing.T) { @@ -659,82 +661,74 @@ func 
TestPostgresDB(t *testing.T) { }) } -func TestPostgresStore_UpsertBlockTransactions(t *testing.T) { +func TestPostgresStore_InsertBlockTransactions(t *testing.T) { if testing.Short() { t.Skip("skipping integration test") } tcs := []struct { name string - txsWithMerklePaths []store.TxWithMerklePath + txsWithMerklePaths []store.TxHashWithMerkleTreeIndex - expectedUpdatedResLen int - upsertRepeat bool + upsertRepeat bool }{ { - name: "upsert all registered transactions (updates only)", - txsWithMerklePaths: []store.TxWithMerklePath{ + name: "insert 6 existing", + txsWithMerklePaths: []store.TxHashWithMerkleTreeIndex{ { - Hash: testutils.RevChainhash(t, "76732b80598326a18d3bf0a86518adbdf95d0ddc6ff6693004440f4776168c3b")[:], - MerklePath: "test1", + Hash: testutils.RevChainhash(t, "b4201cc6fc5768abff14adf75042ace6061da9176ee5bb943291b9ba7d7f5743")[:], + MerkleTreeIndex: int64(1), }, { - Hash: testutils.RevChainhash(t, "164e85a5d5bc2b2372e8feaa266e5e4b7d0808f8d2b784fb1f7349c4726392b0")[:], - MerklePath: "test2", + Hash: testutils.RevChainhash(t, "37bd6c87927e75faeb3b3c939f64721cda48e1bb98742676eebe83aceee1a669")[:], + MerkleTreeIndex: int64(2), + }, + { + Hash: testutils.RevChainhash(t, "952f80e20a0330f3b9c2dfd1586960064e797218b5c5df665cada221452c17eb")[:], + MerkleTreeIndex: int64(3), }, - }, - expectedUpdatedResLen: 2, - }, - { - name: "upsert all non-registered transactions (inserts only)", - txsWithMerklePaths: []store.TxWithMerklePath{ { - Hash: testutils.RevChainhash(t, "edd33fdcdfa68444d227780e2b62a4437c00120c5320d2026aeb24a781f4c3f1")[:], - MerklePath: "test1", + Hash: testutils.RevChainhash(t, "861a281b27de016e50887288de87eab5ca56a1bb172cdff6dba965474ce0f608")[:], + MerkleTreeIndex: int64(4), + }, + { + Hash: testutils.RevChainhash(t, "9421cc760c5405af950a76dc3e4345eaefd4e7322f172a3aee5e0ddc7b4f8313")[:], + MerkleTreeIndex: int64(5), + }, + { + Hash: testutils.RevChainhash(t, "8b7d038db4518ac4c665abfc5aeaacbd2124ad8ca70daa8465ed2c4427c41b9b")[:], + MerkleTreeIndex: int64(6), }, }, - expectedUpdatedResLen: 0, }, { - name: "update exceeds max batch size (more txs than 5)", - txsWithMerklePaths: []store.TxWithMerklePath{ + name: "insert 6 new", + txsWithMerklePaths: []store.TxHashWithMerkleTreeIndex{ { - Hash: testutils.RevChainhash(t, "b4201cc6fc5768abff14adf75042ace6061da9176ee5bb943291b9ba7d7f5743")[:], - MerklePath: "test1", + Hash: testutils.RevChainhash(t, "6b86e32c1896ff25fb2d857b96484b86c44444f3796bafb456c51a67a19a3c93")[:], + MerkleTreeIndex: int64(1), }, { - Hash: testutils.RevChainhash(t, "37bd6c87927e75faeb3b3c939f64721cda48e1bb98742676eebe83aceee1a669")[:], - MerklePath: "test2", + Hash: testutils.RevChainhash(t, "f27a3609d133eef8abaf17bf19a1481da265e39b82be91b76f8f4ac964907f36")[:], + MerkleTreeIndex: int64(2), }, { - Hash: testutils.RevChainhash(t, "952f80e20a0330f3b9c2dfd1586960064e797218b5c5df665cada221452c17eb")[:], - MerklePath: "test3", + Hash: testutils.RevChainhash(t, "8088f5e915be6dba137080c031cb6ca2fcce6d44c7c0193f52d9f058673517f8")[:], + MerkleTreeIndex: int64(3), }, { - Hash: testutils.RevChainhash(t, "861a281b27de016e50887288de87eab5ca56a1bb172cdff6dba965474ce0f608")[:], - MerklePath: "test4", + Hash: testutils.RevChainhash(t, "58f803957943b70ac9161b9327065d9798e80b21bae82e9f7e0bf874aa143ed5")[:], + MerkleTreeIndex: int64(4), }, { - Hash: testutils.RevChainhash(t, "9421cc760c5405af950a76dc3e4345eaefd4e7322f172a3aee5e0ddc7b4f8313")[:], - MerklePath: "test5", + Hash: testutils.RevChainhash(t, "f4a7f2ad6d0f4be651698b75fe0a816e7bc546097c6dc0acb281298dbf844f13")[:], + 
MerkleTreeIndex: int64(5), }, { - Hash: testutils.RevChainhash(t, "8b7d038db4518ac4c665abfc5aeaacbd2124ad8ca70daa8465ed2c4427c41b9b")[:], - MerklePath: "test6", - }, - }, - expectedUpdatedResLen: 6, - }, - { - name: "upsert all registered transactions cause conflict with txid and blockid", - txsWithMerklePaths: []store.TxWithMerklePath{ - { - Hash: testutils.RevChainhash(t, "8b7d038db4518ac4c665abfc5aeaacbd2124ad8ca70daa8465ed2c4427c41b9b")[:], - MerklePath: "test7", + Hash: testutils.RevChainhash(t, "07b6029cfcba536b88e49e5f2940b78d5583cfb755e0fa6bb2bfb2bff56f8651")[:], + MerkleTreeIndex: int64(6), }, }, - upsertRepeat: true, - expectedUpdatedResLen: 1, }, } @@ -746,52 +740,34 @@ func TestPostgresStore_UpsertBlockTransactions(t *testing.T) { for _, tc := range tcs { t.Run(tc.name, func(t *testing.T) { // given - prepareDb(t, sut, "fixtures/upsert_block_transactions") + prepareDb(t, sut, "fixtures/insert_block_transactions") testBlockID := uint64(9736) - testBlockHash := testutils.RevChainhash(t, "6258b02da70a3e367e4c993b049fa9b76ef8f090ef9fd2010000000000000000") // when - err := sut.UpsertBlockTransactions(ctx, testBlockID, tc.txsWithMerklePaths) + err := sut.InsertBlockTransactions(ctx, testBlockID, tc.txsWithMerklePaths) if tc.upsertRepeat { - err = sut.UpsertBlockTransactions(ctx, testBlockID, tc.txsWithMerklePaths) + err = sut.InsertBlockTransactions(ctx, testBlockID, tc.txsWithMerklePaths) require.NoError(t, err) } // then require.NoError(t, err) - res, err := sut.GetRegisteredTxsByBlockHashes(ctx, [][]byte{testBlockHash[:]}) - require.NoError(t, err) - - require.Equal(t, tc.expectedUpdatedResLen, len(res)) - - // assert correctness of returned values - // assume registered transactions are at the beginning of tc.txs - for i := 0; i < tc.expectedUpdatedResLen; i++ { - require.True(t, bytes.Equal(tc.txsWithMerklePaths[i].Hash, res[i].TxHash)) - require.Equal(t, tc.txsWithMerklePaths[i].MerklePath, res[i].MerklePath) - } - // assert data are correctly saved in the store d, err := sqlx.Open("postgres", dbInfo) require.NoError(t, err) + hashes := make([][]byte, len(tc.txsWithMerklePaths)) for i, tx := range tc.txsWithMerklePaths { - var storedtx Transaction - - err = d.Get(&storedtx, "SELECT id, hash, is_registered from blocktx.transactions WHERE hash=$1", tx.Hash[:]) - require.NoError(t, err, "error during getting transaction") - - require.Equal(t, i < tc.expectedUpdatedResLen, storedtx.IsRegistered) + hashes[i] = tx.Hash + } - var mp BlockTransactionMap - err = d.Get(&mp, "SELECT blockid, txid, merkle_path from blocktx.block_transactions_map WHERE txid=$1", storedtx.ID) - require.NoError(t, err, "error during getting block transactions map") + var bt []BlockTransaction + err = d.Select(&bt, "SELECT block_id, hash, merkle_tree_index from blocktx.block_transactions bt WHERE bt.hash = ANY($1)", pq.Array(hashes)) + require.NoError(t, err, "error during getting block transactions map") - require.Equal(t, tx.MerklePath, mp.MerklePath) - require.Equal(t, testBlockID, uint64(mp.BlockID)) - } + require.Len(t, bt, len(tc.txsWithMerklePaths)) }) } } @@ -806,7 +782,7 @@ func NewHash(length int) ([]byte, error) { return hash, nil } -func BenchmarkUpsertBlockTransactions(b *testing.B) { +func BenchmarkInsertBlockTransactions(b *testing.B) { ctx, _, sut := setupPostgresTest(b) defer sut.Close() @@ -817,32 +793,32 @@ func BenchmarkUpsertBlockTransactions(b *testing.B) { iterations int }{ { - name: "UpsertBlockTransactions - 80000, 1 batch", + name: "batch size 80000, 1 batch", batch: totalRows, 
iterations: 1, }, { - name: "UpsertBlockTransactions - 20000, 4 batches", + name: "batch size 20000, 4 batches", batch: 20000, iterations: 4, }, { - name: "UpsertBlockTransactions - 10000, 8 batches", + name: "batch size 10000, 8 batches", batch: 10000, iterations: 8, }, { - name: "UpsertBlockTransactions - 8000, 10 batches", + name: "batch size 8000, 10 batches", batch: 8000, iterations: 10, }, { - name: "UpsertBlockTransactions - 5000, 16 batches", + name: "batch size 5000, 16 batches", batch: 5000, iterations: 16, }, { - name: "UpsertBlockTransactions - 2000, 40 batches", + name: "batch size 2000, 40 batches", batch: 2000, iterations: 40, }, @@ -853,7 +829,7 @@ func BenchmarkUpsertBlockTransactions(b *testing.B) { b.StopTimer() testBlockID := uint64(9736) - txsWithMerklePaths := make([]store.TxWithMerklePath, totalRows) + txsWithMerklePaths := make([]store.TxHashWithMerkleTreeIndex, totalRows) counter := 0 for range totalRows { @@ -863,9 +839,8 @@ func BenchmarkUpsertBlockTransactions(b *testing.B) { require.NoError(b, err) hex.EncodeToString(merklePath) - txsWithMerklePaths[counter] = store.TxWithMerklePath{ - Hash: hash, - MerklePath: hex.EncodeToString(merklePath), + txsWithMerklePaths[counter] = store.TxHashWithMerkleTreeIndex{ + Hash: hash, } counter++ @@ -874,14 +849,14 @@ func BenchmarkUpsertBlockTransactions(b *testing.B) { b.StartTimer() for i := 0; i < tc.iterations; i++ { - err := sut.UpsertBlockTransactions(ctx, testBlockID, txsWithMerklePaths[i*tc.batch:(i+1)*tc.batch-1]) + err := sut.InsertBlockTransactions(ctx, testBlockID, txsWithMerklePaths[i*tc.batch:(i+1)*tc.batch-1]) require.NoError(b, err) } }) } } -func TestPostgresStore_UpsertBlockTransactions_CompetingBlocks(t *testing.T) { +func TestPostgresStore_InsertTransactions_CompetingBlocks(t *testing.T) { if testing.Short() { t.Skip("skipping integration test") } @@ -891,46 +866,51 @@ func TestPostgresStore_UpsertBlockTransactions_CompetingBlocks(t *testing.T) { defer sut.Close() sut.maxPostgresBulkInsertRows = 5 - prepareDb(t, sut, "fixtures/upsert_block_transactions") + prepareDb(t, sut, "fixtures/insert_block_transactions") testBlockID := uint64(9736) competingBlockID := uint64(9737) txHash := testutils.RevChainhash(t, "76732b80598326a18d3bf0a86518adbdf95d0ddc6ff6693004440f4776168c3b") - txsWithMerklePaths := []store.TxWithMerklePath{ + txsWithMerklePaths := []store.TxHashWithMerkleTreeIndex{ { - Hash: txHash[:], - MerklePath: "merkle-path-1", + Hash: txHash[:], }, } - competingTxsWithMerklePaths := []store.TxWithMerklePath{ + competingTxsWithMerklePaths := []store.TxHashWithMerkleTreeIndex{ { - Hash: txHash[:], - MerklePath: "merkle-path-2", + Hash: txHash[:], }, } - expected := []store.TransactionBlock{ + expected := []store.BlockTransaction{ + { + TxHash: txHash[:], + BlockHash: testutils.RevChainhash(t, "6258b02da70a3e367e4c993b049fa9b76ef8f090ef9fd2010000000000000000")[:], + BlockHeight: uint64(826481), + BlockStatus: blocktx_api.Status_LONGEST, + MerkleTreeIndex: int64(1), + }, { - TxHash: txHash[:], - BlockHash: testutils.RevChainhash(t, "6258b02da70a3e367e4c993b049fa9b76ef8f090ef9fd2010000000000000000")[:], - BlockHeight: uint64(826481), - MerklePath: "merkle-path-1", - BlockStatus: blocktx_api.Status_LONGEST, + TxHash: txHash[:], + BlockHash: testutils.RevChainhash(t, "7258b02da70a3e367e4c993b049fa9b76ef8f090ef9fd2010000000000000000")[:], + BlockHeight: uint64(826481), + MerkleTreeIndex: int64(1), + BlockStatus: blocktx_api.Status_STALE, }, } // when - err := sut.UpsertBlockTransactions(ctx, testBlockID, 
txsWithMerklePaths) + err := sut.InsertBlockTransactions(ctx, testBlockID, txsWithMerklePaths) require.NoError(t, err) - err = sut.UpsertBlockTransactions(ctx, competingBlockID, competingTxsWithMerklePaths) + err = sut.InsertBlockTransactions(ctx, competingBlockID, competingTxsWithMerklePaths) require.NoError(t, err) // then - actual, err := sut.GetMinedTransactions(ctx, [][]byte{txHash[:]}, true) + actual, err := sut.GetMinedTransactions(ctx, [][]byte{txHash[:]}) require.NoError(t, err) require.ElementsMatch(t, expected, actual) @@ -954,15 +934,6 @@ func TestPostgresStore_RegisterTransactions(t *testing.T) { testdata.TX4Hash[:], }, }, - { - name: "register already known, not registered transactions", - txs: [][]byte{ - testutils.RevChainhash(t, "76732b80598326a18d3bf0a86518adbdf95d0ddc6ff6693004440f4776168c3b")[:], - testutils.RevChainhash(t, "164e85a5d5bc2b2372e8feaa266e5e4b7d0808f8d2b784fb1f7349c4726392b0")[:], - testutils.RevChainhash(t, "8b7d038db4518ac4c665abfc5aeaacbd2124ad8ca70daa8465ed2c4427c41b9b")[:], - testutils.RevChainhash(t, "9421cc760c5405af950a76dc3e4345eaefd4e7322f172a3aee5e0ddc7b4f8313")[:], - }, - }, { name: "register already registered transactions", txs: [][]byte{ @@ -975,7 +946,7 @@ func TestPostgresStore_RegisterTransactions(t *testing.T) { } // common setup for test cases - ctx, now, sut := setupPostgresTest(t) + ctx, _, sut := setupPostgresTest(t) defer sut.Close() for _, tc := range tcs { @@ -984,38 +955,18 @@ func TestPostgresStore_RegisterTransactions(t *testing.T) { prepareDb(t, sut, "fixtures/register_transactions") // when - result, err := sut.RegisterTransactions(ctx, tc.txs) + err := sut.RegisterTransactions(ctx, tc.txs) require.NoError(t, err) - require.NotNil(t, result) - - resultmap := make(map[chainhash.Hash]bool) - for _, h := range result { - resultmap[*h] = false - } // then // assert data are correctly saved in the store d, err := sqlx.Open("postgres", dbInfo) require.NoError(t, err) - updatedCounter := 0 - for _, hash := range tc.txs { - var storedtx Transaction - err = d.Get(&storedtx, "SELECT hash, is_registered from blocktx.transactions WHERE hash=$1", hash) - require.NoError(t, err) - - require.NotNil(t, storedtx) - require.True(t, storedtx.IsRegistered) - - if _, found := resultmap[chainhash.Hash(storedtx.Hash)]; found { - require.Greater(t, storedtx.InsertedAt, now) - updatedCounter++ - } else { - require.Less(t, storedtx.InsertedAt, now) - } - } - - require.Equal(t, len(result), updatedCounter) + var bt []RegisteredTransaction + err = d.Select(&bt, "SELECT rt.hash, rt.inserted_at from blocktx.registered_transactions rt WHERE rt.hash = ANY($1)", pq.Array(tc.txs)) + require.NoError(t, err) + require.Equal(t, len(tc.txs), len(bt)) }) } } diff --git a/internal/blocktx/store/postgresql/register_transactions.go b/internal/blocktx/store/postgresql/register_transactions.go index 772827d43..9efa8320e 100644 --- a/internal/blocktx/store/postgresql/register_transactions.go +++ b/internal/blocktx/store/postgresql/register_transactions.go @@ -3,45 +3,24 @@ package postgresql import ( "context" "errors" - "github.com/bitcoin-sv/arc/internal/blocktx/store" - "time" "github.com/lib/pq" - "github.com/libsv/go-p2p/chaincfg/chainhash" + + "github.com/bitcoin-sv/arc/internal/blocktx/store" ) -func (p *PostgreSQL) RegisterTransactions(ctx context.Context, txHashes [][]byte) ([]*chainhash.Hash, error) { +func (p *PostgreSQL) RegisterTransactions(ctx context.Context, txHashes [][]byte) error { const q = ` - INSERT INTO blocktx.transactions (hash, is_registered) - 
SELECT hash, TRUE + INSERT INTO blocktx.registered_transactions (hash) + SELECT hash FROM UNNEST ($1::BYTEA[]) as hash - ON CONFLICT (hash) DO UPDATE - SET is_registered = TRUE - RETURNING hash, inserted_at + ON CONFLICT (hash) DO NOTHING ` - now := p.now() - rows, err := p.db.QueryContext(ctx, q, pq.Array(txHashes)) + _, err := p.db.ExecContext(ctx, q, pq.Array(txHashes)) if err != nil { - return nil, errors.Join(store.ErrFailedToInsertTransactions, err) - } - defer rows.Close() - - updatedTxs := make([]*chainhash.Hash, 0) - for rows.Next() { - var hash []byte - var insertedAt time.Time - - err = rows.Scan(&hash, &insertedAt) - if err != nil { - return nil, errors.Join(store.ErrFailedToGetRows, err) - } - - if insertedAt.Before(now) { - ch, _ := chainhash.NewHash(hash) - updatedTxs = append(updatedTxs, ch) - } + return errors.Join(store.ErrFailedToInsertTransactions, err) } - return updatedTxs, nil + return nil } diff --git a/internal/blocktx/store/postgresql/set_block_processing.go b/internal/blocktx/store/postgresql/set_block_processing.go index eae77a939..2d2a61b4e 100644 --- a/internal/blocktx/store/postgresql/set_block_processing.go +++ b/internal/blocktx/store/postgresql/set_block_processing.go @@ -4,11 +4,11 @@ import ( "context" "errors" + "github.com/jackc/pgx/v5/pgconn" + "github.com/libsv/go-p2p/chaincfg/chainhash" + "github.com/bitcoin-sv/arc/internal/blocktx/store" "github.com/bitcoin-sv/arc/internal/tracing" - - "github.com/lib/pq" - "github.com/libsv/go-p2p/chaincfg/chainhash" ) func (p *PostgreSQL) SetBlockProcessing(ctx context.Context, hash *chainhash.Hash, setProcessedBy string) (string, error) { @@ -22,10 +22,10 @@ func (p *PostgreSQL) SetBlockProcessing(ctx context.Context, hash *chainhash.Has var processedBy string err := p.db.QueryRowContext(ctx, qInsert, hash[:], setProcessedBy).Scan(&processedBy) if err != nil { - var pqErr *pq.Error + var pqErr *pgconn.PgError // Error 23505 is: "duplicate key violates unique constraint" - if errors.As(err, &pqErr) && pqErr.Code == pq.ErrorCode("23505") { + if errors.As(err, &pqErr) && pqErr.Code == "23505" { err = p.db.QueryRowContext(ctx, `SELECT processed_by FROM blocktx.block_processing WHERE block_hash = $1`, hash[:]).Scan(&processedBy) if err != nil { return "", errors.Join(store.ErrFailedToSetBlockProcessing, err) diff --git a/internal/blocktx/store/postgresql/upsert_block_transactions.go b/internal/blocktx/store/postgresql/upsert_block_transactions.go deleted file mode 100644 index 9598151d9..000000000 --- a/internal/blocktx/store/postgresql/upsert_block_transactions.go +++ /dev/null @@ -1,74 +0,0 @@ -package postgresql - -import ( - "context" - "errors" - - "github.com/lib/pq" - "go.opentelemetry.io/otel/attribute" - - "github.com/bitcoin-sv/arc/internal/blocktx/store" - "github.com/bitcoin-sv/arc/internal/tracing" -) - -// UpsertBlockTransactions upserts the transaction hashes for a given block hash and returns updated registered transactions hashes. -func (p *PostgreSQL) UpsertBlockTransactions(ctx context.Context, blockID uint64, txsWithMerklePaths []store.TxWithMerklePath) (err error) { - ctx, span := tracing.StartTracing(ctx, "UpsertBlockTransactions", p.tracingEnabled, append(p.tracingAttributes, attribute.Int("updates", len(txsWithMerklePaths)))...) 
- defer func() { - tracing.EndTracing(span, err) - }() - - txHashes := make([][]byte, len(txsWithMerklePaths)) - blockIDs := make([]uint64, len(txsWithMerklePaths)) - merklePaths := make([]string, len(txsWithMerklePaths)) - for pos, tx := range txsWithMerklePaths { - txHashes[pos] = tx.Hash - merklePaths[pos] = tx.MerklePath - blockIDs[pos] = blockID - } - - qBulkUpsert := ` - INSERT INTO blocktx.transactions (hash) - SELECT UNNEST($1::BYTEA[]) - ON CONFLICT (hash) - DO UPDATE SET hash = EXCLUDED.hash - RETURNING id` - - rows, err := p.db.QueryContext(ctx, qBulkUpsert, pq.Array(txHashes)) - if err != nil { - return errors.Join(store.ErrFailedToUpsertTransactions, err) - } - - counter := 0 - txIDs := make([]uint64, len(txsWithMerklePaths)) - for rows.Next() { - var txID uint64 - err = rows.Scan(&txID) - if err != nil { - return errors.Join(store.ErrFailedToGetRows, err) - } - - txIDs[counter] = txID - counter++ - } - - if len(txIDs) != len(txsWithMerklePaths) { - return errors.Join(store.ErrMismatchedTxIDsAndMerklePathLength, err) - } - - const qMapInsert = ` - INSERT INTO blocktx.block_transactions_map ( - blockid - ,txid - ,merkle_path - ) - SELECT * FROM UNNEST($1::INT[], $2::INT[], $3::TEXT[]) - ON CONFLICT DO NOTHING - ` - _, err = p.db.ExecContext(ctx, qMapInsert, pq.Array(blockIDs), pq.Array(txIDs), pq.Array(merklePaths)) - if err != nil { - return errors.Join(store.ErrFailedToUpsertBlockTransactionsMap, err) - } - - return nil -} diff --git a/internal/blocktx/store/store.go b/internal/blocktx/store/store.go index d5f566fa5..8ba523aa6 100644 --- a/internal/blocktx/store/store.go +++ b/internal/blocktx/store/store.go @@ -15,6 +15,7 @@ var ( ErrBlockNotFound = errors.New("block not found") ErrUnableToPrepareStatement = errors.New("unable to prepare statement") ErrUnableToDeleteRows = errors.New("unable to delete rows") + ErrUnableToGetSQLConnection = errors.New("unable to get or create sql connection") ErrFailedToInsertBlock = errors.New("failed to insert block") ErrFailedToUpdateBlockStatuses = errors.New("failed to update block statuses") ErrFailedToOpenDB = errors.New("failed to open postgres database") @@ -23,6 +24,7 @@ var ( ErrFailedToSetBlockProcessing = errors.New("failed to set block processing") ErrFailedToUpsertTransactions = errors.New("failed to upsert transactions") ErrFailedToUpsertBlockTransactionsMap = errors.New("failed to upsert block transactions map") + ErrFailedToParseHash = errors.New("failed to parse hash") ErrMismatchedTxIDsAndMerklePathLength = errors.New("mismatched tx IDs and merkle path length") ) @@ -31,20 +33,21 @@ type Stats struct { } type BlocktxStore interface { - RegisterTransactions(ctx context.Context, txHashes [][]byte) (updatedTxs []*chainhash.Hash, err error) + RegisterTransactions(ctx context.Context, txHashes [][]byte) error GetBlock(ctx context.Context, hash *chainhash.Hash) (*blocktx_api.Block, error) GetLongestBlockByHeight(ctx context.Context, height uint64) (*blocktx_api.Block, error) - GetChainTip(ctx context.Context) (*blocktx_api.Block, error) + GetChainTip(ctx context.Context, heightRange int) (*blocktx_api.Block, error) UpsertBlock(ctx context.Context, block *blocktx_api.Block) (uint64, error) - UpsertBlockTransactions(ctx context.Context, blockID uint64, txsWithMerklePaths []TxWithMerklePath) error + InsertBlockTransactions(ctx context.Context, blockID uint64, txsWithMerklePaths []TxHashWithMerkleTreeIndex) error MarkBlockAsDone(ctx context.Context, hash *chainhash.Hash, size uint64, txCount uint64) error GetBlockGaps(ctx 
context.Context, heightRange int) ([]*BlockGap, error) ClearBlocktxTable(ctx context.Context, retentionDays int32, table string) (*blocktx_api.RowsAffectedResponse, error) - GetMinedTransactions(ctx context.Context, hashes [][]byte, onlyLongestChain bool) ([]TransactionBlock, error) + GetMinedTransactions(ctx context.Context, hashes [][]byte) ([]BlockTransaction, error) GetLongestChainFromHeight(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) GetStaleChainBackFromHash(ctx context.Context, hash []byte) ([]*blocktx_api.Block, error) GetOrphansBackToNonOrphanAncestor(ctx context.Context, hash []byte) (orphans []*blocktx_api.Block, nonOrphanAncestor *blocktx_api.Block, err error) - GetRegisteredTxsByBlockHashes(ctx context.Context, blockHashes [][]byte) ([]TransactionBlock, error) + GetRegisteredTxsByBlockHashes(ctx context.Context, blockHashes [][]byte) ([]BlockTransaction, error) + GetBlockTransactionsHashes(ctx context.Context, blockHash []byte) ([]*chainhash.Hash, error) UpdateBlocksStatuses(ctx context.Context, blockStatusUpdates []BlockStatusUpdate) error GetStats(ctx context.Context) (*Stats, error) diff --git a/internal/message_queue/nats/client/nats_core/nats_core_client.go b/internal/message_queue/nats/client/nats_core/nats_core_client.go index e2cb1e4a2..1eabeed54 100644 --- a/internal/message_queue/nats/client/nats_core/nats_core_client.go +++ b/internal/message_queue/nats/client/nats_core/nats_core_client.go @@ -91,7 +91,7 @@ func (c Client) Publish(ctx context.Context, topic string, data []byte) (err err } func (c Client) PublishMarshal(ctx context.Context, topic string, m proto.Message) (err error) { - ctx, span := tracing.StartTracing(ctx, "Publish", c.tracingEnabled, c.tracingAttributes...) + ctx, span := tracing.StartTracing(ctx, "PublishMarshal", c.tracingEnabled, c.tracingAttributes...) defer func() { tracing.EndTracing(span, err) }() diff --git a/internal/metamorph/client.go b/internal/metamorph/client.go index 94c5c5970..d9fccc30f 100644 --- a/internal/metamorph/client.go +++ b/internal/metamorph/client.go @@ -225,7 +225,7 @@ func (m *Metamorph) GetTransactionStatus(ctx context.Context, txID string) (txSt return txStatus, nil } -// GetTransactionStatusეს gets the statusეს of all transactions. +// GetTransactionStatuses gets the status of all transactions. func (m *Metamorph) GetTransactionStatuses(ctx context.Context, txIDs []string) (txStatus []*TransactionStatus, err error) { ctx, span := tracing.StartTracing(ctx, "GetTransactionStatus", m.tracingEnabled, append(m.tracingAttributes, attribute.String("txIDs", txIDs[0]))...) defer func() { diff --git a/internal/metamorph/processor.go b/internal/metamorph/processor.go index dea8596b5..46740e766 100644 --- a/internal/metamorph/processor.go +++ b/internal/metamorph/processor.go @@ -327,6 +327,30 @@ func (p *Processor) updateMined(ctx context.Context, txsBlocks []*blocktx_api.Tr p.delTxFromCache(data.Hash) } + + p.rebroadcastStaleTxs(ctx, txsBlocks) +} + +func (p *Processor) rebroadcastStaleTxs(ctx context.Context, txsBlocks []*blocktx_api.TransactionBlock) { + _, span := tracing.StartTracing(ctx, "rebroadcastStaleTxs", p.tracingEnabled, p.tracingAttributes...) 
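+	// Transactions whose block became STALE after a reorg are re-announced to the
+	// network below so they can be mined again into a block on the longest chain.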
+ defer tracing.EndTracing(span, nil) + + for _, tx := range txsBlocks { + if tx.BlockStatus == blocktx_api.Status_STALE { + txHash, err := chainhash.NewHash(tx.TransactionHash) + if err != nil { + p.logger.Warn("error parsing transaction hash") + continue + } + + p.logger.Debug("Re-announcing stale tx", slog.String("hash", txHash.String())) + + peers := p.pm.AnnounceTransaction(txHash, nil) + if len(peers) == 0 { + p.logger.Warn("transaction was not announced to any peer during rebroadcast", slog.String("hash", txHash.String())) + } + } + } } func (p *Processor) StartProcessSubmittedTxs() { @@ -792,7 +816,7 @@ func (p *Processor) ProcessTransaction(ctx context.Context, req *ProcessorReques // register transaction in blocktx using message queue if err = p.mqClient.Publish(ctx, RegisterTxTopic, req.Data.Hash[:]); err != nil { - p.logger.Error("failed to register tx in blocktx", slog.String("hash", req.Data.Hash.String()), slog.String("err", err.Error())) + p.logger.Error("Failed to register tx in blocktx", slog.String("hash", req.Data.Hash.String()), slog.String("err", err.Error())) } // broadcast that transaction is stored to client diff --git a/internal/metamorph/processor_helpers.go b/internal/metamorph/processor_helpers.go index f3e68149a..a50bc206a 100644 --- a/internal/metamorph/processor_helpers.go +++ b/internal/metamorph/processor_helpers.go @@ -3,9 +3,10 @@ package metamorph import ( "encoding/json" "errors" - "github.com/libsv/go-p2p/chaincfg/chainhash" "log/slog" + "github.com/libsv/go-p2p/chaincfg/chainhash" + "github.com/bitcoin-sv/arc/internal/cache" "github.com/bitcoin-sv/arc/internal/callbacker/callbacker_api" "github.com/bitcoin-sv/arc/internal/metamorph/metamorph_api" @@ -116,15 +117,11 @@ func (p *Processor) getStatusUpdateCount() (int, error) { } func shouldUpdateCompetingTxs(new, found store.UpdateStatus) bool { - if new.Status >= found.Status && !unorderedEqual(new.CompetingTxs, found.CompetingTxs) { - return true - } - - return false + return new.Status >= found.Status && !unorderedEqual(new.CompetingTxs, found.CompetingTxs) } func shouldUpdateStatus(new, found store.UpdateStatus) bool { - return new.Status > found.Status + return new.Status > found.Status || found.Status == metamorph_api.Status_MINED_IN_STALE_BLOCK } // unorderedEqual checks if two string slices contain diff --git a/internal/metamorph/store/postgresql/fixtures/update_double_spend/metamorph.transactions.yaml b/internal/metamorph/store/postgresql/fixtures/update_double_spend/metamorph.transactions.yaml index b21e87ca5..fedf67bf8 100644 --- a/internal/metamorph/store/postgresql/fixtures/update_double_spend/metamorph.transactions.yaml +++ b/internal/metamorph/store/postgresql/fixtures/update_double_spend/metamorph.transactions.yaml @@ -30,3 +30,8 @@ status: 70 stored_at: 2023-10-01 14:00:00 last_submitted_at: 2023-10-01 14:00:00 +- hash: 0xfe3ae78226a8a1c78039a7d10590a42dc4b691acaa8cbc6831b464da49e8ba08 + locked_by: metamorph-1 + status: 115 + stored_at: 2023-10-01 14:00:00 + last_submitted_at: 2023-10-01 14:00:00 diff --git a/internal/metamorph/store/postgresql/fixtures/update_status/metamorph.transactions.yaml b/internal/metamorph/store/postgresql/fixtures/update_status/metamorph.transactions.yaml index d38c311d2..f8255e222 100644 --- a/internal/metamorph/store/postgresql/fixtures/update_status/metamorph.transactions.yaml +++ b/internal/metamorph/store/postgresql/fixtures/update_status/metamorph.transactions.yaml @@ -28,3 +28,8 @@ status: 70 stored_at: 2023-10-01 14:00:00 last_submitted_at: 2023-10-01 
14:00:00 +- hash: 0xfe3ae78226a8a1c78039a7d10590a42dc4b691acaa8cbc6831b464da49e8ba08 + locked_by: metamorph-1 + status: 115 + stored_at: 2023-10-01 14:00:00 + last_submitted_at: 2023-10-01 14:00:00 diff --git a/internal/metamorph/store/postgresql/postgres.go b/internal/metamorph/store/postgresql/postgres.go index 660db6f1b..930e0a924 100644 --- a/internal/metamorph/store/postgresql/postgres.go +++ b/internal/metamorph/store/postgresql/postgres.go @@ -662,8 +662,8 @@ func (p *PostgreSQL) UpdateStatusBulk(ctx context.Context, updates []store.Updat SELECT t.hash, t.status, t.reject_reason, t.history_update, t.timestamp FROM UNNEST($2::BYTEA[], $3::INT[], $4::TEXT[], $5::JSONB[], $6::TIMESTAMP WITH TIME ZONE[]) AS t(hash, status, reject_reason, history_update, timestamp) ) AS bulk_query - WHERE metamorph.transactions.hash = bulk_query.hash - AND metamorph.transactions.status < bulk_query.status + WHERE metamorph.transactions.hash=bulk_query.hash + AND (metamorph.transactions.status < bulk_query.status OR metamorph.transactions.status=$7) RETURNING metamorph.transactions.stored_at ,metamorph.transactions.hash ,metamorph.transactions.status @@ -695,7 +695,15 @@ func (p *PostgreSQL) UpdateStatusBulk(ctx context.Context, updates []store.Updat return nil, err } - rows, err := tx.QueryContext(ctx, qBulk, p.now(), pq.Array(txHashes), pq.Array(statuses), pq.Array(rejectReasons), pq.Array(statusHistories), pq.Array(timestamps)) + rows, err := tx.QueryContext(ctx, qBulk, + p.now(), + pq.Array(txHashes), + pq.Array(statuses), + pq.Array(rejectReasons), + pq.Array(statusHistories), + pq.Array(timestamps), + metamorph_api.Status_MINED_IN_STALE_BLOCK, + ) if err != nil { return nil, err } @@ -853,7 +861,7 @@ func (p *PostgreSQL) UpdateDoubleSpend(ctx context.Context, updates []store.Upda AS t(hash, status, reject_reason, competing_txs) ) AS bulk_query WHERE metamorph.transactions.hash=bulk_query.hash - AND metamorph.transactions.status <= bulk_query.status + AND (metamorph.transactions.status <= bulk_query.status OR metamorph.transactions.status=$6) AND (metamorph.transactions.competing_txs IS NULL OR LENGTH(metamorph.transactions.competing_txs) < LENGTH(bulk_query.competing_txs)) RETURNING metamorph.transactions.stored_at @@ -916,7 +924,14 @@ func (p *PostgreSQL) UpdateDoubleSpend(ctx context.Context, updates []store.Upda } } - rows, err = tx.QueryContext(ctx, qBulk, p.now(), pq.Array(txHashes), pq.Array(statuses), pq.Array(rejectReasons), pq.Array(competingTxs)) + rows, err = tx.QueryContext(ctx, qBulk, + p.now(), + pq.Array(txHashes), + pq.Array(statuses), + pq.Array(rejectReasons), + pq.Array(competingTxs), + metamorph_api.Status_MINED_IN_STALE_BLOCK, + ) if err != nil { if rollbackErr := tx.Rollback(); rollbackErr != nil { return nil, errors.Join(err, fmt.Errorf("failed to rollback: %v", rollbackErr)) diff --git a/internal/metamorph/store/postgresql/postgres_test.go b/internal/metamorph/store/postgresql/postgres_test.go index 24bdd122d..9544f6374 100644 --- a/internal/metamorph/store/postgresql/postgres_test.go +++ b/internal/metamorph/store/postgresql/postgres_test.go @@ -358,6 +358,10 @@ func TestPostgresDB(t *testing.T) { Hash: *testutils.RevChainhash(t, "7809b730cbe7bb723f299a4e481fb5165f31175876392a54cde85569a18cc75f"), // update not expected - old status > new status Status: metamorph_api.Status_SENT_TO_NETWORK, }, + { + Hash: *testutils.RevChainhash(t, "fe3ae78226a8a1c78039a7d10590a42dc4b691acaa8cbc6831b464da49e8ba08"), // update expected - old status = MINED_IN_STALE_BLOCK + Status: 
metamorph_api.Status_SEEN_ON_NETWORK, + }, { Hash: *testutils.RevChainhash(t, "3ce1e0c6cbbbe2118c3f80d2e6899d2d487f319ef0923feb61f3d26335b2225c"), // update not expected - hash non-existent in db Status: metamorph_api.Status_ANNOUNCED_TO_NETWORK, @@ -367,7 +371,7 @@ func TestPostgresDB(t *testing.T) { Status: metamorph_api.Status_ANNOUNCED_TO_NETWORK, }, } - updatedStatuses := 3 + updatedStatuses := 4 statusUpdates, err := postgresDB.UpdateStatusBulk(ctx, updates) require.NoError(t, err) @@ -383,6 +387,9 @@ func TestPostgresDB(t *testing.T) { require.Equal(t, metamorph_api.Status_SEEN_ON_NETWORK, statusUpdates[2].Status) require.Equal(t, *testutils.RevChainhash(t, "ee76f5b746893d3e6ae6a14a15e464704f4ebd601537820933789740acdcf6aa"), *statusUpdates[2].Hash) + require.Equal(t, metamorph_api.Status_SEEN_ON_NETWORK, statusUpdates[3].Status) + require.Equal(t, *testutils.RevChainhash(t, "fe3ae78226a8a1c78039a7d10590a42dc4b691acaa8cbc6831b464da49e8ba08"), *statusUpdates[3].Hash) + returnedDataRequested, err := postgresDB.Get(ctx, testutils.RevChainhash(t, "7809b730cbe7bb723f299a4e481fb5165f31175876392a54cde85569a18cc75f")[:]) require.NoError(t, err) require.Equal(t, metamorph_api.Status_ACCEPTED_BY_NETWORK, returnedDataRequested.Status) @@ -423,6 +430,11 @@ func TestPostgresDB(t *testing.T) { CompetingTxs: []string{"1234"}, Error: errors.New("double spend attempted"), }, + { + Hash: *testutils.RevChainhash(t, "fe3ae78226a8a1c78039a7d10590a42dc4b691acaa8cbc6831b464da49e8ba08"), // update expected - old status = MINED_IN_STALE_BLOCK + Status: metamorph_api.Status_DOUBLE_SPEND_ATTEMPTED, + CompetingTxs: []string{"1234"}, + }, { Hash: *testutils.RevChainhash(t, "3ce1e0c6cbbbe2118c3f80d2e6899d2d487f319ef0923feb61f3d26335b2225c"), // update not expected - hash non-existent in db Status: metamorph_api.Status_DOUBLE_SPEND_ATTEMPTED, @@ -434,7 +446,7 @@ func TestPostgresDB(t *testing.T) { CompetingTxs: []string{"1234"}, }, } - updatedStatuses := 4 + updatedStatuses := 5 statusUpdates, err := postgresDB.UpdateDoubleSpend(ctx, updates) require.NoError(t, err) @@ -457,6 +469,10 @@ func TestPostgresDB(t *testing.T) { require.Equal(t, []string{"1234"}, statusUpdates[3].CompetingTxs) require.Equal(t, "double spend attempted", statusUpdates[3].RejectReason) + require.Equal(t, metamorph_api.Status_DOUBLE_SPEND_ATTEMPTED, statusUpdates[4].Status) + require.Equal(t, *testutils.RevChainhash(t, "fe3ae78226a8a1c78039a7d10590a42dc4b691acaa8cbc6831b464da49e8ba08"), *statusUpdates[4].Hash) + require.Equal(t, []string{"1234"}, statusUpdates[4].CompetingTxs) + statusUpdates, err = postgresDB.UpdateDoubleSpend(ctx, updates) require.NoError(t, err) require.Len(t, statusUpdates, 0) diff --git a/test/submit_01_single_test.go b/test/submit_01_single_test.go index e38fc7d5a..55aa66942 100644 --- a/test/submit_01_single_test.go +++ b/test/submit_01_single_test.go @@ -5,7 +5,6 @@ package test import ( "embed" "encoding/hex" - "encoding/json" "fmt" "net/http" "strconv" @@ -14,6 +13,7 @@ import ( sdkTx "github.com/bitcoin-sv/go-sdk/transaction" "github.com/libsv/go-bc" + "github.com/libsv/go-p2p/chaincfg/chainhash" "github.com/stretchr/testify/require" "github.com/bitcoin-sv/arc/internal/node_client" @@ -123,28 +123,14 @@ func TestSubmitSingle(t *testing.T) { t.Logf("Transaction status: %s", statusResponse.TxStatus) // Check Merkle path - require.NotNil(t, statusResponse.MerklePath) - t.Logf("BUMP: %s", *statusResponse.MerklePath) - bump, err := bc.NewBUMPFromStr(*statusResponse.MerklePath) - require.NoError(t, err) - - jsonB, err 
:= json.Marshal(bump) - require.NoError(t, err) - t.Logf("BUMPjson: %s", string(jsonB)) - - root, err := bump.CalculateRootGivenTxid(tc.tx.TxID()) - require.NoError(t, err) - - require.NotNil(t, statusResponse.BlockHeight) - blockRoot := node_client.GetBlockRootByHeight(t, bitcoind, int(*statusResponse.BlockHeight)) - require.Equal(t, blockRoot, root) + checkMerklePath(t, statusResponse) }) } } func TestSubmitMined(t *testing.T) { - t.Run("submit mined tx", func(t *testing.T) { - // submit an unregistered, already mined transaction. ARC should return the status as MINED for the transaction. + t.Run("submit mined tx + calculate merkle path", func(t *testing.T) { + // Submit an unregistered, already mined transaction. ARC should return the status as MINED for the transaction. // given address, _ := node_client.FundNewWallet(t, bitcoind) @@ -154,6 +140,29 @@ func TestSubmitMined(t *testing.T) { tx, _ := sdkTx.NewTransactionFromHex(rawTx.Hex) exRawTx := tx.String() + blockData := node_client.GetBlockDataByBlockHash(t, bitcoind, rawTx.BlockHash) + blockTxHashes := make([]*chainhash.Hash, len(blockData.Txs)) + var txIndex uint64 + + for i, blockTx := range blockData.Txs { + h, err := chainhash.NewHashFromStr(blockTx) + require.NoError(t, err) + + blockTxHashes[i] = h + + if blockTx == rawTx.Hash { + txIndex = uint64(i) + } + } + + merkleTree := bc.BuildMerkleTreeStoreChainHash(blockTxHashes) + require.Equal(t, merkleTree[len(merkleTree)-1].String(), blockData.MerkleRoot) + + merklePath, err := bc.NewBUMPFromMerkleTreeAndIndex(blockData.Height, merkleTree, txIndex) + require.NoError(t, err) + merklePathStr, err := merklePath.String() + require.NoError(t, err) + callbackReceivedChan := make(chan *TransactionResponse) callbackErrChan := make(chan error) @@ -175,6 +184,7 @@ func TestSubmitMined(t *testing.T) { case status := <-callbackReceivedChan: require.Equal(t, rawTx.TxID, status.Txid) require.Equal(t, StatusMined, status.TxStatus) + require.Equal(t, merklePathStr, *status.MerklePath) case err := <-callbackErrChan: t.Fatalf("callback error: %v", err) case <-callbackTimeout: diff --git a/test/submit_02_batch_test.go b/test/submit_02_batch_test.go index e65b49b15..dba0609b8 100644 --- a/test/submit_02_batch_test.go +++ b/test/submit_02_batch_test.go @@ -59,5 +59,38 @@ func TestBatchChainedTxs(t *testing.T) { if hasFailed { t.FailNow() } + + node_client.Generate(t, bitcoind, 1) + + // Check a couple of Merkle paths + statusURL := fmt.Sprintf("%s/%s", arcEndpointV1Tx, txs[3].TxID()) + statusResponse := getRequest[TransactionResponse](t, statusURL) + require.Equal(t, StatusMined, statusResponse.TxStatus) + checkMerklePath(t, statusResponse) + + statusURL = fmt.Sprintf("%s/%s", arcEndpointV1Tx, txs[4].TxID()) + statusResponse = getRequest[TransactionResponse](t, statusURL) + require.Equal(t, StatusMined, statusResponse.TxStatus) + checkMerklePath(t, statusResponse) + + statusURL = fmt.Sprintf("%s/%s", arcEndpointV1Tx, txs[5].TxID()) + statusResponse = getRequest[TransactionResponse](t, statusURL) + require.Equal(t, StatusMined, statusResponse.TxStatus) + checkMerklePath(t, statusResponse) + + statusURL = fmt.Sprintf("%s/%s", arcEndpointV1Tx, txs[15].TxID()) + statusResponse = getRequest[TransactionResponse](t, statusURL) + require.Equal(t, StatusMined, statusResponse.TxStatus) + checkMerklePath(t, statusResponse) + + statusURL = fmt.Sprintf("%s/%s", arcEndpointV1Tx, txs[16].TxID()) + statusResponse = getRequest[TransactionResponse](t, statusURL) + require.Equal(t, StatusMined, statusResponse.TxStatus) 
+ checkMerklePath(t, statusResponse) + + statusURL = fmt.Sprintf("%s/%s", arcEndpointV1Tx, txs[17].TxID()) + statusResponse = getRequest[TransactionResponse](t, statusURL) + require.Equal(t, StatusMined, statusResponse.TxStatus) + checkMerklePath(t, statusResponse) }) } diff --git a/test/submit_05_reorg_test.go b/test/submit_05_reorg_test.go index 50f336c48..48597f520 100644 --- a/test/submit_05_reorg_test.go +++ b/test/submit_05_reorg_test.go @@ -13,9 +13,6 @@ import ( ) func TestReorg(t *testing.T) { - // TODO: remove the skip when gaps are filling quickly again - t.Skip("Skipping until gaps are being processed quickly again") - address, privateKey := node_client.FundNewWallet(t, bitcoind) utxos := node_client.GetUtxos(t, bitcoind, address) @@ -155,22 +152,11 @@ func TestReorg(t *testing.T) { require.Equal(t, StatusMined, statusResp.TxStatus) require.Equal(t, staleHash, *statusResp.BlockHash) - // verify that tx2 is now MINED_IN_STALE_BLOCK + // verify that tx2 was rebroadcasted and is now MINED with new block data statusURL = fmt.Sprintf("%s/%s", arcEndpointV1Tx, tx2.TxID()) statusResp = getRequest[TransactionResponse](t, statusURL) - require.Equal(t, StatusMinedInStaleBlock, statusResp.TxStatus) + require.Equal(t, StatusMined, statusResp.TxStatus) require.Equal(t, tx2BlockHash, *statusResp.BlockHash) - - // verify that callback for tx2 was received with status MINED_IN_STALE_BLOCK - select { - case status := <-callbackReceivedChan: - require.Equal(t, tx2.TxID(), status.Txid) - require.Equal(t, StatusMinedInStaleBlock, status.TxStatus) - case err := <-callbackErrChan: - t.Fatalf("callback error: %v", err) - case <-time.After(1 * time.Second): - t.Fatal("callback exceeded timeout") - } } func call(t *testing.T, method string, params []interface{}) { diff --git a/test/utils.go b/test/utils.go index 76da39afa..c48e9ee0d 100644 --- a/test/utils.go +++ b/test/utils.go @@ -20,7 +20,10 @@ import ( "github.com/bitcoin-sv/go-sdk/script" sdkTx "github.com/bitcoin-sv/go-sdk/transaction" "github.com/bitcoin-sv/go-sdk/transaction/template/p2pkh" + "github.com/libsv/go-bc" "github.com/stretchr/testify/require" + + "github.com/bitcoin-sv/arc/internal/node_client" ) const ( @@ -399,3 +402,21 @@ func generateNewUnlockingScriptFromRandomKey() (*script.Script, error) { } return sc, nil } + +func checkMerklePath(t *testing.T, statusResponse TransactionResponse) { + require.NotNil(t, statusResponse.MerklePath) + + bump, err := bc.NewBUMPFromStr(*statusResponse.MerklePath) + require.NoError(t, err) + + jsonB, err := json.Marshal(bump) + require.NoError(t, err) + t.Logf("BUMPjson: %s", string(jsonB)) + + root, err := bump.CalculateRootGivenTxid(statusResponse.Txid) + require.NoError(t, err) + + require.NotNil(t, statusResponse.BlockHeight) + blockRoot := node_client.GetBlockRootByHeight(t, bitcoind, int(*statusResponse.BlockHeight)) + require.Equal(t, blockRoot, root) +}