BPAAS-1054: Expand load unmined test
boecklim committed Oct 24, 2023
1 parent 015de71 commit e84c2d8
Showing 2 changed files with 118 additions and 50 deletions.
161 changes: 116 additions & 45 deletions metamorph/processor_test.go
@@ -1,7 +1,9 @@
package metamorph

import (
"bytes"
"context"
"errors"
"fmt"
"log/slog"
"os"
@@ -90,44 +92,91 @@ func TestNewProcessor(t *testing.T) {
}

func TestLoadUnmined(t *testing.T) {
storedAt := time.Date(2023, 10, 3, 5, 0, 0, 0, time.UTC)

tt := []struct {
name string
storedData []*store.StoreData
name string
storedData []*store.StoreData
updateStatusErr error
getTransactionBlockErr error

expectedItemsBeforeLoadUnmined int
expectedItemsAfterLoadUnmined int
expectedItemTxHashes []*chainhash.Hash
expectedItemTxHashesFinal []*chainhash.Hash
}{
{
name: "no unmined transactions",

expectedItemsBeforeLoadUnmined: 0,
expectedItemsAfterLoadUnmined: 0,
name: "no unmined transactions loaded",
},
{
name: "2 unmined transactions",
name: "load 3 unmined transactions, TX2 was mined",
storedData: []*store.StoreData{
{
StoredAt: testdata.Time,
AnnouncedAt: testdata.Time.Add(1 * time.Second),
MinedAt: testdata.Time.Add(2 * time.Second),
StoredAt: storedAt,
AnnouncedAt: storedAt.Add(1 * time.Second),
Hash: testdata.TX1Hash,
Status: metamorph_api.Status_SENT_TO_NETWORK,
},
{
StoredAt: storedAt,
AnnouncedAt: storedAt.Add(1 * time.Second),
Hash: testdata.TX2Hash,
Status: metamorph_api.Status_SEEN_ON_NETWORK,
CallbackUrl: "http://api.example.com",
},
{
StoredAt: storedAt,
AnnouncedAt: storedAt.Add(1 * time.Second),
Hash: testdata.TX3Hash,
Status: metamorph_api.Status_SEEN_ON_NETWORK,
},
},

expectedItemTxHashesFinal: []*chainhash.Hash{testdata.TX1Hash, testdata.TX3Hash},
},
{
name: "load 3 unmined transactions, none mined",
storedData: []*store.StoreData{
{
StoredAt: storedAt,
AnnouncedAt: storedAt.Add(1 * time.Second),
Hash: testdata.TX1Hash,
Status: metamorph_api.Status_ANNOUNCED_TO_NETWORK,
},
{
StoredAt: storedAt,
AnnouncedAt: storedAt.Add(1 * time.Second),
Hash: testdata.TX2Hash,
Status: metamorph_api.Status_STORED,
},
},

expectedItemTxHashesFinal: []*chainhash.Hash{testdata.TX1Hash, testdata.TX2Hash},
},
{
name: "update status fails",
storedData: []*store.StoreData{
{
StoredAt: storedAt,
AnnouncedAt: storedAt.Add(1 * time.Second),
Hash: testdata.TX2Hash,
Status: metamorph_api.Status_STORED,
},
},
updateStatusErr: errors.New("failed to update status"),

expectedItemTxHashesFinal: []*chainhash.Hash{testdata.TX2Hash},
},
{
name: "get transaction block fails",
storedData: []*store.StoreData{
{
StoredAt: testdata.Time,
AnnouncedAt: testdata.Time.Add(1 * time.Second),
MinedAt: testdata.Time.Add(2 * time.Second),
Hash: testdata.TX1Hash,
Status: metamorph_api.Status_SENT_TO_NETWORK,
CallbackUrl: "https://test.com",
CallbackToken: "token",
StoredAt: storedAt,
AnnouncedAt: storedAt.Add(1 * time.Second),
Hash: testdata.TX2Hash,
Status: metamorph_api.Status_SEEN_ON_NETWORK,
},
},
getTransactionBlockErr: errors.New("failed to get transaction block"),

expectedItemTxHashes: []*chainhash.Hash{testdata.TX1Hash, testdata.TX2Hash},
expectedItemsBeforeLoadUnmined: 0,
expectedItemsAfterLoadUnmined: 2,
expectedItemTxHashesFinal: []*chainhash.Hash{testdata.TX2Hash},
},
}

@@ -136,24 +185,19 @@ func TestLoadUnmined(t *testing.T) {
pm := p2p.NewPeerManagerMock()

btxMock := &blockTxMock.ClientIMock{
GetTransactionBlocksFunc: func(ctx context.Context, transaction *blocktx_api.Transactions) (*blocktx_api.TransactionBlocks, error) {
blocks := &blocktx_api.TransactionBlocks{
TransactionBlocks: []*blocktx_api.TransactionBlock{
{
BlockHash: testdata.Block1Hash[:],
BlockHeight: 123,
TransactionHash: testdata.TX1Hash.CloneBytes(),
},
},
}
return blocks, nil
},
GetTransactionBlockFunc: func(ctx context.Context, transaction *blocktx_api.Transaction) (*blocktx_api.RegisterTransactionResponse, error) {
txResponse := &blocktx_api.RegisterTransactionResponse{
BlockHash: testdata.Block2Hash[:],
BlockHeight: 2,

var txResponse *blocktx_api.RegisterTransactionResponse

// TX2 was mined
if bytes.Equal(testdata.TX2Hash[:], transaction.Hash[:]) {
txResponse = &blocktx_api.RegisterTransactionResponse{
BlockHash: testdata.Block2Hash[:],
BlockHeight: 2,
}
}
return txResponse, nil

return txResponse, tc.getTransactionBlockErr
},
}
mtmStore := &storeMock.MetamorphStoreMock{
@@ -163,21 +207,48 @@
}
return nil
},
UpdateMinedFunc: func(ctx context.Context, hash *chainhash.Hash, blockHash *chainhash.Hash, blockHeight uint64) error {
require.Equal(t, testdata.TX2Hash, hash)
return nil
},
UpdateStatusFunc: func(ctx context.Context, hash *chainhash.Hash, status metamorph_api.Status, rejectReason string) error {
require.True(t, bytes.Equal(testdata.TX2Hash[:], hash[:]))
return tc.updateStatusErr
},
GetFunc: func(ctx context.Context, key []byte) (*store.StoreData, error) {
return &store.StoreData{
StoredAt: storedAt,
AnnouncedAt: storedAt.Add(1 * time.Second),
Hash: testdata.TX2Hash,
Status: metamorph_api.Status_SEEN_ON_NETWORK,
CallbackUrl: "http://api.example.com",
BlockHash: testdata.Block2Hash,
BlockHeight: 2,
}, nil
},
}

processor, err := NewProcessor(mtmStore, pm, "test", nil, btxMock, WithProcessExpiredSeenTxsInterval(time.Millisecond*100))
processor, err := NewProcessor(mtmStore, pm, "test", nil, btxMock,
WithProcessExpiredSeenTxsInterval(time.Hour*24),
WithCacheExpiryTime(time.Hour*24),
WithNow(func() time.Time {
return storedAt.Add(1 * time.Hour)
}),
)
require.NoError(t, err)
assert.Equal(t, tc.expectedItemsBeforeLoadUnmined, processor.processorResponseMap.Len())
require.Equal(t, 0, processor.processorResponseMap.Len())
processor.LoadUnmined()
assert.Equal(t, tc.expectedItemsAfterLoadUnmined, processor.processorResponseMap.Len())

time.Sleep(time.Millisecond * 200)

items := processor.processorResponseMap.Items()
allItemHashes := make([]*chainhash.Hash, 0, len(processor.processorResponseMap.Items()))

for _, txHash := range tc.expectedItemTxHashes {
assert.Equal(t, txHash, items[*txHash].Hash)
for i, item := range processor.processorResponseMap.Items() {
require.Equal(t, i, *item.Hash)
allItemHashes = append(allItemHashes, item.Hash)
}

require.ElementsMatch(t, tc.expectedItemTxHashesFinal, allItemHashes)
})
}
}
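The heart of the expanded test is the branch inside GetTransactionBlockFunc: the mock returns a block only for TX2Hash, so LoadUnmined is expected to mark TX2 as mined and leave TX1 and TX3 in the response map. Below is a minimal, standalone sketch of that branch-on-input mock pattern; it is not part of the commit, and the names blockInfo and findBlock are invented for illustration.

// Standalone sketch (not from the commit): a stubbed lookup that resolves a
// block only for the one hash treated as mined, mirroring how the test's
// GetTransactionBlockFunc distinguishes mined from unmined transactions.
package main

import (
	"bytes"
	"fmt"
)

type blockInfo struct {
	Hash   []byte
	Height uint64
}

// findBlock stands in for the mocked lookup: only the mined hash resolves to
// a block; every other hash returns nil, i.e. still unmined.
func findBlock(minedHash, txHash []byte) *blockInfo {
	if bytes.Equal(minedHash, txHash) {
		return &blockInfo{Hash: []byte("block-2"), Height: 2}
	}
	return nil
}

func main() {
	mined := []byte("tx2")
	for _, tx := range []string{"tx1", "tx2", "tx3"} {
		if b := findBlock(mined, []byte(tx)); b != nil {
			fmt.Printf("%s: mined at height %d\n", tx, b.Height)
		} else {
			fmt.Printf("%s: still unmined\n", tx)
		}
	}
}

Returning nil for every hash other than the mined one keeps the stub deterministic, which is what lets the test close with require.ElementsMatch on exactly the hashes that should remain unmined.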
7 changes: 2 additions & 5 deletions testdata/data.go
@@ -18,10 +18,8 @@ var (
TX1Raw = "01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff1a0386c40b2f7461616c2e636f6d2f00cf47ad9c7af83836000000ffffffff0117564425000000001976a914522cf9e7626d9bd8729e5a1398ece40dad1b6a2f88ac00000000"
TX1RawBytes, _ = hex.DecodeString(TX1Raw)

TX2 = "1a8fda8c35b8fc30885e88d6eb0214e2b3a74c96c82c386cb463905446011fdf"
TX2Hash, _ = chainhash.NewHashFromStr(TX2)
TX2Raw = "01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff1a0385c40b2f7461616c2e636f6d2f0fde4a8a8fbbf5c636010000ffffffff02c9d74425000000001976a914522cf9e7626d9bd8729e5a1398ece40dad1b6a2f88ac0000000000000000fda502006a04ac1eed884d53027b2276657273696f6e223a22302e31222c22686569676874223a3737313230352c22707265764d696e65724964223a22303365393264336535633366376264393435646662663438653761393933393362316266623366313166333830616533306432383665376666326165633561323730222c22707265764d696e65724964536967223a2233303435303232313030643736333630653464323133333163613836663031386330343665353763393338663139373735303734373333333533363062653337303438636165316166333032323030626536363034353430323162663934363465393966356139353831613938633963663439353430373539386335396234373334623266646234383262663937222c226d696e65724964223a22303365393264336535633366376264393435646662663438653761393933393362316266623366313166333830616533306432383665376666326165633561323730222c2276637478223a7b2274784964223a2235373962343335393235613930656533396133376265336230306239303631653734633330633832343133663664306132303938653162656137613235313566222c22766f7574223a307d2c226d696e6572436f6e74616374223a7b22656d61696c223a22696e666f407461616c2e636f6d222c226e616d65223a225441414c20446973747269627574656420496e666f726d6174696f6e20546563686e6f6c6f67696573222c226d65726368616e74415049456e64506f696e74223a2268747470733a2f2f6d65726368616e746170692e7461616c2e636f6d2f227d7d473045022100f7a4d02865a503d202b4e3d1b045a7efaef830b3d27efd6482c939c376e58c7802202d9332ca0a8b90001fa143512b4b6f3fe00f227d9d882861cc87975cb8778da800000000"
TX2RawBytes, _ = hex.DecodeString(TX2Raw)
TX2 = "1a8fda8c35b8fc30885e88d6eb0214e2b3a74c96c82c386cb463905446011fdf"
TX2Hash, _ = chainhash.NewHashFromStr(TX2)

TX3 = "3f63399b3d9d94ba9c5b7398b9328dcccfcfd50f07ad8b214e766168c391642b"
TX3Hash, _ = chainhash.NewHashFromStr(TX3)
@@ -30,6 +28,5 @@ var (
TX4Hash, _ = chainhash.NewHashFromStr(TX4)

Time = time.Date(2009, 1, 03, 18, 15, 05, 0, time.UTC)
DefaultFees = `[{"feeType":"data","miningFee":{"bytes":1000,"satoshis":5},"relayFee":{"bytes":1000,"satoshis":5}},{"feeType":"standard","miningFee":{"bytes":1000,"satoshis":5},"relayFee":{"bytes":1000,"satoshis":5}}]`
DefaultPolicy = `{"excessiveblocksize":2000000000,"blockmaxsize":512000000,"maxtxsizepolicy":10000000,"maxorphantxsize":1000000000,"datacarriersize":4294967295,"maxscriptsizepolicy":500000,"maxopsperscriptpolicy":4294967295,"maxscriptnumlengthpolicy":10000,"maxpubkeyspermultisigpolicy":4294967295,"maxtxsigopscountspolicy":4294967295,"maxstackmemoryusagepolicy":100000000,"maxstackmemoryusageconsensus":200000000,"limitancestorcount":10000,"limitcpfpgroupmemberscount":25,"maxmempool":2000000000,"maxmempoolsizedisk":0,"mempoolmaxpercentcpfp":10,"acceptnonstdoutputs":true,"datacarrier":true,"minminingtxfee":5e-7,"maxstdtxvalidationduration":3,"maxnonstdtxvalidationduration":1000,"maxtxchainvalidationbudget":50,"validationclockcpu":true,"minconsolidationfactor":20,"maxconsolidationinputscriptsize":150,"minconfconsolidationinput":6,"minconsolidationinputmaturity":6,"acceptnonstdconsolidationinput":false}`
)
