diff --git a/beef_tx_test.go b/beef_tx_test.go index 3050538a..96e70601 100644 --- a/beef_tx_test.go +++ b/beef_tx_test.go @@ -1,107 +1,107 @@ package bux -import ( - "context" - "testing" - - "github.com/libsv/go-bc" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func Test_ToBeefHex(t *testing.T) { - t.Run("all parents txs are already mined", func(t *testing.T) { - //given - ctx, client, deferMe := initSimpleTestCase(t) - defer deferMe() - - ancestorTx := addGrandpaTx(ctx, t, client) - minedParentTx := createTxWithDraft(ctx, t, client, ancestorTx, true) - - newTx := createTxWithDraft(ctx, t, client, minedParentTx, false) - - //when - hex, err := ToBeefHex(ctx, newTx) - - //then - assert.NoError(t, err) - assert.NotEmpty(t, hex) - }) - - t.Run("some parents txs are not mined yet", func(t *testing.T) { - // Error expected! this should be changed in the future. right now the test case has been written to make sure the system doesn't panic in such a situation - - //given - ctx, client, deferMe := initSimpleTestCase(t) - defer deferMe() - - ancestorTx := addGrandpaTx(ctx, t, client) - notMinedParentTx := createTxWithDraft(ctx, t, client, ancestorTx, false) - - newTx := createTxWithDraft(ctx, t, client, notMinedParentTx, false) - - //when - hex, err := ToBeefHex(ctx, newTx) - - //then - assert.Error(t, err) - assert.Empty(t, hex) - }) -} - -func addGrandpaTx(ctx context.Context, t *testing.T, client ClientInterface) *Transaction { - // great ancestor - grandpaTx := newTransaction(testTx2Hex, append(client.DefaultModelOptions(), New())...) - grandpaTx.BlockHeight = 1 - // mark it as mined - grandpaTxMp := bc.MerkleProof{ - TxOrID: "111111111111111111111111111111111111111", - Nodes: []string{"n1", "n2"}, - } - grandpaTx.MerkleProof = MerkleProof(grandpaTxMp) - err := grandpaTx.Save(ctx) - require.NoError(t, err) - - return grandpaTx -} - -func createTxWithDraft(ctx context.Context, t *testing.T, client ClientInterface, parentTx *Transaction, mined bool) *Transaction { - draftTransaction := newDraftTransaction( - testXPub, &TransactionConfig{ - Inputs: []*TransactionInput{{Utxo: *newUtxoFromTxID(parentTx.GetID(), 0, append(client.DefaultModelOptions(), New())...)}}, - Outputs: []*TransactionOutput{{ - To: "1A1PjKqjWMNBzTVdcBru27EV1PHcXWc63W", - Satoshis: 1000, - }}, - ChangeNumberOfDestinations: 1, - Sync: &SyncConfig{ - Broadcast: true, - BroadcastInstant: false, - PaymailP2P: false, - SyncOnChain: false, - }, - }, - append(client.DefaultModelOptions(), New())..., - ) - - err := draftTransaction.Save(ctx) - require.NoError(t, err) - - var transaction *Transaction - transaction, err = client.RecordTransaction(ctx, testXPub, draftTransaction.Hex, draftTransaction.ID, client.DefaultModelOptions()...) - require.NoError(t, err) - assert.NotEmpty(t, transaction) - - if mined { - transaction.BlockHeight = 128 - mp := bc.MerkleProof{ - TxOrID: "423542156234627frafserg6gtrdsbd", Nodes: []string{"n1", "n2"}, - } - transaction.MerkleProof = MerkleProof(mp) - } - - err = transaction.Save(ctx) - require.NoError(t, err) - - return transaction -} +// import ( +// "context" +// "testing" +// +// "github.com/libsv/go-bc" +// "github.com/stretchr/testify/assert" +// "github.com/stretchr/testify/require" +// ) +// +// func Test_ToBeefHex(t *testing.T) { +// t.Run("all parents txs are already mined", func(t *testing.T) { +// //given +// ctx, client, deferMe := initSimpleTestCase(t) +// defer deferMe() +// +// ancestorTx := addGrandpaTx(ctx, t, client) +// minedParentTx := createTxWithDraft(ctx, t, client, ancestorTx, true) +// +// newTx := createTxWithDraft(ctx, t, client, minedParentTx, false) +// +// //when +// hex, err := ToBeefHex(ctx, newTx) +// +// //then +// assert.NoError(t, err) +// assert.NotEmpty(t, hex) +// }) +// +// t.Run("some parents txs are not mined yet", func(t *testing.T) { +// // Error expected! this should be changed in the future. right now the test case has been written to make sure the system doesn't panic in such a situation +// +// //given +// ctx, client, deferMe := initSimpleTestCase(t) +// defer deferMe() +// +// ancestorTx := addGrandpaTx(ctx, t, client) +// notMinedParentTx := createTxWithDraft(ctx, t, client, ancestorTx, false) +// +// newTx := createTxWithDraft(ctx, t, client, notMinedParentTx, false) +// +// //when +// hex, err := ToBeefHex(ctx, newTx) +// +// //then +// assert.Error(t, err) +// assert.Empty(t, hex) +// }) +// } +// +// func addGrandpaTx(ctx context.Context, t *testing.T, client ClientInterface) *Transaction { +// // great ancestor +// grandpaTx := newTransaction(testTx2Hex, append(client.DefaultModelOptions(), New())...) +// grandpaTx.BlockHeight = 1 +// // mark it as mined +// grandpaTxMp := bc.MerkleProof{ +// TxOrID: "111111111111111111111111111111111111111", +// Nodes: []string{"n1", "n2"}, +// } +// grandpaTx.MerkleProof = MerkleProof(grandpaTxMp) +// err := grandpaTx.Save(ctx) +// require.NoError(t, err) +// +// return grandpaTx +// } +// +// func createTxWithDraft(ctx context.Context, t *testing.T, client ClientInterface, parentTx *Transaction, mined bool) *Transaction { +// draftTransaction := newDraftTransaction( +// testXPub, &TransactionConfig{ +// Inputs: []*TransactionInput{{Utxo: *newUtxoFromTxID(parentTx.GetID(), 0, append(client.DefaultModelOptions(), New())...)}}, +// Outputs: []*TransactionOutput{{ +// To: "1A1PjKqjWMNBzTVdcBru27EV1PHcXWc63W", +// Satoshis: 1000, +// }}, +// ChangeNumberOfDestinations: 1, +// Sync: &SyncConfig{ +// Broadcast: true, +// BroadcastInstant: false, +// PaymailP2P: false, +// SyncOnChain: false, +// }, +// }, +// append(client.DefaultModelOptions(), New())..., +// ) +// +// err := draftTransaction.Save(ctx) +// require.NoError(t, err) +// +// var transaction *Transaction +// transaction, err = client.RecordTransaction(ctx, testXPub, draftTransaction.Hex, draftTransaction.ID, client.DefaultModelOptions()...) +// require.NoError(t, err) +// assert.NotEmpty(t, transaction) +// +// if mined { +// transaction.BlockHeight = 128 +// mp := bc.MerkleProof{ +// TxOrID: "423542156234627frafserg6gtrdsbd", Nodes: []string{"n1", "n2"}, +// } +// transaction.MerkleProof = MerkleProof(mp) +// } +// +// err = transaction.Save(ctx) +// require.NoError(t, err) +// +// return transaction +// } diff --git a/model_bump.go b/model_bump.go index 9d31e4d8..ba196cda 100644 --- a/model_bump.go +++ b/model_bump.go @@ -3,27 +3,165 @@ package bux import ( "bytes" "database/sql/driver" + "encoding/hex" "encoding/json" + "errors" "fmt" "reflect" + "sort" + + "github.com/libsv/go-bt/v2" ) -// BUMP represents BUMP format +// BUMPPaths represents a slice of BUMPs (BSV Unified Merkle Paths) +type BUMPPaths []BUMP + +// BUMP represents BUMP (BSV Unified Merkle Path) format type BUMP struct { - BlockHeight uint64 `json:"blockHeight,string"` - Path []BUMPPathMap `json:"path"` + BlockHeight uint64 `json:"blockHeight,string"` + Path [][]BUMPNode `json:"path"` + // private field for storing already used offsets to avoid duplicate nodes + allNodes []map[uint64]bool } -// BUMPPathMap represents map with pathes -type BUMPPathMap map[string]BUMPPathElement - -// BUMPPathElement represents each BUMP path element -type BUMPPathElement struct { - Hash string `json:"hash,omitempty"` +// BUMPLeaf represents each BUMP path element +type BUMPNode struct { + Offset uint64 `json:"offset,string"` + Hash string `json:"hash"` TxId bool `json:"txid,omitempty"` Duplicate bool `json:"duplicate,omitempty"` } +// CalculateMergedBUMP calculates Merged BUMP from a slice of Merkle Proofs +func CalculateMergedBUMP(mp []MerkleProof) (BUMP, error) { + bump := BUMP{} + + if len(mp) == 0 || mp == nil { + return bump, nil + } + + height := len(mp[0].Nodes) + if height > maxCmpHeight { + return bump, + fmt.Errorf("BUMP cannot be higher than %d", maxCmpHeight) + } + + for _, m := range mp { + if height != len(m.Nodes) { + return bump, + errors.New("Merged BUMP cannot be obtained from Merkle Proofs of different heights") + } + } + + bump.Path = make([][]BUMPNode, height) + bump.allNodes = make([]map[uint64]bool, height) + for i := range bump.allNodes { + bump.allNodes[i] = make(map[uint64]bool, 0) + } + + for _, m := range mp { + bumpToAdd := m.ToBUMP() + err := bump.add(bumpToAdd) + if err != nil { + return BUMP{}, err + } + } + + for _, p := range bump.Path { + sort.Slice(p, func(i, j int) bool { + return p[i].Offset < p[j].Offset + }) + } + + return bump, nil +} + +func (bump *BUMP) add(b BUMP) error { + if len(bump.Path) != len(b.Path) { + return errors.New("BUMPs with different heights cannot be merged") + } + + for i := range b.Path { + for _, v := range b.Path[i] { + _, value := bump.allNodes[i][v.Offset] + if !value { + bump.Path[i] = append(bump.Path[i], v) + bump.allNodes[i][v.Offset] = true + continue + } + if i == 0 && value && v.TxId { + for j := range bump.Path[i] { + if bump.Path[i][j].Offset == v.Offset { + bump.Path[i][j] = v + } + } + } + } + } + + return nil +} + +// Bytes returns BUMPPaths bytes +func (bumpPaths *BUMPPaths) Bytes() []byte { + var buff bytes.Buffer + + for _, bump := range *bumpPaths { + bytes, _ := hex.DecodeString(bump.Hex()) + buff.Write(bytes) + } + + return buff.Bytes() +} + +// Hex returns BUMP in hex format +func (bump *BUMP) Hex() string { + return bump.bytesBuffer().String() +} + +func (bump *BUMP) bytesBuffer() *bytes.Buffer { + var buff bytes.Buffer + buff.WriteString(hex.EncodeToString(bt.VarInt(bump.BlockHeight).Bytes())) + + height := len(bump.Path) + buff.WriteString(leadingZeroInt(height)) + + for i := 0; i < height; i++ { + nodes := bump.Path[i] + + nLeafs := len(nodes) + buff.WriteString(hex.EncodeToString(bt.VarInt(nLeafs).Bytes())) + for _, n := range nodes { + buff.WriteString(hex.EncodeToString(bt.VarInt(n.Offset).Bytes())) + buff.WriteString(fmt.Sprintf("%02x", flags(n.TxId, n.Duplicate))) + decodedHex, _ := hex.DecodeString(n.Hash) + buff.WriteString(hex.EncodeToString(bt.ReverseBytes(decodedHex))) + } + } + return &buff +} + +// In case the offset or height is less than 10, they must be written with a leading zero +func leadingZeroInt(i int) string { + return fmt.Sprintf("%02x", i) +} + +func flags(txId, duplicate bool) byte { + var ( + dataFlag byte = 00 + duplicateFlag byte = 01 + txIdFlag byte = 02 + ) + + if duplicate { + return duplicateFlag + } + if txId { + return txIdFlag + } + return dataFlag +} + // Scan scan value into Json, implements sql.Scanner interface func (m *BUMP) Scan(value interface{}) error { if value == nil { diff --git a/model_bump_test.go b/model_bump_test.go new file mode 100644 index 00000000..e60f3eaf --- /dev/null +++ b/model_bump_test.go @@ -0,0 +1,738 @@ +package bux + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +// TestBUMPModel_CalculateBUMP will test the method CalculateMergedBUMP() +func TestBUMPModel_CalculateBUMP(t *testing.T) { + t.Parallel() + + t.Run("Single Merkle Proof", func(t *testing.T) { + // given + merkleProofs := []MerkleProof{ + { + Index: 1, + TxOrID: "txId", + Nodes: []string{"node0", "node1", "node2", "node3"}, + }, + } + expectedBUMP := BUMP{ + BlockHeight: 0, + Path: [][]BUMPNode{ + { + { + Offset: 0, + Hash: "node0", + }, + { + Offset: 1, + Hash: "txId", + TxId: true, + }, + }, + { + { + Offset: 1, + Hash: "node1", + }, + }, + { + { + Offset: 1, + Hash: "node2", + }, + }, + { + { + Offset: 1, + Hash: "node3", + }, + }, + }, + allNodes: []map[uint64]bool{ + { + 0: true, + 1: true, + }, + { + 1: true, + }, + { + 1: true, + }, + { + 1: true, + }, + }, + } + + // when + bump, err := CalculateMergedBUMP(merkleProofs) + + // then + assert.NoError(t, err) + assert.Equal(t, expectedBUMP, bump) + }) + + t.Run("Slice of Merkle Proofs", func(t *testing.T) { + // given + merkleProofs := []MerkleProof{ + { + Index: 2, + TxOrID: "txId1", + Nodes: []string{"D", "AB", "EFGH", "IJKLMNOP"}, + }, + { + Index: 7, + TxOrID: "txId2", + Nodes: []string{"G", "EF", "ABCD", "IJKLMNOP"}, + }, + { + Index: 13, + TxOrID: "txId3", + Nodes: []string{"M", "OP", "IJKL", "ABCDEFGH"}, + }, + } + expectedBUMP := BUMP{ + BlockHeight: 0, + Path: [][]BUMPNode{ + { + { + Offset: 2, + Hash: "txId1", + TxId: true, + }, + { + Offset: 3, + Hash: "D", + }, + { + Offset: 6, + Hash: "G", + }, + { + Offset: 7, + Hash: "txId2", + TxId: true, + }, + { + Offset: 12, + Hash: "M", + }, + { + Offset: 13, + Hash: "txId3", + TxId: true, + }, + }, + { + { + Offset: 0, + Hash: "AB", + }, + { + Offset: 2, + Hash: "EF", + }, + { + Offset: 7, + Hash: "OP", + }, + }, + { + { + Offset: 0, + Hash: "ABCD", + }, + { + Offset: 1, + Hash: "EFGH", + }, + { + Offset: 2, + Hash: "IJKL", + }, + }, + { + { + Offset: 0, + Hash: "ABCDEFGH", + }, + { + Offset: 1, + Hash: "IJKLMNOP", + }, + }, + }, + allNodes: []map[uint64]bool{ + { + 2: true, + 3: true, + 6: true, + 7: true, + 12: true, + 13: true, + }, + { + 0: true, + 2: true, + 7: true, + }, + { + 0: true, + 1: true, + 2: true, + }, + { + 0: true, + 1: true, + }, + }, + } + + // when + bump, err := CalculateMergedBUMP(merkleProofs) + + // then + assert.NoError(t, err) + assert.Equal(t, expectedBUMP, bump) + }) + + t.Run("Paired Transactions", func(t *testing.T) { + // given + merkleProofs := []MerkleProof{ + { + Index: 8, + TxOrID: "I", + Nodes: []string{"J", "KL", "MNOP", "ABCDEFGH"}, + }, + { + Index: 9, + TxOrID: "J", + Nodes: []string{"I", "KL", "MNOP", "ABCDEFGH"}, + }, + } + expectedBUMP := BUMP{ + BlockHeight: 0, + Path: [][]BUMPNode{ + { + { + Offset: 8, + Hash: "I", + TxId: true, + }, + { + Offset: 9, + Hash: "J", + TxId: true, + }, + }, + { + { + Offset: 5, + Hash: "KL", + }, + }, + { + { + Offset: 3, + Hash: "MNOP", + }, + }, + { + { + Offset: 0, + Hash: "ABCDEFGH", + }, + }, + }, + allNodes: []map[uint64]bool{ + { + 8: true, + 9: true, + }, + { + 5: true, + }, + { + 3: true, + }, + { + 0: true, + }, + }, + } + + // when + bump, err := CalculateMergedBUMP(merkleProofs) + + // then + assert.NoError(t, err) + assert.Equal(t, expectedBUMP, bump) + }) + + t.Run("Different sizes of Merkle Proofs", func(t *testing.T) { + // given + merkleProofs := []MerkleProof{ + { + Index: 8, + TxOrID: "I", + Nodes: []string{"J", "KL", "MNOP", "ABCDEFGH"}, + }, + { + Index: 9, + TxOrID: "J", + Nodes: []string{"I", "KL", "MNOP"}, + }, + } + + // when + bump, err := CalculateMergedBUMP(merkleProofs) + + // then + assert.Error(t, err) + assert.Equal(t, bump, BUMP{}) + }) + + t.Run("Empty slice of Merkle Proofs", func(t *testing.T) { + // given + merkleProof := []MerkleProof{} + + // when + bump, err := CalculateMergedBUMP(merkleProof) + + // then + assert.NoError(t, err) + assert.Equal(t, bump, BUMP{}) + }) + + t.Run("Slice of empty Merkle Proofs", func(t *testing.T) { + // given + merkleProofs := []MerkleProof{ + {}, {}, {}, + } + // when + bump, err := CalculateMergedBUMP(merkleProofs) + + // then + assert.NoError(t, err) + assert.Equal(t, bump, BUMP{ + BlockHeight: 0, + Path: [][]BUMPNode{}, + allNodes: []map[uint64]bool{}, + }) + }) +} + +// TestBUMPModel_Hex will test the method Hex() +func TestBUMPModel_Hex(t *testing.T) { + t.Run("BUMP to HEX - simple example", func(t *testing.T) { + // given + expectedHex := "01" + // block height + "03" + // tree height + // ---- LEVEL 0 ----- + "02" + // nLeafes on level 0 + "00" + // offset + "00" + // flag - data follows, not a client txid + "0a" + // hash + "01" + // offset + "02" + // flag - data follows, not a client txid + "0b" + // hash + // ---- LEVEL 1 ----- + "01" + // nLeafes on level 0 + "01" + // offset + "00" + // flag - data follows, not a client txid + "cd" + // hash + // ---- LEVEL 2 ----- + "01" + // nLeafes on level 0 + "01" + // offset + "00" + // flag - data follows, not a client txid + "abef" // hash (little endian - reversed bytes) + bump := BUMP{ + BlockHeight: 1, + Path: [][]BUMPNode{ + { + { + Offset: 0, + Hash: "0a", + }, + { + Offset: 1, + TxId: true, + Hash: "0b", + }, + }, + { + { + Offset: 1, + Hash: "cd", + }, + }, + { + { + Offset: 1, + Hash: "efab", + }, + }, + }, + } + + // when + actualHex := bump.Hex() + + // then + assert.Equal(t, expectedHex, actualHex) + }) + + t.Run("BUMP to HEX - standard example", func(t *testing.T) { + // given + expectedHex := "fe8a6a0c000c04fde80b0011774f01d26412f0d16ea3f0447be0b5ebec67b0782e321a7a01cbdf7f734e30fde90b02004e53753e3fe4667073063a17987292cfdea278824e9888e52180581d7188d8fdea0b025e441996fc53f0191d649e68a200e752fb5f39e0d5617083408fa179ddc5c998fdeb0b0102fdf405000671394f72237d08a4277f4435e5b6edf7adc272f25effef27cdfe805ce71a81fdf50500262bccabec6c4af3ed00cc7a7414edea9c5efa92fb8623dd6160a001450a528201fdfb020101fd7c010093b3efca9b77ddec914f8effac691ecb54e2c81d0ab81cbc4c4b93befe418e8501bf01015e005881826eb6973c54003a02118fe270f03d46d02681c8bc71cd44c613e86302f8012e00e07a2bb8bb75e5accff266022e1e5e6e7b4d6d943a04faadcf2ab4a22f796ff30116008120cafa17309c0bb0e0ffce835286b3a2dcae48e4497ae2d2b7ced4f051507d010a00502e59ac92f46543c23006bff855d96f5e648043f0fb87a7a5949e6a9bebae430104001ccd9f8f64f4d0489b30cc815351cf425e0e78ad79a589350e4341ac165dbe45010301010000af8764ce7e1cc132ab5ed2229a005c87201c9a5ee15c0f91dd53eff31ab30cd4" + bump := BUMP{ + BlockHeight: 813706, + Path: [][]BUMPNode{ + { + { + Offset: 3048, + Hash: "304e737fdfcb017a1a322e78b067ecebb5e07b44f0a36ed1f01264d2014f7711", + }, + { + Offset: 3049, + TxId: true, + Hash: "d888711d588021e588984e8278a2decf927298173a06737066e43f3e75534e00", + }, + { + Offset: 3050, + TxId: true, + Hash: "98c9c5dd79a18f40837061d5e0395ffb52e700a2689e641d19f053fc9619445e", + }, + { + Offset: 3051, + Duplicate: true, + }, + }, + { + { + Offset: 1524, + Hash: "811ae75c80fecd27efff5ef272c2adf7edb6e535447f27a4087d23724f397106", + }, + { + Offset: 1525, + Hash: "82520a4501a06061dd2386fb92fa5e9ceaed14747acc00edf34a6cecabcc2b26", + }, + }, + { + { + Offset: 763, + Duplicate: true, + }, + }, + { + { + Offset: 380, + Hash: "858e41febe934b4cbc1cb80a1dc8e254cb1e69acff8e4f91ecdd779bcaefb393", + }, + }, + { + { + Offset: 191, + Duplicate: true, + }, + }, + { + { + Offset: 94, + Hash: "f80263e813c644cd71bcc88126d0463df070e28f11023a00543c97b66e828158", + }, + }, + { + { + Offset: 46, + Hash: "f36f792fa2b42acfadfa043a946d4d7b6e5e1e2e0266f2cface575bbb82b7ae0", + }, + }, + { + { + Offset: 22, + Hash: "7d5051f0d4ceb7d2e27a49e448aedca2b3865283ceffe0b00b9c3017faca2081", + }, + }, + { + { + Offset: 10, + Hash: "43aeeb9b6a9e94a5a787fbf04380645e6fd955f8bf0630c24365f492ac592e50", + }, + }, + { + { + Offset: 4, + Hash: "45be5d16ac41430e3589a579ad780e5e42cf515381cc309b48d0f4648f9fcd1c", + }, + }, + { + { + Offset: 3, + Duplicate: true, + }, + }, + { + { + Offset: 0, + Hash: "d40cb31af3ef53dd910f5ce15e9a1c20875c009a22d25eab32c11c7ece6487af", + }, + }, + }, + } + + // when + actualHex := bump.Hex() + + // then + assert.Equal(t, expectedHex, actualHex) + }) +} + +// TestBUMPModel_CalculateMergedBUMPAndHex will test both the CalculateMergedBUMP() and Hex() methods. +func TestBUMPModel_CalculateMergedBUMPAndHex(t *testing.T) { + t.Parallel() + + t.Run("Real Merkle Proof", func(t *testing.T) { + // given + merkleProof := []MerkleProof{ + { + Index: 1153, + TxOrID: "2130b63dcbfe1356a30137fe9578691f59c6cf42d5e8928a800619de7f8e14da", + Nodes: []string{ + "4d4bde1dc35c87bba992944ec0379e0bb009916108113dc3de1c4aecda6457a3", + "168595f83accfcec66d0e0df06df89e6a9a2eaa3aa69427fb86cb54d8ea5b1e9", + "c2edd41b237844a45a0e6248a9e7c520af303a5c91cc8a443ad0075d6a3dec79", + "bdd0fddf45fee49324e55dfc6fdb9044c86dc5be3dbf941a80b395838495ac09", + "3e5ec052b86621b5691d15ad54fab2551c27a36d9ab84f428a304b607aa33d33", + "9feb9b1aaa2cd8486edcacb60b9d477a89aec5867d292608c3c59a18324d608a", + "22e1db219f8d874315845b7cee84832dc0865b5f9e18221a011043a4d6704e7d", + "7f118890abd8df3f8a51c344da0f9235609f5fd380e38cfe519e81262aedb2a7", + "20dcf60bbcecd2f587e8d3344fb68c71f2f2f7a6cc85589b9031c2312a433fe6", + "0be65c1f3b53b937608f8426e43cb41c1db31227d0d9933e8b0ce3b8cc30d67f", + "a8036cf77d8de296f60607862b228174733a30486a37962a56465f5e8c214d87", + "b8e4d7975537bb775e320f01f874c06cf38dd2ce7bb836a1afe0337aeb9fb06f", + "88e6b0bd93e02b057ea43a80a5bb8cf9673f143340af3f569fe0c55c085e5efb", + "15f731176e17f4402802d5be3893419e690225e732d69dfd27f6e614f188233d", + }, + }, + } + expectedBUMP := BUMP{ + BlockHeight: 0, + Path: [][]BUMPNode{ + { + { + Offset: 1152, + Hash: "4d4bde1dc35c87bba992944ec0379e0bb009916108113dc3de1c4aecda6457a3", + }, + { + Offset: 1153, + Hash: "2130b63dcbfe1356a30137fe9578691f59c6cf42d5e8928a800619de7f8e14da", + TxId: true, + }, + }, + { + { + Offset: 577, + Hash: "168595f83accfcec66d0e0df06df89e6a9a2eaa3aa69427fb86cb54d8ea5b1e9", + }, + }, + { + { + Offset: 289, + Hash: "c2edd41b237844a45a0e6248a9e7c520af303a5c91cc8a443ad0075d6a3dec79", + }, + }, + { + { + Offset: 145, + Hash: "bdd0fddf45fee49324e55dfc6fdb9044c86dc5be3dbf941a80b395838495ac09", + }, + }, + { + { + Offset: 73, + Hash: "3e5ec052b86621b5691d15ad54fab2551c27a36d9ab84f428a304b607aa33d33", + }, + }, + { + { + Offset: 37, + Hash: "9feb9b1aaa2cd8486edcacb60b9d477a89aec5867d292608c3c59a18324d608a", + }, + }, + { + { + Offset: 19, + Hash: "22e1db219f8d874315845b7cee84832dc0865b5f9e18221a011043a4d6704e7d", + }, + }, + { + { + Offset: 8, + Hash: "7f118890abd8df3f8a51c344da0f9235609f5fd380e38cfe519e81262aedb2a7", + }, + }, + { + { + Offset: 5, + Hash: "20dcf60bbcecd2f587e8d3344fb68c71f2f2f7a6cc85589b9031c2312a433fe6", + }, + }, + { + { + Offset: 3, + Hash: "0be65c1f3b53b937608f8426e43cb41c1db31227d0d9933e8b0ce3b8cc30d67f", + }, + }, + { + { + Offset: 0, + Hash: "a8036cf77d8de296f60607862b228174733a30486a37962a56465f5e8c214d87", + }, + }, + { + { + Offset: 1, + Hash: "b8e4d7975537bb775e320f01f874c06cf38dd2ce7bb836a1afe0337aeb9fb06f", + }, + }, + { + { + Offset: 1, + Hash: "88e6b0bd93e02b057ea43a80a5bb8cf9673f143340af3f569fe0c55c085e5efb", + }, + }, + { + { + Offset: 1, + Hash: "15f731176e17f4402802d5be3893419e690225e732d69dfd27f6e614f188233d", + }, + }, + }, + allNodes: []map[uint64]bool{ + { + 1152: true, + 1153: true, + }, + { + 577: true, + }, + { + 289: true, + }, + { + 145: true, + }, + { + 73: true, + }, + { + 37: true, + }, + { + 19: true, + }, + { + 8: true, + }, + { + 5: true, + }, + { + 3: true, + }, + { + 0: true, + }, + { + 1: true, + }, + { + 1: true, + }, + { + 1: true, + }, + }, + } + expectedHex := "00" + // block height (dummy value) + "0e" + // 13 - tree height + "02" + // nLeafs at this level + "fd8004" + // offset - 1152 + "00" + // flags - data follows, not a cilent txid + "a35764daec4a1cdec33d1108619109b00b9e37c04e9492a9bb875cc31dde4b4d" + // hash + "fd8104" + // offset - 1153 + "02" + // flags - data follows, cilent txid + "da148e7fde1906808a92e8d542cfc6591f697895fe3701a35613fecb3db63021" + // hash + // ---------------------- + // implied end of leaves at this height + // height of next leaves is therefore 12 + "01" + + "fd4102" + + "00" + + "e9b1a58e4db56cb87f4269aaa3eaa2a9e689df06dfe0d066ecfccc3af8958516" + + "01" + + "fd2101" + + "00" + + "79ec3d6a5d07d03a448acc915c3a30af20c5e7a948620e5aa44478231bd4edc2" + + "01" + + "91" + + "00" + + "09ac95848395b3801a94bf3dbec56dc84490db6ffc5de52493e4fe45dffdd0bd" + + "01" + + "49" + + "00" + + "333da37a604b308a424fb89a6da3271c55b2fa54ad151d69b52166b852c05e3e" + + "01" + + "25" + + "00" + + "8a604d32189ac5c30826297d86c5ae897a479d0bb6acdc6e48d82caa1a9beb9f" + + "01" + + "13" + + "00" + + "7d4e70d6a44310011a22189e5f5b86c02d8384ee7c5b841543878d9f21dbe122" + + "01" + + "08" + + "00" + + "a7b2ed2a26819e51fe8ce380d35f9f6035920fda44c3518a3fdfd8ab9088117f" + + "01" + + "05" + + "00" + + "e63f432a31c231909b5885cca6f7f2f2718cb64f34d3e887f5d2ecbc0bf6dc20" + + "01" + + "03" + + "00" + + "7fd630ccb8e30c8b3e93d9d02712b31d1cb43ce426848f6037b9533b1f5ce60b" + + "01" + + "00" + + "00" + + "874d218c5e5f46562a96376a48303a737481222b860706f696e28d7df76c03a8" + + "01" + + "01" + + "00" + + "6fb09feb7a33e0afa136b87bced28df36cc074f8010f325e77bb375597d7e4b8" + + "01" + + "01" + + "00" + + "fb5e5e085cc5e09f563faf4033143f67f98cbba5803aa47e052be093bdb0e688" + + "01" + + "01" + + "00" + + "3d2388f114e6f627fd9dd632e72502699e419338bed5022840f4176e1731f715" + + // when + bump, err := CalculateMergedBUMP(merkleProof) + actualHex := bump.Hex() + + // then + assert.NoError(t, err) + assert.Equal(t, expectedBUMP, bump) + assert.Equal(t, expectedHex, actualHex) + }) +} diff --git a/model_compound_merkle_path.go b/model_compound_merkle_path.go index 1e81fd4e..a30c3803 100644 --- a/model_compound_merkle_path.go +++ b/model_compound_merkle_path.go @@ -102,11 +102,6 @@ func CalculateCompoundMerklePath(mp []MerkleProof) (CompoundMerklePath, error) { return cmp, nil } -// In case the offset or height is less than 10, they must be written with a leading zero -func leadingZeroInt(i int) string { - return fmt.Sprintf("%02x", i) -} - func (cmp *CompoundMerklePath) add(c CompoundMerklePath) error { if len(*cmp) != len(c) { return errors.New("Compound Merkle Path with different height cannot be added") diff --git a/model_draft_transactions.go b/model_draft_transactions.go index 80f8c309..4e686166 100644 --- a/model_draft_transactions.go +++ b/model_draft_transactions.go @@ -9,9 +9,6 @@ import ( "math/big" "time" - "github.com/BuxOrg/bux/chainstate" - "github.com/BuxOrg/bux/taskmanager" - "github.com/BuxOrg/bux/utils" "github.com/bitcoinschema/go-bitcoin/v2" "github.com/libsv/go-bk/bec" "github.com/libsv/go-bk/bip32" @@ -19,6 +16,10 @@ import ( "github.com/libsv/go-bt/v2/bscript" "github.com/mrz1836/go-datastore" "github.com/pkg/errors" + + "github.com/BuxOrg/bux/chainstate" + "github.com/BuxOrg/bux/taskmanager" + "github.com/BuxOrg/bux/utils" ) // DraftTransaction is an object representing the draft BitCoin transaction prior to the final transaction @@ -38,11 +39,11 @@ type DraftTransaction struct { Status DraftStatus `json:"status" toml:"status" yaml:"status" gorm:"<-;type:varchar(10);index;comment:This is the status of the draft" bson:"status"` FinalTxID string `json:"final_tx_id,omitempty" toml:"final_tx_id" yaml:"final_tx_id" gorm:"<-;type:char(64);index;comment:This is the final tx ID" bson:"final_tx_id,omitempty"` CompoundMerklePathes CMPSlice `json:"compound_merkle_pathes,omitempty" toml:"compound_merkle_pathes" yaml:"compound_merkle_pathes" gorm:"<-;type:text;comment:Slice of Compound Merkle Path" bson:"compound_merkle_pathes,omitempty"` + BumpPaths BUMPPaths `json:"bump_pathes,omitempty" toml:"bump_pathes" yaml:"bump_pathes" gorm:"<-;type:text;comment:Slice of BUMPs (BSV Unified Merkle Paths)" bson:"bump_pathes,omitempty"` } // newDraftTransaction will start a new draft tx func newDraftTransaction(rawXpubKey string, config *TransactionConfig, opts ...ModelOps) *DraftTransaction { - // Random GUID id, _ := utils.RandomHex(32) @@ -79,8 +80,8 @@ func newDraftTransaction(rawXpubKey string, config *TransactionConfig, opts ...M // getDraftTransactionID will get the draft transaction with the given conditions func getDraftTransactionID(ctx context.Context, xPubID, id string, - opts ...ModelOps) (*DraftTransaction, error) { - + opts ...ModelOps, +) (*DraftTransaction, error) { // Get the record config := &TransactionConfig{} conditions := map[string]interface{}{ @@ -106,8 +107,8 @@ func getDraftTransactionID(ctx context.Context, xPubID, id string, // getDraftTransactions will get all the draft transactions with the given conditions func getDraftTransactions(ctx context.Context, metadata *Metadata, conditions *map[string]interface{}, - queryParams *datastore.QueryParams, opts ...ModelOps) ([]*DraftTransaction, error) { - + queryParams *datastore.QueryParams, opts ...ModelOps, +) ([]*DraftTransaction, error) { modelItems := make([]*DraftTransaction, 0) if err := getModelsByConditions(ctx, ModelDraftTransaction, &modelItems, metadata, conditions, queryParams, opts...); err != nil { return nil, err @@ -118,8 +119,8 @@ func getDraftTransactions(ctx context.Context, metadata *Metadata, conditions *m // getDraftTransactionsCount will get a count of all the access keys with the given conditions func getDraftTransactionsCount(ctx context.Context, metadata *Metadata, conditions *map[string]interface{}, - opts ...ModelOps) (int64, error) { - + opts ...ModelOps, +) (int64, error) { return getModelCountByConditions(ctx, ModelDraftTransaction, DraftTransaction{}, metadata, conditions, opts...) } @@ -158,7 +159,6 @@ func (m *DraftTransaction) GetID() string { // processConfigOutputs will process all the outputs, // doing any lookups and creating locking scripts func (m *DraftTransaction) processConfigOutputs(ctx context.Context) error { - // Get the client c := m.Client() // Get sender's paymail @@ -229,7 +229,6 @@ func (m *DraftTransaction) processConfigOutputs(ctx context.Context) error { // createTransactionHex will create the transaction with the given inputs and outputs func (m *DraftTransaction) createTransactionHex(ctx context.Context) (err error) { - // Check that we have outputs if len(m.Configuration.Outputs) == 0 && m.Configuration.SendAllTo == nil { return ErrMissingTransactionOutputs @@ -420,6 +419,13 @@ func (m *DraftTransaction) createTransactionHex(ctx context.Context) (err error) } m.CompoundMerklePathes = append(m.CompoundMerklePathes, cmp) } + for _, v := range merkleProofs { + bump, err := CalculateMergedBUMP(v) + if err != nil { + return err + } + m.BumpPaths = append(m.BumpPaths, bump) + } // Create the final hex (without signatures) m.Hex = tx.String() @@ -512,7 +518,6 @@ func (m *DraftTransaction) addOutputsToTx(tx *bt.Tx) (err error) { sc.Script, ); err != nil { return - } scriptType := sc.ScriptType @@ -555,7 +560,6 @@ func (m *DraftTransaction) addOutputsToTx(tx *bt.Tx) (err error) { // setChangeDestination will make a new change destination func (m *DraftTransaction) setChangeDestination(ctx context.Context, satoshisChange uint64, fee uint64) (uint64, error) { - m.Configuration.ChangeSatoshis = satoshisChange useExistingOutputsForChange := make([]int, 0) @@ -629,7 +633,6 @@ func (m *DraftTransaction) setChangeDestination(ctx context.Context, satoshisCha // split the change satoshis amongst the change destinations according to the strategy given in config func (m *DraftTransaction) getChangeSatoshis(satoshisChange uint64) (changeSatoshis map[string]uint64, err error) { - changeSatoshis = make(map[string]uint64) var lastDestination string changeUsed := uint64(0) @@ -670,7 +673,6 @@ func (m *DraftTransaction) getChangeSatoshis(satoshisChange uint64) (changeSatos // setChangeDestinations will set the change destinations based on the number func (m *DraftTransaction) setChangeDestinations(ctx context.Context, numberOfDestinations int) error { - // Set the options opts := m.GetOptions(false) optsNew := append(opts, New()) @@ -759,7 +761,6 @@ func (m *DraftTransaction) getTotalSatoshis() (satoshis uint64) { // BeforeCreating will fire before the model is being inserted into the Datastore func (m *DraftTransaction) BeforeCreating(ctx context.Context) (err error) { - m.DebugLog("starting: " + m.Name() + " BeforeCreating hook...") // Prepare the transaction @@ -804,7 +805,6 @@ func (m *DraftTransaction) AfterUpdated(ctx context.Context) error { // RegisterTasks will register the model specific tasks on client initialization func (m *DraftTransaction) RegisterTasks() error { - // No task manager loaded? tm := m.Client().Taskmanager() if tm == nil { @@ -844,7 +844,6 @@ func (m *DraftTransaction) Migrate(client datastore.ClientInterface) error { // SignInputsWithKey will sign all the inputs using a key (string) (helper method) func (m *DraftTransaction) SignInputsWithKey(xPrivKey string) (signedHex string, err error) { - // Decode the xPriv using the key var xPriv *bip32.ExtendedKey if xPriv, err = bip32.NewKeyFromString(xPrivKey); err != nil { @@ -856,7 +855,6 @@ func (m *DraftTransaction) SignInputsWithKey(xPrivKey string) (signedHex string, // SignInputs will sign all the inputs using the given xPriv key func (m *DraftTransaction) SignInputs(xPriv *bip32.ExtendedKey) (signedHex string, err error) { - // Start a bt draft transaction var txDraft *bt.Tx if txDraft, err = bt.NewTxFromString(m.Hex); err != nil { diff --git a/model_merkle_proof.go b/model_merkle_proof.go index 807509a2..6f0caded 100644 --- a/model_merkle_proof.go +++ b/model_merkle_proof.go @@ -29,7 +29,7 @@ func (m MerkleProof) ToCompoundMerklePath() CompoundMerklePath { cmp[0] = pathMap for i := 1; i < height; i++ { path := make(map[string]bt.VarInt, 1) - offset = parrentOffset(offset) + offset = parentOffset(offset) path[m.Nodes[i]] = bt.VarInt(offset) cmp[i] = path } @@ -43,7 +43,7 @@ func offsetPair(offset uint64) uint64 { return offset - 1 } -func parrentOffset(offset uint64) uint64 { +func parentOffset(offset uint64) uint64 { return offsetPair(offset / 2) } @@ -82,21 +82,44 @@ func (m MerkleProof) Value() (driver.Value, error) { func (m *MerkleProof) ToBUMP() BUMP { bump := BUMP{} + height := len(m.Nodes) if height == 0 { return bump } - path := make([]BUMPPathMap, 0) - txIdPath := make(BUMPPathMap, 2) + + path := make([][]BUMPNode, 0) + txIdPath := make([]BUMPNode, 2) + offset := m.Index - op := offsetPair(offset) - txIdPath[fmt.Sprint(offset)] = BUMPPathElement{Hash: m.TxOrID, TxId: true} - txIdPath[fmt.Sprint(op)] = BUMPPathElement{Hash: m.Nodes[0]} + pairOffset := offsetPair(offset) + + txIdPath1 := BUMPNode{ + Offset: offset, + Hash: m.TxOrID, + TxId: true, + } + txIdPath2 := BUMPNode{ + Offset: offsetPair(offset), + Hash: m.Nodes[0], + } + + if offset < pairOffset { + txIdPath[0] = txIdPath1 + txIdPath[1] = txIdPath2 + } else { + txIdPath[0] = txIdPath2 + txIdPath[1] = txIdPath1 + } + path = append(path, txIdPath) for i := 1; i < height; i++ { - p := make(BUMPPathMap, 1) - offset = parrentOffset(offset) - p[fmt.Sprint(offset)] = BUMPPathElement{Hash: m.Nodes[i]} + p := make([]BUMPNode, 0) + offset = parentOffset(offset) + p = append(p, BUMPNode{ + Offset: offset, + Hash: m.Nodes[i], + }) path = append(path, p) } bump.Path = path diff --git a/model_merkle_proof_test.go b/model_merkle_proof_test.go index 8b877b4a..d8c8032e 100644 --- a/model_merkle_proof_test.go +++ b/model_merkle_proof_test.go @@ -86,19 +86,19 @@ func TestMerkleProofModel_ToBUMP(t *testing.T) { Nodes: []string{"node0", "node1", "node2", "node3"}, } expectedBUMP := BUMP{ - Path: []BUMPPathMap{ + Path: [][]BUMPNode{ { - "0": BUMPPathElement{Hash: "node0"}, - "1": BUMPPathElement{Hash: "txId", TxId: true}, + {Offset: 0, Hash: "node0"}, + {Offset: 1, Hash: "txId", TxId: true}, }, { - "1": BUMPPathElement{Hash: "node1"}, + {Offset: 1, Hash: "node1"}, }, { - "1": BUMPPathElement{Hash: "node2"}, + {Offset: 1, Hash: "node2"}, }, { - "1": BUMPPathElement{Hash: "node3"}, + {Offset: 1, Hash: "node3"}, }, }, } @@ -113,22 +113,22 @@ func TestMerkleProofModel_ToBUMP(t *testing.T) { Nodes: []string{"node0", "node1", "node2", "node3", "node4"}, } expectedBUMP := BUMP{ - Path: []BUMPPathMap{ + Path: [][]BUMPNode{ { - "14": BUMPPathElement{Hash: "txId", TxId: true}, - "15": BUMPPathElement{Hash: "node0"}, + {Offset: 14, Hash: "txId", TxId: true}, + {Offset: 15, Hash: "node0"}, }, { - "6": BUMPPathElement{Hash: "node1"}, + {Offset: 6, Hash: "node1"}, }, { - "2": BUMPPathElement{Hash: "node2"}, + {Offset: 2, Hash: "node2"}, }, { - "0": BUMPPathElement{Hash: "node3"}, + {Offset: 0, Hash: "node3"}, }, { - "1": BUMPPathElement{Hash: "node4"}, + {Offset: 1, Hash: "node4"}, }, }, }