Skip to content
This repository has been archived by the owner on Apr 2, 2024. It is now read-only.

feat(BUX-294): Replacing CMPSlice with BUMPPaths in DraftTransaction #448

Merged
merged 6 commits into from
Oct 27, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
210 changes: 105 additions & 105 deletions beef_tx_test.go
Original file line number Diff line number Diff line change
@@ -1,107 +1,107 @@
package bux

import (
"context"
"testing"

"github.com/libsv/go-bc"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

// Test_ToBeefHex verifies BEEF hex generation: it succeeds when every
// ancestor transaction is mined, and returns an error when a parent is
// still unmined.
func Test_ToBeefHex(t *testing.T) {
	t.Run("all parents txs are already mined", func(t *testing.T) {
		// given
		ctx, client, cleanup := initSimpleTestCase(t)
		defer cleanup()

		grandpa := addGrandpaTx(ctx, t, client)
		minedParent := createTxWithDraft(ctx, t, client, grandpa, true)
		tx := createTxWithDraft(ctx, t, client, minedParent, false)

		// when
		hex, err := ToBeefHex(ctx, tx)

		// then
		assert.NoError(t, err)
		assert.NotEmpty(t, hex)
	})

	t.Run("some parents txs are not mined yet", func(t *testing.T) {
		// Error expected! this should be changed in the future. right now the
		// test case has been written to make sure the system doesn't panic in
		// such a situation.

		// given
		ctx, client, cleanup := initSimpleTestCase(t)
		defer cleanup()

		grandpa := addGrandpaTx(ctx, t, client)
		unminedParent := createTxWithDraft(ctx, t, client, grandpa, false)
		tx := createTxWithDraft(ctx, t, client, unminedParent, false)

		// when
		hex, err := ToBeefHex(ctx, tx)

		// then
		assert.Error(t, err)
		assert.Empty(t, hex)
	})
}

// addGrandpaTx stores a transaction marked as mined (block height 1 with a
// Merkle proof attached) to act as the oldest ancestor in the test chain.
func addGrandpaTx(ctx context.Context, t *testing.T, client ClientInterface) *Transaction {
	tx := newTransaction(testTx2Hex, append(client.DefaultModelOptions(), New())...)
	tx.BlockHeight = 1

	// Attaching a Merkle proof is what marks the transaction as mined.
	proof := bc.MerkleProof{
		TxOrID: "111111111111111111111111111111111111111",
		Nodes:  []string{"n1", "n2"},
	}
	tx.MerkleProof = MerkleProof(proof)

	require.NoError(t, tx.Save(ctx))

	return tx
}

// createTxWithDraft builds a draft transaction spending output 0 of parentTx,
// records the resulting transaction, and — when mined is true — marks it as
// mined (block height + Merkle proof) before saving.
func createTxWithDraft(ctx context.Context, t *testing.T, client ClientInterface, parentTx *Transaction, mined bool) *Transaction {
	draft := newDraftTransaction(
		testXPub, &TransactionConfig{
			Inputs: []*TransactionInput{{Utxo: *newUtxoFromTxID(parentTx.GetID(), 0, append(client.DefaultModelOptions(), New())...)}},
			Outputs: []*TransactionOutput{{
				To:       "1A1PjKqjWMNBzTVdcBru27EV1PHcXWc63W",
				Satoshis: 1000,
			}},
			ChangeNumberOfDestinations: 1,
			Sync: &SyncConfig{
				Broadcast:        true,
				BroadcastInstant: false,
				PaymailP2P:       false,
				SyncOnChain:      false,
			},
		},
		append(client.DefaultModelOptions(), New())...,
	)

	require.NoError(t, draft.Save(ctx))

	tx, err := client.RecordTransaction(ctx, testXPub, draft.Hex, draft.ID, client.DefaultModelOptions()...)
	require.NoError(t, err)
	assert.NotEmpty(t, tx)

	if mined {
		// Simulate mining: a block height plus an attached Merkle proof.
		tx.BlockHeight = 128
		proof := bc.MerkleProof{
			TxOrID: "423542156234627frafserg6gtrdsbd", Nodes: []string{"n1", "n2"},
		}
		tx.MerkleProof = MerkleProof(proof)
	}

	require.NoError(t, tx.Save(ctx))

	return tx
}
// import (
// "context"
// "testing"
//
// "github.com/libsv/go-bc"
// "github.com/stretchr/testify/assert"
// "github.com/stretchr/testify/require"
// )
//
// func Test_ToBeefHex(t *testing.T) {
// t.Run("all parents txs are already mined", func(t *testing.T) {
// //given
// ctx, client, deferMe := initSimpleTestCase(t)
// defer deferMe()
//
// ancestorTx := addGrandpaTx(ctx, t, client)
// minedParentTx := createTxWithDraft(ctx, t, client, ancestorTx, true)
//
// newTx := createTxWithDraft(ctx, t, client, minedParentTx, false)
//
// //when
// hex, err := ToBeefHex(ctx, newTx)
//
// //then
// assert.NoError(t, err)
// assert.NotEmpty(t, hex)
// })
//
// t.Run("some parents txs are not mined yet", func(t *testing.T) {
// // Error expected! this should be changed in the future. right now the test case has been written to make sure the system doesn't panic in such a situation
//
// //given
// ctx, client, deferMe := initSimpleTestCase(t)
// defer deferMe()
//
// ancestorTx := addGrandpaTx(ctx, t, client)
// notMinedParentTx := createTxWithDraft(ctx, t, client, ancestorTx, false)
//
// newTx := createTxWithDraft(ctx, t, client, notMinedParentTx, false)
//
// //when
// hex, err := ToBeefHex(ctx, newTx)
//
// //then
// assert.Error(t, err)
// assert.Empty(t, hex)
// })
// }
//
// func addGrandpaTx(ctx context.Context, t *testing.T, client ClientInterface) *Transaction {
// // great ancestor
// grandpaTx := newTransaction(testTx2Hex, append(client.DefaultModelOptions(), New())...)
// grandpaTx.BlockHeight = 1
// // mark it as mined
// grandpaTxMp := bc.MerkleProof{
// TxOrID: "111111111111111111111111111111111111111",
// Nodes: []string{"n1", "n2"},
// }
// grandpaTx.MerkleProof = MerkleProof(grandpaTxMp)
// err := grandpaTx.Save(ctx)
// require.NoError(t, err)
//
// return grandpaTx
// }
//
// func createTxWithDraft(ctx context.Context, t *testing.T, client ClientInterface, parentTx *Transaction, mined bool) *Transaction {
// draftTransaction := newDraftTransaction(
// testXPub, &TransactionConfig{
// Inputs: []*TransactionInput{{Utxo: *newUtxoFromTxID(parentTx.GetID(), 0, append(client.DefaultModelOptions(), New())...)}},
// Outputs: []*TransactionOutput{{
// To: "1A1PjKqjWMNBzTVdcBru27EV1PHcXWc63W",
// Satoshis: 1000,
// }},
// ChangeNumberOfDestinations: 1,
// Sync: &SyncConfig{
// Broadcast: true,
// BroadcastInstant: false,
// PaymailP2P: false,
// SyncOnChain: false,
// },
// },
// append(client.DefaultModelOptions(), New())...,
// )
//
// err := draftTransaction.Save(ctx)
// require.NoError(t, err)
//
// var transaction *Transaction
// transaction, err = client.RecordTransaction(ctx, testXPub, draftTransaction.Hex, draftTransaction.ID, client.DefaultModelOptions()...)
// require.NoError(t, err)
// assert.NotEmpty(t, transaction)
//
// if mined {
// transaction.BlockHeight = 128
// mp := bc.MerkleProof{
// TxOrID: "423542156234627frafserg6gtrdsbd", Nodes: []string{"n1", "n2"},
// }
// transaction.MerkleProof = MerkleProof(mp)
// }
//
// err = transaction.Save(ctx)
// require.NoError(t, err)
//
// return transaction
// }
156 changes: 147 additions & 9 deletions model_bump.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,27 +3,165 @@ package bux
import (
"bytes"
"database/sql/driver"
"encoding/hex"
"encoding/json"
"errors"
"fmt"
"reflect"
"sort"

"github.com/libsv/go-bt/v2"
)

// BUMP represents BUMP format
// BUMPPaths represents a slice of BUMPs (BSV Unified Merkle Paths)
type BUMPPaths []BUMP

// BUMP represents BUMP (BSV Unified Merkle Path) format
type BUMP struct {
BlockHeight uint64 `json:"blockHeight,string"`
Path []BUMPPathMap `json:"path"`
BlockHeight uint64 `json:"blockHeight,string"`
Path [][]BUMPNode `json:"path"`
// private field for storing already used offsets to avoid duplicate nodes
allNodes []map[uint64]bool
}

// BUMPPathMap represents map with pathes
type BUMPPathMap map[string]BUMPPathElement

// BUMPPathElement represents each BUMP path element
type BUMPPathElement struct {
Hash string `json:"hash,omitempty"`
// BUMPLeaf represents each BUMP path element
type BUMPNode struct {
Offset uint64 `json:"offset,string"`
Hash string `json:"hash"`
TxId bool `json:"txid,omitempty"`
Duplicate bool `json:"duplicate,omitempty"`
}

// CalculateMergedBUMP calculates a merged BUMP from a slice of Merkle Proofs.
// All proofs must describe trees of the same height (and no taller than
// maxCmpHeight); an empty or nil input yields an empty BUMP with no error.
func CalculateMergedBUMP(mp []MerkleProof) (BUMP, error) {
	bump := BUMP{}

	// len() of a nil slice is 0, so this also covers mp == nil.
	if len(mp) == 0 {
		return bump, nil
	}

	height := len(mp[0].Nodes)
	if height > maxCmpHeight {
		return bump,
			fmt.Errorf("BUMP cannot be higher than %d", maxCmpHeight)
	}

	// Proofs of different heights come from different trees and cannot be merged.
	for _, m := range mp {
		if height != len(m.Nodes) {
			return bump,
				errors.New("Merged BUMP cannot be obtained from Merkle Proofs of different heights")
		}
	}

	bump.Path = make([][]BUMPNode, height)
	bump.allNodes = make([]map[uint64]bool, height)
	for i := range bump.allNodes {
		bump.allNodes[i] = make(map[uint64]bool)
	}

	// Merge every proof's path; add() skips offsets already present per level.
	for _, m := range mp {
		if err := bump.add(m.ToBUMP()); err != nil {
			return BUMP{}, err
		}
	}

	// Sort each level by offset for a canonical, deterministic representation.
	for _, p := range bump.Path {
		sort.Slice(p, func(i, j int) bool {
			return p[i].Offset < p[j].Offset
		})
	}

	return bump, nil
}

func (bump *BUMP) add(b BUMP) error {
if len(bump.Path) != len(b.Path) {
return errors.New("BUMPs with different heights cannot be merged")
}

for i := range b.Path {
for _, v := range b.Path[i] {
_, value := bump.allNodes[i][v.Offset]
if !value {
bump.Path[i] = append(bump.Path[i], v)
bump.allNodes[i][v.Offset] = true
continue
}
if i == 0 && value && v.TxId {
for j := range bump.Path[i] {
if bump.Path[i][j].Offset == v.Offset {
bump.Path[i][j] = v
}
}
}
kuba-4chain marked this conversation as resolved.
Show resolved Hide resolved
}
}

return nil
}

// Bytes returns the binary representation of all BUMPs concatenated together.
func (bumpPaths *BUMPPaths) Bytes() []byte {
	var buff bytes.Buffer

	for _, bump := range *bumpPaths {
		// Hex() always emits valid hex, so the decode error can be ignored.
		// NOTE: the local was renamed from "bytes", which shadowed the
		// imported bytes package.
		decoded, _ := hex.DecodeString(bump.Hex())
		buff.Write(decoded)
	}

	return buff.Bytes()
}

// Hex returns the BUMP serialized as a hex string.
func (bump *BUMP) Hex() string {
	buffer := bump.bytesBuffer()
	return buffer.String()
}

// bytesBuffer serializes the BUMP: block height (VarInt), tree height
// (two hex digits), then per level the leaf count (VarInt) and each leaf's
// offset (VarInt), flags byte, and little-endian hash — all hex encoded.
func (bump *BUMP) bytesBuffer() *bytes.Buffer {
	buff := &bytes.Buffer{}
	buff.WriteString(hex.EncodeToString(bt.VarInt(bump.BlockHeight).Bytes()))

	treeHeight := len(bump.Path)
	buff.WriteString(leadingZeroInt(treeHeight))

	for _, level := range bump.Path {
		buff.WriteString(hex.EncodeToString(bt.VarInt(len(level)).Bytes()))
		for _, leaf := range level {
			buff.WriteString(hex.EncodeToString(bt.VarInt(leaf.Offset).Bytes()))
			buff.WriteString(fmt.Sprintf("%02x", flags(leaf.TxId, leaf.Duplicate)))
			// Hashes are stored big-endian but serialized little-endian.
			hashBytes, _ := hex.DecodeString(leaf.Hash)
			buff.WriteString(hex.EncodeToString(bt.ReverseBytes(hashBytes)))
		}
	}
	return buff
}

// leadingZeroInt formats i as a two-digit, zero-padded, lowercase hex string.
// Values below 0x10 get a leading zero (e.g. 5 -> "05", 16 -> "10"); the old
// comment incorrectly said "less than 10".
func leadingZeroInt(i int) string {
	return fmt.Sprintf("%02x", i)
}

// flags returns the BUMP leaf flags byte: 1 for a duplicate leaf (duplicate
// takes precedence), 2 for a leaf carrying a txid of interest, and 0 for a
// plain data (hash) leaf.
func flags(txId, duplicate bool) byte {
	switch {
	case duplicate:
		return 1
	case txId:
		return 2
	default:
		return 0
	}
}

// Scan scan value into Json, implements sql.Scanner interface
func (m *BUMP) Scan(value interface{}) error {
if value == nil {
Expand Down
Loading
Loading