Skip to content

Commit

Permalink
feat(BUX-172): verify merkle root for unmined inputs (#48)
Browse files Browse the repository at this point in the history
* feat(172): export Bump fields

* feat(BUX-172): move SPV to new package; verify merkle root for unmined inputs

* feat(BUX-172): set version v0.8.0

* feat(BUX-172): fix typo

* feat(BUX-172): fix ver test

* feat(BUX-172): add tests for valid but incorrect BUMP

* feat(BUX-172): implement SPV for every unmined tx in BEEF

* refactor (BUX-172): move p2p_beef_* from paymail to beef package

* feat(BUX-172): improve bump verification

* feat(BUX-172): adjust to review comments

* feat(BUX-172): adjust to review

* Update spv/spv.go - inline error check

Co-authored-by: Damian Orzepowski <[email protected]>

* Update spv/spv.go - inline error check

Co-authored-by: Damian Orzepowski <[email protected]>

* fix code

---------

Co-authored-by: Damian Orzepowski <[email protected]>
  • Loading branch information
arkadiuszos4chain and dorzepowski authored Nov 24, 2023
1 parent a3cd5a1 commit f719ffa
Show file tree
Hide file tree
Showing 17 changed files with 407 additions and 297 deletions.
71 changes: 32 additions & 39 deletions p2p_beef_tx.go → beef/beef_tx.go
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package paymail
package beef

import (
"encoding/hex"
Expand All @@ -14,8 +14,8 @@ const (
)

const (
HasNoCMP = 0x00
HasCMP = 0x01
HasNoBump = 0x00
HasBump = 0x01
)

const (
Expand All @@ -27,33 +27,26 @@ const (

type TxData struct {
Transaction *bt.Tx
PathIndex *bt.VarInt
}
BumpIndex *bt.VarInt

type DecodedBEEF struct {
BUMPs BUMPs
InputsTxData []*TxData
ProcessedTxData *bt.Tx
txID string
}

// GetMerkleRoots will calculate the merkle roots for the BUMPs in the BEEF transaction
func (dBeef *DecodedBEEF) GetMerkleRootsRequest() ([]MerkleRootConfirmationRequestItem, error) {
var merkleRootsRequest []MerkleRootConfirmationRequestItem

for _, bump := range dBeef.BUMPs {
merkleRoot, err := bump.calculateMerkleRoot()
if err != nil {
return nil, err
}
func (td *TxData) Unmined() bool {
return td.BumpIndex == nil
}

request := MerkleRootConfirmationRequestItem{
BlockHeight: int32(bump.blockHeight),
MerkleRoot: merkleRoot,
}
merkleRootsRequest = append(merkleRootsRequest, request)
func (td *TxData) GetTxID() string {
if len(td.txID) == 0 {
td.txID = td.Transaction.TxID()
}

return merkleRootsRequest, nil
return td.txID
}

type DecodedBEEF struct {
BUMPs BUMPs
Transactions []*TxData
}

func DecodeBEEF(beefHex string) (*DecodedBEEF, error) {
Expand All @@ -72,17 +65,16 @@ func DecodeBEEF(beefHex string) (*DecodedBEEF, error) {
return nil, err
}

// get the last transaction as the processed transaction - it should be the last one because of khan's ordering
processedTx := transactions[len(transactions)-1]
transactions = transactions[:len(transactions)-1]

return &DecodedBEEF{
BUMPs: bumps,
InputsTxData: transactions,
ProcessedTxData: processedTx.Transaction,
BUMPs: bumps,
Transactions: transactions,
}, nil
}

func (d *DecodedBEEF) GetLatestTx() *bt.Tx {
return d.Transactions[len(d.Transactions)-1].Transaction // get the last transaction as the processed transaction - it should be the last one because of khan's ordering
}

func decodeBUMPs(beefBytes []byte) ([]BUMP, []byte, error) {
if len(beefBytes) == 0 {
return nil, nil, errors.New("cannot decode BUMP - no bytes provided")
Expand All @@ -96,8 +88,8 @@ func decodeBUMPs(beefBytes []byte) ([]BUMP, []byte, error) {

beefBytes = beefBytes[bytesUsed:]

bumps := make([]BUMP, 0, int(nBump))
for i := 0; i < int(nBump); i++ {
bumps := make([]BUMP, 0, uint64(nBump))
for i := uint64(0); i < uint64(nBump); i++ {
if len(beefBytes) == 0 {
return nil, nil, errors.New("insufficient bytes to extract BUMP blockHeight")
}
Expand All @@ -117,8 +109,8 @@ func decodeBUMPs(beefBytes []byte) ([]BUMP, []byte, error) {
beefBytes = remainingBytes

bump := BUMP{
blockHeight: uint64(blockHeight),
path: bumpPaths,
BlockHeight: uint64(blockHeight),
Path: bumpPaths,
}

bumps = append(bumps, bump)
Expand Down Expand Up @@ -185,7 +177,7 @@ func decodeBUMPLevel(nLeaves bt.VarInt, hexBytes []byte) ([]BUMPLeaf, []byte, er
hexBytes = hexBytes[hashBytesCount:]

bumpLeaf := BUMPLeaf{
hash: hash,
Hash: hash,
offset: uint64(offset),
}
if flag == txIDFlag {
Expand Down Expand Up @@ -216,19 +208,20 @@ func decodeTransactionsWithPathIndexes(bytes []byte) ([]*TxData, error) {
bytes = bytes[offset:]

var pathIndex *bt.VarInt
if bytes[0] == HasCMP {

if bytes[0] == HasBump {
value, offset := bt.NewVarIntFromBytes(bytes[1:])
pathIndex = &value
bytes = bytes[1+offset:]
} else if bytes[0] == HasNoCMP {
} else if bytes[0] == HasNoBump {
bytes = bytes[1:]
} else {
return nil, fmt.Errorf("invalid HasCMP flag for transaction at index %d", i)
}

transactions = append(transactions, &TxData{
Transaction: tx,
PathIndex: pathIndex,
BumpIndex: pathIndex,
})
}

Expand Down
87 changes: 39 additions & 48 deletions p2p_beef_tx_test.go → beef/beef_tx_test.go
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
package paymail
package beef

import (
"context"
"errors"
"testing"

Expand All @@ -10,57 +9,47 @@ import (
"github.com/stretchr/testify/assert"
)

// Mock implementation of a service provider
type mockServiceProvider struct{}

// VerifyMerkleRoots is a mock implementation of this interface
func (m *mockServiceProvider) VerifyMerkleRoots(_ context.Context, _ []MerkleRootConfirmationRequestItem) error {
// Verify the merkle roots
return nil
}

func TestDecodeBEEF_DecodeBEEF_HappyPaths(t *testing.T) {
testCases := []struct {
name string
hexStream string
expectedDecodedBEEF *DecodedBEEF
pathIndexForTheOldestInput *bt.VarInt
expectedError error
}{
{
name: "valid BEEF with 1 CMP and 1 input transaction",
name: "valid BEEF with 1 BUMP and 1 input transaction",
hexStream: "0100beef01fe636d0c0007021400fe507c0c7aa754cef1f7889d5fd395cf1f785dd7de98eed895dbedfe4e5bc70d1502ac4e164f5bc16746bb0868404292ac8318bbac3800e4aad13a014da427adce3e010b00bc4ff395efd11719b277694cface5aa50d085a0bb81f613f70313acd28cf4557010400574b2d9142b8d28b61d88e3b2c3f44d858411356b49a28a4643b6d1a6a092a5201030051a05fc84d531b5d250c23f4f886f6812f9fe3f402d61607f977b4ecd2701c19010000fd781529d58fc2523cf396a7f25440b409857e7e221766c57214b1d38c7b481f01010062f542f45ea3660f86c013ced80534cb5fd4c19d66c56e7e8c5d4bf2d40acc5e010100b121e91836fd7cd5102b654e9f72f3cf6fdbfd0b161c53a9c54b12c841126331020100000001cd4e4cac3c7b56920d1e7655e7e260d31f29d9a388d04910f1bbd72304a79029010000006b483045022100e75279a205a547c445719420aa3138bf14743e3f42618e5f86a19bde14bb95f7022064777d34776b05d816daf1699493fcdf2ef5a5ab1ad710d9c97bfb5b8f7cef3641210263e2dee22b1ddc5e11f6fab8bcd2378bdd19580d640501ea956ec0e786f93e76ffffffff013e660000000000001976a9146bfd5c7fbe21529d45803dbcf0c87dd3c71efbc288ac0000000001000100000001ac4e164f5bc16746bb0868404292ac8318bbac3800e4aad13a014da427adce3e000000006a47304402203a61a2e931612b4bda08d541cfb980885173b8dcf64a3471238ae7abcd368d6402204cbf24f04b9aa2256d8901f0ed97866603d2be8324c2bfb7a37bf8fc90edd5b441210263e2dee22b1ddc5e11f6fab8bcd2378bdd19580d640501ea956ec0e786f93e76ffffffff013c660000000000001976a9146bfd5c7fbe21529d45803dbcf0c87dd3c71efbc288ac0000000000",
expectedDecodedBEEF: &DecodedBEEF{
BUMPs: BUMPs{
BUMP{
blockHeight: 814435,
path: [][]BUMPLeaf{
BlockHeight: 814435,
Path: [][]BUMPLeaf{
{
BUMPLeaf{hash: "0dc75b4efeeddb95d8ee98ded75d781fcf95d35f9d88f7f1ce54a77a0c7c50fe", offset: 20},
BUMPLeaf{hash: "3ecead27a44d013ad1aae40038acbb1883ac9242406808bb4667c15b4f164eac", txId: true, offset: 21},
BUMPLeaf{Hash: "0dc75b4efeeddb95d8ee98ded75d781fcf95d35f9d88f7f1ce54a77a0c7c50fe", offset: 20},
BUMPLeaf{Hash: "3ecead27a44d013ad1aae40038acbb1883ac9242406808bb4667c15b4f164eac", txId: true, offset: 21},
},
{
BUMPLeaf{hash: "5745cf28cd3a31703f611fb80b5a080da55acefa4c6977b21917d1ef95f34fbc", offset: 11},
BUMPLeaf{Hash: "5745cf28cd3a31703f611fb80b5a080da55acefa4c6977b21917d1ef95f34fbc", offset: 11},
},
{
BUMPLeaf{hash: "522a096a1a6d3b64a4289ab456134158d8443f2c3b8ed8618bd2b842912d4b57", offset: 4},
BUMPLeaf{Hash: "522a096a1a6d3b64a4289ab456134158d8443f2c3b8ed8618bd2b842912d4b57", offset: 4},
},
{
BUMPLeaf{hash: "191c70d2ecb477f90716d602f4e39f2f81f686f8f4230c255d1b534dc85fa051", offset: 3},
BUMPLeaf{Hash: "191c70d2ecb477f90716d602f4e39f2f81f686f8f4230c255d1b534dc85fa051", offset: 3},
},
{
BUMPLeaf{hash: "1f487b8cd3b11472c56617227e7e8509b44054f2a796f33c52c28fd5291578fd", offset: 0},
BUMPLeaf{Hash: "1f487b8cd3b11472c56617227e7e8509b44054f2a796f33c52c28fd5291578fd", offset: 0},
},
{
BUMPLeaf{hash: "5ecc0ad4f24b5d8c7e6ec5669dc1d45fcb3405d8ce13c0860f66a35ef442f562", offset: 1},
BUMPLeaf{Hash: "5ecc0ad4f24b5d8c7e6ec5669dc1d45fcb3405d8ce13c0860f66a35ef442f562", offset: 1},
},
{
BUMPLeaf{hash: "31631241c8124bc5a9531c160bfddb6fcff3729f4e652b10d57cfd3618e921b1", offset: 1},
BUMPLeaf{Hash: "31631241c8124bc5a9531c160bfddb6fcff3729f4e652b10d57cfd3618e921b1", offset: 1},
},
},
},
},
InputsTxData: []*TxData{
Transactions: []*TxData{
{
Transaction: &bt.Tx{
Version: 1,
Expand All @@ -80,25 +69,29 @@ func TestDecodeBEEF_DecodeBEEF_HappyPaths(t *testing.T) {
},
},
},
PathIndex: func(v bt.VarInt) *bt.VarInt { return &v }(0x0),
BumpIndex: func(v bt.VarInt) *bt.VarInt { return &v }(0x0),
},
},
ProcessedTxData: &bt.Tx{
Version: 1,
LockTime: 0,
Inputs: []*bt.Input{
{
PreviousTxSatoshis: 0,
PreviousTxOutIndex: 0,
SequenceNumber: 4294967295,
PreviousTxScript: nil,
},
},
Outputs: []*bt.Output{
{
Satoshis: 26172,
LockingScript: bscript.NewFromBytes([]byte("76a9146bfd5c7fbe21529d45803dbcf0c87dd3c71efbc288ac")),

{
Transaction: &bt.Tx{
Version: 1,
LockTime: 0,
Inputs: []*bt.Input{
{
PreviousTxSatoshis: 0,
PreviousTxOutIndex: 0,
SequenceNumber: 4294967295,
PreviousTxScript: nil,
},
},
Outputs: []*bt.Output{
{
Satoshis: 26172,
LockingScript: bscript.NewFromBytes([]byte("76a9146bfd5c7fbe21529d45803dbcf0c87dd3c71efbc288ac")),
},
},
},
BumpIndex: nil,
},
},
},
Expand All @@ -114,20 +107,18 @@ func TestDecodeBEEF_DecodeBEEF_HappyPaths(t *testing.T) {
decodedBEEF, err := DecodeBEEF(beef)

// then
assert.Equal(t, tc.expectedError, err, "expected error %v, but got %v", tc.expectedError, err)
assert.Nil(t, err)

assert.Equal(t, len(tc.expectedDecodedBEEF.InputsTxData), len(decodedBEEF.InputsTxData), "expected %v inputs, but got %v", len(tc.expectedDecodedBEEF.InputsTxData), len(decodedBEEF.InputsTxData))
assert.Equal(t, len(tc.expectedDecodedBEEF.Transactions), len(decodedBEEF.Transactions), "expected %v inputs, but got %v", len(tc.expectedDecodedBEEF.Transactions), len(decodedBEEF.Transactions))

assert.Equal(t, len(tc.expectedDecodedBEEF.BUMPs), len(decodedBEEF.BUMPs), "expected %v BUMPs, but got %v", len(tc.expectedDecodedBEEF.BUMPs), len(decodedBEEF.BUMPs))

for i, bump := range tc.expectedDecodedBEEF.BUMPs {
assert.Equal(t, len(bump.path), len(decodedBEEF.BUMPs[i].path), "expected %v BUMPPaths for %v BUMP, but got %v", len(bump.path), i, len(decodedBEEF.BUMPs[i].path))
assert.Equal(t, bump.path, decodedBEEF.BUMPs[i].path, "expected equal BUMPPaths for %v BUMP, expected: %v but got %v", i, bump, len(decodedBEEF.BUMPs[i].path))
assert.Equal(t, len(bump.Path), len(decodedBEEF.BUMPs[i].Path), "expected %v BUMPPaths for %v BUMP, but got %v", len(bump.Path), i, len(decodedBEEF.BUMPs[i].Path))
assert.Equal(t, bump.Path, decodedBEEF.BUMPs[i].Path, "expected equal BUMPPaths for %v BUMP, expected: %v but got %v", i, bump, len(decodedBEEF.BUMPs[i].Path))
}

assert.NotNil(t, decodedBEEF.ProcessedTxData, "expected original transaction to be not nil")

assert.Equal(t, tc.expectedDecodedBEEF.InputsTxData[0].PathIndex, decodedBEEF.InputsTxData[0].PathIndex, "expected path index for the oldest input to be %v, but got %v", tc.expectedDecodedBEEF.InputsTxData[0].PathIndex, decodedBEEF.InputsTxData[0].PathIndex)
assert.Equal(t, tc.expectedDecodedBEEF.Transactions[0].BumpIndex, decodedBEEF.Transactions[0].BumpIndex, "expected path index for the oldest input to be %v, but got %v", tc.expectedDecodedBEEF.Transactions[0].BumpIndex, decodedBEEF.Transactions[0].BumpIndex)
})
}
}
Expand Down
44 changes: 22 additions & 22 deletions bump.go → beef/bump.go
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package paymail
package beef

import (
"errors"
Expand All @@ -11,13 +11,13 @@ type BUMPs []BUMP

// BUMP is a struct that represents a whole BUMP format
type BUMP struct {
blockHeight uint64
path [][]BUMPLeaf
BlockHeight uint64
Path [][]BUMPLeaf
}

// BUMPLeaf represents each BUMP path element
type BUMPLeaf struct {
hash string
Hash string
txId bool
duplicate bool
offset uint64
Expand All @@ -30,10 +30,10 @@ const (
txIDFlag
)

func (b BUMP) calculateMerkleRoot() (string, error) {
func (b BUMP) CalculateMerkleRoot() (string, error) {
merkleRoot := ""

for _, bumpPathElement := range b.path[0] {
for _, bumpPathElement := range b.Path[0] {
if bumpPathElement.txId {
calcMerkleRoot, err := calculateMerkleRoot(bumpPathElement, b)
if err != nil {
Expand All @@ -53,21 +53,12 @@ func (b BUMP) calculateMerkleRoot() (string, error) {
return merkleRoot, nil
}

func findLeafByOffset(offset uint64, bumpLeaves []BUMPLeaf) *BUMPLeaf {
for _, bumpTx := range bumpLeaves {
if bumpTx.offset == offset {
return &bumpTx
}
}
return nil
}

// calculateMerkleRoots will calculate one merkle root for tx in the BUMPLeaf
func calculateMerkleRoot(baseLeaf BUMPLeaf, bump BUMP) (string, error) {
calculatedHash := baseLeaf.hash
calculatedHash := baseLeaf.Hash
offset := baseLeaf.offset

for _, bLevel := range bump.path {
for _, bLevel := range bump.Path {
newOffset := getOffsetPair(offset)
leafInPair := findLeafByOffset(newOffset, bLevel)
if leafInPair == nil {
Expand All @@ -85,7 +76,7 @@ func calculateMerkleRoot(baseLeaf BUMPLeaf, bump BUMP) (string, error) {
offset = offset / 2

baseLeaf = BUMPLeaf{
hash: calculatedHash,
Hash: calculatedHash,
offset: offset,
}
}
Expand All @@ -100,19 +91,28 @@ func getOffsetPair(offset uint64) uint64 {
return offset - 1
}

func findLeafByOffset(offset uint64, bumpLeaves []BUMPLeaf) *BUMPLeaf {
for _, bumpTx := range bumpLeaves {
if bumpTx.offset == offset {
return &bumpTx
}
}
return nil
}

func prepareNodes(baseLeaf BUMPLeaf, offset uint64, leafInPair BUMPLeaf, newOffset uint64) (string, string) {
var baseLeafHash, pairLeafHash string

if baseLeaf.duplicate {
baseLeafHash = leafInPair.hash
baseLeafHash = leafInPair.Hash
} else {
baseLeafHash = baseLeaf.hash
baseLeafHash = baseLeaf.Hash
}

if leafInPair.duplicate {
pairLeafHash = baseLeaf.hash
pairLeafHash = baseLeaf.Hash
} else {
pairLeafHash = leafInPair.hash
pairLeafHash = leafInPair.Hash
}

if newOffset > offset {
Expand Down
Loading

0 comments on commit f719ffa

Please sign in to comment.