chore: refactor share splitting and merging (#637)
* chore: refactor share splitting and merging

* chore: address feedback from #820

* linter

* Apply suggestions from code review

Co-authored-by: Rootul Patel <[email protected]>

* remove unused code

* linter

* chore: change fields for sharestack to be more accurate

* add comment to old implementation

* review feedback

* rename power of two util

* Apply suggestions from code review

review feedback

Co-authored-by: Rootul Patel <[email protected]>

Co-authored-by: Rootul Patel <[email protected]>
evan-forbes and rootulp authored Aug 29, 2022
1 parent 05bdf9e commit 13076a6
Showing 21 changed files with 979 additions and 839 deletions.
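The refactor replaces shares.ComputeShares(...).RawShares() with shares.Split and coretypes.DataFromSquare with shares.Merge. Below is a minimal round-trip sketch pieced together from the call sites in this diff; the celestia-app import path for the shares package is an assumption, and error handling simply mirrors those call sites.

package app

import (
	"github.com/celestiaorg/celestia-app/pkg/shares"
	"github.com/tendermint/tendermint/pkg/da"
	coretypes "github.com/tendermint/tendermint/types"
)

// roundTrip splits block data into raw shares, erasure-extends them into a
// data square, and merges the extended square back into block data.
func roundTrip(data coretypes.Data, squareSize uint64) (coretypes.Data, error) {
	rawShares, err := shares.Split(data)
	if err != nil {
		return coretypes.Data{}, err
	}
	eds, err := da.ExtendShares(squareSize, rawShares)
	if err != nil {
		return coretypes.Data{}, err
	}
	return shares.Merge(eds)
}
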
4 changes: 2 additions & 2 deletions app/child_tx_decoder.go
@@ -7,8 +7,8 @@ import (

func MalleatedTxDecoder(dec sdk.TxDecoder) sdk.TxDecoder {
return func(txBytes []byte) (sdk.Tx, error) {
- if _, childTx, has := coretypes.UnwrapMalleatedTx(txBytes); has {
- return dec(childTx)
+ if malleatedTx, has := coretypes.UnwrapMalleatedTx(txBytes); has {
+ return dec(malleatedTx.Tx)
}
return dec(txBytes)
}
4 changes: 2 additions & 2 deletions app/process_proposal.go
@@ -101,15 +101,15 @@ func (app *App) ProcessProposal(req abci.RequestProcessProposal) abci.ResponsePr
}
}

- nsshares, _, err := shares.ComputeShares(&data, req.BlockData.OriginalSquareSize)
+ rawShares, err := shares.Split(data)
if err != nil {
logInvalidPropBlockError(app.Logger(), req.Header, "failure to compute shares from block data:", err)
return abci.ResponseProcessProposal{
Result: abci.ResponseProcessProposal_REJECT,
}
}

- eds, err := da.ExtendShares(req.BlockData.OriginalSquareSize, nsshares.RawShares())
+ eds, err := da.ExtendShares(req.BlockData.OriginalSquareSize, rawShares)
if err != nil {
logInvalidPropBlockError(app.Logger(), req.Header, "failure to erasure the data square", err)
return abci.ResponseProcessProposal{
9 changes: 7 additions & 2 deletions app/split_shares.go
@@ -137,7 +137,10 @@ func newShareSplitter(txConf client.TxConfig, squareSize uint64, data *core.Data
if err != nil {
panic(err)
}
- sqwr.evdShares = shares.SplitEvidenceIntoShares(evdData).RawShares()
+ sqwr.evdShares, err = shares.SplitEvidence(evdData.Evidence)
+ if err != nil {
+ panic(err)
+ }

sqwr.txWriter = coretypes.NewContiguousShareWriter(consts.TxNamespaceID)
sqwr.msgWriter = coretypes.NewMessageShareWriter()
@@ -188,7 +191,9 @@ func (sqwr *shareSplitter) writeMalleatedTx(
return false, nil, nil, err
}

- wrappedTx, err := coretypes.WrapMalleatedTx(parentHash[:], rawProcessedTx)
+ // we use a share index of 0 here because this implementation doesn't
+ // support non-interactive defaults or the usage of wrapped txs
+ wrappedTx, err := coretypes.WrapMalleatedTx(parentHash[:], 0, rawProcessedTx)
if err != nil {
return false, nil, nil, err
}
34 changes: 16 additions & 18 deletions app/test/process_proposal_test.go
@@ -151,10 +151,10 @@ func TestMessageInclusionCheck(t *testing.T) {
data, err := coretypes.DataFromProto(tt.input.BlockData)
require.NoError(t, err)

- shares, _, err := shares.ComputeShares(&data, tt.input.BlockData.OriginalSquareSize)
+ shares, err := shares.Split(data)
require.NoError(t, err)

- rawShares := shares.RawShares()
+ rawShares := shares

require.NoError(t, err)
eds, err := da.ExtendShares(tt.input.BlockData.OriginalSquareSize, rawShares)
@@ -207,13 +205,11 @@ func TestProcessMessagesWithReservedNamespaces(t *testing.T) {
data, err := coretypes.DataFromProto(input.BlockData)
require.NoError(t, err)

- shares, _, err := shares.ComputeShares(&data, input.BlockData.OriginalSquareSize)
+ shares, err := shares.Split(data)
require.NoError(t, err)

- rawShares := shares.RawShares()

require.NoError(t, err)
- eds, err := da.ExtendShares(input.BlockData.OriginalSquareSize, rawShares)
+ eds, err := da.ExtendShares(input.BlockData.OriginalSquareSize, shares)
require.NoError(t, err)
dah := da.NewDataAvailabilityHeader(eds)
input.Header.DataHash = dah.Hash()
@@ -234,6 +232,9 @@ func TestProcessMessageWithUnsortedMessages(t *testing.T) {
pfdOne, msgOne := genRandMsgPayForDataForNamespace(t, signer, 8, namespaceOne)
pfdTwo, msgTwo := genRandMsgPayForDataForNamespace(t, signer, 8, namespaceTwo)

+ cMsgOne := &core.Message{NamespaceId: pfdOne.GetMessageNamespaceId(), Data: msgOne}
+ cMsgTwo := &core.Message{NamespaceId: pfdTwo.GetMessageNamespaceId(), Data: msgTwo}

input := abci.RequestProcessProposal{
BlockData: &core.Data{
Txs: [][]byte{
@@ -242,14 +243,8 @@ func TestProcessMessageWithUnsortedMessages(t *testing.T) {
},
Messages: core.Messages{
MessagesList: []*core.Message{
- {
- NamespaceId: pfdTwo.GetMessageNamespaceId(),
- Data: msgTwo,
- },
- {
- NamespaceId: pfdOne.GetMessageNamespaceId(),
- Data: msgOne,
- },
+ cMsgOne,
+ cMsgTwo,
},
},
OriginalSquareSize: 8,
@@ -258,17 +253,20 @@ func TestProcessMessageWithUnsortedMessages(t *testing.T) {
data, err := coretypes.DataFromProto(input.BlockData)
require.NoError(t, err)

- shares, _, err := shares.ComputeShares(&data, input.BlockData.OriginalSquareSize)
+ shares, err := shares.Split(data)
require.NoError(t, err)

- rawShares := shares.RawShares()

require.NoError(t, err)
- eds, err := da.ExtendShares(input.BlockData.OriginalSquareSize, rawShares)
+ eds, err := da.ExtendShares(input.BlockData.OriginalSquareSize, shares)

require.NoError(t, err)
dah := da.NewDataAvailabilityHeader(eds)
input.Header.DataHash = dah.Hash()

+ // swap the messages
+ input.BlockData.Messages.MessagesList[0] = cMsgTwo
+ input.BlockData.Messages.MessagesList[1] = cMsgOne

got := testApp.ProcessProposal(input)

assert.Equal(t, got.Result, abci.ResponseProcessProposal_REJECT)
7 changes: 3 additions & 4 deletions app/test/split_shares_test.go
@@ -13,7 +13,6 @@ import (
"github.com/tendermint/tendermint/pkg/consts"
"github.com/tendermint/tendermint/pkg/da"
core "github.com/tendermint/tendermint/proto/tendermint/types"
coretypes "github.com/tendermint/tendermint/types"
)

func TestSplitShares(t *testing.T) {
@@ -101,14 +100,14 @@ func TestSplitShares(t *testing.T) {
dah := da.NewDataAvailabilityHeader(eds)
data.Hash = dah.Hash()

- parsedData, err := coretypes.DataFromSquare(eds)
+ parsedData, err := shares.Merge(eds)
require.NoError(t, err)

assert.Equal(t, data.Txs, parsedData.Txs.ToSliceOfBytes())

- parsedShares, _, err := shares.ComputeShares(&parsedData, tt.squareSize)
+ parsedShares, err := shares.Split(parsedData)
require.NoError(t, err)

- require.Equal(t, square, parsedShares.RawShares())
+ require.Equal(t, square, parsedShares)
}
}
2 changes: 1 addition & 1 deletion go.mod
@@ -161,5 +161,5 @@ require (
replace (
github.com/cosmos/cosmos-sdk => github.com/celestiaorg/cosmos-sdk v1.2.0-sdk-v0.46.0
github.com/gogo/protobuf => github.com/regen-network/protobuf v1.3.3-alpha.regen.1
- github.com/tendermint/tendermint => github.com/celestiaorg/celestia-core v1.3.3-tm-v0.34.20
+ github.com/tendermint/tendermint => github.com/celestiaorg/celestia-core v1.3.5-tm-v0.34.20
)
4 changes: 2 additions & 2 deletions go.sum
@@ -173,8 +173,8 @@ github.com/btcsuite/winsvc v1.0.0/go.mod h1:jsenWakMcC0zFBFurPLEAyrnc/teJEM1O46f
github.com/bwesterb/go-ristretto v1.2.0/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0=
github.com/c-bata/go-prompt v0.2.2/go.mod h1:VzqtzE2ksDBcdln8G7mk2RX9QyGjH+OVqOCSiVIqS34=
github.com/casbin/casbin/v2 v2.1.2/go.mod h1:YcPU1XXisHhLzuxH9coDNf2FbKpjGlbCg3n9yuLkIJQ=
- github.com/celestiaorg/celestia-core v1.3.3-tm-v0.34.20 h1:XspZtkoIfJ68TCVOOGWkI4W7taQFJLLqSu6GME5RbqU=
- github.com/celestiaorg/celestia-core v1.3.3-tm-v0.34.20/go.mod h1:f4R8qNJrP1CDH0SNwj4jA3NymBLQM4lNdx6Ijmfllbw=
+ github.com/celestiaorg/celestia-core v1.3.5-tm-v0.34.20 h1:Z66gewBIJIQ5T72v2ktu4FMA/sgDE4ok4y9DvfnCUUE=
+ github.com/celestiaorg/celestia-core v1.3.5-tm-v0.34.20/go.mod h1:f4R8qNJrP1CDH0SNwj4jA3NymBLQM4lNdx6Ijmfllbw=
github.com/celestiaorg/cosmos-sdk v1.2.0-sdk-v0.46.0 h1:A1F7L/09uGClsU+kmugMy47Ezv4ll0uxNRNdaGa37T8=
github.com/celestiaorg/cosmos-sdk v1.2.0-sdk-v0.46.0/go.mod h1:OXRC0p460CFKl77uQZWY/8p5uZmDrNum7BmVZDupq0Q=
github.com/celestiaorg/go-leopard v0.1.0 h1:28z2EkvKJIez5J9CEaiiUEC+OxalRLtTGJJ1oScfE1g=
9 changes: 9 additions & 0 deletions pkg/appconsts/appconsts.go
@@ -0,0 +1,9 @@
package appconsts

import (
"bytes"

"github.com/tendermint/tendermint/pkg/consts"
)

var NameSpacedPaddedShareBytes = bytes.Repeat([]byte{0}, consts.MsgShareSize)
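
NameSpacedPaddedShareBytes appears to be the zero-filled data portion of a namespace-padded share (consts.MsgShareSize bytes of zeros). A hypothetical sketch of how a full padding share could be assembled from it, assuming such a share is simply a namespace ID followed by this zero payload; the helper below is illustrative and not part of this commit.

// PaddedShare is a hypothetical helper: it prepends a namespace ID to the
// zero-filled payload to form a complete namespace-padded share.
func PaddedShare(nid []byte) []byte {
	share := make([]byte, 0, len(nid)+len(NameSpacedPaddedShareBytes))
	share = append(share, nid...)
	share = append(share, NameSpacedPaddedShareBytes...)
	return share
}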
123 changes: 123 additions & 0 deletions pkg/shares/contiguous_shares_test.go
@@ -0,0 +1,123 @@
package shares

import (
"context"
"fmt"
"testing"
"time"

"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/tendermint/tendermint/pkg/consts"
coretypes "github.com/tendermint/tendermint/types"
)

func TestContigShareWriter(t *testing.T) {
// note that this test is mainly for debugging purposes, the main round trip
// tests occur in TestMerge and Test_processContiguousShares
w := NewContiguousShareSplitter(consts.TxNamespaceID)
txs := generateRandomContiguousShares(33, 200)
for _, tx := range txs {
rawTx, _ := tx.MarshalDelimited()
w.WriteBytes(rawTx)
}
resShares := w.Export()
rawResTxs, err := processContiguousShares(resShares.RawShares())
resTxs := coretypes.ToTxs(rawResTxs)
require.NoError(t, err)

assert.Equal(t, txs, resTxs)
}

func Test_parseDelimiter(t *testing.T) {
for i := uint64(0); i < 100; i++ {
tx := generateRandomContiguousShares(1, int(i))[0]
input, err := tx.MarshalDelimited()
if err != nil {
panic(err)
}
res, txLen, err := ParseDelimiter(input)
if err != nil {
panic(err)
}
assert.Equal(t, i, txLen)
assert.Equal(t, []byte(tx), res)
}
}

func TestFuzz_processContiguousShares(t *testing.T) {
t.Skip()
// run random shares through processContiguousShares for a minute
ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
defer cancel()
for {
select {
case <-ctx.Done():
return
default:
Test_processContiguousShares(t)
}
}
}

func Test_processContiguousShares(t *testing.T) {
// exactTxShareSize is the length of tx that will fit exactly into a single
// share, accounting for namespace id and the length delimiter prepended to
// each tx
const exactTxShareSize = consts.TxShareSize - 1

type test struct {
name string
txSize int
txCount int
}

// each test is run twice, once using txSize as an exact size, and again
// using it as a cap for randomly sized txs
tests := []test{
{"single small tx", 10, 1},
{"many small txs", 10, 10},
{"single big tx", 1000, 1},
{"many big txs", 1000, 10},
{"single exact size tx", exactTxShareSize, 1},
{"many exact size txs", exactTxShareSize, 10},
}

for _, tc := range tests {
tc := tc

// run the tests with identically sized txs
t.Run(fmt.Sprintf("%s idendically sized ", tc.name), func(t *testing.T) {
txs := generateRandomContiguousShares(tc.txCount, tc.txSize)

shares := SplitTxs(txs)

parsedTxs, err := processContiguousShares(shares)
if err != nil {
t.Error(err)
}

// check that the data parsed is identical
for i := 0; i < len(txs); i++ {
assert.Equal(t, []byte(txs[i]), parsedTxs[i])
}
})

// run the same tests using randomly sized txs with caps of tc.txSize
t.Run(fmt.Sprintf("%s randomly sized", tc.name), func(t *testing.T) {
txs := generateRandomlySizedContiguousShares(tc.txCount, tc.txSize)

shares := SplitTxs(txs)

parsedTxs, err := processContiguousShares(shares)
if err != nil {
t.Error(err)
}

// check that the data parsed is identical to the original
for i := 0; i < len(txs); i++ {
assert.Equal(t, []byte(txs[i]), parsedTxs[i])
}
})
}
}