From 78c3a338fbc880bf8172eac16540768ae8b5d396 Mon Sep 17 00:00:00 2001
From: NagaTulasi
Date: Wed, 25 Sep 2024 12:01:57 +0530
Subject: [PATCH 01/34] update structure

---
 chainclient/broadcast_tx.go | 2 +-
 proto/{sdk/avail => cada}/v1beta1/abci.proto | 2 +-
 proto/{sdk/avail => cada}/v1beta1/genesis.proto | 2 +-
 proto/{sdk/avail => cada}/v1beta1/query.proto | 4 ++--
 proto/{sdk/avail => cada}/v1beta1/tx.proto | 2 +-
 proto/{sdk/avail => cada}/v1beta1/vote_extensions.proto | 4 ++--
 relayer/publish.go | 2 +-
 relayer/relayer.go | 2 +-
 relayer/submit_data_test.go | 2 +-
 {client => x/cada/client}/cli/cli_test.go | 0
 {client => x/cada/client}/cli/keys.go | 2 +-
 {client => x/cada/client}/cli/query.go | 2 +-
 {client => x/cada/client}/cli/tx.go | 4 ++--
 {keeper => x/cada/keeper}/abci.go | 0
 {keeper => x/cada/keeper}/abci_test.go | 2 +-
 {keeper => x/cada/keeper}/blob_status.go | 0
 {keeper => x/cada/keeper}/genesis.go | 2 +-
 {keeper => x/cada/keeper}/keeper.go | 2 +-
 {keeper => x/cada/keeper}/keeper_test.go | 6 +++---
 {keeper => x/cada/keeper}/msg_server.go | 2 +-
 {keeper => x/cada/keeper}/msg_server_test.go | 4 ++--
 {keeper => x/cada/keeper}/query_server.go | 2 +-
 {keeper => x/cada/keeper}/query_server_test.go | 4 ++--
 {keeper => x/cada/keeper}/status_test.go | 2 +-
 {keeper => x/cada/keeper}/store.go | 2 +-
 {keeper => x/cada/keeper}/store_test.go | 2 +-
 {keeper => x/cada/keeper}/vote_extension.go | 0
 {keeper => x/cada/keeper}/vote_extension_test.go | 2 +-
 {module => x/cada/module}/autocli.go | 0
 {module => x/cada/module}/depinject.go | 0
 {module => x/cada/module}/module.go | 6 +++---
 {specs => x/cada/specs}/01_concepts.md | 0
 {specs => x/cada/specs}/02_state.md | 0
 {specs => x/cada/specs}/03_msg.md | 0
 {specs => x/cada/specs}/04_client.md | 0
 {specs => x/cada/specs}/05_prepare_proposal.md | 0
 {specs => x/cada/specs}/06_preblocker.md | 0
 {specs => x/cada/specs}/07_vote_extension.md | 0
 {specs => x/cada/specs}/README.md | 0
 {types => x/cada/types}/abci.pb.go | 0
 {types => x/cada/types}/avail_config.go | 0
 {types => x/cada/types}/codec.go | 0
 {types => x/cada/types}/genesis.go | 0
 {types => x/cada/types}/genesis.pb.go | 0
 {types => x/cada/types}/keys.go | 0
 {types => x/cada/types}/query.pb.go | 0
 {types => x/cada/types}/query.pb.gw.go | 0
 {types => x/cada/types}/tx.pb.go | 0
 {types => x/cada/types}/validator.pb.go | 0
 {types => x/cada/types}/vote_extensions.pb.go | 0
 50 files changed, 34 insertions(+), 34 deletions(-)
 rename proto/{sdk/avail => cada}/v1beta1/abci.proto (96%)
 rename proto/{sdk/avail => cada}/v1beta1/genesis.proto (86%)
 rename proto/{sdk/avail => cada}/v1beta1/query.proto (95%)
 rename proto/{sdk/avail => cada}/v1beta1/tx.proto (98%)
 rename proto/{sdk/avail => cada}/v1beta1/vote_extensions.proto (87%)
 rename {client => x/cada/client}/cli/cli_test.go (100%)
 rename {client => x/cada/client}/cli/keys.go (91%)
 rename {client => x/cada/client}/cli/query.go (95%)
 rename {client => x/cada/client}/cli/tx.go (95%)
 rename {keeper => x/cada/keeper}/abci.go (100%)
 rename {keeper => x/cada/keeper}/abci_test.go (98%)
 rename {keeper => x/cada/keeper}/blob_status.go (100%)
 rename {keeper => x/cada/keeper}/genesis.go (88%)
 rename {keeper => x/cada/keeper}/keeper.go (96%)
 rename {keeper => x/cada/keeper}/keeper_test.go (95%)
 rename {keeper => x/cada/keeper}/msg_server.go (97%)
 rename {keeper => x/cada/keeper}/msg_server_test.go (93%)
 rename {keeper => x/cada/keeper}/query_server.go (96%)
 rename {keeper => x/cada/keeper}/query_server_test.go (85%)
 rename {keeper => x/cada/keeper}/status_test.go (96%)
 rename {keeper => x/cada/keeper}/store.go (99%)
 rename {keeper => x/cada/keeper}/store_test.go (97%)
 rename {keeper => x/cada/keeper}/vote_extension.go (100%)
 rename {keeper => x/cada/keeper}/vote_extension_test.go (96%)
 rename {module => x/cada/module}/autocli.go (100%)
 rename {module => x/cada/module}/depinject.go (100%)
 rename {module => x/cada/module}/module.go (96%)
 rename {specs => x/cada/specs}/01_concepts.md (100%)
 rename {specs => x/cada/specs}/02_state.md (100%)
 rename {specs => x/cada/specs}/03_msg.md (100%)
 rename {specs => x/cada/specs}/04_client.md (100%)
 rename {specs => x/cada/specs}/05_prepare_proposal.md (100%)
 rename {specs => x/cada/specs}/06_preblocker.md (100%)
 rename {specs => x/cada/specs}/07_vote_extension.md (100%)
 rename {specs => x/cada/specs}/README.md (100%)
 rename {types => x/cada/types}/abci.pb.go (100%)
 rename {types => x/cada/types}/avail_config.go (100%)
 rename {types => x/cada/types}/codec.go (100%)
 rename {types => x/cada/types}/genesis.go (100%)
 rename {types => x/cada/types}/genesis.pb.go (100%)
 rename {types => x/cada/types}/keys.go (100%)
 rename {types => x/cada/types}/query.pb.go (100%)
 rename {types => x/cada/types}/query.pb.gw.go (100%)
 rename {types => x/cada/types}/tx.pb.go (100%)
 rename {types => x/cada/types}/validator.pb.go (100%)
 rename {types => x/cada/types}/vote_extensions.pb.go (100%)

diff --git a/chainclient/broadcast_tx.go b/chainclient/broadcast_tx.go
index 5e19ae7..052be22 100644
--- a/chainclient/broadcast_tx.go
+++ b/chainclient/broadcast_tx.go
@@ -12,7 +12,7 @@ import (
 	"github.com/cosmos/cosmos-sdk/crypto/keyring"
 	sdk "github.com/cosmos/cosmos-sdk/types"
 	authtypes "github.com/cosmos/cosmos-sdk/x/auth/types"
-	"github.com/vitwit/avail-da-module/types"
+	"github.com/vitwit/avail-da-module/x/cada/types"
 )
 
 // GetBinPath returns the path to the cada home directory within the user's home directory.
diff --git a/proto/sdk/avail/v1beta1/abci.proto b/proto/cada/v1beta1/abci.proto similarity index 96% rename from proto/sdk/avail/v1beta1/abci.proto rename to proto/cada/v1beta1/abci.proto index 78de09a..b59445a 100644 --- a/proto/sdk/avail/v1beta1/abci.proto +++ b/proto/cada/v1beta1/abci.proto @@ -1,5 +1,5 @@ syntax = "proto3"; -package sdk.avail.v1beta1; +package cada.v1beta1; import "gogoproto/gogo.proto"; diff --git a/proto/sdk/avail/v1beta1/genesis.proto b/proto/cada/v1beta1/genesis.proto similarity index 86% rename from proto/sdk/avail/v1beta1/genesis.proto rename to proto/cada/v1beta1/genesis.proto index 3d9ca4c..ac0e4b6 100644 --- a/proto/sdk/avail/v1beta1/genesis.proto +++ b/proto/cada/v1beta1/genesis.proto @@ -1,5 +1,5 @@ syntax = "proto3"; -package sdk.avail.v1beta1; +package cada.v1beta1; option go_package = "github.com/vitwit/avail-da-module/types"; diff --git a/proto/sdk/avail/v1beta1/query.proto b/proto/cada/v1beta1/query.proto similarity index 95% rename from proto/sdk/avail/v1beta1/query.proto rename to proto/cada/v1beta1/query.proto index e15d5f0..8a6822c 100644 --- a/proto/sdk/avail/v1beta1/query.proto +++ b/proto/cada/v1beta1/query.proto @@ -1,8 +1,8 @@ syntax = "proto3"; -package sdk.avail.v1beta1; +package cada.v1beta1; import "google/api/annotations.proto"; -import "sdk/avail/v1beta1/tx.proto"; +import "cada/v1beta1/tx.proto"; option go_package = "github.com/vitwit/avail-da-module/types"; diff --git a/proto/sdk/avail/v1beta1/tx.proto b/proto/cada/v1beta1/tx.proto similarity index 98% rename from proto/sdk/avail/v1beta1/tx.proto rename to proto/cada/v1beta1/tx.proto index 8b56279..225647f 100644 --- a/proto/sdk/avail/v1beta1/tx.proto +++ b/proto/cada/v1beta1/tx.proto @@ -1,5 +1,5 @@ syntax = "proto3"; -package sdk.avail.v1beta1; +package cada.v1beta1; import "cosmos/msg/v1/msg.proto"; import "gogoproto/gogo.proto"; diff --git a/proto/sdk/avail/v1beta1/vote_extensions.proto b/proto/cada/v1beta1/vote_extensions.proto similarity index 87% rename from proto/sdk/avail/v1beta1/vote_extensions.proto rename to proto/cada/v1beta1/vote_extensions.proto index 958c0be..f498628 100644 --- a/proto/sdk/avail/v1beta1/vote_extensions.proto +++ b/proto/cada/v1beta1/vote_extensions.proto @@ -1,7 +1,7 @@ syntax = "proto3"; -package sdk.avail.v1beta1; +package cada.v1beta1; -import "sdk/avail/v1beta1/tx.proto"; +import "cada/v1beta1/tx.proto"; option go_package = "github.com/vitwit/avail-da-module/types"; diff --git a/relayer/publish.go b/relayer/publish.go index 8402612..6f66ea4 100644 --- a/relayer/publish.go +++ b/relayer/publish.go @@ -7,7 +7,7 @@ import ( "github.com/cosmos/cosmos-sdk/codec" sdk "github.com/cosmos/cosmos-sdk/types" dacli "github.com/vitwit/avail-da-module/chainclient" - "github.com/vitwit/avail-da-module/types" + "github.com/vitwit/avail-da-module/x/cada/types" ) // PostBlocks is called in the PreBlocker. 
The proposer will publish the blocks at this point diff --git a/relayer/relayer.go b/relayer/relayer.go index 72e3021..259057e 100644 --- a/relayer/relayer.go +++ b/relayer/relayer.go @@ -6,7 +6,7 @@ import ( "github.com/cosmos/cosmos-sdk/codec" "github.com/vitwit/avail-da-module/relayer/avail" "github.com/vitwit/avail-da-module/relayer/local" - "github.com/vitwit/avail-da-module/types" + "github.com/vitwit/avail-da-module/x/cada/types" ) // Relayer is responsible for posting new blocks to Avail diff --git a/relayer/submit_data_test.go b/relayer/submit_data_test.go index 6c1c5d3..14bf3f0 100644 --- a/relayer/submit_data_test.go +++ b/relayer/submit_data_test.go @@ -8,7 +8,7 @@ import ( relayer "github.com/vitwit/avail-da-module/relayer" "github.com/vitwit/avail-da-module/relayer/avail" mocks "github.com/vitwit/avail-da-module/relayer/avail/mocks" - cadatypes "github.com/vitwit/avail-da-module/types" + cadatypes "github.com/vitwit/avail-da-module/x/cada/types" ) func TestSubmitDataToAvailClient(t *testing.T) { diff --git a/client/cli/cli_test.go b/x/cada/client/cli/cli_test.go similarity index 100% rename from client/cli/cli_test.go rename to x/cada/client/cli/cli_test.go diff --git a/client/cli/keys.go b/x/cada/client/cli/keys.go similarity index 91% rename from client/cli/keys.go rename to x/cada/client/cli/keys.go index 9752073..9661f21 100644 --- a/client/cli/keys.go +++ b/x/cada/client/cli/keys.go @@ -3,7 +3,7 @@ package cli import ( "github.com/cosmos/cosmos-sdk/client" "github.com/spf13/cobra" - types "github.com/vitwit/avail-da-module/types" + types "github.com/vitwit/avail-da-module/x/cada/types" ) const ( diff --git a/client/cli/query.go b/x/cada/client/cli/query.go similarity index 95% rename from client/cli/query.go rename to x/cada/client/cli/query.go index d6d37cb..d09bd29 100644 --- a/client/cli/query.go +++ b/x/cada/client/cli/query.go @@ -6,7 +6,7 @@ import ( "github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/client/flags" "github.com/spf13/cobra" - types "github.com/vitwit/avail-da-module/types" + types "github.com/vitwit/avail-da-module/x/cada/types" ) // GetQueryCmd returns the root query command for the cada module. diff --git a/client/cli/tx.go b/x/cada/client/cli/tx.go similarity index 95% rename from client/cli/tx.go rename to x/cada/client/cli/tx.go index b589c65..280f870 100644 --- a/client/cli/tx.go +++ b/x/cada/client/cli/tx.go @@ -9,8 +9,8 @@ import ( "github.com/cosmos/cosmos-sdk/client/flags" "github.com/cosmos/cosmos-sdk/client/tx" "github.com/spf13/cobra" - "github.com/vitwit/avail-da-module/keeper" - types "github.com/vitwit/avail-da-module/types" + "github.com/vitwit/avail-da-module/x/cada/keeper" + types "github.com/vitwit/avail-da-module/x/cada/types" ) // NewTxCmd creates and returns a Cobra command for transaction subcommands related to the cada module. 
diff --git a/keeper/abci.go b/x/cada/keeper/abci.go similarity index 100% rename from keeper/abci.go rename to x/cada/keeper/abci.go diff --git a/keeper/abci_test.go b/x/cada/keeper/abci_test.go similarity index 98% rename from keeper/abci_test.go rename to x/cada/keeper/abci_test.go index d4b7241..71259f5 100644 --- a/keeper/abci_test.go +++ b/x/cada/keeper/abci_test.go @@ -4,7 +4,7 @@ import ( abci "github.com/cometbft/cometbft/abci/types" sdk "github.com/cosmos/cosmos-sdk/types" banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" - store "github.com/vitwit/avail-da-module/keeper" + store "github.com/vitwit/avail-da-module/x/cada/keeper" ) func (s *TestSuite) TestPrepareProposal() { diff --git a/keeper/blob_status.go b/x/cada/keeper/blob_status.go similarity index 100% rename from keeper/blob_status.go rename to x/cada/keeper/blob_status.go diff --git a/keeper/genesis.go b/x/cada/keeper/genesis.go similarity index 88% rename from keeper/genesis.go rename to x/cada/keeper/genesis.go index 368155c..7177330 100644 --- a/keeper/genesis.go +++ b/x/cada/keeper/genesis.go @@ -2,7 +2,7 @@ package keeper import ( sdk "github.com/cosmos/cosmos-sdk/types" - "github.com/vitwit/avail-da-module/types" + "github.com/vitwit/avail-da-module/x/cada/types" ) // InitGenesis initializes the module's state from a genesis state. diff --git a/keeper/keeper.go b/x/cada/keeper/keeper.go similarity index 96% rename from keeper/keeper.go rename to x/cada/keeper/keeper.go index 6b1e3fb..5048ebd 100644 --- a/keeper/keeper.go +++ b/x/cada/keeper/keeper.go @@ -11,7 +11,7 @@ import ( sdk "github.com/cosmos/cosmos-sdk/types" "github.com/spf13/cobra" "github.com/vitwit/avail-da-module/relayer" - types "github.com/vitwit/avail-da-module/types" + types "github.com/vitwit/avail-da-module/x/cada/types" ) type Keeper struct { diff --git a/keeper/keeper_test.go b/x/cada/keeper/keeper_test.go similarity index 95% rename from keeper/keeper_test.go rename to x/cada/keeper/keeper_test.go index c1d7ebd..7e2e5ff 100644 --- a/keeper/keeper_test.go +++ b/x/cada/keeper/keeper_test.go @@ -19,10 +19,10 @@ import ( sdk "github.com/cosmos/cosmos-sdk/types" moduletestutil "github.com/cosmos/cosmos-sdk/types/module/testutil" "github.com/stretchr/testify/suite" - "github.com/vitwit/avail-da-module/keeper" - module "github.com/vitwit/avail-da-module/module" relayer "github.com/vitwit/avail-da-module/relayer" - types "github.com/vitwit/avail-da-module/types" + "github.com/vitwit/avail-da-module/x/cada/keeper" + module "github.com/vitwit/avail-da-module/x/cada/module" + types "github.com/vitwit/avail-da-module/x/cada/types" ) type TestSuite struct { diff --git a/keeper/msg_server.go b/x/cada/keeper/msg_server.go similarity index 97% rename from keeper/msg_server.go rename to x/cada/keeper/msg_server.go index 9d85d1b..aad6c7d 100644 --- a/keeper/msg_server.go +++ b/x/cada/keeper/msg_server.go @@ -6,7 +6,7 @@ import ( "fmt" sdk "github.com/cosmos/cosmos-sdk/types" - "github.com/vitwit/avail-da-module/types" + "github.com/vitwit/avail-da-module/x/cada/types" ) type msgServer struct { diff --git a/keeper/msg_server_test.go b/x/cada/keeper/msg_server_test.go similarity index 93% rename from keeper/msg_server_test.go rename to x/cada/keeper/msg_server_test.go index 64b1a8d..7244463 100644 --- a/keeper/msg_server_test.go +++ b/x/cada/keeper/msg_server_test.go @@ -1,8 +1,8 @@ package keeper_test import ( - cadakeeper "github.com/vitwit/avail-da-module/keeper" - "github.com/vitwit/avail-da-module/types" + cadakeeper 
"github.com/vitwit/avail-da-module/x/cada/keeper" + "github.com/vitwit/avail-da-module/x/cada/types" ) func (s *TestSuite) TestMsgServer_UpdateBlobStatus() { diff --git a/keeper/query_server.go b/x/cada/keeper/query_server.go similarity index 96% rename from keeper/query_server.go rename to x/cada/keeper/query_server.go index 3e9b8a9..14860cd 100644 --- a/keeper/query_server.go +++ b/x/cada/keeper/query_server.go @@ -4,7 +4,7 @@ import ( "context" sdk "github.com/cosmos/cosmos-sdk/types" - "github.com/vitwit/avail-da-module/types" + "github.com/vitwit/avail-da-module/x/cada/types" ) var _ types.QueryServer = queryServer{} diff --git a/keeper/query_server_test.go b/x/cada/keeper/query_server_test.go similarity index 85% rename from keeper/query_server_test.go rename to x/cada/keeper/query_server_test.go index bcb7696..7f3287b 100644 --- a/keeper/query_server_test.go +++ b/x/cada/keeper/query_server_test.go @@ -1,8 +1,8 @@ package keeper_test import ( - store "github.com/vitwit/avail-da-module/keeper" - "github.com/vitwit/avail-da-module/types" + store "github.com/vitwit/avail-da-module/x/cada/keeper" + "github.com/vitwit/avail-da-module/x/cada/types" ) func (s *TestSuite) TestSubmitBlobStatus() { diff --git a/keeper/status_test.go b/x/cada/keeper/status_test.go similarity index 96% rename from keeper/status_test.go rename to x/cada/keeper/status_test.go index c9dbfa1..e7328de 100644 --- a/keeper/status_test.go +++ b/x/cada/keeper/status_test.go @@ -1,7 +1,7 @@ package keeper_test import ( - "github.com/vitwit/avail-da-module/types" + "github.com/vitwit/avail-da-module/x/cada/types" ) func (s *TestSuite) TestSetBlobStatusPending() { diff --git a/keeper/store.go b/x/cada/keeper/store.go similarity index 99% rename from keeper/store.go rename to x/cada/keeper/store.go index 558613d..2ced9d0 100644 --- a/keeper/store.go +++ b/x/cada/keeper/store.go @@ -7,7 +7,7 @@ import ( "cosmossdk.io/collections" storetypes2 "cosmossdk.io/store/types" sdk "github.com/cosmos/cosmos-sdk/types" - types "github.com/vitwit/avail-da-module/types" + types "github.com/vitwit/avail-da-module/x/cada/types" ) const ( diff --git a/keeper/store_test.go b/x/cada/keeper/store_test.go similarity index 97% rename from keeper/store_test.go rename to x/cada/keeper/store_test.go index cf6fa75..425823f 100644 --- a/keeper/store_test.go +++ b/x/cada/keeper/store_test.go @@ -1,7 +1,7 @@ package keeper_test import ( - store "github.com/vitwit/avail-da-module/keeper" + store "github.com/vitwit/avail-da-module/x/cada/keeper" ) func (s *TestSuite) TestCanUpdateStatusToPending() { diff --git a/keeper/vote_extension.go b/x/cada/keeper/vote_extension.go similarity index 100% rename from keeper/vote_extension.go rename to x/cada/keeper/vote_extension.go diff --git a/keeper/vote_extension_test.go b/x/cada/keeper/vote_extension_test.go similarity index 96% rename from keeper/vote_extension_test.go rename to x/cada/keeper/vote_extension_test.go index d37a91d..a130bc0 100644 --- a/keeper/vote_extension_test.go +++ b/x/cada/keeper/vote_extension_test.go @@ -2,7 +2,7 @@ package keeper_test import ( abci "github.com/cometbft/cometbft/abci/types" - store "github.com/vitwit/avail-da-module/keeper" + store "github.com/vitwit/avail-da-module/x/cada/keeper" ) func (s *TestSuite) TestExtendVoteHandler() { diff --git a/module/autocli.go b/x/cada/module/autocli.go similarity index 100% rename from module/autocli.go rename to x/cada/module/autocli.go diff --git a/module/depinject.go b/x/cada/module/depinject.go similarity index 100% rename from 
module/depinject.go rename to x/cada/module/depinject.go diff --git a/module/module.go b/x/cada/module/module.go similarity index 96% rename from module/module.go rename to x/cada/module/module.go index 5367651..c3de2a7 100644 --- a/module/module.go +++ b/x/cada/module/module.go @@ -14,9 +14,9 @@ import ( "github.com/cosmos/cosmos-sdk/types/module" gwruntime "github.com/grpc-ecosystem/grpc-gateway/runtime" "github.com/spf13/cobra" - "github.com/vitwit/avail-da-module/client/cli" - "github.com/vitwit/avail-da-module/keeper" - types "github.com/vitwit/avail-da-module/types" + "github.com/vitwit/avail-da-module/x/cada/client/cli" + "github.com/vitwit/avail-da-module/x/cada/keeper" + types "github.com/vitwit/avail-da-module/x/cada/types" ) var ( diff --git a/specs/01_concepts.md b/x/cada/specs/01_concepts.md similarity index 100% rename from specs/01_concepts.md rename to x/cada/specs/01_concepts.md diff --git a/specs/02_state.md b/x/cada/specs/02_state.md similarity index 100% rename from specs/02_state.md rename to x/cada/specs/02_state.md diff --git a/specs/03_msg.md b/x/cada/specs/03_msg.md similarity index 100% rename from specs/03_msg.md rename to x/cada/specs/03_msg.md diff --git a/specs/04_client.md b/x/cada/specs/04_client.md similarity index 100% rename from specs/04_client.md rename to x/cada/specs/04_client.md diff --git a/specs/05_prepare_proposal.md b/x/cada/specs/05_prepare_proposal.md similarity index 100% rename from specs/05_prepare_proposal.md rename to x/cada/specs/05_prepare_proposal.md diff --git a/specs/06_preblocker.md b/x/cada/specs/06_preblocker.md similarity index 100% rename from specs/06_preblocker.md rename to x/cada/specs/06_preblocker.md diff --git a/specs/07_vote_extension.md b/x/cada/specs/07_vote_extension.md similarity index 100% rename from specs/07_vote_extension.md rename to x/cada/specs/07_vote_extension.md diff --git a/specs/README.md b/x/cada/specs/README.md similarity index 100% rename from specs/README.md rename to x/cada/specs/README.md diff --git a/types/abci.pb.go b/x/cada/types/abci.pb.go similarity index 100% rename from types/abci.pb.go rename to x/cada/types/abci.pb.go diff --git a/types/avail_config.go b/x/cada/types/avail_config.go similarity index 100% rename from types/avail_config.go rename to x/cada/types/avail_config.go diff --git a/types/codec.go b/x/cada/types/codec.go similarity index 100% rename from types/codec.go rename to x/cada/types/codec.go diff --git a/types/genesis.go b/x/cada/types/genesis.go similarity index 100% rename from types/genesis.go rename to x/cada/types/genesis.go diff --git a/types/genesis.pb.go b/x/cada/types/genesis.pb.go similarity index 100% rename from types/genesis.pb.go rename to x/cada/types/genesis.pb.go diff --git a/types/keys.go b/x/cada/types/keys.go similarity index 100% rename from types/keys.go rename to x/cada/types/keys.go diff --git a/types/query.pb.go b/x/cada/types/query.pb.go similarity index 100% rename from types/query.pb.go rename to x/cada/types/query.pb.go diff --git a/types/query.pb.gw.go b/x/cada/types/query.pb.gw.go similarity index 100% rename from types/query.pb.gw.go rename to x/cada/types/query.pb.gw.go diff --git a/types/tx.pb.go b/x/cada/types/tx.pb.go similarity index 100% rename from types/tx.pb.go rename to x/cada/types/tx.pb.go diff --git a/types/validator.pb.go b/x/cada/types/validator.pb.go similarity index 100% rename from types/validator.pb.go rename to x/cada/types/validator.pb.go diff --git a/types/vote_extensions.pb.go b/x/cada/types/vote_extensions.pb.go similarity 
index 100% rename from types/vote_extensions.pb.go rename to x/cada/types/vote_extensions.pb.go From 4ffc45fe5398bfc19e13a3c74f4096fed8833c3e Mon Sep 17 00:00:00 2001 From: saiteja Date: Wed, 25 Sep 2024 12:05:29 +0530 Subject: [PATCH 02/34] feat: make proto gen --- go.mod | 2 +- types/abci.pb.go | 765 ++++++++++++++++++++++++++++++ types/genesis.pb.go | 263 +++++++++++ types/query.pb.go | 690 +++++++++++++++++++++++++++ types/query.pb.gw.go | 153 ++++++ types/tx.pb.go | 912 ++++++++++++++++++++++++++++++++++++ types/vote_extensions.pb.go | 367 +++++++++++++++ 7 files changed, 3151 insertions(+), 1 deletion(-) create mode 100644 types/abci.pb.go create mode 100644 types/genesis.pb.go create mode 100644 types/query.pb.go create mode 100644 types/query.pb.gw.go create mode 100644 types/tx.pb.go create mode 100644 types/vote_extensions.pb.go diff --git a/go.mod b/go.mod index 833ffee..db56e7a 100644 --- a/go.mod +++ b/go.mod @@ -171,7 +171,7 @@ require ( google.golang.org/genproto/googleapis/rpc v0.0.0-20240709173604-40e1e62336c5 // indirect google.golang.org/protobuf v1.34.2 // indirect gopkg.in/ini.v1 v1.67.0 // indirect - gopkg.in/yaml.v3 v3.0.1 + gopkg.in/yaml.v3 v3.0.1 // indirect gotest.tools/v3 v3.5.1 // indirect nhooyr.io/websocket v1.8.10 // indirect pgregory.net/rapid v1.1.0 // indirect diff --git a/types/abci.pb.go b/types/abci.pb.go new file mode 100644 index 0000000..6160bda --- /dev/null +++ b/types/abci.pb.go @@ -0,0 +1,765 @@ +// Code generated by protoc-gen-gogo. DO NOT EDIT. +// source: cada/v1beta1/abci.proto + +package types + +import ( + fmt "fmt" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" + io "io" + math "math" + math_bits "math/bits" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package + +// InjectedData represents a message containing data that has been injected. +type InjectedData struct { + // PendingBlocks contains information about blocks that are pending. 
+ PendingBlocks PendingBlocks `protobuf:"bytes,1,opt,name=pending_blocks,json=pendingBlocks,proto3" json:"pending_blocks"` +} + +func (m *InjectedData) Reset() { *m = InjectedData{} } +func (m *InjectedData) String() string { return proto.CompactTextString(m) } +func (*InjectedData) ProtoMessage() {} +func (*InjectedData) Descriptor() ([]byte, []int) { + return fileDescriptor_c10129946240d9b3, []int{0} +} +func (m *InjectedData) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *InjectedData) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_InjectedData.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *InjectedData) XXX_Merge(src proto.Message) { + xxx_messageInfo_InjectedData.Merge(m, src) +} +func (m *InjectedData) XXX_Size() int { + return m.Size() +} +func (m *InjectedData) XXX_DiscardUnknown() { + xxx_messageInfo_InjectedData.DiscardUnknown(m) +} + +var xxx_messageInfo_InjectedData proto.InternalMessageInfo + +func (m *InjectedData) GetPendingBlocks() PendingBlocks { + if m != nil { + return m.PendingBlocks + } + return PendingBlocks{} +} + +// The PendingBlocks message includes a list of block heights that are currently pending. +type PendingBlocks struct { + BlockHeights []int64 `protobuf:"varint,1,rep,packed,name=block_heights,json=blockHeights,proto3" json:"block_heights,omitempty"` +} + +func (m *PendingBlocks) Reset() { *m = PendingBlocks{} } +func (m *PendingBlocks) String() string { return proto.CompactTextString(m) } +func (*PendingBlocks) ProtoMessage() {} +func (*PendingBlocks) Descriptor() ([]byte, []int) { + return fileDescriptor_c10129946240d9b3, []int{1} +} +func (m *PendingBlocks) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *PendingBlocks) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_PendingBlocks.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *PendingBlocks) XXX_Merge(src proto.Message) { + xxx_messageInfo_PendingBlocks.Merge(m, src) +} +func (m *PendingBlocks) XXX_Size() int { + return m.Size() +} +func (m *PendingBlocks) XXX_DiscardUnknown() { + xxx_messageInfo_PendingBlocks.DiscardUnknown(m) +} + +var xxx_messageInfo_PendingBlocks proto.InternalMessageInfo + +func (m *PendingBlocks) GetBlockHeights() []int64 { + if m != nil { + return m.BlockHeights + } + return nil +} + +// UnprovenBlock represents a message containing data about a block that has not yet been proven. 
+type UnprovenBlock struct { + Height int64 `protobuf:"varint,1,opt,name=height,proto3" json:"height,omitempty"` + Block []byte `protobuf:"bytes,2,opt,name=block,proto3" json:"block,omitempty"` +} + +func (m *UnprovenBlock) Reset() { *m = UnprovenBlock{} } +func (m *UnprovenBlock) String() string { return proto.CompactTextString(m) } +func (*UnprovenBlock) ProtoMessage() {} +func (*UnprovenBlock) Descriptor() ([]byte, []int) { + return fileDescriptor_c10129946240d9b3, []int{2} +} +func (m *UnprovenBlock) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *UnprovenBlock) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_UnprovenBlock.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *UnprovenBlock) XXX_Merge(src proto.Message) { + xxx_messageInfo_UnprovenBlock.Merge(m, src) +} +func (m *UnprovenBlock) XXX_Size() int { + return m.Size() +} +func (m *UnprovenBlock) XXX_DiscardUnknown() { + xxx_messageInfo_UnprovenBlock.DiscardUnknown(m) +} + +var xxx_messageInfo_UnprovenBlock proto.InternalMessageInfo + +func (m *UnprovenBlock) GetHeight() int64 { + if m != nil { + return m.Height + } + return 0 +} + +func (m *UnprovenBlock) GetBlock() []byte { + if m != nil { + return m.Block + } + return nil +} + +func init() { + proto.RegisterType((*InjectedData)(nil), "cada.v1beta1.InjectedData") + proto.RegisterType((*PendingBlocks)(nil), "cada.v1beta1.PendingBlocks") + proto.RegisterType((*UnprovenBlock)(nil), "cada.v1beta1.UnprovenBlock") +} + +func init() { proto.RegisterFile("cada/v1beta1/abci.proto", fileDescriptor_c10129946240d9b3) } + +var fileDescriptor_c10129946240d9b3 = []byte{ + // 278 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0x4f, 0x4e, 0x4c, 0x49, + 0xd4, 0x2f, 0x33, 0x4c, 0x4a, 0x2d, 0x49, 0x34, 0xd4, 0x4f, 0x4c, 0x4a, 0xce, 0xd4, 0x2b, 0x28, + 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x01, 0x49, 0xe8, 0x41, 0x25, 0xa4, 0x44, 0xd2, 0xf3, 0xd3, 0xf3, + 0xc1, 0x12, 0xfa, 0x20, 0x16, 0x44, 0x8d, 0x52, 0x04, 0x17, 0x8f, 0x67, 0x5e, 0x56, 0x6a, 0x72, + 0x49, 0x6a, 0x8a, 0x4b, 0x62, 0x49, 0xa2, 0x90, 0x07, 0x17, 0x5f, 0x41, 0x6a, 0x5e, 0x4a, 0x66, + 0x5e, 0x7a, 0x7c, 0x52, 0x4e, 0x7e, 0x72, 0x76, 0xb1, 0x04, 0xa3, 0x02, 0xa3, 0x06, 0xb7, 0x91, + 0xb4, 0x1e, 0xb2, 0x61, 0x7a, 0x01, 0x10, 0x35, 0x4e, 0x60, 0x25, 0x4e, 0x2c, 0x27, 0xee, 0xc9, + 0x33, 0x04, 0xf1, 0x16, 0x20, 0x0b, 0x2a, 0x99, 0x70, 0xf1, 0xa2, 0xa8, 0x12, 0x52, 0xe6, 0xe2, + 0x05, 0x1b, 0x19, 0x9f, 0x91, 0x9a, 0x99, 0x9e, 0x51, 0x02, 0x32, 0x99, 0x59, 0x83, 0x39, 0x88, + 0x07, 0x2c, 0xe8, 0x01, 0x11, 0x53, 0xb2, 0xe5, 0xe2, 0x0d, 0xcd, 0x2b, 0x28, 0xca, 0x2f, 0x4b, + 0xcd, 0x03, 0x6b, 0x13, 0x12, 0xe3, 0x62, 0x83, 0xa8, 0x07, 0x3b, 0x84, 0x39, 0x08, 0xca, 0x13, + 0x12, 0xe1, 0x62, 0x05, 0x6b, 0x94, 0x60, 0x52, 0x60, 0xd4, 0xe0, 0x09, 0x82, 0x70, 0x9c, 0x1c, + 0x4f, 0x3c, 0x92, 0x63, 0xbc, 0xf0, 0x48, 0x8e, 0xf1, 0xc1, 0x23, 0x39, 0xc6, 0x09, 0x8f, 0xe5, + 0x18, 0x2e, 0x3c, 0x96, 0x63, 0xb8, 0xf1, 0x58, 0x8e, 0x21, 0x4a, 0x3d, 0x3d, 0xb3, 0x24, 0xa3, + 0x34, 0x49, 0x2f, 0x39, 0x3f, 0x57, 0xbf, 0x2c, 0xb3, 0xa4, 0x3c, 0xb3, 0x44, 0x3f, 0xb1, 0x2c, + 0x31, 0x33, 0x47, 0x37, 0x25, 0x51, 0x37, 0x37, 0x3f, 0xa5, 0x34, 0x27, 0x55, 0xbf, 0xa4, 0xb2, + 0x20, 0xb5, 0x38, 0x89, 0x0d, 0x1c, 0x30, 0xc6, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, 0x2d, 0xe3, + 0x83, 0xee, 0x57, 0x01, 0x00, 0x00, +} + 
+func (m *InjectedData) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *InjectedData) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *InjectedData) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + { + size, err := m.PendingBlocks.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintAbci(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + return len(dAtA) - i, nil +} + +func (m *PendingBlocks) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *PendingBlocks) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *PendingBlocks) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.BlockHeights) > 0 { + dAtA3 := make([]byte, len(m.BlockHeights)*10) + var j2 int + for _, num1 := range m.BlockHeights { + num := uint64(num1) + for num >= 1<<7 { + dAtA3[j2] = uint8(uint64(num)&0x7f | 0x80) + num >>= 7 + j2++ + } + dAtA3[j2] = uint8(num) + j2++ + } + i -= j2 + copy(dAtA[i:], dAtA3[:j2]) + i = encodeVarintAbci(dAtA, i, uint64(j2)) + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + +func (m *UnprovenBlock) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *UnprovenBlock) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *UnprovenBlock) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.Block) > 0 { + i -= len(m.Block) + copy(dAtA[i:], m.Block) + i = encodeVarintAbci(dAtA, i, uint64(len(m.Block))) + i-- + dAtA[i] = 0x12 + } + if m.Height != 0 { + i = encodeVarintAbci(dAtA, i, uint64(m.Height)) + i-- + dAtA[i] = 0x8 + } + return len(dAtA) - i, nil +} + +func encodeVarintAbci(dAtA []byte, offset int, v uint64) int { + offset -= sovAbci(v) + base := offset + for v >= 1<<7 { + dAtA[offset] = uint8(v&0x7f | 0x80) + v >>= 7 + offset++ + } + dAtA[offset] = uint8(v) + return base +} +func (m *InjectedData) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = m.PendingBlocks.Size() + n += 1 + l + sovAbci(uint64(l)) + return n +} + +func (m *PendingBlocks) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if len(m.BlockHeights) > 0 { + l = 0 + for _, e := range m.BlockHeights { + l += sovAbci(uint64(e)) + } + n += 1 + sovAbci(uint64(l)) + l + } + return n +} + +func (m *UnprovenBlock) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Height != 0 { + n += 1 + sovAbci(uint64(m.Height)) + } + l = len(m.Block) + if l > 0 { + n += 1 + l + sovAbci(uint64(l)) + } + return n +} + +func sovAbci(x uint64) (n int) { + return (math_bits.Len64(x|1) + 6) / 7 +} +func sozAbci(x uint64) (n int) { + return sovAbci(uint64((x << 1) ^ uint64((int64(x) >> 63)))) +} +func (m *InjectedData) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire 
uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowAbci + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: InjectedData: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: InjectedData: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field PendingBlocks", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowAbci + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthAbci + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthAbci + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := m.PendingBlocks.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipAbci(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthAbci + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *PendingBlocks) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowAbci + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: PendingBlocks: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: PendingBlocks: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType == 0 { + var v int64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowAbci + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + v |= int64(b&0x7F) << shift + if b < 0x80 { + break + } + } + m.BlockHeights = append(m.BlockHeights, v) + } else if wireType == 2 { + var packedLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowAbci + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + packedLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if packedLen < 0 { + return ErrInvalidLengthAbci + } + postIndex := iNdEx + packedLen + if postIndex < 0 { + return ErrInvalidLengthAbci + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + var elementCount int + var count int + for _, integer := range dAtA[iNdEx:postIndex] { + if integer < 128 { + count++ + } + } + elementCount = count + if elementCount != 0 && len(m.BlockHeights) == 0 { + m.BlockHeights = make([]int64, 0, elementCount) + } + for iNdEx < postIndex { + var v int64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowAbci + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + v |= int64(b&0x7F) << shift + if b < 0x80 { + 
break + } + } + m.BlockHeights = append(m.BlockHeights, v) + } + } else { + return fmt.Errorf("proto: wrong wireType = %d for field BlockHeights", wireType) + } + default: + iNdEx = preIndex + skippy, err := skipAbci(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthAbci + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *UnprovenBlock) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowAbci + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: UnprovenBlock: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: UnprovenBlock: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Height", wireType) + } + m.Height = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowAbci + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Height |= int64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Block", wireType) + } + var byteLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowAbci + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + byteLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if byteLen < 0 { + return ErrInvalidLengthAbci + } + postIndex := iNdEx + byteLen + if postIndex < 0 { + return ErrInvalidLengthAbci + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Block = append(m.Block[:0], dAtA[iNdEx:postIndex]...) 
+ if m.Block == nil { + m.Block = []byte{} + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipAbci(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthAbci + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func skipAbci(dAtA []byte) (n int, err error) { + l := len(dAtA) + iNdEx := 0 + depth := 0 + for iNdEx < l { + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowAbci + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + wireType := int(wire & 0x7) + switch wireType { + case 0: + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowAbci + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + iNdEx++ + if dAtA[iNdEx-1] < 0x80 { + break + } + } + case 1: + iNdEx += 8 + case 2: + var length int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowAbci + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + length |= (int(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + if length < 0 { + return 0, ErrInvalidLengthAbci + } + iNdEx += length + case 3: + depth++ + case 4: + if depth == 0 { + return 0, ErrUnexpectedEndOfGroupAbci + } + depth-- + case 5: + iNdEx += 4 + default: + return 0, fmt.Errorf("proto: illegal wireType %d", wireType) + } + if iNdEx < 0 { + return 0, ErrInvalidLengthAbci + } + if depth == 0 { + return iNdEx, nil + } + } + return 0, io.ErrUnexpectedEOF +} + +var ( + ErrInvalidLengthAbci = fmt.Errorf("proto: negative length found during unmarshaling") + ErrIntOverflowAbci = fmt.Errorf("proto: integer overflow") + ErrUnexpectedEndOfGroupAbci = fmt.Errorf("proto: unexpected end of group") +) diff --git a/types/genesis.pb.go b/types/genesis.pb.go new file mode 100644 index 0000000..9b74616 --- /dev/null +++ b/types/genesis.pb.go @@ -0,0 +1,263 @@ +// Code generated by protoc-gen-gogo. DO NOT EDIT. +// source: cada/v1beta1/genesis.proto + +package types + +import ( + fmt "fmt" + proto "github.com/cosmos/gogoproto/proto" + io "io" + math "math" + math_bits "math/bits" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package + +// GenesisState defines the avail da module's genesis state. 
+type GenesisState struct { +} + +func (m *GenesisState) Reset() { *m = GenesisState{} } +func (m *GenesisState) String() string { return proto.CompactTextString(m) } +func (*GenesisState) ProtoMessage() {} +func (*GenesisState) Descriptor() ([]byte, []int) { + return fileDescriptor_add756a79c56de92, []int{0} +} +func (m *GenesisState) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *GenesisState) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_GenesisState.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *GenesisState) XXX_Merge(src proto.Message) { + xxx_messageInfo_GenesisState.Merge(m, src) +} +func (m *GenesisState) XXX_Size() int { + return m.Size() +} +func (m *GenesisState) XXX_DiscardUnknown() { + xxx_messageInfo_GenesisState.DiscardUnknown(m) +} + +var xxx_messageInfo_GenesisState proto.InternalMessageInfo + +func init() { + proto.RegisterType((*GenesisState)(nil), "cada.v1beta1.GenesisState") +} + +func init() { proto.RegisterFile("cada/v1beta1/genesis.proto", fileDescriptor_add756a79c56de92) } + +var fileDescriptor_add756a79c56de92 = []byte{ + // 144 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4a, 0x4e, 0x4c, 0x49, + 0xd4, 0x2f, 0x33, 0x4c, 0x4a, 0x2d, 0x49, 0x34, 0xd4, 0x4f, 0x4f, 0xcd, 0x4b, 0x2d, 0xce, 0x2c, + 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x01, 0xc9, 0xe9, 0x41, 0xe5, 0x94, 0xf8, 0xb8, + 0x78, 0xdc, 0x21, 0xd2, 0xc1, 0x25, 0x89, 0x25, 0xa9, 0x4e, 0x8e, 0x27, 0x1e, 0xc9, 0x31, 0x5e, + 0x78, 0x24, 0xc7, 0xf8, 0xe0, 0x91, 0x1c, 0xe3, 0x84, 0xc7, 0x72, 0x0c, 0x17, 0x1e, 0xcb, 0x31, + 0xdc, 0x78, 0x2c, 0xc7, 0x10, 0xa5, 0x9e, 0x9e, 0x59, 0x92, 0x51, 0x9a, 0xa4, 0x97, 0x9c, 0x9f, + 0xab, 0x5f, 0x96, 0x59, 0x52, 0x9e, 0x59, 0xa2, 0x9f, 0x58, 0x96, 0x98, 0x99, 0xa3, 0x9b, 0x92, + 0xa8, 0x9b, 0x9b, 0x9f, 0x52, 0x9a, 0x93, 0xaa, 0x5f, 0x52, 0x59, 0x90, 0x5a, 0x9c, 0xc4, 0x06, + 0xb6, 0xc7, 0x18, 0x10, 0x00, 0x00, 0xff, 0xff, 0xfd, 0xa2, 0xb6, 0xea, 0x85, 0x00, 0x00, 0x00, +} + +func (m *GenesisState) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *GenesisState) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *GenesisState) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + return len(dAtA) - i, nil +} + +func encodeVarintGenesis(dAtA []byte, offset int, v uint64) int { + offset -= sovGenesis(v) + base := offset + for v >= 1<<7 { + dAtA[offset] = uint8(v&0x7f | 0x80) + v >>= 7 + offset++ + } + dAtA[offset] = uint8(v) + return base +} +func (m *GenesisState) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + return n +} + +func sovGenesis(x uint64) (n int) { + return (math_bits.Len64(x|1) + 6) / 7 +} +func sozGenesis(x uint64) (n int) { + return sovGenesis(uint64((x << 1) ^ uint64((int64(x) >> 63)))) +} +func (m *GenesisState) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + 
wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: GenesisState: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: GenesisState: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + default: + iNdEx = preIndex + skippy, err := skipGenesis(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenesis + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func skipGenesis(dAtA []byte) (n int, err error) { + l := len(dAtA) + iNdEx := 0 + depth := 0 + for iNdEx < l { + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowGenesis + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + wireType := int(wire & 0x7) + switch wireType { + case 0: + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowGenesis + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + iNdEx++ + if dAtA[iNdEx-1] < 0x80 { + break + } + } + case 1: + iNdEx += 8 + case 2: + var length int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowGenesis + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + length |= (int(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + if length < 0 { + return 0, ErrInvalidLengthGenesis + } + iNdEx += length + case 3: + depth++ + case 4: + if depth == 0 { + return 0, ErrUnexpectedEndOfGroupGenesis + } + depth-- + case 5: + iNdEx += 4 + default: + return 0, fmt.Errorf("proto: illegal wireType %d", wireType) + } + if iNdEx < 0 { + return 0, ErrInvalidLengthGenesis + } + if depth == 0 { + return iNdEx, nil + } + } + return 0, io.ErrUnexpectedEOF +} + +var ( + ErrInvalidLengthGenesis = fmt.Errorf("proto: negative length found during unmarshaling") + ErrIntOverflowGenesis = fmt.Errorf("proto: integer overflow") + ErrUnexpectedEndOfGroupGenesis = fmt.Errorf("proto: unexpected end of group") +) diff --git a/types/query.pb.go b/types/query.pb.go new file mode 100644 index 0000000..d30b2cb --- /dev/null +++ b/types/query.pb.go @@ -0,0 +1,690 @@ +// Code generated by protoc-gen-gogo. DO NOT EDIT. +// source: cada/v1beta1/query.proto + +package types + +import ( + context "context" + fmt "fmt" + grpc1 "github.com/cosmos/gogoproto/grpc" + proto "github.com/cosmos/gogoproto/proto" + _ "google.golang.org/genproto/googleapis/api/annotations" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + io "io" + math "math" + math_bits "math/bits" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package + +// QuerySubmitBlobStatusRequest is the request type for the SubmitBlobStatus RPC method. 
+type QuerySubmittedBlobStatusRequest struct { +} + +func (m *QuerySubmittedBlobStatusRequest) Reset() { *m = QuerySubmittedBlobStatusRequest{} } +func (m *QuerySubmittedBlobStatusRequest) String() string { return proto.CompactTextString(m) } +func (*QuerySubmittedBlobStatusRequest) ProtoMessage() {} +func (*QuerySubmittedBlobStatusRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_0579d66f58f8318a, []int{0} +} +func (m *QuerySubmittedBlobStatusRequest) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *QuerySubmittedBlobStatusRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_QuerySubmittedBlobStatusRequest.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *QuerySubmittedBlobStatusRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_QuerySubmittedBlobStatusRequest.Merge(m, src) +} +func (m *QuerySubmittedBlobStatusRequest) XXX_Size() int { + return m.Size() +} +func (m *QuerySubmittedBlobStatusRequest) XXX_DiscardUnknown() { + xxx_messageInfo_QuerySubmittedBlobStatusRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_QuerySubmittedBlobStatusRequest proto.InternalMessageInfo + +// QuerySubmitBlobStatusResponse is the response type for the SubmitBlobStatus RPC method. +type QuerySubmittedBlobStatusResponse struct { + // range specifies the range of blocks that are being submitted. + // It contains information about the starting and ending block heights. + Range *Range `protobuf:"bytes,1,opt,name=range,proto3" json:"range,omitempty"` + // status indicates the current status of the blob submission. + // Possible values might include statuses like "pending", "in_voting", "success", or "failure". + Status string `protobuf:"bytes,2,opt,name=status,proto3" json:"status,omitempty"` + // proven_height represents the height up to which data has been successfully proven. + // This height indicates the extent of posted data to avail light client. + ProvenHeight uint64 `protobuf:"varint,3,opt,name=proven_height,json=provenHeight,proto3" json:"proven_height,omitempty"` + // voting_ends_at denotes the block height at which the last voting on the blob ended. + // This provides information on when the last voting period for the blob concluded. 
+ VotingEndsAt string `protobuf:"bytes,4,opt,name=voting_ends_at,json=votingEndsAt,proto3" json:"voting_ends_at,omitempty"` +} + +func (m *QuerySubmittedBlobStatusResponse) Reset() { *m = QuerySubmittedBlobStatusResponse{} } +func (m *QuerySubmittedBlobStatusResponse) String() string { return proto.CompactTextString(m) } +func (*QuerySubmittedBlobStatusResponse) ProtoMessage() {} +func (*QuerySubmittedBlobStatusResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_0579d66f58f8318a, []int{1} +} +func (m *QuerySubmittedBlobStatusResponse) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *QuerySubmittedBlobStatusResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_QuerySubmittedBlobStatusResponse.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *QuerySubmittedBlobStatusResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_QuerySubmittedBlobStatusResponse.Merge(m, src) +} +func (m *QuerySubmittedBlobStatusResponse) XXX_Size() int { + return m.Size() +} +func (m *QuerySubmittedBlobStatusResponse) XXX_DiscardUnknown() { + xxx_messageInfo_QuerySubmittedBlobStatusResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_QuerySubmittedBlobStatusResponse proto.InternalMessageInfo + +func (m *QuerySubmittedBlobStatusResponse) GetRange() *Range { + if m != nil { + return m.Range + } + return nil +} + +func (m *QuerySubmittedBlobStatusResponse) GetStatus() string { + if m != nil { + return m.Status + } + return "" +} + +func (m *QuerySubmittedBlobStatusResponse) GetProvenHeight() uint64 { + if m != nil { + return m.ProvenHeight + } + return 0 +} + +func (m *QuerySubmittedBlobStatusResponse) GetVotingEndsAt() string { + if m != nil { + return m.VotingEndsAt + } + return "" +} + +func init() { + proto.RegisterType((*QuerySubmittedBlobStatusRequest)(nil), "cada.v1beta1.QuerySubmittedBlobStatusRequest") + proto.RegisterType((*QuerySubmittedBlobStatusResponse)(nil), "cada.v1beta1.QuerySubmittedBlobStatusResponse") +} + +func init() { proto.RegisterFile("cada/v1beta1/query.proto", fileDescriptor_0579d66f58f8318a) } + +var fileDescriptor_0579d66f58f8318a = []byte{ + // 357 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x51, 0xcd, 0x4a, 0xeb, 0x40, + 0x14, 0xee, 0xf4, 0xb6, 0x85, 0x3b, 0xb7, 0xf7, 0x2e, 0xa6, 0x5c, 0x09, 0x45, 0x62, 0x6c, 0x15, + 0x2b, 0xd2, 0x84, 0xd6, 0x27, 0x68, 0x41, 0x70, 0x6b, 0xba, 0x73, 0x53, 0x66, 0x9a, 0x21, 0x1d, + 0x48, 0x67, 0xd2, 0xcc, 0x49, 0xb4, 0x5b, 0x9f, 0x40, 0xf0, 0x09, 0x5c, 0xbb, 0xf1, 0x31, 0x5c, + 0x16, 0xdc, 0xb8, 0x94, 0xd6, 0x07, 0x91, 0x66, 0x8a, 0x5a, 0x50, 0x74, 0x79, 0xbe, 0x3f, 0x3e, + 0xbe, 0x83, 0xad, 0x11, 0x0d, 0xa8, 0x97, 0x75, 0x18, 0x07, 0xda, 0xf1, 0xa6, 0x29, 0x4f, 0x66, + 0x6e, 0x9c, 0x28, 0x50, 0xa4, 0xba, 0x62, 0xdc, 0x35, 0x53, 0xdf, 0x0e, 0x95, 0x0a, 0x23, 0xee, + 0xd1, 0x58, 0x78, 0x54, 0x4a, 0x05, 0x14, 0x84, 0x92, 0xda, 0x68, 0xeb, 0xff, 0x37, 0x52, 0xe0, + 0xd2, 0xc0, 0x8d, 0x5d, 0xbc, 0x73, 0xb6, 0x4a, 0x1c, 0xa4, 0x6c, 0x22, 0x00, 0x78, 0xd0, 0x8f, + 0x14, 0x1b, 0x00, 0x85, 0x54, 0xfb, 0x7c, 0x9a, 0x72, 0x0d, 0x8d, 0x7b, 0x84, 0x9d, 0xaf, 0x35, + 0x3a, 0x56, 0x52, 0x73, 0x72, 0x88, 0xcb, 0x09, 0x95, 0x21, 0xb7, 0x90, 0x83, 0x5a, 0x7f, 0xba, + 0x35, 0xf7, 0x63, 0x35, 0xd7, 0x5f, 0x51, 0xbe, 0x51, 0x90, 0x2d, 0x5c, 0xd1, 0xb9, 0xd9, 0x2a, + 0x3a, 0xa8, 0xf5, 0xdb, 0x5f, 
0x5f, 0xa4, 0x89, 0xff, 0xc6, 0x89, 0xca, 0xb8, 0x1c, 0x8e, 0xb9, + 0x08, 0xc7, 0x60, 0xfd, 0x72, 0x50, 0xab, 0xe4, 0x57, 0x0d, 0x78, 0x9a, 0x63, 0x64, 0x0f, 0xff, + 0xcb, 0x14, 0x08, 0x19, 0x0e, 0xb9, 0x0c, 0xf4, 0x90, 0x82, 0x55, 0xca, 0x43, 0xaa, 0x06, 0x3d, + 0x91, 0x81, 0xee, 0x41, 0xf7, 0x0e, 0xe1, 0x72, 0x5e, 0x99, 0xdc, 0x22, 0x5c, 0xfb, 0xa4, 0x37, + 0x69, 0x6f, 0x16, 0xfc, 0x66, 0x83, 0xba, 0xfb, 0x53, 0xb9, 0x99, 0xa3, 0x71, 0x74, 0xf5, 0xf8, + 0x72, 0x53, 0xdc, 0x27, 0x4d, 0x8f, 0x66, 0x54, 0x44, 0x2c, 0x52, 0xec, 0x6d, 0x7b, 0x9d, 0xfb, + 0xde, 0x4d, 0xfd, 0xde, 0xc3, 0xc2, 0x46, 0xf3, 0x85, 0x8d, 0x9e, 0x17, 0x36, 0xba, 0x5e, 0xda, + 0x85, 0xf9, 0xd2, 0x2e, 0x3c, 0x2d, 0xed, 0xc2, 0xf9, 0x41, 0x28, 0x60, 0x9c, 0x32, 0x77, 0xa4, + 0x26, 0x5e, 0x26, 0xe0, 0x42, 0x80, 0xc9, 0x6b, 0x07, 0xb4, 0x3d, 0x51, 0x41, 0x1a, 0x71, 0x0f, + 0x66, 0x31, 0xd7, 0xac, 0x92, 0x7f, 0xf3, 0xf8, 0x35, 0x00, 0x00, 0xff, 0xff, 0xdf, 0x69, 0xec, + 0x0a, 0x2c, 0x02, 0x00, 0x00, +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// QueryClient is the client API for Query service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type QueryClient interface { + // SubmitBlobStatus queries the current status of blob submissions. + SubmittedBlobStatus(ctx context.Context, in *QuerySubmittedBlobStatusRequest, opts ...grpc.CallOption) (*QuerySubmittedBlobStatusResponse, error) +} + +type queryClient struct { + cc grpc1.ClientConn +} + +func NewQueryClient(cc grpc1.ClientConn) QueryClient { + return &queryClient{cc} +} + +func (c *queryClient) SubmittedBlobStatus(ctx context.Context, in *QuerySubmittedBlobStatusRequest, opts ...grpc.CallOption) (*QuerySubmittedBlobStatusResponse, error) { + out := new(QuerySubmittedBlobStatusResponse) + err := c.cc.Invoke(ctx, "/cada.v1beta1.Query/SubmittedBlobStatus", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// QueryServer is the server API for Query service. +type QueryServer interface { + // SubmitBlobStatus queries the current status of blob submissions. + SubmittedBlobStatus(context.Context, *QuerySubmittedBlobStatusRequest) (*QuerySubmittedBlobStatusResponse, error) +} + +// UnimplementedQueryServer can be embedded to have forward compatible implementations. 
+type UnimplementedQueryServer struct { +} + +func (*UnimplementedQueryServer) SubmittedBlobStatus(ctx context.Context, req *QuerySubmittedBlobStatusRequest) (*QuerySubmittedBlobStatusResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method SubmittedBlobStatus not implemented") +} + +func RegisterQueryServer(s grpc1.Server, srv QueryServer) { + s.RegisterService(&_Query_serviceDesc, srv) +} + +func _Query_SubmittedBlobStatus_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(QuerySubmittedBlobStatusRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(QueryServer).SubmittedBlobStatus(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/cada.v1beta1.Query/SubmittedBlobStatus", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(QueryServer).SubmittedBlobStatus(ctx, req.(*QuerySubmittedBlobStatusRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Query_serviceDesc = grpc.ServiceDesc{ + ServiceName: "cada.v1beta1.Query", + HandlerType: (*QueryServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "SubmittedBlobStatus", + Handler: _Query_SubmittedBlobStatus_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "cada/v1beta1/query.proto", +} + +func (m *QuerySubmittedBlobStatusRequest) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *QuerySubmittedBlobStatusRequest) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *QuerySubmittedBlobStatusRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + return len(dAtA) - i, nil +} + +func (m *QuerySubmittedBlobStatusResponse) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *QuerySubmittedBlobStatusResponse) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *QuerySubmittedBlobStatusResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.VotingEndsAt) > 0 { + i -= len(m.VotingEndsAt) + copy(dAtA[i:], m.VotingEndsAt) + i = encodeVarintQuery(dAtA, i, uint64(len(m.VotingEndsAt))) + i-- + dAtA[i] = 0x22 + } + if m.ProvenHeight != 0 { + i = encodeVarintQuery(dAtA, i, uint64(m.ProvenHeight)) + i-- + dAtA[i] = 0x18 + } + if len(m.Status) > 0 { + i -= len(m.Status) + copy(dAtA[i:], m.Status) + i = encodeVarintQuery(dAtA, i, uint64(len(m.Status))) + i-- + dAtA[i] = 0x12 + } + if m.Range != nil { + { + size, err := m.Range.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintQuery(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + +func encodeVarintQuery(dAtA []byte, offset int, v uint64) int { + offset -= sovQuery(v) + base := offset + for v >= 1<<7 { + dAtA[offset] = uint8(v&0x7f | 0x80) + v >>= 7 + offset++ + } + dAtA[offset] = uint8(v) + return base +} +func (m *QuerySubmittedBlobStatusRequest) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l 
+ return n +} + +func (m *QuerySubmittedBlobStatusResponse) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Range != nil { + l = m.Range.Size() + n += 1 + l + sovQuery(uint64(l)) + } + l = len(m.Status) + if l > 0 { + n += 1 + l + sovQuery(uint64(l)) + } + if m.ProvenHeight != 0 { + n += 1 + sovQuery(uint64(m.ProvenHeight)) + } + l = len(m.VotingEndsAt) + if l > 0 { + n += 1 + l + sovQuery(uint64(l)) + } + return n +} + +func sovQuery(x uint64) (n int) { + return (math_bits.Len64(x|1) + 6) / 7 +} +func sozQuery(x uint64) (n int) { + return sovQuery(uint64((x << 1) ^ uint64((int64(x) >> 63)))) +} +func (m *QuerySubmittedBlobStatusRequest) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQuery + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: QuerySubmittedBlobStatusRequest: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: QuerySubmittedBlobStatusRequest: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + default: + iNdEx = preIndex + skippy, err := skipQuery(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthQuery + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *QuerySubmittedBlobStatusResponse) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQuery + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: QuerySubmittedBlobStatusResponse: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: QuerySubmittedBlobStatusResponse: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Range", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQuery + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthQuery + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQuery + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Range == nil { + m.Range = &Range{} + } + if err := m.Range.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Status", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQuery + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) 
+ if intStringLen < 0 { + return ErrInvalidLengthQuery + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthQuery + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Status = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 3: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field ProvenHeight", wireType) + } + m.ProvenHeight = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQuery + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.ProvenHeight |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 4: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field VotingEndsAt", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQuery + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthQuery + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthQuery + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.VotingEndsAt = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipQuery(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthQuery + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func skipQuery(dAtA []byte) (n int, err error) { + l := len(dAtA) + iNdEx := 0 + depth := 0 + for iNdEx < l { + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowQuery + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + wireType := int(wire & 0x7) + switch wireType { + case 0: + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowQuery + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + iNdEx++ + if dAtA[iNdEx-1] < 0x80 { + break + } + } + case 1: + iNdEx += 8 + case 2: + var length int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowQuery + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + length |= (int(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + if length < 0 { + return 0, ErrInvalidLengthQuery + } + iNdEx += length + case 3: + depth++ + case 4: + if depth == 0 { + return 0, ErrUnexpectedEndOfGroupQuery + } + depth-- + case 5: + iNdEx += 4 + default: + return 0, fmt.Errorf("proto: illegal wireType %d", wireType) + } + if iNdEx < 0 { + return 0, ErrInvalidLengthQuery + } + if depth == 0 { + return iNdEx, nil + } + } + return 0, io.ErrUnexpectedEOF +} + +var ( + ErrInvalidLengthQuery = fmt.Errorf("proto: negative length found during unmarshaling") + ErrIntOverflowQuery = fmt.Errorf("proto: integer overflow") + ErrUnexpectedEndOfGroupQuery = fmt.Errorf("proto: unexpected end of group") +) diff --git a/types/query.pb.gw.go b/types/query.pb.gw.go new file mode 100644 index 0000000..800b835 --- /dev/null +++ b/types/query.pb.gw.go @@ -0,0 +1,153 @@ +// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. 
+// source: cada/v1beta1/query.proto + +/* +Package types is a reverse proxy. + +It translates gRPC into RESTful JSON APIs. +*/ +package types + +import ( + "context" + "io" + "net/http" + + "github.com/golang/protobuf/descriptor" + "github.com/golang/protobuf/proto" + "github.com/grpc-ecosystem/grpc-gateway/runtime" + "github.com/grpc-ecosystem/grpc-gateway/utilities" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/status" +) + +// Suppress "imported and not used" errors +var _ codes.Code +var _ io.Reader +var _ status.Status +var _ = runtime.String +var _ = utilities.NewDoubleArray +var _ = descriptor.ForMessage +var _ = metadata.Join + +func request_Query_SubmittedBlobStatus_0(ctx context.Context, marshaler runtime.Marshaler, client QueryClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq QuerySubmittedBlobStatusRequest + var metadata runtime.ServerMetadata + + msg, err := client.SubmittedBlobStatus(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func local_request_Query_SubmittedBlobStatus_0(ctx context.Context, marshaler runtime.Marshaler, server QueryServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq QuerySubmittedBlobStatusRequest + var metadata runtime.ServerMetadata + + msg, err := server.SubmittedBlobStatus(ctx, &protoReq) + return msg, metadata, err + +} + +// RegisterQueryHandlerServer registers the http handlers for service Query to "mux". +// UnaryRPC :call QueryServer directly. +// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. +// Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterQueryHandlerFromEndpoint instead. +func RegisterQueryHandlerServer(ctx context.Context, mux *runtime.ServeMux, server QueryServer) error { + + mux.Handle("GET", pattern_Query_SubmittedBlobStatus_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_Query_SubmittedBlobStatus_0(rctx, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_Query_SubmittedBlobStatus_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + return nil +} + +// RegisterQueryHandlerFromEndpoint is same as RegisterQueryHandler but +// automatically dials to "endpoint" and closes the connection when "ctx" gets done. +func RegisterQueryHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { + conn, err := grpc.Dial(endpoint, opts...) 
+ if err != nil { + return err + } + defer func() { + if err != nil { + if cerr := conn.Close(); cerr != nil { + grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + } + return + } + go func() { + <-ctx.Done() + if cerr := conn.Close(); cerr != nil { + grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) + } + }() + }() + + return RegisterQueryHandler(ctx, mux, conn) +} + +// RegisterQueryHandler registers the http handlers for service Query to "mux". +// The handlers forward requests to the grpc endpoint over "conn". +func RegisterQueryHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { + return RegisterQueryHandlerClient(ctx, mux, NewQueryClient(conn)) +} + +// RegisterQueryHandlerClient registers the http handlers for service Query +// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "QueryClient". +// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "QueryClient" +// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in +// "QueryClient" to call the correct interceptors. +func RegisterQueryHandlerClient(ctx context.Context, mux *runtime.ServeMux, client QueryClient) error { + + mux.Handle("GET", pattern_Query_SubmittedBlobStatus_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_Query_SubmittedBlobStatus_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_Query_SubmittedBlobStatus_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + + return nil +} + +var ( + pattern_Query_SubmittedBlobStatus_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"availblob", "v1beta1", "submitBlobStatus"}, "", runtime.AssumeColonVerbOpt(false))) +) + +var ( + forward_Query_SubmittedBlobStatus_0 = runtime.ForwardResponseMessage +) diff --git a/types/tx.pb.go b/types/tx.pb.go new file mode 100644 index 0000000..09da3f0 --- /dev/null +++ b/types/tx.pb.go @@ -0,0 +1,912 @@ +// Code generated by protoc-gen-gogo. DO NOT EDIT. +// source: cada/v1beta1/tx.proto + +package types + +import ( + context "context" + fmt "fmt" + _ "github.com/cosmos/cosmos-sdk/types/msgservice" + _ "github.com/cosmos/gogoproto/gogoproto" + grpc1 "github.com/cosmos/gogoproto/grpc" + proto "github.com/cosmos/gogoproto/proto" + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" + io "io" + math "math" + math_bits "math/bits" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package + +// BlobStatus defines the statuses for a blob submission +type BlobStatus int32 + +const ( + // Indicates that the blob status is unspecified or not set. + BLOB_STATUS_UNSPECIFIED BlobStatus = 0 + // Indicates that the blob submission failed. + BLOB_STATUS_FAILURE BlobStatus = 1 + // Indicates that the blob submission was successful. + BLOB_STATUS_SUCCESS BlobStatus = 2 + // Indicates that the blob submission is still pending and has not yet been processed. + BLOB_STATUS_PENDING BlobStatus = 3 +) + +var BlobStatus_name = map[int32]string{ + 0: "BLOB_STATUS_UNSPECIFIED", + 1: "BLOB_STATUS_FAILURE", + 2: "BLOB_STATUS_SUCCESS", + 3: "BLOB_STATUS_PENDING", +} + +var BlobStatus_value = map[string]int32{ + "BLOB_STATUS_UNSPECIFIED": 0, + "BLOB_STATUS_FAILURE": 1, + "BLOB_STATUS_SUCCESS": 2, + "BLOB_STATUS_PENDING": 3, +} + +func (x BlobStatus) String() string { + return proto.EnumName(BlobStatus_name, int32(x)) +} + +func (BlobStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_c917a06c648c4256, []int{0} +} + +// Range defines the range of blocks for which the blob is being submitted. +type Range struct { + // The starting block height in the range. Indicates the beginning of the block range. + From uint64 `protobuf:"varint,1,opt,name=from,proto3" json:"from,omitempty"` + // The ending block height in the range. Indicates the end of the block range. + To uint64 `protobuf:"varint,2,opt,name=to,proto3" json:"to,omitempty"` +} + +func (m *Range) Reset() { *m = Range{} } +func (m *Range) String() string { return proto.CompactTextString(m) } +func (*Range) ProtoMessage() {} +func (*Range) Descriptor() ([]byte, []int) { + return fileDescriptor_c917a06c648c4256, []int{0} +} +func (m *Range) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *Range) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_Range.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *Range) XXX_Merge(src proto.Message) { + xxx_messageInfo_Range.Merge(m, src) +} +func (m *Range) XXX_Size() int { + return m.Size() +} +func (m *Range) XXX_DiscardUnknown() { + xxx_messageInfo_Range.DiscardUnknown(m) +} + +var xxx_messageInfo_Range proto.InternalMessageInfo + +func (m *Range) GetFrom() uint64 { + if m != nil { + return m.From + } + return 0 +} + +func (m *Range) GetTo() uint64 { + if m != nil { + return m.To + } + return 0 +} + +// MsgUpdateBlobStatusRequest define a message to update the status of a previously submitted blob. +type MsgUpdateBlobStatusRequest struct { + // Address of the validator updating the blob status. + ValidatorAddress string `protobuf:"bytes,1,opt,name=validator_address,json=validatorAddress,proto3" json:"validator_address,omitempty"` + // range of blocks for which the blob status is being updated. + BlocksRange *Range `protobuf:"bytes,2,opt,name=blocks_range,json=blocksRange,proto3" json:"blocks_range,omitempty"` + // The height at which the blob is stored in the Avail system. This indicates where the blob data is available. + AvailHeight uint64 `protobuf:"varint,3,opt,name=avail_height,json=availHeight,proto3" json:"avail_height,omitempty"` + // The status of the blob submission. 
+ IsSuccess bool `protobuf:"varint,4,opt,name=is_success,json=isSuccess,proto3" json:"is_success,omitempty"` +} + +func (m *MsgUpdateBlobStatusRequest) Reset() { *m = MsgUpdateBlobStatusRequest{} } +func (m *MsgUpdateBlobStatusRequest) String() string { return proto.CompactTextString(m) } +func (*MsgUpdateBlobStatusRequest) ProtoMessage() {} +func (*MsgUpdateBlobStatusRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_c917a06c648c4256, []int{1} +} +func (m *MsgUpdateBlobStatusRequest) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *MsgUpdateBlobStatusRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_MsgUpdateBlobStatusRequest.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *MsgUpdateBlobStatusRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MsgUpdateBlobStatusRequest.Merge(m, src) +} +func (m *MsgUpdateBlobStatusRequest) XXX_Size() int { + return m.Size() +} +func (m *MsgUpdateBlobStatusRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MsgUpdateBlobStatusRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MsgUpdateBlobStatusRequest proto.InternalMessageInfo + +func (m *MsgUpdateBlobStatusRequest) GetValidatorAddress() string { + if m != nil { + return m.ValidatorAddress + } + return "" +} + +func (m *MsgUpdateBlobStatusRequest) GetBlocksRange() *Range { + if m != nil { + return m.BlocksRange + } + return nil +} + +func (m *MsgUpdateBlobStatusRequest) GetAvailHeight() uint64 { + if m != nil { + return m.AvailHeight + } + return 0 +} + +func (m *MsgUpdateBlobStatusRequest) GetIsSuccess() bool { + if m != nil { + return m.IsSuccess + } + return false +} + +// MsgUpdateBlobStatusResponse is the response type for the Msg/UpdateBlobStatus RPC method. 
+type MsgUpdateBlobStatusResponse struct { +} + +func (m *MsgUpdateBlobStatusResponse) Reset() { *m = MsgUpdateBlobStatusResponse{} } +func (m *MsgUpdateBlobStatusResponse) String() string { return proto.CompactTextString(m) } +func (*MsgUpdateBlobStatusResponse) ProtoMessage() {} +func (*MsgUpdateBlobStatusResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_c917a06c648c4256, []int{2} +} +func (m *MsgUpdateBlobStatusResponse) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *MsgUpdateBlobStatusResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_MsgUpdateBlobStatusResponse.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *MsgUpdateBlobStatusResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MsgUpdateBlobStatusResponse.Merge(m, src) +} +func (m *MsgUpdateBlobStatusResponse) XXX_Size() int { + return m.Size() +} +func (m *MsgUpdateBlobStatusResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MsgUpdateBlobStatusResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MsgUpdateBlobStatusResponse proto.InternalMessageInfo + +func init() { + proto.RegisterEnum("cada.v1beta1.BlobStatus", BlobStatus_name, BlobStatus_value) + proto.RegisterType((*Range)(nil), "cada.v1beta1.Range") + proto.RegisterType((*MsgUpdateBlobStatusRequest)(nil), "cada.v1beta1.MsgUpdateBlobStatusRequest") + proto.RegisterType((*MsgUpdateBlobStatusResponse)(nil), "cada.v1beta1.MsgUpdateBlobStatusResponse") +} + +func init() { proto.RegisterFile("cada/v1beta1/tx.proto", fileDescriptor_c917a06c648c4256) } + +var fileDescriptor_c917a06c648c4256 = []byte{ + // 464 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xcf, 0x6e, 0xd3, 0x40, + 0x10, 0xc6, 0xed, 0x24, 0x45, 0x74, 0x12, 0x21, 0xb3, 0x05, 0x12, 0xb9, 0xaa, 0x55, 0x72, 0x21, + 0xa4, 0xaa, 0xad, 0x14, 0x89, 0x03, 0xb7, 0x24, 0x75, 0x21, 0x52, 0x1b, 0x2a, 0xbb, 0xbe, 0x70, + 0xb1, 0xd6, 0xf6, 0xb2, 0xb1, 0xb0, 0xbb, 0xc1, 0xbb, 0x36, 0x7f, 0x4e, 0x88, 0x13, 0x47, 0xde, + 0x81, 0x17, 0xe8, 0x63, 0x70, 0xec, 0x11, 0x89, 0x0b, 0x4a, 0x0e, 0x7d, 0x0d, 0xe4, 0x35, 0x82, + 0x10, 0x40, 0xe2, 0xb4, 0xa3, 0xdf, 0x37, 0xda, 0xf9, 0xbe, 0x9d, 0x85, 0xdb, 0x21, 0x8e, 0xb0, + 0x55, 0x0c, 0x02, 0x22, 0xf0, 0xc0, 0x12, 0xaf, 0xcd, 0x79, 0xc6, 0x04, 0x43, 0xad, 0x12, 0x9b, + 0x3f, 0xb0, 0xde, 0x0e, 0x19, 0x4f, 0x19, 0xb7, 0x52, 0x4e, 0xad, 0x62, 0x50, 0x1e, 0x55, 0x9b, + 0x7e, 0x8b, 0x32, 0xca, 0x64, 0x69, 0x95, 0x55, 0x45, 0xbb, 0x7b, 0xb0, 0xe1, 0xe0, 0x73, 0x4a, + 0x10, 0x82, 0xc6, 0xf3, 0x8c, 0xa5, 0x1d, 0x75, 0x57, 0xed, 0x35, 0x1c, 0x59, 0xa3, 0x1b, 0x50, + 0x13, 0xac, 0x53, 0x93, 0xa4, 0x26, 0x58, 0xf7, 0xab, 0x0a, 0xfa, 0x09, 0xa7, 0xde, 0x3c, 0xc2, + 0x82, 0x8c, 0x12, 0x16, 0xb8, 0x02, 0x8b, 0x9c, 0x3b, 0xe4, 0x65, 0x4e, 0xb8, 0x40, 0x7b, 0x70, + 0xb3, 0xc0, 0x49, 0x1c, 0x61, 0xc1, 0x32, 0x1f, 0x47, 0x51, 0x46, 0x38, 0x97, 0xf7, 0x6d, 0x3a, + 0xda, 0x4f, 0x61, 0x58, 0x71, 0xf4, 0x10, 0x5a, 0x41, 0xc2, 0xc2, 0x17, 0xdc, 0xcf, 0xca, 0xf9, + 0x72, 0x4a, 0xf3, 0x60, 0xcb, 0x5c, 0x0d, 0x63, 0x4a, 0x6b, 0x4e, 0xb3, 0x6a, 0xac, 0x7c, 0xde, + 0x85, 0x16, 0x2e, 0x70, 0x9c, 0xf8, 0x33, 0x12, 0xd3, 0x99, 0xe8, 0xd4, 0xa5, 0xbb, 0xa6, 0x64, + 0x4f, 0x24, 0x42, 0x3b, 0x00, 0x31, 0xf7, 0x79, 0x1e, 0x86, 0xa5, 0x81, 0xc6, 0xae, 0xda, 0xbb, + 0xee, 0x6c, 0xc6, 0xdc, 0xad, 0xc0, 0xa3, 0x3b, 0xef, 0xaf, 0x2e, 0xfa, 0x7f, 
0x3a, 0xed, 0xee, + 0xc0, 0xf6, 0x5f, 0xc3, 0xf1, 0x39, 0x3b, 0xe7, 0xa4, 0xff, 0x16, 0xe0, 0x17, 0x45, 0xdb, 0xd0, + 0x1e, 0x1d, 0x3f, 0x1d, 0xf9, 0xee, 0xd9, 0xf0, 0xcc, 0x73, 0x7d, 0x6f, 0xea, 0x9e, 0xda, 0xe3, + 0xc9, 0xd1, 0xc4, 0x3e, 0xd4, 0x14, 0xd4, 0x86, 0xad, 0x55, 0xf1, 0x68, 0x38, 0x39, 0xf6, 0x1c, + 0x5b, 0x53, 0xd7, 0x05, 0xd7, 0x1b, 0x8f, 0x6d, 0xd7, 0xd5, 0x6a, 0xeb, 0xc2, 0xa9, 0x3d, 0x3d, + 0x9c, 0x4c, 0x1f, 0x6b, 0x75, 0xbd, 0xf1, 0xe1, 0x93, 0xa1, 0x1c, 0xe4, 0x50, 0x3f, 0xe1, 0x14, + 0x51, 0xd0, 0xd6, 0xed, 0xa1, 0xde, 0xef, 0x2f, 0xf6, 0xef, 0xf5, 0xe8, 0xf7, 0xff, 0xa3, 0xb3, + 0xca, 0xaa, 0x6f, 0xbc, 0xbb, 0xba, 0xe8, 0xab, 0xa3, 0xe1, 0xe7, 0x85, 0xa1, 0x5e, 0x2e, 0x0c, + 0xf5, 0xdb, 0xc2, 0x50, 0x3f, 0x2e, 0x0d, 0xe5, 0x72, 0x69, 0x28, 0x5f, 0x96, 0x86, 0xf2, 0xec, + 0x1e, 0x8d, 0xc5, 0x2c, 0x0f, 0xcc, 0x90, 0xa5, 0x56, 0x11, 0x8b, 0x57, 0xb1, 0xb0, 0xe4, 0x06, + 0xf6, 0x23, 0xbc, 0x9f, 0xb2, 0x28, 0x4f, 0x88, 0x25, 0xde, 0xcc, 0x09, 0x0f, 0xae, 0xc9, 0x6f, + 0xf6, 0xe0, 0x7b, 0x00, 0x00, 0x00, 0xff, 0xff, 0xf5, 0xd1, 0x7e, 0xdd, 0xbc, 0x02, 0x00, 0x00, +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// MsgClient is the client API for Msg service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type MsgClient interface { + // UpdateBlobStatus updates the status of a blob submission. + UpdateBlobStatus(ctx context.Context, in *MsgUpdateBlobStatusRequest, opts ...grpc.CallOption) (*MsgUpdateBlobStatusResponse, error) +} + +type msgClient struct { + cc grpc1.ClientConn +} + +func NewMsgClient(cc grpc1.ClientConn) MsgClient { + return &msgClient{cc} +} + +func (c *msgClient) UpdateBlobStatus(ctx context.Context, in *MsgUpdateBlobStatusRequest, opts ...grpc.CallOption) (*MsgUpdateBlobStatusResponse, error) { + out := new(MsgUpdateBlobStatusResponse) + err := c.cc.Invoke(ctx, "/cada.v1beta1.Msg/UpdateBlobStatus", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// MsgServer is the server API for Msg service. +type MsgServer interface { + // UpdateBlobStatus updates the status of a blob submission. + UpdateBlobStatus(context.Context, *MsgUpdateBlobStatusRequest) (*MsgUpdateBlobStatusResponse, error) +} + +// UnimplementedMsgServer can be embedded to have forward compatible implementations. 
+type UnimplementedMsgServer struct { +} + +func (*UnimplementedMsgServer) UpdateBlobStatus(ctx context.Context, req *MsgUpdateBlobStatusRequest) (*MsgUpdateBlobStatusResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method UpdateBlobStatus not implemented") +} + +func RegisterMsgServer(s grpc1.Server, srv MsgServer) { + s.RegisterService(&_Msg_serviceDesc, srv) +} + +func _Msg_UpdateBlobStatus_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MsgUpdateBlobStatusRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MsgServer).UpdateBlobStatus(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/cada.v1beta1.Msg/UpdateBlobStatus", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MsgServer).UpdateBlobStatus(ctx, req.(*MsgUpdateBlobStatusRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Msg_serviceDesc = grpc.ServiceDesc{ + ServiceName: "cada.v1beta1.Msg", + HandlerType: (*MsgServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "UpdateBlobStatus", + Handler: _Msg_UpdateBlobStatus_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "cada/v1beta1/tx.proto", +} + +func (m *Range) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *Range) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *Range) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.To != 0 { + i = encodeVarintTx(dAtA, i, uint64(m.To)) + i-- + dAtA[i] = 0x10 + } + if m.From != 0 { + i = encodeVarintTx(dAtA, i, uint64(m.From)) + i-- + dAtA[i] = 0x8 + } + return len(dAtA) - i, nil +} + +func (m *MsgUpdateBlobStatusRequest) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *MsgUpdateBlobStatusRequest) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *MsgUpdateBlobStatusRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.IsSuccess { + i-- + if m.IsSuccess { + dAtA[i] = 1 + } else { + dAtA[i] = 0 + } + i-- + dAtA[i] = 0x20 + } + if m.AvailHeight != 0 { + i = encodeVarintTx(dAtA, i, uint64(m.AvailHeight)) + i-- + dAtA[i] = 0x18 + } + if m.BlocksRange != nil { + { + size, err := m.BlocksRange.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintTx(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + } + if len(m.ValidatorAddress) > 0 { + i -= len(m.ValidatorAddress) + copy(dAtA[i:], m.ValidatorAddress) + i = encodeVarintTx(dAtA, i, uint64(len(m.ValidatorAddress))) + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + +func (m *MsgUpdateBlobStatusResponse) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *MsgUpdateBlobStatusResponse) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return 
m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *MsgUpdateBlobStatusResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + return len(dAtA) - i, nil +} + +func encodeVarintTx(dAtA []byte, offset int, v uint64) int { + offset -= sovTx(v) + base := offset + for v >= 1<<7 { + dAtA[offset] = uint8(v&0x7f | 0x80) + v >>= 7 + offset++ + } + dAtA[offset] = uint8(v) + return base +} +func (m *Range) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.From != 0 { + n += 1 + sovTx(uint64(m.From)) + } + if m.To != 0 { + n += 1 + sovTx(uint64(m.To)) + } + return n +} + +func (m *MsgUpdateBlobStatusRequest) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = len(m.ValidatorAddress) + if l > 0 { + n += 1 + l + sovTx(uint64(l)) + } + if m.BlocksRange != nil { + l = m.BlocksRange.Size() + n += 1 + l + sovTx(uint64(l)) + } + if m.AvailHeight != 0 { + n += 1 + sovTx(uint64(m.AvailHeight)) + } + if m.IsSuccess { + n += 2 + } + return n +} + +func (m *MsgUpdateBlobStatusResponse) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + return n +} + +func sovTx(x uint64) (n int) { + return (math_bits.Len64(x|1) + 6) / 7 +} +func sozTx(x uint64) (n int) { + return sovTx(uint64((x << 1) ^ uint64((int64(x) >> 63)))) +} +func (m *Range) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowTx + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: Range: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: Range: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field From", wireType) + } + m.From = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowTx + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.From |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 2: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field To", wireType) + } + m.To = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowTx + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.To |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + default: + iNdEx = preIndex + skippy, err := skipTx(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthTx + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *MsgUpdateBlobStatusRequest) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowTx + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: MsgUpdateBlobStatusRequest: 
wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: MsgUpdateBlobStatusRequest: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ValidatorAddress", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowTx + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthTx + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthTx + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.ValidatorAddress = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field BlocksRange", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowTx + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthTx + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthTx + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.BlocksRange == nil { + m.BlocksRange = &Range{} + } + if err := m.BlocksRange.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 3: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field AvailHeight", wireType) + } + m.AvailHeight = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowTx + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.AvailHeight |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 4: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field IsSuccess", wireType) + } + var v int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowTx + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + v |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + m.IsSuccess = bool(v != 0) + default: + iNdEx = preIndex + skippy, err := skipTx(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthTx + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *MsgUpdateBlobStatusResponse) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowTx + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: MsgUpdateBlobStatusResponse: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: MsgUpdateBlobStatusResponse: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + default: + iNdEx = preIndex + skippy, err := skipTx(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 
0 { + return ErrInvalidLengthTx + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func skipTx(dAtA []byte) (n int, err error) { + l := len(dAtA) + iNdEx := 0 + depth := 0 + for iNdEx < l { + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowTx + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + wireType := int(wire & 0x7) + switch wireType { + case 0: + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowTx + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + iNdEx++ + if dAtA[iNdEx-1] < 0x80 { + break + } + } + case 1: + iNdEx += 8 + case 2: + var length int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowTx + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + length |= (int(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + if length < 0 { + return 0, ErrInvalidLengthTx + } + iNdEx += length + case 3: + depth++ + case 4: + if depth == 0 { + return 0, ErrUnexpectedEndOfGroupTx + } + depth-- + case 5: + iNdEx += 4 + default: + return 0, fmt.Errorf("proto: illegal wireType %d", wireType) + } + if iNdEx < 0 { + return 0, ErrInvalidLengthTx + } + if depth == 0 { + return iNdEx, nil + } + } + return 0, io.ErrUnexpectedEOF +} + +var ( + ErrInvalidLengthTx = fmt.Errorf("proto: negative length found during unmarshaling") + ErrIntOverflowTx = fmt.Errorf("proto: integer overflow") + ErrUnexpectedEndOfGroupTx = fmt.Errorf("proto: unexpected end of group") +) diff --git a/types/vote_extensions.pb.go b/types/vote_extensions.pb.go new file mode 100644 index 0000000..7556687 --- /dev/null +++ b/types/vote_extensions.pb.go @@ -0,0 +1,367 @@ +// Code generated by protoc-gen-gogo. DO NOT EDIT. +// source: cada/v1beta1/vote_extensions.proto + +package types + +import ( + fmt "fmt" + proto "github.com/cosmos/gogoproto/proto" + io "io" + math "math" + math_bits "math/bits" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package + +// AvailVoteExtension defines the info which includes in vote extensions +type AvailVoteExtension struct { + // avail_height specifies the height in the Avail chain at which the data has been posted. + AvailHeight int64 `protobuf:"varint,1,opt,name=avail_height,json=availHeight,proto3" json:"avail_height,omitempty"` + // range defines the range of blocks that have been posted to the Avail Data Availability (DA) layer. 
+ Range *Range `protobuf:"bytes,2,opt,name=range,proto3" json:"range,omitempty"` +} + +func (m *AvailVoteExtension) Reset() { *m = AvailVoteExtension{} } +func (m *AvailVoteExtension) String() string { return proto.CompactTextString(m) } +func (*AvailVoteExtension) ProtoMessage() {} +func (*AvailVoteExtension) Descriptor() ([]byte, []int) { + return fileDescriptor_194cb859e020afeb, []int{0} +} +func (m *AvailVoteExtension) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *AvailVoteExtension) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_AvailVoteExtension.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *AvailVoteExtension) XXX_Merge(src proto.Message) { + xxx_messageInfo_AvailVoteExtension.Merge(m, src) +} +func (m *AvailVoteExtension) XXX_Size() int { + return m.Size() +} +func (m *AvailVoteExtension) XXX_DiscardUnknown() { + xxx_messageInfo_AvailVoteExtension.DiscardUnknown(m) +} + +var xxx_messageInfo_AvailVoteExtension proto.InternalMessageInfo + +func (m *AvailVoteExtension) GetAvailHeight() int64 { + if m != nil { + return m.AvailHeight + } + return 0 +} + +func (m *AvailVoteExtension) GetRange() *Range { + if m != nil { + return m.Range + } + return nil +} + +func init() { + proto.RegisterType((*AvailVoteExtension)(nil), "cada.v1beta1.AvailVoteExtension") +} + +func init() { + proto.RegisterFile("cada/v1beta1/vote_extensions.proto", fileDescriptor_194cb859e020afeb) +} + +var fileDescriptor_194cb859e020afeb = []byte{ + // 216 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x4a, 0x4e, 0x4c, 0x49, + 0xd4, 0x2f, 0x33, 0x4c, 0x4a, 0x2d, 0x49, 0x34, 0xd4, 0x2f, 0xcb, 0x2f, 0x49, 0x8d, 0x4f, 0xad, + 0x28, 0x49, 0xcd, 0x2b, 0xce, 0xcc, 0xcf, 0x2b, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, + 0x01, 0xa9, 0xd1, 0x83, 0xaa, 0x91, 0x12, 0x45, 0xd1, 0x51, 0x52, 0x01, 0x51, 0xa4, 0x94, 0xc4, + 0x25, 0xe4, 0x58, 0x96, 0x98, 0x99, 0x13, 0x96, 0x5f, 0x92, 0xea, 0x0a, 0x33, 0x41, 0x48, 0x91, + 0x8b, 0x27, 0x11, 0x24, 0x1a, 0x9f, 0x91, 0x9a, 0x99, 0x9e, 0x51, 0x22, 0xc1, 0xa8, 0xc0, 0xa8, + 0xc1, 0x1c, 0xc4, 0x0d, 0x16, 0xf3, 0x00, 0x0b, 0x09, 0x69, 0x72, 0xb1, 0x16, 0x25, 0xe6, 0xa5, + 0xa7, 0x4a, 0x30, 0x29, 0x30, 0x6a, 0x70, 0x1b, 0x09, 0xeb, 0x21, 0xdb, 0xa6, 0x17, 0x04, 0x92, + 0x0a, 0x82, 0xa8, 0x70, 0x72, 0x3c, 0xf1, 0x48, 0x8e, 0xf1, 0xc2, 0x23, 0x39, 0xc6, 0x07, 0x8f, + 0xe4, 0x18, 0x27, 0x3c, 0x96, 0x63, 0xb8, 0xf0, 0x58, 0x8e, 0xe1, 0xc6, 0x63, 0x39, 0x86, 0x28, + 0xf5, 0xf4, 0xcc, 0x92, 0x8c, 0xd2, 0x24, 0xbd, 0xe4, 0xfc, 0x5c, 0xfd, 0xb2, 0xcc, 0x92, 0xf2, + 0xcc, 0x12, 0x7d, 0xb0, 0x1d, 0xba, 0x29, 0x89, 0xba, 0xb9, 0xf9, 0x29, 0xa5, 0x39, 0xa9, 0xfa, + 0x25, 0x95, 0x05, 0xa9, 0xc5, 0x49, 0x6c, 0x60, 0xd7, 0x1a, 0x03, 0x02, 0x00, 0x00, 0xff, 0xff, + 0x08, 0x21, 0x4f, 0xd7, 0xf8, 0x00, 0x00, 0x00, +} + +func (m *AvailVoteExtension) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *AvailVoteExtension) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *AvailVoteExtension) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.Range != nil { + { + size, err 
:= m.Range.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintVoteExtensions(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + } + if m.AvailHeight != 0 { + i = encodeVarintVoteExtensions(dAtA, i, uint64(m.AvailHeight)) + i-- + dAtA[i] = 0x8 + } + return len(dAtA) - i, nil +} + +func encodeVarintVoteExtensions(dAtA []byte, offset int, v uint64) int { + offset -= sovVoteExtensions(v) + base := offset + for v >= 1<<7 { + dAtA[offset] = uint8(v&0x7f | 0x80) + v >>= 7 + offset++ + } + dAtA[offset] = uint8(v) + return base +} +func (m *AvailVoteExtension) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.AvailHeight != 0 { + n += 1 + sovVoteExtensions(uint64(m.AvailHeight)) + } + if m.Range != nil { + l = m.Range.Size() + n += 1 + l + sovVoteExtensions(uint64(l)) + } + return n +} + +func sovVoteExtensions(x uint64) (n int) { + return (math_bits.Len64(x|1) + 6) / 7 +} +func sozVoteExtensions(x uint64) (n int) { + return sovVoteExtensions(uint64((x << 1) ^ uint64((int64(x) >> 63)))) +} +func (m *AvailVoteExtension) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowVoteExtensions + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: AvailVoteExtension: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: AvailVoteExtension: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field AvailHeight", wireType) + } + m.AvailHeight = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowVoteExtensions + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.AvailHeight |= int64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Range", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowVoteExtensions + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthVoteExtensions + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthVoteExtensions + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Range == nil { + m.Range = &Range{} + } + if err := m.Range.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipVoteExtensions(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthVoteExtensions + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func skipVoteExtensions(dAtA []byte) (n int, err error) { + l := len(dAtA) + iNdEx := 0 + depth := 0 + for iNdEx < l { + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowVoteExtensions + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b 
:= dAtA[iNdEx] + iNdEx++ + wire |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + wireType := int(wire & 0x7) + switch wireType { + case 0: + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowVoteExtensions + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + iNdEx++ + if dAtA[iNdEx-1] < 0x80 { + break + } + } + case 1: + iNdEx += 8 + case 2: + var length int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowVoteExtensions + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + length |= (int(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + if length < 0 { + return 0, ErrInvalidLengthVoteExtensions + } + iNdEx += length + case 3: + depth++ + case 4: + if depth == 0 { + return 0, ErrUnexpectedEndOfGroupVoteExtensions + } + depth-- + case 5: + iNdEx += 4 + default: + return 0, fmt.Errorf("proto: illegal wireType %d", wireType) + } + if iNdEx < 0 { + return 0, ErrInvalidLengthVoteExtensions + } + if depth == 0 { + return iNdEx, nil + } + } + return 0, io.ErrUnexpectedEOF +} + +var ( + ErrInvalidLengthVoteExtensions = fmt.Errorf("proto: negative length found during unmarshaling") + ErrIntOverflowVoteExtensions = fmt.Errorf("proto: integer overflow") + ErrUnexpectedEndOfGroupVoteExtensions = fmt.Errorf("proto: unexpected end of group") +) From 71c0f4ca8662442cdae7fa20bf3ceecb899eb6b6 Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Wed, 25 Sep 2024 12:08:37 +0530 Subject: [PATCH 03/34] fix imports --- simapp/app/app.go | 6 +++--- simapp/app/test_helpers.go | 2 +- simapp/cmd/cada/commads.go | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/simapp/app/app.go b/simapp/app/app.go index 5801f7b..45a3b42 100644 --- a/simapp/app/app.go +++ b/simapp/app/app.go @@ -137,12 +137,12 @@ import ( packetforwardkeeper "github.com/cosmos/ibc-apps/middleware/packet-forward-middleware/v8/packetforward/keeper" packetforwardtypes "github.com/cosmos/ibc-apps/middleware/packet-forward-middleware/v8/packetforward/types" - cadakeeper "github.com/vitwit/avail-da-module/keeper" - cadamodule "github.com/vitwit/avail-da-module/module" cadarelayer "github.com/vitwit/avail-da-module/relayer" "github.com/vitwit/avail-da-module/relayer/avail" httpclient "github.com/vitwit/avail-da-module/relayer/http" - cadatypes "github.com/vitwit/avail-da-module/types" + cadakeeper "github.com/vitwit/avail-da-module/x/cada/keeper" + cadamodule "github.com/vitwit/avail-da-module/x/cada/module" + cadatypes "github.com/vitwit/avail-da-module/x/cada/types" ) const ( diff --git a/simapp/app/test_helpers.go b/simapp/app/test_helpers.go index e2b565e..712e1d9 100644 --- a/simapp/app/test_helpers.go +++ b/simapp/app/test_helpers.go @@ -27,7 +27,7 @@ import ( minttypes "github.com/cosmos/cosmos-sdk/x/mint/types" "github.com/stretchr/testify/require" network "github.com/vitwit/avail-da-module/network" - relayercfg "github.com/vitwit/avail-da-module/types" + relayercfg "github.com/vitwit/avail-da-module/x/cada/types" ) // SetupOptions defines arguments that are passed into `Simapp` constructor. 
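The hunks in this commit only rewrite import paths to the new x/cada/... package locations; there are no behaviour changes. For readers following along, the sketch below shows how the relocated x/cada/types package and its generated Query client (NewQueryClient, QuerySubmittedBlobStatusRequest and the response getters from query.pb.go above) might be used from a standalone program. This is an illustrative sketch only, not part of the patch: the gRPC address localhost:9090 is an assumed node endpoint, and error handling is kept minimal.

package main

import (
	"context"
	"fmt"
	"log"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"

	cadatypes "github.com/vitwit/avail-da-module/x/cada/types"
)

func main() {
	// Dial the node's gRPC endpoint (the address is an assumption for this sketch).
	conn, err := grpc.Dial("localhost:9090", grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatalf("dial: %v", err)
	}
	defer conn.Close()

	// NewQueryClient and QuerySubmittedBlobStatusRequest are generated in x/cada/types/query.pb.go;
	// the request message carries no fields.
	client := cadatypes.NewQueryClient(conn)
	resp, err := client.SubmittedBlobStatus(context.Background(), &cadatypes.QuerySubmittedBlobStatusRequest{})
	if err != nil {
		log.Fatalf("query SubmittedBlobStatus: %v", err)
	}

	// Status, ProvenHeight and VotingEndsAt are the response fields defined earlier in this patch.
	fmt.Println(resp.GetStatus(), resp.GetProvenHeight(), resp.GetVotingEndsAt())
}

The cadatypes alias above mirrors the aliases this commit introduces in simapp/app/app.go and simapp/cmd/cada/commads.go.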
diff --git a/simapp/cmd/cada/commads.go b/simapp/cmd/cada/commads.go index 61b3b8a..6bb4571 100644 --- a/simapp/cmd/cada/commads.go +++ b/simapp/cmd/cada/commads.go @@ -41,8 +41,8 @@ import ( banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" "github.com/cosmos/cosmos-sdk/x/crisis" genutilcli "github.com/cosmos/cosmos-sdk/x/genutil/client/cli" - cadacli "github.com/vitwit/avail-da-module/client/cli" - cadatypes "github.com/vitwit/avail-da-module/types" + cadacli "github.com/vitwit/avail-da-module/x/cada/client/cli" + cadatypes "github.com/vitwit/avail-da-module/x/cada/types" ) // initCometBFTConfig helps to override default CometBFT Config values. From 6aba59da7cbb70aa11246c097aa2862e4377a226 Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Wed, 25 Sep 2024 12:21:40 +0530 Subject: [PATCH 04/34] update proto imports --- proto/cada/v1beta1/abci.proto | 2 +- proto/cada/v1beta1/genesis.proto | 2 +- proto/cada/v1beta1/query.proto | 2 +- proto/cada/v1beta1/tx.proto | 2 +- proto/cada/v1beta1/vote_extensions.proto | 2 +- types/abci.pb.go | 765 ------------------- types/genesis.pb.go | 263 ------- types/query.pb.go | 690 ----------------- types/query.pb.gw.go | 153 ---- types/tx.pb.go | 912 ----------------------- types/vote_extensions.pb.go | 367 --------- 11 files changed, 5 insertions(+), 3155 deletions(-) delete mode 100644 types/abci.pb.go delete mode 100644 types/genesis.pb.go delete mode 100644 types/query.pb.go delete mode 100644 types/query.pb.gw.go delete mode 100644 types/tx.pb.go delete mode 100644 types/vote_extensions.pb.go diff --git a/proto/cada/v1beta1/abci.proto b/proto/cada/v1beta1/abci.proto index b59445a..38c62af 100644 --- a/proto/cada/v1beta1/abci.proto +++ b/proto/cada/v1beta1/abci.proto @@ -3,7 +3,7 @@ package cada.v1beta1; import "gogoproto/gogo.proto"; -option go_package = "github.com/vitwit/avail-da-module/types"; +option go_package = "github.com/vitwit/avail-da-module/x/cada/types"; // InjectedData represents a message containing data that has been injected. message InjectedData { diff --git a/proto/cada/v1beta1/genesis.proto b/proto/cada/v1beta1/genesis.proto index ac0e4b6..2db98b0 100644 --- a/proto/cada/v1beta1/genesis.proto +++ b/proto/cada/v1beta1/genesis.proto @@ -1,7 +1,7 @@ syntax = "proto3"; package cada.v1beta1; -option go_package = "github.com/vitwit/avail-da-module/types"; +option go_package = "github.com/vitwit/avail-da-module/x/cada/types"; // GenesisState defines the avail da module's genesis state. message GenesisState { diff --git a/proto/cada/v1beta1/query.proto b/proto/cada/v1beta1/query.proto index 8a6822c..304685c 100644 --- a/proto/cada/v1beta1/query.proto +++ b/proto/cada/v1beta1/query.proto @@ -4,7 +4,7 @@ package cada.v1beta1; import "google/api/annotations.proto"; import "cada/v1beta1/tx.proto"; -option go_package = "github.com/vitwit/avail-da-module/types"; +option go_package = "github.com/vitwit/avail-da-module/x/cada/types"; // QuerySubmitBlobStatusRequest is the request type for the SubmitBlobStatus RPC method. 
message QuerySubmittedBlobStatusRequest { diff --git a/proto/cada/v1beta1/tx.proto b/proto/cada/v1beta1/tx.proto index 225647f..70a5dde 100644 --- a/proto/cada/v1beta1/tx.proto +++ b/proto/cada/v1beta1/tx.proto @@ -4,7 +4,7 @@ package cada.v1beta1; import "cosmos/msg/v1/msg.proto"; import "gogoproto/gogo.proto"; -option go_package = "github.com/vitwit/avail-da-module/types"; +option go_package = "github.com/vitwit/avail-da-module/x/cada/types"; // Msg defines the Msg service for cada module service Msg { diff --git a/proto/cada/v1beta1/vote_extensions.proto b/proto/cada/v1beta1/vote_extensions.proto index f498628..e6db06a 100644 --- a/proto/cada/v1beta1/vote_extensions.proto +++ b/proto/cada/v1beta1/vote_extensions.proto @@ -3,7 +3,7 @@ package cada.v1beta1; import "cada/v1beta1/tx.proto"; -option go_package = "github.com/vitwit/avail-da-module/types"; +option go_package = "github.com/vitwit/avail-da-module/x/cada/types"; // AvailVoteExtension defines the info which includes in vote extensions message AvailVoteExtension { diff --git a/types/abci.pb.go b/types/abci.pb.go deleted file mode 100644 index 6160bda..0000000 --- a/types/abci.pb.go +++ /dev/null @@ -1,765 +0,0 @@ -// Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: cada/v1beta1/abci.proto - -package types - -import ( - fmt "fmt" - _ "github.com/cosmos/gogoproto/gogoproto" - proto "github.com/cosmos/gogoproto/proto" - io "io" - math "math" - math_bits "math/bits" -) - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package - -// InjectedData represents a message containing data that has been injected. -type InjectedData struct { - // PendingBlocks contains information about blocks that are pending. - PendingBlocks PendingBlocks `protobuf:"bytes,1,opt,name=pending_blocks,json=pendingBlocks,proto3" json:"pending_blocks"` -} - -func (m *InjectedData) Reset() { *m = InjectedData{} } -func (m *InjectedData) String() string { return proto.CompactTextString(m) } -func (*InjectedData) ProtoMessage() {} -func (*InjectedData) Descriptor() ([]byte, []int) { - return fileDescriptor_c10129946240d9b3, []int{0} -} -func (m *InjectedData) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) -} -func (m *InjectedData) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_InjectedData.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalToSizedBuffer(b) - if err != nil { - return nil, err - } - return b[:n], nil - } -} -func (m *InjectedData) XXX_Merge(src proto.Message) { - xxx_messageInfo_InjectedData.Merge(m, src) -} -func (m *InjectedData) XXX_Size() int { - return m.Size() -} -func (m *InjectedData) XXX_DiscardUnknown() { - xxx_messageInfo_InjectedData.DiscardUnknown(m) -} - -var xxx_messageInfo_InjectedData proto.InternalMessageInfo - -func (m *InjectedData) GetPendingBlocks() PendingBlocks { - if m != nil { - return m.PendingBlocks - } - return PendingBlocks{} -} - -// The PendingBlocks message includes a list of block heights that are currently pending. 
-type PendingBlocks struct { - BlockHeights []int64 `protobuf:"varint,1,rep,packed,name=block_heights,json=blockHeights,proto3" json:"block_heights,omitempty"` -} - -func (m *PendingBlocks) Reset() { *m = PendingBlocks{} } -func (m *PendingBlocks) String() string { return proto.CompactTextString(m) } -func (*PendingBlocks) ProtoMessage() {} -func (*PendingBlocks) Descriptor() ([]byte, []int) { - return fileDescriptor_c10129946240d9b3, []int{1} -} -func (m *PendingBlocks) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) -} -func (m *PendingBlocks) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_PendingBlocks.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalToSizedBuffer(b) - if err != nil { - return nil, err - } - return b[:n], nil - } -} -func (m *PendingBlocks) XXX_Merge(src proto.Message) { - xxx_messageInfo_PendingBlocks.Merge(m, src) -} -func (m *PendingBlocks) XXX_Size() int { - return m.Size() -} -func (m *PendingBlocks) XXX_DiscardUnknown() { - xxx_messageInfo_PendingBlocks.DiscardUnknown(m) -} - -var xxx_messageInfo_PendingBlocks proto.InternalMessageInfo - -func (m *PendingBlocks) GetBlockHeights() []int64 { - if m != nil { - return m.BlockHeights - } - return nil -} - -// UnprovenBlock represents a message containing data about a block that has not yet been proven. -type UnprovenBlock struct { - Height int64 `protobuf:"varint,1,opt,name=height,proto3" json:"height,omitempty"` - Block []byte `protobuf:"bytes,2,opt,name=block,proto3" json:"block,omitempty"` -} - -func (m *UnprovenBlock) Reset() { *m = UnprovenBlock{} } -func (m *UnprovenBlock) String() string { return proto.CompactTextString(m) } -func (*UnprovenBlock) ProtoMessage() {} -func (*UnprovenBlock) Descriptor() ([]byte, []int) { - return fileDescriptor_c10129946240d9b3, []int{2} -} -func (m *UnprovenBlock) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) -} -func (m *UnprovenBlock) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_UnprovenBlock.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalToSizedBuffer(b) - if err != nil { - return nil, err - } - return b[:n], nil - } -} -func (m *UnprovenBlock) XXX_Merge(src proto.Message) { - xxx_messageInfo_UnprovenBlock.Merge(m, src) -} -func (m *UnprovenBlock) XXX_Size() int { - return m.Size() -} -func (m *UnprovenBlock) XXX_DiscardUnknown() { - xxx_messageInfo_UnprovenBlock.DiscardUnknown(m) -} - -var xxx_messageInfo_UnprovenBlock proto.InternalMessageInfo - -func (m *UnprovenBlock) GetHeight() int64 { - if m != nil { - return m.Height - } - return 0 -} - -func (m *UnprovenBlock) GetBlock() []byte { - if m != nil { - return m.Block - } - return nil -} - -func init() { - proto.RegisterType((*InjectedData)(nil), "cada.v1beta1.InjectedData") - proto.RegisterType((*PendingBlocks)(nil), "cada.v1beta1.PendingBlocks") - proto.RegisterType((*UnprovenBlock)(nil), "cada.v1beta1.UnprovenBlock") -} - -func init() { proto.RegisterFile("cada/v1beta1/abci.proto", fileDescriptor_c10129946240d9b3) } - -var fileDescriptor_c10129946240d9b3 = []byte{ - // 278 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0x4f, 0x4e, 0x4c, 0x49, - 0xd4, 0x2f, 0x33, 0x4c, 0x4a, 0x2d, 0x49, 0x34, 0xd4, 0x4f, 0x4c, 0x4a, 0xce, 0xd4, 0x2b, 0x28, - 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x01, 0x49, 0xe8, 0x41, 0x25, 0xa4, 0x44, 0xd2, 0xf3, 0xd3, 0xf3, - 0xc1, 0x12, 0xfa, 0x20, 
0x16, 0x44, 0x8d, 0x52, 0x04, 0x17, 0x8f, 0x67, 0x5e, 0x56, 0x6a, 0x72, - 0x49, 0x6a, 0x8a, 0x4b, 0x62, 0x49, 0xa2, 0x90, 0x07, 0x17, 0x5f, 0x41, 0x6a, 0x5e, 0x4a, 0x66, - 0x5e, 0x7a, 0x7c, 0x52, 0x4e, 0x7e, 0x72, 0x76, 0xb1, 0x04, 0xa3, 0x02, 0xa3, 0x06, 0xb7, 0x91, - 0xb4, 0x1e, 0xb2, 0x61, 0x7a, 0x01, 0x10, 0x35, 0x4e, 0x60, 0x25, 0x4e, 0x2c, 0x27, 0xee, 0xc9, - 0x33, 0x04, 0xf1, 0x16, 0x20, 0x0b, 0x2a, 0x99, 0x70, 0xf1, 0xa2, 0xa8, 0x12, 0x52, 0xe6, 0xe2, - 0x05, 0x1b, 0x19, 0x9f, 0x91, 0x9a, 0x99, 0x9e, 0x51, 0x02, 0x32, 0x99, 0x59, 0x83, 0x39, 0x88, - 0x07, 0x2c, 0xe8, 0x01, 0x11, 0x53, 0xb2, 0xe5, 0xe2, 0x0d, 0xcd, 0x2b, 0x28, 0xca, 0x2f, 0x4b, - 0xcd, 0x03, 0x6b, 0x13, 0x12, 0xe3, 0x62, 0x83, 0xa8, 0x07, 0x3b, 0x84, 0x39, 0x08, 0xca, 0x13, - 0x12, 0xe1, 0x62, 0x05, 0x6b, 0x94, 0x60, 0x52, 0x60, 0xd4, 0xe0, 0x09, 0x82, 0x70, 0x9c, 0x1c, - 0x4f, 0x3c, 0x92, 0x63, 0xbc, 0xf0, 0x48, 0x8e, 0xf1, 0xc1, 0x23, 0x39, 0xc6, 0x09, 0x8f, 0xe5, - 0x18, 0x2e, 0x3c, 0x96, 0x63, 0xb8, 0xf1, 0x58, 0x8e, 0x21, 0x4a, 0x3d, 0x3d, 0xb3, 0x24, 0xa3, - 0x34, 0x49, 0x2f, 0x39, 0x3f, 0x57, 0xbf, 0x2c, 0xb3, 0xa4, 0x3c, 0xb3, 0x44, 0x3f, 0xb1, 0x2c, - 0x31, 0x33, 0x47, 0x37, 0x25, 0x51, 0x37, 0x37, 0x3f, 0xa5, 0x34, 0x27, 0x55, 0xbf, 0xa4, 0xb2, - 0x20, 0xb5, 0x38, 0x89, 0x0d, 0x1c, 0x30, 0xc6, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, 0x2d, 0xe3, - 0x83, 0xee, 0x57, 0x01, 0x00, 0x00, -} - -func (m *InjectedData) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBuffer(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *InjectedData) MarshalTo(dAtA []byte) (int, error) { - size := m.Size() - return m.MarshalToSizedBuffer(dAtA[:size]) -} - -func (m *InjectedData) MarshalToSizedBuffer(dAtA []byte) (int, error) { - i := len(dAtA) - _ = i - var l int - _ = l - { - size, err := m.PendingBlocks.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintAbci(dAtA, i, uint64(size)) - } - i-- - dAtA[i] = 0xa - return len(dAtA) - i, nil -} - -func (m *PendingBlocks) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBuffer(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *PendingBlocks) MarshalTo(dAtA []byte) (int, error) { - size := m.Size() - return m.MarshalToSizedBuffer(dAtA[:size]) -} - -func (m *PendingBlocks) MarshalToSizedBuffer(dAtA []byte) (int, error) { - i := len(dAtA) - _ = i - var l int - _ = l - if len(m.BlockHeights) > 0 { - dAtA3 := make([]byte, len(m.BlockHeights)*10) - var j2 int - for _, num1 := range m.BlockHeights { - num := uint64(num1) - for num >= 1<<7 { - dAtA3[j2] = uint8(uint64(num)&0x7f | 0x80) - num >>= 7 - j2++ - } - dAtA3[j2] = uint8(num) - j2++ - } - i -= j2 - copy(dAtA[i:], dAtA3[:j2]) - i = encodeVarintAbci(dAtA, i, uint64(j2)) - i-- - dAtA[i] = 0xa - } - return len(dAtA) - i, nil -} - -func (m *UnprovenBlock) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBuffer(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *UnprovenBlock) MarshalTo(dAtA []byte) (int, error) { - size := m.Size() - return m.MarshalToSizedBuffer(dAtA[:size]) -} - -func (m *UnprovenBlock) MarshalToSizedBuffer(dAtA []byte) (int, error) { - i := len(dAtA) - _ = i - var l int - _ = l - if len(m.Block) > 0 { - i -= len(m.Block) - copy(dAtA[i:], m.Block) - i = encodeVarintAbci(dAtA, 
i, uint64(len(m.Block))) - i-- - dAtA[i] = 0x12 - } - if m.Height != 0 { - i = encodeVarintAbci(dAtA, i, uint64(m.Height)) - i-- - dAtA[i] = 0x8 - } - return len(dAtA) - i, nil -} - -func encodeVarintAbci(dAtA []byte, offset int, v uint64) int { - offset -= sovAbci(v) - base := offset - for v >= 1<<7 { - dAtA[offset] = uint8(v&0x7f | 0x80) - v >>= 7 - offset++ - } - dAtA[offset] = uint8(v) - return base -} -func (m *InjectedData) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = m.PendingBlocks.Size() - n += 1 + l + sovAbci(uint64(l)) - return n -} - -func (m *PendingBlocks) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - if len(m.BlockHeights) > 0 { - l = 0 - for _, e := range m.BlockHeights { - l += sovAbci(uint64(e)) - } - n += 1 + sovAbci(uint64(l)) + l - } - return n -} - -func (m *UnprovenBlock) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - if m.Height != 0 { - n += 1 + sovAbci(uint64(m.Height)) - } - l = len(m.Block) - if l > 0 { - n += 1 + l + sovAbci(uint64(l)) - } - return n -} - -func sovAbci(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 -} -func sozAbci(x uint64) (n int) { - return sovAbci(uint64((x << 1) ^ uint64((int64(x) >> 63)))) -} -func (m *InjectedData) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowAbci - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: InjectedData: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: InjectedData: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field PendingBlocks", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowAbci - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthAbci - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthAbci - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if err := m.PendingBlocks.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipAbci(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLengthAbci - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *PendingBlocks) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowAbci - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: PendingBlocks: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: PendingBlocks: illegal tag 
%d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType == 0 { - var v int64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowAbci - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - v |= int64(b&0x7F) << shift - if b < 0x80 { - break - } - } - m.BlockHeights = append(m.BlockHeights, v) - } else if wireType == 2 { - var packedLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowAbci - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - packedLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if packedLen < 0 { - return ErrInvalidLengthAbci - } - postIndex := iNdEx + packedLen - if postIndex < 0 { - return ErrInvalidLengthAbci - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - var elementCount int - var count int - for _, integer := range dAtA[iNdEx:postIndex] { - if integer < 128 { - count++ - } - } - elementCount = count - if elementCount != 0 && len(m.BlockHeights) == 0 { - m.BlockHeights = make([]int64, 0, elementCount) - } - for iNdEx < postIndex { - var v int64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowAbci - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - v |= int64(b&0x7F) << shift - if b < 0x80 { - break - } - } - m.BlockHeights = append(m.BlockHeights, v) - } - } else { - return fmt.Errorf("proto: wrong wireType = %d for field BlockHeights", wireType) - } - default: - iNdEx = preIndex - skippy, err := skipAbci(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLengthAbci - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *UnprovenBlock) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowAbci - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: UnprovenBlock: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: UnprovenBlock: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field Height", wireType) - } - m.Height = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowAbci - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - m.Height |= int64(b&0x7F) << shift - if b < 0x80 { - break - } - } - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Block", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowAbci - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLengthAbci - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLengthAbci - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Block = append(m.Block[:0], dAtA[iNdEx:postIndex]...) 
- if m.Block == nil { - m.Block = []byte{} - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipAbci(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLengthAbci - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func skipAbci(dAtA []byte) (n int, err error) { - l := len(dAtA) - iNdEx := 0 - depth := 0 - for iNdEx < l { - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowAbci - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - wireType := int(wire & 0x7) - switch wireType { - case 0: - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowAbci - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - iNdEx++ - if dAtA[iNdEx-1] < 0x80 { - break - } - } - case 1: - iNdEx += 8 - case 2: - var length int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowAbci - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - length |= (int(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - if length < 0 { - return 0, ErrInvalidLengthAbci - } - iNdEx += length - case 3: - depth++ - case 4: - if depth == 0 { - return 0, ErrUnexpectedEndOfGroupAbci - } - depth-- - case 5: - iNdEx += 4 - default: - return 0, fmt.Errorf("proto: illegal wireType %d", wireType) - } - if iNdEx < 0 { - return 0, ErrInvalidLengthAbci - } - if depth == 0 { - return iNdEx, nil - } - } - return 0, io.ErrUnexpectedEOF -} - -var ( - ErrInvalidLengthAbci = fmt.Errorf("proto: negative length found during unmarshaling") - ErrIntOverflowAbci = fmt.Errorf("proto: integer overflow") - ErrUnexpectedEndOfGroupAbci = fmt.Errorf("proto: unexpected end of group") -) diff --git a/types/genesis.pb.go b/types/genesis.pb.go deleted file mode 100644 index 9b74616..0000000 --- a/types/genesis.pb.go +++ /dev/null @@ -1,263 +0,0 @@ -// Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: cada/v1beta1/genesis.proto - -package types - -import ( - fmt "fmt" - proto "github.com/cosmos/gogoproto/proto" - io "io" - math "math" - math_bits "math/bits" -) - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package - -// GenesisState defines the avail da module's genesis state. 
-type GenesisState struct { -} - -func (m *GenesisState) Reset() { *m = GenesisState{} } -func (m *GenesisState) String() string { return proto.CompactTextString(m) } -func (*GenesisState) ProtoMessage() {} -func (*GenesisState) Descriptor() ([]byte, []int) { - return fileDescriptor_add756a79c56de92, []int{0} -} -func (m *GenesisState) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) -} -func (m *GenesisState) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_GenesisState.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalToSizedBuffer(b) - if err != nil { - return nil, err - } - return b[:n], nil - } -} -func (m *GenesisState) XXX_Merge(src proto.Message) { - xxx_messageInfo_GenesisState.Merge(m, src) -} -func (m *GenesisState) XXX_Size() int { - return m.Size() -} -func (m *GenesisState) XXX_DiscardUnknown() { - xxx_messageInfo_GenesisState.DiscardUnknown(m) -} - -var xxx_messageInfo_GenesisState proto.InternalMessageInfo - -func init() { - proto.RegisterType((*GenesisState)(nil), "cada.v1beta1.GenesisState") -} - -func init() { proto.RegisterFile("cada/v1beta1/genesis.proto", fileDescriptor_add756a79c56de92) } - -var fileDescriptor_add756a79c56de92 = []byte{ - // 144 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4a, 0x4e, 0x4c, 0x49, - 0xd4, 0x2f, 0x33, 0x4c, 0x4a, 0x2d, 0x49, 0x34, 0xd4, 0x4f, 0x4f, 0xcd, 0x4b, 0x2d, 0xce, 0x2c, - 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x01, 0xc9, 0xe9, 0x41, 0xe5, 0x94, 0xf8, 0xb8, - 0x78, 0xdc, 0x21, 0xd2, 0xc1, 0x25, 0x89, 0x25, 0xa9, 0x4e, 0x8e, 0x27, 0x1e, 0xc9, 0x31, 0x5e, - 0x78, 0x24, 0xc7, 0xf8, 0xe0, 0x91, 0x1c, 0xe3, 0x84, 0xc7, 0x72, 0x0c, 0x17, 0x1e, 0xcb, 0x31, - 0xdc, 0x78, 0x2c, 0xc7, 0x10, 0xa5, 0x9e, 0x9e, 0x59, 0x92, 0x51, 0x9a, 0xa4, 0x97, 0x9c, 0x9f, - 0xab, 0x5f, 0x96, 0x59, 0x52, 0x9e, 0x59, 0xa2, 0x9f, 0x58, 0x96, 0x98, 0x99, 0xa3, 0x9b, 0x92, - 0xa8, 0x9b, 0x9b, 0x9f, 0x52, 0x9a, 0x93, 0xaa, 0x5f, 0x52, 0x59, 0x90, 0x5a, 0x9c, 0xc4, 0x06, - 0xb6, 0xc7, 0x18, 0x10, 0x00, 0x00, 0xff, 0xff, 0xfd, 0xa2, 0xb6, 0xea, 0x85, 0x00, 0x00, 0x00, -} - -func (m *GenesisState) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBuffer(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *GenesisState) MarshalTo(dAtA []byte) (int, error) { - size := m.Size() - return m.MarshalToSizedBuffer(dAtA[:size]) -} - -func (m *GenesisState) MarshalToSizedBuffer(dAtA []byte) (int, error) { - i := len(dAtA) - _ = i - var l int - _ = l - return len(dAtA) - i, nil -} - -func encodeVarintGenesis(dAtA []byte, offset int, v uint64) int { - offset -= sovGenesis(v) - base := offset - for v >= 1<<7 { - dAtA[offset] = uint8(v&0x7f | 0x80) - v >>= 7 - offset++ - } - dAtA[offset] = uint8(v) - return base -} -func (m *GenesisState) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - return n -} - -func sovGenesis(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 -} -func sozGenesis(x uint64) (n int) { - return sovGenesis(uint64((x << 1) ^ uint64((int64(x) >> 63)))) -} -func (m *GenesisState) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowGenesis - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - 
wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: GenesisState: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: GenesisState: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - default: - iNdEx = preIndex - skippy, err := skipGenesis(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLengthGenesis - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func skipGenesis(dAtA []byte) (n int, err error) { - l := len(dAtA) - iNdEx := 0 - depth := 0 - for iNdEx < l { - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowGenesis - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - wireType := int(wire & 0x7) - switch wireType { - case 0: - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowGenesis - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - iNdEx++ - if dAtA[iNdEx-1] < 0x80 { - break - } - } - case 1: - iNdEx += 8 - case 2: - var length int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowGenesis - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - length |= (int(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - if length < 0 { - return 0, ErrInvalidLengthGenesis - } - iNdEx += length - case 3: - depth++ - case 4: - if depth == 0 { - return 0, ErrUnexpectedEndOfGroupGenesis - } - depth-- - case 5: - iNdEx += 4 - default: - return 0, fmt.Errorf("proto: illegal wireType %d", wireType) - } - if iNdEx < 0 { - return 0, ErrInvalidLengthGenesis - } - if depth == 0 { - return iNdEx, nil - } - } - return 0, io.ErrUnexpectedEOF -} - -var ( - ErrInvalidLengthGenesis = fmt.Errorf("proto: negative length found during unmarshaling") - ErrIntOverflowGenesis = fmt.Errorf("proto: integer overflow") - ErrUnexpectedEndOfGroupGenesis = fmt.Errorf("proto: unexpected end of group") -) diff --git a/types/query.pb.go b/types/query.pb.go deleted file mode 100644 index d30b2cb..0000000 --- a/types/query.pb.go +++ /dev/null @@ -1,690 +0,0 @@ -// Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: cada/v1beta1/query.proto - -package types - -import ( - context "context" - fmt "fmt" - grpc1 "github.com/cosmos/gogoproto/grpc" - proto "github.com/cosmos/gogoproto/proto" - _ "google.golang.org/genproto/googleapis/api/annotations" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" - io "io" - math "math" - math_bits "math/bits" -) - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package - -// QuerySubmitBlobStatusRequest is the request type for the SubmitBlobStatus RPC method. 
-type QuerySubmittedBlobStatusRequest struct { -} - -func (m *QuerySubmittedBlobStatusRequest) Reset() { *m = QuerySubmittedBlobStatusRequest{} } -func (m *QuerySubmittedBlobStatusRequest) String() string { return proto.CompactTextString(m) } -func (*QuerySubmittedBlobStatusRequest) ProtoMessage() {} -func (*QuerySubmittedBlobStatusRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_0579d66f58f8318a, []int{0} -} -func (m *QuerySubmittedBlobStatusRequest) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) -} -func (m *QuerySubmittedBlobStatusRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_QuerySubmittedBlobStatusRequest.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalToSizedBuffer(b) - if err != nil { - return nil, err - } - return b[:n], nil - } -} -func (m *QuerySubmittedBlobStatusRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_QuerySubmittedBlobStatusRequest.Merge(m, src) -} -func (m *QuerySubmittedBlobStatusRequest) XXX_Size() int { - return m.Size() -} -func (m *QuerySubmittedBlobStatusRequest) XXX_DiscardUnknown() { - xxx_messageInfo_QuerySubmittedBlobStatusRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_QuerySubmittedBlobStatusRequest proto.InternalMessageInfo - -// QuerySubmitBlobStatusResponse is the response type for the SubmitBlobStatus RPC method. -type QuerySubmittedBlobStatusResponse struct { - // range specifies the range of blocks that are being submitted. - // It contains information about the starting and ending block heights. - Range *Range `protobuf:"bytes,1,opt,name=range,proto3" json:"range,omitempty"` - // status indicates the current status of the blob submission. - // Possible values might include statuses like "pending", "in_voting", "success", or "failure". - Status string `protobuf:"bytes,2,opt,name=status,proto3" json:"status,omitempty"` - // proven_height represents the height up to which data has been successfully proven. - // This height indicates the extent of posted data to avail light client. - ProvenHeight uint64 `protobuf:"varint,3,opt,name=proven_height,json=provenHeight,proto3" json:"proven_height,omitempty"` - // voting_ends_at denotes the block height at which the last voting on the blob ended. - // This provides information on when the last voting period for the blob concluded. 
- VotingEndsAt string `protobuf:"bytes,4,opt,name=voting_ends_at,json=votingEndsAt,proto3" json:"voting_ends_at,omitempty"` -} - -func (m *QuerySubmittedBlobStatusResponse) Reset() { *m = QuerySubmittedBlobStatusResponse{} } -func (m *QuerySubmittedBlobStatusResponse) String() string { return proto.CompactTextString(m) } -func (*QuerySubmittedBlobStatusResponse) ProtoMessage() {} -func (*QuerySubmittedBlobStatusResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_0579d66f58f8318a, []int{1} -} -func (m *QuerySubmittedBlobStatusResponse) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) -} -func (m *QuerySubmittedBlobStatusResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_QuerySubmittedBlobStatusResponse.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalToSizedBuffer(b) - if err != nil { - return nil, err - } - return b[:n], nil - } -} -func (m *QuerySubmittedBlobStatusResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_QuerySubmittedBlobStatusResponse.Merge(m, src) -} -func (m *QuerySubmittedBlobStatusResponse) XXX_Size() int { - return m.Size() -} -func (m *QuerySubmittedBlobStatusResponse) XXX_DiscardUnknown() { - xxx_messageInfo_QuerySubmittedBlobStatusResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_QuerySubmittedBlobStatusResponse proto.InternalMessageInfo - -func (m *QuerySubmittedBlobStatusResponse) GetRange() *Range { - if m != nil { - return m.Range - } - return nil -} - -func (m *QuerySubmittedBlobStatusResponse) GetStatus() string { - if m != nil { - return m.Status - } - return "" -} - -func (m *QuerySubmittedBlobStatusResponse) GetProvenHeight() uint64 { - if m != nil { - return m.ProvenHeight - } - return 0 -} - -func (m *QuerySubmittedBlobStatusResponse) GetVotingEndsAt() string { - if m != nil { - return m.VotingEndsAt - } - return "" -} - -func init() { - proto.RegisterType((*QuerySubmittedBlobStatusRequest)(nil), "cada.v1beta1.QuerySubmittedBlobStatusRequest") - proto.RegisterType((*QuerySubmittedBlobStatusResponse)(nil), "cada.v1beta1.QuerySubmittedBlobStatusResponse") -} - -func init() { proto.RegisterFile("cada/v1beta1/query.proto", fileDescriptor_0579d66f58f8318a) } - -var fileDescriptor_0579d66f58f8318a = []byte{ - // 357 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x51, 0xcd, 0x4a, 0xeb, 0x40, - 0x14, 0xee, 0xf4, 0xb6, 0x85, 0x3b, 0xb7, 0xf7, 0x2e, 0xa6, 0x5c, 0x09, 0x45, 0x62, 0x6c, 0x15, - 0x2b, 0xd2, 0x84, 0xd6, 0x27, 0x68, 0x41, 0x70, 0x6b, 0xba, 0x73, 0x53, 0x66, 0x9a, 0x21, 0x1d, - 0x48, 0x67, 0xd2, 0xcc, 0x49, 0xb4, 0x5b, 0x9f, 0x40, 0xf0, 0x09, 0x5c, 0xbb, 0xf1, 0x31, 0x5c, - 0x16, 0xdc, 0xb8, 0x94, 0xd6, 0x07, 0x91, 0x66, 0x8a, 0x5a, 0x50, 0x74, 0x79, 0xbe, 0x3f, 0x3e, - 0xbe, 0x83, 0xad, 0x11, 0x0d, 0xa8, 0x97, 0x75, 0x18, 0x07, 0xda, 0xf1, 0xa6, 0x29, 0x4f, 0x66, - 0x6e, 0x9c, 0x28, 0x50, 0xa4, 0xba, 0x62, 0xdc, 0x35, 0x53, 0xdf, 0x0e, 0x95, 0x0a, 0x23, 0xee, - 0xd1, 0x58, 0x78, 0x54, 0x4a, 0x05, 0x14, 0x84, 0x92, 0xda, 0x68, 0xeb, 0xff, 0x37, 0x52, 0xe0, - 0xd2, 0xc0, 0x8d, 0x5d, 0xbc, 0x73, 0xb6, 0x4a, 0x1c, 0xa4, 0x6c, 0x22, 0x00, 0x78, 0xd0, 0x8f, - 0x14, 0x1b, 0x00, 0x85, 0x54, 0xfb, 0x7c, 0x9a, 0x72, 0x0d, 0x8d, 0x7b, 0x84, 0x9d, 0xaf, 0x35, - 0x3a, 0x56, 0x52, 0x73, 0x72, 0x88, 0xcb, 0x09, 0x95, 0x21, 0xb7, 0x90, 0x83, 0x5a, 0x7f, 0xba, - 0x35, 0xf7, 0x63, 0x35, 0xd7, 0x5f, 0x51, 0xbe, 0x51, 0x90, 0x2d, 0x5c, 0xd1, 0xb9, 0xd9, 0x2a, - 0x3a, 0xa8, 0xf5, 0xdb, 0x5f, 
0x5f, 0xa4, 0x89, 0xff, 0xc6, 0x89, 0xca, 0xb8, 0x1c, 0x8e, 0xb9, - 0x08, 0xc7, 0x60, 0xfd, 0x72, 0x50, 0xab, 0xe4, 0x57, 0x0d, 0x78, 0x9a, 0x63, 0x64, 0x0f, 0xff, - 0xcb, 0x14, 0x08, 0x19, 0x0e, 0xb9, 0x0c, 0xf4, 0x90, 0x82, 0x55, 0xca, 0x43, 0xaa, 0x06, 0x3d, - 0x91, 0x81, 0xee, 0x41, 0xf7, 0x0e, 0xe1, 0x72, 0x5e, 0x99, 0xdc, 0x22, 0x5c, 0xfb, 0xa4, 0x37, - 0x69, 0x6f, 0x16, 0xfc, 0x66, 0x83, 0xba, 0xfb, 0x53, 0xb9, 0x99, 0xa3, 0x71, 0x74, 0xf5, 0xf8, - 0x72, 0x53, 0xdc, 0x27, 0x4d, 0x8f, 0x66, 0x54, 0x44, 0x2c, 0x52, 0xec, 0x6d, 0x7b, 0x9d, 0xfb, - 0xde, 0x4d, 0xfd, 0xde, 0xc3, 0xc2, 0x46, 0xf3, 0x85, 0x8d, 0x9e, 0x17, 0x36, 0xba, 0x5e, 0xda, - 0x85, 0xf9, 0xd2, 0x2e, 0x3c, 0x2d, 0xed, 0xc2, 0xf9, 0x41, 0x28, 0x60, 0x9c, 0x32, 0x77, 0xa4, - 0x26, 0x5e, 0x26, 0xe0, 0x42, 0x80, 0xc9, 0x6b, 0x07, 0xb4, 0x3d, 0x51, 0x41, 0x1a, 0x71, 0x0f, - 0x66, 0x31, 0xd7, 0xac, 0x92, 0x7f, 0xf3, 0xf8, 0x35, 0x00, 0x00, 0xff, 0xff, 0xdf, 0x69, 0xec, - 0x0a, 0x2c, 0x02, 0x00, 0x00, -} - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConn - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion4 - -// QueryClient is the client API for Query service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type QueryClient interface { - // SubmitBlobStatus queries the current status of blob submissions. - SubmittedBlobStatus(ctx context.Context, in *QuerySubmittedBlobStatusRequest, opts ...grpc.CallOption) (*QuerySubmittedBlobStatusResponse, error) -} - -type queryClient struct { - cc grpc1.ClientConn -} - -func NewQueryClient(cc grpc1.ClientConn) QueryClient { - return &queryClient{cc} -} - -func (c *queryClient) SubmittedBlobStatus(ctx context.Context, in *QuerySubmittedBlobStatusRequest, opts ...grpc.CallOption) (*QuerySubmittedBlobStatusResponse, error) { - out := new(QuerySubmittedBlobStatusResponse) - err := c.cc.Invoke(ctx, "/cada.v1beta1.Query/SubmittedBlobStatus", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// QueryServer is the server API for Query service. -type QueryServer interface { - // SubmitBlobStatus queries the current status of blob submissions. - SubmittedBlobStatus(context.Context, *QuerySubmittedBlobStatusRequest) (*QuerySubmittedBlobStatusResponse, error) -} - -// UnimplementedQueryServer can be embedded to have forward compatible implementations. 
-type UnimplementedQueryServer struct { -} - -func (*UnimplementedQueryServer) SubmittedBlobStatus(ctx context.Context, req *QuerySubmittedBlobStatusRequest) (*QuerySubmittedBlobStatusResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method SubmittedBlobStatus not implemented") -} - -func RegisterQueryServer(s grpc1.Server, srv QueryServer) { - s.RegisterService(&_Query_serviceDesc, srv) -} - -func _Query_SubmittedBlobStatus_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(QuerySubmittedBlobStatusRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(QueryServer).SubmittedBlobStatus(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/cada.v1beta1.Query/SubmittedBlobStatus", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(QueryServer).SubmittedBlobStatus(ctx, req.(*QuerySubmittedBlobStatusRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _Query_serviceDesc = grpc.ServiceDesc{ - ServiceName: "cada.v1beta1.Query", - HandlerType: (*QueryServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "SubmittedBlobStatus", - Handler: _Query_SubmittedBlobStatus_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "cada/v1beta1/query.proto", -} - -func (m *QuerySubmittedBlobStatusRequest) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBuffer(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *QuerySubmittedBlobStatusRequest) MarshalTo(dAtA []byte) (int, error) { - size := m.Size() - return m.MarshalToSizedBuffer(dAtA[:size]) -} - -func (m *QuerySubmittedBlobStatusRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { - i := len(dAtA) - _ = i - var l int - _ = l - return len(dAtA) - i, nil -} - -func (m *QuerySubmittedBlobStatusResponse) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBuffer(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *QuerySubmittedBlobStatusResponse) MarshalTo(dAtA []byte) (int, error) { - size := m.Size() - return m.MarshalToSizedBuffer(dAtA[:size]) -} - -func (m *QuerySubmittedBlobStatusResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { - i := len(dAtA) - _ = i - var l int - _ = l - if len(m.VotingEndsAt) > 0 { - i -= len(m.VotingEndsAt) - copy(dAtA[i:], m.VotingEndsAt) - i = encodeVarintQuery(dAtA, i, uint64(len(m.VotingEndsAt))) - i-- - dAtA[i] = 0x22 - } - if m.ProvenHeight != 0 { - i = encodeVarintQuery(dAtA, i, uint64(m.ProvenHeight)) - i-- - dAtA[i] = 0x18 - } - if len(m.Status) > 0 { - i -= len(m.Status) - copy(dAtA[i:], m.Status) - i = encodeVarintQuery(dAtA, i, uint64(len(m.Status))) - i-- - dAtA[i] = 0x12 - } - if m.Range != nil { - { - size, err := m.Range.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintQuery(dAtA, i, uint64(size)) - } - i-- - dAtA[i] = 0xa - } - return len(dAtA) - i, nil -} - -func encodeVarintQuery(dAtA []byte, offset int, v uint64) int { - offset -= sovQuery(v) - base := offset - for v >= 1<<7 { - dAtA[offset] = uint8(v&0x7f | 0x80) - v >>= 7 - offset++ - } - dAtA[offset] = uint8(v) - return base -} -func (m *QuerySubmittedBlobStatusRequest) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l 
- return n -} - -func (m *QuerySubmittedBlobStatusResponse) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - if m.Range != nil { - l = m.Range.Size() - n += 1 + l + sovQuery(uint64(l)) - } - l = len(m.Status) - if l > 0 { - n += 1 + l + sovQuery(uint64(l)) - } - if m.ProvenHeight != 0 { - n += 1 + sovQuery(uint64(m.ProvenHeight)) - } - l = len(m.VotingEndsAt) - if l > 0 { - n += 1 + l + sovQuery(uint64(l)) - } - return n -} - -func sovQuery(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 -} -func sozQuery(x uint64) (n int) { - return sovQuery(uint64((x << 1) ^ uint64((int64(x) >> 63)))) -} -func (m *QuerySubmittedBlobStatusRequest) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowQuery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: QuerySubmittedBlobStatusRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: QuerySubmittedBlobStatusRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - default: - iNdEx = preIndex - skippy, err := skipQuery(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLengthQuery - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *QuerySubmittedBlobStatusResponse) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowQuery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: QuerySubmittedBlobStatusResponse: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: QuerySubmittedBlobStatusResponse: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Range", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowQuery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthQuery - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthQuery - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if m.Range == nil { - m.Range = &Range{} - } - if err := m.Range.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Status", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowQuery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) 
- if intStringLen < 0 { - return ErrInvalidLengthQuery - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthQuery - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Status = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 3: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field ProvenHeight", wireType) - } - m.ProvenHeight = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowQuery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - m.ProvenHeight |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - case 4: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field VotingEndsAt", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowQuery - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthQuery - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthQuery - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.VotingEndsAt = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipQuery(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLengthQuery - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func skipQuery(dAtA []byte) (n int, err error) { - l := len(dAtA) - iNdEx := 0 - depth := 0 - for iNdEx < l { - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowQuery - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - wireType := int(wire & 0x7) - switch wireType { - case 0: - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowQuery - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - iNdEx++ - if dAtA[iNdEx-1] < 0x80 { - break - } - } - case 1: - iNdEx += 8 - case 2: - var length int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowQuery - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - length |= (int(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - if length < 0 { - return 0, ErrInvalidLengthQuery - } - iNdEx += length - case 3: - depth++ - case 4: - if depth == 0 { - return 0, ErrUnexpectedEndOfGroupQuery - } - depth-- - case 5: - iNdEx += 4 - default: - return 0, fmt.Errorf("proto: illegal wireType %d", wireType) - } - if iNdEx < 0 { - return 0, ErrInvalidLengthQuery - } - if depth == 0 { - return iNdEx, nil - } - } - return 0, io.ErrUnexpectedEOF -} - -var ( - ErrInvalidLengthQuery = fmt.Errorf("proto: negative length found during unmarshaling") - ErrIntOverflowQuery = fmt.Errorf("proto: integer overflow") - ErrUnexpectedEndOfGroupQuery = fmt.Errorf("proto: unexpected end of group") -) diff --git a/types/query.pb.gw.go b/types/query.pb.gw.go deleted file mode 100644 index 800b835..0000000 --- a/types/query.pb.gw.go +++ /dev/null @@ -1,153 +0,0 @@ -// Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. 
-// source: cada/v1beta1/query.proto - -/* -Package types is a reverse proxy. - -It translates gRPC into RESTful JSON APIs. -*/ -package types - -import ( - "context" - "io" - "net/http" - - "github.com/golang/protobuf/descriptor" - "github.com/golang/protobuf/proto" - "github.com/grpc-ecosystem/grpc-gateway/runtime" - "github.com/grpc-ecosystem/grpc-gateway/utilities" - "google.golang.org/grpc" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/grpclog" - "google.golang.org/grpc/metadata" - "google.golang.org/grpc/status" -) - -// Suppress "imported and not used" errors -var _ codes.Code -var _ io.Reader -var _ status.Status -var _ = runtime.String -var _ = utilities.NewDoubleArray -var _ = descriptor.ForMessage -var _ = metadata.Join - -func request_Query_SubmittedBlobStatus_0(ctx context.Context, marshaler runtime.Marshaler, client QueryClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq QuerySubmittedBlobStatusRequest - var metadata runtime.ServerMetadata - - msg, err := client.SubmittedBlobStatus(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) - return msg, metadata, err - -} - -func local_request_Query_SubmittedBlobStatus_0(ctx context.Context, marshaler runtime.Marshaler, server QueryServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { - var protoReq QuerySubmittedBlobStatusRequest - var metadata runtime.ServerMetadata - - msg, err := server.SubmittedBlobStatus(ctx, &protoReq) - return msg, metadata, err - -} - -// RegisterQueryHandlerServer registers the http handlers for service Query to "mux". -// UnaryRPC :call QueryServer directly. -// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. -// Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterQueryHandlerFromEndpoint instead. -func RegisterQueryHandlerServer(ctx context.Context, mux *runtime.ServeMux, server QueryServer) error { - - mux.Handle("GET", pattern_Query_SubmittedBlobStatus_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - var stream runtime.ServerTransportStream - ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := local_request_Query_SubmittedBlobStatus_0(rctx, inboundMarshaler, server, req, pathParams) - md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) - ctx = runtime.NewServerMetadataContext(ctx, md) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - - forward_Query_SubmittedBlobStatus_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - - }) - - return nil -} - -// RegisterQueryHandlerFromEndpoint is same as RegisterQueryHandler but -// automatically dials to "endpoint" and closes the connection when "ctx" gets done. -func RegisterQueryHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) { - conn, err := grpc.Dial(endpoint, opts...) 
- if err != nil { - return err - } - defer func() { - if err != nil { - if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) - } - return - } - go func() { - <-ctx.Done() - if cerr := conn.Close(); cerr != nil { - grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr) - } - }() - }() - - return RegisterQueryHandler(ctx, mux, conn) -} - -// RegisterQueryHandler registers the http handlers for service Query to "mux". -// The handlers forward requests to the grpc endpoint over "conn". -func RegisterQueryHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error { - return RegisterQueryHandlerClient(ctx, mux, NewQueryClient(conn)) -} - -// RegisterQueryHandlerClient registers the http handlers for service Query -// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "QueryClient". -// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "QueryClient" -// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in -// "QueryClient" to call the correct interceptors. -func RegisterQueryHandlerClient(ctx context.Context, mux *runtime.ServeMux, client QueryClient) error { - - mux.Handle("GET", pattern_Query_SubmittedBlobStatus_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { - ctx, cancel := context.WithCancel(req.Context()) - defer cancel() - inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) - rctx, err := runtime.AnnotateContext(ctx, mux, req) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - resp, md, err := request_Query_SubmittedBlobStatus_0(rctx, inboundMarshaler, client, req, pathParams) - ctx = runtime.NewServerMetadataContext(ctx, md) - if err != nil { - runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) - return - } - - forward_Query_SubmittedBlobStatus_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) - - }) - - return nil -} - -var ( - pattern_Query_SubmittedBlobStatus_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"availblob", "v1beta1", "submitBlobStatus"}, "", runtime.AssumeColonVerbOpt(false))) -) - -var ( - forward_Query_SubmittedBlobStatus_0 = runtime.ForwardResponseMessage -) diff --git a/types/tx.pb.go b/types/tx.pb.go deleted file mode 100644 index 09da3f0..0000000 --- a/types/tx.pb.go +++ /dev/null @@ -1,912 +0,0 @@ -// Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: cada/v1beta1/tx.proto - -package types - -import ( - context "context" - fmt "fmt" - _ "github.com/cosmos/cosmos-sdk/types/msgservice" - _ "github.com/cosmos/gogoproto/gogoproto" - grpc1 "github.com/cosmos/gogoproto/grpc" - proto "github.com/cosmos/gogoproto/proto" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" - io "io" - math "math" - math_bits "math/bits" -) - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package - -// BlobStatus defines the statuses for a blob submission -type BlobStatus int32 - -const ( - // Indicates that the blob status is unspecified or not set. - BLOB_STATUS_UNSPECIFIED BlobStatus = 0 - // Indicates that the blob submission failed. - BLOB_STATUS_FAILURE BlobStatus = 1 - // Indicates that the blob submission was successful. - BLOB_STATUS_SUCCESS BlobStatus = 2 - // Indicates that the blob submission is still pending and has not yet been processed. - BLOB_STATUS_PENDING BlobStatus = 3 -) - -var BlobStatus_name = map[int32]string{ - 0: "BLOB_STATUS_UNSPECIFIED", - 1: "BLOB_STATUS_FAILURE", - 2: "BLOB_STATUS_SUCCESS", - 3: "BLOB_STATUS_PENDING", -} - -var BlobStatus_value = map[string]int32{ - "BLOB_STATUS_UNSPECIFIED": 0, - "BLOB_STATUS_FAILURE": 1, - "BLOB_STATUS_SUCCESS": 2, - "BLOB_STATUS_PENDING": 3, -} - -func (x BlobStatus) String() string { - return proto.EnumName(BlobStatus_name, int32(x)) -} - -func (BlobStatus) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_c917a06c648c4256, []int{0} -} - -// Range defines the range of blocks for which the blob is being submitted. -type Range struct { - // The starting block height in the range. Indicates the beginning of the block range. - From uint64 `protobuf:"varint,1,opt,name=from,proto3" json:"from,omitempty"` - // The ending block height in the range. Indicates the end of the block range. - To uint64 `protobuf:"varint,2,opt,name=to,proto3" json:"to,omitempty"` -} - -func (m *Range) Reset() { *m = Range{} } -func (m *Range) String() string { return proto.CompactTextString(m) } -func (*Range) ProtoMessage() {} -func (*Range) Descriptor() ([]byte, []int) { - return fileDescriptor_c917a06c648c4256, []int{0} -} -func (m *Range) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) -} -func (m *Range) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_Range.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalToSizedBuffer(b) - if err != nil { - return nil, err - } - return b[:n], nil - } -} -func (m *Range) XXX_Merge(src proto.Message) { - xxx_messageInfo_Range.Merge(m, src) -} -func (m *Range) XXX_Size() int { - return m.Size() -} -func (m *Range) XXX_DiscardUnknown() { - xxx_messageInfo_Range.DiscardUnknown(m) -} - -var xxx_messageInfo_Range proto.InternalMessageInfo - -func (m *Range) GetFrom() uint64 { - if m != nil { - return m.From - } - return 0 -} - -func (m *Range) GetTo() uint64 { - if m != nil { - return m.To - } - return 0 -} - -// MsgUpdateBlobStatusRequest define a message to update the status of a previously submitted blob. -type MsgUpdateBlobStatusRequest struct { - // Address of the validator updating the blob status. - ValidatorAddress string `protobuf:"bytes,1,opt,name=validator_address,json=validatorAddress,proto3" json:"validator_address,omitempty"` - // range of blocks for which the blob status is being updated. - BlocksRange *Range `protobuf:"bytes,2,opt,name=blocks_range,json=blocksRange,proto3" json:"blocks_range,omitempty"` - // The height at which the blob is stored in the Avail system. This indicates where the blob data is available. - AvailHeight uint64 `protobuf:"varint,3,opt,name=avail_height,json=availHeight,proto3" json:"avail_height,omitempty"` - // The status of the blob submission. 
- IsSuccess bool `protobuf:"varint,4,opt,name=is_success,json=isSuccess,proto3" json:"is_success,omitempty"` -} - -func (m *MsgUpdateBlobStatusRequest) Reset() { *m = MsgUpdateBlobStatusRequest{} } -func (m *MsgUpdateBlobStatusRequest) String() string { return proto.CompactTextString(m) } -func (*MsgUpdateBlobStatusRequest) ProtoMessage() {} -func (*MsgUpdateBlobStatusRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_c917a06c648c4256, []int{1} -} -func (m *MsgUpdateBlobStatusRequest) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) -} -func (m *MsgUpdateBlobStatusRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_MsgUpdateBlobStatusRequest.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalToSizedBuffer(b) - if err != nil { - return nil, err - } - return b[:n], nil - } -} -func (m *MsgUpdateBlobStatusRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_MsgUpdateBlobStatusRequest.Merge(m, src) -} -func (m *MsgUpdateBlobStatusRequest) XXX_Size() int { - return m.Size() -} -func (m *MsgUpdateBlobStatusRequest) XXX_DiscardUnknown() { - xxx_messageInfo_MsgUpdateBlobStatusRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_MsgUpdateBlobStatusRequest proto.InternalMessageInfo - -func (m *MsgUpdateBlobStatusRequest) GetValidatorAddress() string { - if m != nil { - return m.ValidatorAddress - } - return "" -} - -func (m *MsgUpdateBlobStatusRequest) GetBlocksRange() *Range { - if m != nil { - return m.BlocksRange - } - return nil -} - -func (m *MsgUpdateBlobStatusRequest) GetAvailHeight() uint64 { - if m != nil { - return m.AvailHeight - } - return 0 -} - -func (m *MsgUpdateBlobStatusRequest) GetIsSuccess() bool { - if m != nil { - return m.IsSuccess - } - return false -} - -// MsgUpdateBlobStatusResponse is the response type for the Msg/UpdateBlobStatus RPC method. 
-type MsgUpdateBlobStatusResponse struct { -} - -func (m *MsgUpdateBlobStatusResponse) Reset() { *m = MsgUpdateBlobStatusResponse{} } -func (m *MsgUpdateBlobStatusResponse) String() string { return proto.CompactTextString(m) } -func (*MsgUpdateBlobStatusResponse) ProtoMessage() {} -func (*MsgUpdateBlobStatusResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_c917a06c648c4256, []int{2} -} -func (m *MsgUpdateBlobStatusResponse) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) -} -func (m *MsgUpdateBlobStatusResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_MsgUpdateBlobStatusResponse.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalToSizedBuffer(b) - if err != nil { - return nil, err - } - return b[:n], nil - } -} -func (m *MsgUpdateBlobStatusResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_MsgUpdateBlobStatusResponse.Merge(m, src) -} -func (m *MsgUpdateBlobStatusResponse) XXX_Size() int { - return m.Size() -} -func (m *MsgUpdateBlobStatusResponse) XXX_DiscardUnknown() { - xxx_messageInfo_MsgUpdateBlobStatusResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_MsgUpdateBlobStatusResponse proto.InternalMessageInfo - -func init() { - proto.RegisterEnum("cada.v1beta1.BlobStatus", BlobStatus_name, BlobStatus_value) - proto.RegisterType((*Range)(nil), "cada.v1beta1.Range") - proto.RegisterType((*MsgUpdateBlobStatusRequest)(nil), "cada.v1beta1.MsgUpdateBlobStatusRequest") - proto.RegisterType((*MsgUpdateBlobStatusResponse)(nil), "cada.v1beta1.MsgUpdateBlobStatusResponse") -} - -func init() { proto.RegisterFile("cada/v1beta1/tx.proto", fileDescriptor_c917a06c648c4256) } - -var fileDescriptor_c917a06c648c4256 = []byte{ - // 464 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xcf, 0x6e, 0xd3, 0x40, - 0x10, 0xc6, 0xed, 0x24, 0x45, 0x74, 0x12, 0x21, 0xb3, 0x05, 0x12, 0xb9, 0xaa, 0x55, 0x72, 0x21, - 0xa4, 0xaa, 0xad, 0x14, 0x89, 0x03, 0xb7, 0x24, 0x75, 0x21, 0x52, 0x1b, 0x2a, 0xbb, 0xbe, 0x70, - 0xb1, 0xd6, 0xf6, 0xb2, 0xb1, 0xb0, 0xbb, 0xc1, 0xbb, 0x36, 0x7f, 0x4e, 0x88, 0x13, 0x47, 0xde, - 0x81, 0x17, 0xe8, 0x63, 0x70, 0xec, 0x11, 0x89, 0x0b, 0x4a, 0x0e, 0x7d, 0x0d, 0xe4, 0x35, 0x82, - 0x10, 0x40, 0xe2, 0xb4, 0xa3, 0xdf, 0x37, 0xda, 0xf9, 0xbe, 0x9d, 0x85, 0xdb, 0x21, 0x8e, 0xb0, - 0x55, 0x0c, 0x02, 0x22, 0xf0, 0xc0, 0x12, 0xaf, 0xcd, 0x79, 0xc6, 0x04, 0x43, 0xad, 0x12, 0x9b, - 0x3f, 0xb0, 0xde, 0x0e, 0x19, 0x4f, 0x19, 0xb7, 0x52, 0x4e, 0xad, 0x62, 0x50, 0x1e, 0x55, 0x9b, - 0x7e, 0x8b, 0x32, 0xca, 0x64, 0x69, 0x95, 0x55, 0x45, 0xbb, 0x7b, 0xb0, 0xe1, 0xe0, 0x73, 0x4a, - 0x10, 0x82, 0xc6, 0xf3, 0x8c, 0xa5, 0x1d, 0x75, 0x57, 0xed, 0x35, 0x1c, 0x59, 0xa3, 0x1b, 0x50, - 0x13, 0xac, 0x53, 0x93, 0xa4, 0x26, 0x58, 0xf7, 0xab, 0x0a, 0xfa, 0x09, 0xa7, 0xde, 0x3c, 0xc2, - 0x82, 0x8c, 0x12, 0x16, 0xb8, 0x02, 0x8b, 0x9c, 0x3b, 0xe4, 0x65, 0x4e, 0xb8, 0x40, 0x7b, 0x70, - 0xb3, 0xc0, 0x49, 0x1c, 0x61, 0xc1, 0x32, 0x1f, 0x47, 0x51, 0x46, 0x38, 0x97, 0xf7, 0x6d, 0x3a, - 0xda, 0x4f, 0x61, 0x58, 0x71, 0xf4, 0x10, 0x5a, 0x41, 0xc2, 0xc2, 0x17, 0xdc, 0xcf, 0xca, 0xf9, - 0x72, 0x4a, 0xf3, 0x60, 0xcb, 0x5c, 0x0d, 0x63, 0x4a, 0x6b, 0x4e, 0xb3, 0x6a, 0xac, 0x7c, 0xde, - 0x85, 0x16, 0x2e, 0x70, 0x9c, 0xf8, 0x33, 0x12, 0xd3, 0x99, 0xe8, 0xd4, 0xa5, 0xbb, 0xa6, 0x64, - 0x4f, 0x24, 0x42, 0x3b, 0x00, 0x31, 0xf7, 0x79, 0x1e, 0x86, 0xa5, 0x81, 0xc6, 0xae, 0xda, 0xbb, - 0xee, 0x6c, 0xc6, 0xdc, 0xad, 0xc0, 0xa3, 0x3b, 0xef, 0xaf, 0x2e, 0xfa, 0x7f, 
0x3a, 0xed, 0xee, - 0xc0, 0xf6, 0x5f, 0xc3, 0xf1, 0x39, 0x3b, 0xe7, 0xa4, 0xff, 0x16, 0xe0, 0x17, 0x45, 0xdb, 0xd0, - 0x1e, 0x1d, 0x3f, 0x1d, 0xf9, 0xee, 0xd9, 0xf0, 0xcc, 0x73, 0x7d, 0x6f, 0xea, 0x9e, 0xda, 0xe3, - 0xc9, 0xd1, 0xc4, 0x3e, 0xd4, 0x14, 0xd4, 0x86, 0xad, 0x55, 0xf1, 0x68, 0x38, 0x39, 0xf6, 0x1c, - 0x5b, 0x53, 0xd7, 0x05, 0xd7, 0x1b, 0x8f, 0x6d, 0xd7, 0xd5, 0x6a, 0xeb, 0xc2, 0xa9, 0x3d, 0x3d, - 0x9c, 0x4c, 0x1f, 0x6b, 0x75, 0xbd, 0xf1, 0xe1, 0x93, 0xa1, 0x1c, 0xe4, 0x50, 0x3f, 0xe1, 0x14, - 0x51, 0xd0, 0xd6, 0xed, 0xa1, 0xde, 0xef, 0x2f, 0xf6, 0xef, 0xf5, 0xe8, 0xf7, 0xff, 0xa3, 0xb3, - 0xca, 0xaa, 0x6f, 0xbc, 0xbb, 0xba, 0xe8, 0xab, 0xa3, 0xe1, 0xe7, 0x85, 0xa1, 0x5e, 0x2e, 0x0c, - 0xf5, 0xdb, 0xc2, 0x50, 0x3f, 0x2e, 0x0d, 0xe5, 0x72, 0x69, 0x28, 0x5f, 0x96, 0x86, 0xf2, 0xec, - 0x1e, 0x8d, 0xc5, 0x2c, 0x0f, 0xcc, 0x90, 0xa5, 0x56, 0x11, 0x8b, 0x57, 0xb1, 0xb0, 0xe4, 0x06, - 0xf6, 0x23, 0xbc, 0x9f, 0xb2, 0x28, 0x4f, 0x88, 0x25, 0xde, 0xcc, 0x09, 0x0f, 0xae, 0xc9, 0x6f, - 0xf6, 0xe0, 0x7b, 0x00, 0x00, 0x00, 0xff, 0xff, 0xf5, 0xd1, 0x7e, 0xdd, 0xbc, 0x02, 0x00, 0x00, -} - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConn - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion4 - -// MsgClient is the client API for Msg service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type MsgClient interface { - // UpdateBlobStatus updates the status of a blob submission. - UpdateBlobStatus(ctx context.Context, in *MsgUpdateBlobStatusRequest, opts ...grpc.CallOption) (*MsgUpdateBlobStatusResponse, error) -} - -type msgClient struct { - cc grpc1.ClientConn -} - -func NewMsgClient(cc grpc1.ClientConn) MsgClient { - return &msgClient{cc} -} - -func (c *msgClient) UpdateBlobStatus(ctx context.Context, in *MsgUpdateBlobStatusRequest, opts ...grpc.CallOption) (*MsgUpdateBlobStatusResponse, error) { - out := new(MsgUpdateBlobStatusResponse) - err := c.cc.Invoke(ctx, "/cada.v1beta1.Msg/UpdateBlobStatus", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// MsgServer is the server API for Msg service. -type MsgServer interface { - // UpdateBlobStatus updates the status of a blob submission. - UpdateBlobStatus(context.Context, *MsgUpdateBlobStatusRequest) (*MsgUpdateBlobStatusResponse, error) -} - -// UnimplementedMsgServer can be embedded to have forward compatible implementations. 
-type UnimplementedMsgServer struct { -} - -func (*UnimplementedMsgServer) UpdateBlobStatus(ctx context.Context, req *MsgUpdateBlobStatusRequest) (*MsgUpdateBlobStatusResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method UpdateBlobStatus not implemented") -} - -func RegisterMsgServer(s grpc1.Server, srv MsgServer) { - s.RegisterService(&_Msg_serviceDesc, srv) -} - -func _Msg_UpdateBlobStatus_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(MsgUpdateBlobStatusRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(MsgServer).UpdateBlobStatus(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/cada.v1beta1.Msg/UpdateBlobStatus", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(MsgServer).UpdateBlobStatus(ctx, req.(*MsgUpdateBlobStatusRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _Msg_serviceDesc = grpc.ServiceDesc{ - ServiceName: "cada.v1beta1.Msg", - HandlerType: (*MsgServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "UpdateBlobStatus", - Handler: _Msg_UpdateBlobStatus_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "cada/v1beta1/tx.proto", -} - -func (m *Range) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBuffer(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *Range) MarshalTo(dAtA []byte) (int, error) { - size := m.Size() - return m.MarshalToSizedBuffer(dAtA[:size]) -} - -func (m *Range) MarshalToSizedBuffer(dAtA []byte) (int, error) { - i := len(dAtA) - _ = i - var l int - _ = l - if m.To != 0 { - i = encodeVarintTx(dAtA, i, uint64(m.To)) - i-- - dAtA[i] = 0x10 - } - if m.From != 0 { - i = encodeVarintTx(dAtA, i, uint64(m.From)) - i-- - dAtA[i] = 0x8 - } - return len(dAtA) - i, nil -} - -func (m *MsgUpdateBlobStatusRequest) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBuffer(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *MsgUpdateBlobStatusRequest) MarshalTo(dAtA []byte) (int, error) { - size := m.Size() - return m.MarshalToSizedBuffer(dAtA[:size]) -} - -func (m *MsgUpdateBlobStatusRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { - i := len(dAtA) - _ = i - var l int - _ = l - if m.IsSuccess { - i-- - if m.IsSuccess { - dAtA[i] = 1 - } else { - dAtA[i] = 0 - } - i-- - dAtA[i] = 0x20 - } - if m.AvailHeight != 0 { - i = encodeVarintTx(dAtA, i, uint64(m.AvailHeight)) - i-- - dAtA[i] = 0x18 - } - if m.BlocksRange != nil { - { - size, err := m.BlocksRange.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintTx(dAtA, i, uint64(size)) - } - i-- - dAtA[i] = 0x12 - } - if len(m.ValidatorAddress) > 0 { - i -= len(m.ValidatorAddress) - copy(dAtA[i:], m.ValidatorAddress) - i = encodeVarintTx(dAtA, i, uint64(len(m.ValidatorAddress))) - i-- - dAtA[i] = 0xa - } - return len(dAtA) - i, nil -} - -func (m *MsgUpdateBlobStatusResponse) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBuffer(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *MsgUpdateBlobStatusResponse) MarshalTo(dAtA []byte) (int, error) { - size := m.Size() - return 
m.MarshalToSizedBuffer(dAtA[:size]) -} - -func (m *MsgUpdateBlobStatusResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { - i := len(dAtA) - _ = i - var l int - _ = l - return len(dAtA) - i, nil -} - -func encodeVarintTx(dAtA []byte, offset int, v uint64) int { - offset -= sovTx(v) - base := offset - for v >= 1<<7 { - dAtA[offset] = uint8(v&0x7f | 0x80) - v >>= 7 - offset++ - } - dAtA[offset] = uint8(v) - return base -} -func (m *Range) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - if m.From != 0 { - n += 1 + sovTx(uint64(m.From)) - } - if m.To != 0 { - n += 1 + sovTx(uint64(m.To)) - } - return n -} - -func (m *MsgUpdateBlobStatusRequest) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.ValidatorAddress) - if l > 0 { - n += 1 + l + sovTx(uint64(l)) - } - if m.BlocksRange != nil { - l = m.BlocksRange.Size() - n += 1 + l + sovTx(uint64(l)) - } - if m.AvailHeight != 0 { - n += 1 + sovTx(uint64(m.AvailHeight)) - } - if m.IsSuccess { - n += 2 - } - return n -} - -func (m *MsgUpdateBlobStatusResponse) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - return n -} - -func sovTx(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 -} -func sozTx(x uint64) (n int) { - return sovTx(uint64((x << 1) ^ uint64((int64(x) >> 63)))) -} -func (m *Range) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowTx - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: Range: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: Range: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field From", wireType) - } - m.From = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowTx - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - m.From |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - case 2: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field To", wireType) - } - m.To = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowTx - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - m.To |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - default: - iNdEx = preIndex - skippy, err := skipTx(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLengthTx - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *MsgUpdateBlobStatusRequest) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowTx - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: MsgUpdateBlobStatusRequest: 
wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: MsgUpdateBlobStatusRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field ValidatorAddress", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowTx - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthTx - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthTx - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.ValidatorAddress = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field BlocksRange", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowTx - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthTx - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthTx - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if m.BlocksRange == nil { - m.BlocksRange = &Range{} - } - if err := m.BlocksRange.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - case 3: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field AvailHeight", wireType) - } - m.AvailHeight = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowTx - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - m.AvailHeight |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - case 4: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field IsSuccess", wireType) - } - var v int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowTx - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - v |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - m.IsSuccess = bool(v != 0) - default: - iNdEx = preIndex - skippy, err := skipTx(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLengthTx - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *MsgUpdateBlobStatusResponse) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowTx - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: MsgUpdateBlobStatusResponse: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: MsgUpdateBlobStatusResponse: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - default: - iNdEx = preIndex - skippy, err := skipTx(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 
0 { - return ErrInvalidLengthTx - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func skipTx(dAtA []byte) (n int, err error) { - l := len(dAtA) - iNdEx := 0 - depth := 0 - for iNdEx < l { - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowTx - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - wireType := int(wire & 0x7) - switch wireType { - case 0: - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowTx - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - iNdEx++ - if dAtA[iNdEx-1] < 0x80 { - break - } - } - case 1: - iNdEx += 8 - case 2: - var length int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowTx - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - length |= (int(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - if length < 0 { - return 0, ErrInvalidLengthTx - } - iNdEx += length - case 3: - depth++ - case 4: - if depth == 0 { - return 0, ErrUnexpectedEndOfGroupTx - } - depth-- - case 5: - iNdEx += 4 - default: - return 0, fmt.Errorf("proto: illegal wireType %d", wireType) - } - if iNdEx < 0 { - return 0, ErrInvalidLengthTx - } - if depth == 0 { - return iNdEx, nil - } - } - return 0, io.ErrUnexpectedEOF -} - -var ( - ErrInvalidLengthTx = fmt.Errorf("proto: negative length found during unmarshaling") - ErrIntOverflowTx = fmt.Errorf("proto: integer overflow") - ErrUnexpectedEndOfGroupTx = fmt.Errorf("proto: unexpected end of group") -) diff --git a/types/vote_extensions.pb.go b/types/vote_extensions.pb.go deleted file mode 100644 index 7556687..0000000 --- a/types/vote_extensions.pb.go +++ /dev/null @@ -1,367 +0,0 @@ -// Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: cada/v1beta1/vote_extensions.proto - -package types - -import ( - fmt "fmt" - proto "github.com/cosmos/gogoproto/proto" - io "io" - math "math" - math_bits "math/bits" -) - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package - -// AvailVoteExtension defines the info which includes in vote extensions -type AvailVoteExtension struct { - // avail_height specifies the height in the Avail chain at which the data has been posted. - AvailHeight int64 `protobuf:"varint,1,opt,name=avail_height,json=availHeight,proto3" json:"avail_height,omitempty"` - // range defines the range of blocks that have been posted to the Avail Data Availability (DA) layer. 
- Range *Range `protobuf:"bytes,2,opt,name=range,proto3" json:"range,omitempty"` -} - -func (m *AvailVoteExtension) Reset() { *m = AvailVoteExtension{} } -func (m *AvailVoteExtension) String() string { return proto.CompactTextString(m) } -func (*AvailVoteExtension) ProtoMessage() {} -func (*AvailVoteExtension) Descriptor() ([]byte, []int) { - return fileDescriptor_194cb859e020afeb, []int{0} -} -func (m *AvailVoteExtension) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) -} -func (m *AvailVoteExtension) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_AvailVoteExtension.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalToSizedBuffer(b) - if err != nil { - return nil, err - } - return b[:n], nil - } -} -func (m *AvailVoteExtension) XXX_Merge(src proto.Message) { - xxx_messageInfo_AvailVoteExtension.Merge(m, src) -} -func (m *AvailVoteExtension) XXX_Size() int { - return m.Size() -} -func (m *AvailVoteExtension) XXX_DiscardUnknown() { - xxx_messageInfo_AvailVoteExtension.DiscardUnknown(m) -} - -var xxx_messageInfo_AvailVoteExtension proto.InternalMessageInfo - -func (m *AvailVoteExtension) GetAvailHeight() int64 { - if m != nil { - return m.AvailHeight - } - return 0 -} - -func (m *AvailVoteExtension) GetRange() *Range { - if m != nil { - return m.Range - } - return nil -} - -func init() { - proto.RegisterType((*AvailVoteExtension)(nil), "cada.v1beta1.AvailVoteExtension") -} - -func init() { - proto.RegisterFile("cada/v1beta1/vote_extensions.proto", fileDescriptor_194cb859e020afeb) -} - -var fileDescriptor_194cb859e020afeb = []byte{ - // 216 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x4a, 0x4e, 0x4c, 0x49, - 0xd4, 0x2f, 0x33, 0x4c, 0x4a, 0x2d, 0x49, 0x34, 0xd4, 0x2f, 0xcb, 0x2f, 0x49, 0x8d, 0x4f, 0xad, - 0x28, 0x49, 0xcd, 0x2b, 0xce, 0xcc, 0xcf, 0x2b, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, - 0x01, 0xa9, 0xd1, 0x83, 0xaa, 0x91, 0x12, 0x45, 0xd1, 0x51, 0x52, 0x01, 0x51, 0xa4, 0x94, 0xc4, - 0x25, 0xe4, 0x58, 0x96, 0x98, 0x99, 0x13, 0x96, 0x5f, 0x92, 0xea, 0x0a, 0x33, 0x41, 0x48, 0x91, - 0x8b, 0x27, 0x11, 0x24, 0x1a, 0x9f, 0x91, 0x9a, 0x99, 0x9e, 0x51, 0x22, 0xc1, 0xa8, 0xc0, 0xa8, - 0xc1, 0x1c, 0xc4, 0x0d, 0x16, 0xf3, 0x00, 0x0b, 0x09, 0x69, 0x72, 0xb1, 0x16, 0x25, 0xe6, 0xa5, - 0xa7, 0x4a, 0x30, 0x29, 0x30, 0x6a, 0x70, 0x1b, 0x09, 0xeb, 0x21, 0xdb, 0xa6, 0x17, 0x04, 0x92, - 0x0a, 0x82, 0xa8, 0x70, 0x72, 0x3c, 0xf1, 0x48, 0x8e, 0xf1, 0xc2, 0x23, 0x39, 0xc6, 0x07, 0x8f, - 0xe4, 0x18, 0x27, 0x3c, 0x96, 0x63, 0xb8, 0xf0, 0x58, 0x8e, 0xe1, 0xc6, 0x63, 0x39, 0x86, 0x28, - 0xf5, 0xf4, 0xcc, 0x92, 0x8c, 0xd2, 0x24, 0xbd, 0xe4, 0xfc, 0x5c, 0xfd, 0xb2, 0xcc, 0x92, 0xf2, - 0xcc, 0x12, 0x7d, 0xb0, 0x1d, 0xba, 0x29, 0x89, 0xba, 0xb9, 0xf9, 0x29, 0xa5, 0x39, 0xa9, 0xfa, - 0x25, 0x95, 0x05, 0xa9, 0xc5, 0x49, 0x6c, 0x60, 0xd7, 0x1a, 0x03, 0x02, 0x00, 0x00, 0xff, 0xff, - 0x08, 0x21, 0x4f, 0xd7, 0xf8, 0x00, 0x00, 0x00, -} - -func (m *AvailVoteExtension) Marshal() (dAtA []byte, err error) { - size := m.Size() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBuffer(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *AvailVoteExtension) MarshalTo(dAtA []byte) (int, error) { - size := m.Size() - return m.MarshalToSizedBuffer(dAtA[:size]) -} - -func (m *AvailVoteExtension) MarshalToSizedBuffer(dAtA []byte) (int, error) { - i := len(dAtA) - _ = i - var l int - _ = l - if m.Range != nil { - { - size, err 
:= m.Range.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintVoteExtensions(dAtA, i, uint64(size)) - } - i-- - dAtA[i] = 0x12 - } - if m.AvailHeight != 0 { - i = encodeVarintVoteExtensions(dAtA, i, uint64(m.AvailHeight)) - i-- - dAtA[i] = 0x8 - } - return len(dAtA) - i, nil -} - -func encodeVarintVoteExtensions(dAtA []byte, offset int, v uint64) int { - offset -= sovVoteExtensions(v) - base := offset - for v >= 1<<7 { - dAtA[offset] = uint8(v&0x7f | 0x80) - v >>= 7 - offset++ - } - dAtA[offset] = uint8(v) - return base -} -func (m *AvailVoteExtension) Size() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - if m.AvailHeight != 0 { - n += 1 + sovVoteExtensions(uint64(m.AvailHeight)) - } - if m.Range != nil { - l = m.Range.Size() - n += 1 + l + sovVoteExtensions(uint64(l)) - } - return n -} - -func sovVoteExtensions(x uint64) (n int) { - return (math_bits.Len64(x|1) + 6) / 7 -} -func sozVoteExtensions(x uint64) (n int) { - return sovVoteExtensions(uint64((x << 1) ^ uint64((int64(x) >> 63)))) -} -func (m *AvailVoteExtension) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowVoteExtensions - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: AvailVoteExtension: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: AvailVoteExtension: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field AvailHeight", wireType) - } - m.AvailHeight = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowVoteExtensions - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - m.AvailHeight |= int64(b&0x7F) << shift - if b < 0x80 { - break - } - } - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Range", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowVoteExtensions - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthVoteExtensions - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthVoteExtensions - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if m.Range == nil { - m.Range = &Range{} - } - if err := m.Range.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipVoteExtensions(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLengthVoteExtensions - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func skipVoteExtensions(dAtA []byte) (n int, err error) { - l := len(dAtA) - iNdEx := 0 - depth := 0 - for iNdEx < l { - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowVoteExtensions - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b 
:= dAtA[iNdEx] - iNdEx++ - wire |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - wireType := int(wire & 0x7) - switch wireType { - case 0: - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowVoteExtensions - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - iNdEx++ - if dAtA[iNdEx-1] < 0x80 { - break - } - } - case 1: - iNdEx += 8 - case 2: - var length int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflowVoteExtensions - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - length |= (int(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - if length < 0 { - return 0, ErrInvalidLengthVoteExtensions - } - iNdEx += length - case 3: - depth++ - case 4: - if depth == 0 { - return 0, ErrUnexpectedEndOfGroupVoteExtensions - } - depth-- - case 5: - iNdEx += 4 - default: - return 0, fmt.Errorf("proto: illegal wireType %d", wireType) - } - if iNdEx < 0 { - return 0, ErrInvalidLengthVoteExtensions - } - if depth == 0 { - return iNdEx, nil - } - } - return 0, io.ErrUnexpectedEOF -} - -var ( - ErrInvalidLengthVoteExtensions = fmt.Errorf("proto: negative length found during unmarshaling") - ErrIntOverflowVoteExtensions = fmt.Errorf("proto: integer overflow") - ErrUnexpectedEndOfGroupVoteExtensions = fmt.Errorf("proto: unexpected end of group") -) From 7b7fb5dddeab8669718316296adec96ccb36abe1 Mon Sep 17 00:00:00 2001 From: saiteja Date: Wed, 25 Sep 2024 12:24:43 +0530 Subject: [PATCH 05/34] fix: proto --- x/cada/types/abci.pb.go | 62 ++++++++++----------- x/cada/types/genesis.pb.go | 32 +++++------ x/cada/types/query.pb.go | 76 +++++++++++++------------- x/cada/types/query.pb.gw.go | 2 +- x/cada/types/tx.pb.go | 88 +++++++++++++++--------------- x/cada/types/vote_extensions.pb.go | 40 +++++++------- 6 files changed, 150 insertions(+), 150 deletions(-) diff --git a/x/cada/types/abci.pb.go b/x/cada/types/abci.pb.go index 5bbf177..b4f5c64 100644 --- a/x/cada/types/abci.pb.go +++ b/x/cada/types/abci.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: sdk/avail/v1beta1/abci.proto +// source: cada/v1beta1/abci.proto package types @@ -33,7 +33,7 @@ func (m *InjectedData) Reset() { *m = InjectedData{} } func (m *InjectedData) String() string { return proto.CompactTextString(m) } func (*InjectedData) ProtoMessage() {} func (*InjectedData) Descriptor() ([]byte, []int) { - return fileDescriptor_cf694f1e6f573248, []int{0} + return fileDescriptor_c10129946240d9b3, []int{0} } func (m *InjectedData) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -78,7 +78,7 @@ func (m *PendingBlocks) Reset() { *m = PendingBlocks{} } func (m *PendingBlocks) String() string { return proto.CompactTextString(m) } func (*PendingBlocks) ProtoMessage() {} func (*PendingBlocks) Descriptor() ([]byte, []int) { - return fileDescriptor_cf694f1e6f573248, []int{1} + return fileDescriptor_c10129946240d9b3, []int{1} } func (m *PendingBlocks) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -124,7 +124,7 @@ func (m *UnprovenBlock) Reset() { *m = UnprovenBlock{} } func (m *UnprovenBlock) String() string { return proto.CompactTextString(m) } func (*UnprovenBlock) ProtoMessage() {} func (*UnprovenBlock) Descriptor() ([]byte, []int) { - return fileDescriptor_cf694f1e6f573248, []int{2} + return fileDescriptor_c10129946240d9b3, []int{2} } func (m *UnprovenBlock) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -168,33 +168,33 @@ func (m *UnprovenBlock) GetBlock() []byte { } func init() { - proto.RegisterType((*InjectedData)(nil), "sdk.avail.v1beta1.InjectedData") - proto.RegisterType((*PendingBlocks)(nil), "sdk.avail.v1beta1.PendingBlocks") - proto.RegisterType((*UnprovenBlock)(nil), "sdk.avail.v1beta1.UnprovenBlock") -} - -func init() { proto.RegisterFile("sdk/avail/v1beta1/abci.proto", fileDescriptor_cf694f1e6f573248) } - -var fileDescriptor_cf694f1e6f573248 = []byte{ - // 283 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x29, 0x4e, 0xc9, 0xd6, - 0x4f, 0x2c, 0x4b, 0xcc, 0xcc, 0xd1, 0x2f, 0x33, 0x4c, 0x4a, 0x2d, 0x49, 0x34, 0xd4, 0x4f, 0x4c, - 0x4a, 0xce, 0xd4, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x12, 0x2c, 0x4e, 0xc9, 0xd6, 0x03, 0xcb, - 0xea, 0x41, 0x65, 0xa5, 0x44, 0xd2, 0xf3, 0xd3, 0xf3, 0xc1, 0xb2, 0xfa, 0x20, 0x16, 0x44, 0xa1, - 0x52, 0x2c, 0x17, 0x8f, 0x67, 0x5e, 0x56, 0x6a, 0x72, 0x49, 0x6a, 0x8a, 0x4b, 0x62, 0x49, 0xa2, - 0x90, 0x2f, 0x17, 0x5f, 0x41, 0x6a, 0x5e, 0x4a, 0x66, 0x5e, 0x7a, 0x7c, 0x52, 0x4e, 0x7e, 0x72, - 0x76, 0xb1, 0x04, 0xa3, 0x02, 0xa3, 0x06, 0xb7, 0x91, 0x82, 0x1e, 0x86, 0x89, 0x7a, 0x01, 0x10, - 0x85, 0x4e, 0x60, 0x75, 0x4e, 0x2c, 0x27, 0xee, 0xc9, 0x33, 0x04, 0xf1, 0x16, 0x20, 0x0b, 0x2a, - 0x99, 0x70, 0xf1, 0xa2, 0xa8, 0x12, 0x52, 0xe6, 0xe2, 0x05, 0x9b, 0x1b, 0x9f, 0x91, 0x9a, 0x99, - 0x9e, 0x51, 0x02, 0x32, 0x9e, 0x59, 0x83, 0x39, 0x88, 0x07, 0x2c, 0xe8, 0x01, 0x11, 0x53, 0xb2, - 0xe5, 0xe2, 0x0d, 0xcd, 0x2b, 0x28, 0xca, 0x2f, 0x4b, 0xcd, 0x03, 0x6b, 0x13, 0x12, 0xe3, 0x62, - 0x83, 0xa8, 0x07, 0xbb, 0x86, 0x39, 0x08, 0xca, 0x13, 0x12, 0xe1, 0x62, 0x05, 0x6b, 0x94, 0x60, - 0x52, 0x60, 0xd4, 0xe0, 0x09, 0x82, 0x70, 0x9c, 0x1c, 0x4f, 0x3c, 0x92, 0x63, 0xbc, 0xf0, 0x48, - 0x8e, 0xf1, 0xc1, 0x23, 0x39, 0xc6, 0x09, 0x8f, 0xe5, 0x18, 0x2e, 0x3c, 0x96, 0x63, 0xb8, 0xf1, - 0x58, 0x8e, 0x21, 0x4a, 0x3d, 0x3d, 0xb3, 0x24, 0xa3, 0x34, 0x49, 0x2f, 0x39, 0x3f, 0x57, 0xbf, - 0x2c, 0xb3, 0xa4, 0x3c, 0xb3, 0x04, 0x12, 0x84, 0xba, 0x29, 0x89, 0xba, 0xb9, 0xf9, 0x29, 0xa5, - 0x39, 0xa9, 0xfa, 0x25, 0x95, 0x05, 0xa9, 0xc5, 0x49, 0x6c, 0xe0, 0xd0, 0x31, 0x06, 0x04, 
0x00, - 0x00, 0xff, 0xff, 0x26, 0x36, 0x5a, 0x14, 0x66, 0x01, 0x00, 0x00, + proto.RegisterType((*InjectedData)(nil), "cada.v1beta1.InjectedData") + proto.RegisterType((*PendingBlocks)(nil), "cada.v1beta1.PendingBlocks") + proto.RegisterType((*UnprovenBlock)(nil), "cada.v1beta1.UnprovenBlock") +} + +func init() { proto.RegisterFile("cada/v1beta1/abci.proto", fileDescriptor_c10129946240d9b3) } + +var fileDescriptor_c10129946240d9b3 = []byte{ + // 280 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x54, 0x90, 0xb1, 0x4e, 0xc3, 0x30, + 0x18, 0x84, 0x63, 0x0a, 0x1d, 0x4c, 0xc2, 0x10, 0x55, 0x50, 0x81, 0x64, 0xaa, 0xb0, 0x64, 0xa9, + 0xad, 0x02, 0x2b, 0x4b, 0xc4, 0x10, 0x36, 0x14, 0x09, 0x09, 0xb1, 0x54, 0x4e, 0x62, 0x39, 0x86, + 0xd4, 0xb6, 0x5a, 0x37, 0xc0, 0x5b, 0xf0, 0x58, 0x1d, 0x3b, 0x32, 0x21, 0x94, 0xbc, 0x08, 0x8a, + 0xdd, 0xa1, 0x6c, 0xfe, 0xef, 0xbe, 0x3b, 0x59, 0x07, 0xcf, 0x0a, 0x5a, 0x52, 0xd2, 0xcc, 0x72, + 0x66, 0xe8, 0x8c, 0xd0, 0xbc, 0x10, 0x58, 0x2f, 0x95, 0x51, 0xa1, 0xdf, 0x1b, 0x78, 0x67, 0x9c, + 0x8f, 0xb8, 0xe2, 0xca, 0x1a, 0xa4, 0x7f, 0x39, 0x26, 0x7a, 0x86, 0xfe, 0x83, 0x7c, 0x65, 0x85, + 0x61, 0xe5, 0x3d, 0x35, 0x34, 0x4c, 0xe1, 0x89, 0x66, 0xb2, 0x14, 0x92, 0xcf, 0xf3, 0x5a, 0x15, + 0x6f, 0xab, 0x31, 0x98, 0x80, 0xf8, 0xf8, 0xfa, 0x02, 0xef, 0x97, 0xe1, 0x47, 0xc7, 0x24, 0x16, + 0x49, 0x0e, 0x37, 0x3f, 0x97, 0x5e, 0x16, 0xe8, 0x7d, 0x31, 0xba, 0x85, 0xc1, 0x3f, 0x2a, 0xbc, + 0x82, 0x81, 0xad, 0x9c, 0x57, 0x4c, 0xf0, 0xca, 0xf4, 0xcd, 0x83, 0x78, 0x90, 0xf9, 0x56, 0x4c, + 0x9d, 0x16, 0xdd, 0xc1, 0xe0, 0x49, 0xea, 0xa5, 0x6a, 0x98, 0xb4, 0xb1, 0xf0, 0x14, 0x0e, 0x1d, + 0x6f, 0x3f, 0x32, 0xc8, 0x76, 0x57, 0x38, 0x82, 0x47, 0x36, 0x38, 0x3e, 0x98, 0x80, 0xd8, 0xcf, + 0xdc, 0x91, 0xa4, 0x9b, 0x16, 0x81, 0x6d, 0x8b, 0xc0, 0x6f, 0x8b, 0xc0, 0x57, 0x87, 0xbc, 0x6d, + 0x87, 0xbc, 0xef, 0x0e, 0x79, 0x2f, 0x98, 0x0b, 0x53, 0xad, 0x73, 0x5c, 0xa8, 0x05, 0x69, 0x84, + 0x79, 0x17, 0x86, 0xd0, 0x86, 0x8a, 0x7a, 0x5a, 0xd2, 0xe9, 0x42, 0x95, 0xeb, 0x9a, 0x91, 0x0f, + 0x62, 0x97, 0x34, 0x9f, 0x9a, 0xad, 0xf2, 0xa1, 0xdd, 0xe7, 0xe6, 0x2f, 0x00, 0x00, 0xff, 0xff, + 0x17, 0x6b, 0xb7, 0xe0, 0x5e, 0x01, 0x00, 0x00, } func (m *InjectedData) Marshal() (dAtA []byte, err error) { diff --git a/x/cada/types/genesis.pb.go b/x/cada/types/genesis.pb.go index 43a5ff8..bcdcdb4 100644 --- a/x/cada/types/genesis.pb.go +++ b/x/cada/types/genesis.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: sdk/avail/v1beta1/genesis.proto +// source: cada/v1beta1/genesis.proto package types @@ -30,7 +30,7 @@ func (m *GenesisState) Reset() { *m = GenesisState{} } func (m *GenesisState) String() string { return proto.CompactTextString(m) } func (*GenesisState) ProtoMessage() {} func (*GenesisState) Descriptor() ([]byte, []int) { - return fileDescriptor_b83d128538762178, []int{0} + return fileDescriptor_add756a79c56de92, []int{0} } func (m *GenesisState) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -60,23 +60,23 @@ func (m *GenesisState) XXX_DiscardUnknown() { var xxx_messageInfo_GenesisState proto.InternalMessageInfo func init() { - proto.RegisterType((*GenesisState)(nil), "sdk.avail.v1beta1.GenesisState") + proto.RegisterType((*GenesisState)(nil), "cada.v1beta1.GenesisState") } -func init() { proto.RegisterFile("sdk/avail/v1beta1/genesis.proto", fileDescriptor_b83d128538762178) } +func init() { proto.RegisterFile("cada/v1beta1/genesis.proto", fileDescriptor_add756a79c56de92) } -var fileDescriptor_b83d128538762178 = []byte{ - // 150 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x2f, 0x4e, 0xc9, 0xd6, - 0x4f, 0x2c, 0x4b, 0xcc, 0xcc, 0xd1, 0x2f, 0x33, 0x4c, 0x4a, 0x2d, 0x49, 0x34, 0xd4, 0x4f, 0x4f, - 0xcd, 0x4b, 0x2d, 0xce, 0x2c, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x12, 0x2c, 0x4e, 0xc9, - 0xd6, 0x03, 0x2b, 0xd0, 0x83, 0x2a, 0x50, 0xe2, 0xe3, 0xe2, 0x71, 0x87, 0xa8, 0x09, 0x2e, 0x49, - 0x2c, 0x49, 0x75, 0x72, 0x3c, 0xf1, 0x48, 0x8e, 0xf1, 0xc2, 0x23, 0x39, 0xc6, 0x07, 0x8f, 0xe4, - 0x18, 0x27, 0x3c, 0x96, 0x63, 0xb8, 0xf0, 0x58, 0x8e, 0xe1, 0xc6, 0x63, 0x39, 0x86, 0x28, 0xf5, - 0xf4, 0xcc, 0x92, 0x8c, 0xd2, 0x24, 0xbd, 0xe4, 0xfc, 0x5c, 0xfd, 0xb2, 0xcc, 0x92, 0xf2, 0xcc, - 0x12, 0x88, 0x5d, 0xba, 0x29, 0x89, 0xba, 0xb9, 0xf9, 0x29, 0xa5, 0x39, 0xa9, 0xfa, 0x25, 0x95, - 0x05, 0xa9, 0xc5, 0x49, 0x6c, 0x60, 0xcb, 0x8c, 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, 0x18, 0x5a, - 0x23, 0xdb, 0x8f, 0x00, 0x00, 0x00, +var fileDescriptor_add756a79c56de92 = []byte{ + // 148 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4a, 0x4e, 0x4c, 0x49, + 0xd4, 0x2f, 0x33, 0x4c, 0x4a, 0x2d, 0x49, 0x34, 0xd4, 0x4f, 0x4f, 0xcd, 0x4b, 0x2d, 0xce, 0x2c, + 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x01, 0xc9, 0xe9, 0x41, 0xe5, 0x94, 0xf8, 0xb8, + 0x78, 0xdc, 0x21, 0xd2, 0xc1, 0x25, 0x89, 0x25, 0xa9, 0x4e, 0x1e, 0x27, 0x1e, 0xc9, 0x31, 0x5e, + 0x78, 0x24, 0xc7, 0xf8, 0xe0, 0x91, 0x1c, 0xe3, 0x84, 0xc7, 0x72, 0x0c, 0x17, 0x1e, 0xcb, 0x31, + 0xdc, 0x78, 0x2c, 0xc7, 0x10, 0xa5, 0x97, 0x9e, 0x59, 0x92, 0x51, 0x9a, 0xa4, 0x97, 0x9c, 0x9f, + 0xab, 0x5f, 0x96, 0x59, 0x52, 0x9e, 0x59, 0xa2, 0x9f, 0x58, 0x96, 0x98, 0x99, 0xa3, 0x9b, 0x92, + 0xa8, 0x9b, 0x9b, 0x9f, 0x52, 0x9a, 0x93, 0xaa, 0x5f, 0xa1, 0x0f, 0xb6, 0xb7, 0xa4, 0xb2, 0x20, + 0xb5, 0x38, 0x89, 0x0d, 0x6c, 0x9d, 0x31, 0x20, 0x00, 0x00, 0xff, 0xff, 0x3a, 0x11, 0xd3, 0x69, + 0x8c, 0x00, 0x00, 0x00, } func (m *GenesisState) Marshal() (dAtA []byte, err error) { diff --git a/x/cada/types/query.pb.go b/x/cada/types/query.pb.go index c479e7b..2ed1ab0 100644 --- a/x/cada/types/query.pb.go +++ b/x/cada/types/query.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: sdk/avail/v1beta1/query.proto +// source: cada/v1beta1/query.proto package types @@ -36,7 +36,7 @@ func (m *QuerySubmittedBlobStatusRequest) Reset() { *m = QuerySubmittedB func (m *QuerySubmittedBlobStatusRequest) String() string { return proto.CompactTextString(m) } func (*QuerySubmittedBlobStatusRequest) ProtoMessage() {} func (*QuerySubmittedBlobStatusRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_30ff5d91ce731c68, []int{0} + return fileDescriptor_0579d66f58f8318a, []int{0} } func (m *QuerySubmittedBlobStatusRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -85,7 +85,7 @@ func (m *QuerySubmittedBlobStatusResponse) Reset() { *m = QuerySubmitted func (m *QuerySubmittedBlobStatusResponse) String() string { return proto.CompactTextString(m) } func (*QuerySubmittedBlobStatusResponse) ProtoMessage() {} func (*QuerySubmittedBlobStatusResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_30ff5d91ce731c68, []int{1} + return fileDescriptor_0579d66f58f8318a, []int{1} } func (m *QuerySubmittedBlobStatusResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -143,37 +143,37 @@ func (m *QuerySubmittedBlobStatusResponse) GetVotingEndsAt() string { } func init() { - proto.RegisterType((*QuerySubmittedBlobStatusRequest)(nil), "sdk.avail.v1beta1.QuerySubmittedBlobStatusRequest") - proto.RegisterType((*QuerySubmittedBlobStatusResponse)(nil), "sdk.avail.v1beta1.QuerySubmittedBlobStatusResponse") -} - -func init() { proto.RegisterFile("sdk/avail/v1beta1/query.proto", fileDescriptor_30ff5d91ce731c68) } - -var fileDescriptor_30ff5d91ce731c68 = []byte{ - // 362 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x91, 0xc1, 0x4a, 0xeb, 0x40, - 0x18, 0x85, 0x3b, 0xbd, 0x6d, 0xe1, 0xce, 0xed, 0x15, 0x1c, 0x41, 0x42, 0xd0, 0x18, 0x5b, 0xc5, - 0x82, 0x34, 0xa1, 0xed, 0x13, 0xb4, 0x20, 0xb8, 0x35, 0xdd, 0xb9, 0x29, 0x13, 0x33, 0xa4, 0x43, - 0xd3, 0x99, 0x34, 0xf3, 0x27, 0xda, 0xad, 0x4f, 0x20, 0xf8, 0x18, 0xae, 0xfb, 0x0e, 0x2e, 0x0b, - 0x6e, 0x5c, 0x4a, 0xeb, 0x83, 0x48, 0x33, 0x45, 0x17, 0xad, 0x88, 0xcb, 0x39, 0x67, 0xce, 0xc7, - 0xe1, 0x3f, 0xf8, 0x50, 0x05, 0x23, 0x97, 0x66, 0x94, 0x47, 0x6e, 0xd6, 0xf2, 0x19, 0xd0, 0x96, - 0x3b, 0x49, 0x59, 0x32, 0x75, 0xe2, 0x44, 0x82, 0x24, 0xbb, 0x2a, 0x18, 0x39, 0xb9, 0xed, 0xac, - 0x6d, 0xf3, 0x20, 0x94, 0x32, 0x8c, 0x98, 0x4b, 0x63, 0xee, 0x52, 0x21, 0x24, 0x50, 0xe0, 0x52, - 0x28, 0x1d, 0x30, 0xcd, 0x4d, 0x1e, 0xdc, 0x69, 0xaf, 0x76, 0x8c, 0x8f, 0xae, 0x56, 0xec, 0x7e, - 0xea, 0x8f, 0x39, 0x00, 0x0b, 0x7a, 0x91, 0xf4, 0xfb, 0x40, 0x21, 0x55, 0x1e, 0x9b, 0xa4, 0x4c, - 0x41, 0x6d, 0x86, 0xb0, 0xfd, 0xfd, 0x1f, 0x15, 0x4b, 0xa1, 0x18, 0x71, 0x70, 0x39, 0xa1, 0x22, - 0x64, 0x06, 0xb2, 0x51, 0xe3, 0x5f, 0xdb, 0x70, 0x36, 0x4a, 0x3a, 0xde, 0xca, 0xf7, 0xf4, 0x37, - 0xb2, 0x8f, 0x2b, 0x2a, 0x27, 0x18, 0x45, 0x1b, 0x35, 0xfe, 0x7a, 0xeb, 0x17, 0xa9, 0xe3, 0xff, - 0x71, 0x22, 0x33, 0x26, 0x06, 0x43, 0xc6, 0xc3, 0x21, 0x18, 0x7f, 0x6c, 0xd4, 0x28, 0x79, 0x55, - 0x2d, 0x5e, 0xe6, 0x1a, 0x39, 0xc1, 0x3b, 0x99, 0x04, 0x2e, 0xc2, 0x01, 0x13, 0x81, 0x1a, 0x50, - 0x30, 0x4a, 0x39, 0xa4, 0xaa, 0xd5, 0x0b, 0x11, 0xa8, 0x2e, 0xb4, 0x67, 0x08, 0x97, 0xf3, 0xde, - 0xe4, 0x09, 0xe1, 0xbd, 0x2d, 0xe5, 0x49, 0x7b, 0x4b, 0xcb, 0x1f, 0xae, 0x61, 0x76, 0x7e, 0x95, - 0xd1, 0xd7, 0xa9, 0x9d, 0xdf, 0xbf, 0xbc, 0x3f, 0x16, 0x4f, 0x49, 0x5d, 0xcf, 0xe0, 0x47, 0xd2, - 0xff, 0x9c, 0x42, 0xe5, 0xb9, 0xaf, 0x50, 0xaf, 0xfb, 0xbc, 0xb0, 0xd0, 0x7c, 0x61, 0xa1, 0xb7, - 0x85, 0x85, 0x1e, 
0x96, 0x56, 0x61, 0xbe, 0xb4, 0x0a, 0xaf, 0x4b, 0xab, 0x70, 0x7d, 0x16, 0x72, - 0x18, 0xa6, 0xbe, 0x73, 0x23, 0xc7, 0x6e, 0xc6, 0xe1, 0x96, 0x83, 0xe6, 0x35, 0x03, 0xda, 0x1c, - 0xcb, 0x20, 0x8d, 0x98, 0x0b, 0xd3, 0x98, 0x29, 0xbf, 0x92, 0x8f, 0xdb, 0xf9, 0x08, 0x00, 0x00, - 0xff, 0xff, 0x37, 0x72, 0x2f, 0x8e, 0x4a, 0x02, 0x00, 0x00, + proto.RegisterType((*QuerySubmittedBlobStatusRequest)(nil), "cada.v1beta1.QuerySubmittedBlobStatusRequest") + proto.RegisterType((*QuerySubmittedBlobStatusResponse)(nil), "cada.v1beta1.QuerySubmittedBlobStatusResponse") +} + +func init() { proto.RegisterFile("cada/v1beta1/query.proto", fileDescriptor_0579d66f58f8318a) } + +var fileDescriptor_0579d66f58f8318a = []byte{ + // 361 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x51, 0xcd, 0x4a, 0xeb, 0x40, + 0x14, 0xee, 0xf4, 0xb6, 0x85, 0x3b, 0xb7, 0xf7, 0x2e, 0xa6, 0x5c, 0x09, 0x45, 0x62, 0x6c, 0x15, + 0x2a, 0xd2, 0x0c, 0xad, 0x4f, 0x60, 0x41, 0xe8, 0xd6, 0x74, 0xe7, 0xa6, 0x4c, 0x9a, 0x21, 0x1d, + 0x48, 0x67, 0xd2, 0xcc, 0x49, 0x6c, 0xb7, 0x3e, 0x81, 0xe0, 0x13, 0xb8, 0x76, 0xe3, 0x63, 0xb8, + 0x2c, 0xb8, 0x71, 0x29, 0xad, 0x0f, 0x22, 0xcd, 0x14, 0xb5, 0xa0, 0xe8, 0xf2, 0x7c, 0x7f, 0x7c, + 0x7c, 0x07, 0x5b, 0x23, 0x16, 0x30, 0x9a, 0x75, 0x7c, 0x0e, 0xac, 0x43, 0xa7, 0x29, 0x4f, 0xe6, + 0x6e, 0x9c, 0x28, 0x50, 0xa4, 0xba, 0x66, 0xdc, 0x0d, 0x53, 0xdf, 0x0d, 0x95, 0x0a, 0x23, 0x4e, + 0x59, 0x2c, 0x28, 0x93, 0x52, 0x01, 0x03, 0xa1, 0xa4, 0x36, 0xda, 0xfa, 0xff, 0xad, 0x14, 0x98, + 0x19, 0xb8, 0xb1, 0x8f, 0xf7, 0xce, 0xd7, 0x89, 0x83, 0xd4, 0x9f, 0x08, 0x00, 0x1e, 0xf4, 0x22, + 0xe5, 0x0f, 0x80, 0x41, 0xaa, 0x3d, 0x3e, 0x4d, 0xb9, 0x86, 0xc6, 0x3d, 0xc2, 0xce, 0xd7, 0x1a, + 0x1d, 0x2b, 0xa9, 0x39, 0x39, 0xc2, 0xe5, 0x84, 0xc9, 0x90, 0x5b, 0xc8, 0x41, 0xad, 0x3f, 0xdd, + 0x9a, 0xfb, 0xb1, 0x9a, 0xeb, 0xad, 0x29, 0xcf, 0x28, 0xc8, 0x0e, 0xae, 0xe8, 0xdc, 0x6c, 0x15, + 0x1d, 0xd4, 0xfa, 0xed, 0x6d, 0x2e, 0xd2, 0xc4, 0x7f, 0xe3, 0x44, 0x65, 0x5c, 0x0e, 0xc7, 0x5c, + 0x84, 0x63, 0xb0, 0x7e, 0x39, 0xa8, 0x55, 0xf2, 0xaa, 0x06, 0xec, 0xe7, 0x18, 0x39, 0xc0, 0xff, + 0x32, 0x05, 0x42, 0x86, 0x43, 0x2e, 0x03, 0x3d, 0x64, 0x60, 0x95, 0xf2, 0x90, 0xaa, 0x41, 0xcf, + 0x64, 0xa0, 0x4f, 0xa1, 0x7b, 0x87, 0x70, 0x39, 0xaf, 0x4c, 0x6e, 0x11, 0xae, 0x7d, 0xd2, 0x9b, + 0xb4, 0xb7, 0x0b, 0x7e, 0xb3, 0x41, 0xdd, 0xfd, 0xa9, 0xdc, 0xcc, 0xd1, 0x38, 0xbe, 0x7a, 0x7c, + 0xb9, 0x29, 0x1e, 0x92, 0x26, 0x65, 0x19, 0x13, 0x91, 0x1f, 0x29, 0xff, 0x6d, 0x7b, 0x9d, 0xfb, + 0xde, 0x4d, 0xbd, 0xfe, 0xc3, 0xd2, 0x46, 0x8b, 0xa5, 0x8d, 0x9e, 0x97, 0x36, 0xba, 0x5e, 0xd9, + 0x85, 0xc5, 0xca, 0x2e, 0x3c, 0xad, 0xec, 0xc2, 0x85, 0x1b, 0x0a, 0x18, 0xa7, 0xbe, 0x3b, 0x52, + 0x13, 0x9a, 0x09, 0xb8, 0x14, 0x60, 0xf2, 0xda, 0x01, 0x6b, 0x4f, 0x54, 0x90, 0x46, 0x9c, 0xce, + 0x68, 0xfe, 0x58, 0x98, 0xc7, 0x5c, 0xfb, 0x95, 0xfc, 0xa9, 0x27, 0xaf, 0x01, 0x00, 0x00, 0xff, + 0xff, 0xaa, 0x41, 0x02, 0xee, 0x33, 0x02, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -202,7 +202,7 @@ func NewQueryClient(cc grpc1.ClientConn) QueryClient { func (c *queryClient) SubmittedBlobStatus(ctx context.Context, in *QuerySubmittedBlobStatusRequest, opts ...grpc.CallOption) (*QuerySubmittedBlobStatusResponse, error) { out := new(QuerySubmittedBlobStatusResponse) - err := c.cc.Invoke(ctx, "/sdk.avail.v1beta1.Query/SubmittedBlobStatus", in, out, opts...) + err := c.cc.Invoke(ctx, "/cada.v1beta1.Query/SubmittedBlobStatus", in, out, opts...) 
if err != nil { return nil, err } @@ -237,7 +237,7 @@ func _Query_SubmittedBlobStatus_Handler(srv interface{}, ctx context.Context, de } info := &grpc.UnaryServerInfo{ Server: srv, - FullMethod: "/sdk.avail.v1beta1.Query/SubmittedBlobStatus", + FullMethod: "/cada.v1beta1.Query/SubmittedBlobStatus", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(QueryServer).SubmittedBlobStatus(ctx, req.(*QuerySubmittedBlobStatusRequest)) @@ -246,7 +246,7 @@ func _Query_SubmittedBlobStatus_Handler(srv interface{}, ctx context.Context, de } var _Query_serviceDesc = grpc.ServiceDesc{ - ServiceName: "sdk.avail.v1beta1.Query", + ServiceName: "cada.v1beta1.Query", HandlerType: (*QueryServer)(nil), Methods: []grpc.MethodDesc{ { @@ -255,7 +255,7 @@ var _Query_serviceDesc = grpc.ServiceDesc{ }, }, Streams: []grpc.StreamDesc{}, - Metadata: "sdk/avail/v1beta1/query.proto", + Metadata: "cada/v1beta1/query.proto", } func (m *QuerySubmittedBlobStatusRequest) Marshal() (dAtA []byte, err error) { diff --git a/x/cada/types/query.pb.gw.go b/x/cada/types/query.pb.gw.go index 5c782c5..800b835 100644 --- a/x/cada/types/query.pb.gw.go +++ b/x/cada/types/query.pb.gw.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-grpc-gateway. DO NOT EDIT. -// source: sdk/avail/v1beta1/query.proto +// source: cada/v1beta1/query.proto /* Package types is a reverse proxy. diff --git a/x/cada/types/tx.pb.go b/x/cada/types/tx.pb.go index 92419f8..842cda0 100644 --- a/x/cada/types/tx.pb.go +++ b/x/cada/types/tx.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. -// source: sdk/avail/v1beta1/tx.proto +// source: cada/v1beta1/tx.proto package types @@ -62,7 +62,7 @@ func (x BlobStatus) String() string { } func (BlobStatus) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_7f88203cb33986bc, []int{0} + return fileDescriptor_c917a06c648c4256, []int{0} } // Range defines the range of blocks for which the blob is being submitted. 
@@ -77,7 +77,7 @@ func (m *Range) Reset() { *m = Range{} } func (m *Range) String() string { return proto.CompactTextString(m) } func (*Range) ProtoMessage() {} func (*Range) Descriptor() ([]byte, []int) { - return fileDescriptor_7f88203cb33986bc, []int{0} + return fileDescriptor_c917a06c648c4256, []int{0} } func (m *Range) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -136,7 +136,7 @@ func (m *MsgUpdateBlobStatusRequest) Reset() { *m = MsgUpdateBlobStatusR func (m *MsgUpdateBlobStatusRequest) String() string { return proto.CompactTextString(m) } func (*MsgUpdateBlobStatusRequest) ProtoMessage() {} func (*MsgUpdateBlobStatusRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_7f88203cb33986bc, []int{1} + return fileDescriptor_c917a06c648c4256, []int{1} } func (m *MsgUpdateBlobStatusRequest) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -201,7 +201,7 @@ func (m *MsgUpdateBlobStatusResponse) Reset() { *m = MsgUpdateBlobStatus func (m *MsgUpdateBlobStatusResponse) String() string { return proto.CompactTextString(m) } func (*MsgUpdateBlobStatusResponse) ProtoMessage() {} func (*MsgUpdateBlobStatusResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_7f88203cb33986bc, []int{2} + return fileDescriptor_c917a06c648c4256, []int{2} } func (m *MsgUpdateBlobStatusResponse) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -231,45 +231,45 @@ func (m *MsgUpdateBlobStatusResponse) XXX_DiscardUnknown() { var xxx_messageInfo_MsgUpdateBlobStatusResponse proto.InternalMessageInfo func init() { - proto.RegisterEnum("sdk.avail.v1beta1.BlobStatus", BlobStatus_name, BlobStatus_value) - proto.RegisterType((*Range)(nil), "sdk.avail.v1beta1.Range") - proto.RegisterType((*MsgUpdateBlobStatusRequest)(nil), "sdk.avail.v1beta1.MsgUpdateBlobStatusRequest") - proto.RegisterType((*MsgUpdateBlobStatusResponse)(nil), "sdk.avail.v1beta1.MsgUpdateBlobStatusResponse") + proto.RegisterEnum("cada.v1beta1.BlobStatus", BlobStatus_name, BlobStatus_value) + proto.RegisterType((*Range)(nil), "cada.v1beta1.Range") + proto.RegisterType((*MsgUpdateBlobStatusRequest)(nil), "cada.v1beta1.MsgUpdateBlobStatusRequest") + proto.RegisterType((*MsgUpdateBlobStatusResponse)(nil), "cada.v1beta1.MsgUpdateBlobStatusResponse") } -func init() { proto.RegisterFile("sdk/avail/v1beta1/tx.proto", fileDescriptor_7f88203cb33986bc) } +func init() { proto.RegisterFile("cada/v1beta1/tx.proto", fileDescriptor_c917a06c648c4256) } -var fileDescriptor_7f88203cb33986bc = []byte{ +var fileDescriptor_c917a06c648c4256 = []byte{ // 467 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xcf, 0x6e, 0xd3, 0x40, - 0x10, 0xc6, 0xbd, 0x49, 0x8a, 0xe8, 0xa6, 0x42, 0xee, 0x82, 0x48, 0xe4, 0xaa, 0x56, 0xc9, 0x85, - 0x2a, 0x55, 0x6c, 0xa5, 0xdc, 0xe0, 0x94, 0xa4, 0x2e, 0x44, 0x6a, 0x43, 0x65, 0xd7, 0x17, 0x2e, - 0xd6, 0xda, 0x5e, 0x1c, 0x2b, 0x76, 0x37, 0xf5, 0xac, 0xcd, 0x9f, 0x0b, 0x88, 0x13, 0x47, 0xde, - 0x81, 0x17, 0xe8, 0x63, 0x70, 0xec, 0x91, 0x63, 0x95, 0x1c, 0xfa, 0x1a, 0xc8, 0x6b, 0x04, 0x28, - 0x2d, 0x52, 0x4f, 0x3b, 0xfa, 0x7d, 0x9f, 0x76, 0xbe, 0xd9, 0x1d, 0xac, 0x41, 0x38, 0x33, 0x69, - 0x41, 0xe3, 0xc4, 0x2c, 0xfa, 0x3e, 0x13, 0xb4, 0x6f, 0x8a, 0xf7, 0xc6, 0x3c, 0xe3, 0x82, 0x93, - 0x4d, 0x08, 0x67, 0x86, 0xd4, 0x8c, 0xdf, 0x9a, 0xd6, 0x0a, 0x38, 0xa4, 0x1c, 0xcc, 0x14, 0x22, - 0xb3, 0xe8, 0x97, 0x47, 0xe5, 0xd5, 0x1e, 0x45, 0x3c, 0xe2, 0xb2, 0x34, 0xcb, 0xaa, 0xa2, 0x9d, - 0x3d, 0xbc, 0x66, 0xd3, 0xb3, 0x88, 0x11, 0x82, 0x1b, 0x6f, 0x33, 0x9e, 0xb6, 
0xd1, 0x0e, 0xda, - 0x6d, 0xd8, 0xb2, 0x26, 0x0f, 0x70, 0x4d, 0xf0, 0x76, 0x4d, 0x92, 0x9a, 0xe0, 0x9d, 0x2b, 0x84, - 0xb5, 0x63, 0x88, 0xdc, 0x79, 0x48, 0x05, 0x1b, 0x26, 0xdc, 0x77, 0x04, 0x15, 0x39, 0xd8, 0xec, - 0x3c, 0x67, 0x20, 0xc8, 0x1e, 0xde, 0x2c, 0x68, 0x12, 0x87, 0x54, 0xf0, 0xcc, 0xa3, 0x61, 0x98, - 0x31, 0x00, 0x79, 0xdf, 0xba, 0xad, 0xfe, 0x11, 0x06, 0x15, 0x27, 0x2f, 0xf0, 0x86, 0x9f, 0xf0, - 0x60, 0x06, 0x5e, 0x56, 0xf6, 0x97, 0x5d, 0x9a, 0xfb, 0x6d, 0xe3, 0xc6, 0x44, 0x86, 0xcc, 0x67, - 0x37, 0x2b, 0x77, 0x15, 0xf6, 0x09, 0xde, 0x90, 0x1e, 0x6f, 0xca, 0xe2, 0x68, 0x2a, 0xda, 0x75, - 0x19, 0xb1, 0x29, 0xd9, 0x2b, 0x89, 0xc8, 0x36, 0xc6, 0x31, 0x78, 0x90, 0x07, 0x41, 0x99, 0xa2, - 0xb1, 0x83, 0x76, 0xef, 0xdb, 0xeb, 0x31, 0x38, 0x15, 0x78, 0xfe, 0xf8, 0xcb, 0xf5, 0x45, 0xf7, - 0x66, 0xdc, 0xce, 0x36, 0xde, 0xba, 0x75, 0x42, 0x98, 0xf3, 0x33, 0x60, 0xdd, 0x8f, 0x18, 0xff, - 0xa5, 0x64, 0x0b, 0xb7, 0x86, 0x47, 0xaf, 0x87, 0x9e, 0x73, 0x3a, 0x38, 0x75, 0x1d, 0xcf, 0x9d, - 0x38, 0x27, 0xd6, 0x68, 0x7c, 0x38, 0xb6, 0x0e, 0x54, 0x85, 0xb4, 0xf0, 0xc3, 0x7f, 0xc5, 0xc3, - 0xc1, 0xf8, 0xc8, 0xb5, 0x2d, 0x15, 0xad, 0x0a, 0x8e, 0x3b, 0x1a, 0x59, 0x8e, 0xa3, 0xd6, 0x56, - 0x85, 0x13, 0x6b, 0x72, 0x30, 0x9e, 0xbc, 0x54, 0xeb, 0x5a, 0xe3, 0xeb, 0x77, 0x5d, 0xd9, 0xff, - 0x84, 0xeb, 0xc7, 0x10, 0x91, 0x73, 0xac, 0xae, 0xc6, 0x23, 0xbd, 0x5b, 0x9e, 0xed, 0xff, 0x1f, - 0xa5, 0x19, 0x77, 0xb5, 0x57, 0x53, 0x6b, 0x6b, 0x9f, 0xaf, 0x2f, 0xba, 0x68, 0x38, 0xf8, 0xb1, - 0xd0, 0xd1, 0xe5, 0x42, 0x47, 0x57, 0x0b, 0x1d, 0x7d, 0x5b, 0xea, 0xca, 0xe5, 0x52, 0x57, 0x7e, - 0x2e, 0x75, 0xe5, 0xcd, 0xd3, 0x28, 0x16, 0xd3, 0xdc, 0x37, 0x02, 0x9e, 0x9a, 0x45, 0x2c, 0xde, - 0xc5, 0xa2, 0xda, 0xd8, 0x5e, 0x48, 0x7b, 0x29, 0x0f, 0xf3, 0x84, 0x99, 0xe2, 0xc3, 0x9c, 0x81, - 0x7f, 0x4f, 0x6e, 0xdd, 0xb3, 0x5f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xf5, 0x5d, 0xb6, 0xfa, 0xd5, + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0x31, 0x6f, 0xd3, 0x40, + 0x14, 0xc7, 0x7d, 0x49, 0x8a, 0xe8, 0x25, 0x42, 0xe6, 0x0a, 0x24, 0x72, 0x55, 0xab, 0x64, 0x0a, + 0xa9, 0xea, 0x53, 0x8a, 0xc4, 0xc0, 0x96, 0xa4, 0x2e, 0x8d, 0xd4, 0x86, 0xca, 0xae, 0x17, 0x16, + 0xeb, 0x6c, 0x1f, 0x17, 0x0b, 0xbb, 0x17, 0x7c, 0x67, 0x53, 0x98, 0x10, 0x13, 0x23, 0xdf, 0x81, + 0x2f, 0xd0, 0x8f, 0xc1, 0xd8, 0x11, 0x89, 0x05, 0x25, 0x43, 0xbf, 0x06, 0xf2, 0x19, 0x41, 0x09, + 0x20, 0x75, 0xf2, 0xd3, 0xef, 0xff, 0xe4, 0xf7, 0xff, 0xdf, 0x7b, 0xf0, 0x7e, 0x48, 0x22, 0x82, + 0x8b, 0x41, 0x40, 0x25, 0x19, 0x60, 0x79, 0x6e, 0xcd, 0x33, 0x2e, 0x39, 0x6a, 0x95, 0xd8, 0xfa, + 0x89, 0x8d, 0x76, 0xc8, 0x45, 0xca, 0x05, 0x4e, 0x05, 0xc3, 0xc5, 0xa0, 0xfc, 0x54, 0x6d, 0xc6, + 0x3d, 0xc6, 0x19, 0x57, 0x25, 0x2e, 0xab, 0x8a, 0x76, 0x77, 0xe0, 0x9a, 0x43, 0xce, 0x18, 0x45, + 0x08, 0x36, 0x5e, 0x66, 0x3c, 0xed, 0x80, 0x6d, 0xd0, 0x6b, 0x38, 0xaa, 0x46, 0x77, 0x60, 0x4d, + 0xf2, 0x4e, 0x4d, 0x91, 0x9a, 0xe4, 0xdd, 0x6f, 0x00, 0x1a, 0xc7, 0x82, 0x79, 0xf3, 0x88, 0x48, + 0x3a, 0x4a, 0x78, 0xe0, 0x4a, 0x22, 0x73, 0xe1, 0xd0, 0xd7, 0x39, 0x15, 0x12, 0xed, 0xc0, 0xbb, + 0x05, 0x49, 0xe2, 0x88, 0x48, 0x9e, 0xf9, 0x24, 0x8a, 0x32, 0x2a, 0x84, 0xfa, 0xdf, 0xba, 0xa3, + 0xff, 0x12, 0x86, 0x15, 0x47, 0x4f, 0x60, 0x2b, 0x48, 0x78, 0xf8, 0x4a, 0xf8, 0x59, 0x39, 0x5f, + 0x4d, 0x69, 0xee, 0x6d, 0x58, 0xd7, 0xc3, 0x58, 0xca, 0x9a, 0xd3, 0xac, 0x1a, 0x2b, 0x9f, 0x0f, + 0x61, 0x8b, 0x14, 0x24, 0x4e, 0xfc, 0x19, 0x8d, 0xd9, 0x4c, 0x76, 0xea, 0xca, 0x5d, 0x53, 0xb1, + 0x43, 0x85, 0xd0, 0x16, 0x84, 0xb1, 0xf0, 0x45, 0x1e, 0x86, 0xa5, 0x81, 0xc6, 0x36, 0xe8, 0xdd, + 0x76, 
0xd6, 0x63, 0xe1, 0x56, 0xe0, 0xe9, 0x83, 0x0f, 0x57, 0x17, 0xfd, 0xbf, 0x9d, 0x76, 0xb7, + 0xe0, 0xe6, 0x3f, 0xc3, 0x89, 0x39, 0x3f, 0x13, 0xb4, 0xff, 0x0e, 0xc2, 0xdf, 0x14, 0x6d, 0xc2, + 0xf6, 0xe8, 0xe8, 0xf9, 0xc8, 0x77, 0x4f, 0x87, 0xa7, 0x9e, 0xeb, 0x7b, 0x53, 0xf7, 0xc4, 0x1e, + 0x4f, 0x0e, 0x26, 0xf6, 0xbe, 0xae, 0xa1, 0x36, 0xdc, 0xb8, 0x2e, 0x1e, 0x0c, 0x27, 0x47, 0x9e, + 0x63, 0xeb, 0x60, 0x55, 0x70, 0xbd, 0xf1, 0xd8, 0x76, 0x5d, 0xbd, 0xb6, 0x2a, 0x9c, 0xd8, 0xd3, + 0xfd, 0xc9, 0xf4, 0x99, 0x5e, 0x37, 0x1a, 0x1f, 0x3f, 0x9b, 0xda, 0x5e, 0x0e, 0xeb, 0xc7, 0x82, + 0x21, 0x06, 0xf5, 0x55, 0x7b, 0xa8, 0xf7, 0xe7, 0x8b, 0xfd, 0x7f, 0x3d, 0xc6, 0xa3, 0x1b, 0x74, + 0x56, 0x59, 0x8d, 0xb5, 0xf7, 0x57, 0x17, 0x7d, 0x30, 0x3a, 0xfc, 0xb2, 0x30, 0xc1, 0xe5, 0xc2, + 0x04, 0xdf, 0x17, 0x26, 0xf8, 0xb4, 0x34, 0xb5, 0xcb, 0xa5, 0xa9, 0x7d, 0x5d, 0x9a, 0xda, 0x0b, + 0x8b, 0xc5, 0x72, 0x96, 0x07, 0x56, 0xc8, 0x53, 0x5c, 0xc4, 0xf2, 0x4d, 0x2c, 0xb1, 0xda, 0xc0, + 0x6e, 0x44, 0x76, 0x53, 0x1e, 0xe5, 0x09, 0xc5, 0xe7, 0x58, 0x9d, 0xab, 0x7c, 0x3b, 0xa7, 0x22, + 0xb8, 0xa5, 0xae, 0xed, 0xf1, 0x8f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x30, 0x43, 0x13, 0x63, 0xc3, 0x02, 0x00, 0x00, } @@ -299,7 +299,7 @@ func NewMsgClient(cc grpc1.ClientConn) MsgClient { func (c *msgClient) UpdateBlobStatus(ctx context.Context, in *MsgUpdateBlobStatusRequest, opts ...grpc.CallOption) (*MsgUpdateBlobStatusResponse, error) { out := new(MsgUpdateBlobStatusResponse) - err := c.cc.Invoke(ctx, "/sdk.avail.v1beta1.Msg/UpdateBlobStatus", in, out, opts...) + err := c.cc.Invoke(ctx, "/cada.v1beta1.Msg/UpdateBlobStatus", in, out, opts...) if err != nil { return nil, err } @@ -334,7 +334,7 @@ func _Msg_UpdateBlobStatus_Handler(srv interface{}, ctx context.Context, dec fun } info := &grpc.UnaryServerInfo{ Server: srv, - FullMethod: "/sdk.avail.v1beta1.Msg/UpdateBlobStatus", + FullMethod: "/cada.v1beta1.Msg/UpdateBlobStatus", } handler := func(ctx context.Context, req interface{}) (interface{}, error) { return srv.(MsgServer).UpdateBlobStatus(ctx, req.(*MsgUpdateBlobStatusRequest)) @@ -343,7 +343,7 @@ func _Msg_UpdateBlobStatus_Handler(srv interface{}, ctx context.Context, dec fun } var _Msg_serviceDesc = grpc.ServiceDesc{ - ServiceName: "sdk.avail.v1beta1.Msg", + ServiceName: "cada.v1beta1.Msg", HandlerType: (*MsgServer)(nil), Methods: []grpc.MethodDesc{ { @@ -352,7 +352,7 @@ var _Msg_serviceDesc = grpc.ServiceDesc{ }, }, Streams: []grpc.StreamDesc{}, - Metadata: "sdk/avail/v1beta1/tx.proto", + Metadata: "cada/v1beta1/tx.proto", } func (m *Range) Marshal() (dAtA []byte, err error) { diff --git a/x/cada/types/vote_extensions.pb.go b/x/cada/types/vote_extensions.pb.go index 6601e17..2b0628d 100644 --- a/x/cada/types/vote_extensions.pb.go +++ b/x/cada/types/vote_extensions.pb.go @@ -1,5 +1,5 @@ // Code generated by protoc-gen-gogo. DO NOT EDIT. 
-// source: sdk/avail/v1beta1/vote_extensions.proto +// source: cada/v1beta1/vote_extensions.proto package types @@ -34,7 +34,7 @@ func (m *AvailVoteExtension) Reset() { *m = AvailVoteExtension{} } func (m *AvailVoteExtension) String() string { return proto.CompactTextString(m) } func (*AvailVoteExtension) ProtoMessage() {} func (*AvailVoteExtension) Descriptor() ([]byte, []int) { - return fileDescriptor_007bc07f2f11afa0, []int{0} + return fileDescriptor_194cb859e020afeb, []int{0} } func (m *AvailVoteExtension) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -78,29 +78,29 @@ func (m *AvailVoteExtension) GetRange() *Range { } func init() { - proto.RegisterType((*AvailVoteExtension)(nil), "sdk.avail.v1beta1.AvailVoteExtension") + proto.RegisterType((*AvailVoteExtension)(nil), "cada.v1beta1.AvailVoteExtension") } func init() { - proto.RegisterFile("sdk/avail/v1beta1/vote_extensions.proto", fileDescriptor_007bc07f2f11afa0) + proto.RegisterFile("cada/v1beta1/vote_extensions.proto", fileDescriptor_194cb859e020afeb) } -var fileDescriptor_007bc07f2f11afa0 = []byte{ - // 221 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x2f, 0x4e, 0xc9, 0xd6, - 0x4f, 0x2c, 0x4b, 0xcc, 0xcc, 0xd1, 0x2f, 0x33, 0x4c, 0x4a, 0x2d, 0x49, 0x34, 0xd4, 0x2f, 0xcb, - 0x2f, 0x49, 0x8d, 0x4f, 0xad, 0x28, 0x49, 0xcd, 0x2b, 0xce, 0xcc, 0xcf, 0x2b, 0xd6, 0x2b, 0x28, - 0xca, 0x2f, 0xc9, 0x17, 0x12, 0x2c, 0x4e, 0xc9, 0xd6, 0x03, 0x2b, 0xd4, 0x83, 0x2a, 0x94, 0x92, - 0xc2, 0xd4, 0x5b, 0x52, 0x01, 0x51, 0xae, 0x94, 0xce, 0x25, 0xe4, 0x08, 0x92, 0x09, 0xcb, 0x2f, - 0x49, 0x75, 0x85, 0x99, 0x25, 0xa4, 0xc8, 0xc5, 0x03, 0x56, 0x1f, 0x9f, 0x91, 0x9a, 0x99, 0x9e, - 0x51, 0x22, 0xc1, 0xa8, 0xc0, 0xa8, 0xc1, 0x1c, 0xc4, 0x0d, 0x16, 0xf3, 0x00, 0x0b, 0x09, 0xe9, - 0x71, 0xb1, 0x16, 0x25, 0xe6, 0xa5, 0xa7, 0x4a, 0x30, 0x29, 0x30, 0x6a, 0x70, 0x1b, 0x49, 0xe8, - 0x61, 0xd8, 0xab, 0x17, 0x04, 0x92, 0x0f, 0x82, 0x28, 0x73, 0x72, 0x3c, 0xf1, 0x48, 0x8e, 0xf1, - 0xc2, 0x23, 0x39, 0xc6, 0x07, 0x8f, 0xe4, 0x18, 0x27, 0x3c, 0x96, 0x63, 0xb8, 0xf0, 0x58, 0x8e, - 0xe1, 0xc6, 0x63, 0x39, 0x86, 0x28, 0xf5, 0xf4, 0xcc, 0x92, 0x8c, 0xd2, 0x24, 0xbd, 0xe4, 0xfc, - 0x5c, 0xfd, 0xb2, 0xcc, 0x92, 0xf2, 0xcc, 0x12, 0x88, 0x63, 0x75, 0x53, 0x12, 0x75, 0x73, 0xf3, - 0x53, 0x4a, 0x73, 0x52, 0xf5, 0x4b, 0x2a, 0x0b, 0x52, 0x8b, 0x93, 0xd8, 0xc0, 0x4e, 0x36, 0x06, - 0x04, 0x00, 0x00, 0xff, 0xff, 0x54, 0x29, 0xaf, 0xb7, 0x0c, 0x01, 0x00, 0x00, +var fileDescriptor_194cb859e020afeb = []byte{ + // 220 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x4a, 0x4e, 0x4c, 0x49, + 0xd4, 0x2f, 0x33, 0x4c, 0x4a, 0x2d, 0x49, 0x34, 0xd4, 0x2f, 0xcb, 0x2f, 0x49, 0x8d, 0x4f, 0xad, + 0x28, 0x49, 0xcd, 0x2b, 0xce, 0xcc, 0xcf, 0x2b, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, + 0x01, 0xa9, 0xd1, 0x83, 0xaa, 0x91, 0x12, 0x45, 0xd1, 0x51, 0x52, 0x01, 0x51, 0xa4, 0x94, 0xc4, + 0x25, 0xe4, 0x58, 0x96, 0x98, 0x99, 0x13, 0x96, 0x5f, 0x92, 0xea, 0x0a, 0x33, 0x41, 0x48, 0x91, + 0x8b, 0x27, 0x11, 0x24, 0x1a, 0x9f, 0x91, 0x9a, 0x99, 0x9e, 0x51, 0x22, 0xc1, 0xa8, 0xc0, 0xa8, + 0xc1, 0x1c, 0xc4, 0x0d, 0x16, 0xf3, 0x00, 0x0b, 0x09, 0x69, 0x72, 0xb1, 0x16, 0x25, 0xe6, 0xa5, + 0xa7, 0x4a, 0x30, 0x29, 0x30, 0x6a, 0x70, 0x1b, 0x09, 0xeb, 0x21, 0xdb, 0xa6, 0x17, 0x04, 0x92, + 0x0a, 0x82, 0xa8, 0x70, 0xf2, 0x38, 0xf1, 0x48, 0x8e, 0xf1, 0xc2, 0x23, 0x39, 0xc6, 0x07, 0x8f, + 0xe4, 0x18, 0x27, 0x3c, 0x96, 0x63, 0xb8, 0xf0, 0x58, 0x8e, 0xe1, 0xc6, 0x63, 0x39, 0x86, 
0x28, + 0xbd, 0xf4, 0xcc, 0x92, 0x8c, 0xd2, 0x24, 0xbd, 0xe4, 0xfc, 0x5c, 0xfd, 0xb2, 0xcc, 0x92, 0xf2, + 0xcc, 0x12, 0x7d, 0xb0, 0x1d, 0xba, 0x29, 0x89, 0xba, 0xb9, 0xf9, 0x29, 0xa5, 0x39, 0xa9, 0xfa, + 0x15, 0xfa, 0x60, 0x87, 0x97, 0x54, 0x16, 0xa4, 0x16, 0x27, 0xb1, 0x81, 0x1d, 0x6d, 0x0c, 0x08, + 0x00, 0x00, 0xff, 0xff, 0x62, 0x70, 0xd4, 0x80, 0xff, 0x00, 0x00, 0x00, } func (m *AvailVoteExtension) Marshal() (dAtA []byte, err error) { From 83b35fdef8ff63fbbaeb0452be7f998abced5bef Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Wed, 25 Sep 2024 16:41:54 +0530 Subject: [PATCH 06/34] update cli tests --- network/network.go | 46 +++--- x/cada/client/cli/cli_test.go | 257 +++++++++++++++++----------------- 2 files changed, 155 insertions(+), 148 deletions(-) diff --git a/network/network.go b/network/network.go index 0f08bc7..36a5344 100644 --- a/network/network.go +++ b/network/network.go @@ -105,28 +105,30 @@ type Config struct { LegacyAmino *codec.LegacyAmino // TODO: Remove! InterfaceRegistry codectypes.InterfaceRegistry - TxConfig client.TxConfig - AccountRetriever client.AccountRetriever - AppConstructor AppConstructor // the ABCI application constructor - GenesisState map[string]json.RawMessage // custom genesis state to provide - TimeoutCommit time.Duration // the consensus commitment timeout - ChainID string // the network chain-id - NumValidators int // the total number of validators to create and bond - Mnemonics []string // custom user-provided validator operator mnemonics - BondDenom string // the staking bond denomination - MinGasPrices string // the minimum gas prices each validator will accept - AccountTokens sdkmath.Int // the amount of unique validator tokens (e.g. 1000node0) - StakingTokens sdkmath.Int // the amount of tokens each validator has available to stake - BondedTokens sdkmath.Int // the amount of tokens each validator stakes - PruningStrategy string // the pruning strategy each validator will have - EnableLogging bool // enable logging to STDOUT - CleanupDir bool // remove base temporary directory during cleanup - SigningAlgo string // signing algorithm for keys - KeyringOptions []keyring.Option // keyring configuration options - RPCAddress string // RPC listen address (including port) - APIAddress string // REST API listen address (including port) - GRPCAddress string // GRPC server listen address (including port) - PrintMnemonic bool // print the mnemonic of first validator as log output for testing + TxConfig client.TxConfig + AccountRetriever client.AccountRetriever + AppConstructor AppConstructor // the ABCI application constructor + GenesisState map[string]json.RawMessage // custom genesis state to provide + TimeoutCommit time.Duration // the consensus commitment timeout + ChainID string // the network chain-id + NumValidators int // the total number of validators to create and bond + Mnemonics []string // custom user-provided validator operator mnemonics + BondDenom string // the staking bond denomination + MinGasPrices string // the minimum gas prices each validator will accept + AccountTokens sdkmath.Int // the amount of unique validator tokens (e.g. 
1000node0) + StakingTokens sdkmath.Int // the amount of tokens each validator has available to stake + BondedTokens sdkmath.Int // the amount of tokens each validator stakes + PruningStrategy string // the pruning strategy each validator will have + EnableLogging bool // enable logging to STDOUT + CleanupDir bool // remove base temporary directory during cleanup + SigningAlgo string // signing algorithm for keys + KeyringOptions []keyring.Option // keyring configuration options + RPCAddress string // RPC listen address (including port) + APIAddress string // REST API listen address (including port) + GRPCAddress string // GRPC server listen address (including port) + PrintMnemonic bool // print the mnemonic of first validator as log output for testing + LightClientURL string + PublishBlobInterval string } // DefaultConfig returns a sane default configuration suitable for nearly all diff --git a/x/cada/client/cli/cli_test.go b/x/cada/client/cli/cli_test.go index 7d973d0..83c02d4 100644 --- a/x/cada/client/cli/cli_test.go +++ b/x/cada/client/cli/cli_test.go @@ -1,128 +1,133 @@ package cli_test -// import ( -// "fmt" -// "testing" - -// "github.com/cosmos/cosmos-sdk/client/flags" -// "github.com/cosmos/cosmos-sdk/crypto/hd" -// sdk "github.com/cosmos/cosmos-sdk/types" -// "github.com/stretchr/testify/suite" -// "github.com/vitwit/avail-da-module/client/cli" -// network "github.com/vitwit/avail-da-module/network" - -// app "simapp/app" - -// clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" -// ) - -// func TestIntegrationTestSuite(t *testing.T) { -// suite.Run(t, new(IntegrationTestSuite)) -// } - -// type IntegrationTestSuite struct { -// suite.Suite - -// cfg network.Config -// network *network.Network -// addresses []string -// } - -// const aliceMnemonic = "all soap kiwi cushion federal skirt tip shock exist tragic verify lunar shine rely torch please view future lizard garbage humble medal leisure mimic" - -// func (s *IntegrationTestSuite) SetupSuite() { -// s.T().Log("setting up integration test suite") - -// var err error - -// // Setup network config -// cfg := network.DefaultConfig(app.NewTestNetworkFixture) -// cfg.NumValidators = 1 -// s.cfg = cfg - -// // Initialize the network -// s.network, err = network.New(s.T(), s.T().TempDir(), cfg) -// s.Require().NoError(err) - -// kb := s.network.Validators[0].ClientCtx.Keyring -// path := sdk.GetConfig().GetFullBIP44Path() -// info, err := kb.NewAccount("alice", aliceMnemonic, "", path, hd.Secp256k1) -// s.Require().NoError(err) - -// add, err := info.GetAddress() -// s.Require().NoError(err) -// s.addresses = append(s.addresses, add.String()) - -// _, err = s.network.WaitForHeight(1) -// s.Require().NoError(err) -// } - -// func (s *IntegrationTestSuite) TearDownSuite() { -// s.T().Log("tearing down integration suite") -// s.network.Cleanup() -// } - -// func (s *IntegrationTestSuite) TestNewUpdateBlobStatusCmd() { -// val := s.network.Validators[0] - -// testCases := []struct { -// name string -// args []string -// expectErr bool -// }{ -// { -// "update blob status - success", -// []string{ -// "1", -// "10", -// "success", -// "120", -// fmt.Sprintf("--%s=%s", flags.FlagFrom, s.addresses[0]), -// fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation), -// fmt.Sprintf("--%s=%s", flags.FlagBroadcastMode, flags.BroadcastSync), -// }, -// false, -// }, -// { -// "update blob status - failure", -// []string{ -// "1", -// "10", -// "failure", -// "120", -// fmt.Sprintf("--%s=%s", flags.FlagFrom, s.addresses[0]), -// 
fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation), -// fmt.Sprintf("--%s=%s", flags.FlagBroadcastMode, flags.BroadcastSync), -// }, -// false, -// }, -// { -// "update blob status - invalid status", -// []string{ -// "1", -// "10", -// "invalid", -// "120", -// fmt.Sprintf("--%s=%s", flags.FlagFrom, s.addresses[0]), -// fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation), -// fmt.Sprintf("--%s=%s", flags.FlagBroadcastMode, flags.BroadcastSync), -// }, -// false, -// }, -// } - -// for _, tc := range testCases { -// s.Run(tc.name, func() { -// cmd := cli.NewUpdateBlobStatusCmd() -// res, err := clitestutil.ExecTestCLICmd(val.ClientCtx, cmd, tc.args) -// if tc.expectErr { -// if err != nil { -// s.Require().Error(err) -// } -// } - -// s.Require().NoError(nil) -// s.Require().NotNil(res) -// }) -// } -// } +import ( + "fmt" + "testing" + + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/crypto/hd" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/stretchr/testify/suite" + network "github.com/vitwit/avail-da-module/network" + "github.com/vitwit/avail-da-module/x/cada/client/cli" + + clitestutil "github.com/cosmos/cosmos-sdk/testutil/cli" +) + +func TestIntegrationTestSuite(t *testing.T) { + suite.Run(t, new(IntegrationTestSuite)) +} + +type IntegrationTestSuite struct { + suite.Suite + + cfg network.Config + network *network.Network + addresses []string +} + +const aliceMnemonic = "all soap kiwi cushion federal skirt tip shock exist tragic verify lunar shine rely torch please view future lizard garbage humble medal leisure mimic" + +func (s *IntegrationTestSuite) SetupSuite() { + s.T().Log("setting up integration test suite") + + var err error + + appConfig := network.MinimumAppConfig() + + cfg, err := network.DefaultConfigWithAppConfig(appConfig) + s.Require().NoError(err) + + s.cfg = cfg + + cfg.NumValidators = 1 + cfg.MinGasPrices = "0.000006stake" + cfg.PublishBlobInterval = "5" + cfg.LightClientURL = "http://127.0.0.1:8000" + + // Initialize the network + s.network, err = network.New(s.T(), s.T().TempDir(), cfg) + s.Require().NoError(err) + + kb := s.network.Validators[0].ClientCtx.Keyring + path := sdk.GetConfig().GetFullBIP44Path() + info, err := kb.NewAccount("alice", aliceMnemonic, "", path, hd.Secp256k1) + s.Require().NoError(err) + + add, err := info.GetAddress() + s.Require().NoError(err) + s.addresses = append(s.addresses, add.String()) + + _, err = s.network.WaitForHeight(1) + s.Require().NoError(err) +} + +func (s *IntegrationTestSuite) TearDownSuite() { + s.T().Log("tearing down integration suite") + s.network.Cleanup() +} + +func (s *IntegrationTestSuite) TestNewUpdateBlobStatusCmd() { + val := s.network.Validators[0] + + testCases := []struct { + name string + args []string + expectErr bool + }{ + { + "update blob status - success", + []string{ + "1", + "10", + "success", + "120", + fmt.Sprintf("--%s=%s", flags.FlagFrom, s.addresses[0]), + fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation), + fmt.Sprintf("--%s=%s", flags.FlagBroadcastMode, flags.BroadcastSync), + }, + false, + }, + { + "update blob status - failure", + []string{ + "1", + "10", + "failure", + "120", + fmt.Sprintf("--%s=%s", flags.FlagFrom, s.addresses[0]), + fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation), + fmt.Sprintf("--%s=%s", flags.FlagBroadcastMode, flags.BroadcastSync), + }, + false, + }, + { + "update blob status - invalid status", + []string{ + "1", + "10", + "invalid", + "120", + fmt.Sprintf("--%s=%s", flags.FlagFrom, s.addresses[0]), + 
fmt.Sprintf("--%s=true", flags.FlagSkipConfirmation), + fmt.Sprintf("--%s=%s", flags.FlagBroadcastMode, flags.BroadcastSync), + }, + false, + }, + } + + for _, tc := range testCases { + s.Run(tc.name, func() { + cmd := cli.NewUpdateBlobStatusCmd() + res, err := clitestutil.ExecTestCLICmd(val.ClientCtx, cmd, tc.args) + if tc.expectErr { + if err != nil { + s.Require().Error(err) + } + } + + s.Require().NoError(nil) + s.Require().NotNil(res) + }) + } +} From ef2295766cd7ed14424a617c88f0636d56f92115 Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Wed, 25 Sep 2024 23:44:03 +0530 Subject: [PATCH 07/34] add sims tests --- .github/workflows/sims.yml | 130 +++++++++++++++++++++++++++++++++ Makefile | 78 ++++++++++++++++++++ simapp/app/app.go | 2 +- x/cada/module/module.go | 36 +++++++-- x/cada/simulation/genesis.go | 16 ++++ x/cada/simulation/oprations.go | 79 ++++++++++++++++++++ x/cada/types/msg.go | 20 +++++ 7 files changed, 355 insertions(+), 6 deletions(-) create mode 100644 .github/workflows/sims.yml create mode 100644 x/cada/simulation/genesis.go create mode 100644 x/cada/simulation/oprations.go create mode 100644 x/cada/types/msg.go diff --git a/.github/workflows/sims.yml b/.github/workflows/sims.yml new file mode 100644 index 0000000..84e9017 --- /dev/null +++ b/.github/workflows/sims.yml @@ -0,0 +1,130 @@ +name: Sims +# Sims workflow runs multiple types of simulations (nondeterminism, import-export, after-import, multi-seed-short) +# This workflow will run on all Pull Requests, if a .go, .mod or .sum file have been changed +on: + schedule: + - cron: "0 */2 * * *" + release: + types: [published] + +concurrency: + group: ci-${{ github.ref }}-sims + cancel-in-progress: true + +jobs: + build: + permissions: + contents: read # for actions/checkout to fetch code + runs-on: ubuntu-latest + if: "!contains(github.event.head_commit.message, 'skip-sims')" + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-go@v4 + with: + go-version: "1.21" + check-latest: true + - run: make build + - name: Install runsim + run: go install github.com/cosmos/tools/cmd/runsim@v1.0.0 + - uses: actions/cache@v3 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-runsim-binary + + test-sim-import-export: + runs-on: ubuntu-latest + needs: [build] + timeout-minutes: 60 + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-go@v4 + with: + go-version: "1.21" + check-latest: true + - uses: actions/cache@v3 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-runsim-binary + - name: test-sim-import-export + run: | + make test-sim-import-export + + test-sim-after-import: + runs-on: ubuntu-latest + needs: [build] + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-go@v4 + with: + go-version: "1.21" + check-latest: true + - uses: actions/cache@v3 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-runsim-binary + - name: test-sim-after-import + run: | + make test-sim-after-import + + test-sim-multi-seed-short: + runs-on: ubuntu-latest + needs: [build] + timeout-minutes: 60 + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-go@v4 + with: + go-version: "1.21" + check-latest: true + - uses: actions/cache@v3 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-runsim-binary + - name: test-sim-multi-seed-short + run: | + make test-sim-multi-seed-short + + sims-notify-success: + needs: + [test-sim-multi-seed-short, test-sim-after-import, test-sim-import-export] + runs-on: ubuntu-latest + if: ${{ success() }} + steps: + - name: Check out repository + uses: actions/checkout@v3 + - name: Get 
previous workflow status + uses: ./.github/actions/last-workflow-status + id: last_status + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + + - name: Notify Slack on success + if: ${{ steps.last_status.outputs.last_status == 'failure' }} + uses: rtCamp/action-slack-notify@v2.2.0 + env: + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} + SLACK_CHANNEL: sdk-sims + SLACK_USERNAME: Sim Tests + SLACK_ICON_EMOJI: ":white_check_mark:" + SLACK_COLOR: good + SLACK_MESSAGE: Sims are passing + SLACK_FOOTER: "" + + sims-notify-failure: + permissions: + contents: none + needs: + [test-sim-multi-seed-short, test-sim-after-import, test-sim-import-export] + runs-on: ubuntu-latest + if: ${{ failure() }} + steps: + - name: Notify Slack on failure + uses: rtCamp/action-slack-notify@v2.2.0 + env: + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} + SLACK_CHANNEL: sdk-sims + SLACK_USERNAME: Sim Tests + SLACK_ICON_EMOJI: ":skull:" + SLACK_COLOR: danger + SLACK_MESSAGE: Sims are failing + SLACK_FOOTER: "" \ No newline at end of file diff --git a/Makefile b/Makefile index 82780b1..ad49bd4 100644 --- a/Makefile +++ b/Makefile @@ -3,6 +3,7 @@ DOCKER := $(shell which docker) export GO111MODULE = on +SIMAPP = ./simapp ############################################################################### ### e2e ### @@ -140,3 +141,80 @@ else endif .PHONY: run-tests test test-all $(TEST_TARGETS) + +test-sim-nondeterminism: + @echo "Running non-determinism test..." + @go test -mod=readonly $(SIMAPP) -run TestAppStateDeterminism -Enabled=true \ + -NumBlocks=20 -BlockSize=200 -Commit=true -Period=0 -v -timeout 24h + +test-sim-custom-genesis-fast: + @echo "Running custom genesis simulation..." + @echo "By default, ${HOME}/.cada/config/genesis.json will be used." + @go test -mod=readonly $(SIMAPP) -run TestFullAppSimulation -Genesis=${HOME}/.cada/config/genesis.json \ + -Enabled=true -NumBlocks=100 -BlockSize=200 -Commit=true -Seed=99 -Period=5 -v -timeout 24h + +test-sim-import-export: runsim + @echo "Running application import/export simulation. This may take several minutes..." + @$(BINDIR)/runsim -Jobs=4 -SimAppPkg=$(APP) -ExitOnFail 50 5 TestAppImportExport + +test-sim-after-import: runsim + @echo "Running application simulation-after-import. This may take several minutes..." + @$(BINDIR)/runsim -Jobs=4 -SimAppPkg=$(APP) -ExitOnFail 50 5 TestAppSimulationAfterImport + +test-sim-custom-genesis-multi-seed: runsim + @echo "Running multi-seed custom genesis simulation..." + @echo "By default, ${HOME}/.univ/config/genesis.json will be used." + @$(BINDIR)/runsim -Genesis=${HOME}/.univ/config/genesis.json -SimAppPkg=$(APP) -ExitOnFail 400 5 TestFullAppSimulation + +test-sim-multi-seed-long: runsim + @echo "Running long multi-seed application simulation. This may take awhile!" + @$(BINDIR)/runsim -Jobs=4 -SimAppPkg=$(APP) -ExitOnFail 500 50 TestFullAppSimulation + +test-sim-multi-seed-short: runsim + @echo "Running short multi-seed application simulation. This may take awhile!" + @$(BINDIR)/runsim -Jobs=4 -SimAppPkg=$(APP) -ExitOnFail 50 10 TestFullAppSimulation + +test-sim-benchmark-invariants: + @echo "Running simulation invariant benchmarks..." 
+ @go test -mod=readonly $(APP) -benchmem -bench=BenchmarkInvariants -run=^$ \ + -Enabled=true -NumBlocks=1000 -BlockSize=200 \ + -Period=1 -Commit=true -Seed=57 -v -timeout 24h + +.PHONY: \ +test-sim-nondeterminism \ +test-sim-custom-genesis-fast \ +test-sim-import-export \ +test-sim-after-import \ +test-sim-custom-genesis-multi-seed \ +test-sim-multi-seed-short \ +test-sim-multi-seed-long \ +test-sim-benchmark-invariants + +SIM_NUM_BLOCKS ?= 500 +SIM_BLOCK_SIZE ?= 200 +SIM_COMMIT ?= true + +test-sim-benchmark: + @echo "Running application benchmark for numBlocks=$(SIM_NUM_BLOCKS), blockSize=$(SIM_BLOCK_SIZE). This may take awhile!" + @go test -mod=readonly -benchmem -run=^$$ $(APP) -bench ^BenchmarkFullAppSimulation$$ \ + -Enabled=true -NumBlocks=$(SIM_NUM_BLOCKS) -BlockSize=$(SIM_BLOCK_SIZE) -Commit=$(SIM_COMMIT) -timeout 24h + +test-sim-profile: + @echo "Running application benchmark for numBlocks=$(SIM_NUM_BLOCKS), blockSize=$(SIM_BLOCK_SIZE). This may take awhile!" + @go test -mod=readonly -benchmem -run=^$$ $(APP) -bench ^BenchmarkFullAppSimulation$$ \ + -Enabled=true -NumBlocks=$(SIM_NUM_BLOCKS) -BlockSize=$(SIM_BLOCK_SIZE) -Commit=$(SIM_COMMIT) -timeout 24h -cpuprofile cpu.out -memprofile mem.out + +.PHONY: test-sim-profile test-sim-benchmark + +test-cover: + @export VERSION=$(VERSION); bash -x scripts/test_cover.sh +.PHONY: test-cover + +test-rosetta: + docker build -t rosetta-ci:latest -f contrib/rosetta/node/Dockerfile . + docker-compose -f contrib/rosetta/docker-compose.yaml up --abort-on-container-exit --exit-code-from test_rosetta --build +.PHONY: test-rosetta + +benchmark: + @go test -mod=readonly -bench=. $(PACKAGES_NOSIMULATION) +.PHONY: benchmark \ No newline at end of file diff --git a/simapp/app/app.go b/simapp/app/app.go index 45a3b42..a0acaea 100644 --- a/simapp/app/app.go +++ b/simapp/app/app.go @@ -771,7 +771,7 @@ func NewChainApp( ibctm.NewAppModule(), crisis.NewAppModule(app.CrisisKeeper, skipGenesisInvariants, app.GetSubspace(crisistypes.ModuleName)), // custom - cadamodule.NewAppModule(appCodec, app.CadaKeeper), + cadamodule.NewAppModule(appCodec, app.CadaKeeper, app.AccountKeeper, app.BankKeeper), packetforward.NewAppModule(app.PacketForwardKeeper, app.GetSubspace(packetforwardtypes.ModuleName)), ) diff --git a/x/cada/module/module.go b/x/cada/module/module.go index c3de2a7..ecc19c9 100644 --- a/x/cada/module/module.go +++ b/x/cada/module/module.go @@ -12,10 +12,15 @@ import ( codectypes "github.com/cosmos/cosmos-sdk/codec/types" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/types/module" + simtypes "github.com/cosmos/cosmos-sdk/types/simulation" + authkeeper "github.com/cosmos/cosmos-sdk/x/auth/keeper" + bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper" + govkeeper "github.com/cosmos/cosmos-sdk/x/gov/keeper" gwruntime "github.com/grpc-ecosystem/grpc-gateway/runtime" "github.com/spf13/cobra" "github.com/vitwit/avail-da-module/x/cada/client/cli" "github.com/vitwit/avail-da-module/x/cada/keeper" + simulation "github.com/vitwit/avail-da-module/x/cada/simulation" types "github.com/vitwit/avail-da-module/x/cada/types" ) @@ -64,15 +69,20 @@ func (ab AppModuleBasic) RegisterInterfaces(registry codectypes.InterfaceRegistr } type AppModule struct { - cdc codec.Codec - keeper *keeper.Keeper + cdc codec.Codec + keeper *keeper.Keeper + authkeeper authkeeper.AccountKeeper + bankkeeper bankkeeper.Keeper + govkeeper govkeeper.Keeper } // NewAppModule creates a new AppModule object -func NewAppModule(cdc codec.Codec, keeper *keeper.Keeper) 
AppModule { +func NewAppModule(cdc codec.Codec, keeper *keeper.Keeper, ak authkeeper.AccountKeeper, bk bankkeeper.Keeper) AppModule { return AppModule{ - cdc: cdc, - keeper: keeper, + cdc: cdc, + keeper: keeper, + authkeeper: ak, + bankkeeper: bk, } } @@ -164,3 +174,19 @@ func (am AppModule) IsOnePerModuleType() {} // IsAppModule implements the appmodule.AppModule interface. func (am AppModule) IsAppModule() {} + +func (AppModule) GenerateGenesisState(simState *module.SimulationState) { + simulation.RandomizedGenState(simState) +} + +// RegisterStoreDecoder registers a decoder for distribution module's types +func (am AppModule) RegisterStoreDecoder(sdr simtypes.StoreDecoderRegistry) { +} + +// WeightedOperations returns the all the accounts module operations with their respective weights. +func (am AppModule) WeightedOperations(simState module.SimulationState) []simtypes.WeightedOperation { + return simulation.WeightedOperations( + simState.AppParams, simState.Cdc, simState.TxConfig, + am.authkeeper, am.bankkeeper, *am.keeper, + ) +} diff --git a/x/cada/simulation/genesis.go b/x/cada/simulation/genesis.go new file mode 100644 index 0000000..5f32926 --- /dev/null +++ b/x/cada/simulation/genesis.go @@ -0,0 +1,16 @@ +package simulation + +import ( + "github.com/cosmos/cosmos-sdk/types/module" + types "github.com/vitwit/avail-da-module/x/cada/types" +) + +// RandomizedGenState creates a randomized GenesisState for testing. +func RandomizedGenState(simState *module.SimulationState) { + // Since your GenesisState is empty, there's not much to randomize. + // We'll just set the GenesisState to its empty struct. + genesis := types.GenesisState{} + + // Here we use simState to set the default genesis + simState.GenState[types.ModuleName] = simState.Cdc.MustMarshalJSON(&genesis) +} diff --git a/x/cada/simulation/oprations.go b/x/cada/simulation/oprations.go new file mode 100644 index 0000000..0064980 --- /dev/null +++ b/x/cada/simulation/oprations.go @@ -0,0 +1,79 @@ +package simulation + +import ( + "math/rand" + + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/codec" + sdk "github.com/cosmos/cosmos-sdk/types" + simtypes "github.com/cosmos/cosmos-sdk/types/simulation" + authkeeper "github.com/cosmos/cosmos-sdk/x/auth/keeper" + bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper" + "github.com/cosmos/cosmos-sdk/x/simulation" + "github.com/vitwit/avail-da-module/x/cada/keeper" + availtypes "github.com/vitwit/avail-da-module/x/cada/types" +) + +const ( + OpWeightMsgUpdateBlobStatusRequest = "op_weight_msg_update_blob_status" + + DefaultWeightMsgUpdateStatusRequest = 100 +) + +func WeightedOperations( + appParams simtypes.AppParams, cdc codec.JSONCodec, txConfig client.TxConfig, + ak authkeeper.AccountKeeper, bk bankkeeper.Keeper, k keeper.Keeper, +) simulation.WeightedOperations { + var weightMsgUpdateBlobStatusRequest int + appParams.GetOrGenerate(OpWeightMsgUpdateBlobStatusRequest, &weightMsgUpdateBlobStatusRequest, nil, func(_ *rand.Rand) { + weightMsgUpdateBlobStatusRequest = DefaultWeightMsgUpdateStatusRequest + }) + + return simulation.WeightedOperations{ + simulation.NewWeightedOperation( + weightMsgUpdateBlobStatusRequest, + SimulateUpdateBlobStatus(txConfig, ak, bk, k), + ), + } + +} + +func SimulateUpdateBlobStatus(txConfig client.TxConfig, ak authkeeper.AccountKeeper, bk bankkeeper.Keeper, k keeper.Keeper) simtypes.Operation { + return func( + r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, 
chainID string, + ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { + + simaAccount, _ := simtypes.RandomAcc(r, accs) + + fromBlock := r.Uint64()%100 + 1 + toBlock := fromBlock + r.Uint64()%10 + + isSuccess := r.Intn(2) == 1 + + availHeight := r.Uint64()%100 + 1 + + blockRange := availtypes.Range{ + From: fromBlock, + To: toBlock, + } + + account := ak.GetAccount(ctx, simaAccount.Address) + spendable := bk.SpendableCoins(ctx, account.GetAddress()) + + msg := availtypes.NewMsgUpdateBlobStatus(simaAccount.Address.String(), blockRange, availHeight, isSuccess) + txCtx := simulation.OperationInput{ + R: r, + App: app, + TxGen: txConfig, + Cdc: nil, + Msg: msg, + Context: ctx, + SimAccount: simaAccount, + ModuleName: availtypes.ModuleName, + CoinsSpentInMsg: spendable, + } + + return simulation.GenAndDeliverTxWithRandFees(txCtx) + } +} diff --git a/x/cada/types/msg.go b/x/cada/types/msg.go new file mode 100644 index 0000000..da27cdd --- /dev/null +++ b/x/cada/types/msg.go @@ -0,0 +1,20 @@ +package types + +import sdk "github.com/cosmos/cosmos-sdk/types" + +const ( + TypeMsgUpdateBlobStatus = "update_blob_Status" +) + +var ( + _ sdk.Msg = (*MsgUpdateBlobStatusRequest)(nil) +) + +func NewMsgUpdateBlobStatus(valAddr string, blockRange Range, availHeight uint64, isSuccess bool) *MsgUpdateBlobStatusRequest { + return &MsgUpdateBlobStatusRequest{ + ValidatorAddress: valAddr, + BlocksRange: &blockRange, + AvailHeight: availHeight, + IsSuccess: isSuccess, + } +} From a98408355142e6200971320924590a15f77b8e7f Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Fri, 27 Sep 2024 10:35:15 +0530 Subject: [PATCH 08/34] fix: lint --- x/cada/module/module.go | 4 +--- x/cada/simulation/oprations.go | 6 ++---- x/cada/types/msg.go | 4 +--- 3 files changed, 4 insertions(+), 10 deletions(-) diff --git a/x/cada/module/module.go b/x/cada/module/module.go index ecc19c9..8180d24 100644 --- a/x/cada/module/module.go +++ b/x/cada/module/module.go @@ -15,7 +15,6 @@ import ( simtypes "github.com/cosmos/cosmos-sdk/types/simulation" authkeeper "github.com/cosmos/cosmos-sdk/x/auth/keeper" bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper" - govkeeper "github.com/cosmos/cosmos-sdk/x/gov/keeper" gwruntime "github.com/grpc-ecosystem/grpc-gateway/runtime" "github.com/spf13/cobra" "github.com/vitwit/avail-da-module/x/cada/client/cli" @@ -73,7 +72,6 @@ type AppModule struct { keeper *keeper.Keeper authkeeper authkeeper.AccountKeeper bankkeeper bankkeeper.Keeper - govkeeper govkeeper.Keeper } // NewAppModule creates a new AppModule object @@ -180,7 +178,7 @@ func (AppModule) GenerateGenesisState(simState *module.SimulationState) { } // RegisterStoreDecoder registers a decoder for distribution module's types -func (am AppModule) RegisterStoreDecoder(sdr simtypes.StoreDecoderRegistry) { +func (am AppModule) RegisterStoreDecoder(_ simtypes.StoreDecoderRegistry) { } // WeightedOperations returns the all the accounts module operations with their respective weights. 
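For reference, a minimal sketch of exercising the NewMsgUpdateBlobStatus constructor introduced in x/cada/types/msg.go above, which both the simulation operation and the CLI integration tests rely on. The field names (ValidatorAddress, BlocksRange, AvailHeight, IsSuccess) and the 1/10/120 values come from the patches; the test file itself and the bech32 address placeholder are illustrative assumptions, not part of the series.

package types_test

import (
	"testing"

	"github.com/stretchr/testify/require"

	types "github.com/vitwit/avail-da-module/x/cada/types"
)

func TestNewMsgUpdateBlobStatus(t *testing.T) {
	// Block range and Avail height mirror the arguments used by the CLI tests above.
	rng := types.Range{From: 1, To: 10}

	// "cosmos1..." is a placeholder account address, not a real key.
	msg := types.NewMsgUpdateBlobStatus("cosmos1...", rng, 120, true)

	require.Equal(t, uint64(1), msg.BlocksRange.From)
	require.Equal(t, uint64(10), msg.BlocksRange.To)
	require.Equal(t, uint64(120), msg.AvailHeight)
	require.True(t, msg.IsSuccess)
}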
diff --git a/x/cada/simulation/oprations.go b/x/cada/simulation/oprations.go index 0064980..ec0a03d 100644 --- a/x/cada/simulation/oprations.go +++ b/x/cada/simulation/oprations.go @@ -22,7 +22,7 @@ const ( ) func WeightedOperations( - appParams simtypes.AppParams, cdc codec.JSONCodec, txConfig client.TxConfig, + appParams simtypes.AppParams, _ codec.JSONCodec, txConfig client.TxConfig, ak authkeeper.AccountKeeper, bk bankkeeper.Keeper, k keeper.Keeper, ) simulation.WeightedOperations { var weightMsgUpdateBlobStatusRequest int @@ -36,14 +36,12 @@ func WeightedOperations( SimulateUpdateBlobStatus(txConfig, ak, bk, k), ), } - } -func SimulateUpdateBlobStatus(txConfig client.TxConfig, ak authkeeper.AccountKeeper, bk bankkeeper.Keeper, k keeper.Keeper) simtypes.Operation { +func SimulateUpdateBlobStatus(txConfig client.TxConfig, ak authkeeper.AccountKeeper, bk bankkeeper.Keeper, _ keeper.Keeper) simtypes.Operation { return func( r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { - simaAccount, _ := simtypes.RandomAcc(r, accs) fromBlock := r.Uint64()%100 + 1 diff --git a/x/cada/types/msg.go b/x/cada/types/msg.go index da27cdd..95dbaf9 100644 --- a/x/cada/types/msg.go +++ b/x/cada/types/msg.go @@ -6,9 +6,7 @@ const ( TypeMsgUpdateBlobStatus = "update_blob_Status" ) -var ( - _ sdk.Msg = (*MsgUpdateBlobStatusRequest)(nil) -) +var _ sdk.Msg = (*MsgUpdateBlobStatusRequest)(nil) func NewMsgUpdateBlobStatus(valAddr string, blockRange Range, availHeight uint64, isSuccess bool) *MsgUpdateBlobStatusRequest { return &MsgUpdateBlobStatusRequest{ From 89034c36d1edb4e4aa1613ce5905467c30167f55 Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Fri, 27 Sep 2024 15:19:08 +0530 Subject: [PATCH 09/34] fix: simulation tests --- .github/workflows/sims.yml | 18 ++ Makefile | 17 +- simapp/app/app.go | 2 +- simapp/app/sim_bench_test.go | 164 ++++++++++++++ simapp/app/sim_test.go | 397 +++++++++++++++++++++++++++++++++ x/cada/keeper/abci.go | 4 + x/cada/module/module.go | 2 +- x/cada/simulation/oprations.go | 23 +- 8 files changed, 601 insertions(+), 26 deletions(-) create mode 100644 simapp/app/sim_bench_test.go create mode 100644 simapp/app/sim_test.go diff --git a/.github/workflows/sims.yml b/.github/workflows/sims.yml index 84e9017..895081a 100644 --- a/.github/workflows/sims.yml +++ b/.github/workflows/sims.yml @@ -31,6 +31,24 @@ jobs: path: ~/go/bin key: ${{ runner.os }}-go-runsim-binary + test-sim-nondeterminism: + runs-on: ubuntu-latest + needs: [build] + timeout-minutes: 60 + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-go@v4 + with: + go-version: "1.21" + check-latest: true + - uses: actions/cache@v3 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-runsim-binary + - name: test-sim-import-export + run: | + make test-sim-nondeterminism + test-sim-import-export: runs-on: ubuntu-latest needs: [build] diff --git a/Makefile b/Makefile index ad49bd4..9342f28 100644 --- a/Makefile +++ b/Makefile @@ -3,7 +3,8 @@ DOCKER := $(shell which docker) export GO111MODULE = on -SIMAPP = ./simapp +SIMAPP = ./simapp/app/ +BINDIR ?= $(GOPATH)/bin ############################################################################### ### e2e ### @@ -76,6 +77,7 @@ lint-fix: GO := go TARGET := cada BINDIR ?= $(GOPATH)/bin +CURRENT_DIR = $(shell pwd) .PHONY: all build install clean @@ -142,9 +144,12 @@ endif .PHONY: run-tests test test-all $(TEST_TARGETS) +runsim: + go install github.com/cosmos/tools/cmd/runsim@v1.0.0 + 
test-sim-nondeterminism: @echo "Running non-determinism test..." - @go test -mod=readonly $(SIMAPP) -run TestAppStateDeterminism -Enabled=true \ + @cd ${CURRENT_DIR}/simapp/app && go test -mod=readonly -run TestAppStateDeterminism -Enabled=true \ -NumBlocks=20 -BlockSize=200 -Commit=true -Period=0 -v -timeout 24h test-sim-custom-genesis-fast: @@ -153,13 +158,13 @@ test-sim-custom-genesis-fast: @go test -mod=readonly $(SIMAPP) -run TestFullAppSimulation -Genesis=${HOME}/.cada/config/genesis.json \ -Enabled=true -NumBlocks=100 -BlockSize=200 -Commit=true -Seed=99 -Period=5 -v -timeout 24h -test-sim-import-export: runsim - @echo "Running application import/export simulation. This may take several minutes..." - @$(BINDIR)/runsim -Jobs=4 -SimAppPkg=$(APP) -ExitOnFail 50 5 TestAppImportExport +# test-sim-import-export: runsim +# @echo "Running application import/export simulation. This may take several minutes..." +# @cd ${CURRENT_DIR}/simapp && $(BINDIR)/runsim -Jobs=4 -SimAppPkg=. -ExitOnFail 50 5 TestAppImportExport test-sim-after-import: runsim @echo "Running application simulation-after-import. This may take several minutes..." - @$(BINDIR)/runsim -Jobs=4 -SimAppPkg=$(APP) -ExitOnFail 50 5 TestAppSimulationAfterImport + @cd ${CURRENT_DIR}/simapp && $(BINDIR)/runsim -Jobs=4 -SimAppPkg=. -ExitOnFail 50 5 TestAppSimulationAfterImport test-sim-custom-genesis-multi-seed: runsim @echo "Running multi-seed custom genesis simulation..." diff --git a/simapp/app/app.go b/simapp/app/app.go index a0acaea..6d9f4e4 100644 --- a/simapp/app/app.go +++ b/simapp/app/app.go @@ -893,7 +893,7 @@ func NewChainApp( // NOTE: this is not required apps that don't use the simulator for fuzz testing // transactions overrideModules := map[string]module.AppModuleSimulation{ - authtypes.ModuleName: auth.NewAppModule(app.appCodec, app.AccountKeeper, authsims.RandomGenesisAccounts, app.GetSubspace(authtypes.ModuleName)), + authtypes.ModuleName: auth.NewAppModule(app.appCodec, app.AccountKeeper, authsims.RandomGenesisAccounts, nil), } app.sm = module.NewSimulationManagerFromAppModules(app.ModuleManager.Modules, overrideModules) diff --git a/simapp/app/sim_bench_test.go b/simapp/app/sim_bench_test.go new file mode 100644 index 0000000..7e89fce --- /dev/null +++ b/simapp/app/sim_bench_test.go @@ -0,0 +1,164 @@ +package app + +import ( + "fmt" + "os" + "testing" + + cmtproto "github.com/cometbft/cometbft/proto/tendermint/types" + flag "github.com/spf13/pflag" + "github.com/spf13/viper" + "github.com/stretchr/testify/require" + + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/server" + simtestutil "github.com/cosmos/cosmos-sdk/testutil/sims" + simtypes "github.com/cosmos/cosmos-sdk/types/simulation" + "github.com/cosmos/cosmos-sdk/x/simulation" + simcli "github.com/cosmos/cosmos-sdk/x/simulation/client/cli" +) + +var FlagEnableBenchStreamingValue bool + +// Get flags every time the simulator is run +func init() { + flag.BoolVar(&FlagEnableBenchStreamingValue, "EnableStreaming", false, "Enable streaming service") +} + +// Profile with: +// /usr/local/go/bin/go test -benchmem -run=^$ cosmossdk.io/simapp -bench ^BenchmarkFullAppSimulation$ -Commit=true -cpuprofile cpu.out +func BenchmarkFullAppSimulation(b *testing.B) { + b.ReportAllocs() + + config := simcli.NewConfigFromFlags() + config.ChainID = SimAppChainID + + db, dir, logger, skip, err := simtestutil.SetupSimulation(config, "goleveldb-app-sim", "Simulation", simcli.FlagVerboseValue, 
simcli.FlagEnabledValue) + if err != nil { + b.Fatalf("simulation setup failed: %s", err.Error()) + } + + if skip { + b.Skip("skipping benchmark application simulation") + } + + defer func() { + require.NoError(b, db.Close()) + require.NoError(b, os.RemoveAll(dir)) + }() + + appOptions := viper.New() + if FlagEnableStreamingValue { + m := make(map[string]interface{}) + m["streaming.abci.keys"] = []string{"*"} + m["streaming.abci.plugin"] = "abci_v1" + m["streaming.abci.stop-node-on-err"] = true + for key, value := range m { + appOptions.SetDefault(key, value) + } + } + appOptions.SetDefault(flags.FlagHome, DefaultNodeHome) + appOptions.SetDefault(server.FlagInvCheckPeriod, simcli.FlagPeriodValue) + + app := NewChainApp(logger, db, nil, true, appOptions, interBlockCacheOpt(), baseapp.SetChainID(SimAppChainID)) + + // run randomized simulation + _, simParams, simErr := simulation.SimulateFromSeed( + b, + os.Stdout, + app.BaseApp, + simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), app.DefaultGenesis()), + simtypes.RandomAccounts, // Replace with own random account function if using keys other than secp256k1 + simtestutil.SimulationOperations(app, app.AppCodec(), config), + BlockedAddresses(), + config, + app.AppCodec(), + ) + + // export state and simParams before the simulation error is checked + if err = simtestutil.CheckExportSimulation(app, config, simParams); err != nil { + b.Fatal(err) + } + + if simErr != nil { + b.Fatal(simErr) + } + + if config.Commit { + simtestutil.PrintStats(db) + } +} + +func BenchmarkInvariants(b *testing.B) { + b.ReportAllocs() + + config := simcli.NewConfigFromFlags() + config.ChainID = SimAppChainID + + db, dir, logger, skip, err := simtestutil.SetupSimulation(config, "leveldb-app-invariant-bench", "Simulation", simcli.FlagVerboseValue, simcli.FlagEnabledValue) + if err != nil { + b.Fatalf("simulation setup failed: %s", err.Error()) + } + + if skip { + b.Skip("skipping benchmark application simulation") + } + + config.AllInvariants = false + + defer func() { + require.NoError(b, db.Close()) + require.NoError(b, os.RemoveAll(dir)) + }() + + appOptions := make(simtestutil.AppOptionsMap, 0) + appOptions[flags.FlagHome] = DefaultNodeHome + appOptions[server.FlagInvCheckPeriod] = simcli.FlagPeriodValue + + app := NewChainApp(logger, db, nil, true, appOptions, interBlockCacheOpt(), baseapp.SetChainID(SimAppChainID)) + + // run randomized simulation + _, simParams, simErr := simulation.SimulateFromSeed( + b, + os.Stdout, + app.BaseApp, + simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), app.DefaultGenesis()), + simtypes.RandomAccounts, // Replace with own random account function if using keys other than secp256k1 + simtestutil.SimulationOperations(app, app.AppCodec(), config), + BlockedAddresses(), + config, + app.AppCodec(), + ) + + // export state and simParams before the simulation error is checked + if err = simtestutil.CheckExportSimulation(app, config, simParams); err != nil { + b.Fatal(err) + } + + if simErr != nil { + b.Fatal(simErr) + } + + if config.Commit { + simtestutil.PrintStats(db) + } + + ctx := app.NewContextLegacy(true, cmtproto.Header{Height: app.LastBlockHeight() + 1}) + + // 3. Benchmark each invariant separately + // + // NOTE: We use the crisis keeper as it has all the invariants registered with + // their respective metadata which makes it useful for testing/benchmarking. 
+ for _, cr := range app.CrisisKeeper.Routes() { + cr := cr + b.Run(fmt.Sprintf("%s/%s", cr.ModuleName, cr.Route), func(b *testing.B) { + if res, stop := cr.Invar(ctx); stop { + b.Fatalf( + "broken invariant at block %d of %d\n%s", + ctx.BlockHeight()-1, config.NumBlocks, res, + ) + } + }) + } +} diff --git a/simapp/app/sim_test.go b/simapp/app/sim_test.go new file mode 100644 index 0000000..9596e01 --- /dev/null +++ b/simapp/app/sim_test.go @@ -0,0 +1,397 @@ +package app + +import ( + "encoding/json" + "flag" + "fmt" + "math/rand" + "os" + "runtime/debug" + "strings" + "testing" + + abci "github.com/cometbft/cometbft/abci/types" + cmtproto "github.com/cometbft/cometbft/proto/tendermint/types" + dbm "github.com/cosmos/cosmos-db" + "github.com/spf13/viper" + "github.com/stretchr/testify/require" + + "cosmossdk.io/log" + "cosmossdk.io/store" + storetypes "cosmossdk.io/store/types" + "cosmossdk.io/x/feegrant" + + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/server" + simtestutil "github.com/cosmos/cosmos-sdk/testutil/sims" + simtypes "github.com/cosmos/cosmos-sdk/types/simulation" + authzkeeper "github.com/cosmos/cosmos-sdk/x/authz/keeper" + "github.com/cosmos/cosmos-sdk/x/simulation" + simcli "github.com/cosmos/cosmos-sdk/x/simulation/client/cli" + slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types" + stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" +) + +// SimAppChainID hardcoded chainID for simulation +const SimAppChainID = "simulation-app" + +var FlagEnableStreamingValue bool + +// Get flags every time the simulator is run +func init() { + simcli.GetSimulatorFlags() + flag.BoolVar(&FlagEnableStreamingValue, "EnableStreaming", false, "Enable streaming service") +} + +// fauxMerkleModeOpt returns a BaseApp option to use a dbStoreAdapter instead of +// an IAVLStore for faster simulation speed. +func fauxMerkleModeOpt(bapp *baseapp.BaseApp) { + bapp.SetFauxMerkleMode() +} + +// interBlockCacheOpt returns a BaseApp option function that sets the persistent +// inter-block write-through cache. 
+func interBlockCacheOpt() func(*baseapp.BaseApp) { + return baseapp.SetInterBlockCache(store.NewCommitKVStoreCacheManager()) +} + +func TestFullAppSimulation(t *testing.T) { + config := simcli.NewConfigFromFlags() + config.ChainID = SimAppChainID + + db, dir, logger, skip, err := simtestutil.SetupSimulation(config, "leveldb-app-sim", "Simulation", simcli.FlagVerboseValue, simcli.FlagEnabledValue) + if skip { + t.Skip("skipping application simulation") + } + require.NoError(t, err, "simulation setup failed") + + defer func() { + require.NoError(t, db.Close()) + require.NoError(t, os.RemoveAll(dir)) + }() + + appOptions := make(simtestutil.AppOptionsMap, 0) + appOptions[flags.FlagHome] = DefaultNodeHome + appOptions[server.FlagInvCheckPeriod] = simcli.FlagPeriodValue + + app := NewChainApp(logger, db, nil, true, appOptions, fauxMerkleModeOpt, baseapp.SetChainID(SimAppChainID)) + require.Equal(t, appName, app.Name()) + + // run randomized simulation + _, simParams, simErr := simulation.SimulateFromSeed( + t, + os.Stdout, + app.BaseApp, + simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), app.DefaultGenesis()), + simtypes.RandomAccounts, // Replace with own random account function if using keys other than secp256k1 + simtestutil.SimulationOperations(app, app.AppCodec(), config), + BlockedAddresses(), + config, + app.AppCodec(), + ) + + // export state and simParams before the simulation error is checked + err = simtestutil.CheckExportSimulation(app, config, simParams) + require.NoError(t, err) + require.NoError(t, simErr) + + if config.Commit { + simtestutil.PrintStats(db) + } +} + +func TestAppImportExport(t *testing.T) { + config := simcli.NewConfigFromFlags() + config.ChainID = SimAppChainID + + db, dir, logger, skip, err := simtestutil.SetupSimulation(config, "leveldb-app-sim", "Simulation", simcli.FlagVerboseValue, simcli.FlagEnabledValue) + if skip { + t.Skip("skipping application import/export simulation") + } + require.NoError(t, err, "simulation setup failed") + + defer func() { + require.NoError(t, db.Close()) + require.NoError(t, os.RemoveAll(dir)) + }() + + appOptions := make(simtestutil.AppOptionsMap, 0) + appOptions[flags.FlagHome] = DefaultNodeHome + appOptions[server.FlagInvCheckPeriod] = simcli.FlagPeriodValue + + app := NewChainApp(logger, db, nil, true, appOptions, fauxMerkleModeOpt, baseapp.SetChainID(SimAppChainID)) + require.Equal(t, appName, app.Name()) + + // Run randomized simulation + _, simParams, simErr := simulation.SimulateFromSeed( + t, + os.Stdout, + app.BaseApp, + simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), app.DefaultGenesis()), + simtypes.RandomAccounts, // Replace with own random account function if using keys other than secp256k1 + simtestutil.SimulationOperations(app, app.AppCodec(), config), + BlockedAddresses(), + config, + app.AppCodec(), + ) + + // export state and simParams before the simulation error is checked + err = simtestutil.CheckExportSimulation(app, config, simParams) + require.NoError(t, err) + require.NoError(t, simErr) + + if config.Commit { + simtestutil.PrintStats(db) + } + + fmt.Printf("exporting genesis...\n") + + exported, err := app.ExportAppStateAndValidators(false, []string{}, []string{}) + require.NoError(t, err) + + fmt.Printf("importing genesis...\n") + + newDB, newDir, _, _, err := simtestutil.SetupSimulation(config, "leveldb-app-sim-2", "Simulation-2", simcli.FlagVerboseValue, simcli.FlagEnabledValue) + require.NoError(t, err, "simulation setup failed") + + defer func() { + require.NoError(t, 
newDB.Close()) + require.NoError(t, os.RemoveAll(newDir)) + }() + + newApp := NewChainApp(log.NewNopLogger(), newDB, nil, true, appOptions, fauxMerkleModeOpt, baseapp.SetChainID(SimAppChainID)) + require.Equal(t, appName, newApp.Name()) + + var genesisState GenesisState + err = json.Unmarshal(exported.AppState, &genesisState) + require.NoError(t, err) + + ctxA := app.NewContextLegacy(true, cmtproto.Header{Height: app.LastBlockHeight()}) + ctxB := newApp.NewContextLegacy(true, cmtproto.Header{Height: app.LastBlockHeight()}) + _, err = newApp.ModuleManager.InitGenesis(ctxB, app.AppCodec(), genesisState) + + if err != nil { + if strings.Contains(err.Error(), "validator set is empty after InitGenesis") { + logger.Info("Skipping simulation as all validators have been unbonded") + logger.Info("err", err, "stacktrace", string(debug.Stack())) + return + } + } + + require.NoError(t, err) + err = newApp.StoreConsensusParams(ctxB, exported.ConsensusParams) + require.NoError(t, err) + fmt.Printf("comparing stores...\n") + + // skip certain prefixes + skipPrefixes := map[string][][]byte{ + stakingtypes.StoreKey: { + stakingtypes.UnbondingQueueKey, stakingtypes.RedelegationQueueKey, stakingtypes.ValidatorQueueKey, + stakingtypes.HistoricalInfoKey, stakingtypes.UnbondingIDKey, stakingtypes.UnbondingIndexKey, + stakingtypes.UnbondingTypeKey, stakingtypes.ValidatorUpdatesKey, + }, + authzkeeper.StoreKey: {authzkeeper.GrantQueuePrefix}, + feegrant.StoreKey: {feegrant.FeeAllowanceQueueKeyPrefix}, + slashingtypes.StoreKey: {slashingtypes.ValidatorMissedBlockBitmapKeyPrefix}, + } + + storeKeys := app.GetStoreKeys() + require.NotEmpty(t, storeKeys) + + for _, appKeyA := range storeKeys { + // only compare kvstores + if _, ok := appKeyA.(*storetypes.KVStoreKey); !ok { + continue + } + + keyName := appKeyA.Name() + appKeyB := newApp.GetKey(keyName) + + storeA := ctxA.KVStore(appKeyA) + storeB := ctxB.KVStore(appKeyB) + + failedKVAs, failedKVBs := simtestutil.DiffKVStores(storeA, storeB, skipPrefixes[keyName]) + require.Equal(t, len(failedKVAs), len(failedKVBs), "unequal sets of key-values to compare %s", keyName) + + require.Equal(t, 0, len(failedKVAs), simtestutil.GetSimulationLog(keyName, app.SimulationManager().StoreDecoders, failedKVAs, failedKVBs)) + } +} + +func TestAppSimulationAfterImport(t *testing.T) { + config := simcli.NewConfigFromFlags() + config.ChainID = SimAppChainID + + db, dir, logger, skip, err := simtestutil.SetupSimulation(config, "leveldb-app-sim", "Simulation", simcli.FlagVerboseValue, simcli.FlagEnabledValue) + if skip { + t.Skip("skipping application simulation after import") + } + require.NoError(t, err, "simulation setup failed") + + defer func() { + require.NoError(t, db.Close()) + require.NoError(t, os.RemoveAll(dir)) + }() + + appOptions := make(simtestutil.AppOptionsMap, 0) + appOptions[flags.FlagHome] = DefaultNodeHome + appOptions[server.FlagInvCheckPeriod] = simcli.FlagPeriodValue + + app := NewChainApp(logger, db, nil, true, appOptions, fauxMerkleModeOpt, baseapp.SetChainID(SimAppChainID)) + require.Equal(t, appName, app.Name()) + + // Run randomized simulation + stopEarly, simParams, simErr := simulation.SimulateFromSeed( + t, + os.Stdout, + app.BaseApp, + simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), app.DefaultGenesis()), + simtypes.RandomAccounts, // Replace with own random account function if using keys other than secp256k1 + simtestutil.SimulationOperations(app, app.AppCodec(), config), + BlockedAddresses(), + config, + app.AppCodec(), + ) + + // export 
state and simParams before the simulation error is checked + err = simtestutil.CheckExportSimulation(app, config, simParams) + require.NoError(t, err) + require.NoError(t, simErr) + + if config.Commit { + simtestutil.PrintStats(db) + } + + if stopEarly { + fmt.Println("can't export or import a zero-validator genesis, exiting test...") + return + } + + fmt.Printf("exporting genesis...\n") + + exported, err := app.ExportAppStateAndValidators(true, []string{}, []string{}) + require.NoError(t, err) + + fmt.Printf("importing genesis...\n") + + newDB, newDir, _, _, err := simtestutil.SetupSimulation(config, "leveldb-app-sim-2", "Simulation-2", simcli.FlagVerboseValue, simcli.FlagEnabledValue) + require.NoError(t, err, "simulation setup failed") + + defer func() { + require.NoError(t, newDB.Close()) + require.NoError(t, os.RemoveAll(newDir)) + }() + + newApp := NewChainApp(log.NewNopLogger(), newDB, nil, true, appOptions, fauxMerkleModeOpt, baseapp.SetChainID(SimAppChainID)) + require.Equal(t, appName, newApp.Name()) + + newApp.InitChain(&abci.RequestInitChain{ + AppStateBytes: exported.AppState, + ChainId: SimAppChainID, + }) + + _, _, err = simulation.SimulateFromSeed( + t, + os.Stdout, + newApp.BaseApp, + simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), app.DefaultGenesis()), + simtypes.RandomAccounts, // Replace with own random account function if using keys other than secp256k1 + simtestutil.SimulationOperations(newApp, newApp.AppCodec(), config), + BlockedAddresses(), + config, + app.AppCodec(), + ) + require.NoError(t, err) +} + +// TODO: Make another test for the fuzzer itself, which just has noOp txs +// and doesn't depend on the application. +func TestAppStateDeterminism(t *testing.T) { + if !simcli.FlagEnabledValue { + t.Skip("skipping application simulation") + } + + config := simcli.NewConfigFromFlags() + config.InitialBlockHeight = 1 + config.ExportParamsPath = "" + config.OnOperation = false + config.AllInvariants = false + config.ChainID = SimAppChainID + + numSeeds := 3 + numTimesToRunPerSeed := 3 // This used to be set to 5, but we've temporarily reduced it to 3 for the sake of faster CI. 
+ appHashList := make([]json.RawMessage, numTimesToRunPerSeed) + + // We will be overriding the random seed and just run a single simulation on the provided seed value + if config.Seed != simcli.DefaultSeedValue { + numSeeds = 1 + } + + appOptions := viper.New() + if FlagEnableStreamingValue { + m := make(map[string]interface{}) + m["streaming.abci.keys"] = []string{"*"} + m["streaming.abci.plugin"] = "abci_v1" + m["streaming.abci.stop-node-on-err"] = true + for key, value := range m { + appOptions.SetDefault(key, value) + } + } + appOptions.SetDefault(flags.FlagHome, DefaultNodeHome) + appOptions.SetDefault(server.FlagInvCheckPeriod, simcli.FlagPeriodValue) + if simcli.FlagVerboseValue { + appOptions.SetDefault(flags.FlagLogLevel, "debug") + } + + for i := 0; i < numSeeds; i++ { + if config.Seed == simcli.DefaultSeedValue { + config.Seed = rand.Int63() + } + + fmt.Println("config.Seed: ", config.Seed) + + for j := 0; j < numTimesToRunPerSeed; j++ { + var logger log.Logger + if simcli.FlagVerboseValue { + logger = log.NewTestLogger(t) + } else { + logger = log.NewNopLogger() + } + + db := dbm.NewMemDB() + app := NewChainApp(logger, db, nil, true, appOptions, interBlockCacheOpt(), baseapp.SetChainID(SimAppChainID)) + + fmt.Printf( + "running non-determinism simulation; seed %d: %d/%d, attempt: %d/%d\n", + config.Seed, i+1, numSeeds, j+1, numTimesToRunPerSeed, + ) + + _, _, err := simulation.SimulateFromSeed( + t, + os.Stdout, + app.BaseApp, + simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), app.DefaultGenesis()), + simtypes.RandomAccounts, // Replace with own random account function if using keys other than secp256k1 + simtestutil.SimulationOperations(app, app.AppCodec(), config), + BlockedAddresses(), + config, + app.AppCodec(), + ) + require.NoError(t, err) + + if config.Commit { + simtestutil.PrintStats(db) + } + + appHash := app.LastCommitID().Hash + appHashList[j] = appHash + + if j != 0 { + require.Equal( + t, string(appHashList[0]), string(appHashList[j]), + "non-determinism in seed %d: %d/%d, attempt: %d/%d\n", config.Seed, i+1, numSeeds, j+1, numTimesToRunPerSeed, + ) + } + } + } +} diff --git a/x/cada/keeper/abci.go b/x/cada/keeper/abci.go index b49dca8..941e306 100644 --- a/x/cada/keeper/abci.go +++ b/x/cada/keeper/abci.go @@ -175,6 +175,10 @@ func (k *Keeper) PreBlocker(ctx sdk.Context, req *abci.RequestFinalizeBlock) err // IsValidBlockToPostToDA checks if the given block height is valid for posting data. // The block is considered valid if it meets the defined interval for posting. func (k *Keeper) IsValidBlockToPostToDA(height uint64) bool { + if k.relayer.AvailConfig.PublishBlobInterval == 0 { + return false + } + if height <= uint64(1) { return false } diff --git a/x/cada/module/module.go b/x/cada/module/module.go index 8180d24..85ba96d 100644 --- a/x/cada/module/module.go +++ b/x/cada/module/module.go @@ -178,7 +178,7 @@ func (AppModule) GenerateGenesisState(simState *module.SimulationState) { } // RegisterStoreDecoder registers a decoder for distribution module's types -func (am AppModule) RegisterStoreDecoder(_ simtypes.StoreDecoderRegistry) { +func (am AppModule) RegisterStoreDecoder(sdr simtypes.StoreDecoderRegistry) { } // WeightedOperations returns the all the accounts module operations with their respective weights. 
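For context on the guard added to IsValidBlockToPostToDA in the abci.go hunk above, a minimal standalone sketch of the interval gate. Only the two early returns appear in the diff, so the trailing modulo check against the publish interval is an assumption about how the rest of the function applies PublishBlobInterval.

package main

import "fmt"

// isValidBlockToPost sketches the height gate guarded in the hunk above; the
// modulo condition is assumed, not taken from the diff.
func isValidBlockToPost(height, publishBlobInterval uint64) bool {
	if publishBlobInterval == 0 {
		// A zero interval would make the modulo check below meaningless (and a
		// division by zero), so never post in that case.
		return false
	}
	if height <= 1 {
		// Skip genesis and the first block.
		return false
	}
	// Assumed: post only when the height lands on the configured interval.
	return height%publishBlobInterval == 0
}

func main() {
	// With an interval of 5, heights 5, 10, 15, ... qualify; 0 disables posting.
	fmt.Println(isValidBlockToPost(10, 5), isValidBlockToPost(10, 0)) // true false
}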
diff --git a/x/cada/simulation/oprations.go b/x/cada/simulation/oprations.go index ec0a03d..6d9292c 100644 --- a/x/cada/simulation/oprations.go +++ b/x/cada/simulation/oprations.go @@ -22,7 +22,7 @@ const ( ) func WeightedOperations( - appParams simtypes.AppParams, _ codec.JSONCodec, txConfig client.TxConfig, + appParams simtypes.AppParams, cdc codec.JSONCodec, txConfig client.TxConfig, ak authkeeper.AccountKeeper, bk bankkeeper.Keeper, k keeper.Keeper, ) simulation.WeightedOperations { var weightMsgUpdateBlobStatusRequest int @@ -33,12 +33,13 @@ func WeightedOperations( return simulation.WeightedOperations{ simulation.NewWeightedOperation( weightMsgUpdateBlobStatusRequest, - SimulateUpdateBlobStatus(txConfig, ak, bk, k), + SimulateUpdateBlobStatus(txConfig, cdc, ak, bk, k), ), } } -func SimulateUpdateBlobStatus(txConfig client.TxConfig, ak authkeeper.AccountKeeper, bk bankkeeper.Keeper, _ keeper.Keeper) simtypes.Operation { +func SimulateUpdateBlobStatus(txConfig client.TxConfig, cdc codec.JSONCodec, ak authkeeper.AccountKeeper, + bk bankkeeper.Keeper, _ keeper.Keeper) simtypes.Operation { return func( r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { @@ -56,22 +57,8 @@ func SimulateUpdateBlobStatus(txConfig client.TxConfig, ak authkeeper.AccountKee To: toBlock, } - account := ak.GetAccount(ctx, simaAccount.Address) - spendable := bk.SpendableCoins(ctx, account.GetAddress()) - msg := availtypes.NewMsgUpdateBlobStatus(simaAccount.Address.String(), blockRange, availHeight, isSuccess) - txCtx := simulation.OperationInput{ - R: r, - App: app, - TxGen: txConfig, - Cdc: nil, - Msg: msg, - Context: ctx, - SimAccount: simaAccount, - ModuleName: availtypes.ModuleName, - CoinsSpentInMsg: spendable, - } - return simulation.GenAndDeliverTxWithRandFees(txCtx) + return simtypes.NewOperationMsg(msg, true, ""), nil, nil } } From 8cbd75317824a416a5700573de7e12807f715bf0 Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Fri, 27 Sep 2024 15:23:23 +0530 Subject: [PATCH 10/34] update sims workflow --- .github/workflows/sims.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/sims.yml b/.github/workflows/sims.yml index 895081a..f337c98 100644 --- a/.github/workflows/sims.yml +++ b/.github/workflows/sims.yml @@ -2,11 +2,11 @@ name: Sims # Sims workflow runs multiple types of simulations (nondeterminism, import-export, after-import, multi-seed-short) # This workflow will run on all Pull Requests, if a .go, .mod or .sum file have been changed on: - schedule: - - cron: "0 */2 * * *" - release: - types: [published] - + pull_request: + push: + branches: + - main + concurrency: group: ci-${{ github.ref }}-sims cancel-in-progress: true From c5074282223aaff36b56e40d9d49cefbdd056986 Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Fri, 27 Sep 2024 15:27:59 +0530 Subject: [PATCH 11/34] fix: sims workflow --- .github/workflows/sims.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/sims.yml b/.github/workflows/sims.yml index f337c98..0760883 100644 --- a/.github/workflows/sims.yml +++ b/.github/workflows/sims.yml @@ -6,7 +6,7 @@ on: push: branches: - main - + concurrency: group: ci-${{ github.ref }}-sims cancel-in-progress: true @@ -23,7 +23,7 @@ jobs: with: go-version: "1.21" check-latest: true - - run: make build + - run: make -C simapp build - name: Install runsim run: go install github.com/cosmos/tools/cmd/runsim@v1.0.0 - uses: 
actions/cache@v3 From c2df3e60178e8c22ef008ea6777550c41e53b5db Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Fri, 27 Sep 2024 17:10:54 +0530 Subject: [PATCH 12/34] fix: sim tests --- .github/workflows/sims.yml | 53 ---------------------------------- Makefile | 53 +++++++++++++++++++--------------- simapp/app/sim_test.go | 2 +- x/cada/module/module.go | 2 +- x/cada/simulation/oprations.go | 38 ++++++++++++++++++++++-- 5 files changed, 67 insertions(+), 81 deletions(-) diff --git a/.github/workflows/sims.yml b/.github/workflows/sims.yml index 0760883..e58f8ea 100644 --- a/.github/workflows/sims.yml +++ b/.github/workflows/sims.yml @@ -49,59 +49,6 @@ jobs: run: | make test-sim-nondeterminism - test-sim-import-export: - runs-on: ubuntu-latest - needs: [build] - timeout-minutes: 60 - steps: - - uses: actions/checkout@v3 - - uses: actions/setup-go@v4 - with: - go-version: "1.21" - check-latest: true - - uses: actions/cache@v3 - with: - path: ~/go/bin - key: ${{ runner.os }}-go-runsim-binary - - name: test-sim-import-export - run: | - make test-sim-import-export - - test-sim-after-import: - runs-on: ubuntu-latest - needs: [build] - steps: - - uses: actions/checkout@v3 - - uses: actions/setup-go@v4 - with: - go-version: "1.21" - check-latest: true - - uses: actions/cache@v3 - with: - path: ~/go/bin - key: ${{ runner.os }}-go-runsim-binary - - name: test-sim-after-import - run: | - make test-sim-after-import - - test-sim-multi-seed-short: - runs-on: ubuntu-latest - needs: [build] - timeout-minutes: 60 - steps: - - uses: actions/checkout@v3 - - uses: actions/setup-go@v4 - with: - go-version: "1.21" - check-latest: true - - uses: actions/cache@v3 - with: - path: ~/go/bin - key: ${{ runner.os }}-go-runsim-binary - - name: test-sim-multi-seed-short - run: | - make test-sim-multi-seed-short - sims-notify-success: needs: [test-sim-multi-seed-short, test-sim-after-import, test-sim-import-export] diff --git a/Makefile b/Makefile index 9342f28..7837a34 100644 --- a/Makefile +++ b/Makefile @@ -112,6 +112,13 @@ set-testnet-configs: ### Tests & Simulation ### ############################################################################### +# make init-simapp initializes a single local node network +# it is useful for testing and development +# Usage: make install && make init-simapp && simd start +# Warning: make init-simapp will remove all data in simapp home directory +init-simapp: + ./simapp/init-chain.sh + test: test-unit test-all: test-unit test-ledger-mock test-race test-cover @@ -152,38 +159,38 @@ test-sim-nondeterminism: @cd ${CURRENT_DIR}/simapp/app && go test -mod=readonly -run TestAppStateDeterminism -Enabled=true \ -NumBlocks=20 -BlockSize=200 -Commit=true -Period=0 -v -timeout 24h -test-sim-custom-genesis-fast: - @echo "Running custom genesis simulation..." - @echo "By default, ${HOME}/.cada/config/genesis.json will be used." - @go test -mod=readonly $(SIMAPP) -run TestFullAppSimulation -Genesis=${HOME}/.cada/config/genesis.json \ - -Enabled=true -NumBlocks=100 -BlockSize=200 -Commit=true -Seed=99 -Period=5 -v -timeout 24h +# test-sim-custom-genesis-fast: +# @echo "Running custom genesis simulation..." +# @echo "By default, ${HOME}/.cada/config/genesis.json will be used." +# @cd ${CURRENT_DIR}/simapp && go test -mod=readonly -run TestFullAppSimulation -Genesis=${HOME}/.cada/config/genesis.json \ +# -Enabled=true -NumBlocks=100 -BlockSize=200 -Commit=true -Seed=99 -Period=5 -v -timeout 24h # test-sim-import-export: runsim # @echo "Running application import/export simulation. 
This may take several minutes..." # @cd ${CURRENT_DIR}/simapp && $(BINDIR)/runsim -Jobs=4 -SimAppPkg=. -ExitOnFail 50 5 TestAppImportExport -test-sim-after-import: runsim - @echo "Running application simulation-after-import. This may take several minutes..." - @cd ${CURRENT_DIR}/simapp && $(BINDIR)/runsim -Jobs=4 -SimAppPkg=. -ExitOnFail 50 5 TestAppSimulationAfterImport +# test-sim-after-import: runsim +# @echo "Running application simulation-after-import. This may take several minutes..." +# @cd ${CURRENT_DIR}/simapp && $(BINDIR)/runsim -Jobs=4 -SimAppPkg=. -ExitOnFail 50 5 TestAppSimulationAfterImport -test-sim-custom-genesis-multi-seed: runsim - @echo "Running multi-seed custom genesis simulation..." - @echo "By default, ${HOME}/.univ/config/genesis.json will be used." - @$(BINDIR)/runsim -Genesis=${HOME}/.univ/config/genesis.json -SimAppPkg=$(APP) -ExitOnFail 400 5 TestFullAppSimulation +# test-sim-custom-genesis-multi-seed: runsim +# @echo "Running multi-seed custom genesis simulation..." +# @echo "By default, ${HOME}/.cada/config/genesis.json will be used." +# @cd ${CURRENT_DIR}/simapp && $(BINDIR)/runsim -Genesis=${HOME}/.cada/config/genesis.json -SimAppPkg=. -ExitOnFail 400 5 TestFullAppSimulation -test-sim-multi-seed-long: runsim - @echo "Running long multi-seed application simulation. This may take awhile!" - @$(BINDIR)/runsim -Jobs=4 -SimAppPkg=$(APP) -ExitOnFail 500 50 TestFullAppSimulation +# test-sim-multi-seed-long: runsim +# @echo "Running long multi-seed application simulation. This may take awhile!" +# @cd ${CURRENT_DIR}/simapp && $(BINDIR)/runsim -Jobs=4 -SimAppPkg=. -ExitOnFail 500 50 TestFullAppSimulation -test-sim-multi-seed-short: runsim - @echo "Running short multi-seed application simulation. This may take awhile!" - @$(BINDIR)/runsim -Jobs=4 -SimAppPkg=$(APP) -ExitOnFail 50 10 TestFullAppSimulation +# test-sim-multi-seed-short: runsim +# @echo "Running short multi-seed application simulation. This may take awhile!" +# @cd ${CURRENT_DIR}/simapp && $(BINDIR)/runsim -Jobs=4 -SimAppPkg=. -ExitOnFail 50 10 TestFullAppSimulation -test-sim-benchmark-invariants: - @echo "Running simulation invariant benchmarks..." - @go test -mod=readonly $(APP) -benchmem -bench=BenchmarkInvariants -run=^$ \ - -Enabled=true -NumBlocks=1000 -BlockSize=200 \ - -Period=1 -Commit=true -Seed=57 -v -timeout 24h +# test-sim-benchmark-invariants: +# @echo "Running simulation invariant benchmarks..." 
+# cd ${CURRENT_DIR}/simapp && @go test -mod=readonly -benchmem -bench=BenchmarkInvariants -run=^$ \ +# -Enabled=true -NumBlocks=1000 -BlockSize=200 \ +# -Period=1 -Commit=true -Seed=57 -v -timeout 24h .PHONY: \ test-sim-nondeterminism \ diff --git a/simapp/app/sim_test.go b/simapp/app/sim_test.go index 9596e01..960884d 100644 --- a/simapp/app/sim_test.go +++ b/simapp/app/sim_test.go @@ -34,7 +34,7 @@ import ( ) // SimAppChainID hardcoded chainID for simulation -const SimAppChainID = "simulation-app" +const SimAppChainID = "demo" var FlagEnableStreamingValue bool diff --git a/x/cada/module/module.go b/x/cada/module/module.go index 85ba96d..8180d24 100644 --- a/x/cada/module/module.go +++ b/x/cada/module/module.go @@ -178,7 +178,7 @@ func (AppModule) GenerateGenesisState(simState *module.SimulationState) { } // RegisterStoreDecoder registers a decoder for distribution module's types -func (am AppModule) RegisterStoreDecoder(sdr simtypes.StoreDecoderRegistry) { +func (am AppModule) RegisterStoreDecoder(_ simtypes.StoreDecoderRegistry) { } // WeightedOperations returns the all the accounts module operations with their respective weights. diff --git a/x/cada/simulation/oprations.go b/x/cada/simulation/oprations.go index 6d9292c..025e9e1 100644 --- a/x/cada/simulation/oprations.go +++ b/x/cada/simulation/oprations.go @@ -38,13 +38,20 @@ func WeightedOperations( } } -func SimulateUpdateBlobStatus(txConfig client.TxConfig, cdc codec.JSONCodec, ak authkeeper.AccountKeeper, - bk bankkeeper.Keeper, _ keeper.Keeper) simtypes.Operation { +func SimulateUpdateBlobStatus(txConfig client.TxConfig, _ codec.JSONCodec, ak authkeeper.AccountKeeper, + bk bankkeeper.Keeper, _ keeper.Keeper, +) simtypes.Operation { return func( r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { simaAccount, _ := simtypes.RandomAcc(r, accs) + // Ensure the account has a valid public key + account := ak.GetAccount(ctx, simaAccount.Address) + if account == nil || account.GetPubKey() == nil { + return simtypes.NoOpMsg(availtypes.ModuleName, availtypes.TypeMsgUpdateBlobStatus, "account has no pubkey"), nil, nil + } + fromBlock := r.Uint64()%100 + 1 toBlock := fromBlock + r.Uint64()%10 @@ -57,8 +64,33 @@ func SimulateUpdateBlobStatus(txConfig client.TxConfig, cdc codec.JSONCodec, ak To: toBlock, } + // Fetch spendable coins to simulate transaction fees (even if just dummy fees) + spendable := bk.SpendableCoins(ctx, simaAccount.Address) + if spendable.Empty() { + return simtypes.NoOpMsg(availtypes.ModuleName, availtypes.TypeMsgUpdateBlobStatus, "account has no spendable coins"), nil, nil + } + + // Ensure TxGen is properly initialized + if txConfig == nil { + return simtypes.NoOpMsg(availtypes.ModuleName, availtypes.TypeMsgUpdateBlobStatus, "TxGen is nil"), nil, nil + } + msg := availtypes.NewMsgUpdateBlobStatus(simaAccount.Address.String(), blockRange, availHeight, isSuccess) - return simtypes.NewOperationMsg(msg, true, ""), nil, nil + txCtx := simulation.OperationInput{ + R: r, + App: app, + TxGen: txConfig, + Msg: msg, + Context: ctx, + SimAccount: simaAccount, + ModuleName: availtypes.ModuleName, + CoinsSpentInMsg: spendable, + } + + // Generate and deliver the transaction + return simulation.GenAndDeliverTxWithRandFees(txCtx) + + //return simtypes.NewOperationMsg(msg, true, ""), nil, nil } } From 3b1d7d4786c8a68bd784f8c99f19e2e5a3daca5f Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Fri, 27 Sep 2024 17:12:56 +0530 Subject: 
[PATCH 13/34] fix: sim tests --- .github/workflows/sims.yml | 57 +++++++++++++++++++++++++++++++-- Makefile | 64 +++++++++++++++++++------------------- 2 files changed, 87 insertions(+), 34 deletions(-) diff --git a/.github/workflows/sims.yml b/.github/workflows/sims.yml index e58f8ea..f8daea4 100644 --- a/.github/workflows/sims.yml +++ b/.github/workflows/sims.yml @@ -6,7 +6,7 @@ on: push: branches: - main - + concurrency: group: ci-${{ github.ref }}-sims cancel-in-progress: true @@ -23,7 +23,7 @@ jobs: with: go-version: "1.21" check-latest: true - - run: make -C simapp build + - run: make build - name: Install runsim run: go install github.com/cosmos/tools/cmd/runsim@v1.0.0 - uses: actions/cache@v3 @@ -49,6 +49,59 @@ jobs: run: | make test-sim-nondeterminism + # test-sim-import-export: + # runs-on: ubuntu-latest + # needs: [build] + # timeout-minutes: 60 + # steps: + # - uses: actions/checkout@v3 + # - uses: actions/setup-go@v4 + # with: + # go-version: "1.21" + # check-latest: true + # - uses: actions/cache@v3 + # with: + # path: ~/go/bin + # key: ${{ runner.os }}-go-runsim-binary + # - name: test-sim-import-export + # run: | + # make test-sim-import-export + + # test-sim-after-import: + # runs-on: ubuntu-latest + # needs: [build] + # steps: + # - uses: actions/checkout@v3 + # - uses: actions/setup-go@v4 + # with: + # go-version: "1.21" + # check-latest: true + # - uses: actions/cache@v3 + # with: + # path: ~/go/bin + # key: ${{ runner.os }}-go-runsim-binary + # - name: test-sim-after-import + # run: | + # make test-sim-after-import + + # test-sim-multi-seed-short: + # runs-on: ubuntu-latest + # needs: [build] + # timeout-minutes: 60 + # steps: + # - uses: actions/checkout@v3 + # - uses: actions/setup-go@v4 + # with: + # go-version: "1.21" + # check-latest: true + # - uses: actions/cache@v3 + # with: + # path: ~/go/bin + # key: ${{ runner.os }}-go-runsim-binary + # - name: test-sim-multi-seed-short + # run: | + # make test-sim-multi-seed-short + sims-notify-success: needs: [test-sim-multi-seed-short, test-sim-after-import, test-sim-import-export] diff --git a/Makefile b/Makefile index 7837a34..dc38c5f 100644 --- a/Makefile +++ b/Makefile @@ -159,38 +159,38 @@ test-sim-nondeterminism: @cd ${CURRENT_DIR}/simapp/app && go test -mod=readonly -run TestAppStateDeterminism -Enabled=true \ -NumBlocks=20 -BlockSize=200 -Commit=true -Period=0 -v -timeout 24h -# test-sim-custom-genesis-fast: -# @echo "Running custom genesis simulation..." -# @echo "By default, ${HOME}/.cada/config/genesis.json will be used." -# @cd ${CURRENT_DIR}/simapp && go test -mod=readonly -run TestFullAppSimulation -Genesis=${HOME}/.cada/config/genesis.json \ -# -Enabled=true -NumBlocks=100 -BlockSize=200 -Commit=true -Seed=99 -Period=5 -v -timeout 24h - -# test-sim-import-export: runsim -# @echo "Running application import/export simulation. This may take several minutes..." -# @cd ${CURRENT_DIR}/simapp && $(BINDIR)/runsim -Jobs=4 -SimAppPkg=. -ExitOnFail 50 5 TestAppImportExport - -# test-sim-after-import: runsim -# @echo "Running application simulation-after-import. This may take several minutes..." -# @cd ${CURRENT_DIR}/simapp && $(BINDIR)/runsim -Jobs=4 -SimAppPkg=. -ExitOnFail 50 5 TestAppSimulationAfterImport - -# test-sim-custom-genesis-multi-seed: runsim -# @echo "Running multi-seed custom genesis simulation..." -# @echo "By default, ${HOME}/.cada/config/genesis.json will be used." -# @cd ${CURRENT_DIR}/simapp && $(BINDIR)/runsim -Genesis=${HOME}/.cada/config/genesis.json -SimAppPkg=. 
-ExitOnFail 400 5 TestFullAppSimulation - -# test-sim-multi-seed-long: runsim -# @echo "Running long multi-seed application simulation. This may take awhile!" -# @cd ${CURRENT_DIR}/simapp && $(BINDIR)/runsim -Jobs=4 -SimAppPkg=. -ExitOnFail 500 50 TestFullAppSimulation - -# test-sim-multi-seed-short: runsim -# @echo "Running short multi-seed application simulation. This may take awhile!" -# @cd ${CURRENT_DIR}/simapp && $(BINDIR)/runsim -Jobs=4 -SimAppPkg=. -ExitOnFail 50 10 TestFullAppSimulation - -# test-sim-benchmark-invariants: -# @echo "Running simulation invariant benchmarks..." -# cd ${CURRENT_DIR}/simapp && @go test -mod=readonly -benchmem -bench=BenchmarkInvariants -run=^$ \ -# -Enabled=true -NumBlocks=1000 -BlockSize=200 \ -# -Period=1 -Commit=true -Seed=57 -v -timeout 24h +test-sim-custom-genesis-fast: + @echo "Running custom genesis simulation..." + @echo "By default, ${HOME}/.cada/config/genesis.json will be used." + @cd ${CURRENT_DIR}/simapp && go test -mod=readonly -run TestFullAppSimulation -Genesis=${HOME}/.cada/config/genesis.json \ + -Enabled=true -NumBlocks=100 -BlockSize=200 -Commit=true -Seed=99 -Period=5 -v -timeout 24h + +test-sim-import-export: runsim + @echo "Running application import/export simulation. This may take several minutes..." + @cd ${CURRENT_DIR}/simapp && $(BINDIR)/runsim -Jobs=4 -SimAppPkg=. -ExitOnFail 50 5 TestAppImportExport + +test-sim-after-import: runsim + @echo "Running application simulation-after-import. This may take several minutes..." + @cd ${CURRENT_DIR}/simapp && $(BINDIR)/runsim -Jobs=4 -SimAppPkg=. -ExitOnFail 50 5 TestAppSimulationAfterImport + +test-sim-custom-genesis-multi-seed: runsim + @echo "Running multi-seed custom genesis simulation..." + @echo "By default, ${HOME}/.cada/config/genesis.json will be used." + @cd ${CURRENT_DIR}/simapp && $(BINDIR)/runsim -Genesis=${HOME}/.cada/config/genesis.json -SimAppPkg=. -ExitOnFail 400 5 TestFullAppSimulation + +test-sim-multi-seed-long: runsim + @echo "Running long multi-seed application simulation. This may take awhile!" + @cd ${CURRENT_DIR}/simapp && $(BINDIR)/runsim -Jobs=4 -SimAppPkg=. -ExitOnFail 500 50 TestFullAppSimulation + +test-sim-multi-seed-short: runsim + @echo "Running short multi-seed application simulation. This may take awhile!" + @cd ${CURRENT_DIR}/simapp && $(BINDIR)/runsim -Jobs=4 -SimAppPkg=. -ExitOnFail 50 10 TestFullAppSimulation + +test-sim-benchmark-invariants: + @echo "Running simulation invariant benchmarks..." 
+ cd ${CURRENT_DIR}/simapp && @go test -mod=readonly -benchmem -bench=BenchmarkInvariants -run=^$ \ + -Enabled=true -NumBlocks=1000 -BlockSize=200 \ + -Period=1 -Commit=true -Seed=57 -v -timeout 24h .PHONY: \ test-sim-nondeterminism \ From cf4ecc155664d040f8f21ea40f7587214c8b0def Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Fri, 27 Sep 2024 17:13:52 +0530 Subject: [PATCH 14/34] fix: sim tests --- .github/workflows/sims.yml | 100 ++++++++++++++++++------------------- 1 file changed, 50 insertions(+), 50 deletions(-) diff --git a/.github/workflows/sims.yml b/.github/workflows/sims.yml index f8daea4..f337c98 100644 --- a/.github/workflows/sims.yml +++ b/.github/workflows/sims.yml @@ -49,58 +49,58 @@ jobs: run: | make test-sim-nondeterminism - # test-sim-import-export: - # runs-on: ubuntu-latest - # needs: [build] - # timeout-minutes: 60 - # steps: - # - uses: actions/checkout@v3 - # - uses: actions/setup-go@v4 - # with: - # go-version: "1.21" - # check-latest: true - # - uses: actions/cache@v3 - # with: - # path: ~/go/bin - # key: ${{ runner.os }}-go-runsim-binary - # - name: test-sim-import-export - # run: | - # make test-sim-import-export + test-sim-import-export: + runs-on: ubuntu-latest + needs: [build] + timeout-minutes: 60 + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-go@v4 + with: + go-version: "1.21" + check-latest: true + - uses: actions/cache@v3 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-runsim-binary + - name: test-sim-import-export + run: | + make test-sim-import-export - # test-sim-after-import: - # runs-on: ubuntu-latest - # needs: [build] - # steps: - # - uses: actions/checkout@v3 - # - uses: actions/setup-go@v4 - # with: - # go-version: "1.21" - # check-latest: true - # - uses: actions/cache@v3 - # with: - # path: ~/go/bin - # key: ${{ runner.os }}-go-runsim-binary - # - name: test-sim-after-import - # run: | - # make test-sim-after-import + test-sim-after-import: + runs-on: ubuntu-latest + needs: [build] + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-go@v4 + with: + go-version: "1.21" + check-latest: true + - uses: actions/cache@v3 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-runsim-binary + - name: test-sim-after-import + run: | + make test-sim-after-import - # test-sim-multi-seed-short: - # runs-on: ubuntu-latest - # needs: [build] - # timeout-minutes: 60 - # steps: - # - uses: actions/checkout@v3 - # - uses: actions/setup-go@v4 - # with: - # go-version: "1.21" - # check-latest: true - # - uses: actions/cache@v3 - # with: - # path: ~/go/bin - # key: ${{ runner.os }}-go-runsim-binary - # - name: test-sim-multi-seed-short - # run: | - # make test-sim-multi-seed-short + test-sim-multi-seed-short: + runs-on: ubuntu-latest + needs: [build] + timeout-minutes: 60 + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-go@v4 + with: + go-version: "1.21" + check-latest: true + - uses: actions/cache@v3 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-runsim-binary + - name: test-sim-multi-seed-short + run: | + make test-sim-multi-seed-short sims-notify-success: needs: From 807dc69383c8f47cc9da6445f38e7f3ec8835a50 Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Fri, 27 Sep 2024 17:16:38 +0530 Subject: [PATCH 15/34] fix: build --- .github/workflows/sims.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/sims.yml b/.github/workflows/sims.yml index f337c98..4e994e8 100644 --- a/.github/workflows/sims.yml +++ b/.github/workflows/sims.yml @@ -23,7 +23,7 @@ jobs: with: go-version: "1.21" 
check-latest: true - - run: make build + - run: make -C simapp build - name: Install runsim run: go install github.com/cosmos/tools/cmd/runsim@v1.0.0 - uses: actions/cache@v3 From 0384df3341c6b5ea0b94ec3272d7b6ac7bca76bb Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Mon, 30 Sep 2024 10:34:02 +0530 Subject: [PATCH 16/34] WIP --- .github/workflows/sims.yml | 148 ------------ simapp/app/sim_bench_test.go | 164 -------------- simapp/app/sim_test.go | 397 --------------------------------- x/cada/module/module.go | 32 ++- x/cada/simulation/genesis.go | 16 -- x/cada/simulation/oprations.go | 96 -------- 6 files changed, 15 insertions(+), 838 deletions(-) delete mode 100644 .github/workflows/sims.yml delete mode 100644 simapp/app/sim_bench_test.go delete mode 100644 simapp/app/sim_test.go delete mode 100644 x/cada/simulation/genesis.go delete mode 100644 x/cada/simulation/oprations.go diff --git a/.github/workflows/sims.yml b/.github/workflows/sims.yml deleted file mode 100644 index 4e994e8..0000000 --- a/.github/workflows/sims.yml +++ /dev/null @@ -1,148 +0,0 @@ -name: Sims -# Sims workflow runs multiple types of simulations (nondeterminism, import-export, after-import, multi-seed-short) -# This workflow will run on all Pull Requests, if a .go, .mod or .sum file have been changed -on: - pull_request: - push: - branches: - - main - -concurrency: - group: ci-${{ github.ref }}-sims - cancel-in-progress: true - -jobs: - build: - permissions: - contents: read # for actions/checkout to fetch code - runs-on: ubuntu-latest - if: "!contains(github.event.head_commit.message, 'skip-sims')" - steps: - - uses: actions/checkout@v3 - - uses: actions/setup-go@v4 - with: - go-version: "1.21" - check-latest: true - - run: make -C simapp build - - name: Install runsim - run: go install github.com/cosmos/tools/cmd/runsim@v1.0.0 - - uses: actions/cache@v3 - with: - path: ~/go/bin - key: ${{ runner.os }}-go-runsim-binary - - test-sim-nondeterminism: - runs-on: ubuntu-latest - needs: [build] - timeout-minutes: 60 - steps: - - uses: actions/checkout@v3 - - uses: actions/setup-go@v4 - with: - go-version: "1.21" - check-latest: true - - uses: actions/cache@v3 - with: - path: ~/go/bin - key: ${{ runner.os }}-go-runsim-binary - - name: test-sim-import-export - run: | - make test-sim-nondeterminism - - test-sim-import-export: - runs-on: ubuntu-latest - needs: [build] - timeout-minutes: 60 - steps: - - uses: actions/checkout@v3 - - uses: actions/setup-go@v4 - with: - go-version: "1.21" - check-latest: true - - uses: actions/cache@v3 - with: - path: ~/go/bin - key: ${{ runner.os }}-go-runsim-binary - - name: test-sim-import-export - run: | - make test-sim-import-export - - test-sim-after-import: - runs-on: ubuntu-latest - needs: [build] - steps: - - uses: actions/checkout@v3 - - uses: actions/setup-go@v4 - with: - go-version: "1.21" - check-latest: true - - uses: actions/cache@v3 - with: - path: ~/go/bin - key: ${{ runner.os }}-go-runsim-binary - - name: test-sim-after-import - run: | - make test-sim-after-import - - test-sim-multi-seed-short: - runs-on: ubuntu-latest - needs: [build] - timeout-minutes: 60 - steps: - - uses: actions/checkout@v3 - - uses: actions/setup-go@v4 - with: - go-version: "1.21" - check-latest: true - - uses: actions/cache@v3 - with: - path: ~/go/bin - key: ${{ runner.os }}-go-runsim-binary - - name: test-sim-multi-seed-short - run: | - make test-sim-multi-seed-short - - sims-notify-success: - needs: - [test-sim-multi-seed-short, test-sim-after-import, test-sim-import-export] - runs-on: ubuntu-latest 
- if: ${{ success() }} - steps: - - name: Check out repository - uses: actions/checkout@v3 - - name: Get previous workflow status - uses: ./.github/actions/last-workflow-status - id: last_status - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - - - name: Notify Slack on success - if: ${{ steps.last_status.outputs.last_status == 'failure' }} - uses: rtCamp/action-slack-notify@v2.2.0 - env: - SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} - SLACK_CHANNEL: sdk-sims - SLACK_USERNAME: Sim Tests - SLACK_ICON_EMOJI: ":white_check_mark:" - SLACK_COLOR: good - SLACK_MESSAGE: Sims are passing - SLACK_FOOTER: "" - - sims-notify-failure: - permissions: - contents: none - needs: - [test-sim-multi-seed-short, test-sim-after-import, test-sim-import-export] - runs-on: ubuntu-latest - if: ${{ failure() }} - steps: - - name: Notify Slack on failure - uses: rtCamp/action-slack-notify@v2.2.0 - env: - SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} - SLACK_CHANNEL: sdk-sims - SLACK_USERNAME: Sim Tests - SLACK_ICON_EMOJI: ":skull:" - SLACK_COLOR: danger - SLACK_MESSAGE: Sims are failing - SLACK_FOOTER: "" \ No newline at end of file diff --git a/simapp/app/sim_bench_test.go b/simapp/app/sim_bench_test.go deleted file mode 100644 index 7e89fce..0000000 --- a/simapp/app/sim_bench_test.go +++ /dev/null @@ -1,164 +0,0 @@ -package app - -import ( - "fmt" - "os" - "testing" - - cmtproto "github.com/cometbft/cometbft/proto/tendermint/types" - flag "github.com/spf13/pflag" - "github.com/spf13/viper" - "github.com/stretchr/testify/require" - - "github.com/cosmos/cosmos-sdk/baseapp" - "github.com/cosmos/cosmos-sdk/client/flags" - "github.com/cosmos/cosmos-sdk/server" - simtestutil "github.com/cosmos/cosmos-sdk/testutil/sims" - simtypes "github.com/cosmos/cosmos-sdk/types/simulation" - "github.com/cosmos/cosmos-sdk/x/simulation" - simcli "github.com/cosmos/cosmos-sdk/x/simulation/client/cli" -) - -var FlagEnableBenchStreamingValue bool - -// Get flags every time the simulator is run -func init() { - flag.BoolVar(&FlagEnableBenchStreamingValue, "EnableStreaming", false, "Enable streaming service") -} - -// Profile with: -// /usr/local/go/bin/go test -benchmem -run=^$ cosmossdk.io/simapp -bench ^BenchmarkFullAppSimulation$ -Commit=true -cpuprofile cpu.out -func BenchmarkFullAppSimulation(b *testing.B) { - b.ReportAllocs() - - config := simcli.NewConfigFromFlags() - config.ChainID = SimAppChainID - - db, dir, logger, skip, err := simtestutil.SetupSimulation(config, "goleveldb-app-sim", "Simulation", simcli.FlagVerboseValue, simcli.FlagEnabledValue) - if err != nil { - b.Fatalf("simulation setup failed: %s", err.Error()) - } - - if skip { - b.Skip("skipping benchmark application simulation") - } - - defer func() { - require.NoError(b, db.Close()) - require.NoError(b, os.RemoveAll(dir)) - }() - - appOptions := viper.New() - if FlagEnableStreamingValue { - m := make(map[string]interface{}) - m["streaming.abci.keys"] = []string{"*"} - m["streaming.abci.plugin"] = "abci_v1" - m["streaming.abci.stop-node-on-err"] = true - for key, value := range m { - appOptions.SetDefault(key, value) - } - } - appOptions.SetDefault(flags.FlagHome, DefaultNodeHome) - appOptions.SetDefault(server.FlagInvCheckPeriod, simcli.FlagPeriodValue) - - app := NewChainApp(logger, db, nil, true, appOptions, interBlockCacheOpt(), baseapp.SetChainID(SimAppChainID)) - - // run randomized simulation - _, simParams, simErr := simulation.SimulateFromSeed( - b, - os.Stdout, - app.BaseApp, - simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), 
app.DefaultGenesis()), - simtypes.RandomAccounts, // Replace with own random account function if using keys other than secp256k1 - simtestutil.SimulationOperations(app, app.AppCodec(), config), - BlockedAddresses(), - config, - app.AppCodec(), - ) - - // export state and simParams before the simulation error is checked - if err = simtestutil.CheckExportSimulation(app, config, simParams); err != nil { - b.Fatal(err) - } - - if simErr != nil { - b.Fatal(simErr) - } - - if config.Commit { - simtestutil.PrintStats(db) - } -} - -func BenchmarkInvariants(b *testing.B) { - b.ReportAllocs() - - config := simcli.NewConfigFromFlags() - config.ChainID = SimAppChainID - - db, dir, logger, skip, err := simtestutil.SetupSimulation(config, "leveldb-app-invariant-bench", "Simulation", simcli.FlagVerboseValue, simcli.FlagEnabledValue) - if err != nil { - b.Fatalf("simulation setup failed: %s", err.Error()) - } - - if skip { - b.Skip("skipping benchmark application simulation") - } - - config.AllInvariants = false - - defer func() { - require.NoError(b, db.Close()) - require.NoError(b, os.RemoveAll(dir)) - }() - - appOptions := make(simtestutil.AppOptionsMap, 0) - appOptions[flags.FlagHome] = DefaultNodeHome - appOptions[server.FlagInvCheckPeriod] = simcli.FlagPeriodValue - - app := NewChainApp(logger, db, nil, true, appOptions, interBlockCacheOpt(), baseapp.SetChainID(SimAppChainID)) - - // run randomized simulation - _, simParams, simErr := simulation.SimulateFromSeed( - b, - os.Stdout, - app.BaseApp, - simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), app.DefaultGenesis()), - simtypes.RandomAccounts, // Replace with own random account function if using keys other than secp256k1 - simtestutil.SimulationOperations(app, app.AppCodec(), config), - BlockedAddresses(), - config, - app.AppCodec(), - ) - - // export state and simParams before the simulation error is checked - if err = simtestutil.CheckExportSimulation(app, config, simParams); err != nil { - b.Fatal(err) - } - - if simErr != nil { - b.Fatal(simErr) - } - - if config.Commit { - simtestutil.PrintStats(db) - } - - ctx := app.NewContextLegacy(true, cmtproto.Header{Height: app.LastBlockHeight() + 1}) - - // 3. Benchmark each invariant separately - // - // NOTE: We use the crisis keeper as it has all the invariants registered with - // their respective metadata which makes it useful for testing/benchmarking. 
- for _, cr := range app.CrisisKeeper.Routes() { - cr := cr - b.Run(fmt.Sprintf("%s/%s", cr.ModuleName, cr.Route), func(b *testing.B) { - if res, stop := cr.Invar(ctx); stop { - b.Fatalf( - "broken invariant at block %d of %d\n%s", - ctx.BlockHeight()-1, config.NumBlocks, res, - ) - } - }) - } -} diff --git a/simapp/app/sim_test.go b/simapp/app/sim_test.go deleted file mode 100644 index 960884d..0000000 --- a/simapp/app/sim_test.go +++ /dev/null @@ -1,397 +0,0 @@ -package app - -import ( - "encoding/json" - "flag" - "fmt" - "math/rand" - "os" - "runtime/debug" - "strings" - "testing" - - abci "github.com/cometbft/cometbft/abci/types" - cmtproto "github.com/cometbft/cometbft/proto/tendermint/types" - dbm "github.com/cosmos/cosmos-db" - "github.com/spf13/viper" - "github.com/stretchr/testify/require" - - "cosmossdk.io/log" - "cosmossdk.io/store" - storetypes "cosmossdk.io/store/types" - "cosmossdk.io/x/feegrant" - - "github.com/cosmos/cosmos-sdk/baseapp" - "github.com/cosmos/cosmos-sdk/client/flags" - "github.com/cosmos/cosmos-sdk/server" - simtestutil "github.com/cosmos/cosmos-sdk/testutil/sims" - simtypes "github.com/cosmos/cosmos-sdk/types/simulation" - authzkeeper "github.com/cosmos/cosmos-sdk/x/authz/keeper" - "github.com/cosmos/cosmos-sdk/x/simulation" - simcli "github.com/cosmos/cosmos-sdk/x/simulation/client/cli" - slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types" - stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" -) - -// SimAppChainID hardcoded chainID for simulation -const SimAppChainID = "demo" - -var FlagEnableStreamingValue bool - -// Get flags every time the simulator is run -func init() { - simcli.GetSimulatorFlags() - flag.BoolVar(&FlagEnableStreamingValue, "EnableStreaming", false, "Enable streaming service") -} - -// fauxMerkleModeOpt returns a BaseApp option to use a dbStoreAdapter instead of -// an IAVLStore for faster simulation speed. -func fauxMerkleModeOpt(bapp *baseapp.BaseApp) { - bapp.SetFauxMerkleMode() -} - -// interBlockCacheOpt returns a BaseApp option function that sets the persistent -// inter-block write-through cache. 
-func interBlockCacheOpt() func(*baseapp.BaseApp) { - return baseapp.SetInterBlockCache(store.NewCommitKVStoreCacheManager()) -} - -func TestFullAppSimulation(t *testing.T) { - config := simcli.NewConfigFromFlags() - config.ChainID = SimAppChainID - - db, dir, logger, skip, err := simtestutil.SetupSimulation(config, "leveldb-app-sim", "Simulation", simcli.FlagVerboseValue, simcli.FlagEnabledValue) - if skip { - t.Skip("skipping application simulation") - } - require.NoError(t, err, "simulation setup failed") - - defer func() { - require.NoError(t, db.Close()) - require.NoError(t, os.RemoveAll(dir)) - }() - - appOptions := make(simtestutil.AppOptionsMap, 0) - appOptions[flags.FlagHome] = DefaultNodeHome - appOptions[server.FlagInvCheckPeriod] = simcli.FlagPeriodValue - - app := NewChainApp(logger, db, nil, true, appOptions, fauxMerkleModeOpt, baseapp.SetChainID(SimAppChainID)) - require.Equal(t, appName, app.Name()) - - // run randomized simulation - _, simParams, simErr := simulation.SimulateFromSeed( - t, - os.Stdout, - app.BaseApp, - simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), app.DefaultGenesis()), - simtypes.RandomAccounts, // Replace with own random account function if using keys other than secp256k1 - simtestutil.SimulationOperations(app, app.AppCodec(), config), - BlockedAddresses(), - config, - app.AppCodec(), - ) - - // export state and simParams before the simulation error is checked - err = simtestutil.CheckExportSimulation(app, config, simParams) - require.NoError(t, err) - require.NoError(t, simErr) - - if config.Commit { - simtestutil.PrintStats(db) - } -} - -func TestAppImportExport(t *testing.T) { - config := simcli.NewConfigFromFlags() - config.ChainID = SimAppChainID - - db, dir, logger, skip, err := simtestutil.SetupSimulation(config, "leveldb-app-sim", "Simulation", simcli.FlagVerboseValue, simcli.FlagEnabledValue) - if skip { - t.Skip("skipping application import/export simulation") - } - require.NoError(t, err, "simulation setup failed") - - defer func() { - require.NoError(t, db.Close()) - require.NoError(t, os.RemoveAll(dir)) - }() - - appOptions := make(simtestutil.AppOptionsMap, 0) - appOptions[flags.FlagHome] = DefaultNodeHome - appOptions[server.FlagInvCheckPeriod] = simcli.FlagPeriodValue - - app := NewChainApp(logger, db, nil, true, appOptions, fauxMerkleModeOpt, baseapp.SetChainID(SimAppChainID)) - require.Equal(t, appName, app.Name()) - - // Run randomized simulation - _, simParams, simErr := simulation.SimulateFromSeed( - t, - os.Stdout, - app.BaseApp, - simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), app.DefaultGenesis()), - simtypes.RandomAccounts, // Replace with own random account function if using keys other than secp256k1 - simtestutil.SimulationOperations(app, app.AppCodec(), config), - BlockedAddresses(), - config, - app.AppCodec(), - ) - - // export state and simParams before the simulation error is checked - err = simtestutil.CheckExportSimulation(app, config, simParams) - require.NoError(t, err) - require.NoError(t, simErr) - - if config.Commit { - simtestutil.PrintStats(db) - } - - fmt.Printf("exporting genesis...\n") - - exported, err := app.ExportAppStateAndValidators(false, []string{}, []string{}) - require.NoError(t, err) - - fmt.Printf("importing genesis...\n") - - newDB, newDir, _, _, err := simtestutil.SetupSimulation(config, "leveldb-app-sim-2", "Simulation-2", simcli.FlagVerboseValue, simcli.FlagEnabledValue) - require.NoError(t, err, "simulation setup failed") - - defer func() { - require.NoError(t, 
newDB.Close()) - require.NoError(t, os.RemoveAll(newDir)) - }() - - newApp := NewChainApp(log.NewNopLogger(), newDB, nil, true, appOptions, fauxMerkleModeOpt, baseapp.SetChainID(SimAppChainID)) - require.Equal(t, appName, newApp.Name()) - - var genesisState GenesisState - err = json.Unmarshal(exported.AppState, &genesisState) - require.NoError(t, err) - - ctxA := app.NewContextLegacy(true, cmtproto.Header{Height: app.LastBlockHeight()}) - ctxB := newApp.NewContextLegacy(true, cmtproto.Header{Height: app.LastBlockHeight()}) - _, err = newApp.ModuleManager.InitGenesis(ctxB, app.AppCodec(), genesisState) - - if err != nil { - if strings.Contains(err.Error(), "validator set is empty after InitGenesis") { - logger.Info("Skipping simulation as all validators have been unbonded") - logger.Info("err", err, "stacktrace", string(debug.Stack())) - return - } - } - - require.NoError(t, err) - err = newApp.StoreConsensusParams(ctxB, exported.ConsensusParams) - require.NoError(t, err) - fmt.Printf("comparing stores...\n") - - // skip certain prefixes - skipPrefixes := map[string][][]byte{ - stakingtypes.StoreKey: { - stakingtypes.UnbondingQueueKey, stakingtypes.RedelegationQueueKey, stakingtypes.ValidatorQueueKey, - stakingtypes.HistoricalInfoKey, stakingtypes.UnbondingIDKey, stakingtypes.UnbondingIndexKey, - stakingtypes.UnbondingTypeKey, stakingtypes.ValidatorUpdatesKey, - }, - authzkeeper.StoreKey: {authzkeeper.GrantQueuePrefix}, - feegrant.StoreKey: {feegrant.FeeAllowanceQueueKeyPrefix}, - slashingtypes.StoreKey: {slashingtypes.ValidatorMissedBlockBitmapKeyPrefix}, - } - - storeKeys := app.GetStoreKeys() - require.NotEmpty(t, storeKeys) - - for _, appKeyA := range storeKeys { - // only compare kvstores - if _, ok := appKeyA.(*storetypes.KVStoreKey); !ok { - continue - } - - keyName := appKeyA.Name() - appKeyB := newApp.GetKey(keyName) - - storeA := ctxA.KVStore(appKeyA) - storeB := ctxB.KVStore(appKeyB) - - failedKVAs, failedKVBs := simtestutil.DiffKVStores(storeA, storeB, skipPrefixes[keyName]) - require.Equal(t, len(failedKVAs), len(failedKVBs), "unequal sets of key-values to compare %s", keyName) - - require.Equal(t, 0, len(failedKVAs), simtestutil.GetSimulationLog(keyName, app.SimulationManager().StoreDecoders, failedKVAs, failedKVBs)) - } -} - -func TestAppSimulationAfterImport(t *testing.T) { - config := simcli.NewConfigFromFlags() - config.ChainID = SimAppChainID - - db, dir, logger, skip, err := simtestutil.SetupSimulation(config, "leveldb-app-sim", "Simulation", simcli.FlagVerboseValue, simcli.FlagEnabledValue) - if skip { - t.Skip("skipping application simulation after import") - } - require.NoError(t, err, "simulation setup failed") - - defer func() { - require.NoError(t, db.Close()) - require.NoError(t, os.RemoveAll(dir)) - }() - - appOptions := make(simtestutil.AppOptionsMap, 0) - appOptions[flags.FlagHome] = DefaultNodeHome - appOptions[server.FlagInvCheckPeriod] = simcli.FlagPeriodValue - - app := NewChainApp(logger, db, nil, true, appOptions, fauxMerkleModeOpt, baseapp.SetChainID(SimAppChainID)) - require.Equal(t, appName, app.Name()) - - // Run randomized simulation - stopEarly, simParams, simErr := simulation.SimulateFromSeed( - t, - os.Stdout, - app.BaseApp, - simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), app.DefaultGenesis()), - simtypes.RandomAccounts, // Replace with own random account function if using keys other than secp256k1 - simtestutil.SimulationOperations(app, app.AppCodec(), config), - BlockedAddresses(), - config, - app.AppCodec(), - ) - - // export 
state and simParams before the simulation error is checked - err = simtestutil.CheckExportSimulation(app, config, simParams) - require.NoError(t, err) - require.NoError(t, simErr) - - if config.Commit { - simtestutil.PrintStats(db) - } - - if stopEarly { - fmt.Println("can't export or import a zero-validator genesis, exiting test...") - return - } - - fmt.Printf("exporting genesis...\n") - - exported, err := app.ExportAppStateAndValidators(true, []string{}, []string{}) - require.NoError(t, err) - - fmt.Printf("importing genesis...\n") - - newDB, newDir, _, _, err := simtestutil.SetupSimulation(config, "leveldb-app-sim-2", "Simulation-2", simcli.FlagVerboseValue, simcli.FlagEnabledValue) - require.NoError(t, err, "simulation setup failed") - - defer func() { - require.NoError(t, newDB.Close()) - require.NoError(t, os.RemoveAll(newDir)) - }() - - newApp := NewChainApp(log.NewNopLogger(), newDB, nil, true, appOptions, fauxMerkleModeOpt, baseapp.SetChainID(SimAppChainID)) - require.Equal(t, appName, newApp.Name()) - - newApp.InitChain(&abci.RequestInitChain{ - AppStateBytes: exported.AppState, - ChainId: SimAppChainID, - }) - - _, _, err = simulation.SimulateFromSeed( - t, - os.Stdout, - newApp.BaseApp, - simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), app.DefaultGenesis()), - simtypes.RandomAccounts, // Replace with own random account function if using keys other than secp256k1 - simtestutil.SimulationOperations(newApp, newApp.AppCodec(), config), - BlockedAddresses(), - config, - app.AppCodec(), - ) - require.NoError(t, err) -} - -// TODO: Make another test for the fuzzer itself, which just has noOp txs -// and doesn't depend on the application. -func TestAppStateDeterminism(t *testing.T) { - if !simcli.FlagEnabledValue { - t.Skip("skipping application simulation") - } - - config := simcli.NewConfigFromFlags() - config.InitialBlockHeight = 1 - config.ExportParamsPath = "" - config.OnOperation = false - config.AllInvariants = false - config.ChainID = SimAppChainID - - numSeeds := 3 - numTimesToRunPerSeed := 3 // This used to be set to 5, but we've temporarily reduced it to 3 for the sake of faster CI. 
- appHashList := make([]json.RawMessage, numTimesToRunPerSeed) - - // We will be overriding the random seed and just run a single simulation on the provided seed value - if config.Seed != simcli.DefaultSeedValue { - numSeeds = 1 - } - - appOptions := viper.New() - if FlagEnableStreamingValue { - m := make(map[string]interface{}) - m["streaming.abci.keys"] = []string{"*"} - m["streaming.abci.plugin"] = "abci_v1" - m["streaming.abci.stop-node-on-err"] = true - for key, value := range m { - appOptions.SetDefault(key, value) - } - } - appOptions.SetDefault(flags.FlagHome, DefaultNodeHome) - appOptions.SetDefault(server.FlagInvCheckPeriod, simcli.FlagPeriodValue) - if simcli.FlagVerboseValue { - appOptions.SetDefault(flags.FlagLogLevel, "debug") - } - - for i := 0; i < numSeeds; i++ { - if config.Seed == simcli.DefaultSeedValue { - config.Seed = rand.Int63() - } - - fmt.Println("config.Seed: ", config.Seed) - - for j := 0; j < numTimesToRunPerSeed; j++ { - var logger log.Logger - if simcli.FlagVerboseValue { - logger = log.NewTestLogger(t) - } else { - logger = log.NewNopLogger() - } - - db := dbm.NewMemDB() - app := NewChainApp(logger, db, nil, true, appOptions, interBlockCacheOpt(), baseapp.SetChainID(SimAppChainID)) - - fmt.Printf( - "running non-determinism simulation; seed %d: %d/%d, attempt: %d/%d\n", - config.Seed, i+1, numSeeds, j+1, numTimesToRunPerSeed, - ) - - _, _, err := simulation.SimulateFromSeed( - t, - os.Stdout, - app.BaseApp, - simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), app.DefaultGenesis()), - simtypes.RandomAccounts, // Replace with own random account function if using keys other than secp256k1 - simtestutil.SimulationOperations(app, app.AppCodec(), config), - BlockedAddresses(), - config, - app.AppCodec(), - ) - require.NoError(t, err) - - if config.Commit { - simtestutil.PrintStats(db) - } - - appHash := app.LastCommitID().Hash - appHashList[j] = appHash - - if j != 0 { - require.Equal( - t, string(appHashList[0]), string(appHashList[j]), - "non-determinism in seed %d: %d/%d, attempt: %d/%d\n", config.Seed, i+1, numSeeds, j+1, numTimesToRunPerSeed, - ) - } - } - } -} diff --git a/x/cada/module/module.go b/x/cada/module/module.go index 8180d24..b5b78ea 100644 --- a/x/cada/module/module.go +++ b/x/cada/module/module.go @@ -12,14 +12,12 @@ import ( codectypes "github.com/cosmos/cosmos-sdk/codec/types" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/types/module" - simtypes "github.com/cosmos/cosmos-sdk/types/simulation" authkeeper "github.com/cosmos/cosmos-sdk/x/auth/keeper" bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper" gwruntime "github.com/grpc-ecosystem/grpc-gateway/runtime" "github.com/spf13/cobra" "github.com/vitwit/avail-da-module/x/cada/client/cli" "github.com/vitwit/avail-da-module/x/cada/keeper" - simulation "github.com/vitwit/avail-da-module/x/cada/simulation" types "github.com/vitwit/avail-da-module/x/cada/types" ) @@ -173,18 +171,18 @@ func (am AppModule) IsOnePerModuleType() {} // IsAppModule implements the appmodule.AppModule interface. func (am AppModule) IsAppModule() {} -func (AppModule) GenerateGenesisState(simState *module.SimulationState) { - simulation.RandomizedGenState(simState) -} - -// RegisterStoreDecoder registers a decoder for distribution module's types -func (am AppModule) RegisterStoreDecoder(_ simtypes.StoreDecoderRegistry) { -} - -// WeightedOperations returns the all the accounts module operations with their respective weights. 
-func (am AppModule) WeightedOperations(simState module.SimulationState) []simtypes.WeightedOperation { - return simulation.WeightedOperations( - simState.AppParams, simState.Cdc, simState.TxConfig, - am.authkeeper, am.bankkeeper, *am.keeper, - ) -} +// func (AppModule) GenerateGenesisState(simState *module.SimulationState) { +// simulation.RandomizedGenState(simState) +// } + +// // RegisterStoreDecoder registers a decoder for distribution module's types +// func (am AppModule) RegisterStoreDecoder(_ simtypes.StoreDecoderRegistry) { +// } + +// // WeightedOperations returns the all the accounts module operations with their respective weights. +// func (am AppModule) WeightedOperations(simState module.SimulationState) []simtypes.WeightedOperation { +// return simulation.WeightedOperations( +// simState.AppParams, simState.Cdc, simState.TxConfig, +// am.authkeeper, am.bankkeeper, *am.keeper, +// ) +// } diff --git a/x/cada/simulation/genesis.go b/x/cada/simulation/genesis.go deleted file mode 100644 index 5f32926..0000000 --- a/x/cada/simulation/genesis.go +++ /dev/null @@ -1,16 +0,0 @@ -package simulation - -import ( - "github.com/cosmos/cosmos-sdk/types/module" - types "github.com/vitwit/avail-da-module/x/cada/types" -) - -// RandomizedGenState creates a randomized GenesisState for testing. -func RandomizedGenState(simState *module.SimulationState) { - // Since your GenesisState is empty, there's not much to randomize. - // We'll just set the GenesisState to its empty struct. - genesis := types.GenesisState{} - - // Here we use simState to set the default genesis - simState.GenState[types.ModuleName] = simState.Cdc.MustMarshalJSON(&genesis) -} diff --git a/x/cada/simulation/oprations.go b/x/cada/simulation/oprations.go deleted file mode 100644 index 025e9e1..0000000 --- a/x/cada/simulation/oprations.go +++ /dev/null @@ -1,96 +0,0 @@ -package simulation - -import ( - "math/rand" - - "github.com/cosmos/cosmos-sdk/baseapp" - "github.com/cosmos/cosmos-sdk/client" - "github.com/cosmos/cosmos-sdk/codec" - sdk "github.com/cosmos/cosmos-sdk/types" - simtypes "github.com/cosmos/cosmos-sdk/types/simulation" - authkeeper "github.com/cosmos/cosmos-sdk/x/auth/keeper" - bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper" - "github.com/cosmos/cosmos-sdk/x/simulation" - "github.com/vitwit/avail-da-module/x/cada/keeper" - availtypes "github.com/vitwit/avail-da-module/x/cada/types" -) - -const ( - OpWeightMsgUpdateBlobStatusRequest = "op_weight_msg_update_blob_status" - - DefaultWeightMsgUpdateStatusRequest = 100 -) - -func WeightedOperations( - appParams simtypes.AppParams, cdc codec.JSONCodec, txConfig client.TxConfig, - ak authkeeper.AccountKeeper, bk bankkeeper.Keeper, k keeper.Keeper, -) simulation.WeightedOperations { - var weightMsgUpdateBlobStatusRequest int - appParams.GetOrGenerate(OpWeightMsgUpdateBlobStatusRequest, &weightMsgUpdateBlobStatusRequest, nil, func(_ *rand.Rand) { - weightMsgUpdateBlobStatusRequest = DefaultWeightMsgUpdateStatusRequest - }) - - return simulation.WeightedOperations{ - simulation.NewWeightedOperation( - weightMsgUpdateBlobStatusRequest, - SimulateUpdateBlobStatus(txConfig, cdc, ak, bk, k), - ), - } -} - -func SimulateUpdateBlobStatus(txConfig client.TxConfig, _ codec.JSONCodec, ak authkeeper.AccountKeeper, - bk bankkeeper.Keeper, _ keeper.Keeper, -) simtypes.Operation { - return func( - r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, - ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { - simaAccount, _ := 
simtypes.RandomAcc(r, accs) - - // Ensure the account has a valid public key - account := ak.GetAccount(ctx, simaAccount.Address) - if account == nil || account.GetPubKey() == nil { - return simtypes.NoOpMsg(availtypes.ModuleName, availtypes.TypeMsgUpdateBlobStatus, "account has no pubkey"), nil, nil - } - - fromBlock := r.Uint64()%100 + 1 - toBlock := fromBlock + r.Uint64()%10 - - isSuccess := r.Intn(2) == 1 - - availHeight := r.Uint64()%100 + 1 - - blockRange := availtypes.Range{ - From: fromBlock, - To: toBlock, - } - - // Fetch spendable coins to simulate transaction fees (even if just dummy fees) - spendable := bk.SpendableCoins(ctx, simaAccount.Address) - if spendable.Empty() { - return simtypes.NoOpMsg(availtypes.ModuleName, availtypes.TypeMsgUpdateBlobStatus, "account has no spendable coins"), nil, nil - } - - // Ensure TxGen is properly initialized - if txConfig == nil { - return simtypes.NoOpMsg(availtypes.ModuleName, availtypes.TypeMsgUpdateBlobStatus, "TxGen is nil"), nil, nil - } - - msg := availtypes.NewMsgUpdateBlobStatus(simaAccount.Address.String(), blockRange, availHeight, isSuccess) - - txCtx := simulation.OperationInput{ - R: r, - App: app, - TxGen: txConfig, - Msg: msg, - Context: ctx, - SimAccount: simaAccount, - ModuleName: availtypes.ModuleName, - CoinsSpentInMsg: spendable, - } - - // Generate and deliver the transaction - return simulation.GenAndDeliverTxWithRandFees(txCtx) - - //return simtypes.NewOperationMsg(msg, true, ""), nil, nil - } -} From 86cd89475d4ca8b2d678e8010e8f7dd5d9e9abb2 Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Mon, 30 Sep 2024 11:01:26 +0530 Subject: [PATCH 17/34] fix specs path --- README.md | 8 +++++--- simapp/Makefile | 2 +- {integration_docs => x/cada/integration_docs}/README.md | 0 {integration_docs => x/cada/integration_docs}/config.md | 0 .../cada/integration_docs}/integration.md | 0 {integration_docs => x/cada/integration_docs}/spawn.md | 0 x/cada/specs/04_client.md | 1 + x/cada/specs/07_vote_extension.md | 1 + 8 files changed, 8 insertions(+), 4 deletions(-) rename {integration_docs => x/cada/integration_docs}/README.md (100%) rename {integration_docs => x/cada/integration_docs}/config.md (100%) rename {integration_docs => x/cada/integration_docs}/integration.md (100%) rename {integration_docs => x/cada/integration_docs}/spawn.md (100%) diff --git a/README.md b/README.md index d40ae6a..31f770d 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ CADA is a module designed to connect Cosmos sovereign chains with the Avail netw # Integration Guide -To integrate the CADA module into your application, follow the steps outlined in the [integration guide](./integration_docs/README.md) +To integrate the CADA module into your application, follow the steps outlined in the [integration guide](./x/cada/integration_docs/README.md) Note: Ensure that the Avail light client URL is correctly configured for the module to function as expected. For instructions on setup Avail locally, please refer to [this documentation](https://github.com/rollkit/avail-da?tab=readme-ov-file#avail-da). @@ -31,6 +31,7 @@ These are main components in the workflow: The core functionality of the **CADA** module is integrated with and operates on the Cosmos blockchain. In the CADA module: + - At each block interval, the `PreBlocker` ABCI method sends a request to the `Relayer`, specifying the range of block heights that are ready to be posted to the **Avail** Data Availability (DA) network. 
![Data Submission](https://github.com/user-attachments/assets/fc4d23cc-f6bd-4210-8407-47a57adcc290) @@ -40,6 +41,7 @@ In the CADA module: ![Vote Extension](https://github.com/user-attachments/assets/ea5b10ab-fb64-4ed0-8761-44675a852a01) ## 2. Relayer + The **Relayer** facilitates communication between the Cosmos Chain, the Avail light client, and the Cosmos Provider. - **Data Submission**: The relayer is responsible for fetching block data from the Cosmos provider and posting it to the Avail light client via an HTTP request. @@ -48,8 +50,8 @@ The **Relayer** facilitates communication between the Cosmos Chain, the Avail li - **Data Verification**: During verification, the relayer communicates with the Avail light client to confirm whether the data is truly available at the specified height. - ## 3. Avail Light Node + The **Avail Light Client** allows interaction with the Avail DA network without requiring a full node, and without having to trust remote peers. It leverages **Data Availability Sampling (DAS)**, which the light client performs on every newly created block. - The chain communicates with the Avail light client via the relayer during the data submission and data availability verification processes. @@ -57,12 +59,12 @@ The **Avail Light Client** allows interaction with the Avail DA network without Find more details about the Avail Light Client [here](https://docs.availproject.org/docs/operate-a-node/run-a-light-client/Overview). ## 4. Cosmos Provider + The **Cosmos Provider** is responsible for fetching block data via RPC so that the data can be posted to Avail for availability checks. # Workflow - - At each block interval, a request is sent from the `PreBlocker` ABCI method to the Keeper, specifying the range of block heights that are ready to be posted to the `Avail` DA network. - The range of block heights should be from `provenHeight + 1` to `min(provenHeight + MaxBlocksLimitForBlob, CurrentBlockHeight)`. diff --git a/simapp/Makefile b/simapp/Makefile index bec3ba0..a2636f8 100644 --- a/simapp/Makefile +++ b/simapp/Makefile @@ -10,7 +10,7 @@ SIMAPP = ./app # for dockerized protobuf tools DOCKER := $(shell which docker) -HTTPS_GIT := github.com/rollchains/rollchain.git +HTTPS_GIT := github.com/vitwit/cada.git export GO111MODULE = on diff --git a/integration_docs/README.md b/x/cada/integration_docs/README.md similarity index 100% rename from integration_docs/README.md rename to x/cada/integration_docs/README.md diff --git a/integration_docs/config.md b/x/cada/integration_docs/config.md similarity index 100% rename from integration_docs/config.md rename to x/cada/integration_docs/config.md diff --git a/integration_docs/integration.md b/x/cada/integration_docs/integration.md similarity index 100% rename from integration_docs/integration.md rename to x/cada/integration_docs/integration.md diff --git a/integration_docs/spawn.md b/x/cada/integration_docs/spawn.md similarity index 100% rename from integration_docs/spawn.md rename to x/cada/integration_docs/spawn.md diff --git a/x/cada/specs/04_client.md b/x/cada/specs/04_client.md index 3d028f5..f5a188e 100644 --- a/x/cada/specs/04_client.md +++ b/x/cada/specs/04_client.md @@ -17,6 +17,7 @@ simd query cada --help ``` #### Query the Status + The `get-da-status` command enables users to retrieve comprehensive information, including the range of blocks currently being posted to Avail, the current status, the last proven height, the Avail height where the data is made available, and the voting block height by which voting should conclude. 
```sh diff --git a/x/cada/specs/07_vote_extension.md b/x/cada/specs/07_vote_extension.md index 4043aa7..0866fa2 100644 --- a/x/cada/specs/07_vote_extension.md +++ b/x/cada/specs/07_vote_extension.md @@ -76,6 +76,7 @@ if currentHeight+1 != int64(voteEndHeight) || blobStatus != IN_VOTING_STATE { ``` ### VerifyVoteExtensionHandler + The `VerifyVoteExtensionHandler` method is responsible for validating the format and content of the vote extensions generated by the `ExtendVoteHandler`. This method performs a basic validation check on the received vote extension, ensuring it meets the necessary format requirements. It then returns a response indicating whether the vote extension is accepted. From 36d23ae3c13b4ff80722d3f5813d9c80564f8bf2 Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Mon, 30 Sep 2024 11:35:43 +0530 Subject: [PATCH 18/34] add new line in makefile --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index dc38c5f..28871be 100644 --- a/Makefile +++ b/Makefile @@ -229,4 +229,4 @@ test-rosetta: benchmark: @go test -mod=readonly -bench=. $(PACKAGES_NOSIMULATION) -.PHONY: benchmark \ No newline at end of file +.PHONY: benchmark From 4016b1001d504c1cec44464a7d80ea94a6b8ca3c Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Mon, 30 Sep 2024 16:09:34 +0530 Subject: [PATCH 19/34] fix: add working sims tests --- .github/workflows/sims.yml | 149 ++++++++++++ Makefile | 14 +- simapp/app/sim_bench_test.go | 150 ++++++++++++ simapp/app/sim_test.go | 408 +++++++++++++++++++++++++++++++++ x/cada/keeper/keeper.go | 4 + x/cada/module/module.go | 32 +-- x/cada/simulation/genesis.go | 16 ++ x/cada/simulation/operation.go | 103 +++++++++ 8 files changed, 854 insertions(+), 22 deletions(-) create mode 100644 .github/workflows/sims.yml create mode 100644 simapp/app/sim_bench_test.go create mode 100644 simapp/app/sim_test.go create mode 100644 x/cada/simulation/genesis.go create mode 100644 x/cada/simulation/operation.go diff --git a/.github/workflows/sims.yml b/.github/workflows/sims.yml new file mode 100644 index 0000000..6a776f9 --- /dev/null +++ b/.github/workflows/sims.yml @@ -0,0 +1,149 @@ +name: Sims +# Sims workflow runs multiple types of simulations (nondeterminism, import-export, after-import, multi-seed-short) +# This workflow will run on all Pull Requests, if a .go, .mod or .sum file have been changed +on: + pull_request: + push: + branches: + - main + +concurrency: + group: ci-${{ github.ref }}-sims + cancel-in-progress: true + +jobs: + build: + permissions: + contents: read # for actions/checkout to fetch code + runs-on: ubuntu-latest + if: "!contains(github.event.head_commit.message, 'skip-sims')" + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-go@v4 + with: + go-version: "1.21" + check-latest: true + - run: make -C simapp build + - name: Install runsim + run: go install github.com/cosmos/tools/cmd/runsim@v1.0.0 + - uses: actions/cache@v3 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-runsim-binary + + test-sim-nondeterminism: + runs-on: ubuntu-latest + needs: [build] + timeout-minutes: 60 + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-go@v4 + with: + go-version: "1.21" + check-latest: true + - uses: actions/cache@v3 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-runsim-binary + - name: test-sim-import-export + run: | + make test-sim-nondeterminism + + test-sim-import-export: + runs-on: ubuntu-latest + needs: [build] + timeout-minutes: 60 + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-go@v4 
+ with: + go-version: "1.21" + check-latest: true + - uses: actions/cache@v3 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-runsim-binary + - name: test-sim-import-export + run: | + make test-sim-import-export + + test-sim-after-import: + runs-on: ubuntu-latest + needs: [build] + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-go@v4 + with: + go-version: "1.21" + check-latest: true + - uses: actions/cache@v3 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-runsim-binary + - name: test-sim-after-import + run: | + make test-sim-after-import + + test-sim-multi-seed-short: + runs-on: ubuntu-latest + needs: [build] + timeout-minutes: 60 + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-go@v4 + with: + go-version: "1.21" + check-latest: true + - uses: actions/cache@v3 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-runsim-binary + - name: test-sim-multi-seed-short + run: | + make test-sim-multi-seed-short + + sims-notify-success: + needs: + [test-sim-multi-seed-short, test-sim-after-import, test-sim-import-export] + runs-on: ubuntu-latest + if: ${{ success() }} + steps: + - name: Check out repository + uses: actions/checkout@v3 + - name: Get previous workflow status + uses: ./.github/actions/last-workflow-status + id: last_status + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + + - name: Notify Slack on success + if: ${{ steps.last_status.outputs.last_status == 'failure' }} + uses: rtCamp/action-slack-notify@v2.2.0 + env: + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} + SLACK_CHANNEL: sdk-sims + SLACK_USERNAME: Sim Tests + SLACK_ICON_EMOJI: ":white_check_mark:" + SLACK_COLOR: good + SLACK_MESSAGE: Sims are passing + SLACK_FOOTER: "" + + sims-notify-failure: + permissions: + contents: none + needs: + [test-sim-multi-seed-short, test-sim-after-import, test-sim-import-export] + runs-on: ubuntu-latest + if: ${{ failure() }} + steps: + - name: Notify Slack on failure + uses: rtCamp/action-slack-notify@v2.2.0 + env: + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} + SLACK_CHANNEL: sdk-sims + SLACK_USERNAME: Sim Tests + SLACK_ICON_EMOJI: ":skull:" + SLACK_COLOR: danger + SLACK_MESSAGE: Sims are failing + SLACK_FOOTER: "" + \ No newline at end of file diff --git a/Makefile b/Makefile index 28871be..a66ba5c 100644 --- a/Makefile +++ b/Makefile @@ -162,33 +162,33 @@ test-sim-nondeterminism: test-sim-custom-genesis-fast: @echo "Running custom genesis simulation..." @echo "By default, ${HOME}/.cada/config/genesis.json will be used." - @cd ${CURRENT_DIR}/simapp && go test -mod=readonly -run TestFullAppSimulation -Genesis=${HOME}/.cada/config/genesis.json \ + @cd ${CURRENT_DIR}/simapp/app && go test -mod=readonly -run TestFullAppSimulation -Genesis=${HOME}/.cada/config/genesis.json \ -Enabled=true -NumBlocks=100 -BlockSize=200 -Commit=true -Seed=99 -Period=5 -v -timeout 24h test-sim-import-export: runsim @echo "Running application import/export simulation. This may take several minutes..." - @cd ${CURRENT_DIR}/simapp && $(BINDIR)/runsim -Jobs=4 -SimAppPkg=. -ExitOnFail 50 5 TestAppImportExport + @cd ${CURRENT_DIR}/simapp/app && $(BINDIR)/runsim -Jobs=4 -SimAppPkg=. -ExitOnFail 50 5 TestAppImportExport test-sim-after-import: runsim @echo "Running application simulation-after-import. This may take several minutes..." - @cd ${CURRENT_DIR}/simapp && $(BINDIR)/runsim -Jobs=4 -SimAppPkg=. -ExitOnFail 50 5 TestAppSimulationAfterImport + @cd ${CURRENT_DIR}/simapp/app && $(BINDIR)/runsim -Jobs=4 -SimAppPkg=. 
-ExitOnFail 50 5 TestAppSimulationAfterImport test-sim-custom-genesis-multi-seed: runsim @echo "Running multi-seed custom genesis simulation..." @echo "By default, ${HOME}/.cada/config/genesis.json will be used." - @cd ${CURRENT_DIR}/simapp && $(BINDIR)/runsim -Genesis=${HOME}/.cada/config/genesis.json -SimAppPkg=. -ExitOnFail 400 5 TestFullAppSimulation + @cd ${CURRENT_DIR}/simapp/app && $(BINDIR)/runsim -Genesis=${HOME}/.cada/config/genesis.json -SimAppPkg=. -ExitOnFail 400 5 TestFullAppSimulation test-sim-multi-seed-long: runsim @echo "Running long multi-seed application simulation. This may take awhile!" - @cd ${CURRENT_DIR}/simapp && $(BINDIR)/runsim -Jobs=4 -SimAppPkg=. -ExitOnFail 500 50 TestFullAppSimulation + @cd ${CURRENT_DIR}/simapp/app && $(BINDIR)/runsim -Jobs=4 -SimAppPkg=. -ExitOnFail 500 50 TestFullAppSimulation test-sim-multi-seed-short: runsim @echo "Running short multi-seed application simulation. This may take awhile!" - @cd ${CURRENT_DIR}/simapp && $(BINDIR)/runsim -Jobs=4 -SimAppPkg=. -ExitOnFail 50 10 TestFullAppSimulation + @cd ${CURRENT_DIR}/simapp/app && $(BINDIR)/runsim -Jobs=4 -SimAppPkg=. -ExitOnFail 50 10 TestFullAppSimulation test-sim-benchmark-invariants: @echo "Running simulation invariant benchmarks..." - cd ${CURRENT_DIR}/simapp && @go test -mod=readonly -benchmem -bench=BenchmarkInvariants -run=^$ \ + cd ${CURRENT_DIR}/simapp/app && @go test -mod=readonly -benchmem -bench=BenchmarkInvariants -run=^$ \ -Enabled=true -NumBlocks=1000 -BlockSize=200 \ -Period=1 -Commit=true -Seed=57 -v -timeout 24h diff --git a/simapp/app/sim_bench_test.go b/simapp/app/sim_bench_test.go new file mode 100644 index 0000000..66033db --- /dev/null +++ b/simapp/app/sim_bench_test.go @@ -0,0 +1,150 @@ +package app + +import ( + "fmt" + "os" + "testing" + + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/server" + simtestutil "github.com/cosmos/cosmos-sdk/testutil/sims" + simtypes "github.com/cosmos/cosmos-sdk/types/simulation" + "github.com/cosmos/cosmos-sdk/x/simulation" + simcli "github.com/cosmos/cosmos-sdk/x/simulation/client/cli" +) + +// Profile with: +// /usr/local/go/bin/go test -benchmem -run=^$ cosmossdk.io/simapp -bench ^BenchmarkFullAppSimulation$ -Commit=true -cpuprofile cpu.out +func BenchmarkFullAppSimulation(b *testing.B) { + b.ReportAllocs() + config := simcli.NewConfigFromFlags() + config.ChainID = AppChainID + + db, dir, logger, skip, err := simtestutil.SetupSimulation(config, "goleveldb-app-sim", "Simulation", + simcli.FlagVerboseValue, simcli.FlagEnabledValue) + if err != nil { + b.Fatalf("simulation setup failed: %s", err.Error()) + } + + if skip { + b.Skip("skipping benchmark application simulation") + } + + defer func() { + db.Close() + err = os.RemoveAll(dir) + if err != nil { + b.Fatal(err) + } + }() + + appOptions := make(simtestutil.AppOptionsMap, 0) + appOptions[flags.FlagHome] = DefaultNodeHome + appOptions[server.FlagInvCheckPeriod] = simcli.FlagPeriodValue + + app := NewChainApp(logger, db, nil, true, appOptions, interBlockCacheOpt(), baseapp.SetChainID(AppChainID)) + + // run randomized simulation + _, simParams, simErr := simulation.SimulateFromSeed( + b, + os.Stdout, + app.BaseApp, + simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), app.DefaultGenesis()), + simtypes.RandomAccounts, // Replace with own random account function if using keys other than secp256k1 + simtestutil.SimulationOperations(app, app.AppCodec(), config), + BlockedAddresses(), + config, + 
app.AppCodec(), + ) + + // export state and simParams before the simulation error is checked + if err = simtestutil.CheckExportSimulation(app, config, simParams); err != nil { + b.Fatal(err) + } + + if simErr != nil { + b.Fatal(simErr) + } + + if config.Commit { + simtestutil.PrintStats(db) + } +} + +func BenchmarkInvariants(b *testing.B) { + b.ReportAllocs() + + config := simcli.NewConfigFromFlags() + config.ChainID = AppChainID + + db, dir, logger, skip, err := simtestutil.SetupSimulation(config, "leveldb-app-invariant-bench", "Simulation", + simcli.FlagVerboseValue, simcli.FlagEnabledValue) + if err != nil { + b.Fatalf("simulation setup failed: %s", err.Error()) + } + + if skip { + b.Skip("skipping benchmark application simulation") + } + + config.AllInvariants = false + + defer func() { + db.Close() + err = os.RemoveAll(dir) + if err != nil { + b.Fatal(err) + } + }() + + appOptions := make(simtestutil.AppOptionsMap, 0) + appOptions[flags.FlagHome] = DefaultNodeHome + appOptions[server.FlagInvCheckPeriod] = simcli.FlagPeriodValue + + app := NewChainApp(logger, db, nil, true, appOptions, interBlockCacheOpt(), baseapp.SetChainID(AppChainID)) + + // run randomized simulation + _, simParams, simErr := simulation.SimulateFromSeed( + b, + os.Stdout, + app.BaseApp, + simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), app.DefaultGenesis()), + simtypes.RandomAccounts, // Replace with own random account function if using keys other than secp256k1 + simtestutil.SimulationOperations(app, app.AppCodec(), config), + BlockedAddresses(), + config, + app.AppCodec(), + ) + + // export state and simParams before the simulation error is checked + if err = simtestutil.CheckExportSimulation(app, config, simParams); err != nil { + b.Fatal(err) + } + + if simErr != nil { + b.Fatal(simErr) + } + + if config.Commit { + simtestutil.PrintStats(db) + } + + ctx := app.NewContext(true) + + // 3. Benchmark each invariant separately + // + // NOTE: We use the crisis keeper as it has all the invariants registered with + // their respective metadata which makes it useful for testing/benchmarking. 
+ for _, cr := range app.CrisisKeeper.Routes() { + cr := cr + b.Run(fmt.Sprintf("%s/%s", cr.ModuleName, cr.Route), func(b *testing.B) { + if res, stop := cr.Invar(ctx); stop { + b.Fatalf( + "broken invariant at block %d of %d\n%s", + ctx.BlockHeight()-1, config.NumBlocks, res, + ) + } + }) + } +} diff --git a/simapp/app/sim_test.go b/simapp/app/sim_test.go new file mode 100644 index 0000000..f718de8 --- /dev/null +++ b/simapp/app/sim_test.go @@ -0,0 +1,408 @@ +package app + +import ( + "encoding/json" + "fmt" + "os" + "runtime/debug" + "strings" + "testing" + + "cosmossdk.io/log" + "cosmossdk.io/store" + storetypes "cosmossdk.io/store/types" + evidencetypes "cosmossdk.io/x/evidence/types" + abci "github.com/cometbft/cometbft/abci/types" + dbm "github.com/cosmos/cosmos-db" + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/client/flags" + "github.com/cosmos/cosmos-sdk/server" + simtestutil "github.com/cosmos/cosmos-sdk/testutil/sims" + simtypes "github.com/cosmos/cosmos-sdk/types/simulation" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + authzkeeper "github.com/cosmos/cosmos-sdk/x/authz/keeper" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + distrtypes "github.com/cosmos/cosmos-sdk/x/distribution/types" + govtypes "github.com/cosmos/cosmos-sdk/x/gov/types" + minttypes "github.com/cosmos/cosmos-sdk/x/mint/types" + paramtypes "github.com/cosmos/cosmos-sdk/x/params/types" + "github.com/cosmos/cosmos-sdk/x/simulation" + simcli "github.com/cosmos/cosmos-sdk/x/simulation/client/cli" + slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types" + stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" + capabilitytypes "github.com/cosmos/ibc-go/modules/capability/types" + "github.com/stretchr/testify/require" + cadatypes "github.com/vitwit/avail-da-module/x/cada/types" + "golang.org/x/exp/rand" +) + +// AppChainID hardcoded chainID for simulation +const AppChainID = "demo" + +// Get flags every time the simulator is run +func init() { + simcli.GetSimulatorFlags() +} + +type StoreKeysPrefixes struct { + A storetypes.StoreKey + B storetypes.StoreKey + Prefixes [][]byte +} + +// fauxMerkleModeOpt returns a BaseApp option to use a dbStoreAdapter instead of +// an IAVLStore for faster simulation speed. +func fauxMerkleModeOpt(bapp *baseapp.BaseApp) { + bapp.SetFauxMerkleMode() +} + +// interBlockCacheOpt returns a BaseApp option function that sets the persistent +// inter-block write-through cache. 
+func interBlockCacheOpt() func(*baseapp.BaseApp) { + return baseapp.SetInterBlockCache(store.NewCommitKVStoreCacheManager()) +} + +func TestFullAppSimulation(t *testing.T) { + config := simcli.NewConfigFromFlags() + config.ChainID = AppChainID + + db, dir, logger, skip, err := simtestutil.SetupSimulation(config, "leveldb-app-sim", "Simulation", + simcli.FlagVerboseValue, simcli.FlagEnabledValue) + if skip { + t.Skip("skipping application simulation") + } + require.NoError(t, err, "simulation setup failed") + + defer func() { + require.NoError(t, db.Close()) + require.NoError(t, os.RemoveAll(dir)) + }() + + appOptions := make(simtestutil.AppOptionsMap, 0) + appOptions[flags.FlagHome] = DefaultNodeHome + appOptions[server.FlagInvCheckPeriod] = simcli.FlagPeriodValue + + app := NewChainApp(logger, db, nil, true, appOptions, fauxMerkleModeOpt, baseapp.SetChainID(AppChainID)) + + require.Equal(t, appName, app.Name()) + + // run randomized simulation + _, simParams, simErr := simulation.SimulateFromSeed( + t, + os.Stdout, + app.BaseApp, + simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), app.DefaultGenesis()), + simtypes.RandomAccounts, // Replace with own random account function if using keys other than secp256k1 + simtestutil.SimulationOperations(app, app.AppCodec(), config), + BlockedAddresses(), + config, + app.AppCodec(), + ) + + // export state and simParams before the simulation error is checked + err = simtestutil.CheckExportSimulation(app, config, simParams) + require.NoError(t, err) + require.NoError(t, simErr) + + if config.Commit { + simtestutil.PrintStats(db) + } +} + +func TestAppImportExport(t *testing.T) { + config := simcli.NewConfigFromFlags() + config.ChainID = AppChainID + + db, dir, logger, skip, err := simtestutil.SetupSimulation(config, "leveldb-app-sim", "Simulation", + simcli.FlagVerboseValue, simcli.FlagEnabledValue) + if skip { + t.Skip("skipping application import/export simulation") + } + require.NoError(t, err, "simulation setup failed") + + defer func() { + require.NoError(t, db.Close()) + require.NoError(t, os.RemoveAll(dir)) + }() + + appOptions := make(simtestutil.AppOptionsMap, 0) + appOptions[flags.FlagHome] = DefaultNodeHome + appOptions[server.FlagInvCheckPeriod] = simcli.FlagPeriodValue + + app := NewChainApp(logger, db, nil, true, appOptions, fauxMerkleModeOpt, baseapp.SetChainID(AppChainID)) + require.Equal(t, appName, app.Name()) + + // Run randomized simulation + _, simParams, simErr := simulation.SimulateFromSeed( + t, + os.Stdout, + app.BaseApp, + simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), app.DefaultGenesis()), + simtypes.RandomAccounts, // Replace with own random account function if using keys other than secp256k1 + simtestutil.SimulationOperations(app, app.AppCodec(), config), + BlockedAddresses(), + config, + app.AppCodec(), + ) + + // export state and simParams before the simulation error is checked + err = simtestutil.CheckExportSimulation(app, config, simParams) + require.NoError(t, err) + require.NoError(t, simErr) + + if config.Commit { + simtestutil.PrintStats(db) + } + + fmt.Printf("exporting genesis...\n") + + exported, err := app.ExportAppStateAndValidators(false, []string{}, []string{}) + require.NoError(t, err) + + fmt.Printf("importing genesis...\n") + + newDB, newDir, _, _, err := simtestutil.SetupSimulation(config, "leveldb-app-sim-2", "Simulation-2", + simcli.FlagVerboseValue, simcli.FlagEnabledValue) + require.NoError(t, err, "simulation setup failed") + + defer func() { + require.NoError(t, 
newDB.Close()) + require.NoError(t, os.RemoveAll(newDir)) + }() + + newApp := NewChainApp(log.NewNopLogger(), newDB, nil, true, appOptions, fauxMerkleModeOpt, baseapp.SetChainID(AppChainID)) + require.Equal(t, appName, newApp.Name()) + + var genesisState GenesisState + err = json.Unmarshal(exported.AppState, &genesisState) + require.NoError(t, err) + + defer func() { + if r := recover(); r != nil { + err := fmt.Sprintf("%v", r) + if !strings.Contains(err, "validator set is empty after InitGenesis") && + !strings.Contains(err, "invalid cacheMergeIterator") { + panic(r) + } + logger.Info("Skipping simulation as all validators have been unbonded") + logger.Info("err", err, "stacktrace", string(debug.Stack())) + } + }() + + ctxA := app.NewContext(true) + ctxB := newApp.NewContext(true) + newApp.ModuleManager.InitGenesis(ctxB, app.AppCodec(), genesisState) + newApp.StoreConsensusParams(ctxB, exported.ConsensusParams) + + fmt.Printf("comparing stores...\n") + + storeKeysPrefixes := []StoreKeysPrefixes{ + {app.GetKey(authtypes.StoreKey), newApp.GetKey(authtypes.StoreKey), [][]byte{}}, + { + app.GetKey(stakingtypes.StoreKey), newApp.GetKey(stakingtypes.StoreKey), + [][]byte{ + stakingtypes.UnbondingQueueKey, stakingtypes.RedelegationQueueKey, stakingtypes.ValidatorQueueKey, + stakingtypes.HistoricalInfoKey, stakingtypes.UnbondingIDKey, stakingtypes.UnbondingIndexKey, + stakingtypes.UnbondingTypeKey, stakingtypes.ValidatorUpdatesKey, + }, + }, // ordering may change but it doesn't matter + {app.GetKey(slashingtypes.StoreKey), newApp.GetKey(slashingtypes.StoreKey), [][]byte{}}, + {app.GetKey(minttypes.StoreKey), newApp.GetKey(minttypes.StoreKey), [][]byte{}}, + {app.GetKey(distrtypes.StoreKey), newApp.GetKey(distrtypes.StoreKey), [][]byte{}}, + {app.GetKey(banktypes.StoreKey), newApp.GetKey(banktypes.StoreKey), [][]byte{banktypes.BalancesPrefix}}, + {app.GetKey(paramtypes.StoreKey), newApp.GetKey(paramtypes.StoreKey), [][]byte{}}, + {app.GetKey(govtypes.StoreKey), newApp.GetKey(govtypes.StoreKey), [][]byte{}}, + {app.GetKey(evidencetypes.StoreKey), newApp.GetKey(evidencetypes.StoreKey), [][]byte{}}, + {app.GetKey(capabilitytypes.StoreKey), newApp.GetKey(capabilitytypes.StoreKey), [][]byte{}}, + {app.GetKey(authzkeeper.StoreKey), newApp.GetKey(authzkeeper.StoreKey), [][]byte{authzkeeper.GrantKey, authzkeeper.GrantQueuePrefix}}, + {app.GetKey(cadatypes.StoreKey), newApp.GetKey(cadatypes.StoreKey), [][]byte{}}, + } + + for _, skp := range storeKeysPrefixes { + storeA := ctxA.KVStore(skp.A) + storeB := ctxB.KVStore(skp.B) + + failedKVAs, failedKVBs := simtestutil.DiffKVStores(storeA, storeB, skp.Prefixes) + require.Equal(t, len(failedKVAs), len(failedKVBs), "unequal sets of key-values to compare") + + fmt.Printf("compared %d different key/value pairs between %s and %s\n", len(failedKVAs), skp.A, skp.B) + require.Equal(t, len(failedKVAs), 0, simtestutil.GetSimulationLog(skp.A.Name(), app.SimulationManager().StoreDecoders, failedKVAs, failedKVBs)) + } +} + +func TestAppSimulationAfterImport(t *testing.T) { + config := simcli.NewConfigFromFlags() + config.ChainID = AppChainID + + db, dir, logger, skip, err := simtestutil.SetupSimulation(config, "leveldb-app-sim", "Simulation", + simcli.FlagVerboseValue, simcli.FlagEnabledValue) + if skip { + t.Skip("skipping application simulation after import") + } + require.NoError(t, err, "simulation setup failed") + + defer func() { + require.NoError(t, db.Close()) + require.NoError(t, os.RemoveAll(dir)) + }() + + appOptions := make(simtestutil.AppOptionsMap, 0) + 
appOptions[flags.FlagHome] = DefaultNodeHome + appOptions[server.FlagInvCheckPeriod] = simcli.FlagPeriodValue + + app := NewChainApp(logger, db, nil, true, appOptions, fauxMerkleModeOpt, baseapp.SetChainID(AppChainID)) + require.Equal(t, appName, app.Name()) + + // Run randomized simulation + stopEarly, simParams, simErr := simulation.SimulateFromSeed( + t, + os.Stdout, + app.BaseApp, + simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), app.DefaultGenesis()), + simtypes.RandomAccounts, // Replace with own random account function if using keys other than secp256k1 + simtestutil.SimulationOperations(app, app.AppCodec(), config), + BlockedAddresses(), + config, + app.AppCodec(), + ) + + // export state and simParams before the simulation error is checked + err = simtestutil.CheckExportSimulation(app, config, simParams) + require.NoError(t, err) + require.NoError(t, simErr) + + if config.Commit { + simtestutil.PrintStats(db) + } + + if stopEarly { + fmt.Println("can't export or import a zero-validator genesis, exiting test...") + return + } + + fmt.Printf("exporting genesis...\n") + + exported, err := app.ExportAppStateAndValidators(true, []string{}, []string{}) + require.NoError(t, err) + + fmt.Printf("importing genesis...\n") + + newDB, newDir, _, _, err := simtestutil.SetupSimulation(config, "leveldb-app-sim-2", "Simulation-2", + simcli.FlagVerboseValue, simcli.FlagEnabledValue) + require.NoError(t, err, "simulation setup failed") + + defer func() { + require.NoError(t, newDB.Close()) + require.NoError(t, os.RemoveAll(newDir)) + }() + + newApp := NewChainApp(log.NewNopLogger(), newDB, nil, true, appOptions, fauxMerkleModeOpt, baseapp.SetChainID(AppChainID)) + require.Equal(t, appName, newApp.Name()) + + defer func() { + if r := recover(); r != nil { + err := fmt.Sprintf("%v", r) + if !strings.Contains(err, "validator set is empty after InitGenesis") { + panic(r) + } + logger.Info("Skipping simulation as all validators have been unbonded") + logger.Info("err", err, "stacktrace", string(debug.Stack())) + } + }() + + newApp.InitChain(&abci.RequestInitChain{ + AppStateBytes: exported.AppState, + ChainId: AppChainID, + }) + + _, _, err = simulation.SimulateFromSeed( + t, + os.Stdout, + newApp.BaseApp, + simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), app.DefaultGenesis()), + simtypes.RandomAccounts, // Replace with own random account function if using keys other than secp256k1 + simtestutil.SimulationOperations(newApp, newApp.AppCodec(), config), + BlockedAddresses(), + config, + app.AppCodec(), + ) + require.NoError(t, err) +} + +// TODO: Make another test for the fuzzer itself, which just has noOp txs +// and doesn't depend on the application. 
+func TestAppStateDeterminism(t *testing.T) { + if !simcli.FlagEnabledValue { + t.Skip("skipping application simulation") + } + + config := simcli.NewConfigFromFlags() + config.InitialBlockHeight = 1 + config.ExportParamsPath = "" + config.OnOperation = false + config.AllInvariants = false + config.ChainID = AppChainID + + numSeeds := 3 + numTimesToRunPerSeed := 5 + + // We will be overriding the random seed and just run a single simulation on the provided seed value + if config.Seed != simcli.DefaultSeedValue { + numSeeds = 1 + } + + appHashList := make([]json.RawMessage, numTimesToRunPerSeed) + appOptions := make(simtestutil.AppOptionsMap, 0) + appOptions[flags.FlagHome] = DefaultNodeHome + appOptions[server.FlagInvCheckPeriod] = simcli.FlagPeriodValue + + for i := 0; i < numSeeds; i++ { + if config.Seed == simcli.DefaultSeedValue { + config.Seed = rand.Int63() + } + + for j := 0; j < numTimesToRunPerSeed; j++ { + var logger log.Logger + if simcli.FlagVerboseValue { + logger = log.NewTestLogger(t) + } else { + logger = log.NewNopLogger() + } + + db := dbm.NewMemDB() + app := NewChainApp(logger, db, nil, true, appOptions, interBlockCacheOpt(), baseapp.SetChainID(AppChainID)) + + fmt.Printf( + "running non-determinism simulation; seed %d: %d/%d, attempt: %d/%d\n", + config.Seed, i+1, numSeeds, j+1, numTimesToRunPerSeed, + ) + + _, _, err := simulation.SimulateFromSeed( + t, + os.Stdout, + app.BaseApp, + simtestutil.AppStateFn(app.AppCodec(), app.SimulationManager(), app.DefaultGenesis()), + simtypes.RandomAccounts, // Replace with own random account function if using keys other than secp256k1 + simtestutil.SimulationOperations(app, app.AppCodec(), config), + BlockedAddresses(), + config, + app.AppCodec(), + ) + require.NoError(t, err) + + if config.Commit { + simtestutil.PrintStats(db) + } + + appHash := app.LastCommitID().Hash + appHashList[j] = appHash + + if j != 0 { + require.Equal( + t, string(appHashList[0]), string(appHashList[j]), + "non-determinism in seed %d: %d/%d, attempt: %d/%d\n", config.Seed, i+1, numSeeds, j+1, numTimesToRunPerSeed, + ) + } + } + } +} diff --git a/x/cada/keeper/keeper.go b/x/cada/keeper/keeper.go index 5048ebd..d881e77 100644 --- a/x/cada/keeper/keeper.go +++ b/x/cada/keeper/keeper.go @@ -66,3 +66,7 @@ func (k *Keeper) GetBlobStatus(ctx sdk.Context) uint32 { store := ctx.KVStore(k.storeKey) return GetStatusFromStore(store) } + +func (k Keeper) GetStoreKey() storetypes2.StoreKey { + return k.storeKey +} diff --git a/x/cada/module/module.go b/x/cada/module/module.go index b5b78ea..8180d24 100644 --- a/x/cada/module/module.go +++ b/x/cada/module/module.go @@ -12,12 +12,14 @@ import ( codectypes "github.com/cosmos/cosmos-sdk/codec/types" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/types/module" + simtypes "github.com/cosmos/cosmos-sdk/types/simulation" authkeeper "github.com/cosmos/cosmos-sdk/x/auth/keeper" bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper" gwruntime "github.com/grpc-ecosystem/grpc-gateway/runtime" "github.com/spf13/cobra" "github.com/vitwit/avail-da-module/x/cada/client/cli" "github.com/vitwit/avail-da-module/x/cada/keeper" + simulation "github.com/vitwit/avail-da-module/x/cada/simulation" types "github.com/vitwit/avail-da-module/x/cada/types" ) @@ -171,18 +173,18 @@ func (am AppModule) IsOnePerModuleType() {} // IsAppModule implements the appmodule.AppModule interface. 
func (am AppModule) IsAppModule() {}
 
-// func (AppModule) GenerateGenesisState(simState *module.SimulationState) {
-// 	simulation.RandomizedGenState(simState)
-// }
-
-// // RegisterStoreDecoder registers a decoder for distribution module's types
-// func (am AppModule) RegisterStoreDecoder(_ simtypes.StoreDecoderRegistry) {
-// }
-
-// // WeightedOperations returns the all the accounts module operations with their respective weights.
-// func (am AppModule) WeightedOperations(simState module.SimulationState) []simtypes.WeightedOperation {
-// 	return simulation.WeightedOperations(
-// 		simState.AppParams, simState.Cdc, simState.TxConfig,
-// 		am.authkeeper, am.bankkeeper, *am.keeper,
-// 	)
-// }
+func (AppModule) GenerateGenesisState(simState *module.SimulationState) {
+	simulation.RandomizedGenState(simState)
+}
+
+// RegisterStoreDecoder registers a decoder for the cada module's types
+func (am AppModule) RegisterStoreDecoder(_ simtypes.StoreDecoderRegistry) {
+}
+
+// WeightedOperations returns all the cada module operations with their respective weights.
+func (am AppModule) WeightedOperations(simState module.SimulationState) []simtypes.WeightedOperation {
+	return simulation.WeightedOperations(
+		simState.AppParams, simState.Cdc, simState.TxConfig,
+		am.authkeeper, am.bankkeeper, *am.keeper,
+	)
+}
diff --git a/x/cada/simulation/genesis.go b/x/cada/simulation/genesis.go
new file mode 100644
index 0000000..5f32926
--- /dev/null
+++ b/x/cada/simulation/genesis.go
@@ -0,0 +1,16 @@
+package simulation
+
+import (
+	"github.com/cosmos/cosmos-sdk/types/module"
+	types "github.com/vitwit/avail-da-module/x/cada/types"
+)
+
+// RandomizedGenState creates a randomized GenesisState for testing.
+func RandomizedGenState(simState *module.SimulationState) {
+	// The module's GenesisState is currently empty, so there is nothing to randomize.
+	// Set the genesis state to its empty struct.
+ genesis := types.GenesisState{} + + // Here we use simState to set the default genesis + simState.GenState[types.ModuleName] = simState.Cdc.MustMarshalJSON(&genesis) +} diff --git a/x/cada/simulation/operation.go b/x/cada/simulation/operation.go new file mode 100644 index 0000000..2f7e06c --- /dev/null +++ b/x/cada/simulation/operation.go @@ -0,0 +1,103 @@ +package simulation + +import ( + "math/rand" + + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/codec" + sdk "github.com/cosmos/cosmos-sdk/types" + moduletestutil "github.com/cosmos/cosmos-sdk/types/module/testutil" + simtypes "github.com/cosmos/cosmos-sdk/types/simulation" + authkeeper "github.com/cosmos/cosmos-sdk/x/auth/keeper" + bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper" + "github.com/cosmos/cosmos-sdk/x/simulation" + "github.com/vitwit/avail-da-module/x/cada/keeper" + cadastore "github.com/vitwit/avail-da-module/x/cada/keeper" + availtypes "github.com/vitwit/avail-da-module/x/cada/types" +) + +const ( + OpWeightMsgUpdateBlobStatusRequest = "op_weight_msg_update_blob_status" + + DefaultWeightMsgUpdateStatusRequest = 100 +) + +func WeightedOperations( + appParams simtypes.AppParams, cdc codec.JSONCodec, txConfig client.TxConfig, + ak authkeeper.AccountKeeper, bk bankkeeper.Keeper, k keeper.Keeper, +) simulation.WeightedOperations { + var weightMsgUpdateBlobStatusRequest int + appParams.GetOrGenerate(OpWeightMsgUpdateBlobStatusRequest, &weightMsgUpdateBlobStatusRequest, nil, func(_ *rand.Rand) { + weightMsgUpdateBlobStatusRequest = DefaultWeightMsgUpdateStatusRequest + }) + + return simulation.WeightedOperations{ + simulation.NewWeightedOperation( + weightMsgUpdateBlobStatusRequest, + SimulateMsgUpdateBlobStatus(ak, bk, k), + ), + } +} + +func SimulateMsgUpdateBlobStatus(ak authkeeper.AccountKeeper, bk bankkeeper.Keeper, k keeper.Keeper) simtypes.Operation { + return func( + r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, + ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { + + ctx = ctx.WithBlockHeight(20) + // Randomly select a sender account + sender, _ := simtypes.RandomAcc(r, accs) + + // Ensure the sender has sufficient balance + account := ak.GetAccount(ctx, sender.Address) + spendable := bk.SpendableCoins(ctx, account.GetAddress()) + + // Generate random fees for the transaction + fees, err := simtypes.RandomFees(r, ctx, spendable) + if err != nil { + return simtypes.NoOpMsg(availtypes.ModuleName, availtypes.TypeMsgUpdateBlobStatus, "unable to generate fees"), nil, err + } + + // Prepare a random blob status update + newStatus := true // You can randomize this value as needed + fromBlock := uint64(5) // Example block range start + toBlock := uint64(20) // Example block range end + availHeight := uint64(120) + + ran := availtypes.Range{ + From: fromBlock, + To: toBlock, + } + + msg := availtypes.NewMsgUpdateBlobStatus( + sender.Address.String(), + ran, + availHeight, + newStatus, + ) + + store := ctx.KVStore(k.GetStoreKey()) + cadastore.UpdateEndHeight(ctx, store, uint64(20)) + + cadastore.UpdateProvenHeight(ctx, store, uint64(4)) + + cadastore.UpdateBlobStatus(ctx, store, uint32(1)) + + // Set up the transaction context + txCtx := simulation.OperationInput{ + R: r, + App: app, + TxGen: moduletestutil.MakeTestEncodingConfig().TxConfig, + Cdc: nil, + Msg: msg, + Context: ctx, + SimAccount: sender, + AccountKeeper: ak, + ModuleName: availtypes.ModuleName, + } + + // Generate and deliver the 
transaction + return simulation.GenAndDeliverTx(txCtx, fees) + } +} From 7ef2b181aef60b0cf17793476307503535603976 Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Mon, 30 Sep 2024 16:15:08 +0530 Subject: [PATCH 20/34] fix lint --- x/cada/simulation/operation.go | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/x/cada/simulation/operation.go b/x/cada/simulation/operation.go index 2f7e06c..4eb5c9d 100644 --- a/x/cada/simulation/operation.go +++ b/x/cada/simulation/operation.go @@ -12,8 +12,7 @@ import ( authkeeper "github.com/cosmos/cosmos-sdk/x/auth/keeper" bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper" "github.com/cosmos/cosmos-sdk/x/simulation" - "github.com/vitwit/avail-da-module/x/cada/keeper" - cadastore "github.com/vitwit/avail-da-module/x/cada/keeper" + cadakeeper "github.com/vitwit/avail-da-module/x/cada/keeper" availtypes "github.com/vitwit/avail-da-module/x/cada/types" ) @@ -24,8 +23,8 @@ const ( ) func WeightedOperations( - appParams simtypes.AppParams, cdc codec.JSONCodec, txConfig client.TxConfig, - ak authkeeper.AccountKeeper, bk bankkeeper.Keeper, k keeper.Keeper, + appParams simtypes.AppParams, _ codec.JSONCodec, _ client.TxConfig, + ak authkeeper.AccountKeeper, bk bankkeeper.Keeper, k cadakeeper.Keeper, ) simulation.WeightedOperations { var weightMsgUpdateBlobStatusRequest int appParams.GetOrGenerate(OpWeightMsgUpdateBlobStatusRequest, &weightMsgUpdateBlobStatusRequest, nil, func(_ *rand.Rand) { @@ -40,11 +39,10 @@ func WeightedOperations( } } -func SimulateMsgUpdateBlobStatus(ak authkeeper.AccountKeeper, bk bankkeeper.Keeper, k keeper.Keeper) simtypes.Operation { +func SimulateMsgUpdateBlobStatus(ak authkeeper.AccountKeeper, bk bankkeeper.Keeper, k cadakeeper.Keeper) simtypes.Operation { return func( r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { - ctx = ctx.WithBlockHeight(20) // Randomly select a sender account sender, _ := simtypes.RandomAcc(r, accs) @@ -78,11 +76,11 @@ func SimulateMsgUpdateBlobStatus(ak authkeeper.AccountKeeper, bk bankkeeper.Keep ) store := ctx.KVStore(k.GetStoreKey()) - cadastore.UpdateEndHeight(ctx, store, uint64(20)) + cadakeeper.UpdateEndHeight(ctx, store, uint64(20)) - cadastore.UpdateProvenHeight(ctx, store, uint64(4)) + cadakeeper.UpdateProvenHeight(ctx, store, uint64(4)) - cadastore.UpdateBlobStatus(ctx, store, uint32(1)) + cadakeeper.UpdateBlobStatus(ctx, store, uint32(1)) // Set up the transaction context txCtx := simulation.OperationInput{ From c24d1eb5d8be525698365156b86ac7656a1d8869 Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Mon, 30 Sep 2024 16:18:13 +0530 Subject: [PATCH 21/34] fix lint --- x/cada/simulation/operation.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x/cada/simulation/operation.go b/x/cada/simulation/operation.go index 4eb5c9d..b6ff89f 100644 --- a/x/cada/simulation/operation.go +++ b/x/cada/simulation/operation.go @@ -41,7 +41,7 @@ func WeightedOperations( func SimulateMsgUpdateBlobStatus(ak authkeeper.AccountKeeper, bk bankkeeper.Keeper, k cadakeeper.Keeper) simtypes.Operation { return func( - r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, chainID string, + r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, _ string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { ctx = ctx.WithBlockHeight(20) // Randomly select a sender account From 6165944b254883a5e36d44ea7700ded0d95d5dff 
Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Mon, 30 Sep 2024 16:26:46 +0530 Subject: [PATCH 22/34] update runism in workflow --- .github/workflows/sims.yml | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/.github/workflows/sims.yml b/.github/workflows/sims.yml index 6a776f9..f50f033 100644 --- a/.github/workflows/sims.yml +++ b/.github/workflows/sims.yml @@ -24,9 +24,19 @@ jobs: go-version: "1.21" check-latest: true - run: make -C simapp build + + install-runsim: + runs-on: ubuntu-latest + needs: build + steps: + - uses: actions/setup-go@v3 + with: + go-version: "1.21" + - name: Display go version + run: go version - name: Install runsim - run: go install github.com/cosmos/tools/cmd/runsim@v1.0.0 - - uses: actions/cache@v3 + run: export GO111MODULE="on" && go install github.com/cosmos/tools/cmd/runsim@v1.0.0 + - uses: actions/cache@v3.0.8 with: path: ~/go/bin key: ${{ runner.os }}-go-runsim-binary @@ -146,4 +156,3 @@ jobs: SLACK_COLOR: danger SLACK_MESSAGE: Sims are failing SLACK_FOOTER: "" - \ No newline at end of file From de34992e0bc7a79fceeb8d8e1d50972bc59f255a Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Mon, 30 Sep 2024 16:42:22 +0530 Subject: [PATCH 23/34] add go bin path --- .github/workflows/sims.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.github/workflows/sims.yml b/.github/workflows/sims.yml index f50f033..2113389 100644 --- a/.github/workflows/sims.yml +++ b/.github/workflows/sims.yml @@ -36,6 +36,8 @@ jobs: run: go version - name: Install runsim run: export GO111MODULE="on" && go install github.com/cosmos/tools/cmd/runsim@v1.0.0 + - name: Add Go bin to PATH + run: echo "${HOME}/go/bin" >> $GITHUB_PATH # This adds the Go bin directory to PATH - uses: actions/cache@v3.0.8 with: path: ~/go/bin @@ -73,6 +75,8 @@ jobs: with: path: ~/go/bin key: ${{ runner.os }}-go-runsim-binary + - name: Add Go bin to PATH + run: echo "${HOME}/go/bin" >> $GITHUB_PATH - name: test-sim-import-export run: | make test-sim-import-export @@ -90,6 +94,8 @@ jobs: with: path: ~/go/bin key: ${{ runner.os }}-go-runsim-binary + - name: Add Go bin to PATH + run: echo "${HOME}/go/bin" >> $GITHUB_PATH - name: test-sim-after-import run: | make test-sim-after-import @@ -108,6 +114,8 @@ jobs: with: path: ~/go/bin key: ${{ runner.os }}-go-runsim-binary + - name: Add Go bin to PATH + run: echo "${HOME}/go/bin" >> $GITHUB_PATH - name: test-sim-multi-seed-short run: | make test-sim-multi-seed-short From 708414273b6f7430ddce580b3814b1648453f966 Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Mon, 30 Sep 2024 16:46:01 +0530 Subject: [PATCH 24/34] remove notify failure & success --- .github/workflows/sims.yml | 45 -------------------------------------- 1 file changed, 45 deletions(-) diff --git a/.github/workflows/sims.yml b/.github/workflows/sims.yml index 2113389..e9eb67a 100644 --- a/.github/workflows/sims.yml +++ b/.github/workflows/sims.yml @@ -119,48 +119,3 @@ jobs: - name: test-sim-multi-seed-short run: | make test-sim-multi-seed-short - - sims-notify-success: - needs: - [test-sim-multi-seed-short, test-sim-after-import, test-sim-import-export] - runs-on: ubuntu-latest - if: ${{ success() }} - steps: - - name: Check out repository - uses: actions/checkout@v3 - - name: Get previous workflow status - uses: ./.github/actions/last-workflow-status - id: last_status - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - - - name: Notify Slack on success - if: ${{ steps.last_status.outputs.last_status == 'failure' }} - uses: rtCamp/action-slack-notify@v2.2.0 - env: 
- SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} - SLACK_CHANNEL: sdk-sims - SLACK_USERNAME: Sim Tests - SLACK_ICON_EMOJI: ":white_check_mark:" - SLACK_COLOR: good - SLACK_MESSAGE: Sims are passing - SLACK_FOOTER: "" - - sims-notify-failure: - permissions: - contents: none - needs: - [test-sim-multi-seed-short, test-sim-after-import, test-sim-import-export] - runs-on: ubuntu-latest - if: ${{ failure() }} - steps: - - name: Notify Slack on failure - uses: rtCamp/action-slack-notify@v2.2.0 - env: - SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} - SLACK_CHANNEL: sdk-sims - SLACK_USERNAME: Sim Tests - SLACK_ICON_EMOJI: ":skull:" - SLACK_COLOR: danger - SLACK_MESSAGE: Sims are failing - SLACK_FOOTER: "" From 77f52ee0daf21daea478335d6c5333f65d9f915c Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Mon, 30 Sep 2024 16:55:15 +0530 Subject: [PATCH 25/34] fix runsim issue --- .github/workflows/sims.yml | 31 +++++++++++++++++++++++-------- 1 file changed, 23 insertions(+), 8 deletions(-) diff --git a/.github/workflows/sims.yml b/.github/workflows/sims.yml index e9eb67a..e5688df 100644 --- a/.github/workflows/sims.yml +++ b/.github/workflows/sims.yml @@ -25,23 +25,38 @@ jobs: check-latest: true - run: make -C simapp build + # install-runsim: + # runs-on: ubuntu-latest + # needs: build + # steps: + # - uses: actions/setup-go@v3 + # with: + # go-version: "1.21" + # - name: Display go version + # run: go version + # - name: Install runsim + # run: export GO111MODULE="on" && go install github.com/cosmos/tools/cmd/runsim@v1.0.0 + # - name: Add Go bin to PATH + # run: echo "${HOME}/go/bin" >> $GITHUB_PATH # This adds the Go bin directory to PATH + # - uses: actions/cache@v3.0.8 + # with: + # path: ~/go/bin + # key: ${{ runner.os }}-go-runsim-binary + install-runsim: runs-on: ubuntu-latest - needs: build steps: - - uses: actions/setup-go@v3 + - uses: actions/setup-go@v4 with: go-version: "1.21" - name: Display go version run: go version - name: Install runsim - run: export GO111MODULE="on" && go install github.com/cosmos/tools/cmd/runsim@v1.0.0 + run: go install github.com/cosmos/tools/cmd/runsim@v1.0.0 - name: Add Go bin to PATH - run: echo "${HOME}/go/bin" >> $GITHUB_PATH # This adds the Go bin directory to PATH - - uses: actions/cache@v3.0.8 - with: - path: ~/go/bin - key: ${{ runner.os }}-go-runsim-binary + run: echo "${HOME}/go/bin" >> $GITHUB_PATH # This adds Go bin to PATH for subsequent steps + - name: Verify runsim installation + run: runsim --help test-sim-nondeterminism: runs-on: ubuntu-latest From 4c7aad998f9e875976d73154c49d03b44544f8d6 Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Mon, 30 Sep 2024 17:04:56 +0530 Subject: [PATCH 26/34] WIP --- .github/workflows/sims.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/sims.yml b/.github/workflows/sims.yml index e5688df..19b0ba8 100644 --- a/.github/workflows/sims.yml +++ b/.github/workflows/sims.yml @@ -94,6 +94,7 @@ jobs: run: echo "${HOME}/go/bin" >> $GITHUB_PATH - name: test-sim-import-export run: | + ${HOME}/go/bin/runsim --help make test-sim-import-export test-sim-after-import: From 6f8c16fd1472ea5cc00b40004be87f83f1a72f1a Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Mon, 30 Sep 2024 17:10:04 +0530 Subject: [PATCH 27/34] WIP --- .github/workflows/sims.yml | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/.github/workflows/sims.yml b/.github/workflows/sims.yml index 19b0ba8..c539145 100644 --- a/.github/workflows/sims.yml +++ b/.github/workflows/sims.yml @@ -78,7 +78,7 @@ jobs: test-sim-import-export: 
runs-on: ubuntu-latest - needs: [build] + needs: [build, install-runsim] timeout-minutes: 60 steps: - uses: actions/checkout@v3 @@ -90,11 +90,8 @@ jobs: with: path: ~/go/bin key: ${{ runner.os }}-go-runsim-binary - - name: Add Go bin to PATH - run: echo "${HOME}/go/bin" >> $GITHUB_PATH - name: test-sim-import-export - run: | - ${HOME}/go/bin/runsim --help + run: | make test-sim-import-export test-sim-after-import: From b5ce372d123f59a481c1eb5e6615279f65db2bae Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Mon, 30 Sep 2024 17:18:27 +0530 Subject: [PATCH 28/34] WIP --- .github/workflows/sims.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/sims.yml b/.github/workflows/sims.yml index c539145..507bf2d 100644 --- a/.github/workflows/sims.yml +++ b/.github/workflows/sims.yml @@ -82,11 +82,11 @@ jobs: timeout-minutes: 60 steps: - uses: actions/checkout@v3 - - uses: actions/setup-go@v4 + - uses: actions/setup-go@v3 with: go-version: "1.21" check-latest: true - - uses: actions/cache@v3 + - uses: actions/cache@v3.0.8 with: path: ~/go/bin key: ${{ runner.os }}-go-runsim-binary From ca64b37af1b7f5d9433082d1eb1eb0eab81be29b Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Tue, 1 Oct 2024 10:34:26 +0530 Subject: [PATCH 29/34] WIP --- .github/workflows/sims.yml | 223 ++++++++++++++++++++++++++----------- 1 file changed, 156 insertions(+), 67 deletions(-) diff --git a/.github/workflows/sims.yml b/.github/workflows/sims.yml index 507bf2d..8931610 100644 --- a/.github/workflows/sims.yml +++ b/.github/workflows/sims.yml @@ -13,122 +13,211 @@ concurrency: jobs: build: - permissions: - contents: read # for actions/checkout to fetch code runs-on: ubuntu-latest if: "!contains(github.event.head_commit.message, 'skip-sims')" steps: - - uses: actions/checkout@v3 - - uses: actions/setup-go@v4 + - uses: actions/checkout@v4 + with: + ref: "release/v0.50.x" + - uses: actions/setup-go@v5 with: - go-version: "1.21" + go-version: "1.22" check-latest: true - - run: make -C simapp build - - # install-runsim: - # runs-on: ubuntu-latest - # needs: build - # steps: - # - uses: actions/setup-go@v3 - # with: - # go-version: "1.21" - # - name: Display go version - # run: go version - # - name: Install runsim - # run: export GO111MODULE="on" && go install github.com/cosmos/tools/cmd/runsim@v1.0.0 - # - name: Add Go bin to PATH - # run: echo "${HOME}/go/bin" >> $GITHUB_PATH # This adds the Go bin directory to PATH - # - uses: actions/cache@v3.0.8 - # with: - # path: ~/go/bin - # key: ${{ runner.os }}-go-runsim-binary + - run: make build install-runsim: + permissions: + contents: none runs-on: ubuntu-latest + needs: build steps: - - uses: actions/setup-go@v4 + - uses: actions/setup-go@v5 with: - go-version: "1.21" - - name: Display go version - run: go version + go-version: "1.22" + check-latest: true - name: Install runsim run: go install github.com/cosmos/tools/cmd/runsim@v1.0.0 - - name: Add Go bin to PATH - run: echo "${HOME}/go/bin" >> $GITHUB_PATH # This adds Go bin to PATH for subsequent steps - - name: Verify runsim installation - run: runsim --help - - test-sim-nondeterminism: - runs-on: ubuntu-latest - needs: [build] - timeout-minutes: 60 - steps: - - uses: actions/checkout@v3 - - uses: actions/setup-go@v4 - with: - go-version: "1.21" - check-latest: true - - uses: actions/cache@v3 + - uses: actions/cache@v4 with: path: ~/go/bin key: ${{ runner.os }}-go-runsim-binary - - name: test-sim-import-export - run: | - make test-sim-nondeterminism test-sim-import-export: runs-on: 
ubuntu-latest needs: [build, install-runsim] timeout-minutes: 60 steps: - - uses: actions/checkout@v3 - - uses: actions/setup-go@v3 + - uses: actions/checkout@v4 + with: + ref: "release/v0.50.x" + - uses: actions/setup-go@v5 with: - go-version: "1.21" + go-version: "1.22" check-latest: true - - uses: actions/cache@v3.0.8 + - uses: actions/cache@v4 with: path: ~/go/bin key: ${{ runner.os }}-go-runsim-binary - name: test-sim-import-export - run: | + run: | make test-sim-import-export test-sim-after-import: runs-on: ubuntu-latest - needs: [build] + needs: [build, install-runsim] steps: - - uses: actions/checkout@v3 - - uses: actions/setup-go@v4 + - uses: actions/checkout@v4 with: - go-version: "1.21" + ref: "release/v0.50.x" + - uses: actions/setup-go@v5 + with: + go-version: "1.22" check-latest: true - - uses: actions/cache@v3 + - uses: actions/cache@v4 with: path: ~/go/bin key: ${{ runner.os }}-go-runsim-binary - - name: Add Go bin to PATH - run: echo "${HOME}/go/bin" >> $GITHUB_PATH - name: test-sim-after-import run: | make test-sim-after-import test-sim-multi-seed-short: runs-on: ubuntu-latest - needs: [build] - timeout-minutes: 60 + needs: [build, install-runsim] steps: - - uses: actions/checkout@v3 - - uses: actions/setup-go@v4 + - uses: actions/checkout@v4 + with: + ref: "release/v0.50.x" + - uses: actions/setup-go@v5 with: - go-version: "1.21" + go-version: "1.22" check-latest: true - - uses: actions/cache@v3 + - uses: actions/cache@v4 with: path: ~/go/bin key: ${{ runner.os }}-go-runsim-binary - - name: Add Go bin to PATH - run: echo "${HOME}/go/bin" >> $GITHUB_PATH - name: test-sim-multi-seed-short run: | make test-sim-multi-seed-short + +# jobs: +# build: +# permissions: +# contents: read # for actions/checkout to fetch code +# runs-on: ubuntu-latest +# if: "!contains(github.event.head_commit.message, 'skip-sims')" +# steps: +# - uses: actions/checkout@v3 +# - uses: actions/setup-go@v4 +# with: +# go-version: "1.21" +# check-latest: true +# - run: make -C simapp build + +# # install-runsim: +# # runs-on: ubuntu-latest +# # needs: build +# # steps: +# # - uses: actions/setup-go@v3 +# # with: +# # go-version: "1.21" +# # - name: Display go version +# # run: go version +# # - name: Install runsim +# # run: export GO111MODULE="on" && go install github.com/cosmos/tools/cmd/runsim@v1.0.0 +# # - name: Add Go bin to PATH +# # run: echo "${HOME}/go/bin" >> $GITHUB_PATH # This adds the Go bin directory to PATH +# # - uses: actions/cache@v3.0.8 +# # with: +# # path: ~/go/bin +# # key: ${{ runner.os }}-go-runsim-binary + +# install-runsim: +# runs-on: ubuntu-latest +# steps: +# - uses: actions/setup-go@v4 +# with: +# go-version: "1.21" +# - name: Display go version +# run: go version +# - name: Install runsim +# run: go install github.com/cosmos/tools/cmd/runsim@v1.0.0 +# - name: Add Go bin to PATH +# run: echo "${HOME}/go/bin" >> $GITHUB_PATH # This adds Go bin to PATH for subsequent steps +# - name: Verify runsim installation +# run: runsim --help + +# test-sim-nondeterminism: +# runs-on: ubuntu-latest +# needs: [build] +# timeout-minutes: 60 +# steps: +# - uses: actions/checkout@v3 +# - uses: actions/setup-go@v4 +# with: +# go-version: "1.21" +# check-latest: true +# - uses: actions/cache@v3 +# with: +# path: ~/go/bin +# key: ${{ runner.os }}-go-runsim-binary +# - name: test-sim-import-export +# run: | +# make test-sim-nondeterminism + +# test-sim-import-export: +# runs-on: ubuntu-latest +# needs: [build, install-runsim] +# timeout-minutes: 60 +# steps: +# - uses: actions/checkout@v3 +# - 
uses: actions/setup-go@v3 +# with: +# go-version: "1.21" +# check-latest: true +# - uses: actions/cache@v3.0.8 +# with: +# path: ~/go/bin +# key: ${{ runner.os }}-go-runsim-binary +# - name: test-sim-import-export +# run: | +# make test-sim-import-export + +# test-sim-after-import: +# runs-on: ubuntu-latest +# needs: [build] +# steps: +# - uses: actions/checkout@v3 +# - uses: actions/setup-go@v4 +# with: +# go-version: "1.21" +# check-latest: true +# - uses: actions/cache@v3 +# with: +# path: ~/go/bin +# key: ${{ runner.os }}-go-runsim-binary +# - name: Add Go bin to PATH +# run: echo "${HOME}/go/bin" >> $GITHUB_PATH +# - name: test-sim-after-import +# run: | +# make test-sim-after-import + +# test-sim-multi-seed-short: +# runs-on: ubuntu-latest +# needs: [build] +# timeout-minutes: 60 +# steps: +# - uses: actions/checkout@v3 +# - uses: actions/setup-go@v4 +# with: +# go-version: "1.21" +# check-latest: true +# - uses: actions/cache@v3 +# with: +# path: ~/go/bin +# key: ${{ runner.os }}-go-runsim-binary +# - name: Add Go bin to PATH +# run: echo "${HOME}/go/bin" >> $GITHUB_PATH +# - name: test-sim-multi-seed-short +# run: | +# make test-sim-multi-seed-short From f95ac9bfb1d0de3af866a671c31a4985883c4753 Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Tue, 1 Oct 2024 10:39:15 +0530 Subject: [PATCH 30/34] WIP --- .github/workflows/sims.yml | 8 -------- 1 file changed, 8 deletions(-) diff --git a/.github/workflows/sims.yml b/.github/workflows/sims.yml index 8931610..4d0f659 100644 --- a/.github/workflows/sims.yml +++ b/.github/workflows/sims.yml @@ -17,8 +17,6 @@ jobs: if: "!contains(github.event.head_commit.message, 'skip-sims')" steps: - uses: actions/checkout@v4 - with: - ref: "release/v0.50.x" - uses: actions/setup-go@v5 with: go-version: "1.22" @@ -48,8 +46,6 @@ jobs: timeout-minutes: 60 steps: - uses: actions/checkout@v4 - with: - ref: "release/v0.50.x" - uses: actions/setup-go@v5 with: go-version: "1.22" @@ -67,8 +63,6 @@ jobs: needs: [build, install-runsim] steps: - uses: actions/checkout@v4 - with: - ref: "release/v0.50.x" - uses: actions/setup-go@v5 with: go-version: "1.22" @@ -86,8 +80,6 @@ jobs: needs: [build, install-runsim] steps: - uses: actions/checkout@v4 - with: - ref: "release/v0.50.x" - uses: actions/setup-go@v5 with: go-version: "1.22" From 88ca7c755ccbda161f8a17d6e68137bcfd681aac Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Tue, 1 Oct 2024 10:40:58 +0530 Subject: [PATCH 31/34] WIP --- .github/workflows/sims.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/sims.yml b/.github/workflows/sims.yml index 4d0f659..fff1f9a 100644 --- a/.github/workflows/sims.yml +++ b/.github/workflows/sims.yml @@ -21,7 +21,7 @@ jobs: with: go-version: "1.22" check-latest: true - - run: make build + - run: make -C simapp build install-runsim: permissions: From 60f8433b74bbb0100b85d65a7a2099c9373d6bb0 Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Tue, 1 Oct 2024 10:55:29 +0530 Subject: [PATCH 32/34] WIP --- Makefile | 6 ++--- tools/Makefile | 69 ++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 71 insertions(+), 4 deletions(-) create mode 100644 tools/Makefile diff --git a/Makefile b/Makefile index a66ba5c..5215357 100644 --- a/Makefile +++ b/Makefile @@ -95,7 +95,6 @@ clean: rm -f $(TARGET) - ############################################################################### ### testnet ### ############################################################################### @@ -125,6 +124,8 @@ test-all: test-unit test-ledger-mock test-race 
test-cover TEST_PACKAGES=./... TEST_TARGETS := test-unit test-unit-proto test-ledger-mock test-race test-ledger test-race +include tools/Makefile + # Test runs-specific rules. To add a new test target, just add # a new rule, customise ARGS or TEST_PACKAGES ad libitum, and # append the new rule to the TEST_TARGETS list. @@ -151,9 +152,6 @@ endif .PHONY: run-tests test test-all $(TEST_TARGETS) -runsim: - go install github.com/cosmos/tools/cmd/runsim@v1.0.0 - test-sim-nondeterminism: @echo "Running non-determinism test..." @cd ${CURRENT_DIR}/simapp/app && go test -mod=readonly -run TestAppStateDeterminism -Enabled=true \ diff --git a/tools/Makefile b/tools/Makefile new file mode 100644 index 0000000..e302695 --- /dev/null +++ b/tools/Makefile @@ -0,0 +1,69 @@ +### +# Find OS and Go environment +# GO contains the Go binary +# FS contains the OS file separator +### +ifeq ($(OS),Windows_NT) + GO := $(shell where go.exe 2> NUL) + FS := "\\" +else + GO := $(shell command -v go 2> /dev/null) + FS := "/" +endif + +ifeq ($(GO),) + $(error could not find go. Is it in PATH? $(GO)) +endif + +############################################################################### +### Functions ### +############################################################################### + +go_get = $(if $(findstring Windows_NT,$(OS)),\ +IF NOT EXIST $(GITHUBDIR)$(FS)$(1)$(FS) ( mkdir $(GITHUBDIR)$(FS)$(1) ) else (cd .) &\ +IF NOT EXIST $(GITHUBDIR)$(FS)$(1)$(FS)$(2)$(FS) ( cd $(GITHUBDIR)$(FS)$(1) && git clone https://github.com/$(1)/$(2) ) else (cd .) &\ +,\ +mkdir -p $(GITHUBDIR)$(FS)$(1) &&\ +(test ! -d $(GITHUBDIR)$(FS)$(1)$(FS)$(2) && cd $(GITHUBDIR)$(FS)$(1) && git clone https://github.com/$(1)/$(2)) || true &&\ +)\ +cd $(GITHUBDIR)$(FS)$(1)$(FS)$(2) && git fetch origin && git checkout -q $(3) + +mkfile_path := $(abspath $(lastword $(MAKEFILE_LIST))) +mkfile_dir := $(shell cd $(shell dirname $(mkfile_path)); pwd) + + +############################################################################### +### Tools ### +############################################################################### + +PREFIX ?= /usr/local +BIN ?= $(PREFIX)/bin +UNAME_S ?= $(shell uname -s) +UNAME_M ?= $(shell uname -m) + +GOPATH ?= $(shell $(GO) env GOPATH) +GITHUBDIR := $(GOPATH)$(FS)src$(FS)github.com + +BUF_VERSION ?= 0.11.0 + +TOOLS_DESTDIR ?= $(GOPATH)/bin +RUNSIM = $(TOOLS_DESTDIR)/runsim + +tools: tools-stamp +tools-stamp: runsim + # Create dummy file to satisfy dependency and avoid + # rebuilding when this Makefile target is hit twice + # in a row. + touch $@ + +# Install the runsim binary +runsim: $(RUNSIM) +$(RUNSIM): + @echo "Installing runsim..." 
+ @go install github.com/cosmos/tools/cmd/runsim@v1.0.0 + +tools-clean: + rm -f $(GOLANGCI_LINT) $(RUNSIM) + rm -f tools-stamp + +.PHONY: tools-clean runsim \ No newline at end of file From 2e8d43bb2c75a39de3fa41d1cf6d498a84fc73e7 Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Tue, 1 Oct 2024 11:11:46 +0530 Subject: [PATCH 33/34] add non determinism workflow test --- .github/workflows/sims.yml | 140 +++++-------------------------------- 1 file changed, 18 insertions(+), 122 deletions(-) diff --git a/.github/workflows/sims.yml b/.github/workflows/sims.yml index fff1f9a..32266e2 100644 --- a/.github/workflows/sims.yml +++ b/.github/workflows/sims.yml @@ -40,6 +40,24 @@ jobs: path: ~/go/bin key: ${{ runner.os }}-go-runsim-binary + test-sim-nondeterminism: + runs-on: ubuntu-latest + needs: [build] + timeout-minutes: 60 + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-go@v5 + with: + go-version: "1.22" + check-latest: true + - uses: actions/cache@v4 + with: + path: ~/go/bin + key: ${{ runner.os }}-go-runsim-binary + - name: test-sim-import-export + run: | + make test-sim-nondeterminism + test-sim-import-export: runs-on: ubuntu-latest needs: [build, install-runsim] @@ -91,125 +109,3 @@ jobs: - name: test-sim-multi-seed-short run: | make test-sim-multi-seed-short - -# jobs: -# build: -# permissions: -# contents: read # for actions/checkout to fetch code -# runs-on: ubuntu-latest -# if: "!contains(github.event.head_commit.message, 'skip-sims')" -# steps: -# - uses: actions/checkout@v3 -# - uses: actions/setup-go@v4 -# with: -# go-version: "1.21" -# check-latest: true -# - run: make -C simapp build - -# # install-runsim: -# # runs-on: ubuntu-latest -# # needs: build -# # steps: -# # - uses: actions/setup-go@v3 -# # with: -# # go-version: "1.21" -# # - name: Display go version -# # run: go version -# # - name: Install runsim -# # run: export GO111MODULE="on" && go install github.com/cosmos/tools/cmd/runsim@v1.0.0 -# # - name: Add Go bin to PATH -# # run: echo "${HOME}/go/bin" >> $GITHUB_PATH # This adds the Go bin directory to PATH -# # - uses: actions/cache@v3.0.8 -# # with: -# # path: ~/go/bin -# # key: ${{ runner.os }}-go-runsim-binary - -# install-runsim: -# runs-on: ubuntu-latest -# steps: -# - uses: actions/setup-go@v4 -# with: -# go-version: "1.21" -# - name: Display go version -# run: go version -# - name: Install runsim -# run: go install github.com/cosmos/tools/cmd/runsim@v1.0.0 -# - name: Add Go bin to PATH -# run: echo "${HOME}/go/bin" >> $GITHUB_PATH # This adds Go bin to PATH for subsequent steps -# - name: Verify runsim installation -# run: runsim --help - -# test-sim-nondeterminism: -# runs-on: ubuntu-latest -# needs: [build] -# timeout-minutes: 60 -# steps: -# - uses: actions/checkout@v3 -# - uses: actions/setup-go@v4 -# with: -# go-version: "1.21" -# check-latest: true -# - uses: actions/cache@v3 -# with: -# path: ~/go/bin -# key: ${{ runner.os }}-go-runsim-binary -# - name: test-sim-import-export -# run: | -# make test-sim-nondeterminism - -# test-sim-import-export: -# runs-on: ubuntu-latest -# needs: [build, install-runsim] -# timeout-minutes: 60 -# steps: -# - uses: actions/checkout@v3 -# - uses: actions/setup-go@v3 -# with: -# go-version: "1.21" -# check-latest: true -# - uses: actions/cache@v3.0.8 -# with: -# path: ~/go/bin -# key: ${{ runner.os }}-go-runsim-binary -# - name: test-sim-import-export -# run: | -# make test-sim-import-export - -# test-sim-after-import: -# runs-on: ubuntu-latest -# needs: [build] -# steps: -# - uses: actions/checkout@v3 -# - uses: 
actions/setup-go@v4 -# with: -# go-version: "1.21" -# check-latest: true -# - uses: actions/cache@v3 -# with: -# path: ~/go/bin -# key: ${{ runner.os }}-go-runsim-binary -# - name: Add Go bin to PATH -# run: echo "${HOME}/go/bin" >> $GITHUB_PATH -# - name: test-sim-after-import -# run: | -# make test-sim-after-import - -# test-sim-multi-seed-short: -# runs-on: ubuntu-latest -# needs: [build] -# timeout-minutes: 60 -# steps: -# - uses: actions/checkout@v3 -# - uses: actions/setup-go@v4 -# with: -# go-version: "1.21" -# check-latest: true -# - uses: actions/cache@v3 -# with: -# path: ~/go/bin -# key: ${{ runner.os }}-go-runsim-binary -# - name: Add Go bin to PATH -# run: echo "${HOME}/go/bin" >> $GITHUB_PATH -# - name: test-sim-multi-seed-short -# run: | -# make test-sim-multi-seed-short From f5f4559db80307ec4fb59410d3a6c71f3cece4b6 Mon Sep 17 00:00:00 2001 From: NagaTulasi Date: Tue, 1 Oct 2024 11:51:56 +0530 Subject: [PATCH 34/34] remove comments --- chainclient/create_client.go | 2 -- x/cada/simulation/operation.go | 39 +++++++++++++++++++--------------- 2 files changed, 22 insertions(+), 19 deletions(-) diff --git a/chainclient/create_client.go b/chainclient/create_client.go index bc511e2..78ef0ec 100644 --- a/chainclient/create_client.go +++ b/chainclient/create_client.go @@ -23,8 +23,6 @@ const ( defaultGasLimit = 300000 ) -// var availdHomePath = xfilepath.JoinFromHome(xfilepath.Path("availsdk")) - func NewClientCtx(kr keyring.Keyring, c *cometrpc.HTTP, chainID string, cdc codec.BinaryCodec, homepath string, fromAddress sdk.AccAddress, ) client.Context { diff --git a/x/cada/simulation/operation.go b/x/cada/simulation/operation.go index b6ff89f..488418a 100644 --- a/x/cada/simulation/operation.go +++ b/x/cada/simulation/operation.go @@ -13,7 +13,7 @@ import ( bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper" "github.com/cosmos/cosmos-sdk/x/simulation" cadakeeper "github.com/vitwit/avail-da-module/x/cada/keeper" - availtypes "github.com/vitwit/avail-da-module/x/cada/types" + cadatypes "github.com/vitwit/avail-da-module/x/cada/types" ) const ( @@ -44,31 +44,28 @@ func SimulateMsgUpdateBlobStatus(ak authkeeper.AccountKeeper, bk bankkeeper.Keep r *rand.Rand, app *baseapp.BaseApp, ctx sdk.Context, accs []simtypes.Account, _ string, ) (simtypes.OperationMsg, []simtypes.FutureOperation, error) { ctx = ctx.WithBlockHeight(20) - // Randomly select a sender account + sender, _ := simtypes.RandomAcc(r, accs) - // Ensure the sender has sufficient balance account := ak.GetAccount(ctx, sender.Address) spendable := bk.SpendableCoins(ctx, account.GetAddress()) - // Generate random fees for the transaction fees, err := simtypes.RandomFees(r, ctx, spendable) if err != nil { - return simtypes.NoOpMsg(availtypes.ModuleName, availtypes.TypeMsgUpdateBlobStatus, "unable to generate fees"), nil, err + return simtypes.NoOpMsg(cadatypes.ModuleName, cadatypes.TypeMsgUpdateBlobStatus, "unable to generate fees"), nil, err } - // Prepare a random blob status update - newStatus := true // You can randomize this value as needed - fromBlock := uint64(5) // Example block range start - toBlock := uint64(20) // Example block range end + newStatus := true + fromBlock := uint64(5) + toBlock := uint64(20) availHeight := uint64(120) - ran := availtypes.Range{ + ran := cadatypes.Range{ From: fromBlock, To: toBlock, } - msg := availtypes.NewMsgUpdateBlobStatus( + msg := cadatypes.NewMsgUpdateBlobStatus( sender.Address.String(), ran, availHeight, @@ -76,13 +73,22 @@ func SimulateMsgUpdateBlobStatus(ak 
authkeeper.AccountKeeper, bk bankkeeper.Keep ) store := ctx.KVStore(k.GetStoreKey()) - cadakeeper.UpdateEndHeight(ctx, store, uint64(20)) - cadakeeper.UpdateProvenHeight(ctx, store, uint64(4)) + err = cadakeeper.UpdateEndHeight(ctx, store, uint64(20)) + if err != nil { + return simtypes.NoOpMsg(cadatypes.ModuleName, cadatypes.TypeMsgUpdateBlobStatus, "unable to update end height"), nil, err + } + + err = cadakeeper.UpdateProvenHeight(ctx, store, uint64(4)) + if err != nil { + return simtypes.NoOpMsg(cadatypes.ModuleName, cadatypes.TypeMsgUpdateBlobStatus, "unable to update proven height"), nil, err + } - cadakeeper.UpdateBlobStatus(ctx, store, uint32(1)) + err = cadakeeper.UpdateBlobStatus(ctx, store, uint32(1)) + if err != nil { + return simtypes.NoOpMsg(cadatypes.ModuleName, cadatypes.TypeMsgUpdateBlobStatus, "unable to update status to pending state"), nil, err + } - // Set up the transaction context txCtx := simulation.OperationInput{ R: r, App: app, @@ -92,10 +98,9 @@ func SimulateMsgUpdateBlobStatus(ak authkeeper.AccountKeeper, bk bankkeeper.Keep Context: ctx, SimAccount: sender, AccountKeeper: ak, - ModuleName: availtypes.ModuleName, + ModuleName: cadatypes.ModuleName, } - // Generate and deliver the transaction return simulation.GenAndDeliverTx(txCtx, fees) } }
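
A quick way to exercise the simulation plumbing added in this series locally — a minimal sketch, assuming the repository root as the working directory, a recent Go toolchain on PATH, and the Makefile / tools/Makefile targets exactly as introduced in the patches above:

```sh
# runsim is installed via the tools/Makefile target that the root Makefile now includes
make runsim

# these are the same targets the sims.yml workflow drives, now rooted at simapp/app
make test-sim-nondeterminism
make test-sim-import-export
make test-sim-after-import
make test-sim-multi-seed-short
```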