diff --git a/CHANGELOG.md b/CHANGELOG.md
index e101c5d85..3c1e2f457 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -61,6 +61,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
#### Nibiru EVM
- [#1837](https://github.com/NibiruChain/nibiru/pull/1837) - feat(eth): protos, eth types, and evm module types
+- [#1838](https://github.com/NibiruChain/nibiru/pull/1838) - feat(eth): Go-ethereum, crypto, encoding, and unit tests for evm/types
#### Dapp modules: perp, spot, etc
diff --git a/cmd/ethclient/const.go b/cmd/ethclient/const.go
new file mode 100644
index 000000000..b3d7a3038
--- /dev/null
+++ b/cmd/ethclient/const.go
@@ -0,0 +1,3 @@
+package ethclient
+
+const Bech32Prefix = "nibi"
diff --git a/eth/crypto/codec/amino.go b/eth/crypto/codec/amino.go
new file mode 100644
index 000000000..39d87afb8
--- /dev/null
+++ b/eth/crypto/codec/amino.go
@@ -0,0 +1,27 @@
+// Copyright 2023 Unique Divine and Nibi, Inc.
+package codec
+
+import (
+ "github.com/cosmos/cosmos-sdk/codec"
+ "github.com/cosmos/cosmos-sdk/codec/legacy"
+ cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec"
+ "github.com/cosmos/cosmos-sdk/crypto/keyring"
+
+ "github.com/NibiruChain/nibiru/eth/crypto/ethsecp256k1"
+)
+
+// RegisterCrypto registers all crypto dependency types with the provided Amino
+// codec.
+func RegisterCrypto(cdc *codec.LegacyAmino) {
+ cdc.RegisterConcrete(ðsecp256k1.PubKey{},
+ ethsecp256k1.PubKeyName, nil)
+ cdc.RegisterConcrete(ðsecp256k1.PrivKey{},
+ ethsecp256k1.PrivKeyName, nil)
+
+ keyring.RegisterLegacyAminoCodec(cdc)
+ cryptocodec.RegisterCrypto(cdc)
+
+ // NOTE: update SDK's amino codec to include the ethsecp256k1 keys.
+ // DO NOT REMOVE unless deprecated on the SDK.
+ legacy.Cdc = cdc
+}
diff --git a/eth/crypto/codec/codec.go b/eth/crypto/codec/codec.go
new file mode 100644
index 000000000..9ab6fdbc5
--- /dev/null
+++ b/eth/crypto/codec/codec.go
@@ -0,0 +1,15 @@
+// Copyright 2023 Unique Divine and Nibi, Inc.
+package codec
+
+import (
+ codectypes "github.com/cosmos/cosmos-sdk/codec/types"
+ cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
+
+ "github.com/NibiruChain/nibiru/eth/crypto/ethsecp256k1"
+)
+
+// RegisterInterfaces registers the cryptographic key concrete types.
+func RegisterInterfaces(registry codectypes.InterfaceRegistry) {
+ registry.RegisterImplementations((*cryptotypes.PubKey)(nil), ðsecp256k1.PubKey{})
+ registry.RegisterImplementations((*cryptotypes.PrivKey)(nil), ðsecp256k1.PrivKey{})
+}
diff --git a/eth/crypto/ethsecp256k1/benchmark_test.go b/eth/crypto/ethsecp256k1/benchmark_test.go
new file mode 100644
index 000000000..815cc5832
--- /dev/null
+++ b/eth/crypto/ethsecp256k1/benchmark_test.go
@@ -0,0 +1,34 @@
+package ethsecp256k1
+
+import (
+ "fmt"
+ "testing"
+)
+
+func BenchmarkGenerateKey(b *testing.B) {
+ b.ReportAllocs()
+ for i := 0; i < b.N; i++ {
+ if _, err := GenerateKey(); err != nil {
+ b.Fatal(err)
+ }
+ }
+}
+
+func BenchmarkPubKey_VerifySignature(b *testing.B) {
+ privKey, err := GenerateKey()
+ if err != nil {
+ b.Fatal(err)
+ }
+ pubKey := privKey.PubKey()
+
+ b.ResetTimer()
+ b.ReportAllocs()
+ for i := 0; i < b.N; i++ {
+ msg := []byte(fmt.Sprintf("%10d", i))
+ sig, err := privKey.Sign(msg)
+ if err != nil {
+ b.Fatal(err)
+ }
+ pubKey.VerifySignature(msg, sig)
+ }
+}
diff --git a/eth/crypto/ethsecp256k1/ethsecp256k1.go b/eth/crypto/ethsecp256k1/ethsecp256k1.go
new file mode 100644
index 000000000..73f44fd75
--- /dev/null
+++ b/eth/crypto/ethsecp256k1/ethsecp256k1.go
@@ -0,0 +1,248 @@
+// Copyright 2023 Unique Divine and Nibi, Inc.
+
+package ethsecp256k1
+
+import (
+ "bytes"
+ "crypto/ecdsa"
+ "crypto/subtle"
+ "fmt"
+
+ errorsmod "cosmossdk.io/errors"
+ "github.com/NibiruChain/nibiru/eth/ethereum/eip712"
+ tmcrypto "github.com/cometbft/cometbft/crypto"
+ "github.com/cosmos/cosmos-sdk/codec"
+ cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
+ errortypes "github.com/cosmos/cosmos-sdk/types/errors"
+ "github.com/ethereum/go-ethereum/crypto"
+)
+
+const (
+ // PrivKeySize defines the size of the PrivKey bytes
+ PrivKeySize = 32
+ // PubKeySize defines the size of the PubKey bytes
+ PubKeySize = 33
+ // KeyType is the string constant for the Secp256k1 algorithm
+ KeyType = "eth_secp256k1"
+)
+
+// Amino encoding names
+const (
+ // PrivKeyName defines the amino encoding name for the EthSecp256k1 private key
+ PrivKeyName = "ethermint/PrivKeyEthSecp256k1"
+ // PubKeyName defines the amino encoding name for the EthSecp256k1 public key
+ PubKeyName = "ethermint/PubKeyEthSecp256k1"
+)
+
+// ----------------------------------------------------------------------------
+// secp256k1 Private Key
+
+var (
+ _ cryptotypes.PrivKey = &PrivKey{}
+ _ codec.AminoMarshaler = &PrivKey{}
+)
+
+// GenerateKey generates a new random private key. It returns an error upon
+// failure.
+func GenerateKey() (*PrivKey, error) {
+ priv, err := crypto.GenerateKey()
+ if err != nil {
+ return nil, err
+ }
+
+ return &PrivKey{
+ Key: crypto.FromECDSA(priv),
+ }, nil
+}
+
+// Bytes returns the byte representation of the ECDSA Private Key.
+func (privKey PrivKey) Bytes() []byte {
+ bz := make([]byte, len(privKey.Key))
+ copy(bz, privKey.Key)
+
+ return bz
+}
+
+// PubKey returns the ECDSA private key's public key. If the privkey is not valid
+// it returns a nil value.
+func (privKey PrivKey) PubKey() cryptotypes.PubKey {
+ ecdsaPrivKey, err := privKey.ToECDSA()
+ if err != nil {
+ return nil
+ }
+
+ return &PubKey{
+ Key: crypto.CompressPubkey(&ecdsaPrivKey.PublicKey),
+ }
+}
+
+// Equals returns true if two ECDSA private keys are equal and false otherwise.
+func (privKey PrivKey) Equals(other cryptotypes.LedgerPrivKey) bool {
+ return privKey.Type() == other.Type() && subtle.ConstantTimeCompare(privKey.Bytes(), other.Bytes()) == 1
+}
+
+// Type returns eth_secp256k1
+func (privKey PrivKey) Type() string {
+ return KeyType
+}
+
+// MarshalAmino overrides Amino binary marshaling.
+func (privKey PrivKey) MarshalAmino() ([]byte, error) {
+ return privKey.Key, nil
+}
+
+// UnmarshalAmino overrides Amino binary unmarshaling.
+func (privKey *PrivKey) UnmarshalAmino(bz []byte) error {
+ if len(bz) != PrivKeySize {
+ return fmt.Errorf("invalid privkey size, expected %d got %d", PrivKeySize, len(bz))
+ }
+ privKey.Key = bz
+
+ return nil
+}
+
+// MarshalAminoJSON overrides Amino JSON marshaling.
+func (privKey PrivKey) MarshalAminoJSON() ([]byte, error) {
+ // When we marshal to Amino JSON, we don't marshal the "key" field itself,
+ // just its contents (i.e. the key bytes).
+ return privKey.MarshalAmino()
+}
+
+// UnmarshalAminoJSON overrides Amino JSON unmarshaling.
+func (privKey *PrivKey) UnmarshalAminoJSON(bz []byte) error {
+ return privKey.UnmarshalAmino(bz)
+}
+
+// Sign creates a recoverable ECDSA signature on the secp256k1 curve over the
+// provided hash of the message. The produced signature is 65 bytes
+// where the last byte contains the recovery ID.
+func (privKey PrivKey) Sign(digestBz []byte) ([]byte, error) {
+ // TODO: remove
+ if len(digestBz) != crypto.DigestLength {
+ digestBz = crypto.Keccak256Hash(digestBz).Bytes()
+ }
+
+ key, err := privKey.ToECDSA()
+ if err != nil {
+ return nil, err
+ }
+
+ return crypto.Sign(digestBz, key)
+}
+
+// ToECDSA returns the ECDSA private key as a reference to ecdsa.PrivateKey type.
+func (privKey PrivKey) ToECDSA() (*ecdsa.PrivateKey, error) {
+ return crypto.ToECDSA(privKey.Bytes())
+}
+
+// ----------------------------------------------------------------------------
+// secp256k1 Public Key
+
+var (
+ _ cryptotypes.PubKey = &PubKey{}
+ _ codec.AminoMarshaler = &PubKey{}
+)
+
+// Address returns the address of the ECDSA public key.
+// The function will return an empty address if the public key is invalid.
+func (pubKey PubKey) Address() tmcrypto.Address {
+ pubk, err := crypto.DecompressPubkey(pubKey.Key)
+ if err != nil {
+ return nil
+ }
+
+ return tmcrypto.Address(crypto.PubkeyToAddress(*pubk).Bytes())
+}
+
+// Bytes returns the raw bytes of the ECDSA public key.
+func (pubKey PubKey) Bytes() []byte {
+ bz := make([]byte, len(pubKey.Key))
+ copy(bz, pubKey.Key)
+
+ return bz
+}
+
+// String implements the fmt.Stringer interface.
+func (pubKey PubKey) String() string {
+ return fmt.Sprintf("EthPubKeySecp256k1{%X}", pubKey.Key)
+}
+
+// Type returns eth_secp256k1
+func (pubKey PubKey) Type() string {
+ return KeyType
+}
+
+// Equals returns true if the pubkey type is the same and their bytes are deeply equal.
+func (pubKey PubKey) Equals(other cryptotypes.PubKey) bool {
+ return pubKey.Type() == other.Type() && bytes.Equal(pubKey.Bytes(), other.Bytes())
+}
+
+// MarshalAmino overrides Amino binary marshaling.
+func (pubKey PubKey) MarshalAmino() ([]byte, error) {
+ return pubKey.Key, nil
+}
+
+// UnmarshalAmino overrides Amino binary unmarshaling.
+func (pubKey *PubKey) UnmarshalAmino(bz []byte) error {
+ if len(bz) != PubKeySize {
+ return errorsmod.Wrapf(errortypes.ErrInvalidPubKey, "invalid pubkey size, expected %d, got %d", PubKeySize, len(bz))
+ }
+ pubKey.Key = bz
+
+ return nil
+}
+
+// MarshalAminoJSON overrides Amino JSON marshaling.
+func (pubKey PubKey) MarshalAminoJSON() ([]byte, error) {
+ // When we marshal to Amino JSON, we don't marshal the "key" field itself,
+ // just its contents (i.e. the key bytes).
+ return pubKey.MarshalAmino()
+}
+
+// UnmarshalAminoJSON overrides Amino JSON unmarshaling.
+func (pubKey *PubKey) UnmarshalAminoJSON(bz []byte) error {
+ return pubKey.UnmarshalAmino(bz)
+}
+
+// VerifySignature verifies that the ECDSA public key created a given signature over
+// the provided message. It will calculate the Keccak256 hash of the message
+// prior to verification and approve verification if the signature can be verified
+// from either the original message or its EIP-712 representation.
+//
+// CONTRACT: The signature should be in [R || S] format.
+func (pubKey PubKey) VerifySignature(msg, sig []byte) bool {
+ return pubKey.verifySignatureECDSA(msg, sig) || pubKey.verifySignatureAsEIP712(msg, sig)
+}
+
+// Verifies the signature as an EIP-712 signature by first converting the message payload
+// to EIP-712 object bytes, then performing ECDSA verification on the hash. This is to support
+// signing a Cosmos payload using EIP-712.
+func (pubKey PubKey) verifySignatureAsEIP712(msg, sig []byte) bool {
+ eip712Bytes, err := eip712.GetEIP712BytesForMsg(msg)
+ if err != nil {
+ return false
+ }
+
+ if pubKey.verifySignatureECDSA(eip712Bytes, sig) {
+ return true
+ }
+
+ // Try verifying the signature using the legacy EIP-712 encoding
+ legacyEIP712Bytes, err := eip712.LegacyGetEIP712BytesForMsg(msg)
+ if err != nil {
+ return false
+ }
+
+ return pubKey.verifySignatureECDSA(legacyEIP712Bytes, sig)
+}
+
+// Perform standard ECDSA signature verification for the given raw bytes and signature.
+func (pubKey PubKey) verifySignatureECDSA(msg, sig []byte) bool {
+ if len(sig) == crypto.SignatureLength {
+ // remove recovery ID (V) if contained in the signature
+ sig = sig[:len(sig)-1]
+ }
+
+ // the signature needs to be in [R || S] format when provided to VerifySignature
+ return crypto.VerifySignature(pubKey.Key, crypto.Keccak256Hash(msg).Bytes(), sig)
+}
diff --git a/eth/crypto/ethsecp256k1/ethsecp256k1_test.go b/eth/crypto/ethsecp256k1/ethsecp256k1_test.go
new file mode 100644
index 000000000..044f1c20b
--- /dev/null
+++ b/eth/crypto/ethsecp256k1/ethsecp256k1_test.go
@@ -0,0 +1,124 @@
+package ethsecp256k1
+
+import (
+ "encoding/base64"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+
+ "github.com/cosmos/cosmos-sdk/codec"
+
+ "github.com/ethereum/go-ethereum/crypto"
+ "github.com/ethereum/go-ethereum/crypto/secp256k1"
+
+ cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
+)
+
+func TestPrivKey(t *testing.T) {
+ // validate type and equality
+ privKey, err := GenerateKey()
+ require.NoError(t, err)
+ require.Implements(t, (*cryptotypes.PrivKey)(nil), privKey)
+
+ // validate inequality
+ privKey2, err := GenerateKey()
+ require.NoError(t, err)
+ require.False(t, privKey.Equals(privKey2))
+
+ // validate Ethereum address equality
+ addr := privKey.PubKey().Address()
+ key, err := privKey.ToECDSA()
+ require.NoError(t, err)
+ expectedAddr := crypto.PubkeyToAddress(key.PublicKey)
+ require.Equal(t, expectedAddr.Bytes(), addr.Bytes())
+
+ // validate we can sign some bytes
+ msg := []byte("hello world")
+ sigHash := crypto.Keccak256Hash(msg)
+ expectedSig, err := secp256k1.Sign(sigHash.Bytes(), privKey.Bytes())
+ require.NoError(t, err)
+
+ sig, err := privKey.Sign(sigHash.Bytes())
+ require.NoError(t, err)
+ require.Equal(t, expectedSig, sig)
+}
+
+func TestPrivKey_PubKey(t *testing.T) {
+ privKey, err := GenerateKey()
+ require.NoError(t, err)
+
+ // validate type and equality
+ pubKey := &PubKey{
+ Key: privKey.PubKey().Bytes(),
+ }
+ require.Implements(t, (*cryptotypes.PubKey)(nil), pubKey)
+
+ // validate inequality
+ privKey2, err := GenerateKey()
+ require.NoError(t, err)
+ require.False(t, pubKey.Equals(privKey2.PubKey()))
+
+ // validate signature
+ msg := []byte("hello world")
+ sigHash := crypto.Keccak256Hash(msg)
+ sig, err := privKey.Sign(sigHash.Bytes())
+ require.NoError(t, err)
+
+ res := pubKey.VerifySignature(msg, sig)
+ require.True(t, res)
+}
+
+func TestMarshalAmino(t *testing.T) {
+ aminoCdc := codec.NewLegacyAmino()
+ privKey, err := GenerateKey()
+ require.NoError(t, err)
+
+ pubKey := privKey.PubKey().(*PubKey)
+
+ testCases := []struct {
+ desc string
+ msg codec.AminoMarshaler
+ typ interface{}
+ expBinary []byte
+ expJSON string
+ }{
+ {
+ "ethsecp256k1 private key",
+ privKey,
+ &PrivKey{},
+ append([]byte{32}, privKey.Bytes()...), // Length-prefixed.
+ "\"" + base64.StdEncoding.EncodeToString(privKey.Bytes()) + "\"",
+ },
+ {
+ "ethsecp256k1 public key",
+ pubKey,
+ &PubKey{},
+ append([]byte{33}, pubKey.Bytes()...), // Length-prefixed.
+ "\"" + base64.StdEncoding.EncodeToString(pubKey.Bytes()) + "\"",
+ },
+ }
+
+ for _, tc := range testCases {
+ t.Run(tc.desc, func(t *testing.T) {
+ // Do a round trip of encoding/decoding binary.
+ bz, err := aminoCdc.Marshal(tc.msg)
+ require.NoError(t, err)
+ require.Equal(t, tc.expBinary, bz)
+
+ err = aminoCdc.Unmarshal(bz, tc.typ)
+ require.NoError(t, err)
+
+ require.Equal(t, tc.msg, tc.typ)
+
+ // Do a round trip of encoding/decoding JSON.
+ bz, err = aminoCdc.MarshalJSON(tc.msg)
+ require.NoError(t, err)
+ require.Equal(t, tc.expJSON, string(bz))
+
+ err = aminoCdc.UnmarshalJSON(bz, tc.typ)
+ require.NoError(t, err)
+
+ require.Equal(t, tc.msg, tc.typ)
+ })
+ }
+}
diff --git a/eth/crypto/ethsecp256k1/keys.pb.go b/eth/crypto/ethsecp256k1/keys.pb.go
new file mode 100644
index 000000000..93ec27286
--- /dev/null
+++ b/eth/crypto/ethsecp256k1/keys.pb.go
@@ -0,0 +1,500 @@
+// Code generated by protoc-gen-gogo. DO NOT EDIT.
+// source: ethermint/crypto/v1/ethsecp256k1/keys.proto
+
+package ethsecp256k1
+
+import (
+ fmt "fmt"
+ _ "github.com/cosmos/gogoproto/gogoproto"
+ proto "github.com/cosmos/gogoproto/proto"
+ io "io"
+ math "math"
+ math_bits "math/bits"
+)
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package
+
+// PubKey defines a type alias for an ecdsa.PublicKey that implements
+// Tendermint's PubKey interface. It represents the 33-byte compressed public
+// key format.
+type PubKey struct {
+ // key is the public key in byte form
+ Key []byte `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"`
+}
+
+func (m *PubKey) Reset() { *m = PubKey{} }
+func (*PubKey) ProtoMessage() {}
+func (*PubKey) Descriptor() ([]byte, []int) {
+ return fileDescriptor_0c10cadcf35beb64, []int{0}
+}
+func (m *PubKey) XXX_Unmarshal(b []byte) error {
+ return m.Unmarshal(b)
+}
+func (m *PubKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ if deterministic {
+ return xxx_messageInfo_PubKey.Marshal(b, m, deterministic)
+ } else {
+ b = b[:cap(b)]
+ n, err := m.MarshalToSizedBuffer(b)
+ if err != nil {
+ return nil, err
+ }
+ return b[:n], nil
+ }
+}
+func (m *PubKey) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_PubKey.Merge(m, src)
+}
+func (m *PubKey) XXX_Size() int {
+ return m.Size()
+}
+func (m *PubKey) XXX_DiscardUnknown() {
+ xxx_messageInfo_PubKey.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_PubKey proto.InternalMessageInfo
+
+func (m *PubKey) GetKey() []byte {
+ if m != nil {
+ return m.Key
+ }
+ return nil
+}
+
+// PrivKey defines a type alias for an ecdsa.PrivateKey that implements
+// Tendermint's PrivateKey interface.
+type PrivKey struct {
+ // key is the private key in byte form
+ Key []byte `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"`
+}
+
+func (m *PrivKey) Reset() { *m = PrivKey{} }
+func (m *PrivKey) String() string { return proto.CompactTextString(m) }
+func (*PrivKey) ProtoMessage() {}
+func (*PrivKey) Descriptor() ([]byte, []int) {
+ return fileDescriptor_0c10cadcf35beb64, []int{1}
+}
+func (m *PrivKey) XXX_Unmarshal(b []byte) error {
+ return m.Unmarshal(b)
+}
+func (m *PrivKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ if deterministic {
+ return xxx_messageInfo_PrivKey.Marshal(b, m, deterministic)
+ } else {
+ b = b[:cap(b)]
+ n, err := m.MarshalToSizedBuffer(b)
+ if err != nil {
+ return nil, err
+ }
+ return b[:n], nil
+ }
+}
+func (m *PrivKey) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_PrivKey.Merge(m, src)
+}
+func (m *PrivKey) XXX_Size() int {
+ return m.Size()
+}
+func (m *PrivKey) XXX_DiscardUnknown() {
+ xxx_messageInfo_PrivKey.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_PrivKey proto.InternalMessageInfo
+
+func (m *PrivKey) GetKey() []byte {
+ if m != nil {
+ return m.Key
+ }
+ return nil
+}
+
+func init() {
+ proto.RegisterType((*PubKey)(nil), "ethermint.crypto.v1.ethsecp256k1.PubKey")
+ proto.RegisterType((*PrivKey)(nil), "ethermint.crypto.v1.ethsecp256k1.PrivKey")
+}
+
+func init() {
+ proto.RegisterFile("ethermint/crypto/v1/ethsecp256k1/keys.proto", fileDescriptor_0c10cadcf35beb64)
+}
+
+var fileDescriptor_0c10cadcf35beb64 = []byte{
+ // 197 bytes of a gzipped FileDescriptorProto
+ 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xd2, 0x4e, 0x2d, 0xc9, 0x48,
+ 0x2d, 0xca, 0xcd, 0xcc, 0x2b, 0xd1, 0x4f, 0x2e, 0xaa, 0x2c, 0x28, 0xc9, 0xd7, 0x2f, 0x33, 0xd4,
+ 0x4f, 0x2d, 0xc9, 0x28, 0x4e, 0x4d, 0x2e, 0x30, 0x32, 0x35, 0xcb, 0x36, 0xd4, 0xcf, 0x4e, 0xad,
+ 0x2c, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x52, 0x80, 0x2b, 0xd6, 0x83, 0x28, 0xd6, 0x2b,
+ 0x33, 0xd4, 0x43, 0x56, 0x2c, 0x25, 0x92, 0x9e, 0x9f, 0x9e, 0x0f, 0x56, 0xac, 0x0f, 0x62, 0x41,
+ 0xf4, 0x29, 0x29, 0x70, 0xb1, 0x05, 0x94, 0x26, 0x79, 0xa7, 0x56, 0x0a, 0x09, 0x70, 0x31, 0x67,
+ 0xa7, 0x56, 0x4a, 0x30, 0x2a, 0x30, 0x6a, 0xf0, 0x04, 0x81, 0x98, 0x56, 0x2c, 0x33, 0x16, 0xc8,
+ 0x33, 0x28, 0x49, 0x73, 0xb1, 0x07, 0x14, 0x65, 0x96, 0x61, 0x55, 0xe2, 0xe4, 0x71, 0xe2, 0x91,
+ 0x1c, 0xe3, 0x85, 0x47, 0x72, 0x8c, 0x0f, 0x1e, 0xc9, 0x31, 0x4e, 0x78, 0x2c, 0xc7, 0x70, 0xe1,
+ 0xb1, 0x1c, 0xc3, 0x8d, 0xc7, 0x72, 0x0c, 0x51, 0x7a, 0xe9, 0x99, 0x25, 0x19, 0xa5, 0x49, 0x7a,
+ 0xc9, 0xf9, 0xb9, 0xfa, 0xa9, 0x65, 0xb9, 0xf9, 0xc5, 0x50, 0xb2, 0xcc, 0xd0, 0x0c, 0xe6, 0x1d,
+ 0x64, 0xe7, 0x25, 0xb1, 0x81, 0xdd, 0x63, 0x0c, 0x08, 0x00, 0x00, 0xff, 0xff, 0x03, 0x69, 0xeb,
+ 0xbb, 0xf6, 0x00, 0x00, 0x00,
+}
+
+func (m *PubKey) Marshal() (dAtA []byte, err error) {
+ size := m.Size()
+ dAtA = make([]byte, size)
+ n, err := m.MarshalToSizedBuffer(dAtA[:size])
+ if err != nil {
+ return nil, err
+ }
+ return dAtA[:n], nil
+}
+
+func (m *PubKey) MarshalTo(dAtA []byte) (int, error) {
+ size := m.Size()
+ return m.MarshalToSizedBuffer(dAtA[:size])
+}
+
+func (m *PubKey) MarshalToSizedBuffer(dAtA []byte) (int, error) {
+ i := len(dAtA)
+ _ = i
+ var l int
+ _ = l
+ if len(m.Key) > 0 {
+ i -= len(m.Key)
+ copy(dAtA[i:], m.Key)
+ i = encodeVarintKeys(dAtA, i, uint64(len(m.Key)))
+ i--
+ dAtA[i] = 0xa
+ }
+ return len(dAtA) - i, nil
+}
+
+func (m *PrivKey) Marshal() (dAtA []byte, err error) {
+ size := m.Size()
+ dAtA = make([]byte, size)
+ n, err := m.MarshalToSizedBuffer(dAtA[:size])
+ if err != nil {
+ return nil, err
+ }
+ return dAtA[:n], nil
+}
+
+func (m *PrivKey) MarshalTo(dAtA []byte) (int, error) {
+ size := m.Size()
+ return m.MarshalToSizedBuffer(dAtA[:size])
+}
+
+func (m *PrivKey) MarshalToSizedBuffer(dAtA []byte) (int, error) {
+ i := len(dAtA)
+ _ = i
+ var l int
+ _ = l
+ if len(m.Key) > 0 {
+ i -= len(m.Key)
+ copy(dAtA[i:], m.Key)
+ i = encodeVarintKeys(dAtA, i, uint64(len(m.Key)))
+ i--
+ dAtA[i] = 0xa
+ }
+ return len(dAtA) - i, nil
+}
+
+func encodeVarintKeys(dAtA []byte, offset int, v uint64) int {
+ offset -= sovKeys(v)
+ base := offset
+ for v >= 1<<7 {
+ dAtA[offset] = uint8(v&0x7f | 0x80)
+ v >>= 7
+ offset++
+ }
+ dAtA[offset] = uint8(v)
+ return base
+}
+func (m *PubKey) Size() (n int) {
+ if m == nil {
+ return 0
+ }
+ var l int
+ _ = l
+ l = len(m.Key)
+ if l > 0 {
+ n += 1 + l + sovKeys(uint64(l))
+ }
+ return n
+}
+
+func (m *PrivKey) Size() (n int) {
+ if m == nil {
+ return 0
+ }
+ var l int
+ _ = l
+ l = len(m.Key)
+ if l > 0 {
+ n += 1 + l + sovKeys(uint64(l))
+ }
+ return n
+}
+
+func sovKeys(x uint64) (n int) {
+ return (math_bits.Len64(x|1) + 6) / 7
+}
+func sozKeys(x uint64) (n int) {
+ return sovKeys(uint64((x << 1) ^ uint64((int64(x) >> 63))))
+}
+func (m *PubKey) Unmarshal(dAtA []byte) error {
+ l := len(dAtA)
+ iNdEx := 0
+ for iNdEx < l {
+ preIndex := iNdEx
+ var wire uint64
+ for shift := uint(0); ; shift += 7 {
+ if shift >= 64 {
+ return ErrIntOverflowKeys
+ }
+ if iNdEx >= l {
+ return io.ErrUnexpectedEOF
+ }
+ b := dAtA[iNdEx]
+ iNdEx++
+ wire |= uint64(b&0x7F) << shift
+ if b < 0x80 {
+ break
+ }
+ }
+ fieldNum := int32(wire >> 3)
+ wireType := int(wire & 0x7)
+ if wireType == 4 {
+ return fmt.Errorf("proto: PubKey: wiretype end group for non-group")
+ }
+ if fieldNum <= 0 {
+ return fmt.Errorf("proto: PubKey: illegal tag %d (wire type %d)", fieldNum, wire)
+ }
+ switch fieldNum {
+ case 1:
+ if wireType != 2 {
+ return fmt.Errorf("proto: wrong wireType = %d for field Key", wireType)
+ }
+ var byteLen int
+ for shift := uint(0); ; shift += 7 {
+ if shift >= 64 {
+ return ErrIntOverflowKeys
+ }
+ if iNdEx >= l {
+ return io.ErrUnexpectedEOF
+ }
+ b := dAtA[iNdEx]
+ iNdEx++
+ byteLen |= int(b&0x7F) << shift
+ if b < 0x80 {
+ break
+ }
+ }
+ if byteLen < 0 {
+ return ErrInvalidLengthKeys
+ }
+ postIndex := iNdEx + byteLen
+ if postIndex < 0 {
+ return ErrInvalidLengthKeys
+ }
+ if postIndex > l {
+ return io.ErrUnexpectedEOF
+ }
+ m.Key = append(m.Key[:0], dAtA[iNdEx:postIndex]...)
+ if m.Key == nil {
+ m.Key = []byte{}
+ }
+ iNdEx = postIndex
+ default:
+ iNdEx = preIndex
+ skippy, err := skipKeys(dAtA[iNdEx:])
+ if err != nil {
+ return err
+ }
+ if (skippy < 0) || (iNdEx+skippy) < 0 {
+ return ErrInvalidLengthKeys
+ }
+ if (iNdEx + skippy) > l {
+ return io.ErrUnexpectedEOF
+ }
+ iNdEx += skippy
+ }
+ }
+
+ if iNdEx > l {
+ return io.ErrUnexpectedEOF
+ }
+ return nil
+}
+func (m *PrivKey) Unmarshal(dAtA []byte) error {
+ l := len(dAtA)
+ iNdEx := 0
+ for iNdEx < l {
+ preIndex := iNdEx
+ var wire uint64
+ for shift := uint(0); ; shift += 7 {
+ if shift >= 64 {
+ return ErrIntOverflowKeys
+ }
+ if iNdEx >= l {
+ return io.ErrUnexpectedEOF
+ }
+ b := dAtA[iNdEx]
+ iNdEx++
+ wire |= uint64(b&0x7F) << shift
+ if b < 0x80 {
+ break
+ }
+ }
+ fieldNum := int32(wire >> 3)
+ wireType := int(wire & 0x7)
+ if wireType == 4 {
+ return fmt.Errorf("proto: PrivKey: wiretype end group for non-group")
+ }
+ if fieldNum <= 0 {
+ return fmt.Errorf("proto: PrivKey: illegal tag %d (wire type %d)", fieldNum, wire)
+ }
+ switch fieldNum {
+ case 1:
+ if wireType != 2 {
+ return fmt.Errorf("proto: wrong wireType = %d for field Key", wireType)
+ }
+ var byteLen int
+ for shift := uint(0); ; shift += 7 {
+ if shift >= 64 {
+ return ErrIntOverflowKeys
+ }
+ if iNdEx >= l {
+ return io.ErrUnexpectedEOF
+ }
+ b := dAtA[iNdEx]
+ iNdEx++
+ byteLen |= int(b&0x7F) << shift
+ if b < 0x80 {
+ break
+ }
+ }
+ if byteLen < 0 {
+ return ErrInvalidLengthKeys
+ }
+ postIndex := iNdEx + byteLen
+ if postIndex < 0 {
+ return ErrInvalidLengthKeys
+ }
+ if postIndex > l {
+ return io.ErrUnexpectedEOF
+ }
+ m.Key = append(m.Key[:0], dAtA[iNdEx:postIndex]...)
+ if m.Key == nil {
+ m.Key = []byte{}
+ }
+ iNdEx = postIndex
+ default:
+ iNdEx = preIndex
+ skippy, err := skipKeys(dAtA[iNdEx:])
+ if err != nil {
+ return err
+ }
+ if (skippy < 0) || (iNdEx+skippy) < 0 {
+ return ErrInvalidLengthKeys
+ }
+ if (iNdEx + skippy) > l {
+ return io.ErrUnexpectedEOF
+ }
+ iNdEx += skippy
+ }
+ }
+
+ if iNdEx > l {
+ return io.ErrUnexpectedEOF
+ }
+ return nil
+}
+func skipKeys(dAtA []byte) (n int, err error) {
+ l := len(dAtA)
+ iNdEx := 0
+ depth := 0
+ for iNdEx < l {
+ var wire uint64
+ for shift := uint(0); ; shift += 7 {
+ if shift >= 64 {
+ return 0, ErrIntOverflowKeys
+ }
+ if iNdEx >= l {
+ return 0, io.ErrUnexpectedEOF
+ }
+ b := dAtA[iNdEx]
+ iNdEx++
+ wire |= (uint64(b) & 0x7F) << shift
+ if b < 0x80 {
+ break
+ }
+ }
+ wireType := int(wire & 0x7)
+ switch wireType {
+ case 0:
+ for shift := uint(0); ; shift += 7 {
+ if shift >= 64 {
+ return 0, ErrIntOverflowKeys
+ }
+ if iNdEx >= l {
+ return 0, io.ErrUnexpectedEOF
+ }
+ iNdEx++
+ if dAtA[iNdEx-1] < 0x80 {
+ break
+ }
+ }
+ case 1:
+ iNdEx += 8
+ case 2:
+ var length int
+ for shift := uint(0); ; shift += 7 {
+ if shift >= 64 {
+ return 0, ErrIntOverflowKeys
+ }
+ if iNdEx >= l {
+ return 0, io.ErrUnexpectedEOF
+ }
+ b := dAtA[iNdEx]
+ iNdEx++
+ length |= (int(b) & 0x7F) << shift
+ if b < 0x80 {
+ break
+ }
+ }
+ if length < 0 {
+ return 0, ErrInvalidLengthKeys
+ }
+ iNdEx += length
+ case 3:
+ depth++
+ case 4:
+ if depth == 0 {
+ return 0, ErrUnexpectedEndOfGroupKeys
+ }
+ depth--
+ case 5:
+ iNdEx += 4
+ default:
+ return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
+ }
+ if iNdEx < 0 {
+ return 0, ErrInvalidLengthKeys
+ }
+ if depth == 0 {
+ return iNdEx, nil
+ }
+ }
+ return 0, io.ErrUnexpectedEOF
+}
+
+var (
+ ErrInvalidLengthKeys = fmt.Errorf("proto: negative length found during unmarshaling")
+ ErrIntOverflowKeys = fmt.Errorf("proto: integer overflow")
+ ErrUnexpectedEndOfGroupKeys = fmt.Errorf("proto: unexpected end of group")
+)
diff --git a/eth/crypto/hd/algorithm.go b/eth/crypto/hd/algorithm.go
new file mode 100644
index 000000000..d6541ff08
--- /dev/null
+++ b/eth/crypto/hd/algorithm.go
@@ -0,0 +1,112 @@
+// Copyright 2023 Unique Divine and Nibi, Inc.
+package hd
+
+import (
+ "github.com/btcsuite/btcd/btcutil/hdkeychain"
+ "github.com/btcsuite/btcd/chaincfg"
+ bip39 "github.com/tyler-smith/go-bip39"
+
+ "github.com/ethereum/go-ethereum/accounts"
+ "github.com/ethereum/go-ethereum/crypto"
+
+ "github.com/cosmos/cosmos-sdk/crypto/hd"
+ "github.com/cosmos/cosmos-sdk/crypto/keyring"
+ cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
+
+ "github.com/NibiruChain/nibiru/eth/crypto/ethsecp256k1"
+)
+
+const (
+ // EthSecp256k1Type defines the ECDSA secp256k1 used on Ethereum
+ EthSecp256k1Type = hd.PubKeyType(ethsecp256k1.KeyType)
+)
+
+var (
+ // SupportedAlgorithms defines the list of signing algorithms used:
+ // - eth_secp256k1 (Ethereum)
+ // - secp256k1 (Tendermint)
+ SupportedAlgorithms = keyring.SigningAlgoList{EthSecp256k1, hd.Secp256k1}
+ // SupportedAlgorithmsLedger defines the list of signing algorithms used on the Ledger device:
+ // - eth_secp256k1 (Ethereum)
+ // - secp256k1 (Tendermint)
+ SupportedAlgorithmsLedger = keyring.SigningAlgoList{EthSecp256k1, hd.Secp256k1}
+)
+
+// EthSecp256k1Option defines a keyring option for the Ethereum Secp256k1 curve.
+// It supports eth_secp256k1 and secp256k1 keys for accounts.
+func EthSecp256k1Option() keyring.Option {
+ return func(options *keyring.Options) {
+ options.SupportedAlgos = SupportedAlgorithms
+ options.SupportedAlgosLedger = SupportedAlgorithmsLedger
+ }
+}
+
+var (
+ _ keyring.SignatureAlgo = EthSecp256k1
+
+ // EthSecp256k1 uses the Bitcoin secp256k1 ECDSA parameters.
+ EthSecp256k1 = ethSecp256k1Algo{}
+)
+
+type ethSecp256k1Algo struct{}
+
+// Name returns eth_secp256k1
+func (s ethSecp256k1Algo) Name() hd.PubKeyType {
+ return EthSecp256k1Type
+}
+
+// Derive derives and returns the eth_secp256k1 private key for the given mnemonic and HD path.
+func (s ethSecp256k1Algo) Derive() hd.DeriveFn {
+ return func(mnemonic, bip39Passphrase, path string) ([]byte, error) {
+ hdpath, err := accounts.ParseDerivationPath(path)
+ if err != nil {
+ return nil, err
+ }
+
+ seed, err := bip39.NewSeedWithErrorChecking(mnemonic, bip39Passphrase)
+ if err != nil {
+ return nil, err
+ }
+
+ // create a BTC-utils hd-derivation key chain
+ masterKey, err := hdkeychain.NewMaster(seed, &chaincfg.MainNetParams)
+ if err != nil {
+ return nil, err
+ }
+
+ key := masterKey
+ for _, n := range hdpath {
+ key, err = key.Derive(n)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ // btc-utils representation of a secp256k1 private key
+ privateKey, err := key.ECPrivKey()
+ if err != nil {
+ return nil, err
+ }
+
+ // cast private key to a convertible form (single scalar field element of secp256k1)
+ // and then load into ethcrypto private key format.
+ // TODO: add links to godocs of the two methods or implementations of them, to compare equivalency
+ privateKeyECDSA := privateKey.ToECDSA()
+ derivedKey := crypto.FromECDSA(privateKeyECDSA)
+
+ return derivedKey, nil
+ }
+}
+
+// Generate generates an eth_secp256k1 private key from the given bytes.
+func (s ethSecp256k1Algo) Generate() hd.GenerateFn {
+ return func(bz []byte) cryptotypes.PrivKey {
+ bzArr := make([]byte, ethsecp256k1.PrivKeySize)
+ copy(bzArr, bz)
+
+ // TODO: modulo P
+ return ðsecp256k1.PrivKey{
+ Key: bzArr,
+ }
+ }
+}
diff --git a/eth/crypto/hd/algorithm_test.go b/eth/crypto/hd/algorithm_test.go
new file mode 100644
index 000000000..0c268f156
--- /dev/null
+++ b/eth/crypto/hd/algorithm_test.go
@@ -0,0 +1,132 @@
+package hd
+
+import (
+ "os"
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+
+ "github.com/ethereum/go-ethereum/common"
+
+ amino "github.com/cosmos/cosmos-sdk/codec"
+ "github.com/cosmos/cosmos-sdk/codec/types"
+ "github.com/cosmos/cosmos-sdk/crypto/keyring"
+
+ cryptocodec "github.com/NibiruChain/nibiru/eth/crypto/codec"
+ enccodec "github.com/NibiruChain/nibiru/eth/encoding/codec"
+ ethtypes "github.com/NibiruChain/nibiru/eth/types"
+)
+
+var TestCodec amino.Codec
+
+func init() {
+ cdc := amino.NewLegacyAmino()
+ cryptocodec.RegisterCrypto(cdc)
+
+ interfaceRegistry := types.NewInterfaceRegistry()
+ TestCodec = amino.NewProtoCodec(interfaceRegistry)
+ enccodec.RegisterInterfaces(interfaceRegistry)
+}
+
+const (
+ mnemonic = "picnic rent average infant boat squirrel federal assault mercy purity very motor fossil wheel verify upset box fresh horse vivid copy predict square regret"
+
+ // hdWalletFixEnv defines whether the standard (correct) bip39
+ // derivation path was used, or if derivation was affected by
+ // https://github.com/btcsuite/btcutil/issues/179
+ hdWalletFixEnv = "GO_ETHEREUM_HDWALLET_FIX_ISSUE_179"
+)
+
+const appName = "nibid"
+
+func TestKeyring(t *testing.T) {
+ dir := t.TempDir()
+ mockIn := strings.NewReader("")
+ kr, err := keyring.New(appName, keyring.BackendTest, dir, mockIn, TestCodec, EthSecp256k1Option())
+ require.NoError(t, err)
+
+ // fail in retrieving key
+ info, err := kr.Key("foo")
+ require.Error(t, err)
+ require.Nil(t, info)
+
+ mockIn.Reset("password\npassword\n")
+ info, mnemonic, err := kr.NewMnemonic("foo", keyring.English, ethtypes.BIP44HDPath, keyring.DefaultBIP39Passphrase, EthSecp256k1)
+ require.NoError(t, err)
+ require.NotEmpty(t, mnemonic)
+ require.Equal(t, "foo", info.Name)
+ require.Equal(t, "local", info.GetType().String())
+ pubKey, err := info.GetPubKey()
+ require.NoError(t, err)
+ require.Equal(t, string(EthSecp256k1Type), pubKey.Type())
+
+ hdPath := ethtypes.BIP44HDPath
+
+ bz, err := EthSecp256k1.Derive()(mnemonic, keyring.DefaultBIP39Passphrase, hdPath)
+ require.NoError(t, err)
+ require.NotEmpty(t, bz)
+
+ wrongBz, err := EthSecp256k1.Derive()(mnemonic, keyring.DefaultBIP39Passphrase, "/wrong/hdPath")
+ require.Error(t, err)
+ require.Empty(t, wrongBz)
+
+ privkey := EthSecp256k1.Generate()(bz)
+ addr := common.BytesToAddress(privkey.PubKey().Address().Bytes())
+
+ os.Setenv(hdWalletFixEnv, "true")
+ wallet, err := NewFromMnemonic(mnemonic)
+ os.Setenv(hdWalletFixEnv, "")
+ require.NoError(t, err)
+
+ path := MustParseDerivationPath(hdPath)
+
+ account, err := wallet.Derive(path, false)
+ require.NoError(t, err)
+ require.Equal(t, addr.String(), account.Address.String())
+}
+
+func TestDerivation(t *testing.T) {
+ bz, err := EthSecp256k1.Derive()(mnemonic, keyring.DefaultBIP39Passphrase, ethtypes.BIP44HDPath)
+ require.NoError(t, err)
+ require.NotEmpty(t, bz)
+
+ badBz, err := EthSecp256k1.Derive()(mnemonic, keyring.DefaultBIP39Passphrase, "44'/60'/0'/0/0")
+ require.NoError(t, err)
+ require.NotEmpty(t, badBz)
+
+ require.NotEqual(t, bz, badBz)
+
+ privkey := EthSecp256k1.Generate()(bz)
+ badPrivKey := EthSecp256k1.Generate()(badBz)
+
+ require.False(t, privkey.Equals(badPrivKey))
+
+ wallet, err := NewFromMnemonic(mnemonic)
+ require.NoError(t, err)
+
+ path := MustParseDerivationPath(ethtypes.BIP44HDPath)
+ account, err := wallet.Derive(path, false)
+ require.NoError(t, err)
+
+ badPath := MustParseDerivationPath("44'/60'/0'/0/0")
+ badAccount, err := wallet.Derive(badPath, false)
+ require.NoError(t, err)
+
+ // Equality of Address BIP44
+ require.Equal(t, account.Address.String(), "0xA588C66983a81e800Db4dF74564F09f91c026351")
+ require.Equal(t, badAccount.Address.String(), "0xF8D6FDf2B8b488ea37e54903750dcd13F67E71cb")
+ // Inequality of wrong derivation path address
+ require.NotEqual(t, account.Address.String(), badAccount.Address.String())
+ // Equality of impls between Ethereum and Nibiru
+ require.Equal(t, common.BytesToAddress(privkey.PubKey().Address().Bytes()).String(), "0xA588C66983a81e800Db4dF74564F09f91c026351")
+ require.Equal(t, common.BytesToAddress(badPrivKey.PubKey().Address().Bytes()).String(), "0xF8D6FDf2B8b488ea37e54903750dcd13F67E71cb")
+
+ // Equality of impls between Ethereum and Nibiru
+ require.Equal(t, common.BytesToAddress(privkey.PubKey().Address()).String(), account.Address.String())
+ require.Equal(t, common.BytesToAddress(badPrivKey.PubKey().Address()).String(), badAccount.Address.String())
+
+ // Inequality of wrong derivation path
+ require.NotEqual(t, common.BytesToAddress(privkey.PubKey().Address()).String(), badAccount.Address.String())
+ require.NotEqual(t, common.BytesToAddress(badPrivKey.PubKey().Address()).String(), account.Address.Hex())
+}
diff --git a/eth/crypto/hd/benchmark_test.go b/eth/crypto/hd/benchmark_test.go
new file mode 100644
index 000000000..d74b98533
--- /dev/null
+++ b/eth/crypto/hd/benchmark_test.go
@@ -0,0 +1,31 @@
+package hd
+
+import (
+ "testing"
+
+ "github.com/NibiruChain/nibiru/eth/types"
+ "github.com/cosmos/cosmos-sdk/crypto/keyring"
+)
+
+func BenchmarkEthSecp256k1Algo_Derive(b *testing.B) {
+ b.ReportAllocs()
+ for i := 0; i < b.N; i++ {
+ deriveFn := EthSecp256k1.Derive()
+ if _, err := deriveFn(mnemonic, keyring.DefaultBIP39Passphrase, types.BIP44HDPath); err != nil {
+ b.Fatal(err)
+ }
+ }
+}
+
+func BenchmarkEthSecp256k1Algo_Generate(b *testing.B) {
+ bz, err := EthSecp256k1.Derive()(mnemonic, keyring.DefaultBIP39Passphrase, types.BIP44HDPath)
+ if err != nil {
+ b.Fatal(err)
+ }
+
+ b.ResetTimer()
+ b.ReportAllocs()
+ for i := 0; i < b.N; i++ {
+ (ðSecp256k1Algo{}).Generate()(bz)
+ }
+}
diff --git a/eth/crypto/hd/utils_test.go b/eth/crypto/hd/utils_test.go
new file mode 100644
index 000000000..2082368d8
--- /dev/null
+++ b/eth/crypto/hd/utils_test.go
@@ -0,0 +1,181 @@
+// NOTE: This code is being used as test helper functions.
+package hd
+
+import (
+ "crypto/ecdsa"
+ "errors"
+ "os"
+ "sync"
+
+ "github.com/ethereum/go-ethereum/accounts"
+ "github.com/ethereum/go-ethereum/common"
+ "github.com/ethereum/go-ethereum/crypto"
+
+ "github.com/btcsuite/btcd/btcutil/hdkeychain"
+ "github.com/btcsuite/btcd/chaincfg"
+ bip39 "github.com/tyler-smith/go-bip39"
+)
+
+const issue179FixEnvar = "GO_ETHEREUM_HDWALLET_FIX_ISSUE_179"
+
+// Wallet is the underlying wallet struct.
+type Wallet struct {
+ mnemonic string
+ masterKey *hdkeychain.ExtendedKey
+ seed []byte
+ paths map[common.Address]accounts.DerivationPath
+ accounts []accounts.Account
+ stateLock sync.RWMutex
+ fixIssue172 bool
+}
+
+// NewFromMnemonic returns a new wallet from a BIP-39 mnemonic.
+func NewFromMnemonic(mnemonic string) (*Wallet, error) {
+ if mnemonic == "" {
+ return nil, errors.New("mnemonic is required")
+ }
+
+ if !bip39.IsMnemonicValid(mnemonic) {
+ return nil, errors.New("mnemonic is invalid")
+ }
+
+ seed, err := NewSeedFromMnemonic(mnemonic)
+ if err != nil {
+ return nil, err
+ }
+
+ wallet, err := newWallet(seed)
+ if err != nil {
+ return nil, err
+ }
+ wallet.mnemonic = mnemonic
+
+ return wallet, nil
+}
+
+// NewSeedFromMnemonic returns a BIP-39 seed based on a BIP-39 mnemonic.
+func NewSeedFromMnemonic(mnemonic string) ([]byte, error) {
+ if mnemonic == "" {
+ return nil, errors.New("mnemonic is required")
+ }
+
+ return bip39.NewSeedWithErrorChecking(mnemonic, "")
+}
+
+// newWallet builds a Wallet from a BIP-39 seed, deriving the BIP-32 master
+// key on mainnet parameters. fixIssue172 is toggled via the
+// GO_ETHEREUM_HDWALLET_FIX_ISSUE_179 environment variable.
+func newWallet(seed []byte) (*Wallet, error) {
+	masterKey, err := hdkeychain.NewMaster(seed, &chaincfg.MainNetParams)
+	if err != nil {
+		return nil, err
+	}
+
+	return &Wallet{
+		masterKey:   masterKey,
+		seed:        seed,
+		accounts:    []accounts.Account{},
+		paths:       map[common.Address]accounts.DerivationPath{},
+		fixIssue172: len(os.Getenv(issue179FixEnvar)) > 0,
+	}, nil
+}
+
+// Derive implements accounts.Wallet, deriving a new account at the specific
+// derivation path. If pin is set to true, the account will be added to the list
+// of tracked accounts.
+func (w *Wallet) Derive(path accounts.DerivationPath, pin bool) (accounts.Account, error) {
+ // Try to derive the actual account and update its URL if successful
+ w.stateLock.RLock() // Avoid device disappearing during derivation
+
+ address, err := w.deriveAddress(path)
+
+ w.stateLock.RUnlock()
+
+ // If an error occurred or no pinning was requested, return
+ if err != nil {
+ return accounts.Account{}, err
+ }
+
+ account := accounts.Account{
+ Address: address,
+ URL: accounts.URL{
+ Scheme: "",
+ Path: path.String(),
+ },
+ }
+
+ if !pin {
+ return account, nil
+ }
+
+ // Pinning needs to modify the state
+ w.stateLock.Lock()
+ defer w.stateLock.Unlock()
+
+ if _, ok := w.paths[address]; !ok {
+ w.accounts = append(w.accounts, account)
+ w.paths[address] = path
+ }
+
+ return account, nil
+}
+
+// MustParseDerivationPath parses the derivation path in string format into
+// []uint32 but will panic if it can't parse it.
+func MustParseDerivationPath(path string) accounts.DerivationPath {
+ parsed, err := accounts.ParseDerivationPath(path)
+ if err != nil {
+ panic(err)
+ }
+
+ return parsed
+}
+
+// derivePrivateKey derives the ECDSA private key of the derivation path.
+func (w *Wallet) derivePrivateKey(path accounts.DerivationPath) (*ecdsa.PrivateKey, error) {
+	var err error
+	key := w.masterKey
+	for _, n := range path {
+		if w.fixIssue172 && key.IsAffectedByIssue172() {
+			key, err = key.Derive(n)
+		} else {
+			//lint:ignore SA1019 this is used for testing only
+			key, err = key.DeriveNonStandard(n) //nolint:staticcheck
+		}
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	// Check the error from ECPrivKey before dereferencing privateKey.
+	privateKey, err := key.ECPrivKey()
+	if err != nil {
+		return nil, err
+	}
+	privateKeyECDSA := privateKey.ToECDSA()
+	return privateKeyECDSA, nil
+}
+
+// derivePublicKey derives the public key of the derivation path.
+func (w *Wallet) derivePublicKey(path accounts.DerivationPath) (*ecdsa.PublicKey, error) {
+ privateKeyECDSA, err := w.derivePrivateKey(path)
+ if err != nil {
+ return nil, err
+ }
+
+ publicKey := privateKeyECDSA.Public()
+ publicKeyECDSA, ok := publicKey.(*ecdsa.PublicKey)
+ if !ok {
+ return nil, errors.New("failed to get public key")
+ }
+
+ return publicKeyECDSA, nil
+}
+
+// DeriveAddress derives the account address of the derivation path.
+func (w *Wallet) deriveAddress(path accounts.DerivationPath) (common.Address, error) {
+ publicKeyECDSA, err := w.derivePublicKey(path)
+ if err != nil {
+ return common.Address{}, err
+ }
+
+ address := crypto.PubkeyToAddress(*publicKeyECDSA)
+ return address, nil
+}
diff --git a/eth/crypto/keyring/options.go b/eth/crypto/keyring/options.go
new file mode 100644
index 000000000..fa4e4c55f
--- /dev/null
+++ b/eth/crypto/keyring/options.go
@@ -0,0 +1,42 @@
+// Copyright 2023 Unique Divine and Nibi, Inc.
+
+package keyring
+
+import (
+ "github.com/cosmos/cosmos-sdk/crypto/keyring"
+ "github.com/cosmos/cosmos-sdk/crypto/types"
+
+ "github.com/NibiruChain/nibiru/eth/crypto/ethsecp256k1"
+ "github.com/NibiruChain/nibiru/eth/crypto/hd"
+)
+
+// AppName defines the Ledger app used for signing.
+const AppName = "Ethereum"
+
+var (
+ // SupportedAlgorithms defines the list of signing algorithms used on Nibiru:
+ // - eth_secp256k1 (Ethereum)
+ SupportedAlgorithms = keyring.SigningAlgoList{hd.EthSecp256k1}
+ // SupportedAlgorithmsLedger defines the list of signing algorithms used on
+ // Nibiru for the Ledger device:
+ // - secp256k1 (in order to comply with Cosmos SDK)
+ // The Ledger derivation function is responsible for all signing and address generation.
+ SupportedAlgorithmsLedger = keyring.SigningAlgoList{hd.EthSecp256k1}
+ // CreatePubkey uses the ethsecp256k1 pubkey with Ethereum address generation and keccak hashing
+ CreatePubkey = func(key []byte) types.PubKey { return ðsecp256k1.PubKey{Key: key} }
+ // SkipDERConversion represents whether the signed Ledger output should skip conversion from DER to BER.
+ // This is set to true for signing performed by the Ledger Ethereum app.
+ SkipDERConversion = true
+)
+
+// Option defines a function keys options for the ethereum Secp256k1 curve.
+// It supports eth_secp256k1 keys for accounts.
+func Option() keyring.Option {
+	return func(options *keyring.Options) {
+		options.SupportedAlgos = SupportedAlgorithms
+		options.SupportedAlgosLedger = SupportedAlgorithmsLedger
+		options.LedgerCreateKey = CreatePubkey
+		options.LedgerAppName = AppName
+		options.LedgerSigSkipDERConv = SkipDERConversion
+	}
+}
diff --git a/eth/crypto/secp256r1/verify.go b/eth/crypto/secp256r1/verify.go
new file mode 100644
index 000000000..9b4ba8685
--- /dev/null
+++ b/eth/crypto/secp256r1/verify.go
@@ -0,0 +1,52 @@
+// Copyright 2014 The go-ethereum Authors
+// This file is part of the go-ethereum library.
+//
+// The go-ethereum library is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Lesser General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// The go-ethereum library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public License
+// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
+
+package secp256r1
+
+import (
+ "crypto/ecdsa"
+ "crypto/elliptic"
+ "math/big"
+)
+
+// Verify reports whether signature (r, s) is valid for hash under the P-256 public key (x, y).
+func Verify(hash []byte, r, s, x, y *big.Int) bool {
+	// Create the public key format
+	publicKey := newECDSAPublicKey(x, y)
+
+	// Check if they are invalid public key coordinates
+	if publicKey == nil {
+		return false
+	}
+
+	// Verify the signature with the public key,
+	// then return true if it's valid, false otherwise
+	return ecdsa.Verify(publicKey, hash, r, s)
+}
+
+// newECDSAPublicKey creates an ECDSA P256 public key from the given coordinates
+func newECDSAPublicKey(x, y *big.Int) *ecdsa.PublicKey {
+ // Check if the given coordinates are valid and in the reference point (infinity)
+ if x == nil || y == nil || x.Sign() == 0 && y.Sign() == 0 || !elliptic.P256().IsOnCurve(x, y) {
+ return nil
+ }
+
+ return &ecdsa.PublicKey{
+ Curve: elliptic.P256(),
+ X: x,
+ Y: y,
+ }
+}
diff --git a/eth/encoding/codec/codec.go b/eth/encoding/codec/codec.go
new file mode 100644
index 000000000..90b2ad433
--- /dev/null
+++ b/eth/encoding/codec/codec.go
@@ -0,0 +1,26 @@
+// Copyright 2023 Unique Divine and Nibi, Inc.
+package codec
+
+import (
+ "github.com/cosmos/cosmos-sdk/codec"
+ codectypes "github.com/cosmos/cosmos-sdk/codec/types"
+ "github.com/cosmos/cosmos-sdk/std"
+ sdk "github.com/cosmos/cosmos-sdk/types"
+
+ cryptocodec "github.com/NibiruChain/nibiru/eth/crypto/codec"
+ "github.com/NibiruChain/nibiru/eth/types"
+)
+
+// RegisterLegacyAminoCodec registers Interfaces from types, crypto, and SDK std.
+func RegisterLegacyAminoCodec(cdc *codec.LegacyAmino) {
+ sdk.RegisterLegacyAminoCodec(cdc)
+ cryptocodec.RegisterCrypto(cdc)
+ codec.RegisterEvidences(cdc)
+}
+
+// RegisterInterfaces registers Interfaces from types, crypto, and SDK std.
+func RegisterInterfaces(interfaceRegistry codectypes.InterfaceRegistry) {
+ std.RegisterInterfaces(interfaceRegistry)
+ cryptocodec.RegisterInterfaces(interfaceRegistry)
+ types.RegisterInterfaces(interfaceRegistry)
+}
diff --git a/eth/encoding/config.go b/eth/encoding/config.go
new file mode 100644
index 000000000..b50b67e22
--- /dev/null
+++ b/eth/encoding/config.go
@@ -0,0 +1,32 @@
+// Copyright 2023 Unique Divine and Nibi, Inc.
+package encoding
+
+import (
+ "cosmossdk.io/simapp/params"
+ amino "github.com/cosmos/cosmos-sdk/codec"
+ "github.com/cosmos/cosmos-sdk/codec/types"
+ "github.com/cosmos/cosmos-sdk/types/module"
+ "github.com/cosmos/cosmos-sdk/x/auth/tx"
+
+ enccodec "github.com/NibiruChain/nibiru/eth/encoding/codec"
+)
+
+// MakeConfig creates an EncodingConfig for testing
+func MakeConfig(mb module.BasicManager) params.EncodingConfig {
+ cdc := amino.NewLegacyAmino()
+ interfaceRegistry := types.NewInterfaceRegistry()
+ codec := amino.NewProtoCodec(interfaceRegistry)
+
+ encodingConfig := params.EncodingConfig{
+ InterfaceRegistry: interfaceRegistry,
+ Codec: codec,
+ TxConfig: tx.NewTxConfig(codec, tx.DefaultSignModes),
+ Amino: cdc,
+ }
+
+ enccodec.RegisterLegacyAminoCodec(encodingConfig.Amino)
+ mb.RegisterLegacyAminoCodec(encodingConfig.Amino)
+ enccodec.RegisterInterfaces(encodingConfig.InterfaceRegistry)
+ mb.RegisterInterfaces(encodingConfig.InterfaceRegistry)
+ return encodingConfig
+}
diff --git a/eth/encoding/config_test.go b/eth/encoding/config_test.go
new file mode 100644
index 000000000..f2f2e0d88
--- /dev/null
+++ b/eth/encoding/config_test.go
@@ -0,0 +1,41 @@
+package encoding_test
+
+import (
+ "math/big"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+
+ ethtypes "github.com/ethereum/go-ethereum/core/types"
+
+ "github.com/NibiruChain/nibiru/app"
+ "github.com/NibiruChain/nibiru/eth/encoding"
+ "github.com/NibiruChain/nibiru/x/common/testutil"
+ evmtypes "github.com/NibiruChain/nibiru/x/evm/types"
+)
+
+func TestTxEncoding(t *testing.T) {
+ addr, key := testutil.PrivKeyEth()
+ signer := testutil.NewSigner(key)
+
+ ethTxParams := evmtypes.EvmTxArgs{
+ ChainID: big.NewInt(1),
+ Nonce: 1,
+ Amount: big.NewInt(10),
+ GasLimit: 100000,
+ GasFeeCap: big.NewInt(1),
+ GasTipCap: big.NewInt(1),
+ Input: []byte{},
+ }
+ msg := evmtypes.NewTx(ðTxParams)
+ msg.From = addr.Hex()
+
+ ethSigner := ethtypes.LatestSignerForChainID(big.NewInt(1))
+ err := msg.Sign(ethSigner, signer)
+ require.NoError(t, err)
+
+ cfg := encoding.MakeConfig(app.ModuleBasics)
+
+ _, err = cfg.TxConfig.TxEncoder()(msg)
+ require.Error(t, err, "encoding failed")
+}
diff --git a/eth/ethereum/eip712/domain.go b/eth/ethereum/eip712/domain.go
new file mode 100644
index 000000000..8017a9ae8
--- /dev/null
+++ b/eth/ethereum/eip712/domain.go
@@ -0,0 +1,20 @@
+// Copyright 2023 Unique Divine and Nibi, Inc.
+package eip712
+
+import (
+ "github.com/ethereum/go-ethereum/common/math"
+ "github.com/ethereum/go-ethereum/signer/core/apitypes"
+)
+
+// createEIP712Domain creates the typed data domain for the given chainID.
+func createEIP712Domain(chainID uint64) apitypes.TypedDataDomain {
+ domain := apitypes.TypedDataDomain{
+ Name: "Cosmos Web3",
+ Version: "1.0.0",
+ ChainId: math.NewHexOrDecimal256(int64(chainID)), // #nosec G701
+ VerifyingContract: "cosmos",
+ Salt: "0",
+ }
+
+ return domain
+}
diff --git a/eth/ethereum/eip712/eip712.go b/eth/ethereum/eip712/eip712.go
new file mode 100644
index 000000000..eab245d57
--- /dev/null
+++ b/eth/ethereum/eip712/eip712.go
@@ -0,0 +1,35 @@
+// Copyright 2023 Unique Divine and Nibi, Inc.
+package eip712
+
+import (
+ "github.com/ethereum/go-ethereum/signer/core/apitypes"
+)
+
+// WrapTxToTypedData wraps an Amino-encoded Cosmos Tx JSON SignDoc
+// bytestream into an EIP712-compatible TypedData request.
+func WrapTxToTypedData(
+	chainID uint64,
+	data []byte,
+) (apitypes.TypedData, error) {
+	// Check the error before touching messagePayload's fields.
+	messagePayload, err := createEIP712MessagePayload(data)
+	if err != nil {
+		return apitypes.TypedData{}, err
+	}
+	message := messagePayload.message
+
+	types, err := createEIP712Types(messagePayload)
+	if err != nil {
+		return apitypes.TypedData{}, err
+	}
+
+	domain := createEIP712Domain(chainID)
+
+	typedData := apitypes.TypedData{
+		Types:       types,
+		PrimaryType: txField,
+		Domain:      domain,
+		Message:     message,
+	}
+
+	return typedData, nil
+}
diff --git a/eth/ethereum/eip712/eip712_fuzzer_test.go b/eth/ethereum/eip712/eip712_fuzzer_test.go
new file mode 100644
index 000000000..3d5318d14
--- /dev/null
+++ b/eth/ethereum/eip712/eip712_fuzzer_test.go
@@ -0,0 +1,193 @@
+package eip712_test
+
+import (
+ "fmt"
+ "strings"
+
+ rand "github.com/cometbft/cometbft/libs/rand"
+
+ "github.com/NibiruChain/nibiru/eth/ethereum/eip712"
+ "github.com/tidwall/gjson"
+ "github.com/tidwall/sjson"
+)
+
+type EIP712FuzzTestParams struct {
+ numTestObjects int
+ maxNumFieldsPerObject int
+ minStringLength int
+ maxStringLength int
+ randomFloatRange float64
+ maxArrayLength int
+ maxObjectDepth int
+}
+
+const (
+ numPrimitiveJSONTypes = 3
+ numJSONTypes = 5
+ asciiRangeStart = 65
+ asciiRangeEnd = 127
+ fuzzTestName = "Flatten"
+)
+
+const (
+ jsonBoolType = iota
+ jsonStringType = iota
+ jsonFloatType = iota
+ jsonArrayType = iota
+ jsonObjectType = iota
+)
+
+var params = EIP712FuzzTestParams{
+ numTestObjects: 16,
+ maxNumFieldsPerObject: 16,
+ minStringLength: 16,
+ maxStringLength: 48,
+ randomFloatRange: 120000000,
+ maxArrayLength: 8,
+ maxObjectDepth: 4,
+}
+
+// TestRandomPayloadFlattening generates many random payloads with different JSON values to ensure
+// that Flattening works across all inputs.
+// Note that this is a fuzz test, although it doesn't use Go's Fuzz testing suite, since there are
+// variable input sizes, types, and fields. While it may be possible to translate a single input into
+// a JSON object, it would require difficult parsing, and ultimately approximates our randomized unit
+// tests as they are.
+func (suite *EIP712TestSuite) TestRandomPayloadFlattening() {
+ // Re-seed rand generator
+ rand.Seed(rand.Int64())
+
+ for i := 0; i < params.numTestObjects; i++ {
+ suite.Run(fmt.Sprintf("%v%d", fuzzTestName, i), func() {
+ payload := suite.generateRandomPayload(i)
+
+ flattened, numMessages, err := eip712.FlattenPayloadMessages(payload)
+
+ suite.Require().NoError(err)
+ suite.Require().Equal(numMessages, i)
+
+ suite.verifyPayloadAgainstFlattened(payload, flattened)
+ })
+ }
+}
+
+// generateRandomPayload creates a random payload of the desired format, with random sub-objects.
+func (suite *EIP712TestSuite) generateRandomPayload(numMessages int) gjson.Result {
+ payload := suite.createRandomJSONObject().Raw
+ msgs := make([]gjson.Result, numMessages)
+
+ for i := 0; i < numMessages; i++ {
+ msgs[i] = suite.createRandomJSONObject()
+ }
+
+ payload, err := sjson.Set(payload, msgsFieldName, msgs)
+ suite.Require().NoError(err)
+
+ return gjson.Parse(payload)
+}
+
+// createRandomJSONObject creates a JSON object with random fields.
+func (suite *EIP712TestSuite) createRandomJSONObject() gjson.Result {
+ var err error
+ payloadRaw := ""
+
+ numFields := suite.createRandomIntInRange(0, params.maxNumFieldsPerObject)
+ for i := 0; i < numFields; i++ {
+ key := suite.createRandomString()
+
+ randField := suite.createRandomJSONField(i, 0)
+ payloadRaw, err = sjson.Set(payloadRaw, key, randField)
+ suite.Require().NoError(err)
+ }
+
+ return gjson.Parse(payloadRaw)
+}
+
+// createRandomJSONField creates a random field with a random JSON type, with the possibility of
+// nested fields up to depth objects.
+func (suite *EIP712TestSuite) createRandomJSONField(t int, depth int) interface{} {
+ switch t % numJSONTypes {
+ case jsonBoolType:
+ return suite.createRandomBoolean()
+ case jsonStringType:
+ return suite.createRandomString()
+ case jsonFloatType:
+ return suite.createRandomFloat()
+ case jsonArrayType:
+ return suite.createRandomJSONNestedArray(depth)
+ case jsonObjectType:
+ return suite.createRandomJSONNestedObject(depth)
+ default:
+ return nil
+ }
+}
+
+// createRandomJSONNestedArray creates an array of random nested JSON fields.
+func (suite *EIP712TestSuite) createRandomJSONNestedArray(depth int) []interface{} {
+ arr := make([]interface{}, rand.Intn(params.maxArrayLength))
+ for i := range arr {
+ arr[i] = suite.createRandomJSONNestedField(depth)
+ }
+
+ return arr
+}
+
+// createRandomJSONNestedObject creates a key-value set of objects with random nested JSON fields.
+func (suite *EIP712TestSuite) createRandomJSONNestedObject(depth int) interface{} {
+ numFields := rand.Intn(params.maxNumFieldsPerObject)
+ obj := make(map[string]interface{})
+
+ for i := 0; i < numFields; i++ {
+ subField := suite.createRandomJSONNestedField(depth)
+
+ obj[suite.createRandomString()] = subField
+ }
+
+ return obj
+}
+
+// createRandomJSONNestedField serves as a helper for createRandomJSONField and returns a random
+// subfield to populate an array or object type.
+func (suite *EIP712TestSuite) createRandomJSONNestedField(depth int) interface{} {
+ var newFieldType int
+
+ if depth == params.maxObjectDepth {
+ newFieldType = rand.Intn(numPrimitiveJSONTypes)
+ } else {
+ newFieldType = rand.Intn(numJSONTypes)
+ }
+
+ return suite.createRandomJSONField(newFieldType, depth+1)
+}
+
+func (suite *EIP712TestSuite) createRandomBoolean() bool {
+ return rand.Intn(2) == 0
+}
+
+func (suite *EIP712TestSuite) createRandomFloat() float64 {
+ return (rand.Float64() - 0.5) * params.randomFloatRange
+}
+
+func (suite *EIP712TestSuite) createRandomString() string {
+ bzLen := suite.createRandomIntInRange(params.minStringLength, params.maxStringLength)
+ bz := make([]byte, bzLen)
+
+ for i := 0; i < bzLen; i++ {
+ bz[i] = byte(suite.createRandomIntInRange(asciiRangeStart, asciiRangeEnd))
+ }
+
+ str := string(bz)
+
+ // Remove control characters, since they will make JSON invalid
+ str = strings.ReplaceAll(str, "{", "")
+ str = strings.ReplaceAll(str, "}", "")
+ str = strings.ReplaceAll(str, "]", "")
+ str = strings.ReplaceAll(str, "[", "")
+
+ return str
+}
+
+// createRandomIntInRange provides a random integer between [min, max)
+func (suite *EIP712TestSuite) createRandomIntInRange(min int, max int) int {
+ return rand.Intn(max-min) + min
+}
diff --git a/eth/ethereum/eip712/eip712_legacy.go b/eth/ethereum/eip712/eip712_legacy.go
new file mode 100644
index 000000000..fc71bdbfd
--- /dev/null
+++ b/eth/ethereum/eip712/eip712_legacy.go
@@ -0,0 +1,438 @@
+// Copyright 2023 Unique Divine and Nibi, Inc.
+package eip712
+
+import (
+ "encoding/json"
+ "fmt"
+ "math/big"
+ "reflect" // #nosec G702 for sensitive import
+ "strings"
+ "time"
+
+ errorsmod "cosmossdk.io/errors"
+ sdkmath "cosmossdk.io/math"
+ codectypes "github.com/cosmos/cosmos-sdk/codec/types"
+ "github.com/cosmos/cosmos-sdk/crypto/keys/ed25519"
+ sdk "github.com/cosmos/cosmos-sdk/types"
+ errortypes "github.com/cosmos/cosmos-sdk/types/errors"
+
+ "github.com/ethereum/go-ethereum/common"
+ "github.com/ethereum/go-ethereum/common/math"
+ "github.com/ethereum/go-ethereum/signer/core/apitypes"
+)
+
+type FeeDelegationOptions struct {
+ FeePayer sdk.AccAddress
+}
+
+const (
+ typeDefPrefix = "_"
+)
+
+// LegacyWrapTxToTypedData is an ultimate method that wraps Amino-encoded Cosmos Tx JSON data
+// into an EIP712-compatible TypedData request.
+func LegacyWrapTxToTypedData(
+ cdc codectypes.AnyUnpacker,
+ chainID uint64,
+ msg sdk.Msg,
+ data []byte,
+ feeDelegation *FeeDelegationOptions,
+) (apitypes.TypedData, error) {
+ txData := make(map[string]interface{})
+
+ if err := json.Unmarshal(data, &txData); err != nil {
+ return apitypes.TypedData{}, errorsmod.Wrap(errortypes.ErrJSONUnmarshal, "failed to JSON unmarshal data")
+ }
+
+ domain := apitypes.TypedDataDomain{
+ Name: "Cosmos Web3",
+ Version: "1.0.0",
+ ChainId: math.NewHexOrDecimal256(int64(chainID)),
+ VerifyingContract: "cosmos",
+ Salt: "0",
+ }
+
+ msgTypes, err := extractMsgTypes(cdc, "MsgValue", msg)
+ if err != nil {
+ return apitypes.TypedData{}, err
+ }
+
+ if feeDelegation != nil {
+ feeInfo, ok := txData["fee"].(map[string]interface{})
+ if !ok {
+ return apitypes.TypedData{}, errorsmod.Wrap(errortypes.ErrInvalidType, "cannot parse fee from tx data")
+ }
+
+ feeInfo["feePayer"] = feeDelegation.FeePayer.String()
+
+ // also patching msgTypes to include feePayer
+ msgTypes["Fee"] = []apitypes.Type{
+ {Name: "feePayer", Type: "string"},
+ {Name: "amount", Type: "Coin[]"},
+ {Name: "gas", Type: "string"},
+ }
+ }
+
+ typedData := apitypes.TypedData{
+ Types: msgTypes,
+ PrimaryType: "Tx",
+ Domain: domain,
+ Message: txData,
+ }
+
+ return typedData, nil
+}
+
+func extractMsgTypes(cdc codectypes.AnyUnpacker, msgTypeName string, msg sdk.Msg) (apitypes.Types, error) {
+ rootTypes := apitypes.Types{
+ "EIP712Domain": {
+ {
+ Name: "name",
+ Type: "string",
+ },
+ {
+ Name: "version",
+ Type: "string",
+ },
+ {
+ Name: "chainId",
+ Type: "uint256",
+ },
+ {
+ Name: "verifyingContract",
+ Type: "string",
+ },
+ {
+ Name: "salt",
+ Type: "string",
+ },
+ },
+ "Tx": {
+ {Name: "account_number", Type: "string"},
+ {Name: "chain_id", Type: "string"},
+ {Name: "fee", Type: "Fee"},
+ {Name: "memo", Type: "string"},
+ {Name: "msgs", Type: "Msg[]"},
+ {Name: "sequence", Type: "string"},
+ // Note timeout_height was removed because it was not getting filled with the legacyTx
+ // {Name: "timeout_height", Type: "string"},
+ },
+ "Fee": {
+ {Name: "amount", Type: "Coin[]"},
+ {Name: "gas", Type: "string"},
+ },
+ "Coin": {
+ {Name: "denom", Type: "string"},
+ {Name: "amount", Type: "string"},
+ },
+ "Msg": {
+ {Name: "type", Type: "string"},
+ {Name: "value", Type: msgTypeName},
+ },
+ msgTypeName: {},
+ }
+
+ if err := walkFields(cdc, rootTypes, msgTypeName, msg); err != nil {
+ return nil, err
+ }
+
+ return rootTypes, nil
+}
+
+func walkFields(cdc codectypes.AnyUnpacker, typeMap apitypes.Types, rootType string, in interface{}) (err error) {
+ defer doRecover(&err)
+
+ t := reflect.TypeOf(in)
+ v := reflect.ValueOf(in)
+
+ for {
+ if t.Kind() == reflect.Ptr ||
+ t.Kind() == reflect.Interface {
+ t = t.Elem()
+ v = v.Elem()
+
+ continue
+ }
+
+ break
+ }
+
+ return legacyTraverseFields(cdc, typeMap, rootType, typeDefPrefix, t, v)
+}
+
+type cosmosAnyWrapper struct {
+ Type string `json:"type"`
+ Value interface{} `json:"value"`
+}
+
+func legacyTraverseFields(
+ cdc codectypes.AnyUnpacker,
+ typeMap apitypes.Types,
+ rootType string,
+ prefix string,
+ t reflect.Type,
+ v reflect.Value,
+) error {
+ n := t.NumField()
+
+ if prefix == typeDefPrefix {
+ if len(typeMap[rootType]) == n {
+ return nil
+ }
+ } else {
+ typeDef := sanitizeTypedef(prefix)
+ if len(typeMap[typeDef]) == n {
+ return nil
+ }
+ }
+
+ for i := 0; i < n; i++ {
+ var (
+ field reflect.Value
+ err error
+ )
+
+ if v.IsValid() {
+ field = v.Field(i)
+ }
+
+ fieldType := t.Field(i).Type
+ fieldName := jsonNameFromTag(t.Field(i).Tag)
+
+ if fieldType == cosmosAnyType {
+ // Unpack field, value as Any
+ if fieldType, field, err = unpackAny(cdc, field); err != nil {
+ return err
+ }
+ }
+
+ // If field is an empty value, do not include in types, since it will not be present in the object
+ if field.IsZero() {
+ continue
+ }
+
+ for {
+ if fieldType.Kind() == reflect.Ptr {
+ fieldType = fieldType.Elem()
+
+ if field.IsValid() {
+ field = field.Elem()
+ }
+
+ continue
+ }
+
+ if fieldType.Kind() == reflect.Interface {
+ fieldType = reflect.TypeOf(field.Interface())
+ continue
+ }
+
+ if field.Kind() == reflect.Ptr {
+ field = field.Elem()
+ continue
+ }
+
+ break
+ }
+
+ var isCollection bool
+ if fieldType.Kind() == reflect.Array || fieldType.Kind() == reflect.Slice {
+ if field.Len() == 0 {
+ // skip empty collections from type mapping
+ continue
+ }
+
+ fieldType = fieldType.Elem()
+ field = field.Index(0)
+ isCollection = true
+
+ if fieldType == cosmosAnyType {
+ if fieldType, field, err = unpackAny(cdc, field); err != nil {
+ return err
+ }
+ }
+ }
+
+ for {
+ if fieldType.Kind() == reflect.Ptr {
+ fieldType = fieldType.Elem()
+
+ if field.IsValid() {
+ field = field.Elem()
+ }
+
+ continue
+ }
+
+ if fieldType.Kind() == reflect.Interface {
+ fieldType = reflect.TypeOf(field.Interface())
+ continue
+ }
+
+ if field.Kind() == reflect.Ptr {
+ field = field.Elem()
+ continue
+ }
+
+ break
+ }
+
+ fieldPrefix := fmt.Sprintf("%s.%s", prefix, fieldName)
+
+ ethTyp := typToEth(fieldType)
+
+ if len(ethTyp) > 0 {
+ // Support array of uint64
+ if isCollection && fieldType.Kind() != reflect.Slice && fieldType.Kind() != reflect.Array {
+ ethTyp += "[]"
+ }
+
+ if prefix == typeDefPrefix {
+ typeMap[rootType] = append(typeMap[rootType], apitypes.Type{
+ Name: fieldName,
+ Type: ethTyp,
+ })
+ } else {
+ typeDef := sanitizeTypedef(prefix)
+ typeMap[typeDef] = append(typeMap[typeDef], apitypes.Type{
+ Name: fieldName,
+ Type: ethTyp,
+ })
+ }
+
+ continue
+ }
+
+ if fieldType.Kind() == reflect.Struct {
+ var fieldTypedef string
+
+ if isCollection {
+ fieldTypedef = sanitizeTypedef(fieldPrefix) + "[]"
+ } else {
+ fieldTypedef = sanitizeTypedef(fieldPrefix)
+ }
+
+ if prefix == typeDefPrefix {
+ typeMap[rootType] = append(typeMap[rootType], apitypes.Type{
+ Name: fieldName,
+ Type: fieldTypedef,
+ })
+ } else {
+ typeDef := sanitizeTypedef(prefix)
+ typeMap[typeDef] = append(typeMap[typeDef], apitypes.Type{
+ Name: fieldName,
+ Type: fieldTypedef,
+ })
+ }
+
+ if err := legacyTraverseFields(cdc, typeMap, rootType, fieldPrefix, fieldType, field); err != nil {
+ return err
+ }
+
+ continue
+ }
+ }
+
+ return nil
+}
+
+func jsonNameFromTag(tag reflect.StructTag) string {
+ jsonTags := tag.Get("json")
+ parts := strings.Split(jsonTags, ",")
+ return parts[0]
+}
+
+// Unpack the given Any value with Type/Value deconstruction
+func unpackAny(cdc codectypes.AnyUnpacker, field reflect.Value) (reflect.Type, reflect.Value, error) {
+ anyData, ok := field.Interface().(*codectypes.Any)
+ if !ok {
+ return nil, reflect.Value{}, errorsmod.Wrapf(errortypes.ErrPackAny, "%T", field.Interface())
+ }
+
+ anyWrapper := &cosmosAnyWrapper{
+ Type: anyData.TypeUrl,
+ }
+
+ if err := cdc.UnpackAny(anyData, &anyWrapper.Value); err != nil {
+ return nil, reflect.Value{}, errorsmod.Wrap(err, "failed to unpack Any in msg struct")
+ }
+
+ fieldType := reflect.TypeOf(anyWrapper)
+ field = reflect.ValueOf(anyWrapper)
+
+ return fieldType, field, nil
+}
+
+var (
+ hashType = reflect.TypeOf(common.Hash{})
+ addressType = reflect.TypeOf(common.Address{})
+ bigIntType = reflect.TypeOf(big.Int{})
+ cosmIntType = reflect.TypeOf(sdkmath.Int{})
+ cosmDecType = reflect.TypeOf(sdkmath.LegacyDec{})
+ timeType = reflect.TypeOf(time.Time{})
+ cosmosAnyType = reflect.TypeOf(&codectypes.Any{})
+ edType = reflect.TypeOf(ed25519.PubKey{})
+)
+
+// typToEth supports only basic types and arrays of basic types.
+// https://github.com/ethereum/EIPs/blob/master/EIPS/eip-712.md
+func typToEth(typ reflect.Type) string {
+ const str = "string"
+
+ switch typ.Kind() {
+ case reflect.String:
+ return str
+ case reflect.Bool:
+ return "bool"
+ case reflect.Int:
+ return "int64"
+ case reflect.Int8:
+ return "int8"
+ case reflect.Int16:
+ return "int16"
+ case reflect.Int32:
+ return "int32"
+ case reflect.Int64:
+ return "int64"
+ case reflect.Uint:
+ return "uint64"
+ case reflect.Uint8:
+ return "uint8"
+ case reflect.Uint16:
+ return "uint16"
+ case reflect.Uint32:
+ return "uint32"
+ case reflect.Uint64:
+ return "uint64"
+ case reflect.Slice:
+ ethName := typToEth(typ.Elem())
+ if len(ethName) > 0 {
+ return ethName + "[]"
+ }
+ case reflect.Array:
+ ethName := typToEth(typ.Elem())
+ if len(ethName) > 0 {
+ return ethName + "[]"
+ }
+ case reflect.Ptr:
+ if typ.Elem().ConvertibleTo(bigIntType) ||
+ typ.Elem().ConvertibleTo(timeType) ||
+ typ.Elem().ConvertibleTo(edType) ||
+ typ.Elem().ConvertibleTo(cosmDecType) ||
+ typ.Elem().ConvertibleTo(cosmIntType) {
+ return str
+ }
+ case reflect.Struct:
+ if typ.ConvertibleTo(hashType) ||
+ typ.ConvertibleTo(addressType) ||
+ typ.ConvertibleTo(bigIntType) ||
+ typ.ConvertibleTo(edType) ||
+ typ.ConvertibleTo(timeType) ||
+ typ.ConvertibleTo(cosmDecType) ||
+ typ.ConvertibleTo(cosmIntType) {
+ return str
+ }
+ }
+
+ return ""
+}
diff --git a/eth/ethereum/eip712/eip712_test.go b/eth/ethereum/eip712/eip712_test.go
new file mode 100644
index 000000000..ff9b89c4c
--- /dev/null
+++ b/eth/ethereum/eip712/eip712_test.go
@@ -0,0 +1,633 @@
+package eip712_test
+
+import (
+ "bytes"
+ "fmt"
+ "testing"
+
+ "cosmossdk.io/math"
+
+ chainparams "cosmossdk.io/simapp/params"
+ "github.com/NibiruChain/nibiru/eth/ethereum/eip712"
+ "github.com/cosmos/cosmos-sdk/client"
+ "github.com/ethereum/go-ethereum/crypto"
+ "github.com/ethereum/go-ethereum/signer/core/apitypes"
+ "github.com/tidwall/gjson"
+ "github.com/tidwall/sjson"
+
+ "github.com/NibiruChain/nibiru/eth/crypto/ethsecp256k1"
+ cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types"
+ sdk "github.com/cosmos/cosmos-sdk/types"
+
+ "github.com/NibiruChain/nibiru/app"
+ "github.com/NibiruChain/nibiru/cmd/ethclient"
+
+ "github.com/NibiruChain/nibiru/eth/encoding"
+ txtypes "github.com/cosmos/cosmos-sdk/types/tx"
+ "github.com/cosmos/cosmos-sdk/types/tx/signing"
+ authsigning "github.com/cosmos/cosmos-sdk/x/auth/signing"
+ banktypes "github.com/cosmos/cosmos-sdk/x/bank/types"
+
+ distributiontypes "github.com/cosmos/cosmos-sdk/x/distribution/types"
+ govtypesv1 "github.com/cosmos/cosmos-sdk/x/gov/types/v1"
+ govtypes "github.com/cosmos/cosmos-sdk/x/gov/types/v1beta1"
+ stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types"
+ "github.com/stretchr/testify/suite"
+)
+
+// Unit tests for single-signer EIP-712 signature verification. Multi-signature key verification tests are out-of-scope
+// here and included with the ante_tests.
+
+const (
+	// msgsFieldName is the JSON key holding the sign doc's message array.
+	msgsFieldName = "msgs"
+	// baseDenom is the denomination used for fees and coins in test messages.
+	baseDenom = "anibi"
+	// TESTNET_CHAIN_ID is the EIP-155-compatible chain ID prefix; tests
+	// append a "-1" revision suffix to form the full Cosmos chain ID.
+	TESTNET_CHAIN_ID = "nibiru_9000"
+)
+
+// EIP712TestSuite exercises single-signer EIP-712 signature verification
+// against both the current and legacy typed-data generation code paths.
+type EIP712TestSuite struct {
+	suite.Suite
+
+	config chainparams.EncodingConfig
+	clientCtx client.Context
+	// useLegacyEIP712TypedData routes verification through the Legacy*
+	// EIP-712 entry points when true.
+	useLegacyEIP712TypedData bool
+	denom string
+}
+
+// EIP712TestParams holds the shared transaction parameters (fee, signer
+// address, account number, sequence, and memo) reused across test cases.
+type EIP712TestParams struct {
+	fee txtypes.Fee
+	address sdk.AccAddress
+	accountNumber uint64
+	sequence uint64
+	memo string
+}
+
+// TestEIP712TestSuite runs the suite once per typed-data code path.
+// Note that we don't test the Legacy EIP-712 Extension, since that case
+// is sufficiently covered by the AnteHandler tests.
+func TestEIP712TestSuite(t *testing.T) {
+	for _, s := range []*EIP712TestSuite{
+		{},
+		{useLegacyEIP712TypedData: true},
+	} {
+		suite.Run(t, s)
+	}
+}
+
+// SetupTest initializes the encoding config, client context, denom, and
+// bech32 account prefix used by every test case.
+func (suite *EIP712TestSuite) SetupTest() {
+	suite.config = encoding.MakeConfig(app.ModuleBasics)
+	suite.clientCtx = client.Context{}.WithTxConfig(suite.config.TxConfig)
+	suite.denom = baseDenom
+
+	sdk.GetConfig().SetBech32PrefixForAccount(ethclient.Bech32Prefix, "")
+	// The eip712 package decodes sign docs with package-level singleton
+	// codecs, so the encoding config must be registered before any test.
+	eip712.SetEncodingConfig(suite.config)
+}
+
+// createTestAddress derives a fresh account address from a newly generated
+// ethsecp256k1 private key, for use as a random message participant.
+func (suite *EIP712TestSuite) createTestAddress() sdk.AccAddress {
+	ethPrivKey, _ := ethsecp256k1.GenerateKey()
+	ecdsaKey, err := ethPrivKey.ToECDSA()
+	suite.Require().NoError(err)
+
+	return crypto.PubkeyToAddress(ecdsaKey.PublicKey).Bytes()
+}
+
+// createTestKeyPair creates a random ethsecp256k1 keypair for signing and
+// verification, asserting that the public key satisfies cryptotypes.PubKey.
+func (suite *EIP712TestSuite) createTestKeyPair() (*ethsecp256k1.PrivKey, *ethsecp256k1.PubKey) {
+	privKey, err := ethsecp256k1.GenerateKey()
+	suite.Require().NoError(err)
+
+	pubKey := &ethsecp256k1.PubKey{Key: privKey.PubKey().Bytes()}
+	suite.Require().Implements((*cryptotypes.PubKey)(nil), pubKey)
+	return privKey, pubKey
+}
+
+// makeCoins builds an sdk.Coins collection holding exactly one coin with
+// the given denom and amount.
+func (suite *EIP712TestSuite) makeCoins(denom string, amount math.Int) sdk.Coins {
+	coin := sdk.NewCoin(denom, amount)
+	return sdk.NewCoins(coin)
+}
+
+// TestEIP712 runs a table of message combinations through both SIGN_MODE_DIRECT
+// and SIGN_MODE_LEGACY_AMINO_JSON, verifying EIP-712 signature generation and,
+// for the Amino sign mode, payload flattening and typed-data generation.
+func (suite *EIP712TestSuite) TestEIP712() {
+	suite.SetupTest()
+
+	// Every case is exercised under both supported sign modes.
+	signModes := []signing.SignMode{
+		signing.SignMode_SIGN_MODE_DIRECT,
+		signing.SignMode_SIGN_MODE_LEGACY_AMINO_JSON,
+	}
+
+	// Shared transaction parameters; individual cases supply only msgs,
+	// optional chain ID override, and optional timeout height.
+	params := EIP712TestParams{
+		fee: txtypes.Fee{
+			Amount:   suite.makeCoins(suite.denom, math.NewInt(2000)),
+			GasLimit: 20000,
+		},
+		address:       suite.createTestAddress(),
+		accountNumber: 25,
+		sequence:      78,
+		memo:          "",
+	}
+
+	testCases := []struct {
+		title         string
+		chainID       string
+		msgs          []sdk.Msg
+		timeoutHeight uint64
+		expectSuccess bool
+	}{
+		{
+			title: "Succeeds - Standard MsgSend",
+			msgs: []sdk.Msg{
+				banktypes.NewMsgSend(
+					suite.createTestAddress(),
+					suite.createTestAddress(),
+					suite.makeCoins(suite.denom, math.NewInt(1)),
+				),
+			},
+			expectSuccess: true,
+		},
+		{
+			title: "Succeeds - Standard MsgVote",
+			msgs: []sdk.Msg{
+				govtypes.NewMsgVote(
+					suite.createTestAddress(),
+					5,
+					govtypes.OptionNo,
+				),
+			},
+			expectSuccess: true,
+		},
+		{
+			title: "Succeeds - Standard MsgDelegate",
+			msgs: []sdk.Msg{
+				stakingtypes.NewMsgDelegate(
+					suite.createTestAddress(),
+					sdk.ValAddress(suite.createTestAddress()),
+					suite.makeCoins(suite.denom, math.NewInt(1))[0],
+				),
+			},
+			expectSuccess: true,
+		},
+		{
+			title: "Succeeds - Standard MsgWithdrawDelegationReward",
+			msgs: []sdk.Msg{
+				distributiontypes.NewMsgWithdrawDelegatorReward(
+					suite.createTestAddress(),
+					sdk.ValAddress(suite.createTestAddress()),
+				),
+			},
+			expectSuccess: true,
+		},
+		{
+			title: "Succeeds - Two Single-Signer MsgDelegate",
+			msgs: []sdk.Msg{
+				stakingtypes.NewMsgDelegate(
+					params.address,
+					sdk.ValAddress(suite.createTestAddress()),
+					suite.makeCoins(suite.denom, math.NewInt(1))[0],
+				),
+				stakingtypes.NewMsgDelegate(
+					params.address,
+					sdk.ValAddress(suite.createTestAddress()),
+					suite.makeCoins(suite.denom, math.NewInt(5))[0],
+				),
+			},
+			expectSuccess: true,
+		},
+		{
+			title: "Succeeds - Single-Signer MsgVote V1 with Omitted Value",
+			msgs: []sdk.Msg{
+				govtypesv1.NewMsgVote(
+					params.address,
+					5,
+					govtypesv1.VoteOption_VOTE_OPTION_NO,
+					"",
+				),
+			},
+			expectSuccess: true,
+		},
+		{
+			// Mixed message schemas only succeed on the non-legacy path.
+			title: "Succeeds - Single-Signer MsgSend + MsgVote",
+			msgs: []sdk.Msg{
+				govtypes.NewMsgVote(
+					params.address,
+					5,
+					govtypes.OptionNo,
+				),
+				banktypes.NewMsgSend(
+					params.address,
+					suite.createTestAddress(),
+					suite.makeCoins(suite.denom, math.NewInt(50)),
+				),
+			},
+			expectSuccess: !suite.useLegacyEIP712TypedData,
+		},
+		{
+			title: "Succeeds - Single-Signer 2x MsgVoteV1 with Different Schemas",
+			msgs: []sdk.Msg{
+				govtypesv1.NewMsgVote(
+					params.address,
+					5,
+					govtypesv1.VoteOption_VOTE_OPTION_NO,
+					"",
+				),
+				govtypesv1.NewMsgVote(
+					params.address,
+					10,
+					govtypesv1.VoteOption_VOTE_OPTION_YES,
+					"Has Metadata",
+				),
+			},
+			expectSuccess: !suite.useLegacyEIP712TypedData,
+		},
+		{
+			title: "Fails - Two MsgVotes with Different Signers",
+			msgs: []sdk.Msg{
+				govtypes.NewMsgVote(
+					suite.createTestAddress(),
+					5,
+					govtypes.OptionNo,
+				),
+				govtypes.NewMsgVote(
+					suite.createTestAddress(),
+					25,
+					govtypes.OptionAbstain,
+				),
+			},
+			expectSuccess: false,
+		},
+		{
+			title:         "Fails - Empty Transaction",
+			msgs:          []sdk.Msg{},
+			expectSuccess: false,
+		},
+		{
+			title:   "Fails - Invalid ChainID",
+			chainID: "invalidchainid",
+			msgs: []sdk.Msg{
+				govtypes.NewMsgVote(
+					suite.createTestAddress(),
+					5,
+					govtypes.OptionNo,
+				),
+			},
+			expectSuccess: false,
+		},
+		{
+			title: "Fails - Includes TimeoutHeight",
+			msgs: []sdk.Msg{
+				govtypes.NewMsgVote(
+					suite.createTestAddress(),
+					5,
+					govtypes.OptionNo,
+				),
+			},
+			timeoutHeight: 1000,
+			expectSuccess: false,
+		},
+		{
+			title: "Fails - Single Message / Multi-Signer",
+			msgs: []sdk.Msg{
+				banktypes.NewMsgMultiSend(
+					[]banktypes.Input{
+						banktypes.NewInput(
+							suite.createTestAddress(),
+							suite.makeCoins(suite.denom, math.NewInt(50)),
+						),
+						banktypes.NewInput(
+							suite.createTestAddress(),
+							suite.makeCoins(suite.denom, math.NewInt(50)),
+						),
+					},
+					[]banktypes.Output{
+						banktypes.NewOutput(
+							suite.createTestAddress(),
+							suite.makeCoins(suite.denom, math.NewInt(50)),
+						),
+						banktypes.NewOutput(
+							suite.createTestAddress(),
+							suite.makeCoins(suite.denom, math.NewInt(50)),
+						),
+					},
+				),
+			},
+			expectSuccess: false,
+		},
+	}
+
+	for _, tc := range testCases {
+		for _, signMode := range signModes {
+			suite.Run(tc.title, func() {
+				privKey, pubKey := suite.createTestKeyPair()
+
+				txBuilder := suite.clientCtx.TxConfig.NewTxBuilder()
+
+				txBuilder.SetGasLimit(params.fee.GasLimit)
+				txBuilder.SetFeeAmount(params.fee.Amount)
+
+				err := txBuilder.SetMsgs(tc.msgs...)
+				suite.Require().NoError(err)
+
+				txBuilder.SetMemo(params.memo)
+
+				// Prepare signature field with empty signatures
+				txSigData := signing.SingleSignatureData{
+					SignMode:  signMode,
+					Signature: nil,
+				}
+				txSig := signing.SignatureV2{
+					PubKey:   pubKey,
+					Data:     &txSigData,
+					Sequence: params.sequence,
+				}
+
+				err = txBuilder.SetSignatures([]signing.SignatureV2{txSig}...)
+				suite.Require().NoError(err)
+
+				// Use the testnet chain ID unless the case overrides it.
+				chainID := TESTNET_CHAIN_ID + "-1"
+				if tc.chainID != "" {
+					chainID = tc.chainID
+				}
+
+				if tc.timeoutHeight != 0 {
+					txBuilder.SetTimeoutHeight(tc.timeoutHeight)
+				}
+
+				signerData := authsigning.SignerData{
+					ChainID:       chainID,
+					AccountNumber: params.accountNumber,
+					Sequence:      params.sequence,
+					PubKey:        pubKey,
+					Address:       sdk.MustBech32ifyAddressBytes(ethclient.Bech32Prefix, pubKey.Bytes()),
+				}
+
+				bz, err := suite.clientCtx.TxConfig.SignModeHandler().GetSignBytes(
+					signMode,
+					signerData,
+					txBuilder.GetTx(),
+				)
+				suite.Require().NoError(err)
+
+				suite.verifyEIP712SignatureVerification(tc.expectSuccess, *privKey, *pubKey, bz)
+
+				// Verify payload flattening only if the payload is in valid JSON format
+				if signMode == signing.SignMode_SIGN_MODE_LEGACY_AMINO_JSON {
+					suite.verifySignDocFlattening(bz)
+
+					if tc.expectSuccess {
+						suite.verifyBasicTypedData(bz)
+					}
+				}
+			})
+		}
+	}
+}
+
+// verifyEIP712SignatureVerification verifies that the payload passes signature verification if signed as its EIP-712 representation.
+func (suite *EIP712TestSuite) verifyEIP712SignatureVerification(expectedSuccess bool, privKey ethsecp256k1.PrivKey, pubKey ethsecp256k1.PubKey, signBytes []byte) {
+	eip712Bytes, err := eip712.GetEIP712BytesForMsg(signBytes)
+
+	// The legacy suite derives the EIP-712 bytes via the legacy path instead.
+	if suite.useLegacyEIP712TypedData {
+		eip712Bytes, err = eip712.LegacyGetEIP712BytesForMsg(signBytes)
+	}
+
+	// Failing cases are expected to error during byte generation itself.
+	if !expectedSuccess {
+		suite.Require().Error(err)
+		return
+	}
+
+	suite.Require().NoError(err)
+
+	sig, err := privKey.Sign(eip712Bytes)
+	suite.Require().NoError(err)
+
+	// Verify against original payload bytes. This should pass, even though it is not
+	// the original message that was signed.
+	res := pubKey.VerifySignature(signBytes, sig)
+	suite.Require().True(res)
+
+	// Verify against the signed EIP-712 bytes. This should pass, since it is the message signed.
+	res = pubKey.VerifySignature(eip712Bytes, sig)
+	suite.Require().True(res)
+
+	// Verify against random bytes to ensure it does not pass unexpectedly (sanity check).
+	randBytes := make([]byte, len(signBytes))
+	copy(randBytes, signBytes)
+	// Change the first element of randBytes so the payload no longer matches
+	// the signature.
+	randBytes[0] = (signBytes[0] + 10) % 255
+	res = pubKey.VerifySignature(randBytes, sig)
+	suite.Require().False(res)
+}
+
+// verifySignDocFlattening tests the flattening algorithm against the sign
+// doc's JSON payload, using verifyPayloadAgainstFlattened.
+func (suite *EIP712TestSuite) verifySignDocFlattening(signDoc []byte) {
+	parsed := gjson.ParseBytes(signDoc)
+	suite.Require().True(parsed.IsObject())
+
+	flatPayload, _, flattenErr := eip712.FlattenPayloadMessages(parsed)
+	suite.Require().NoError(flattenErr)
+	suite.verifyPayloadAgainstFlattened(parsed, flatPayload)
+}
+
+// verifyPayloadAgainstFlattened compares a payload against its flattened
+// counterpart to ensure that the flattening algorithm behaved as expected.
+func (suite *EIP712TestSuite) verifyPayloadAgainstFlattened(payload gjson.Result, flattened gjson.Result) {
+	originalMap, okOriginal := payload.Value().(map[string]interface{})
+	suite.Require().True(okOriginal)
+
+	flatMap, okFlat := flattened.Value().(map[string]interface{})
+	suite.Require().True(okFlat)
+
+	suite.verifyPayloadMapAgainstFlattenedMap(originalMap, flatMap)
+}
+
+// verifyPayloadMapAgainstFlattenedMap directly compares two JSON maps in Go
+// representations to test flattening: each original "msgs" element must appear
+// as a top-level "msg{i}" key, all other keys must carry over unchanged.
+func (suite *EIP712TestSuite) verifyPayloadMapAgainstFlattenedMap(original map[string]interface{}, flattened map[string]interface{}) {
+	interfaceMessages, ok := original[msgsFieldName]
+	suite.Require().True(ok)
+
+	messages, ok := interfaceMessages.([]interface{})
+	suite.Require().True(ok)
+
+	// Verify message contents
+	for i, msg := range messages {
+		flattenedMsg, ok := flattened[fmt.Sprintf("msg%d", i)]
+		suite.Require().True(ok)
+
+		flattenedMsgJSON, ok := flattenedMsg.(map[string]interface{})
+		suite.Require().True(ok)
+
+		suite.Require().Equal(flattenedMsgJSON, msg)
+	}
+
+	// Verify new payload does not have msgs field
+	_, ok = flattened[msgsFieldName]
+	suite.Require().False(ok)
+
+	// Verify number of total keys
+	numKeysOriginal := len(original)
+	numKeysFlattened := len(flattened)
+	numMessages := len(messages)
+
+	// + N keys (one per message), then -1 for the removed "msgs" array
+	suite.Require().Equal(numKeysFlattened, numKeysOriginal+numMessages-1)
+
+	// Verify contents of remaining keys
+	for k, obj := range original {
+		if k == msgsFieldName {
+			continue
+		}
+
+		flattenedObj, ok := flattened[k]
+		suite.Require().True(ok)
+
+		suite.Require().Equal(obj, flattenedObj)
+	}
+}
+
+// verifyBasicTypedData performs basic verification on the TypedData generation:
+// the generated TypedData's Message must equal the flattened sign doc payload.
+func (suite *EIP712TestSuite) verifyBasicTypedData(signDoc []byte) {
+	typedData, err := eip712.GetEIP712TypedDataForMsg(signDoc)
+
+	suite.Require().NoError(err)
+
+	jsonPayload := gjson.ParseBytes(signDoc)
+	suite.Require().True(jsonPayload.IsObject())
+
+	flattened, _, err := eip712.FlattenPayloadMessages(jsonPayload)
+	suite.Require().NoError(err)
+	suite.Require().True(flattened.IsObject())
+
+	flattenedMsgMap, ok := flattened.Value().(map[string]interface{})
+	suite.Require().True(ok)
+
+	suite.Require().Equal(typedData.Message, flattenedMsgMap)
+}
+
+// TestFlattenPayloadErrorHandling tests error handling in TypedData generation,
+// specifically regarding malformed or missing message payloads.
+func (suite *EIP712TestSuite) TestFlattenPayloadErrorHandling() {
+	// No msgs
+	_, _, err := eip712.FlattenPayloadMessages(gjson.Parse(""))
+	suite.Require().ErrorContains(err, "no messages found")
+
+	// Non-array Msgs
+	_, _, err = eip712.FlattenPayloadMessages(gjson.Parse(`{"msgs": 10}`))
+	suite.Require().ErrorContains(err, "array of messages")
+
+	// Array with non-object items
+	_, _, err = eip712.FlattenPayloadMessages(gjson.Parse(`{"msgs": [10, 20]}`))
+	suite.Require().ErrorContains(err, "not valid JSON")
+
+	// Malformed payload: a pre-existing "msg0" key collides with the key
+	// the flattening algorithm would generate.
+	malformed, err := sjson.Set(suite.generateRandomPayload(2).Raw, "msg0", 20)
+	suite.Require().NoError(err)
+	_, _, err = eip712.FlattenPayloadMessages(gjson.Parse(malformed))
+	suite.Require().ErrorContains(err, "malformed payload")
+}
+
+// TestTypedDataErrorHandling tests error handling for TypedData generation
+// in the main algorithm (WrapTxToTypedData).
+func (suite *EIP712TestSuite) TestTypedDataErrorHandling() {
+	// Empty JSON
+	_, err := eip712.WrapTxToTypedData(0, make([]byte, 0))
+	suite.Require().ErrorContains(err, "invalid JSON")
+
+	_, err = eip712.WrapTxToTypedData(0, []byte(gjson.Parse(`{"msgs": 10}`).Raw))
+	suite.Require().ErrorContains(err, "array of messages")
+
+	// Invalid message 'type'
+	_, err = eip712.WrapTxToTypedData(0, []byte(gjson.Parse(`{"msgs": [{ "type": 10 }] }`).Raw))
+	suite.Require().ErrorContains(err, "message type value")
+
+	// Max duplicate type recursion depth: build 1001 messages that share a
+	// type name but have distinct schemas, forcing a new numbered type per
+	// message until the limit is exceeded.
+	messagesArr := new(bytes.Buffer)
+	maxRecursionDepth := 1001
+
+	messagesArr.WriteString("[")
+	for i := 0; i < maxRecursionDepth; i++ {
+		messagesArr.WriteString(fmt.Sprintf(`{ "type": "msgType", "value": { "field%v": 10 } }`, i))
+		if i != maxRecursionDepth-1 {
+			messagesArr.WriteString(",")
+		}
+	}
+	messagesArr.WriteString("]")
+
+	_, err = eip712.WrapTxToTypedData(0, []byte(fmt.Sprintf(`{ "msgs": %v }`, messagesArr)))
+	suite.Require().ErrorContains(err, "maximum number of duplicates")
+}
+
+// TestTypedDataEdgeCases tests certain interesting edge cases to ensure that they work
+// (or don't work) as expected.
+func (suite *EIP712TestSuite) TestTypedDataEdgeCases() {
+	// Type without '/' separator
+	typedData, err := eip712.WrapTxToTypedData(0, []byte(gjson.Parse(`{"msgs": [{ "type": "MsgSend", "value": { "field": 10 } }] }`).Raw))
+	suite.Require().NoError(err)
+	types := typedData.Types["TypeMsgSend0"]
+	suite.Require().Greater(len(types), 0)
+
+	// Null value
+	typedData, err = eip712.WrapTxToTypedData(0, []byte(gjson.Parse(`{"msgs": [{ "type": "MsgSend", "value": { "field": null } }] }`).Raw))
+	suite.Require().NoError(err)
+	types = typedData.Types["TypeValue0"]
+	// Skip null type, since we don't expect any in the payload
+	suite.Require().Equal(len(types), 0)
+
+	// Boolean value
+	typedData, err = eip712.WrapTxToTypedData(0, []byte(gjson.Parse(`{"msgs": [{ "type": "MsgSend", "value": { "field": true } }] }`).Raw))
+	suite.Require().NoError(err)
+	types = typedData.Types["TypeValue0"]
+	suite.Require().Equal(len(types), 1)
+	suite.Require().Equal(types[0], apitypes.Type{
+		Name: "field",
+		Type: "bool",
+	})
+
+	// Empty array defaults to a string array type.
+	typedData, err = eip712.WrapTxToTypedData(0, []byte(gjson.Parse(`{"msgs": [{ "type": "MsgSend", "value": { "field": [] } }] }`).Raw))
+	suite.Require().NoError(err)
+	types = typedData.Types["TypeValue0"]
+	suite.Require().Equal(types[0], apitypes.Type{
+		Name: "field",
+		Type: "string[]",
+	})
+
+	// Simple arrays
+	typedData, err = eip712.WrapTxToTypedData(0, []byte(gjson.Parse(`{"msgs": [{ "type": "MsgSend", "value": { "array": [1, 2, 3] } }] }`).Raw))
+	suite.Require().NoError(err)
+	types = typedData.Types["TypeValue0"]
+	suite.Require().Equal(len(types), 1)
+	suite.Require().Equal(types[0], apitypes.Type{
+		Name: "array",
+		Type: "int64[]",
+	})
+
+	// Nested arrays (EIP-712 does not support nested arrays)
+	typedData, err = eip712.WrapTxToTypedData(0, []byte(gjson.Parse(`{"msgs": [{ "type": "MsgSend", "value": { "array": [[1, 2, 3], [1, 2]] } }] }`).Raw))
+	suite.Require().NoError(err)
+	types = typedData.Types["TypeValue0"]
+	suite.Require().Equal(len(types), 0)
+}
+
+// TestTypedDataGeneration tests certain qualities about the output Types
+// representation: identical message schemas share one numbered type while
+// different schemas get distinct numbered types.
+func (suite *EIP712TestSuite) TestTypedDataGeneration() {
+	// Multiple messages with the same schema should share one type, so the
+	// second numbered type ("...1") must not exist.
+	payloadRaw := `{ "msgs": [{ "type": "msgType", "value": { "field1": 10 }}, { "type": "msgType", "value": { "field1": 20 }}] }`
+
+	typedData, err := eip712.WrapTxToTypedData(0, []byte(payloadRaw))
+	suite.Require().NoError(err)
+	suite.Require().True(typedData.Types["TypemsgType1"] == nil)
+
+	// Multiple messages with different schemas should have different types
+	payloadRaw = `{ "msgs": [{ "type": "msgType", "value": { "field1": 10 }}, { "type": "msgType", "value": { "field2": 20 }}] }`
+
+	typedData, err = eip712.WrapTxToTypedData(0, []byte(payloadRaw))
+	suite.Require().NoError(err)
+	suite.Require().False(typedData.Types["TypemsgType1"] == nil)
+}
diff --git a/eth/ethereum/eip712/encoding.go b/eth/ethereum/eip712/encoding.go
new file mode 100644
index 000000000..e384565f4
--- /dev/null
+++ b/eth/ethereum/eip712/encoding.go
@@ -0,0 +1,239 @@
+// Copyright 2023 Unique Divine and Nibi, Inc.
+package eip712
+
+import (
+ "errors"
+ "fmt"
+
+ "cosmossdk.io/simapp/params"
+ "github.com/cosmos/cosmos-sdk/x/auth/migrations/legacytx"
+
+ sdk "github.com/cosmos/cosmos-sdk/types"
+ txTypes "github.com/cosmos/cosmos-sdk/types/tx"
+
+ ethtypes "github.com/NibiruChain/nibiru/eth/types"
+ apitypes "github.com/ethereum/go-ethereum/signer/core/apitypes"
+
+ "github.com/cosmos/cosmos-sdk/codec"
+)
+
+// Package-level codec singletons populated by SetEncodingConfig. Both must
+// be set before any sign-doc decoding; validateCodecInit guards against
+// use before initialization.
+var (
+	protoCodec codec.ProtoCodecMarshaler
+	aminoCodec *codec.LegacyAmino
+)
+
+// SetEncodingConfig sets the encoding config on the singleton codecs (Amino and
+// Protobuf). The process of unmarshaling SignDoc bytes into a SignDoc object
+// requires having a codec populated with all relevant message types. As a
+// result, this method must be called on app initialization with the app's
+// encoding config.
+func SetEncodingConfig(cfg params.EncodingConfig) {
+	aminoCodec = cfg.Amino
+	protoCodec = codec.NewProtoCodec(cfg.InterfaceRegistry)
+}
+
+// GetEIP712BytesForMsg returns the EIP-712 object bytes for the given SignDoc bytes by decoding the bytes into
+// an EIP-712 object, then converting via WrapTxToTypedData. See https://eips.ethereum.org/EIPS/eip-712 for more.
+func GetEIP712BytesForMsg(signDocBytes []byte) ([]byte, error) {
+ typedData, err := GetEIP712TypedDataForMsg(signDocBytes)
+ if err != nil {
+ return nil, err
+ }
+
+ _, rawData, err := apitypes.TypedDataAndHash(typedData)
+ if err != nil {
+ return nil, fmt.Errorf("could not get EIP-712 object bytes: %w", err)
+ }
+
+ return []byte(rawData), nil
+}
+
+// GetEIP712TypedDataForMsg returns the EIP-712 TypedData representation for either
+// Amino or Protobuf encoded signature doc bytes.
+func GetEIP712TypedDataForMsg(signDocBytes []byte) (apitypes.TypedData, error) {
+	// Attempt to decode as both Amino and Protobuf since the message format is unknown.
+	// If either decode works, we can move forward with the corresponding typed data.
+	typedDataAmino, errAmino := decodeAminoSignDoc(signDocBytes)
+	if errAmino == nil && isValidEIP712Payload(typedDataAmino) {
+		return typedDataAmino, nil
+	}
+	typedDataProtobuf, errProtobuf := decodeProtobufSignDoc(signDocBytes)
+	if errProtobuf == nil && isValidEIP712Payload(typedDataProtobuf) {
+		return typedDataProtobuf, nil
+	}
+
+	// Surface both decode errors, since we cannot tell which encoding the
+	// caller intended.
+	return apitypes.TypedData{}, fmt.Errorf("could not decode sign doc as either Amino or Protobuf.\n amino: %v\n protobuf: %v", errAmino, errProtobuf)
+}
+
+// isValidEIP712Payload ensures that the given TypedData does not contain empty
+// fields from an improper initialization.
+func isValidEIP712Payload(typedData apitypes.TypedData) bool {
+	switch {
+	case len(typedData.Message) == 0:
+		return false
+	case len(typedData.Types) == 0:
+		return false
+	case typedData.PrimaryType == "":
+		return false
+	case typedData.Domain == (apitypes.TypedDataDomain{}):
+		return false
+	default:
+		return true
+	}
+}
+
+// decodeAminoSignDoc attempts to decode the provided sign doc (bytes) as an Amino payload
+// and returns a signable EIP-712 TypedData object.
+func decodeAminoSignDoc(signDocBytes []byte) (apitypes.TypedData, error) {
+	// Ensure codecs have been initialized
+	if err := validateCodecInit(); err != nil {
+		return apitypes.TypedData{}, err
+	}
+
+	var aminoDoc legacytx.StdSignDoc
+	if err := aminoCodec.UnmarshalJSON(signDocBytes, &aminoDoc); err != nil {
+		return apitypes.TypedData{}, err
+	}
+
+	// Decode the fee field to verify it unmarshals; the decoded value is not
+	// used further in this function.
+	var fees legacytx.StdFee
+	if err := aminoCodec.UnmarshalJSON(aminoDoc.Fee, &fees); err != nil {
+		return apitypes.TypedData{}, err
+	}
+
+	// Validate payload messages
+	msgs := make([]sdk.Msg, len(aminoDoc.Msgs))
+	for i, jsonMsg := range aminoDoc.Msgs {
+		var m sdk.Msg
+		if err := aminoCodec.UnmarshalJSON(jsonMsg, &m); err != nil {
+			return apitypes.TypedData{}, fmt.Errorf("failed to unmarshal sign doc message: %w", err)
+		}
+		msgs[i] = m
+	}
+
+	if err := validatePayloadMessages(msgs); err != nil {
+		return apitypes.TypedData{}, err
+	}
+
+	// The EIP-155 chain ID is parsed out of the Cosmos chain ID string.
+	chainID, err := ethtypes.ParseChainID(aminoDoc.ChainID)
+	if err != nil {
+		return apitypes.TypedData{}, errors.New("invalid chain ID passed as argument")
+	}
+
+	typedData, err := WrapTxToTypedData(
+		chainID.Uint64(),
+		signDocBytes,
+	)
+	if err != nil {
+		return apitypes.TypedData{}, fmt.Errorf("could not convert to EIP712 representation: %w", err)
+	}
+
+	return typedData, nil
+}
+
+// decodeProtobufSignDoc attempts to decode the provided sign doc (bytes) as a Protobuf payload
+// and returns a signable EIP-712 TypedData object.
+func decodeProtobufSignDoc(signDocBytes []byte) (apitypes.TypedData, error) {
+	// Ensure codecs have been initialized
+	if err := validateCodecInit(); err != nil {
+		return apitypes.TypedData{}, err
+	}
+
+	signDoc := &txTypes.SignDoc{}
+	if err := signDoc.Unmarshal(signDocBytes); err != nil {
+		return apitypes.TypedData{}, err
+	}
+
+	authInfo := &txTypes.AuthInfo{}
+	if err := authInfo.Unmarshal(signDoc.AuthInfoBytes); err != nil {
+		return apitypes.TypedData{}, err
+	}
+
+	body := &txTypes.TxBody{}
+	if err := body.Unmarshal(signDoc.BodyBytes); err != nil {
+		return apitypes.TypedData{}, err
+	}
+
+	// Until support for these fields is added, throw an error at their presence
+	if body.TimeoutHeight != 0 || len(body.ExtensionOptions) != 0 || len(body.NonCriticalExtensionOptions) != 0 {
+		return apitypes.TypedData{}, errors.New("body contains unsupported fields: TimeoutHeight, ExtensionOptions, or NonCriticalExtensionOptions")
+	}
+
+	// EIP-712 supports a single signing key only.
+	if len(authInfo.SignerInfos) != 1 {
+		return apitypes.TypedData{}, fmt.Errorf("invalid number of signer infos provided, expected 1 got %v", len(authInfo.SignerInfos))
+	}
+
+	// Validate payload messages
+	msgs := make([]sdk.Msg, len(body.Messages))
+	for i, protoMsg := range body.Messages {
+		var m sdk.Msg
+		if err := protoCodec.UnpackAny(protoMsg, &m); err != nil {
+			return apitypes.TypedData{}, fmt.Errorf("could not unpack message object with error %w", err)
+		}
+		msgs[i] = m
+	}
+
+	if err := validatePayloadMessages(msgs); err != nil {
+		return apitypes.TypedData{}, err
+	}
+
+	signerInfo := authInfo.SignerInfos[0]
+
+	chainID, err := ethtypes.ParseChainID(signDoc.ChainId)
+	if err != nil {
+		return apitypes.TypedData{}, fmt.Errorf("invalid chain ID passed as argument: %w", err)
+	}
+
+	stdFee := &legacytx.StdFee{
+		Amount: authInfo.Fee.Amount,
+		Gas:    authInfo.Fee.GasLimit,
+	}
+
+	tip := authInfo.Tip
+
+	// WrapTxToTypedData expects the payload as an Amino Sign Doc, so the
+	// Protobuf fields are re-encoded with the legacy Amino sign-bytes format.
+	signBytes := legacytx.StdSignBytes(
+		signDoc.ChainId,
+		signDoc.AccountNumber,
+		signerInfo.Sequence,
+		body.TimeoutHeight,
+		*stdFee,
+		msgs,
+		body.Memo,
+		tip,
+	)
+
+	typedData, err := WrapTxToTypedData(
+		chainID.Uint64(),
+		signBytes,
+	)
+	if err != nil {
+		return apitypes.TypedData{}, err
+	}
+
+	return typedData, nil
+}
+
+// validateCodecInit ensures that both Amino and Protobuf encoding codecs have
+// been set on app init, so the module does not panic if either codec is not
+// found.
+func validateCodecInit() error {
+	if aminoCodec != nil && protoCodec != nil {
+		return nil
+	}
+	return errors.New("missing codec: codecs have not been properly initialized using SetEncodingConfig")
+}
+
+// validatePayloadMessages ensures that the transaction messages can be
+// represented in an EIP-712 encoding by checking that messages exist and
+// share a single signer.
+func validatePayloadMessages(msgs []sdk.Msg) error {
+	if len(msgs) == 0 {
+		return errors.New("unable to build EIP-712 payload: transaction does not contain any messages")
+	}
+
+	var msgSigner sdk.AccAddress
+
+	for i, m := range msgs {
+		// EIP-712 payloads are signed by a single key, so each message must
+		// declare exactly one signer.
+		signers := m.GetSigners()
+		if len(signers) != 1 {
+			return errors.New("unable to build EIP-712 payload: expect exactly 1 signer")
+		}
+
+		if i == 0 {
+			// The first message fixes the expected signer for the rest.
+			msgSigner = signers[0]
+			continue
+		}
+
+		if !msgSigner.Equals(signers[0]) {
+			return errors.New("unable to build EIP-712 payload: multiple signers detected")
+		}
+	}
+
+	return nil
+}
diff --git a/eth/ethereum/eip712/encoding_legacy.go b/eth/ethereum/eip712/encoding_legacy.go
new file mode 100644
index 000000000..57e784f46
--- /dev/null
+++ b/eth/ethereum/eip712/encoding_legacy.go
@@ -0,0 +1,266 @@
+// Copyright 2023 Unique Divine and Nibi, Inc.
+package eip712
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+
+ "github.com/cosmos/cosmos-sdk/x/auth/migrations/legacytx"
+
+ sdk "github.com/cosmos/cosmos-sdk/types"
+ txTypes "github.com/cosmos/cosmos-sdk/types/tx"
+
+ ethtypes "github.com/NibiruChain/nibiru/eth/types"
+ apitypes "github.com/ethereum/go-ethereum/signer/core/apitypes"
+)
+
+// aminoMessage mirrors the canonical Amino JSON encoding of a message:
+// a "type" tag plus an opaque "value" body.
+type aminoMessage struct {
+	Type string `json:"type"`
+	Value interface{} `json:"value"`
+}
+
+// LegacyGetEIP712BytesForMsg returns the EIP-712 object bytes for the given SignDoc bytes by decoding the bytes into
+// an EIP-712 object, then converting via LegacyWrapTxToTypedData. See https://eips.ethereum.org/EIPS/eip-712 for more.
+func LegacyGetEIP712BytesForMsg(signDocBytes []byte) ([]byte, error) {
+ typedData, err := LegacyGetEIP712TypedDataForMsg(signDocBytes)
+ if err != nil {
+ return nil, err
+ }
+
+ _, rawData, err := apitypes.TypedDataAndHash(typedData)
+ if err != nil {
+ return nil, fmt.Errorf("could not get EIP-712 object bytes: %w", err)
+ }
+
+ return []byte(rawData), nil
+}
+
+// LegacyGetEIP712TypedDataForMsg returns the EIP-712 TypedData representation for either
+// Amino or Protobuf encoded signature doc bytes.
+func LegacyGetEIP712TypedDataForMsg(signDocBytes []byte) (apitypes.TypedData, error) {
+	// Attempt to decode as both Amino and Protobuf since the message format is unknown.
+	// If either decode works, we can move forward with the corresponding typed data.
+	typedDataAmino, errAmino := legacyDecodeAminoSignDoc(signDocBytes)
+	if errAmino == nil && isValidEIP712Payload(typedDataAmino) {
+		return typedDataAmino, nil
+	}
+	typedDataProtobuf, errProtobuf := legacyDecodeProtobufSignDoc(signDocBytes)
+	if errProtobuf == nil && isValidEIP712Payload(typedDataProtobuf) {
+		return typedDataProtobuf, nil
+	}
+
+	// Surface both decode errors, since we cannot tell which encoding the
+	// caller intended.
+	return apitypes.TypedData{}, fmt.Errorf("could not decode sign doc as either Amino or Protobuf.\n amino: %v\n protobuf: %v", errAmino, errProtobuf)
+}
+
+// legacyDecodeAminoSignDoc attempts to decode the provided sign doc (bytes) as an Amino payload
+// and returns a signable EIP-712 TypedData object.
+func legacyDecodeAminoSignDoc(signDocBytes []byte) (apitypes.TypedData, error) {
+	// Ensure codecs have been initialized
+	if err := validateCodecInit(); err != nil {
+		return apitypes.TypedData{}, err
+	}
+
+	var aminoDoc legacytx.StdSignDoc
+	if err := aminoCodec.UnmarshalJSON(signDocBytes, &aminoDoc); err != nil {
+		return apitypes.TypedData{}, err
+	}
+
+	// Decode the fee field to verify it unmarshals; the decoded value is not
+	// used further in this function.
+	var fees legacytx.StdFee
+	if err := aminoCodec.UnmarshalJSON(aminoDoc.Fee, &fees); err != nil {
+		return apitypes.TypedData{}, err
+	}
+
+	// Validate payload messages
+	msgs := make([]sdk.Msg, len(aminoDoc.Msgs))
+	for i, jsonMsg := range aminoDoc.Msgs {
+		var m sdk.Msg
+		if err := aminoCodec.UnmarshalJSON(jsonMsg, &m); err != nil {
+			return apitypes.TypedData{}, fmt.Errorf("failed to unmarshal sign doc message: %w", err)
+		}
+		msgs[i] = m
+	}
+
+	if err := legacyValidatePayloadMessages(msgs); err != nil {
+		return apitypes.TypedData{}, err
+	}
+
+	// Use first message for fee payer and type inference
+	msg := msgs[0]
+
+	// By convention, the fee payer is the first address in the list of signers.
+	feePayer := msg.GetSigners()[0]
+	feeDelegation := &FeeDelegationOptions{
+		FeePayer: feePayer,
+	}
+
+	chainID, err := ethtypes.ParseChainID(aminoDoc.ChainID)
+	if err != nil {
+		return apitypes.TypedData{}, errors.New("invalid chain ID passed as argument")
+	}
+
+	typedData, err := LegacyWrapTxToTypedData(
+		protoCodec,
+		chainID.Uint64(),
+		msg,
+		signDocBytes,
+		feeDelegation,
+	)
+	if err != nil {
+		return apitypes.TypedData{}, fmt.Errorf("could not convert to EIP712 representation: %w", err)
+	}
+
+	return typedData, nil
+}
+
+// legacyDecodeProtobufSignDoc attempts to decode the provided sign doc (bytes) as a Protobuf payload
+// and returns a signable EIP-712 TypedData object.
+func legacyDecodeProtobufSignDoc(signDocBytes []byte) (apitypes.TypedData, error) {
+	// Ensure codecs have been initialized
+	if err := validateCodecInit(); err != nil {
+		return apitypes.TypedData{}, err
+	}
+
+	signDoc := &txTypes.SignDoc{}
+	if err := signDoc.Unmarshal(signDocBytes); err != nil {
+		return apitypes.TypedData{}, err
+	}
+
+	authInfo := &txTypes.AuthInfo{}
+	if err := authInfo.Unmarshal(signDoc.AuthInfoBytes); err != nil {
+		return apitypes.TypedData{}, err
+	}
+
+	body := &txTypes.TxBody{}
+	if err := body.Unmarshal(signDoc.BodyBytes); err != nil {
+		return apitypes.TypedData{}, err
+	}
+
+	// Until support for these fields is added, throw an error at their presence
+	if body.TimeoutHeight != 0 || len(body.ExtensionOptions) != 0 || len(body.NonCriticalExtensionOptions) != 0 {
+		return apitypes.TypedData{}, errors.New("body contains unsupported fields: TimeoutHeight, ExtensionOptions, or NonCriticalExtensionOptions")
+	}
+
+	// EIP-712 supports a single signing key only.
+	if len(authInfo.SignerInfos) != 1 {
+		return apitypes.TypedData{}, fmt.Errorf("invalid number of signer infos provided, expected 1 got %v", len(authInfo.SignerInfos))
+	}
+
+	// Validate payload messages
+	msgs := make([]sdk.Msg, len(body.Messages))
+	for i, protoMsg := range body.Messages {
+		var m sdk.Msg
+		if err := protoCodec.UnpackAny(protoMsg, &m); err != nil {
+			return apitypes.TypedData{}, fmt.Errorf("could not unpack message object with error %w", err)
+		}
+		msgs[i] = m
+	}
+
+	if err := legacyValidatePayloadMessages(msgs); err != nil {
+		return apitypes.TypedData{}, err
+	}
+
+	// Use first message for fee payer and type inference
+	msg := msgs[0]
+
+	signerInfo := authInfo.SignerInfos[0]
+
+	chainID, err := ethtypes.ParseChainID(signDoc.ChainId)
+	if err != nil {
+		return apitypes.TypedData{}, fmt.Errorf("invalid chain ID passed as argument: %w", err)
+	}
+
+	stdFee := &legacytx.StdFee{
+		Amount: authInfo.Fee.Amount,
+		Gas:    authInfo.Fee.GasLimit,
+	}
+
+	// By convention, the fee payer is the first signer of the first message.
+	feePayer := msg.GetSigners()[0]
+	feeDelegation := &FeeDelegationOptions{
+		FeePayer: feePayer,
+	}
+
+	tip := authInfo.Tip
+
+	// WrapTxToTypedData expects the payload as an Amino Sign Doc, so the
+	// Protobuf fields are re-encoded with the legacy Amino sign-bytes format.
+	signBytes := legacytx.StdSignBytes(
+		signDoc.ChainId,
+		signDoc.AccountNumber,
+		signerInfo.Sequence,
+		body.TimeoutHeight,
+		*stdFee,
+		msgs,
+		body.Memo,
+		tip,
+	)
+
+	typedData, err := LegacyWrapTxToTypedData(
+		protoCodec,
+		chainID.Uint64(),
+		msg,
+		signBytes,
+		feeDelegation,
+	)
+	if err != nil {
+		return apitypes.TypedData{}, err
+	}
+
+	return typedData, nil
+}
+
+// legacyValidatePayloadMessages ensures that the transaction messages can be
+// represented in an EIP-712 encoding by checking that messages exist, are of
+// the same type, and share a single signer.
+func legacyValidatePayloadMessages(msgs []sdk.Msg) error {
+	if len(msgs) == 0 {
+		return errors.New("unable to build EIP-712 payload: transaction does not contain any messages")
+	}
+
+	var msgType string
+	var msgSigner sdk.AccAddress
+
+	for i, m := range msgs {
+		t, err := getMsgType(m)
+		if err != nil {
+			return err
+		}
+
+		// The legacy EIP-712 representation supports a single signing key,
+		// so each message must declare exactly one signer.
+		signers := m.GetSigners()
+		if len(signers) != 1 {
+			return errors.New("unable to build EIP-712 payload: expect exactly 1 signer")
+		}
+
+		if i == 0 {
+			// The first message fixes the expected type and signer.
+			msgType = t
+			msgSigner = signers[0]
+			continue
+		}
+
+		if t != msgType {
+			return errors.New("unable to build EIP-712 payload: different types of messages detected")
+		}
+
+		if !msgSigner.Equals(signers[0]) {
+			return errors.New("unable to build EIP-712 payload: multiple signers detected")
+		}
+	}
+
+	return nil
+}
+
+// getMsgType returns the message type prefix for the given Cosmos SDK Msg
+func getMsgType(msg sdk.Msg) (string, error) {
+ jsonBytes, err := aminoCodec.MarshalJSON(msg)
+ if err != nil {
+ return "", err
+ }
+
+ var jsonMsg aminoMessage
+ if err := json.Unmarshal(jsonBytes, &jsonMsg); err != nil {
+ return "", err
+ }
+
+ // Verify Type was successfully filled in
+ if jsonMsg.Type == "" {
+ return "", errors.New("could not decode message: type is missing")
+ }
+
+ return jsonMsg.Type, nil
+}
diff --git a/eth/ethereum/eip712/message.go b/eth/ethereum/eip712/message.go
new file mode 100644
index 000000000..ce92259da
--- /dev/null
+++ b/eth/ethereum/eip712/message.go
@@ -0,0 +1,148 @@
+// Copyright 2023 Unique Divine and Nibi, Inc.
+package eip712
+
+import (
+ "fmt"
+
+ errorsmod "cosmossdk.io/errors"
+ errortypes "github.com/cosmos/cosmos-sdk/types/errors"
+
+ "github.com/tidwall/gjson"
+ "github.com/tidwall/sjson"
+)
+
+// eip712MessagePayload bundles the flattened EIP-712 payload together with the
+// data needed to build its type schema.
+type eip712MessagePayload struct {
+	// payload is the flattened JSON payload ("msgs" array replaced by msg{i} fields).
+	payload gjson.Result
+	// numPayloadMsgs is how many messages were flattened out of the "msgs" array.
+	numPayloadMsgs int
+	// message is the flattened payload parsed as a generic JSON map.
+	message map[string]interface{}
+}
+
+const (
+	// payloadMsgsField is the payload key under which the messages array lives.
+	payloadMsgsField = "msgs"
+)
+
+// createEIP712MessagePayload generates the EIP-712 message payload
+// corresponding to the input data.
+func createEIP712MessagePayload(data []byte) (eip712MessagePayload, error) {
+	rawPayload, err := unmarshalBytesToJSONObject(data)
+	if err != nil {
+		return eip712MessagePayload{}, err
+	}
+
+	flattened, msgCount, err := FlattenPayloadMessages(rawPayload)
+	if err != nil {
+		return eip712MessagePayload{}, errorsmod.Wrap(err, "failed to flatten payload JSON messages")
+	}
+
+	asMap, ok := flattened.Value().(map[string]interface{})
+	if !ok {
+		return eip712MessagePayload{}, errorsmod.Wrap(errortypes.ErrInvalidType, "failed to parse JSON as map")
+	}
+
+	return eip712MessagePayload{
+		payload:        flattened,
+		numPayloadMsgs: msgCount,
+		message:        asMap,
+	}, nil
+}
+
+// unmarshalBytesToJSONObject parses a byte stream as JSON and verifies that
+// the result is a JSON object rather than an array or primitive.
+func unmarshalBytesToJSONObject(data []byte) (gjson.Result, error) {
+	if !gjson.ValidBytes(data) {
+		return gjson.Result{}, errorsmod.Wrap(errortypes.ErrJSONUnmarshal, "invalid JSON received")
+	}
+
+	parsed := gjson.ParseBytes(data)
+	if !parsed.IsObject() {
+		return gjson.Result{}, errorsmod.Wrap(errortypes.ErrJSONUnmarshal, "failed to JSON unmarshal data as object")
+	}
+
+	return parsed, nil
+}
+
+// FlattenPayloadMessages flattens the input payload's messages, representing
+// them as key-value pairs of "msg{i}": {Msg}, rather than as an array of Msgs.
+// We do this to support messages with different schemas.
+func FlattenPayloadMessages(payload gjson.Result) (gjson.Result, int, error) {
+ flattened := payload
+ var err error
+
+ msgs, err := getPayloadMessages(payload)
+ if err != nil {
+ return gjson.Result{}, 0, err
+ }
+
+ for i, msg := range msgs {
+ flattened, err = payloadWithNewMessage(flattened, msg, i)
+ if err != nil {
+ return gjson.Result{}, 0, err
+ }
+ }
+
+ flattened, err = payloadWithoutMsgsField(flattened)
+ if err != nil {
+ return gjson.Result{}, 0, err
+ }
+
+ return flattened, len(msgs), nil
+}
+
+// getPayloadMessages extracts the payload's messages and returns them as a
+// JSON array, erroring when the field is absent or not an array.
+func getPayloadMessages(payload gjson.Result) ([]gjson.Result, error) {
+	rawMsgs := payload.Get(payloadMsgsField)
+
+	switch {
+	case !rawMsgs.Exists():
+		return nil, errorsmod.Wrap(errortypes.ErrInvalidRequest, "no messages found in payload, unable to parse")
+	case !rawMsgs.IsArray():
+		return nil, errorsmod.Wrap(errortypes.ErrInvalidRequest, "expected type array of messages, cannot parse")
+	default:
+		return rawMsgs.Array(), nil
+	}
+}
+
+// payloadWithNewMessage returns the updated payload object with msg written
+// under the flattened field corresponding to index.
+func payloadWithNewMessage(payload gjson.Result, msg gjson.Result, index int) (gjson.Result, error) {
+	field := msgFieldForIndex(index)
+
+	// Refuse to clobber a pre-existing key of the same name.
+	if payload.Get(field).Exists() {
+		return gjson.Result{}, errorsmod.Wrapf(
+			errortypes.ErrInvalidRequest,
+			"malformed payload received, did not expect to find key at field %v", field,
+		)
+	}
+	if !msg.IsObject() {
+		return gjson.Result{}, errorsmod.Wrapf(errortypes.ErrInvalidRequest, "msg at index %d is not valid JSON: %v", index, msg)
+	}
+
+	updated, err := sjson.SetRaw(payload.Raw, field, msg.Raw)
+	if err != nil {
+		return gjson.Result{}, err
+	}
+	return gjson.Parse(updated), nil
+}
+
+// msgFieldForIndex returns the payload field for a given message
+// post-flattening, e.g. msgs[2] becomes 'msg2'.
+func msgFieldForIndex(i int) string {
+	return fmt.Sprint("msg", i)
+}
+
+// payloadWithoutMsgsField strips the "msgs" array field, which flattening
+// makes obsolete, and returns the updated payload.
+func payloadWithoutMsgsField(payload gjson.Result) (gjson.Result, error) {
+	updated, err := sjson.Delete(payload.Raw, payloadMsgsField)
+	if err != nil {
+		return gjson.Result{}, err
+	}
+	return gjson.Parse(updated), nil
+}
diff --git a/eth/ethereum/eip712/preprocess.go b/eth/ethereum/eip712/preprocess.go
new file mode 100644
index 000000000..5610655ea
--- /dev/null
+++ b/eth/ethereum/eip712/preprocess.go
@@ -0,0 +1,85 @@
+// Copyright 2023 Unique Divine and Nibi, Inc.
+package eip712
+
+import (
+ "fmt"
+
+ "github.com/NibiruChain/nibiru/eth/types"
+ "github.com/cosmos/cosmos-sdk/client"
+ codectypes "github.com/cosmos/cosmos-sdk/codec/types"
+ cosmoskr "github.com/cosmos/cosmos-sdk/crypto/keyring"
+ "github.com/cosmos/cosmos-sdk/types/tx/signing"
+ authtx "github.com/cosmos/cosmos-sdk/x/auth/tx"
+)
+
+// PreprocessLedgerTx reformats Ledger-signed Cosmos transactions to match the fork expected by Ethermint
+// by including the signature in a Web3Tx extension and sending a blank signature in the body.
+//
+// It mutates txBuilder in place; for non-Ledger key types it is a no-op.
+// Returns an error when the builder does not support extensions, the tx does
+// not carry exactly one SingleSignatureData signature, or chainID is malformed.
+func PreprocessLedgerTx(chainID string, keyType cosmoskr.KeyType, txBuilder client.TxBuilder) error {
+	// Only process Ledger transactions
+	if keyType != cosmoskr.TypeLedger {
+		return nil
+	}
+
+	// Init extension builder to set Web3 extension
+	extensionBuilder, ok := txBuilder.(authtx.ExtensionOptionsTxBuilder)
+	if !ok {
+		return fmt.Errorf("cannot cast TxBuilder to ExtensionOptionsTxBuilder")
+	}
+
+	// Get signatures from TxBuilder
+	sigs, err := txBuilder.GetTx().GetSignaturesV2()
+	if err != nil {
+		return fmt.Errorf("could not get signatures: %w", err)
+	}
+
+	// Verify single-signer
+	if len(sigs) != 1 {
+		return fmt.Errorf("invalid number of signatures, expected 1 and got %v", len(sigs))
+	}
+
+	signature := sigs[0]
+	sigData, ok := signature.Data.(*signing.SingleSignatureData)
+	if !ok {
+		return fmt.Errorf("unexpected signature type, expected SingleSignatureData")
+	}
+	// The raw signature bytes are relocated into the extension below.
+	sigBytes := sigData.Signature
+
+	// Parse Chain ID as big.Int
+	chainIDInt, err := types.ParseChainID(chainID)
+	if err != nil {
+		return fmt.Errorf("could not parse chain id: %w", err)
+	}
+
+	// Add ExtensionOptionsWeb3Tx extension with signature
+	var option *codectypes.Any
+	option, err = codectypes.NewAnyWithValue(&types.ExtensionOptionsWeb3Tx{
+		FeePayer:         txBuilder.GetTx().FeePayer().String(),
+		TypedDataChainID: chainIDInt.Uint64(),
+		FeePayerSig:      sigBytes,
+	})
+	if err != nil {
+		return fmt.Errorf("could not set extension as any: %w", err)
+	}
+
+	extensionBuilder.SetExtensionOptions(option)
+
+	// Set blank signature with Amino Sign Type
+	// (Regardless of input signMode, Nibiru requires Amino signature type for
+	// Ledger support on EVM)
+	blankSig := signing.SingleSignatureData{
+		SignMode:  signing.SignMode_SIGN_MODE_LEGACY_AMINO_JSON,
+		Signature: nil,
+	}
+	// Keep the original pubkey and sequence; only the signature body is blanked.
+	sig := signing.SignatureV2{
+		PubKey:   signature.PubKey,
+		Data:     &blankSig,
+		Sequence: signature.Sequence,
+	}
+
+	err = txBuilder.SetSignatures(sig)
+	if err != nil {
+		return fmt.Errorf("unable to set signatures on payload: %w", err)
+	}
+
+	return nil
+}
diff --git a/eth/ethereum/eip712/preprocess_test.go b/eth/ethereum/eip712/preprocess_test.go
new file mode 100644
index 000000000..86c478c1c
--- /dev/null
+++ b/eth/ethereum/eip712/preprocess_test.go
@@ -0,0 +1,157 @@
+package eip712_test
+
+import (
+ "encoding/hex"
+ "strings"
+ "testing"
+
+ "cosmossdk.io/math"
+ "github.com/cosmos/cosmos-sdk/client"
+ "github.com/cosmos/cosmos-sdk/crypto/keyring"
+ sdk "github.com/cosmos/cosmos-sdk/types"
+ "github.com/cosmos/cosmos-sdk/types/tx/signing"
+ banktypes "github.com/cosmos/cosmos-sdk/x/bank/types"
+
+ "github.com/NibiruChain/nibiru/app"
+ "github.com/NibiruChain/nibiru/eth/encoding"
+ "github.com/NibiruChain/nibiru/eth/ethereum/eip712"
+ "github.com/NibiruChain/nibiru/x/common/testutil"
+ "github.com/stretchr/testify/require"
+)
+
+// Testing Constants
+var (
+	// chainID is a valid Nibiru EVM chain identifier ("<name>_<eip155>-<epoch>"
+	// is the full form; here only name and epoch are used by these tests).
+	chainID = "cataclysm" + "-1"
+	// ctx carries a TxConfig built from the app's module basics.
+	ctx = client.Context{}.WithTxConfig(
+		encoding.MakeConfig(app.ModuleBasics).TxConfig,
+	)
+)
+// feePayerAddress is a fixed bech32 account used as the fee payer in tests.
+var feePayerAddress = "nibi17xpfvakm2amg962yls6f84z3kell8c5ljcjw34"
+
+// TestCaseStruct bundles a prepared TxBuilder with the values expected after
+// Ledger preprocessing.
+type TestCaseStruct struct {
+	txBuilder              client.TxBuilder
+	expectedFeePayer       string
+	expectedGas            uint64
+	expectedFee            math.Int
+	expectedMemo           string
+	expectedMsg            string
+	expectedSignatureBytes []byte
+}
+
+// TestBlankTxBuilder checks that a builder with no signatures is rejected for
+// Ledger key types.
+func TestBlankTxBuilder(t *testing.T) {
+	builder := ctx.TxConfig.NewTxBuilder()
+
+	require.Error(t, eip712.PreprocessLedgerTx(
+		chainID,
+		keyring.TypeLedger,
+		builder,
+	))
+}
+
+// TestNonLedgerTxBuilder checks that non-Ledger key types pass through as a
+// no-op, even on an empty builder.
+func TestNonLedgerTxBuilder(t *testing.T) {
+	builder := ctx.TxConfig.NewTxBuilder()
+
+	require.NoError(t, eip712.PreprocessLedgerTx(
+		chainID,
+		keyring.TypeLocal,
+		builder,
+	))
+}
+
+// TestInvalidChainId checks that a malformed chain ID causes preprocessing to
+// fail for Ledger key types.
+func TestInvalidChainId(t *testing.T) {
+	builder := ctx.TxConfig.NewTxBuilder()
+
+	require.Error(t, eip712.PreprocessLedgerTx(
+		"invalid-chain-id",
+		keyring.TypeLedger,
+		builder,
+	))
+}
+
+// createBasicTestCase builds a TxBuilder carrying only a fee payer and a
+// single dummy signature, plus the values expected after preprocessing.
+func createBasicTestCase(t *testing.T) TestCaseStruct {
+	t.Helper()
+
+	feePayer, err := sdk.AccAddressFromBech32(feePayerAddress)
+	require.NoError(t, err)
+
+	txBuilder := ctx.TxConfig.NewTxBuilder()
+	txBuilder.SetFeePayer(feePayer)
+
+	// Create signature unrelated to payload for testing
+	signatureBytes, err := hex.DecodeString(strings.Repeat("01", 65))
+	require.NoError(t, err)
+
+	_, privKey := testutil.PrivKeyEth()
+	sig := signing.SignatureV2{
+		PubKey: privKey.PubKey(), // Use unrelated public key for testing
+		Data: &signing.SingleSignatureData{
+			SignMode:  signing.SignMode_SIGN_MODE_DIRECT,
+			Signature: signatureBytes,
+		},
+		Sequence: 0,
+	}
+	require.NoError(t, txBuilder.SetSignatures(sig))
+
+	return TestCaseStruct{
+		txBuilder:              txBuilder,
+		expectedFeePayer:       feePayer.String(),
+		expectedGas:            0,
+		expectedFee:            math.NewInt(0),
+		expectedMemo:           "",
+		expectedMsg:            "",
+		expectedSignatureBytes: signatureBytes,
+	}
+}
+
+// createPopulatedTestCase extends the basic test case with a fee, gas limit,
+// memo, and a single bank MsgSend.
+func createPopulatedTestCase(t *testing.T) TestCaseStruct {
+	t.Helper()
+
+	base := createBasicTestCase(t)
+	builder := base.txBuilder
+
+	gasLimit := uint64(200000)
+	memo := ""
+	feeAmount := math.NewInt(2000)
+
+	builder.SetFeeAmount(sdk.NewCoins(sdk.NewCoin(baseDenom, feeAmount)))
+	builder.SetGasLimit(gasLimit)
+	builder.SetMemo(memo)
+
+	msgSend := banktypes.MsgSend{
+		FromAddress: feePayerAddress,
+		ToAddress:   "nibi12luku6uxehhak02py4rcz65zu0swh7wjun6msa",
+		Amount: sdk.NewCoins(
+			sdk.NewCoin(
+				baseDenom,
+				math.NewInt(10000000),
+			),
+		),
+	}
+	require.NoError(t, builder.SetMsgs(&msgSend))
+
+	return TestCaseStruct{
+		txBuilder:              builder,
+		expectedFeePayer:       base.expectedFeePayer,
+		expectedGas:            gasLimit,
+		expectedFee:            feeAmount,
+		expectedMemo:           memo,
+		expectedMsg:            msgSend.String(),
+		expectedSignatureBytes: base.expectedSignatureBytes,
+	}
+}
diff --git a/eth/ethereum/eip712/types.go b/eth/ethereum/eip712/types.go
new file mode 100644
index 000000000..2a7fe7b1f
--- /dev/null
+++ b/eth/ethereum/eip712/types.go
@@ -0,0 +1,390 @@
+// Copyright 2023 Unique Divine and Nibi, Inc.
+package eip712
+
+import (
+ "bytes"
+ "fmt"
+ "sort"
+ "strings"
+
+ "golang.org/x/text/cases"
+ "golang.org/x/text/language"
+
+ errorsmod "cosmossdk.io/errors"
+ errortypes "github.com/cosmos/cosmos-sdk/types/errors"
+
+ "github.com/ethereum/go-ethereum/signer/core/apitypes"
+ "github.com/tidwall/gjson"
+)
+
+const (
+	// rootPrefix denotes the root JSON object during type traversal.
+	rootPrefix = "_"
+	// typePrefix prefixes generated EIP-712 type names, e.g. TypeMsgSend.
+	typePrefix = "Type"
+
+	// txField is the EIP-712 primary type name for the transaction schema.
+	txField   = "Tx"
+	// EIP-712 primitive type names produced for JSON primitives.
+	ethBool   = "bool"
+	ethInt64  = "int64"
+	ethString = "string"
+
+	// msgTypeField is the JSON field holding the Amino message type string.
+	msgTypeField = "type"
+
+	// maxDuplicateTypeDefs caps how many same-named, differently-shaped type
+	// definitions may be indexed before erroring (DOS guard).
+	maxDuplicateTypeDefs = 1000
+)
+
+// createEIP712Types creates and returns the EIP-712 types
+// for the given message payload: the fixed domain/Tx/Fee/Coin schema plus one
+// generated type per flattened payload message.
+func createEIP712Types(messagePayload eip712MessagePayload) (apitypes.Types, error) {
+	eip712Types := apitypes.Types{
+		"EIP712Domain": {
+			{
+				Name: "name",
+				Type: "string",
+			},
+			{
+				Name: "version",
+				Type: "string",
+			},
+			{
+				Name: "chainId",
+				Type: "uint256",
+			},
+			{
+				Name: "verifyingContract",
+				Type: "string",
+			},
+			{
+				Name: "salt",
+				Type: "string",
+			},
+		},
+		"Tx": {
+			{Name: "account_number", Type: "string"},
+			{Name: "chain_id", Type: "string"},
+			{Name: "fee", Type: "Fee"},
+			{Name: "memo", Type: "string"},
+			{Name: "sequence", Type: "string"},
+			// Note timeout_height was removed because it was not getting filled with the legacyTx
+		},
+		"Fee": {
+			{Name: "amount", Type: "Coin[]"},
+			{Name: "gas", Type: "string"},
+		},
+		"Coin": {
+			{Name: "denom", Type: "string"},
+			{Name: "amount", Type: "string"},
+		},
+	}
+
+	// Add a generated type (and Tx field) for each flattened msg{i} entry.
+	for i := 0; i < messagePayload.numPayloadMsgs; i++ {
+		field := msgFieldForIndex(i)
+		msg := messagePayload.payload.Get(field)
+
+		if err := addMsgTypesToRoot(eip712Types, field, msg); err != nil {
+			return nil, err
+		}
+	}
+
+	return eip712Types, nil
+}
+
+// addMsgTypesToRoot adds all types for the given message to eip712Types,
+// recursively handling object sub-fields, then registers the message's type
+// definition on the Tx schema.
+func addMsgTypesToRoot(eip712Types apitypes.Types, msgField string, msg gjson.Result) (err error) {
+	// Convert any panic from malformed input into a returned error.
+	defer doRecover(&err)
+
+	if !msg.IsObject() {
+		return errorsmod.Wrapf(errortypes.ErrInvalidRequest, "message is not valid JSON, cannot parse types")
+	}
+
+	rootType, err := msgRootType(msg)
+	if err != nil {
+		return err
+	}
+
+	typeDef, err := recursivelyAddTypesToRoot(eip712Types, rootType, rootPrefix, msg)
+	if err != nil {
+		return err
+	}
+
+	addMsgTypeDefToTxSchema(eip712Types, msgField, typeDef)
+	return nil
+}
+
+// msgRootType parses the message and returns the formatted type signature
+// corresponding to the message type, e.g. cosmos-sdk/MsgSend -> TypeMsgSend.
+func msgRootType(msg gjson.Result) (string, error) {
+	msgType := msg.Get(msgTypeField).Str
+	if msgType == "" {
+		// .Str is empty for arrays and objects
+		return "", errorsmod.Wrap(errortypes.ErrInvalidType, "malformed message type value, expected type string")
+	}
+
+	// Keep only the token after the final '/' and prepend the type prefix.
+	tokens := strings.Split(msgType, "/")
+	return typePrefix + tokens[len(tokens)-1], nil
+}
+
+// addMsgTypeDefToTxSchema appends the message's field-type pairing to the Tx
+// schema.
+func addMsgTypeDefToTxSchema(eip712Types apitypes.Types, msgField, msgTypeDef string) {
+	entry := apitypes.Type{Name: msgField, Type: msgTypeDef}
+	eip712Types[txField] = append(eip712Types[txField], entry)
+}
+
+// recursivelyAddTypesToRoot walks all types in the given map
+// and recursively adds sub-maps as new types when necessary.
+// It adds all type definitions to typeMap, then returns a key
+// to the json object's type definition within the map.
+func recursivelyAddTypesToRoot(
+	typeMap apitypes.Types,
+	rootType string,
+	prefix string,
+	payload gjson.Result,
+) (string, error) {
+	typesToAdd := []apitypes.Type{}
+
+	// Must sort the JSON keys for deterministic type generation.
+	sortedFieldNames, err := sortedJSONKeys(payload)
+	if err != nil {
+		return "", errorsmod.Wrap(err, "unable to sort object keys")
+	}
+
+	// The root object keeps rootType as its key; nested objects get a
+	// sanitized name derived from their dotted prefix.
+	typeDef := typeDefForPrefix(prefix, rootType)
+
+	for _, fieldName := range sortedFieldNames {
+		field := payload.Get(fieldName)
+		if !field.Exists() {
+			continue
+		}
+
+		// Handle array type by unwrapping the first element.
+		// Note that arrays with multiple types are not supported
+		// using EIP-712, so we can ignore that case.
+		isCollection := false
+		if field.IsArray() {
+			fieldAsArray := field.Array()
+
+			if len(fieldAsArray) == 0 {
+				// Arbitrarily add string[] type to handle empty arrays,
+				// since we cannot access the underlying object.
+				emptyArrayType := "string[]"
+				typesToAdd = appendedTypesList(typesToAdd, fieldName, emptyArrayType)
+
+				continue
+			}
+
+			field = fieldAsArray[0]
+			isCollection = true
+		}
+
+		ethType := getEthTypeForJSON(field)
+
+		// Handle JSON primitive types by adding the corresponding
+		// EIP-712 type to the types schema.
+		if ethType != "" {
+			if isCollection {
+				ethType += "[]"
+			}
+			typesToAdd = appendedTypesList(typesToAdd, fieldName, ethType)
+
+			continue
+		}
+
+		// Handle object types recursively. Note that nested array types are not supported
+		// in EIP-712, so we can exclude that case.
+		if field.IsObject() {
+			fieldPrefix := prefixForSubField(prefix, fieldName)
+
+			fieldTypeDef, err := recursivelyAddTypesToRoot(typeMap, rootType, fieldPrefix, field)
+			if err != nil {
+				return "", err
+			}
+
+			fieldTypeDef = sanitizeTypedef(fieldTypeDef)
+			if isCollection {
+				fieldTypeDef += "[]"
+			}
+
+			typesToAdd = appendedTypesList(typesToAdd, fieldName, fieldTypeDef)
+
+			continue
+		}
+	}
+
+	// Insert (or reuse) this object's definition and return its key.
+	return addTypesToRoot(typeMap, typeDef, typesToAdd)
+}
+
+// sortedJSONKeys returns the object's keys in reverse lexicographic order,
+// to be used for deterministic iteration.
+func sortedJSONKeys(json gjson.Result) ([]string, error) {
+	if !json.IsObject() {
+		return nil, errorsmod.Wrap(errortypes.ErrInvalidType, "expected JSON map to parse")
+	}
+
+	jsonMap := json.Map()
+
+	keys := make([]string, 0, len(jsonMap))
+	// #nosec G705 for map iteration
+	for k := range jsonMap {
+		keys = append(keys, k)
+	}
+
+	// Descending order — equivalent to the previous strings.Compare(...) > 0
+	// comparator; must stay descending to keep generated schemas stable.
+	sort.Sort(sort.Reverse(sort.StringSlice(keys)))
+
+	return keys, nil
+}
+
+// typeDefForPrefix computes the types-map key for the given prefix: the root
+// object keeps rootType, nested objects get a sanitized form of their prefix.
+func typeDefForPrefix(prefix, rootType string) string {
+	if prefix != rootPrefix {
+		return sanitizeTypedef(prefix)
+	}
+	return rootType
+}
+
+// appendedTypesList returns types extended with one new entry pairing name
+// with typeDef.
+func appendedTypesList(types []apitypes.Type, name, typeDef string) []apitypes.Type {
+	entry := apitypes.Type{Name: name, Type: typeDef}
+	return append(types, entry)
+}
+
+// prefixForSubField computes the prefix for a subfield by marking it as
+// derived from the object associated with prefix.
+func prefixForSubField(prefix, fieldName string) string {
+	return prefix + "." + fieldName
+}
+
+// addTypesToRoot attempts to add the types to the root at key
+// typeDef and returns the key at which the types are present,
+// or an error if they cannot be added. If the typeDef key is a
+// duplicate, we return the key corresponding to an identical copy
+// if present, without modifying the structure. Otherwise, we insert
+// the types at the next available typeDef-{n} field. We do this to
+// support identically named payloads with different schemas.
+func addTypesToRoot(typeMap apitypes.Types, typeDef string, types []apitypes.Type) (string, error) {
+ var indexedTypeDef string
+
+ indexAsDuplicate := 0
+
+ for {
+ indexedTypeDef = typeDefWithIndex(typeDef, indexAsDuplicate)
+ existingTypes, foundElement := typeMap[indexedTypeDef]
+
+ // Found identical duplicate, so we can simply return
+ // the existing type definition.
+ if foundElement && typesAreEqual(types, existingTypes) {
+ return indexedTypeDef, nil
+ }
+
+ // Found no element, so we can create a new one at this index.
+ if !foundElement {
+ break
+ }
+
+ indexAsDuplicate++
+
+ if indexAsDuplicate == maxDuplicateTypeDefs {
+ return "", errorsmod.Wrap(errortypes.ErrInvalidRequest, "exceeded maximum number of duplicates for a single type definition")
+ }
+ }
+
+ typeMap[indexedTypeDef] = types
+
+ return indexedTypeDef, nil
+}
+
+// typeDefWithIndex creates a duplicate-indexed type definition name to
+// differentiate between different schemas sharing the same name.
+func typeDefWithIndex(typeDef string, index int) string {
+	return fmt.Sprint(typeDef, index)
+}
+
+// typesAreEqual reports whether two apitypes.Type arrays hold identical
+// name/type pairs. It assumes both arrays are in the same sorted order.
+func typesAreEqual(types1 []apitypes.Type, types2 []apitypes.Type) bool {
+	if len(types1) != len(types2) {
+		return false
+	}
+
+	for i, t1 := range types1 {
+		if t1.Name != types2[i].Name || t1.Type != types2[i].Type {
+			return false
+		}
+	}
+
+	return true
+}
+
+// _.foo_bar.baz -> TypeFooBarBaz
+//
+// Since Geth does not tolerate complex EIP-712 type names, we need to sanitize
+// the inputs.
+func sanitizeTypedef(str string) string {
+ buf := new(bytes.Buffer)
+ caser := cases.Title(language.English, cases.NoLower)
+ parts := strings.Split(str, ".")
+
+ for _, part := range parts {
+ if part == rootPrefix {
+ buf.WriteString(typePrefix)
+ continue
+ }
+
+ subparts := strings.Split(part, "_")
+ for _, subpart := range subparts {
+ buf.WriteString(caser.String(subpart))
+ }
+ }
+
+ return buf.String()
+}
+
+// getEthTypeForJSON converts a JSON primitive type to an Ethereum type name.
+// It returns an empty string for Objects, Arrays, or Null.
+// See https://github.com/ethereum/EIPs/blob/master/EIPS/eip-712.md for more.
+func getEthTypeForJSON(json gjson.Result) string {
+	switch json.Type {
+	case gjson.True, gjson.False:
+		return ethBool
+	case gjson.Number:
+		return ethInt64
+	case gjson.String:
+		return ethString
+	default:
+		// gjson.JSON (array/object) and gjson.Null have no primitive mapping.
+		return ""
+	}
+}
+
+// doRecover attempts to recover in the event of a panic to
+// prevent DOS and gracefully handle an error instead. It must be invoked via
+// defer with a pointer to the caller's named error return.
+func doRecover(err *error) {
+	if r := recover(); r != nil {
+		// Preserve error values so callers can still unwrap them.
+		if e, ok := r.(error); ok {
+			e = errorsmod.Wrap(e, "panicked with error")
+			*err = e
+			return
+		}
+
+		// Non-error panic values are stringified.
+		*err = fmt.Errorf("%v", r)
+	}
+}
diff --git a/eth/types/assert.go b/eth/types/assert.go
new file mode 100644
index 000000000..6b91b85da
--- /dev/null
+++ b/eth/types/assert.go
@@ -0,0 +1,43 @@
+// Copyright Nibi, Inc.
+package types
+
+import (
+ "bytes"
+
+ errorsmod "cosmossdk.io/errors"
+ errortypes "github.com/cosmos/cosmos-sdk/types/errors"
+ "github.com/ethereum/go-ethereum/common"
+)
+
+// IsEmptyHash returns true if the hash corresponds to an empty ethereum hex hash.
+func IsEmptyHash(hash string) bool {
+	empty := common.Hash{}
+	return bytes.Equal(common.HexToHash(hash).Bytes(), empty.Bytes())
+}
+
+// IsZeroAddress returns true if the address corresponds to an empty ethereum hex address.
+func IsZeroAddress(address string) bool {
+	zero := common.Address{}
+	return bytes.Equal(common.HexToAddress(address).Bytes(), zero.Bytes())
+}
+
+// ValidateAddress returns an error if the provided string is not a valid
+// hex-formatted Ethereum address.
+func ValidateAddress(address string) error {
+	if common.IsHexAddress(address) {
+		return nil
+	}
+	return errorsmod.Wrapf(
+		errortypes.ErrInvalidAddress, "address '%s' is not a valid ethereum hex address",
+		address,
+	)
+}
+
+// ValidateNonZeroAddress returns an error if the provided string is not a hex
+// formatted string address or is equal to the zero address.
+func ValidateNonZeroAddress(address string) error {
+	if !IsZeroAddress(address) {
+		return ValidateAddress(address)
+	}
+	return errorsmod.Wrapf(
+		errortypes.ErrInvalidAddress, "address '%s' must not be zero",
+		address,
+	)
+}
diff --git a/eth/types/chain_id.go b/eth/types/chain_id.go
new file mode 100644
index 000000000..3489af037
--- /dev/null
+++ b/eth/types/chain_id.go
@@ -0,0 +1,56 @@
+// Copyright 2023 Unique Divine and Nibi, Inc.
+package types
+
+import (
+ "fmt"
+ "math/big"
+ "regexp"
+ "strings"
+
+ errorsmod "cosmossdk.io/errors"
+)
+
+var (
+	// regexChainID matches the lowercase chain name, e.g. "nibiru".
+	regexChainID = `[a-z]{1,}`
+	// regexEIP155Separator is the single underscore before the EIP-155 number.
+	regexEIP155Separator = `_{1}`
+	// regexEIP155 matches the EIP-155 chain number (no leading zero).
+	regexEIP155 = `[1-9][0-9]*`
+	// regexEpochSeparator is the single dash before the epoch number.
+	regexEpochSeparator = `-{1}`
+	// regexEpoch matches the epoch number (no leading zero).
+	regexEpoch = `[1-9][0-9]*`
+	// nibiruEvmChainId matches the full "<name>_<eip155>-<epoch>" format,
+	// e.g. "nibiru_1-1"; capture group 2 is the EIP-155 chain number.
+	nibiruEvmChainId = regexp.MustCompile(fmt.Sprintf(`^(%s)%s(%s)%s(%s)$`,
+		regexChainID,
+		regexEIP155Separator,
+		regexEIP155,
+		regexEpochSeparator,
+		regexEpoch))
+)
+
+// IsValidChainID returns false if the given chain identifier is incorrectly
+// formatted or longer than 48 characters.
+func IsValidChainID(chainID string) bool {
+	return len(chainID) <= 48 && nibiruEvmChainId.MatchString(chainID)
+}
+
+// ParseChainID parses a string chain identifier's epoch to an
+// Ethereum-compatible chain-id in *big.Int format. It returns an error when
+// the chain-id has an invalid format.
+func ParseChainID(chainID string) (*big.Int, error) {
+	chainID = strings.TrimSpace(chainID)
+	if len(chainID) > 48 {
+		return nil, errorsmod.Wrapf(ErrInvalidChainID, "chain-id '%s' cannot exceed 48 chars", chainID)
+	}
+
+	// len(nil) == 0, so a failed match is covered by the length check.
+	matches := nibiruEvmChainId.FindStringSubmatch(chainID)
+	if len(matches) != 4 || matches[1] == "" {
+		return nil, errorsmod.Wrapf(ErrInvalidChainID, "%s: %v", chainID, matches)
+	}
+
+	// verify that the chain-id entered is a base 10 integer
+	eip155, ok := new(big.Int).SetString(matches[2], 10)
+	if !ok {
+		return nil, errorsmod.Wrapf(ErrInvalidChainID, "epoch %s must be base-10 integer format", matches[2])
+	}
+
+	return eip155, nil
+}
diff --git a/eth/types/chain_id_test.go b/eth/types/chain_id_test.go
new file mode 100644
index 000000000..be83f0011
--- /dev/null
+++ b/eth/types/chain_id_test.go
@@ -0,0 +1,87 @@
+package types
+
+import (
+ "math/big"
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+// TestParseChainID exercises ParseChainID and IsValidChainID over a table of
+// valid and malformed chain identifiers, one subtest per case.
+func TestParseChainID(t *testing.T) {
+	testCases := []struct {
+		name    string
+		chainID string
+		expError bool
+		expInt  *big.Int
+	}{
+		{
+			"valid chain-id, single digit", "nibiru_1-1", false, big.NewInt(1),
+		},
+		{
+			"valid chain-id, multiple digits", "aragonchain_256-1", false, big.NewInt(256),
+		},
+		{
+			"invalid chain-id, double dash", "aragonchain-1-1", true, nil,
+		},
+		{
+			"invalid chain-id, double underscore", "aragonchain_1_1", true, nil,
+		},
+		{
+			"invalid chain-id, dash only", "-", true, nil,
+		},
+		{
+			"invalid chain-id, undefined identifier and EIP155", "-1", true, nil,
+		},
+		{
+			"invalid chain-id, undefined identifier", "_1-1", true, nil,
+		},
+		{
+			"invalid chain-id, uppercases", "NIBIRU_1-1", true, nil,
+		},
+		{
+			"invalid chain-id, mixed cases", "Nibiru_1-1", true, nil,
+		},
+		{
+			"invalid chain-id, special chars", "$&*#!_1-1", true, nil,
+		},
+		{
+			"invalid eip155 chain-id, cannot start with 0", "nibiru_001-1", true, nil,
+		},
+		{
+			"invalid eip155 chain-id, cannot invalid base", "nibiru_0x212-1", true, nil,
+		},
+		{
+			"invalid eip155 chain-id, non-integer", "nibiru_nibiru_9000-1", true, nil,
+		},
+		{
+			"invalid epoch, undefined", "nibiru_-", true, nil,
+		},
+		{
+			"blank chain ID", " ", true, nil,
+		},
+		{
+			"empty chain ID", "", true, nil,
+		},
+		{
+			"empty content for chain id, eip155 and epoch numbers", "_-", true, nil,
+		},
+		{
+			"long chain-id", "nibiru_" + strings.Repeat("1", 45) + "-1", true, nil,
+		},
+	}
+
+	for _, tc := range testCases {
+		tc := tc
+		t.Run(tc.name, func(t *testing.T) {
+			gotEpoch, err := ParseChainID(tc.chainID)
+			if tc.expError {
+				require.Error(t, err, tc.name)
+				require.Nil(t, gotEpoch)
+				require.False(t, IsValidChainID(tc.chainID), tc.name)
+				return
+			}
+			require.NoError(t, err, tc.name)
+			require.Equal(t, tc.expInt, gotEpoch, tc.name)
+			require.True(t, IsValidChainID(tc.chainID))
+		})
+	}
+}
diff --git a/eth/types/codec.go b/eth/types/codec.go
new file mode 100644
index 000000000..aebe26f7a
--- /dev/null
+++ b/eth/types/codec.go
@@ -0,0 +1,26 @@
+// Copyright Nibi, Inc.
+package types
+
+import (
+ codectypes "github.com/cosmos/cosmos-sdk/codec/types"
+ "github.com/cosmos/cosmos-sdk/types/tx"
+ authtypes "github.com/cosmos/cosmos-sdk/x/auth/types"
+)
+
+// RegisterInterfaces registers the tendermint concrete client-related
+// implementations and interfaces: EthAccount as both an AccountI and a
+// GenesisAccount, and the EVM tx extension options.
+func RegisterInterfaces(registry codectypes.InterfaceRegistry) {
+	registry.RegisterImplementations(
+		(*authtypes.AccountI)(nil),
+		&EthAccount{},
+	)
+	registry.RegisterImplementations(
+		(*authtypes.GenesisAccount)(nil),
+		&EthAccount{},
+	)
+	// Both extension options implement TxExtensionOptionI so they can be
+	// attached to transactions.
+	registry.RegisterImplementations(
+		(*tx.TxExtensionOptionI)(nil),
+		&ExtensionOptionsWeb3Tx{},
+		&ExtensionOptionDynamicFeeTx{},
+	)
+}
diff --git a/eth/types/errors.go b/eth/types/errors.go
new file mode 100644
index 000000000..2376eb9e8
--- /dev/null
+++ b/eth/types/errors.go
@@ -0,0 +1,17 @@
+package types
+
+import (
+ sdkerrors "cosmossdk.io/errors"
+)
+
+// moduleErrorCodeIdx tracks the last error code handed out for the "eth"
+// codespace. It is incremented before each registration, so the first
+// registered error receives code 2 — presumably to skip code 1, which the
+// Cosmos SDK errors package treats as reserved; confirm against
+// cosmossdk.io/errors before changing.
+var moduleErrorCodeIdx uint32 = 1
+
+// registerError registers a new sentinel error under the "eth" codespace with
+// the next available error code.
+func registerError(msg string) *sdkerrors.Error {
+	moduleErrorCodeIdx += 1
+	return sdkerrors.Register("eth", moduleErrorCodeIdx, msg)
+}
+
+// Module "sentinel" errors
+var (
+	ErrInvalidChainID = registerError("invalid Ethereum chain ID")
+)
diff --git a/eth/types/hdpath.go b/eth/types/hdpath.go
new file mode 100644
index 000000000..c60ae40f3
--- /dev/null
+++ b/eth/types/hdpath.go
@@ -0,0 +1,33 @@
+// Copyright Nibi, Inc.
+package types
+
+import (
+ ethaccounts "github.com/ethereum/go-ethereum/accounts"
+)
+
+var (
+	// Bip44CoinType satisfies EIP84. See https://github.com/ethereum/EIPs/issues/84 for more info.
+	Bip44CoinType uint32 = 60
+
+	// BIP44HDPath is the default BIP44 HD path used on Ethereum.
+	BIP44HDPath = ethaccounts.DefaultBaseDerivationPath.String()
+)
+
+type (
+	// HDPathIterator yields successive derivation paths each time it is called.
+	HDPathIterator func() ethaccounts.DerivationPath
+)
+
+// NewHDPathIterator receives a base path as a string and a boolean for the
+// desired iterator type (Ledger Live when true, default otherwise) and returns
+// a function that iterates over the base HD path. It errors when basePath is
+// not a parseable derivation path.
+func NewHDPathIterator(basePath string, ledgerIter bool) (HDPathIterator, error) {
+	hdPath, err := ethaccounts.ParseDerivationPath(basePath)
+	if err != nil {
+		return nil, err
+	}
+
+	if ledgerIter {
+		return ethaccounts.LedgerLiveIterator(hdPath), nil
+	}
+
+	return ethaccounts.DefaultIterator(hdPath), nil
+}
diff --git a/eth/types/safe_math.go b/eth/types/safe_math.go
new file mode 100644
index 000000000..3fd70c872
--- /dev/null
+++ b/eth/types/safe_math.go
@@ -0,0 +1,35 @@
+package types
+
+import (
+ fmt "fmt"
+ math "math"
+ "math/big"
+
+ errorsmod "cosmossdk.io/errors"
+ sdkmath "cosmossdk.io/math"
+ errortypes "github.com/cosmos/cosmos-sdk/types/errors"
+)
+
+// maxBitLen is the maximum bit length accepted for an Int256 value.
+const maxBitLen = 256
+
+// SafeNewIntFromBigInt constructs Int from big.Int, return error if more than 256bits
+func SafeNewIntFromBigInt(i *big.Int) (sdkmath.Int, error) {
+	if IsValidInt256(i) {
+		return sdkmath.NewIntFromBigInt(i), nil
+	}
+	return sdkmath.NewInt(0), fmt.Errorf("big int out of bound: %s", i)
+}
+
+// IsValidInt256 check the bound of 256 bit number
+// NOTE(review): a nil input is treated as valid here, in which case
+// SafeNewIntFromBigInt would pass nil through to NewIntFromBigInt — confirm
+// callers rely on this.
+func IsValidInt256(i *big.Int) bool {
+	return i == nil || i.BitLen() <= maxBitLen
+}
+
+// SafeInt64 checks for overflows while casting a uint64 to an int64 value.
+func SafeInt64(value uint64) (int64, error) {
+	if value <= uint64(math.MaxInt64) {
+		return int64(value), nil // #nosec G701 -- checked for int overflow already
+	}
+	return 0, errorsmod.Wrapf(errortypes.ErrInvalidHeight, "uint64 value %v cannot exceed %v", value, int64(math.MaxInt64))
+}
diff --git a/geth/.dockerignore b/geth/.dockerignore
new file mode 100644
index 000000000..0c013d18b
--- /dev/null
+++ b/geth/.dockerignore
@@ -0,0 +1,5 @@
+**/*_test.go
+
+build/_workspace
+build/_bin
+tests/testdata
diff --git a/geth/.gitattributes b/geth/.gitattributes
new file mode 100644
index 000000000..0269fab9c
--- /dev/null
+++ b/geth/.gitattributes
@@ -0,0 +1,3 @@
+# Auto detect text files and perform LF normalization
+* text=auto
+*.sol linguist-language=Solidity
diff --git a/geth/.github/CODEOWNERS b/geth/.github/CODEOWNERS
new file mode 100644
index 000000000..89ddbc170
--- /dev/null
+++ b/geth/.github/CODEOWNERS
@@ -0,0 +1,24 @@
+# Lines starting with '#' are comments.
+# Each line is a file pattern followed by one or more owners.
+
+accounts/usbwallet @karalabe
+accounts/scwallet @gballet
+accounts/abi @gballet @MariusVanDerWijden
+cmd/clef @holiman
+cmd/puppeth @karalabe
+consensus @karalabe
+core/ @karalabe @holiman @rjl493456442
+eth/ @karalabe @holiman @rjl493456442
+eth/catalyst/ @gballet
+eth/tracers/ @s1na
+graphql/ @gballet @s1na
+les/ @zsfelfoldi @rjl493456442
+light/ @zsfelfoldi @rjl493456442
+mobile/ @karalabe @ligi
+node/ @fjl
+p2p/ @fjl @zsfelfoldi
+rpc/ @fjl @holiman
+p2p/simulations @fjl
+p2p/protocols @fjl
+p2p/testing @fjl
+signer/ @holiman
diff --git a/geth/.github/CONTRIBUTING.md b/geth/.github/CONTRIBUTING.md
new file mode 100644
index 000000000..a08542df2
--- /dev/null
+++ b/geth/.github/CONTRIBUTING.md
@@ -0,0 +1,40 @@
+# Contributing
+
+Thank you for considering to help out with the source code! We welcome
+contributions from anyone on the internet, and are grateful for even the
+smallest of fixes!
+
+If you'd like to contribute to go-ethereum, please fork, fix, commit and send a
+pull request for the maintainers to review and merge into the main code base. If
+you wish to submit more complex changes though, please check up with the core
+devs first on [our gitter channel](https://gitter.im/ethereum/go-ethereum) to
+ensure those changes are in line with the general philosophy of the project
+and/or get some early feedback which can make both your efforts much lighter as
+well as our review and merge procedures quick and simple.
+
+## Coding guidelines
+
+Please make sure your contributions adhere to our coding guidelines:
+
+ * Code must adhere to the official Go
+[formatting](https://golang.org/doc/effective_go.html#formatting) guidelines
+(i.e. uses [gofmt](https://golang.org/cmd/gofmt/)).
+ * Code must be documented adhering to the official Go
+[commentary](https://golang.org/doc/effective_go.html#commentary) guidelines.
+ * Pull requests need to be based on and opened against the `master` branch.
+ * Commit messages should be prefixed with the package(s) they modify.
+ * E.g. "eth, rpc: make trace configs optional"
+
+## Can I have feature X
+
+Before you submit a feature request, please check and make sure that it isn't
+possible through some other means. The JavaScript-enabled console is a powerful
+feature in the right hands. Please check our
+[Geth documentation page](https://geth.ethereum.org/docs/) for more info
+and help.
+
+## Configuration, dependencies, and tests
+
+Please see the [Developers' Guide](https://geth.ethereum.org/docs/developers/devguide)
+for more details on configuring your environment, managing project dependencies
+and testing procedures.
diff --git a/geth/.github/ISSUE_TEMPLATE/bug.md b/geth/.github/ISSUE_TEMPLATE/bug.md
new file mode 100644
index 000000000..2aa2c48a6
--- /dev/null
+++ b/geth/.github/ISSUE_TEMPLATE/bug.md
@@ -0,0 +1,30 @@
+---
+name: Report a bug
+about: Something with go-ethereum is not working as expected
+title: ''
+labels: 'type:bug'
+assignees: ''
+---
+
+#### System information
+
+Geth version: `geth version`
+OS & Version: Windows/Linux/OSX
+Commit hash : (if `develop`)
+
+#### Expected behaviour
+
+
+#### Actual behaviour
+
+
+#### Steps to reproduce the behaviour
+
+
+#### Backtrace
+
+````
+[backtrace]
+````
+
+When submitting logs: please submit them as text and not screenshots.
\ No newline at end of file
diff --git a/geth/.github/ISSUE_TEMPLATE/feature.md b/geth/.github/ISSUE_TEMPLATE/feature.md
new file mode 100644
index 000000000..aacd885f9
--- /dev/null
+++ b/geth/.github/ISSUE_TEMPLATE/feature.md
@@ -0,0 +1,17 @@
+---
+name: Request a feature
+about: Report a missing feature - e.g. as a step before submitting a PR
+title: ''
+labels: 'type:feature'
+assignees: ''
+---
+
+# Rationale
+
+Why should this feature exist?
+What are the use-cases?
+
+# Implementation
+
+Do you have ideas regarding the implementation of this feature?
+Are you willing to implement this feature?
\ No newline at end of file
diff --git a/geth/.github/ISSUE_TEMPLATE/question.md b/geth/.github/ISSUE_TEMPLATE/question.md
new file mode 100644
index 000000000..8f460ab55
--- /dev/null
+++ b/geth/.github/ISSUE_TEMPLATE/question.md
@@ -0,0 +1,9 @@
+---
+name: Ask a question
+about: Something is unclear
+title: ''
+labels: 'type:docs'
+assignees: ''
+---
+
+This should only be used in very rare cases e.g. if you are not 100% sure if something is a bug or asking a question that leads to improving the documentation. For general questions please use [discord](https://discord.gg/nthXNEv) or the Ethereum stack exchange at https://ethereum.stackexchange.com.
diff --git a/geth/.github/no-response.yml b/geth/.github/no-response.yml
new file mode 100644
index 000000000..903d4ce85
--- /dev/null
+++ b/geth/.github/no-response.yml
@@ -0,0 +1,11 @@
+# Number of days of inactivity before an Issue is closed for lack of response
+daysUntilClose: 30
+# Label requiring a response
+responseRequiredLabel: "need:more-information"
+# Comment to post when closing an Issue for lack of response. Set to `false` to disable
+closeComment: >
+ This issue has been automatically closed because there has been no response
+ to our request for more information from the original author. With only the
+ information that is currently in the issue, we don't have enough information
+ to take action. Please reach out if you have more relevant information or
+ answers to our questions so that we can investigate further.
diff --git a/geth/.github/stale.yml b/geth/.github/stale.yml
new file mode 100644
index 000000000..6d921cc79
--- /dev/null
+++ b/geth/.github/stale.yml
@@ -0,0 +1,17 @@
+# Number of days of inactivity before an issue becomes stale
+daysUntilStale: 366
+# Number of days of inactivity before a stale issue is closed
+daysUntilClose: 42
+# Issues with these labels will never be considered stale
+exemptLabels:
+ - pinned
+ - security
+# Label to use when marking an issue as stale
+staleLabel: "status:inactive"
+# Comment to post when marking an issue as stale. Set to `false` to disable
+markComment: >
+ This issue has been automatically marked as stale because it has not had
+ recent activity. It will be closed if no further activity occurs. Thank you
+ for your contributions.
+# Comment to post when closing a stale issue. Set to `false` to disable
+closeComment: false
diff --git a/geth/.github/workflows/build.yml b/geth/.github/workflows/build.yml
new file mode 100644
index 000000000..ab82de990
--- /dev/null
+++ b/geth/.github/workflows/build.yml
@@ -0,0 +1,33 @@
+name: Build
+on:
+ pull_request:
+ branches:
+ - master
+
+jobs:
+ cleanup-runs:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: rokroskar/workflow-run-cleanup-action@master
+ env:
+ GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
+ if: "!startsWith(github.ref, 'refs/tags/') && github.ref != 'refs/heads/main'"
+
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - uses: actions/setup-go@v3
+ with:
+ go-version: 1.19
+ check-latest: true
+ - uses: technote-space/get-diff-action@v6.1.2
+ id: git_diff
+ with:
+ PATTERNS: |
+ **/**.go
+ go.mod
+ go.sum
+ - run: |
+ make build
+ if: env.GIT_DIFF
diff --git a/geth/.github/workflows/dependencies.yml b/geth/.github/workflows/dependencies.yml
new file mode 100644
index 000000000..8ab944a13
--- /dev/null
+++ b/geth/.github/workflows/dependencies.yml
@@ -0,0 +1,28 @@
+name: "Dependency Review"
+on: pull_request
+
+permissions:
+ contents: read
+
+jobs:
+ dependency-review:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/setup-go@v3
+ with:
+ go-version: 1.19
+ check-latest: true
+ - name: "Checkout Repository"
+ uses: actions/checkout@v3
+ - uses: technote-space/get-diff-action@v6.1.2
+ with:
+ PATTERNS: |
+ **/**.go
+ go.mod
+ go.sum
+ - name: "Dependency Review"
+ uses: actions/dependency-review-action@v3
+ if: env.GIT_DIFF
+ - name: "Go vulnerability check"
+ run: make vulncheck
+ if: env.GIT_DIFF
diff --git a/geth/.github/workflows/lint.yml b/geth/.github/workflows/lint.yml
new file mode 100644
index 000000000..877682899
--- /dev/null
+++ b/geth/.github/workflows/lint.yml
@@ -0,0 +1,72 @@
+name: Lint
+# Lint runs golangci-lint over the entire ethermint repository. This workflow
+# is run on every pull request and push to main. The `golangci` job will pass
+# without running if no *.{go, mod, sum} files have been changed.
+on:
+ pull_request:
+ push:
+ branches:
+ - master
+jobs:
+ golangci:
+ name: Run golangci-lint
+ runs-on: ubuntu-latest
+ timeout-minutes: 10
+ steps:
+ # Required: setup-go, for all versions v3.0.0+ of golangci-lint
+ - uses: actions/setup-go@v3
+ with:
+ go-version: 1.19
+ check-latest: true
+ - uses: actions/checkout@v3
+ - uses: technote-space/get-diff-action@v6.1.2
+ with:
+ PATTERNS: |
+ **/**.go
+ go.mod
+ go.sum
+ - uses: golangci/golangci-lint-action@v3.3.1
+ with:
+ # Required: the version of golangci-lint is required and must be specified without patch version: we always use the latest patch version.
+ version: latest
+ args: --timeout 10m
+ github-token: ${{ secrets.github_token }}
+ # Check only if there are differences in the source code
+ if: env.GIT_DIFF
+ markdown-lint:
+ name: Run markdown-lint
+ runs-on: ubuntu-latest
+ timeout-minutes: 10
+ steps:
+ - uses: actions/checkout@v3
+ - uses: technote-space/get-diff-action@v6.1.2
+ with:
+ PATTERNS: |
+ docs/**/*.md
+ x/**/*.md
+ README.md
+ - uses: nosborn/github-action-markdown-cli@v3.2.0
+ with:
+ files: .
+ config_file: .markdownlint.yml
+ ignore_path: .markdownlintignore
+ # Check only if there are differences in the source code
+ if: env.GIT_DIFF
+ gomod2nix:
+ name: Check gomod2nix.toml file is up to date
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2.3.4
+ - uses: cachix/install-nix-action@v18
+ - uses: cachix/cachix-action@v12
+ with:
+ name: ethermint
+ - uses: technote-space/get-diff-action@v6.1.2
+ with:
+ PATTERNS: |
+ **/**.py
+ - name: run gomod2nix
+ run: |
+ nix run -f ./nix gomod2nix
+ git diff --no-ext-diff --exit-code
+ if: env.GIT_DIFF
diff --git a/geth/.github/workflows/markdown-links.yml b/geth/.github/workflows/markdown-links.yml
new file mode 100644
index 000000000..f1fefae84
--- /dev/null
+++ b/geth/.github/workflows/markdown-links.yml
@@ -0,0 +1,29 @@
+name: Check Markdown links
+on:
+ pull_request:
+ paths:
+ - '**.md'
+ push:
+ branches:
+ - master
+ paths:
+ - '**.md'
+
+jobs:
+ markdown-link-check:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ - uses: technote-space/get-diff-action@v6.1.2
+ id: git_diff
+ with:
+ PATTERNS: |
+ **/**.md
+ - uses: gaurav-nelson/github-action-markdown-link-check@master
+ with:
+ folder-path: "docs"
+ check-modified-files-only: "yes"
+ use-quiet-mode: "yes"
+ base-branch: "main"
+ config-file: "mlc_config.json"
+ if: env.GIT_DIFF
diff --git a/geth/.github/workflows/security.yml b/geth/.github/workflows/security.yml
new file mode 100644
index 000000000..df9be0ec6
--- /dev/null
+++ b/geth/.github/workflows/security.yml
@@ -0,0 +1,37 @@
+name: Run Gosec
+on:
+ pull_request:
+ push:
+ branches:
+ - master
+
+jobs:
+ Gosec:
+ permissions:
+ security-events: write
+
+ runs-on: ubuntu-latest
+ env:
+ GO111MODULE: on
+ steps:
+ - name: Checkout Source
+ uses: actions/checkout@v3
+ - name: Get Diff
+ uses: technote-space/get-diff-action@v6.1.2
+ with:
+ PATTERNS: |
+ **/*.go
+ go.mod
+ go.sum
+ - name: Run Gosec Security Scanner
+ uses: cosmos/gosec@master
+ with:
+ # we let the report content trigger a failure using the GitHub Security features.
+ args: "-no-fail -fmt sarif -out results.sarif ./..."
+ if: "env.GIT_DIFF_FILTERED != ''"
+ - name: Upload SARIF file
+ uses: github/codeql-action/upload-sarif@v2
+ with:
+ # Path to SARIF file relative to the root of the repository
+ sarif_file: results.sarif
+ if: "env.GIT_DIFF_FILTERED != ''"
diff --git a/geth/.github/workflows/super-linter.yml b/geth/.github/workflows/super-linter.yml
new file mode 100644
index 000000000..7a5b55f5a
--- /dev/null
+++ b/geth/.github/workflows/super-linter.yml
@@ -0,0 +1,38 @@
+# This workflow executes several linters on changed files based on languages used in your code base whenever
+# you push a code or open a pull request.
+#
+# You can adjust the behavior by modifying this file.
+# For more information, see:
+# https://github.com/github/super-linter
+---
+name: Lint Code Base
+
+on:
+ push:
+ branches: ["master"]
+ pull_request:
+ branches: ["master"]
+jobs:
+ run-lint:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v3
+ with:
+ # Full git history is needed to get a proper list of changed files within `super-linter`
+ fetch-depth: 0
+
+ - name: Lint Code Base
+ uses: github/super-linter@v4
+ env:
+ LINTER_RULES_PATH: /
+ YAML_CONFIG_FILE: .yamllint
+ VALIDATE_ALL_CODEBASE: false
+ MARKDOWN_CONFIG_FILE: .markdownlint.yml
+ PROTOBUF_CONFIG_FILE: .protolint.yml
+ VALIDATE_NATURAL_LANGUAGE: false
+ VALIDATE_OPENAPI: false
+ VALIDATE_JSCPD: false
+ VALIDATE_GO: false
+ DEFAULT_BRANCH: "master"
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/geth/.github/workflows/test.yml b/geth/.github/workflows/test.yml
new file mode 100644
index 000000000..2b9aa299a
--- /dev/null
+++ b/geth/.github/workflows/test.yml
@@ -0,0 +1,35 @@
+name: Tests
+on:
+ pull_request:
+ push:
+ branches:
+ - master
+ - release/**
+
+jobs:
+ cleanup-runs:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: rokroskar/workflow-run-cleanup-action@master
+ env:
+ GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
+ if: "!startsWith(github.ref, 'refs/tags/') && github.ref != 'refs/heads/master'"
+
+ test-all:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/setup-go@v3
+ with:
+ go-version: 1.19
+ check-latest: true
+ - uses: actions/checkout@v3
+ - uses: technote-space/get-diff-action@v6.1.2
+ with:
+ PATTERNS: |
+ **/**.go
+ go.mod
+ go.sum
+ - name: Test and Create Coverage Report
+ run: |
+ make test all
+ if: env.GIT_DIFF
diff --git a/geth/.gitignore b/geth/.gitignore
new file mode 100644
index 000000000..1ee8b8302
--- /dev/null
+++ b/geth/.gitignore
@@ -0,0 +1,49 @@
+# See http://help.github.com/ignore-files/ for more about ignoring files.
+#
+# If you find yourself ignoring temporary files generated by your text editor
+# or operating system, you probably want to add a global ignore instead:
+# git config --global core.excludesfile ~/.gitignore_global
+
+/tmp
+*/**/*un~
+*/**/*.test
+*un~
+.DS_Store
+*/**/.DS_Store
+.ethtest
+*/**/*tx_database*
+*/**/*dapps*
+build/_vendor/pkg
+
+#*
+.#*
+*#
+*~
+.project
+.settings
+
+# used by the Makefile
+/build/_workspace/
+/build/cache/
+/build/bin/
+/geth*.zip
+
+# travis
+profile.tmp
+profile.cov
+
+# IdeaIDE
+.idea
+
+# VS Code
+.vscode
+
+# dashboard
+/dashboard/assets/flow-typed
+/dashboard/assets/node_modules
+/dashboard/assets/stats.json
+/dashboard/assets/bundle.js
+/dashboard/assets/bundle.js.map
+/dashboard/assets/package-lock.json
+
+**/yarn-error.log
diff --git a/geth/.gitmodules b/geth/.gitmodules
new file mode 100644
index 000000000..241c169c4
--- /dev/null
+++ b/geth/.gitmodules
@@ -0,0 +1,8 @@
+[submodule "tests"]
+ path = tests/testdata
+ url = https://github.com/ethereum/tests
+ shallow = true
+[submodule "evm-benchmarks"]
+ path = tests/evm-benchmarks
+ url = https://github.com/ipsilon/evm-benchmarks
+ shallow = true
diff --git a/geth/.golangci.yml b/geth/.golangci.yml
new file mode 100644
index 000000000..4c1297223
--- /dev/null
+++ b/geth/.golangci.yml
@@ -0,0 +1,73 @@
+# This file configures github.com/golangci/golangci-lint.
+
+run:
+ timeout: 20m
+ tests: true
+ # default is true. Enables skipping of directories:
+ # vendor$, third_party$, testdata$, examples$, Godeps$, builtin$
+ skip-dirs-use-default: true
+ skip-files:
+ - core/genesis_alloc.go
+
+linters:
+ disable-all: true
+ enable:
+ - deadcode
+ - goconst
+ - goimports
+ - gosimple
+ - govet
+ - ineffassign
+ - misspell
+ - unconvert
+ - varcheck
+ - typecheck
+ - unused
+ - staticcheck
+ - bidichk
+ - durationcheck
+ - exportloopref
+ - gosec
+ - whitespace
+
+ # - structcheck # lots of false positives
+ # - errcheck #lot of false positives
+ # - contextcheck
+ # - errchkjson # lots of false positives
+ # - errorlint # this check crashes
+ # - exhaustive # silly check
+ # - makezero # false positives
+ # - nilerr # several intentional
+
+linters-settings:
+ gofmt:
+ simplify: true
+ goconst:
+ min-len: 3 # minimum length of string constant
+ min-occurrences: 6 # minimum number of occurrences
+ gosec:
+ excludes:
+ - G404 # Use of weak random number generator - lots of FP
+ - G107 # Potential http request -- those are intentional
+ - G306 # G306: Expect WriteFile permissions to be 0600 or less
+
+issues:
+ exclude-rules:
+ - path: crypto/bn256/cloudflare/optate.go
+ linters:
+ - deadcode
+ - staticcheck
+ - path: internal/build/pgp.go
+ text: 'SA1019: package golang.org/x/crypto/openpgp is deprecated'
+ - path: core/vm/contracts.go
+ text: 'SA1019: package golang.org/x/crypto/ripemd160 is deprecated'
+ - path: accounts/usbwallet/trezor.go
+ text: 'SA1019: package github.com/golang/protobuf/proto is deprecated'
+ - path: accounts/usbwallet/trezor/
+ text: 'SA1019: package github.com/golang/protobuf/proto is deprecated'
+ exclude:
+ - 'SA1019: event.TypeMux is deprecated: use Feed'
+ - 'SA1019: strings.Title is deprecated'
+ - 'SA1019: strings.Title has been deprecated since Go 1.18 and an alternative has been available since Go 1.0: The rule Title uses for word boundaries does not handle Unicode punctuation properly. Use golang.org/x/text/cases instead.'
+ - 'SA1029: should not use built-in type string as key for value'
+ - 'G306: Expect WriteFile permissions to be 0600 or less'
diff --git a/geth/.mailmap b/geth/.mailmap
new file mode 100644
index 000000000..aa074b76d
--- /dev/null
+++ b/geth/.mailmap
@@ -0,0 +1,237 @@
+Aaron Buchwald
+
+Aaron Kumavis
+
+Abel Nieto
+Abel Nieto
+
+Afri Schoedon <58883403+q9f@users.noreply.github.com>
+Afri Schoedon <5chdn@users.noreply.github.com> <58883403+q9f@users.noreply.github.com>
+
+Alec Perseghin
+
+Aleksey Smyrnov
+
+Alex Leverington
+Alex Leverington
+
+Alex Pozhilenkov
+Alex Pozhilenkov
+
+Alexey Akhunov
+
+Alon Muroch
+
+Andrey Petrov
+Andrey Petrov
+
+Arkadiy Paronyan
+
+Armin Braun
+
+Aron Fischer
+
+Austin Roberts
+Austin Roberts
+
+Bas van Kervel
+Bas van Kervel
+Bas van Kervel
+Bas van Kervel
+
+Boqin Qin
+Boqin Qin
+
+Casey Detrio
+
+Cheng Li
+
+Chris Ziogas
+Chris Ziogas
+
+Christoph Jentzsch
+
+Diederik Loerakker
+
+Dimitry Khokhlov
+
+Domino Valdano
+Domino Valdano
+
+Edgar Aroutiounian
+
+Elliot Shepherd
+
+Enrique Fynn
+
+Enrique Fynn
+Enrique Fynn
+
+Ernesto del Toro
+Ernesto del Toro
+
+Everton Fraga
+
+Felix Lange
+Felix Lange
+
+Frank Wang
+
+Gary Rong
+
+Gavin Wood
+
+Gregg Dourgarian
+
+Guillaume Ballet
+Guillaume Ballet <3272758+gballet@users.noreply.github.com>
+
+Guillaume Nicolas
+
+Hanjiang Yu
+Hanjiang Yu <42531996+de1acr0ix@users.noreply.github.com>
+
+Heiko Hees
+
+Henning Diedrich
+Henning Diedrich Drake Burroughs
+
+Hwanjo Heo <34005989+hwanjo@users.noreply.github.com>
+
+Iskander (Alex) Sharipov
+Iskander (Alex) Sharipov
+
+Jae Kwon
+
+Janoš Guljaš
+Janoš Guljaš Janos Guljas
+
+Jared Wasinger
+
+Jason Carver
+Jason Carver
+
+Javier Peletier
+Javier Peletier
+
+Jeffrey Wilcke
+Jeffrey Wilcke
+Jeffrey Wilcke
+Jeffrey Wilcke
+
+Jens Agerberg
+
+Joseph Chow
+Joseph Chow ethers
+
+
+Joseph Goulden
+
+Justin Drake
+
+Kenso Trabing
+Kenso Trabing
+
+Liang Ma
+Liang Ma
+
+Louis Holbrook
+Louis Holbrook
+
+Maran Hidskes
+
+Marian Oancea
+
+Martin Becze
+Martin Becze
+
+Martin Lundfall
+
+Matt Garnett <14004106+lightclient@users.noreply.github.com>
+
+Matthew Halpern
+Matthew Halpern
+
+Michael Riabzev
+
+Nchinda Nchinda
+
+Nick Dodson
+
+Nick Johnson
+
+Nick Savers
+
+Nishant Das
+Nishant Das
+
+Olivier Hervieu
+
+Pascal Dierich
+Pascal Dierich
+
+RJ Catalano
+RJ Catalano
+
+Ralph Caraveo
+
+Rene Lubov <41963722+renaynay@users.noreply.github.com>
+
+Robert Zaremba
+Robert Zaremba
+
+Roman Mandeleil
+
+Sorin Neacsu
+Sorin Neacsu
+
+Sven Ehlert
+
+Taylor Gerring
+Taylor Gerring
+
+Thomas Bocek
+
+Tim Cooijmans
+
+Valentin Wüstholz
+Valentin Wüstholz
+
+Victor Tran
+
+Viktor Trón
+
+Ville Sundell
+
+Vincent G
+
+Vitalik Buterin
+
+Vlad Gluhovsky
+Vlad Gluhovsky
+
+Wenshao Zhong
+Wenshao Zhong <11510383@mail.sustc.edu.cn>
+Wenshao Zhong <374662347@qq.com>
+
+Will Villanueva
+
+Xiaobing Jiang
+
+Xudong Liu <33193253+r1cs@users.noreply.github.com>
+
+Yohann Léon
+
+Zachinquarantine
+Zachinquarantine
+
+Ziyuan Zhong
+
+Zsolt Felföldi
+
+meowsbits
+meowsbits <45600330+meowsbits@users.noreply.github.com>
+
+nedifi <103940716+nedifi@users.noreply.github.com>
+
+Максим Чусовлянов
diff --git a/geth/.markdownlint.yml b/geth/.markdownlint.yml
new file mode 100644
index 000000000..ad27776bc
--- /dev/null
+++ b/geth/.markdownlint.yml
@@ -0,0 +1,21 @@
+"default": true
+"MD001": false
+"MD004": false
+"MD007":
+ "indent": 4
+"MD013": false
+"MD024":
+ "siblings_only": true
+"MD025": false
+"MD026":
+ "punctuation": ".;:"
+"MD029": false
+"MD033": false
+"MD034": false
+"MD036": false
+"MD040": false
+"MD041": false
+"MD051": false
+"MD049":
+ "style": "asterisk"
+"no-hard-tabs": false
diff --git a/geth/.markdownlintignore b/geth/.markdownlintignore
new file mode 100644
index 000000000..c6fc33add
--- /dev/null
+++ b/geth/.markdownlintignore
@@ -0,0 +1,3 @@
+CHANGELOG.md
+docs/*
+
diff --git a/geth/.protolint.yml b/geth/.protolint.yml
new file mode 100644
index 000000000..bb2b72883
--- /dev/null
+++ b/geth/.protolint.yml
@@ -0,0 +1,174 @@
+---
+# Lint directives.
+lint:
+ # # Linter files to ignore.
+ # ignores:
+ # - id: MESSAGE_NAMES_UPPER_CAMEL_CASE
+ # files:
+ # # NOTE: UNIX paths will be properly accepted by both UNIX and Windows.
+ # - _example/proto/simple.proto
+ # - id: ENUM_NAMES_UPPER_CAMEL_CASE
+ # files:
+ # - path/to/foo.proto
+
+ # # Linter files to walk.
+ # files:
+ # # The specific files to exclude.
+ # exclude:
+ # # NOTE: UNIX paths will be properly accepted by both UNIX and Windows.
+ # - path/to/file
+
+ # # Linter directories to walk.
+ # directories:
+ # # The specific directories to exclude.
+ # exclude:
+ # # NOTE: UNIX paths will be properly accepted by both UNIX and Windows.
+ # - path/to/dir
+
+ # Linter rules.
+ # Run `protolint list` to see all available rules.
+ rules:
+ # Determines whether or not to include the default set of linters.
+ no_default: true
+
+ # Set the default to all linters. This option works the other way around as no_default does.
+ # If you want to enable this option, delete the comment out below and no_default.
+ # all_default: true
+
+ # The specific linters to add.
+ add:
+ - FIELD_NAMES_LOWER_SNAKE_CASE
+ - MESSAGE_NAMES_UPPER_CAMEL_CASE
+ - MAX_LINE_LENGTH
+ - INDENT
+ # - SERVICE_NAMES_END_WITH
+ - FIELD_NAMES_EXCLUDE_PREPOSITIONS
+ - MESSAGE_NAMES_EXCLUDE_PREPOSITIONS
+ - FILE_NAMES_LOWER_SNAKE_CASE
+ - IMPORTS_SORTED
+ - PACKAGE_NAME_LOWER_CASE
+ - ORDER
+ - MESSAGES_HAVE_COMMENT
+ - SERVICES_HAVE_COMMENT
+ - RPCS_HAVE_COMMENT
+ - FIELDS_HAVE_COMMENT
+ - PROTO3_FIELDS_AVOID_REQUIRED
+ - PROTO3_GROUPS_AVOID
+ # - REPEATED_FIELD_NAMES_PLURALIZED
+ - ENUMS_HAVE_COMMENT
+ - ENUM_FIELDS_HAVE_COMMENT
+ - SYNTAX_CONSISTENT
+ - RPC_NAMES_UPPER_CAMEL_CASE
+ # - FILE_HAS_COMMENT
+ - QUOTE_CONSISTENT
+
+ # # The specific linters to remove.
+ # remove:
+ # - RPC_NAMES_UPPER_CAMEL_CASE
+
+ # Linter rules option.
+ rules_option:
+ # MAX_LINE_LENGTH rule option.
+ max_line_length:
+ # Enforces a maximum line length
+ max_chars: 120
+ # Specifies the character count for tab characters
+ tab_chars: 2
+
+ # INDENT rule option.
+ indent:
+ # Available styles are 4(4-spaces), 2(2-spaces) or tab.
+ style: 2
+ # Specifies if it should stop considering and inserting new lines at the appropriate positions
+ # when the inner elements are on the same line. Default is false.
+ not_insert_newline: true
+
+ # # FILE_NAMES_LOWER_SNAKE_CASE rule option.
+ # file_names_lower_snake_case:
+ # excludes:
+ # - ../proto/invalidFileName.proto
+
+ # QUOTE_CONSISTENT rule option.
+ quote_consistent:
+ # Available quote are "double" or "single".
+ quote: double
+
+ # ENUM_FIELD_NAMES_ZERO_VALUE_END_WITH rule option.
+ enum_field_names_zero_value_end_with:
+ suffix: INVALID
+
+ # # SERVICE_NAMES_END_WITH rule option.
+ # service_names_end_with:
+ # text: Service
+
+ # FIELD_NAMES_EXCLUDE_PREPOSITIONS rule option.
+ field_names_exclude_prepositions:
+ # The specific prepositions to determine if the field name includes.
+ prepositions:
+ - for
+ - at
+ - of
+ # The specific keywords including prepositions to ignore. E.g. end_of_support is a term you would like to use, and skip checking.
+ excludes:
+ - duration_of_decay
+
+ # # REPEATED_FIELD_NAMES_PLURALIZED rule option.
+ # ## The spec for each rules follows the implementation of https://github.com/gertd/go-pluralize.
+ # ## Plus, you can refer to this rule's test code.
+ # repeated_field_names_pluralized:
+ # uncountable_rules:
+ # - paper
+ # irregular_rules:
+ # Irregular: Regular
+
+ # MESSAGE_NAMES_EXCLUDE_PREPOSITIONS rule option.
+ message_names_exclude_prepositions:
+ # The specific prepositions to determine if the message name includes.
+ prepositions:
+ - With
+ - For
+ - Of
+ # # The specific keywords including prepositions to ignore. E.g. EndOfSupport is a term you would like to use, and skip checking.
+ # excludes:
+ # - EndOfSupport
+
+ # # RPC_NAMES_CASE rule option.
+ # rpc_names_case:
+ # # The specific convention the name should conforms to.
+ # ## Available conventions are "lower_camel_case", "upper_snake_case", or "lower_snake_case".
+ # convention: upper_snake_case
+
+ # MESSAGES_HAVE_COMMENT rule option.
+ messages_have_comment:
+ # Comments need to begin with the name of the thing being described. default is false.
+ should_follow_golang_style: true
+
+ # SERVICES_HAVE_COMMENT rule option.
+ services_have_comment:
+ # Comments need to begin with the name of the thing being described. default is false.
+ should_follow_golang_style: true
+
+ # RPCS_HAVE_COMMENT rule option.
+ rpcs_have_comment:
+ # Comments need to begin with the name of the thing being described. default is false.
+ should_follow_golang_style: true
+
+ # FIELDS_HAVE_COMMENT rule option.
+ fields_have_comment:
+ # Comments need to begin with the name of the thing being described. default is false.
+ should_follow_golang_style: true
+
+ # ENUMS_HAVE_COMMENT rule option.
+ enums_have_comment:
+ # Comments need to begin with the name of the thing being described. default is false.
+ should_follow_golang_style: true
+
+ # ENUM_FIELDS_HAVE_COMMENT rule option.
+ enum_fields_have_comment:
+ # Comments need to begin with the name of the thing being described. default is false.
+ should_follow_golang_style: true
+
+ # # SYNTAX_CONSISTENT rule option.
+ # syntax_consistent:
+ # # Default is proto3.
+ # version: proto2
diff --git a/geth/.travis.yml b/geth/.travis.yml
new file mode 100644
index 000000000..e08e271f3
--- /dev/null
+++ b/geth/.travis.yml
@@ -0,0 +1,247 @@
+language: go
+go_import_path: github.com/ethereum/go-ethereum
+sudo: false
+jobs:
+ allow_failures:
+ - stage: build
+ os: osx
+ go: 1.17.x
+ env:
+ - azure-osx
+ - azure-ios
+ - cocoapods-ios
+
+ include:
+ # This builder only tests code linters on latest version of Go
+ - stage: lint
+ os: linux
+ dist: bionic
+ go: 1.18.x
+ env:
+ - lint
+ git:
+ submodules: false # avoid cloning ethereum/tests
+ script:
+ - go run build/ci.go lint
+
+ # These builders create the Docker sub-images for multi-arch push and each
+ # will attempt to push the multi-arch image if they are the last builder
+ - stage: build
+ if: type = push
+ os: linux
+ arch: amd64
+ dist: bionic
+ go: 1.18.x
+ env:
+ - docker
+ services:
+ - docker
+ git:
+ submodules: false # avoid cloning ethereum/tests
+ before_install:
+ - export DOCKER_CLI_EXPERIMENTAL=enabled
+ script:
+ - go run build/ci.go docker -image -manifest amd64,arm64 -upload ethereum/client-go
+
+ - stage: build
+ if: type = push
+ os: linux
+ arch: arm64
+ dist: bionic
+ go: 1.18.x
+ env:
+ - docker
+ services:
+ - docker
+ git:
+ submodules: false # avoid cloning ethereum/tests
+ before_install:
+ - export DOCKER_CLI_EXPERIMENTAL=enabled
+ script:
+ - go run build/ci.go docker -image -manifest amd64,arm64 -upload ethereum/client-go
+
+ # This builder does the Ubuntu PPA upload
+ - stage: build
+ if: type = push
+ os: linux
+ dist: bionic
+ go: 1.18.x
+ env:
+ - ubuntu-ppa
+ - GO111MODULE=on
+ git:
+ submodules: false # avoid cloning ethereum/tests
+ addons:
+ apt:
+ packages:
+ - devscripts
+ - debhelper
+ - dput
+ - fakeroot
+ - python-bzrlib
+ - python-paramiko
+ script:
+ - echo '|1|7SiYPr9xl3uctzovOTj4gMwAC1M=|t6ReES75Bo/PxlOPJ6/GsGbTrM0= ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA0aKz5UTUndYgIGG7dQBV+HaeuEZJ2xPHo2DS2iSKvUL4xNMSAY4UguNW+pX56nAQmZKIZZ8MaEvSj6zMEDiq6HFfn5JcTlM80UwlnyKe8B8p7Nk06PPQLrnmQt5fh0HmEcZx+JU9TZsfCHPnX7MNz4ELfZE6cFsclClrKim3BHUIGq//t93DllB+h4O9LHjEUsQ1Sr63irDLSutkLJD6RXchjROXkNirlcNVHH/jwLWR5RcYilNX7S5bIkK8NlWPjsn/8Ua5O7I9/YoE97PpO6i73DTGLh5H9JN/SITwCKBkgSDWUt61uPK3Y11Gty7o2lWsBjhBUm2Y38CBsoGmBw==' >> ~/.ssh/known_hosts
+ - go run build/ci.go debsrc -upload ethereum/ethereum -sftp-user geth-ci -signer "Go Ethereum Linux Builder "
+
+ # This builder does the Linux Azure uploads
+ - stage: build
+ if: type = push
+ os: linux
+ dist: bionic
+ sudo: required
+ go: 1.18.x
+ env:
+ - azure-linux
+ - GO111MODULE=on
+ git:
+ submodules: false # avoid cloning ethereum/tests
+ addons:
+ apt:
+ packages:
+ - gcc-multilib
+ script:
+ # Build for the primary platforms that Trusty can manage
+ - go run build/ci.go install -dlgo
+ - go run build/ci.go archive -type tar -signer LINUX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds
+ - go run build/ci.go install -dlgo -arch 386
+ - go run build/ci.go archive -arch 386 -type tar -signer LINUX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds
+
+ # Switch over GCC to cross compilation (breaks 386, hence why do it here only)
+ - sudo -E apt-get -yq --no-install-suggests --no-install-recommends --force-yes install gcc-arm-linux-gnueabi libc6-dev-armel-cross gcc-arm-linux-gnueabihf libc6-dev-armhf-cross gcc-aarch64-linux-gnu libc6-dev-arm64-cross
+ - sudo ln -s /usr/include/asm-generic /usr/include/asm
+
+ - GOARM=5 go run build/ci.go install -dlgo -arch arm -cc arm-linux-gnueabi-gcc
+ - GOARM=5 go run build/ci.go archive -arch arm -type tar -signer LINUX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds
+ - GOARM=6 go run build/ci.go install -dlgo -arch arm -cc arm-linux-gnueabi-gcc
+ - GOARM=6 go run build/ci.go archive -arch arm -type tar -signer LINUX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds
+ - GOARM=7 go run build/ci.go install -dlgo -arch arm -cc arm-linux-gnueabihf-gcc
+ - GOARM=7 go run build/ci.go archive -arch arm -type tar -signer LINUX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds
+ - go run build/ci.go install -dlgo -arch arm64 -cc aarch64-linux-gnu-gcc
+ - go run build/ci.go archive -arch arm64 -type tar -signer LINUX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds
+
+ # This builder does the Android Maven and Azure uploads
+ - stage: build
+ if: type = push
+ os: linux
+ dist: bionic
+ addons:
+ apt:
+ packages:
+ - openjdk-8-jdk
+ env:
+ - azure-android
+ - maven-android
+ - GO111MODULE=on
+ git:
+ submodules: false # avoid cloning ethereum/tests
+ before_install:
+ # Install Android and its dependencies manually, Travis is stale
+ - export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64
+ - curl https://dl.google.com/android/repository/commandlinetools-linux-6858069_latest.zip -o android.zip
+ - unzip -q android.zip -d $HOME/sdk && rm android.zip
+ - mv $HOME/sdk/cmdline-tools $HOME/sdk/latest && mkdir $HOME/sdk/cmdline-tools && mv $HOME/sdk/latest $HOME/sdk/cmdline-tools
+ - export PATH=$PATH:$HOME/sdk/cmdline-tools/latest/bin
+ - export ANDROID_HOME=$HOME/sdk
+
+ - yes | sdkmanager --licenses >/dev/null
+ - sdkmanager "platform-tools" "platforms;android-15" "platforms;android-19" "platforms;android-24" "ndk-bundle"
+
+ # Install Go to allow building with
+ - curl https://dl.google.com/go/go1.18.linux-amd64.tar.gz | tar -xz
+ - export PATH=`pwd`/go/bin:$PATH
+ - export GOROOT=`pwd`/go
+ - export GOPATH=$HOME/go
+ script:
+ # Build the Android archive and upload it to Maven Central and Azure
+ - mkdir -p $GOPATH/src/github.com/ethereum
+ - ln -s `pwd` $GOPATH/src/github.com/ethereum/go-ethereum
+ - go run build/ci.go aar -signer ANDROID_SIGNING_KEY -signify SIGNIFY_KEY -deploy https://oss.sonatype.org -upload gethstore/builds
+
+ # This builder does the OSX Azure, iOS CocoaPods and iOS Azure uploads
+ - stage: build
+ if: type = push
+ os: osx
+ go: 1.18.x
+ env:
+ - azure-osx
+ - azure-ios
+ - cocoapods-ios
+ - GO111MODULE=on
+ git:
+ submodules: false # avoid cloning ethereum/tests
+ script:
+ - go run build/ci.go install -dlgo
+ - go run build/ci.go archive -type tar -signer OSX_SIGNING_KEY -signify SIGNIFY_KEY -upload gethstore/builds
+
+ # Build the iOS framework and upload it to CocoaPods and Azure
+ - gem uninstall cocoapods -a -x
+ - gem install cocoapods
+
+ - mv ~/.cocoapods/repos/master ~/.cocoapods/repos/master.bak
+ - sed -i '.bak' 's/repo.join/!repo.join/g' $(dirname `gem which cocoapods`)/cocoapods/sources_manager.rb
+ - if [ "$TRAVIS_PULL_REQUEST" = "false" ]; then git clone --depth=1 https://github.com/CocoaPods/Specs.git ~/.cocoapods/repos/master && pod setup --verbose; fi
+
+ - xctool -version
+ - xcrun simctl list
+
+ # Workaround for https://github.com/golang/go/issues/23749
+ - export CGO_CFLAGS_ALLOW='-fmodules|-fblocks|-fobjc-arc'
+ - go run build/ci.go xcode -signer IOS_SIGNING_KEY -signify SIGNIFY_KEY -deploy trunk -upload gethstore/builds
+
+ # These builders run the tests
+ - stage: build
+ os: linux
+ arch: amd64
+ dist: bionic
+ go: 1.18.x
+ env:
+ - GO111MODULE=on
+ script:
+ - go run build/ci.go test -coverage $TEST_PACKAGES
+
+ - stage: build
+ if: type = pull_request
+ os: linux
+ arch: arm64
+ dist: bionic
+ go: 1.18.x
+ env:
+ - GO111MODULE=on
+ script:
+ - go run build/ci.go test -coverage $TEST_PACKAGES
+
+ - stage: build
+ os: linux
+ dist: bionic
+ go: 1.17.x
+ env:
+ - GO111MODULE=on
+ script:
+ - go run build/ci.go test -coverage $TEST_PACKAGES
+
+ # This builder does the Azure archive purges to avoid accumulating junk
+ - stage: build
+ if: type = cron
+ os: linux
+ dist: bionic
+ go: 1.18.x
+ env:
+ - azure-purge
+ - GO111MODULE=on
+ git:
+ submodules: false # avoid cloning ethereum/tests
+ script:
+ - go run build/ci.go purge -store gethstore/builds -days 14
+
+ # This builder executes race tests
+ - stage: build
+ if: type = cron
+ os: linux
+ dist: bionic
+ go: 1.18.x
+ env:
+ - GO111MODULE=on
+ script:
+ - go run build/ci.go test -race -coverage $TEST_PACKAGES
+
diff --git a/geth/.yamllint b/geth/.yamllint
new file mode 100644
index 000000000..e53fd3d14
--- /dev/null
+++ b/geth/.yamllint
@@ -0,0 +1,31 @@
+---
+
+yaml-files:
+ - '*.yaml'
+ - '*.yml'
+ - '.yamllint'
+
+rules:
+ braces: enable
+ brackets: enable
+ colons: enable
+ commas: enable
+ comments:
+ level: warning
+ comments-indentation: disable
+ document-end: disable
+ document-start: disable
+ empty-lines: disable
+ empty-values: disable
+ float-values: disable
+ hyphens: enable
+ indentation: enable
+ key-duplicates: enable
+ key-ordering: disable
+ line-length: disable
+ new-line-at-end-of-file: enable
+ new-lines: enable
+ octal-values: disable
+ quoted-strings: disable
+ trailing-spaces: disable
+ truthy: disable
\ No newline at end of file
diff --git a/geth/AUTHORS b/geth/AUTHORS
new file mode 100644
index 000000000..151c85016
--- /dev/null
+++ b/geth/AUTHORS
@@ -0,0 +1,591 @@
+# This is the official list of go-ethereum authors for copyright purposes.
+
+6543 <6543@obermui.de>
+a e r t h
+Aaron Buchwald
+Abel Nieto
+Adam Babik
+Adam Schmideg
+Aditya
+Aditya Arora
+Adrià Cidre
+Afanasii Kurakin
+Afri Schoedon <5chdn@users.noreply.github.com>
+Agustin Armellini Fischer
+Ahyun
+Airead
+Alan Chen
+Alejandro Isaza
+Aleksey Smyrnov
+Ales Katona
+Alex Beregszaszi
+Alex Leverington
+Alex Mazalov
+Alex Pozhilenkov
+Alex Prut <1648497+alexprut@users.noreply.github.com>
+Alex Wu
+Alexander van der Meij
+Alexander Yastrebov
+Alexandre Van de Sande
+Alexey Akhunov
+Alexey Shekhirin
+alexwang <39109351+dipingxian2@users.noreply.github.com>
+Ali Atiia <42751398+aliatiia@users.noreply.github.com>
+Ali Hajimirza
+am2rican5
+AmitBRD <60668103+AmitBRD@users.noreply.github.com>
+Anatole <62328077+a2br@users.noreply.github.com>
+Andrea Franz
+Andrei Maiboroda
+Andrey Petrov
+ANOTHEL
+Antoine Rondelet
+Antoine Toulme
+Anton Evangelatov
+Antonio Salazar Cardozo
+Arba Sasmoyo
+Armani Ferrante
+Armin Braun
+Aron Fischer
+atsushi-ishibashi
+Austin Roberts
+ayeowch
+b00ris
+b1ackd0t
+bailantaotao
+baizhenxuan
+Balaji Shetty Pachai <32358081+balajipachai@users.noreply.github.com>
+Balint Gabor
+baptiste-b-pegasys <85155432+baptiste-b-pegasys@users.noreply.github.com>
+Bas van Kervel
+Benjamin Brent
+benma
+Benoit Verkindt
+Binacs
+bloonfield
+Bo
+Bo Ye
+Bob Glickstein
+Boqin Qin
+Brandon Harden
+Brent
+Brian Schroeder
+Bruno Škvorc
+C. Brown
+Caesar Chad
+Casey Detrio
+CDsigma
+Ceelog
+Ceyhun Onur
+chabashilah
+changhong
+Chase Wright
+Chen Quan
+Cheng Li
+chenglin <910372762@qq.com>
+chenyufeng
+Chris Pacia
+Chris Ziogas
+Christian Muehlhaeuser
+Christoph Jentzsch
+chuwt
+cong
+Connor Stein
+Corey Lin <514971757@qq.com>
+courtier