
light: migrate to proto (#4964)

pull/4980/head
Marko, 4 years ago (committed by GitHub)
parent commit 4a87d60736
4 changed files with 50 additions and 23 deletions
  1. CHANGELOG_PENDING.md (+1, -0)
  2. light/store/db/db.go (+35, -16)
  3. light/store/db/db_test.go (+13, -7)
  4. types/evidence.go (+1, -0)

CHANGELOG_PENDING.md (+1, -0)

@@ -35,6 +35,7 @@ Friendly reminder, we have a [bug bounty program](https://hackerone.com/tendermi
 - [state] \#4679 `TxResult` is a Protobuf type defined in `abci` types directory
 - [state] \#4679 `state` reactor migration to Protobuf encoding
 - [evidence] \#4959 Add json tags to `DuplicateVoteEvidence`
+- [light] \#4964 `light` reactor migration to Protobuf encoding
 - [store] \#4778 Transition store module to protobuf encoding
   - `BlockStoreStateJSON` is now `BlockStoreState` and is encoded as binary in the database


light/store/db/db.go (+35, -16)

@@ -7,11 +7,11 @@ import (
 	"strconv"
 	"sync"
-	"github.com/tendermint/go-amino"
+	"github.com/gogo/protobuf/proto"
 	dbm "github.com/tendermint/tm-db"
-	cryptoAmino "github.com/tendermint/tendermint/crypto/encoding/amino"
 	"github.com/tendermint/tendermint/light/store"
+	tmproto "github.com/tendermint/tendermint/proto/types"
 	"github.com/tendermint/tendermint/types"
 )
@@ -25,17 +25,11 @@ type dbs struct {
 	mtx sync.RWMutex
 	size uint16
-	cdc *amino.Codec
 }
 // New returns a Store that wraps any DB (with an optional prefix in case you
 // want to use one DB with many light clients).
-//
-// Objects are marshalled using amino (github.com/tendermint/go-amino)
 func New(db dbm.DB, prefix string) store.Store {
-	cdc := amino.NewCodec()
-	cryptoAmino.RegisterAmino(cdc)
 	size := uint16(0)
 	bz, err := db.Get(sizeKey)
@@ -43,7 +37,7 @@ func New(db dbm.DB, prefix string) store.Store {
 		size = unmarshalSize(bz)
 	}
-	return &dbs{db: db, prefix: prefix, cdc: cdc, size: size}
+	return &dbs{db: db, prefix: prefix, size: size}
 }
 // SaveSignedHeaderAndValidatorSet persists SignedHeader and ValidatorSet to
@@ -55,12 +49,19 @@ func (s *dbs) SaveSignedHeaderAndValidatorSet(sh *types.SignedHeader, valSet *ty
 		panic("negative or zero height")
 	}
-	shBz, err := s.cdc.MarshalBinaryLengthPrefixed(sh)
+	pbsh := sh.ToProto()
+	shBz, err := proto.Marshal(pbsh)
 	if err != nil {
-		return fmt.Errorf("marshalling header: %w", err)
+		return fmt.Errorf("marshalling SignedHeader: %w", err)
 	}
-	valSetBz, err := s.cdc.MarshalBinaryLengthPrefixed(valSet)
+	pbvs, err := valSet.ToProto()
+	if err != nil {
+		return fmt.Errorf("unable to transition validator set to protobuf: %w", err)
+	}
+	valSetBz, err := proto.Marshal(pbvs)
 	if err != nil {
 		return fmt.Errorf("marshalling validator set: %w", err)
 	}
@@ -126,8 +127,17 @@ func (s *dbs) SignedHeader(height int64) (*types.SignedHeader, error) {
 		return nil, store.ErrSignedHeaderNotFound
 	}
-	var signedHeader *types.SignedHeader
-	err = s.cdc.UnmarshalBinaryLengthPrefixed(bz, &signedHeader)
+	var pbsh tmproto.SignedHeader
+	err = proto.Unmarshal(bz, &pbsh)
+	if err != nil {
+		return nil, err
+	}
+	signedHeader, err := types.SignedHeaderFromProto(&pbsh)
+	if err != nil {
+		return nil, err
+	}
 	return signedHeader, err
 }
@@ -147,8 +157,17 @@ func (s *dbs) ValidatorSet(height int64) (*types.ValidatorSet, error) {
 		return nil, store.ErrValidatorSetNotFound
 	}
-	var valSet *types.ValidatorSet
-	err = s.cdc.UnmarshalBinaryLengthPrefixed(bz, &valSet)
+	var pbvs tmproto.ValidatorSet
+	err = proto.Unmarshal(bz, &pbvs)
+	if err != nil {
+		return nil, err
+	}
+	valSet, err := types.ValidatorSetFromProto(&pbvs)
+	if err != nil {
+		return nil, err
+	}
 	return valSet, err
 }


light/store/db/db_test.go (+13, -7)

@@ -14,6 +14,7 @@ import (
 func TestLast_FirstSignedHeaderHeight(t *testing.T) {
 	dbStore := New(dbm.NewMemDB(), "TestLast_FirstSignedHeaderHeight")
+	vals, _ := types.RandValidatorSet(10, 100)
 	// Empty store
 	height, err := dbStore.LastSignedHeaderHeight()
@@ -26,7 +27,7 @@ func TestLast_FirstSignedHeaderHeight(t *testing.T) {
 	// 1 key
 	err = dbStore.SaveSignedHeaderAndValidatorSet(
-		&types.SignedHeader{Header: &types.Header{Height: 1}}, &types.ValidatorSet{})
+		&types.SignedHeader{Header: &types.Header{Height: 1}}, vals)
 	require.NoError(t, err)
 	height, err = dbStore.LastSignedHeaderHeight()
@@ -40,7 +41,7 @@ func TestLast_FirstSignedHeaderHeight(t *testing.T) {
 func Test_SaveSignedHeaderAndValidatorSet(t *testing.T) {
 	dbStore := New(dbm.NewMemDB(), "Test_SaveSignedHeaderAndValidatorSet")
+	vals, _ := types.RandValidatorSet(10, 100)
 	// Empty store
 	h, err := dbStore.SignedHeader(1)
 	require.Error(t, err)
@@ -51,8 +52,9 @@ func Test_SaveSignedHeaderAndValidatorSet(t *testing.T) {
 	assert.Nil(t, valSet)
 	// 1 key
+	pa := vals.Validators[0].Address
 	err = dbStore.SaveSignedHeaderAndValidatorSet(
-		&types.SignedHeader{Header: &types.Header{Height: 1}}, &types.ValidatorSet{})
+		&types.SignedHeader{Header: &types.Header{Height: 1, ProposerAddress: pa}}, vals)
 	require.NoError(t, err)
 	h, err = dbStore.SignedHeader(1)
@@ -78,6 +80,8 @@ func Test_SaveSignedHeaderAndValidatorSet(t *testing.T) {
 func Test_SignedHeaderBefore(t *testing.T) {
 	dbStore := New(dbm.NewMemDB(), "Test_SignedHeaderBefore")
+	valSet, _ := types.RandValidatorSet(10, 100)
+	pa := valSet.Proposer.Address
 	assert.Panics(t, func() {
 		_, _ = dbStore.SignedHeaderBefore(0)
@@ -85,7 +89,7 @@ func Test_SignedHeaderBefore(t *testing.T) {
 	})
 	err := dbStore.SaveSignedHeaderAndValidatorSet(
-		&types.SignedHeader{Header: &types.Header{Height: 2}}, &types.ValidatorSet{})
+		&types.SignedHeader{Header: &types.Header{Height: 2, ProposerAddress: pa}}, valSet)
 	require.NoError(t, err)
 	h, err := dbStore.SignedHeaderBefore(3)
@@ -97,6 +101,7 @@ func Test_SignedHeaderBefore(t *testing.T) {
 func Test_Prune(t *testing.T) {
 	dbStore := New(dbm.NewMemDB(), "Test_Prune")
+	valSet, _ := types.RandValidatorSet(10, 100)
 	// Empty store
 	assert.EqualValues(t, 0, dbStore.Size())
@@ -105,7 +110,7 @@ func Test_Prune(t *testing.T) {
 	// One header
 	err = dbStore.SaveSignedHeaderAndValidatorSet(
-		&types.SignedHeader{Header: &types.Header{Height: 2}}, &types.ValidatorSet{})
+		&types.SignedHeader{Header: &types.Header{Height: 2}}, valSet)
 	require.NoError(t, err)
 	assert.EqualValues(t, 1, dbStore.Size())
@@ -121,7 +126,7 @@ func Test_Prune(t *testing.T) {
 	// Multiple headers
 	for i := 1; i <= 10; i++ {
 		err = dbStore.SaveSignedHeaderAndValidatorSet(
-			&types.SignedHeader{Header: &types.Header{Height: int64(i)}}, &types.ValidatorSet{})
+			&types.SignedHeader{Header: &types.Header{Height: int64(i)}}, valSet)
 		require.NoError(t, err)
 	}
@@ -136,6 +141,7 @@ func Test_Prune(t *testing.T) {
 func Test_Concurrency(t *testing.T) {
 	dbStore := New(dbm.NewMemDB(), "Test_Prune")
+	vals, _ := types.RandValidatorSet(10, 100)
 	var wg sync.WaitGroup
 	for i := 1; i <= 100; i++ {
@@ -144,7 +150,7 @@ func Test_Concurrency(t *testing.T) {
 			defer wg.Done()
 			dbStore.SaveSignedHeaderAndValidatorSet(
-				&types.SignedHeader{Header: &types.Header{Height: i}}, &types.ValidatorSet{})
+				&types.SignedHeader{Header: &types.Header{Height: i}}, vals)
 			dbStore.SignedHeader(i)
 			dbStore.ValidatorSet(i)
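As a usage note, here is a minimal sketch, not part of the commit, of the save/load pattern the updated tests follow; the file placement alongside light/store/db, the import alias lightdb, and the test name are assumptions, while the calls themselves (New, types.RandValidatorSet, SaveSignedHeaderAndValidatorSet, SignedHeader, ValidatorSet) are the ones exercised in the diff above.

package db_test // hypothetical placement next to light/store/db; illustrative only

import (
	"testing"

	"github.com/stretchr/testify/require"
	dbm "github.com/tendermint/tm-db"

	lightdb "github.com/tendermint/tendermint/light/store/db"
	"github.com/tendermint/tendermint/types"
)

// TestProtoRoundTrip_Sketch is illustrative only: it saves a SignedHeader together
// with a populated validator set and reads both back through the proto-encoded store.
func TestProtoRoundTrip_Sketch(t *testing.T) {
	dbStore := lightdb.New(dbm.NewMemDB(), "sketch")

	vals, _ := types.RandValidatorSet(10, 100)
	pa := vals.Validators[0].Address

	err := dbStore.SaveSignedHeaderAndValidatorSet(
		&types.SignedHeader{Header: &types.Header{Height: 1, ProposerAddress: pa}}, vals)
	require.NoError(t, err)

	sh, err := dbStore.SignedHeader(1)
	require.NoError(t, err)
	require.EqualValues(t, 1, sh.Height)

	_, err = dbStore.ValidatorSet(1)
	require.NoError(t, err)
}

The tests switch from &types.ValidatorSet{} to types.RandValidatorSet and set ProposerAddress on the saved headers, presumably because an empty validator set and a bare header do not survive the new proto round trip.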


types/evidence.go (+1, -0)

@@ -246,6 +246,7 @@ func EvidenceFromProto(evidence *tmproto.Evidence) (Evidence, error) {
 		return &dve, dve.ValidateBasic()
 	case *tmproto.Evidence_ConflictingHeadersEvidence:
 		h1, err := SignedHeaderFromProto(evi.ConflictingHeadersEvidence.H1)
 		if err != nil {
 			return nil, fmt.Errorf("from proto err: %w", err)

