diff --git a/cmd/node/config/config.toml b/cmd/node/config/config.toml index 543203cebd8..6f08c786ef4 100644 --- a/cmd/node/config/config.toml +++ b/cmd/node/config/config.toml @@ -443,6 +443,12 @@ SizeInBytes = 26214400 # 25MB per each pair (metachain, destinationShard) Shards = 4 +[ValidatorInfoPool] + Name = "ValidatorInfoPool" + Capacity = 10000 + SizeInBytes = 31457280 # 30MB + Shards = 4 + #PublicKeyPeerId represents the main cache used to map Elrond block signing public keys to their associated peer id's. [PublicKeyPeerId] Name = "PublicKeyPeerId" diff --git a/cmd/node/config/enableEpochs.toml b/cmd/node/config/enableEpochs.toml index c1dee6fd6f6..48ab61d8ee9 100644 --- a/cmd/node/config/enableEpochs.toml +++ b/cmd/node/config/enableEpochs.toml @@ -221,6 +221,9 @@ # SetSenderInEeiOutputTransferEnableEpoch represents the epoch when setting the sender in eei output transfers will be enabled SetSenderInEeiOutputTransferEnableEpoch = 4 + # RefactorPeersMiniBlocksEnableEpoch represents the epoch when the refactor of the peers mini blocks will be enabled + RefactorPeersMiniBlocksEnableEpoch = 5 + # MaxNodesChangeEnableEpoch holds configuration for changing the maximum number of nodes and the enabling epoch MaxNodesChangeEnableEpoch = [ { EpochEnable = 0, MaxNumNodes = 36, NodesToShufflePerShard = 4 }, diff --git a/common/constants.go b/common/constants.go index c2db1b561ad..f019b964bac 100644 --- a/common/constants.go +++ b/common/constants.go @@ -72,6 +72,9 @@ const PeerAuthenticationTopic = "peerAuthentication" // ConnectionTopic represents the topic used when sending the new connection message data const ConnectionTopic = "connection" +// ValidatorInfoTopic is the topic used for validatorInfo signaling +const ValidatorInfoTopic = "validatorInfo" + // PathShardPlaceholder represents the placeholder for the shard ID in paths const PathShardPlaceholder = "[S]" diff --git a/common/enablers/enableEpochsHandler.go b/common/enablers/enableEpochsHandler.go index fc259ae310b..4b4877f102c 100644 --- a/common/enablers/enableEpochsHandler.go +++ b/common/enablers/enableEpochsHandler.go @@ -111,6 +111,7 @@ func (handler *enableEpochsHandler) EpochConfirmed(epoch uint32, _ uint64) { handler.setFlagValue(epoch >= handler.enableEpochsConfig.CheckExecuteOnReadOnlyEnableEpoch, handler.checkExecuteOnReadOnlyFlag, "checkExecuteOnReadOnlyFlag") handler.setFlagValue(epoch >= handler.enableEpochsConfig.SetSenderInEeiOutputTransferEnableEpoch, handler.setSenderInEeiOutputTransferFlag, "setSenderInEeiOutputTransferFlag") handler.setFlagValue(epoch >= handler.enableEpochsConfig.ESDTMetadataContinuousCleanupEnableEpoch, handler.changeDelegationOwnerFlag, "changeDelegationOwnerFlag") + handler.setFlagValue(epoch >= handler.enableEpochsConfig.RefactorPeersMiniBlocksEnableEpoch, handler.refactorPeersMiniBlocksFlag, "refactorPeersMiniBlocksFlag") } func (handler *enableEpochsHandler) setFlagValue(value bool, flag *atomic.Flag, flagName string) { @@ -203,6 +204,11 @@ func (handler *enableEpochsHandler) MiniBlockPartialExecutionEnableEpoch() uint3 return handler.enableEpochsConfig.MiniBlockPartialExecutionEnableEpoch } +// RefactorPeersMiniBlocksEnableEpoch returns the epoch when the refactor of peers mini blocks becomes active +func (handler *enableEpochsHandler) RefactorPeersMiniBlocksEnableEpoch() uint32 { + return handler.enableEpochsConfig.RefactorPeersMiniBlocksEnableEpoch +} + // IsInterfaceNil returns true if there is no value under the interface func (handler *enableEpochsHandler) IsInterfaceNil() bool { return
handler == nil diff --git a/common/enablers/epochFlags.go b/common/enablers/epochFlags.go index 30d7a01cad0..3960d990ea3 100644 --- a/common/enablers/epochFlags.go +++ b/common/enablers/epochFlags.go @@ -81,6 +81,7 @@ type epochFlagsHolder struct { checkExecuteOnReadOnlyFlag *atomic.Flag setSenderInEeiOutputTransferFlag *atomic.Flag changeDelegationOwnerFlag *atomic.Flag + refactorPeersMiniBlocksFlag *atomic.Flag } func newEpochFlagsHolder() *epochFlagsHolder { @@ -163,6 +164,7 @@ func newEpochFlagsHolder() *epochFlagsHolder { checkExecuteOnReadOnlyFlag: &atomic.Flag{}, setSenderInEeiOutputTransferFlag: &atomic.Flag{}, changeDelegationOwnerFlag: &atomic.Flag{}, + refactorPeersMiniBlocksFlag: &atomic.Flag{}, } } @@ -608,3 +610,8 @@ func (holder *epochFlagsHolder) IsESDTNFTImprovementV1FlagEnabled() bool { func (holder *epochFlagsHolder) IsChangeDelegationOwnerFlagEnabled() bool { return holder.changeDelegationOwnerFlag.IsSet() } + +// IsRefactorPeersMiniBlocksFlagEnabled returns true if refactorPeersMiniBlocksFlag is enabled +func (holder *epochFlagsHolder) IsRefactorPeersMiniBlocksFlagEnabled() bool { + return holder.refactorPeersMiniBlocksFlag.IsSet() +} diff --git a/common/interface.go b/common/interface.go index 71b31719b4e..78b57d4a71e 100644 --- a/common/interface.go +++ b/common/interface.go @@ -202,6 +202,7 @@ type EnableEpochsHandler interface { CheckExecuteReadOnlyEnableEpoch() uint32 StorageAPICostOptimizationEnableEpoch() uint32 MiniBlockPartialExecutionEnableEpoch() uint32 + RefactorPeersMiniBlocksEnableEpoch() uint32 IsSCDeployFlagEnabled() bool IsBuiltInFunctionsFlagEnabled() bool IsRelayedTransactionsFlagEnabled() bool @@ -289,6 +290,7 @@ type EnableEpochsHandler interface { IsESDTNFTImprovementV1FlagEnabled() bool IsSetSenderInEeiOutputTransferFlagEnabled() bool IsChangeDelegationOwnerFlagEnabled() bool + IsRefactorPeersMiniBlocksFlagEnabled() bool IsInterfaceNil() bool } diff --git a/config/config.go b/config/config.go index 5a284ef87ca..e145a603285 100644 --- a/config/config.go +++ b/config/config.go @@ -170,6 +170,7 @@ type Config struct { WhiteListPool CacheConfig WhiteListerVerifiedTxs CacheConfig SmartContractDataPool CacheConfig + ValidatorInfoPool CacheConfig TrieSyncStorage TrieSyncStorageConfig EpochStartConfig EpochStartConfig AddressPubkeyConverter PubkeyConfig diff --git a/config/epochConfig.go b/config/epochConfig.go index edfb9520b91..2496a3250d3 100644 --- a/config/epochConfig.go +++ b/config/epochConfig.go @@ -86,6 +86,7 @@ type EnableEpochs struct { MiniBlockPartialExecutionEnableEpoch uint32 ESDTMetadataContinuousCleanupEnableEpoch uint32 SetSenderInEeiOutputTransferEnableEpoch uint32 + RefactorPeersMiniBlocksEnableEpoch uint32 } // GasScheduleByEpochs represents a gas schedule toml entry that will be applied from the provided epoch diff --git a/dataRetriever/constants.go b/dataRetriever/constants.go index 957c4a41cd2..926cffe11fa 100644 --- a/dataRetriever/constants.go +++ b/dataRetriever/constants.go @@ -11,3 +11,6 @@ const UnsignedTxPoolName = "uTxPool" // RewardTxPoolName defines the name of the reward transactions pool const RewardTxPoolName = "rewardTxPool" + +// ValidatorsInfoPoolName defines the name of the validators info pool +const ValidatorsInfoPoolName = "validatorsInfoPool" diff --git a/dataRetriever/dataPool/currentBlockPool.go b/dataRetriever/dataPool/currentBlockTransactionPool.go similarity index 80% rename from dataRetriever/dataPool/currentBlockPool.go rename to dataRetriever/dataPool/currentBlockTransactionPool.go index 
f8f3ac2c4ef..4ec1accf237 100644 --- a/dataRetriever/dataPool/currentBlockPool.go +++ b/dataRetriever/dataPool/currentBlockTransactionPool.go @@ -15,22 +15,22 @@ type transactionMapCacher struct { txsForBlock map[string]data.TransactionHandler } -// NewCurrentBlockPool returns a new pool to be used for current block -func NewCurrentBlockPool() *transactionMapCacher { +// NewCurrentBlockTransactionsPool returns a new transactions pool to be used for the current block +func NewCurrentBlockTransactionsPool() *transactionMapCacher { return &transactionMapCacher{ mutTxs: sync.RWMutex{}, txsForBlock: make(map[string]data.TransactionHandler), } } -// Clean creates a new pool +// Clean creates a new transaction pool func (tmc *transactionMapCacher) Clean() { tmc.mutTxs.Lock() tmc.txsForBlock = make(map[string]data.TransactionHandler) tmc.mutTxs.Unlock() } -// GetTx returns the element saved for the hash +// GetTx gets the transaction for the given hash func (tmc *transactionMapCacher) GetTx(txHash []byte) (data.TransactionHandler, error) { tmc.mutTxs.RLock() defer tmc.mutTxs.RUnlock() @@ -43,7 +43,7 @@ func (tmc *transactionMapCacher) GetTx(txHash []byte) (data.TransactionHandler, return tx, nil } -// AddTx writes the tx to the map +// AddTx adds the transaction for the given hash func (tmc *transactionMapCacher) AddTx(txHash []byte, tx data.TransactionHandler) { if check.IfNil(tx) { return diff --git a/dataRetriever/dataPool/currentBlockPool_test.go b/dataRetriever/dataPool/currentBlockTransactionPool_test.go similarity index 94% rename from dataRetriever/dataPool/currentBlockPool_test.go rename to dataRetriever/dataPool/currentBlockTransactionPool_test.go index 08d3ab82a15..dc589ff1433 100644 --- a/dataRetriever/dataPool/currentBlockPool_test.go +++ b/dataRetriever/dataPool/currentBlockTransactionPool_test.go @@ -13,7 +13,7 @@ func TestCurrentBlockPool_AddGetCleanTx(t *testing.T) { txHash := []byte("hash") tx := &transaction.Transaction{} - currentBlockPool := NewCurrentBlockPool() + currentBlockPool := NewCurrentBlockTransactionsPool() require.False(t, currentBlockPool.IsInterfaceNil()) currentBlockPool.AddTx(txHash, tx) @@ -31,5 +31,4 @@ func TestCurrentBlockPool_AddGetCleanTx(t *testing.T) { txFromPool, err = currentBlockPool.GetTx(txHash) require.Nil(t, txFromPool) require.Equal(t, dataRetriever.ErrTxNotFoundInBlockPool, err) - } diff --git a/dataRetriever/dataPool/currentEpochValidatorInfoPool.go b/dataRetriever/dataPool/currentEpochValidatorInfoPool.go new file mode 100644 index 00000000000..34ba5a841ab --- /dev/null +++ b/dataRetriever/dataPool/currentEpochValidatorInfoPool.go @@ -0,0 +1,60 @@ +package dataPool + +import ( + "sync" + + "github.com/ElrondNetwork/elrond-go-core/core/check" + "github.com/ElrondNetwork/elrond-go/dataRetriever" + "github.com/ElrondNetwork/elrond-go/state" +) + +var _ dataRetriever.ValidatorInfoCacher = (*validatorInfoMapCacher)(nil) + +type validatorInfoMapCacher struct { + mutValidatorInfo sync.RWMutex + validatorInfoForEpoch map[string]*state.ShardValidatorInfo +} + +// NewCurrentEpochValidatorInfoPool returns a new validator info pool to be used for the current epoch +func NewCurrentEpochValidatorInfoPool() *validatorInfoMapCacher { + return &validatorInfoMapCacher{ + mutValidatorInfo: sync.RWMutex{}, + validatorInfoForEpoch: make(map[string]*state.ShardValidatorInfo), + } +} + +// Clean creates a new validator info pool +func (vimc *validatorInfoMapCacher) Clean() { + vimc.mutValidatorInfo.Lock() + vimc.validatorInfoForEpoch = 
make(map[string]*state.ShardValidatorInfo) + vimc.mutValidatorInfo.Unlock() +} + +// GetValidatorInfo gets the validator info for the given hash +func (vimc *validatorInfoMapCacher) GetValidatorInfo(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + vimc.mutValidatorInfo.RLock() + defer vimc.mutValidatorInfo.RUnlock() + + validatorInfo, ok := vimc.validatorInfoForEpoch[string(validatorInfoHash)] + if !ok { + return nil, dataRetriever.ErrValidatorInfoNotFoundInEpochPool + } + + return validatorInfo, nil +} + +// AddValidatorInfo adds the validator info for the given hash +func (vimc *validatorInfoMapCacher) AddValidatorInfo(validatorInfoHash []byte, validatorInfo *state.ShardValidatorInfo) { + if check.IfNil(validatorInfo) { + return + } + + vimc.mutValidatorInfo.Lock() + vimc.validatorInfoForEpoch[string(validatorInfoHash)] = validatorInfo + vimc.mutValidatorInfo.Unlock() +} + +// IsInterfaceNil returns true if underlying object is nil +func (vimc *validatorInfoMapCacher) IsInterfaceNil() bool { + return vimc == nil +} diff --git a/dataRetriever/dataPool/currentEpochValidatorInfoPool_test.go b/dataRetriever/dataPool/currentEpochValidatorInfoPool_test.go new file mode 100644 index 00000000000..0337e815ddf --- /dev/null +++ b/dataRetriever/dataPool/currentEpochValidatorInfoPool_test.go @@ -0,0 +1,34 @@ +package dataPool + +import ( + "testing" + + "github.com/ElrondNetwork/elrond-go/dataRetriever" + "github.com/ElrondNetwork/elrond-go/state" + "github.com/stretchr/testify/require" +) + +func TestCurrentEpochValidatorInfoPool_AddGetCleanTx(t *testing.T) { + t.Parallel() + + validatorInfoHash := []byte("hash") + validatorInfo := &state.ShardValidatorInfo{} + currentValidatorInfoPool := NewCurrentEpochValidatorInfoPool() + require.False(t, currentValidatorInfoPool.IsInterfaceNil()) + + currentValidatorInfoPool.AddValidatorInfo(validatorInfoHash, validatorInfo) + currentValidatorInfoPool.AddValidatorInfo(validatorInfoHash, nil) + + validatorInfoFromPool, err := currentValidatorInfoPool.GetValidatorInfo([]byte("wrong hash")) + require.Nil(t, validatorInfoFromPool) + require.Equal(t, dataRetriever.ErrValidatorInfoNotFoundInEpochPool, err) + + validatorInfoFromPool, err = currentValidatorInfoPool.GetValidatorInfo(validatorInfoHash) + require.Nil(t, err) + require.Equal(t, validatorInfo, validatorInfoFromPool) + + currentValidatorInfoPool.Clean() + validatorInfoFromPool, err = currentValidatorInfoPool.GetValidatorInfo(validatorInfoHash) + require.Nil(t, validatorInfoFromPool) + require.Equal(t, dataRetriever.ErrValidatorInfoNotFoundInEpochPool, err) +} diff --git a/dataRetriever/dataPool/dataPool.go b/dataRetriever/dataPool/dataPool.go index 92eeeb291ff..4bb4c20720e 100644 --- a/dataRetriever/dataPool/dataPool.go +++ b/dataRetriever/dataPool/dataPool.go @@ -12,34 +12,38 @@ var _ dataRetriever.PoolsHolder = (*dataPool)(nil) var log = logger.GetOrCreate("dataRetriever/dataPool") type dataPool struct { - transactions dataRetriever.ShardedDataCacherNotifier - unsignedTransactions dataRetriever.ShardedDataCacherNotifier - rewardTransactions dataRetriever.ShardedDataCacherNotifier - headers dataRetriever.HeadersPool - miniBlocks storage.Cacher - peerChangesBlocks storage.Cacher - trieNodes storage.Cacher - trieNodesChunks storage.Cacher - currBlockTxs dataRetriever.TransactionCacher - smartContracts storage.Cacher - peerAuthentications storage.Cacher - heartbeats storage.Cacher + transactions dataRetriever.ShardedDataCacherNotifier + unsignedTransactions 
dataRetriever.ShardedDataCacherNotifier + rewardTransactions dataRetriever.ShardedDataCacherNotifier + headers dataRetriever.HeadersPool + miniBlocks storage.Cacher + peerChangesBlocks storage.Cacher + trieNodes storage.Cacher + trieNodesChunks storage.Cacher + currBlockTxs dataRetriever.TransactionCacher + currEpochValidatorInfo dataRetriever.ValidatorInfoCacher + smartContracts storage.Cacher + peerAuthentications storage.Cacher + heartbeats storage.Cacher + validatorsInfo dataRetriever.ShardedDataCacherNotifier } // DataPoolArgs represents the data pool's constructor structure type DataPoolArgs struct { - Transactions dataRetriever.ShardedDataCacherNotifier - UnsignedTransactions dataRetriever.ShardedDataCacherNotifier - RewardTransactions dataRetriever.ShardedDataCacherNotifier - Headers dataRetriever.HeadersPool - MiniBlocks storage.Cacher - PeerChangesBlocks storage.Cacher - TrieNodes storage.Cacher - TrieNodesChunks storage.Cacher - CurrentBlockTransactions dataRetriever.TransactionCacher - SmartContracts storage.Cacher - PeerAuthentications storage.Cacher - Heartbeats storage.Cacher + Transactions dataRetriever.ShardedDataCacherNotifier + UnsignedTransactions dataRetriever.ShardedDataCacherNotifier + RewardTransactions dataRetriever.ShardedDataCacherNotifier + Headers dataRetriever.HeadersPool + MiniBlocks storage.Cacher + PeerChangesBlocks storage.Cacher + TrieNodes storage.Cacher + TrieNodesChunks storage.Cacher + CurrentBlockTransactions dataRetriever.TransactionCacher + CurrentEpochValidatorInfo dataRetriever.ValidatorInfoCacher + SmartContracts storage.Cacher + PeerAuthentications storage.Cacher + Heartbeats storage.Cacher + ValidatorsInfo dataRetriever.ShardedDataCacherNotifier } // NewDataPool creates a data pools holder object @@ -65,6 +69,9 @@ func NewDataPool(args DataPoolArgs) (*dataPool, error) { if check.IfNil(args.CurrentBlockTransactions) { return nil, dataRetriever.ErrNilCurrBlockTxs } + if check.IfNil(args.CurrentEpochValidatorInfo) { + return nil, dataRetriever.ErrNilCurrentEpochValidatorInfo + } if check.IfNil(args.TrieNodes) { return nil, dataRetriever.ErrNilTrieNodesPool } @@ -80,20 +87,25 @@ func NewDataPool(args DataPoolArgs) (*dataPool, error) { if check.IfNil(args.Heartbeats) { return nil, dataRetriever.ErrNilHeartbeatPool } + if check.IfNil(args.ValidatorsInfo) { + return nil, dataRetriever.ErrNilValidatorInfoPool + } return &dataPool{ - transactions: args.Transactions, - unsignedTransactions: args.UnsignedTransactions, - rewardTransactions: args.RewardTransactions, - headers: args.Headers, - miniBlocks: args.MiniBlocks, - peerChangesBlocks: args.PeerChangesBlocks, - trieNodes: args.TrieNodes, - trieNodesChunks: args.TrieNodesChunks, - currBlockTxs: args.CurrentBlockTransactions, - smartContracts: args.SmartContracts, - peerAuthentications: args.PeerAuthentications, - heartbeats: args.Heartbeats, + transactions: args.Transactions, + unsignedTransactions: args.UnsignedTransactions, + rewardTransactions: args.RewardTransactions, + headers: args.Headers, + miniBlocks: args.MiniBlocks, + peerChangesBlocks: args.PeerChangesBlocks, + trieNodes: args.TrieNodes, + trieNodesChunks: args.TrieNodesChunks, + currBlockTxs: args.CurrentBlockTransactions, + currEpochValidatorInfo: args.CurrentEpochValidatorInfo, + smartContracts: args.SmartContracts, + peerAuthentications: args.PeerAuthentications, + heartbeats: args.Heartbeats, + validatorsInfo: args.ValidatorsInfo, }, nil } @@ -102,6 +114,11 @@ func (dp *dataPool) CurrentBlockTxs() dataRetriever.TransactionCacher { 
return dp.currBlockTxs } +// CurrentEpochValidatorInfo returns the holder for current epoch validator info +func (dp *dataPool) CurrentEpochValidatorInfo() dataRetriever.ValidatorInfoCacher { + return dp.currEpochValidatorInfo +} + // Transactions returns the holder for transactions func (dp *dataPool) Transactions() dataRetriever.ShardedDataCacherNotifier { return dp.transactions @@ -157,6 +174,11 @@ func (dp *dataPool) Heartbeats() storage.Cacher { return dp.heartbeats } +// ValidatorsInfo returns the holder for validators info +func (dp *dataPool) ValidatorsInfo() dataRetriever.ShardedDataCacherNotifier { + return dp.validatorsInfo +} + // Close closes all the components func (dp *dataPool) Close() error { var lastError error diff --git a/dataRetriever/dataPool/dataPool_test.go b/dataRetriever/dataPool/dataPool_test.go index d64648f28b0..05fc6e8e280 100644 --- a/dataRetriever/dataPool/dataPool_test.go +++ b/dataRetriever/dataPool/dataPool_test.go @@ -16,18 +16,20 @@ import ( func createMockDataPoolArgs() dataPool.DataPoolArgs { return dataPool.DataPoolArgs{ - Transactions: testscommon.NewShardedDataStub(), - UnsignedTransactions: testscommon.NewShardedDataStub(), - RewardTransactions: testscommon.NewShardedDataStub(), - Headers: &mock.HeadersCacherStub{}, - MiniBlocks: testscommon.NewCacherStub(), - PeerChangesBlocks: testscommon.NewCacherStub(), - TrieNodes: testscommon.NewCacherStub(), - TrieNodesChunks: testscommon.NewCacherStub(), - CurrentBlockTransactions: &mock.TxForCurrentBlockStub{}, - SmartContracts: testscommon.NewCacherStub(), - PeerAuthentications: testscommon.NewCacherStub(), - Heartbeats: testscommon.NewCacherStub(), + Transactions: testscommon.NewShardedDataStub(), + UnsignedTransactions: testscommon.NewShardedDataStub(), + RewardTransactions: testscommon.NewShardedDataStub(), + Headers: &mock.HeadersCacherStub{}, + MiniBlocks: testscommon.NewCacherStub(), + PeerChangesBlocks: testscommon.NewCacherStub(), + TrieNodes: testscommon.NewCacherStub(), + TrieNodesChunks: testscommon.NewCacherStub(), + CurrentBlockTransactions: &mock.TxForCurrentBlockStub{}, + CurrentEpochValidatorInfo: &mock.ValidatorInfoForCurrentEpochStub{}, + SmartContracts: testscommon.NewCacherStub(), + PeerAuthentications: testscommon.NewCacherStub(), + Heartbeats: testscommon.NewCacherStub(), + ValidatorsInfo: testscommon.NewShardedDataStub(), } } @@ -141,6 +143,17 @@ func TestNewDataPool_NilHeartbeatsShouldErr(t *testing.T) { assert.Nil(t, tdp) } +func TestNewDataPool_NilValidatorsInfoShouldErr(t *testing.T) { + t.Parallel() + + args := createMockDataPoolArgs() + args.ValidatorsInfo = nil + tdp, err := dataPool.NewDataPool(args) + + assert.Equal(t, dataRetriever.ErrNilValidatorInfoPool, err) + assert.Nil(t, tdp) +} + func TestNewDataPool_NilPeerBlocksShouldErr(t *testing.T) { t.Parallel() @@ -152,7 +165,8 @@ func TestNewDataPool_NilPeerBlocksShouldErr(t *testing.T) { assert.Nil(t, tdp) } -func TestNewDataPool_NilCurrBlockShouldErr(t *testing.T) { +func TestNewDataPool_NilCurrBlockTransactionsShouldErr(t *testing.T) { + t.Parallel() args := createMockDataPoolArgs() args.CurrentBlockTransactions = nil @@ -162,6 +176,17 @@ func TestNewDataPool_NilCurrBlockShouldErr(t *testing.T) { require.Equal(t, dataRetriever.ErrNilCurrBlockTxs, err) } +func TestNewDataPool_NilCurrEpochValidatorInfoShouldErr(t *testing.T) { + t.Parallel() + + args := createMockDataPoolArgs() + args.CurrentEpochValidatorInfo = nil + tdp, err := dataPool.NewDataPool(args) + + require.Nil(t, tdp) + require.Equal(t, 
dataRetriever.ErrNilCurrentEpochValidatorInfo, err) +} + func TestNewDataPool_OkValsShouldWork(t *testing.T) { t.Parallel() @@ -178,6 +203,7 @@ func TestNewDataPool_OkValsShouldWork(t *testing.T) { assert.True(t, args.MiniBlocks == tdp.MiniBlocks()) assert.True(t, args.PeerChangesBlocks == tdp.PeerChangesBlocks()) assert.True(t, args.CurrentBlockTransactions == tdp.CurrentBlockTxs()) + assert.True(t, args.CurrentEpochValidatorInfo == tdp.CurrentEpochValidatorInfo()) assert.True(t, args.TrieNodes == tdp.TrieNodes()) assert.True(t, args.TrieNodesChunks == tdp.TrieNodesChunks()) assert.True(t, args.SmartContracts == tdp.SmartContracts()) diff --git a/dataRetriever/errors.go b/dataRetriever/errors.go index 0b136705461..662ab1e2735 100644 --- a/dataRetriever/errors.go +++ b/dataRetriever/errors.go @@ -13,8 +13,11 @@ var ErrSendRequest = errors.New("cannot send request: peer list is empty or erro // ErrNilValue signals the value is nil var ErrNilValue = errors.New("nil value") -// ErrTxNotFoundInBlockPool signals the value is nil -var ErrTxNotFoundInBlockPool = errors.New("cannot find tx in current block pool") +// ErrTxNotFoundInBlockPool signals that transaction was not found in the current block pool +var ErrTxNotFoundInBlockPool = errors.New("transaction was not found in the current block pool") + +// ErrValidatorInfoNotFoundInEpochPool signals that validator info was not found in the current epoch pool +var ErrValidatorInfoNotFoundInEpochPool = errors.New("validator info was not found in the current epoch pool") // ErrNilMarshalizer signals that an operation has been attempted to or with a nil Marshalizer implementation var ErrNilMarshalizer = errors.New("nil Marshalizer") @@ -146,9 +149,12 @@ var ErrNilPeersRatingHandler = errors.New("nil peers rating handler") // ErrNilTrieDataGetter signals that a nil trie data getter has been provided var ErrNilTrieDataGetter = errors.New("nil trie data getter provided") -// ErrNilCurrBlockTxs signals that nil current blocks txs holder was provided +// ErrNilCurrBlockTxs signals that nil current block txs holder was provided var ErrNilCurrBlockTxs = errors.New("nil current block txs holder") +// ErrNilCurrentEpochValidatorInfo signals that nil current epoch validator info holder was provided +var ErrNilCurrentEpochValidatorInfo = errors.New("nil current epoch validator info holder") + // ErrNilRequestedItemsHandler signals that a nil requested items handler was provided var ErrNilRequestedItemsHandler = errors.New("nil requested items handler") @@ -247,3 +253,12 @@ var ErrNilPeerShardMapper = errors.New("nil peer shard mapper") // ErrStorerNotFound signals that the storer was not found var ErrStorerNotFound = errors.New("storer not found") + +// ErrNilValidatorInfoPool signals that a nil validator info pool has been provided +var ErrNilValidatorInfoPool = errors.New("nil validator info pool") + +// ErrNilValidatorInfoStorage signals that a nil validator info storage has been provided +var ErrNilValidatorInfoStorage = errors.New("nil validator info storage") + +// ErrValidatorInfoNotFound signals that no validator info was found +var ErrValidatorInfoNotFound = errors.New("validator info not found") diff --git a/dataRetriever/factory/dataPoolFactory.go b/dataRetriever/factory/dataPoolFactory.go index 74187ea0d8a..9e9bf367c6e 100644 --- a/dataRetriever/factory/dataPoolFactory.go +++ b/dataRetriever/factory/dataPoolFactory.go @@ -139,20 +139,28 @@ func NewDataPoolFromConfig(args ArgsDataPool) (dataRetriever.PoolsHolder, error) return nil, fmt.Errorf("%w 
while creating the cache for the heartbeat messages", err) } - currBlockTxs := dataPool.NewCurrentBlockPool() + validatorsInfo, err := shardedData.NewShardedData(dataRetriever.ValidatorsInfoPoolName, factory.GetCacherFromConfig(mainConfig.ValidatorInfoPool)) + if err != nil { + return nil, fmt.Errorf("%w while creating the cache for the validator info results", err) + } + + currBlockTransactions := dataPool.NewCurrentBlockTransactionsPool() + currEpochValidatorInfo := dataPool.NewCurrentEpochValidatorInfoPool() dataPoolArgs := dataPool.DataPoolArgs{ - Transactions: txPool, - UnsignedTransactions: uTxPool, - RewardTransactions: rewardTxPool, - Headers: hdrPool, - MiniBlocks: txBlockBody, - PeerChangesBlocks: peerChangeBlockBody, - TrieNodes: adaptedTrieNodesStorage, - TrieNodesChunks: trieNodesChunks, - CurrentBlockTransactions: currBlockTxs, - SmartContracts: smartContracts, - PeerAuthentications: peerAuthPool, - Heartbeats: heartbeatPool, + Transactions: txPool, + UnsignedTransactions: uTxPool, + RewardTransactions: rewardTxPool, + Headers: hdrPool, + MiniBlocks: txBlockBody, + PeerChangesBlocks: peerChangeBlockBody, + TrieNodes: adaptedTrieNodesStorage, + TrieNodesChunks: trieNodesChunks, + CurrentBlockTransactions: currBlockTransactions, + CurrentEpochValidatorInfo: currEpochValidatorInfo, + SmartContracts: smartContracts, + PeerAuthentications: peerAuthPool, + Heartbeats: heartbeatPool, + ValidatorsInfo: validatorsInfo, } return dataPool.NewDataPool(dataPoolArgs) } diff --git a/dataRetriever/factory/dataPoolFactory_test.go b/dataRetriever/factory/dataPoolFactory_test.go index 5b5fb174144..dd74757debd 100644 --- a/dataRetriever/factory/dataPoolFactory_test.go +++ b/dataRetriever/factory/dataPoolFactory_test.go @@ -144,6 +144,13 @@ func TestNewDataPoolFromConfig_BadConfigShouldErr(t *testing.T) { fmt.Println(err) require.True(t, errors.Is(err, storage.ErrNotSupportedCacheType)) require.True(t, strings.Contains(err.Error(), "the cache for the heartbeat messages")) + + args = getGoodArgs() + args.Config.ValidatorInfoPool.Capacity = 0 + holder, err = NewDataPoolFromConfig(args) + require.Nil(t, holder) + require.True(t, errors.Is(err, storage.ErrInvalidConfig)) + require.True(t, strings.Contains(err.Error(), "the cache for the validator info results")) } func getGoodArgs() ArgsDataPool { diff --git a/dataRetriever/factory/resolverscontainer/baseResolversContainerFactory.go b/dataRetriever/factory/resolverscontainer/baseResolversContainerFactory.go index f502ac5b1dc..859bd85f024 100644 --- a/dataRetriever/factory/resolverscontainer/baseResolversContainerFactory.go +++ b/dataRetriever/factory/resolverscontainer/baseResolversContainerFactory.go @@ -403,3 +403,39 @@ func (brcf *baseResolversContainerFactory) createTrieNodesResolver( return resolver, nil } + +func (brcf *baseResolversContainerFactory) generateValidatorInfoResolver() error { + identifierValidatorInfo := common.ValidatorInfoTopic + shardC := brcf.shardCoordinator + resolverSender, err := brcf.createOneResolverSenderWithSpecifiedNumRequests(identifierValidatorInfo, EmptyExcludePeersOnTopic, shardC.SelfId(), brcf.numCrossShardPeers, brcf.numIntraShardPeers) + if err != nil { + return err + } + + validatorInfoStorage, err := brcf.store.GetStorer(dataRetriever.UnsignedTransactionUnit) + if err != nil { + return err + } + + arg := resolvers.ArgValidatorInfoResolver{ + SenderResolver: resolverSender, + Marshaller: brcf.marshalizer, + AntifloodHandler: brcf.inputAntifloodHandler, + Throttler: brcf.throttler, + ValidatorInfoPool: 
brcf.dataPools.ValidatorsInfo(), + ValidatorInfoStorage: validatorInfoStorage, + DataPacker: brcf.dataPacker, + IsFullHistoryNode: brcf.isFullHistoryNode, + } + validatorInfoResolver, err := resolvers.NewValidatorInfoResolver(arg) + if err != nil { + return err + } + + err = brcf.messenger.RegisterMessageProcessor(validatorInfoResolver.RequestTopic(), common.DefaultResolversIdentifier, validatorInfoResolver) + if err != nil { + return err + } + + return brcf.container.Add(identifierValidatorInfo, validatorInfoResolver) +} diff --git a/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory.go b/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory.go index 96b1a967c8a..5b3622c9a96 100644 --- a/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory.go +++ b/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory.go @@ -130,6 +130,11 @@ func (mrcf *metaResolversContainerFactory) Create() (dataRetriever.ResolversCont return nil, err } + err = mrcf.generateValidatorInfoResolver() + if err != nil { + return nil, err + } + return mrcf.container, nil } diff --git a/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory_test.go b/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory_test.go index 3a3539fa2ca..dbf99a824e1 100644 --- a/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory_test.go +++ b/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory_test.go @@ -384,8 +384,9 @@ func TestMetaResolversContainerFactory_With4ShardsShouldWork(t *testing.T) { numResolversTxs := noOfShards + 1 numResolversTrieNodes := 2 numResolversPeerAuth := 1 + numResolverValidatorInfo := 1 totalResolvers := numResolversShardHeadersForMetachain + numResolverMetablocks + numResolversMiniBlocks + - numResolversUnsigned + numResolversTxs + numResolversTrieNodes + numResolversRewards + numResolversPeerAuth + numResolversUnsigned + numResolversTxs + numResolversTrieNodes + numResolversRewards + numResolversPeerAuth + numResolverValidatorInfo assert.Equal(t, totalResolvers, container.Len()) diff --git a/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory.go b/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory.go index df4e1333b8a..4c92e2c1223 100644 --- a/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory.go +++ b/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory.go @@ -128,6 +128,11 @@ func (srcf *shardResolversContainerFactory) Create() (dataRetriever.ResolversCon return nil, err } + err = srcf.generateValidatorInfoResolver() + if err != nil { + return nil, err + } + return srcf.container, nil } diff --git a/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory_test.go b/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory_test.go index 69d2ab43c3c..134aa6f3d12 100644 --- a/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory_test.go +++ b/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory_test.go @@ -449,8 +449,9 @@ func TestShardResolversContainerFactory_With4ShardsShouldWork(t *testing.T) { numResolverMetaBlockHeaders := 1 numResolverTrieNodes := 1 numResolverPeerAuth := 1 + numResolverValidatorInfo := 1 totalResolvers := numResolverTxs + numResolverHeaders + numResolverMiniBlocks + numResolverMetaBlockHeaders + - numResolverSCRs + numResolverRewardTxs + numResolverTrieNodes + numResolverPeerAuth + numResolverSCRs + 
numResolverRewardTxs + numResolverTrieNodes + numResolverPeerAuth + numResolverValidatorInfo assert.Equal(t, totalResolvers, container.Len()) } diff --git a/dataRetriever/interface.go b/dataRetriever/interface.go index feaa9e00037..c722f0e0fbb 100644 --- a/dataRetriever/interface.go +++ b/dataRetriever/interface.go @@ -7,6 +7,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/core/counting" "github.com/ElrondNetwork/elrond-go-core/data" "github.com/ElrondNetwork/elrond-go/p2p" + "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/storage" ) @@ -56,6 +57,12 @@ type PeerAuthenticationResolver interface { RequestDataFromHashArray(hashes [][]byte, epoch uint32) error } +// ValidatorInfoResolver defines what a validator info resolver should do +type ValidatorInfoResolver interface { + Resolver + RequestDataFromHashArray(hashes [][]byte, epoch uint32) error +} + // TopicResolverSender defines what sending operations are allowed for a topic resolver type TopicResolverSender interface { SendOnRequestTopic(rd *RequestData, originalHashes [][]byte) error @@ -204,7 +211,7 @@ type HeadersPool interface { GetNumHeaders(shardId uint32) int } -// TransactionCacher defines the methods for the local cacher, info for current round +// TransactionCacher defines the methods for the local transaction cacher, needed for the current block type TransactionCacher interface { Clean() GetTx(txHash []byte) (data.TransactionHandler, error) @@ -212,6 +219,14 @@ type TransactionCacher interface { IsInterfaceNil() bool } +// ValidatorInfoCacher defines the methods for the local validator info cacher, needed for the current epoch +type ValidatorInfoCacher interface { + Clean() + GetValidatorInfo(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) + AddValidatorInfo(validatorInfoHash []byte, validatorInfo *state.ShardValidatorInfo) + IsInterfaceNil() bool +} + // PoolsHolder defines getters for data pools type PoolsHolder interface { Transactions() ShardedDataCacherNotifier @@ -224,8 +239,10 @@ type PoolsHolder interface { TrieNodesChunks() storage.Cacher SmartContracts() storage.Cacher CurrentBlockTxs() TransactionCacher + CurrentEpochValidatorInfo() ValidatorInfoCacher PeerAuthentications() storage.Cacher Heartbeats() storage.Cacher + ValidatorsInfo() ShardedDataCacherNotifier Close() error IsInterfaceNil() bool } diff --git a/dataRetriever/mock/validatorInfoForCurrentEpochStub.go b/dataRetriever/mock/validatorInfoForCurrentEpochStub.go new file mode 100644 index 00000000000..84905d69262 --- /dev/null +++ b/dataRetriever/mock/validatorInfoForCurrentEpochStub.go @@ -0,0 +1,39 @@ +package mock + +import ( + "github.com/ElrondNetwork/elrond-go/state" +) + +// ValidatorInfoForCurrentEpochStub - +type ValidatorInfoForCurrentEpochStub struct { + CleanCalled func() + GetValidatorInfoCalled func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) + AddValidatorInfoCalled func(validatorInfoHash []byte, validatorInfo *state.ShardValidatorInfo) +} + +// Clean - +func (v *ValidatorInfoForCurrentEpochStub) Clean() { + if v.CleanCalled != nil { + v.CleanCalled() + } +} + +// GetValidatorInfo - +func (v *ValidatorInfoForCurrentEpochStub) GetValidatorInfo(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + if v.GetValidatorInfoCalled != nil { + return v.GetValidatorInfoCalled(validatorInfoHash) + } + return nil, nil +} + +// AddValidatorInfo - +func (v *ValidatorInfoForCurrentEpochStub) AddValidatorInfo(validatorInfoHash []byte, validatorInfo *state.ShardValidatorInfo) { + if 
v.AddValidatorInfoCalled != nil { + v.AddValidatorInfoCalled(validatorInfoHash, validatorInfo) + } +} + +// IsInterfaceNil - +func (v *ValidatorInfoForCurrentEpochStub) IsInterfaceNil() bool { + return v == nil +} diff --git a/dataRetriever/requestHandlers/requestHandler.go b/dataRetriever/requestHandlers/requestHandler.go index 2b1055c61f3..fda20a0e149 100644 --- a/dataRetriever/requestHandlers/requestHandler.go +++ b/dataRetriever/requestHandlers/requestHandler.go @@ -30,6 +30,7 @@ const uniqueMiniblockSuffix = "mb" const uniqueHeadersSuffix = "hdr" const uniqueMetaHeadersSuffix = "mhdr" const uniqueTrieNodesSuffix = "tn" +const uniqueValidatorInfoSuffix = "vi" // TODO move the keys definitions that are whitelisted in core and use them in InterceptedData implementations, Identifiers() function @@ -552,6 +553,91 @@ func (rrh *resolverRequestHandler) RequestMetaHeaderByNonce(nonce uint64) { rrh.addRequestedItems([][]byte{key}, uniqueMetaHeadersSuffix) } +// RequestValidatorInfo asks for the validator info associated with a specific hash from connected peers +func (rrh *resolverRequestHandler) RequestValidatorInfo(hash []byte) { + if !rrh.testIfRequestIsNeeded(hash, uniqueValidatorInfoSuffix) { + return + } + + log.Debug("requesting validator info messages from network", + "topic", common.ValidatorInfoTopic, + "hash", hash, + "epoch", rrh.epoch, + ) + + resolver, err := rrh.resolversFinder.MetaChainResolver(common.ValidatorInfoTopic) + if err != nil { + log.Error("RequestValidatorInfo.MetaChainResolver", + "error", err.Error(), + "topic", common.ValidatorInfoTopic, + "hash", hash, + "epoch", rrh.epoch, + ) + return + } + + rrh.whiteList.Add([][]byte{hash}) + + err = resolver.RequestDataFromHash(hash, rrh.epoch) + if err != nil { + log.Debug("RequestValidatorInfo.RequestDataFromHash", + "error", err.Error(), + "topic", common.ValidatorInfoTopic, + "hash", hash, + "epoch", rrh.epoch, + ) + return + } + + rrh.addRequestedItems([][]byte{hash}, uniqueValidatorInfoSuffix) +} + +// RequestValidatorsInfo asks for the validators' info associated with the specified hashes from connected peers +func (rrh *resolverRequestHandler) RequestValidatorsInfo(hashes [][]byte) { + unrequestedHashes := rrh.getUnrequestedHashes(hashes, uniqueValidatorInfoSuffix) + if len(unrequestedHashes) == 0 { + return + } + + log.Debug("requesting validator info messages from network", + "topic", common.ValidatorInfoTopic, + "num hashes", len(unrequestedHashes), + "epoch", rrh.epoch, + ) + + resolver, err := rrh.resolversFinder.MetaChainResolver(common.ValidatorInfoTopic) + if err != nil { + log.Error("RequestValidatorsInfo.MetaChainResolver", + "error", err.Error(), + "topic", common.ValidatorInfoTopic, + "num hashes", len(unrequestedHashes), + "epoch", rrh.epoch, + ) + return + } + + validatorInfoResolver, ok := resolver.(HashSliceResolver) + if !ok { + log.Warn("wrong assertion type when creating a validator info resolver") + return + } + + rrh.whiteList.Add(unrequestedHashes) + + err = validatorInfoResolver.RequestDataFromHashArray(unrequestedHashes, rrh.epoch) + if err != nil { + log.Debug("RequestValidatorsInfo.RequestDataFromHashArray", + "error", err.Error(), + "topic", common.ValidatorInfoTopic, + "num hashes", len(unrequestedHashes), + "epoch", rrh.epoch, + ) + return + } + + rrh.addRequestedItems(unrequestedHashes, uniqueValidatorInfoSuffix) +} + func (rrh *resolverRequestHandler) testIfRequestIsNeeded(key []byte, suffix string) bool { rrh.sweepIfNeeded() diff --git a/dataRetriever/requestHandlers/requestHandler_test.go 
b/dataRetriever/requestHandlers/requestHandler_test.go index fd2164ee1c0..df0235ff220 100644 --- a/dataRetriever/requestHandlers/requestHandler_test.go +++ b/dataRetriever/requestHandlers/requestHandler_test.go @@ -1,6 +1,7 @@ package requestHandlers import ( + "bytes" "sync/atomic" "testing" "time" @@ -1447,3 +1448,145 @@ func TestResolverRequestHandler_RequestPeerAuthenticationsByHashes(t *testing.T) assert.True(t, wasCalled) }) } + +func TestResolverRequestHandler_RequestValidatorInfo(t *testing.T) { + t.Parallel() + + t.Run("MetaChainResolver returns error", func(t *testing.T) { + providedHash := []byte("provided hash") + wasCalled := false + res := &mock.ResolverStub{ + RequestDataFromHashCalled: func(hash []byte, epoch uint32) error { + wasCalled = true + return nil + }, + } + + rrh, _ := NewResolverRequestHandler( + &mock.ResolversFinderStub{ + MetaChainResolverCalled: func(baseTopic string) (resolver dataRetriever.Resolver, e error) { + return res, errors.New("provided err") + }, + }, + &mock.RequestedItemsHandlerStub{}, + &mock.WhiteListHandlerStub{}, + 100, + 0, + time.Second, + ) + + rrh.RequestValidatorInfo(providedHash) + assert.False(t, wasCalled) + }) + t.Run("should work", func(t *testing.T) { + providedHash := []byte("provided hash") + wasCalled := false + res := &mock.ResolverStub{ + RequestDataFromHashCalled: func(hash []byte, epoch uint32) error { + assert.True(t, bytes.Equal(providedHash, hash)) + wasCalled = true + return nil + }, + } + + rrh, _ := NewResolverRequestHandler( + &mock.ResolversFinderStub{ + MetaChainResolverCalled: func(baseTopic string) (resolver dataRetriever.Resolver, e error) { + assert.Equal(t, common.ValidatorInfoTopic, baseTopic) + return res, nil + }, + }, + &mock.RequestedItemsHandlerStub{}, + &mock.WhiteListHandlerStub{}, + 100, + 0, + time.Second, + ) + + rrh.RequestValidatorInfo(providedHash) + assert.True(t, wasCalled) + }) +} + +func TestResolverRequestHandler_RequestValidatorsInfo(t *testing.T) { + t.Parallel() + + t.Run("MetaChainResolver returns error", func(t *testing.T) { + providedHash := []byte("provided hash") + wasCalled := false + res := &mock.ResolverStub{ + RequestDataFromHashCalled: func(hash []byte, epoch uint32) error { + wasCalled = true + return nil + }, + } + + rrh, _ := NewResolverRequestHandler( + &mock.ResolversFinderStub{ + MetaChainResolverCalled: func(baseTopic string) (resolver dataRetriever.Resolver, e error) { + return res, errors.New("provided err") + }, + }, + &mock.RequestedItemsHandlerStub{}, + &mock.WhiteListHandlerStub{}, + 100, + 0, + time.Second, + ) + + rrh.RequestValidatorsInfo([][]byte{providedHash}) + assert.False(t, wasCalled) + }) + t.Run("cast fails", func(t *testing.T) { + providedHash := []byte("provided hash") + mbResolver := &mock.ResolverStub{} // uncastable to HashSliceResolver + wasCalled := false + rrh, _ := NewResolverRequestHandler( + &mock.ResolversFinderStub{ + MetaChainResolverCalled: func(baseTopic string) (resolver dataRetriever.Resolver, e error) { + return mbResolver, nil + }, + }, + &mock.RequestedItemsHandlerStub{}, + &mock.WhiteListHandlerStub{ + AddCalled: func(keys [][]byte) { + wasCalled = true + }, + }, + 100, + 0, + time.Second, + ) + + rrh.RequestValidatorsInfo([][]byte{providedHash}) + assert.False(t, wasCalled) + }) + t.Run("should work", func(t *testing.T) { + providedHashes := [][]byte{[]byte("provided hash 1"), []byte("provided hash 2")} + wasCalled := false + res := &mock.HashSliceResolverStub{ + RequestDataFromHashArrayCalled: func(hashes [][]byte, epoch uint32) 
error { + assert.Equal(t, providedHashes, hashes) + wasCalled = true + return nil + }, + } + + rrh, _ := NewResolverRequestHandler( + &mock.ResolversFinderStub{ + MetaChainResolverCalled: func(baseTopic string) (resolver dataRetriever.Resolver, e error) { + assert.Equal(t, common.ValidatorInfoTopic, baseTopic) + return res, nil + }, + }, + &mock.RequestedItemsHandlerStub{}, + &mock.WhiteListHandlerStub{}, + 100, + 0, + time.Second, + ) + + rrh.RequestValidatorsInfo(providedHashes) + assert.True(t, wasCalled) + }) +} diff --git a/dataRetriever/resolvers/disabled/validatorInfoResolver.go b/dataRetriever/resolvers/disabled/validatorInfoResolver.go new file mode 100644 index 00000000000..c6322fe3051 --- /dev/null +++ b/dataRetriever/resolvers/disabled/validatorInfoResolver.go @@ -0,0 +1,54 @@ +package disabled + +import ( + "github.com/ElrondNetwork/elrond-go-core/core" + "github.com/ElrondNetwork/elrond-go/dataRetriever" + "github.com/ElrondNetwork/elrond-go/p2p" +) + +type validatorInfoResolver struct { +} + +// NewDisabledValidatorInfoResolver creates a new disabled validator info resolver instance +func NewDisabledValidatorInfoResolver() *validatorInfoResolver { + return &validatorInfoResolver{} +} + +// RequestDataFromHash does nothing and returns nil +func (res *validatorInfoResolver) RequestDataFromHash(_ []byte, _ uint32) error { + return nil +} + +// RequestDataFromHashArray does nothing and returns nil +func (res *validatorInfoResolver) RequestDataFromHashArray(_ [][]byte, _ uint32) error { + return nil +} + +// ProcessReceivedMessage does nothing and returns nil +func (res *validatorInfoResolver) ProcessReceivedMessage(_ p2p.MessageP2P, _ core.PeerID) error { + return nil +} + +// SetResolverDebugHandler does nothing and returns nil +func (res *validatorInfoResolver) SetResolverDebugHandler(_ dataRetriever.ResolverDebugHandler) error { + return nil +} + +// SetNumPeersToQuery does nothing +func (res *validatorInfoResolver) SetNumPeersToQuery(_ int, _ int) { +} + +// NumPeersToQuery returns 0 and 0 +func (res *validatorInfoResolver) NumPeersToQuery() (int, int) { + return 0, 0 +} + +// Close does nothing and returns nil +func (res *validatorInfoResolver) Close() error { + return nil +} + +// IsInterfaceNil returns true if there is no value under the interface +func (res *validatorInfoResolver) IsInterfaceNil() bool { + return res == nil +} diff --git a/dataRetriever/resolvers/disabled/validatorInfoResolver_test.go b/dataRetriever/resolvers/disabled/validatorInfoResolver_test.go new file mode 100644 index 00000000000..cdd43fda758 --- /dev/null +++ b/dataRetriever/resolvers/disabled/validatorInfoResolver_test.go @@ -0,0 +1,47 @@ +package disabled + +import ( + "fmt" + "testing" + + "github.com/ElrondNetwork/elrond-go-core/core/check" + "github.com/stretchr/testify/assert" +) + +func TestNewDisabledValidatorInfoResolver(t *testing.T) { + t.Parallel() + + resolver := NewDisabledValidatorInfoResolver() + assert.False(t, check.IfNil(resolver)) +} + +func Test_validatorInfoResolver_SetResolverDebugHandler(t *testing.T) { + t.Parallel() + + defer func() { + r := recover() + if r != nil { + assert.Fail(t, fmt.Sprintf("should have not failed %v", r)) + } + }() + + resolver := NewDisabledValidatorInfoResolver() + + err := resolver.RequestDataFromHash(nil, 0) + assert.Nil(t, err) + + err = resolver.RequestDataFromHashArray(nil, 0) + assert.Nil(t, err) + + err = resolver.SetResolverDebugHandler(nil) + assert.Nil(t, err) + + value1, value2 := resolver.NumPeersToQuery() + assert.Zero(t, value1) + 
assert.Zero(t, value2) + + err = resolver.Close() + assert.Nil(t, err) + + resolver.SetNumPeersToQuery(100, 100) +} diff --git a/dataRetriever/resolvers/validatorInfoResolver.go b/dataRetriever/resolvers/validatorInfoResolver.go new file mode 100644 index 00000000000..19b5a3e90d4 --- /dev/null +++ b/dataRetriever/resolvers/validatorInfoResolver.go @@ -0,0 +1,267 @@ +package resolvers + +import ( + "encoding/hex" + "fmt" + + "github.com/ElrondNetwork/elrond-go-core/core" + "github.com/ElrondNetwork/elrond-go-core/core/check" + "github.com/ElrondNetwork/elrond-go-core/data/batch" + "github.com/ElrondNetwork/elrond-go-core/marshal" + logger "github.com/ElrondNetwork/elrond-go-logger" + "github.com/ElrondNetwork/elrond-go/dataRetriever" + "github.com/ElrondNetwork/elrond-go/p2p" + "github.com/ElrondNetwork/elrond-go/storage" +) + +// maxBuffToSendValidatorsInfo represents max buffer size to send in bytes +const maxBuffToSendValidatorsInfo = 1 << 18 // 256KB + +// ArgValidatorInfoResolver is the argument structure used to create a new validator info resolver instance +type ArgValidatorInfoResolver struct { + SenderResolver dataRetriever.TopicResolverSender + Marshaller marshal.Marshalizer + AntifloodHandler dataRetriever.P2PAntifloodHandler + Throttler dataRetriever.ResolverThrottler + ValidatorInfoPool dataRetriever.ShardedDataCacherNotifier + ValidatorInfoStorage storage.Storer + DataPacker dataRetriever.DataPacker + IsFullHistoryNode bool +} + +// validatorInfoResolver is a wrapper over Resolver that is specialized in resolving validator info requests +type validatorInfoResolver struct { + dataRetriever.TopicResolverSender + messageProcessor + baseStorageResolver + validatorInfoPool dataRetriever.ShardedDataCacherNotifier + validatorInfoStorage storage.Storer + dataPacker dataRetriever.DataPacker +} + +// NewValidatorInfoResolver creates a validator info resolver +func NewValidatorInfoResolver(args ArgValidatorInfoResolver) (*validatorInfoResolver, error) { + err := checkArgs(args) + if err != nil { + return nil, err + } + + return &validatorInfoResolver{ + TopicResolverSender: args.SenderResolver, + messageProcessor: messageProcessor{ + marshalizer: args.Marshaller, + antifloodHandler: args.AntifloodHandler, + throttler: args.Throttler, + topic: args.SenderResolver.RequestTopic(), + }, + baseStorageResolver: createBaseStorageResolver(args.ValidatorInfoStorage, args.IsFullHistoryNode), + validatorInfoPool: args.ValidatorInfoPool, + validatorInfoStorage: args.ValidatorInfoStorage, + dataPacker: args.DataPacker, + }, nil +} + +func checkArgs(args ArgValidatorInfoResolver) error { + if check.IfNil(args.SenderResolver) { + return dataRetriever.ErrNilResolverSender + } + if check.IfNil(args.Marshaller) { + return dataRetriever.ErrNilMarshalizer + } + if check.IfNil(args.AntifloodHandler) { + return dataRetriever.ErrNilAntifloodHandler + } + if check.IfNil(args.Throttler) { + return dataRetriever.ErrNilThrottler + } + if check.IfNil(args.ValidatorInfoPool) { + return dataRetriever.ErrNilValidatorInfoPool + } + if check.IfNil(args.ValidatorInfoStorage) { + return dataRetriever.ErrNilValidatorInfoStorage + } + if check.IfNil(args.DataPacker) { + return dataRetriever.ErrNilDataPacker + } + + return nil +} + +// RequestDataFromHash requests validator info from other peers by hash +func (res *validatorInfoResolver) RequestDataFromHash(hash []byte, epoch uint32) error { + return res.SendOnRequestTopic( + &dataRetriever.RequestData{ + Type: dataRetriever.HashType, + Value: hash, + Epoch: epoch, + }, + 
[][]byte{hash}, + ) +} + +// RequestDataFromHashArray requests validator info from other peers by hash array +func (res *validatorInfoResolver) RequestDataFromHashArray(hashes [][]byte, epoch uint32) error { + b := &batch.Batch{ + Data: hashes, + } + buffHashes, err := res.marshalizer.Marshal(b) + if err != nil { + return err + } + + return res.SendOnRequestTopic( + &dataRetriever.RequestData{ + Type: dataRetriever.HashArrayType, + Value: buffHashes, + Epoch: epoch, + }, + hashes, + ) +} + +// ProcessReceivedMessage represents the callback func from the p2p.Messenger that is called each time a new message is received +// (for the topic this validator was registered to, usually a request topic) +func (res *validatorInfoResolver) ProcessReceivedMessage(message p2p.MessageP2P, fromConnectedPeer core.PeerID) error { + err := res.canProcessMessage(message, fromConnectedPeer) + if err != nil { + return err + } + + res.throttler.StartProcessing() + defer res.throttler.EndProcessing() + + rd, err := res.parseReceivedMessage(message, fromConnectedPeer) + if err != nil { + return err + } + + switch rd.Type { + case dataRetriever.HashType: + return res.resolveHashRequest(rd.Value, rd.Epoch, fromConnectedPeer) + case dataRetriever.HashArrayType: + return res.resolveMultipleHashesRequest(rd.Value, rd.Epoch, fromConnectedPeer) + } + + return fmt.Errorf("%w for value %s", dataRetriever.ErrRequestTypeNotImplemented, logger.DisplayByteSlice(rd.Value)) +} + +// resolveHashRequest sends the response for a hash request +func (res *validatorInfoResolver) resolveHashRequest(hash []byte, epoch uint32, pid core.PeerID) error { + data, err := res.fetchValidatorInfoByteSlice(hash, epoch) + if err != nil { + return err + } + + return res.marshalAndSend(data, pid) +} + +// resolveMultipleHashesRequest sends the response for a hash array type request +func (res *validatorInfoResolver) resolveMultipleHashesRequest(hashesBuff []byte, epoch uint32, pid core.PeerID) error { + b := batch.Batch{} + err := res.marshalizer.Unmarshal(&b, hashesBuff) + if err != nil { + return err + } + hashes := b.Data + + validatorInfoForHashes, err := res.fetchValidatorInfoForHashes(hashes, epoch) + if err != nil { + outputHashes := "" + for _, hash := range hashes { + outputHashes += hex.EncodeToString(hash) + " " + } + return fmt.Errorf("resolveMultipleHashesRequest error %w from buff %s", err, outputHashes) + } + + return res.sendValidatorInfoForHashes(validatorInfoForHashes, pid) +} + +func (res *validatorInfoResolver) sendValidatorInfoForHashes(validatorInfoForHashes [][]byte, pid core.PeerID) error { + buffsToSend, err := res.dataPacker.PackDataInChunks(validatorInfoForHashes, maxBuffToSendValidatorsInfo) + if err != nil { + return err + } + + for _, buff := range buffsToSend { + err = res.Send(buff, pid) + if err != nil { + return err + } + } + + return nil +} + +func (res *validatorInfoResolver) fetchValidatorInfoForHashes(hashes [][]byte, epoch uint32) ([][]byte, error) { + validatorInfos := make([][]byte, 0) + for _, hash := range hashes { + validatorInfoForHash, _ := res.fetchValidatorInfoByteSlice(hash, epoch) + if validatorInfoForHash != nil { + validatorInfos = append(validatorInfos, validatorInfoForHash) + } + } + + if len(validatorInfos) == 0 { + return nil, dataRetriever.ErrValidatorInfoNotFound + } + + return validatorInfos, nil +} + +func (res *validatorInfoResolver) fetchValidatorInfoByteSlice(hash []byte, epoch uint32) ([]byte, error) { + data, ok := res.validatorInfoPool.SearchFirstData(hash) + if ok { + return 
res.marshalizer.Marshal(data) + } + + buff, err := res.getFromStorage(hash, epoch) + if err != nil { + res.ResolverDebugHandler().LogFailedToResolveData( + res.topic, + hash, + err, + ) + return nil, err + } + + res.ResolverDebugHandler().LogSucceededToResolveData(res.topic, hash) + + return buff, nil +} + +func (res *validatorInfoResolver) marshalAndSend(data []byte, pid core.PeerID) error { + b := &batch.Batch{ + Data: [][]byte{data}, + } + buff, err := res.marshalizer.Marshal(b) + if err != nil { + return err + } + + return res.Send(buff, pid) +} + +// SetResolverDebugHandler sets a resolver debug handler +func (res *validatorInfoResolver) SetResolverDebugHandler(handler dataRetriever.ResolverDebugHandler) error { + return res.TopicResolverSender.SetResolverDebugHandler(handler) +} + +// SetNumPeersToQuery sets the number of intra shard and cross shard peers to query +func (res *validatorInfoResolver) SetNumPeersToQuery(intra int, cross int) { + res.TopicResolverSender.SetNumPeersToQuery(intra, cross) +} + +// NumPeersToQuery returns the number of intra shard and cross shard peers to query +func (res *validatorInfoResolver) NumPeersToQuery() (int, int) { + return res.TopicResolverSender.NumPeersToQuery() +} + +// Close returns nil +func (res *validatorInfoResolver) Close() error { + return nil +} + +// IsInterfaceNil returns true if there is no value under the interface +func (res *validatorInfoResolver) IsInterfaceNil() bool { + return res == nil +} diff --git a/dataRetriever/resolvers/validatorInfoResolver_test.go b/dataRetriever/resolvers/validatorInfoResolver_test.go new file mode 100644 index 00000000000..ac895edf0d5 --- /dev/null +++ b/dataRetriever/resolvers/validatorInfoResolver_test.go @@ -0,0 +1,677 @@ +package resolvers_test + +import ( + "errors" + "fmt" + "strings" + "testing" + + "github.com/ElrondNetwork/elrond-go-core/core" + "github.com/ElrondNetwork/elrond-go-core/core/check" + "github.com/ElrondNetwork/elrond-go-core/core/partitioning" + "github.com/ElrondNetwork/elrond-go-core/data/batch" + "github.com/ElrondNetwork/elrond-go/common" + "github.com/ElrondNetwork/elrond-go/dataRetriever" + "github.com/ElrondNetwork/elrond-go/dataRetriever/mock" + "github.com/ElrondNetwork/elrond-go/dataRetriever/resolvers" + "github.com/ElrondNetwork/elrond-go/p2p" + "github.com/ElrondNetwork/elrond-go/state" + "github.com/ElrondNetwork/elrond-go/testscommon" + "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" + "github.com/ElrondNetwork/elrond-go/testscommon/storage" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func createMockArgValidatorInfoResolver() resolvers.ArgValidatorInfoResolver { + return resolvers.ArgValidatorInfoResolver{ + SenderResolver: &mock.TopicResolverSenderStub{}, + Marshaller: &mock.MarshalizerMock{}, + AntifloodHandler: &mock.P2PAntifloodHandlerStub{}, + Throttler: &mock.ThrottlerStub{}, + ValidatorInfoPool: testscommon.NewShardedDataStub(), + ValidatorInfoStorage: &storage.StorerStub{}, + DataPacker: &mock.DataPackerStub{}, + IsFullHistoryNode: false, + } +} + +func createMockValidatorInfo(pk []byte) state.ValidatorInfo { + return state.ValidatorInfo{ + PublicKey: pk, + ShardId: 123, + List: string(common.EligibleList), + Index: 10, + Rating: 11, + } +} + +func TestNewValidatorInfoResolver(t *testing.T) { + t.Parallel() + + t.Run("nil SenderResolver should error", func(t *testing.T) { + t.Parallel() + + args := createMockArgValidatorInfoResolver() + args.SenderResolver = nil + + res, err := 
resolvers.NewValidatorInfoResolver(args) + assert.Equal(t, dataRetriever.ErrNilResolverSender, err) + assert.True(t, check.IfNil(res)) + }) + t.Run("nil marshaller should error", func(t *testing.T) { + t.Parallel() + + args := createMockArgValidatorInfoResolver() + args.Marshaller = nil + + res, err := resolvers.NewValidatorInfoResolver(args) + assert.Equal(t, dataRetriever.ErrNilMarshalizer, err) + assert.True(t, check.IfNil(res)) + }) + t.Run("nil AntifloodHandler should error", func(t *testing.T) { + t.Parallel() + + args := createMockArgValidatorInfoResolver() + args.AntifloodHandler = nil + + res, err := resolvers.NewValidatorInfoResolver(args) + assert.Equal(t, dataRetriever.ErrNilAntifloodHandler, err) + assert.True(t, check.IfNil(res)) + }) + t.Run("nil Throttler should error", func(t *testing.T) { + t.Parallel() + + args := createMockArgValidatorInfoResolver() + args.Throttler = nil + + res, err := resolvers.NewValidatorInfoResolver(args) + assert.Equal(t, dataRetriever.ErrNilThrottler, err) + assert.True(t, check.IfNil(res)) + }) + t.Run("nil ValidatorInfoPool should error", func(t *testing.T) { + t.Parallel() + + args := createMockArgValidatorInfoResolver() + args.ValidatorInfoPool = nil + + res, err := resolvers.NewValidatorInfoResolver(args) + assert.Equal(t, dataRetriever.ErrNilValidatorInfoPool, err) + assert.True(t, check.IfNil(res)) + }) + t.Run("nil ValidatorInfoStorage should error", func(t *testing.T) { + t.Parallel() + + args := createMockArgValidatorInfoResolver() + args.ValidatorInfoStorage = nil + + res, err := resolvers.NewValidatorInfoResolver(args) + assert.Equal(t, dataRetriever.ErrNilValidatorInfoStorage, err) + assert.True(t, check.IfNil(res)) + }) + t.Run("nil DataPacker should error", func(t *testing.T) { + t.Parallel() + + args := createMockArgValidatorInfoResolver() + args.DataPacker = nil + + res, err := resolvers.NewValidatorInfoResolver(args) + assert.Equal(t, dataRetriever.ErrNilDataPacker, err) + assert.True(t, check.IfNil(res)) + }) + t.Run("should work", func(t *testing.T) { + t.Parallel() + + res, err := resolvers.NewValidatorInfoResolver(createMockArgValidatorInfoResolver()) + assert.Nil(t, err) + assert.False(t, check.IfNil(res)) + + assert.Nil(t, res.Close()) + }) +} + +func TestValidatorInfoResolver_RequestDataFromHash(t *testing.T) { + t.Parallel() + + t.Run("should error", func(t *testing.T) { + t.Parallel() + + expectedErr := errors.New("expected err") + args := createMockArgValidatorInfoResolver() + args.SenderResolver = &mock.TopicResolverSenderStub{ + SendOnRequestTopicCalled: func(rd *dataRetriever.RequestData, originalHashes [][]byte) error { + return expectedErr + }, + } + + res, _ := resolvers.NewValidatorInfoResolver(args) + err := res.RequestDataFromHash(nil, 0) + assert.Equal(t, expectedErr, err) + }) + t.Run("should work", func(t *testing.T) { + t.Parallel() + + providedHash := []byte("provided hash") + providedEpoch := uint32(123) + args := createMockArgValidatorInfoResolver() + args.SenderResolver = &mock.TopicResolverSenderStub{ + SendOnRequestTopicCalled: func(rd *dataRetriever.RequestData, originalHashes [][]byte) error { + assert.Equal(t, providedHash, originalHashes[0]) + assert.Equal(t, dataRetriever.HashType, rd.Type) + assert.Equal(t, providedHash, rd.Value) + assert.Equal(t, providedEpoch, rd.Epoch) + + return nil + }, + } + + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + err := res.RequestDataFromHash(providedHash, providedEpoch) + assert.Nil(t, err) + }) +} + +func 
TestValidatorInfoResolver_RequestDataFromHashArray(t *testing.T) { + t.Parallel() + + t.Run("marshal returns error", func(t *testing.T) { + t.Parallel() + + expectedErr := errors.New("expected err") + args := createMockArgValidatorInfoResolver() + args.Marshaller = &testscommon.MarshalizerStub{ + MarshalCalled: func(obj interface{}) ([]byte, error) { + return nil, expectedErr + }, + } + + res, _ := resolvers.NewValidatorInfoResolver(args) + err := res.RequestDataFromHashArray(nil, 0) + assert.Equal(t, expectedErr, err) + }) + t.Run("should error", func(t *testing.T) { + t.Parallel() + + expectedErr := errors.New("expected err") + args := createMockArgValidatorInfoResolver() + args.SenderResolver = &mock.TopicResolverSenderStub{ + SendOnRequestTopicCalled: func(rd *dataRetriever.RequestData, originalHashes [][]byte) error { + return expectedErr + }, + } + + res, _ := resolvers.NewValidatorInfoResolver(args) + err := res.RequestDataFromHashArray(nil, 0) + assert.Equal(t, expectedErr, err) + }) + t.Run("should work", func(t *testing.T) { + t.Parallel() + + providedHashes := [][]byte{[]byte("provided hash")} + providedEpoch := uint32(123) + args := createMockArgValidatorInfoResolver() + args.SenderResolver = &mock.TopicResolverSenderStub{ + SendOnRequestTopicCalled: func(rd *dataRetriever.RequestData, originalHashes [][]byte) error { + assert.Equal(t, providedHashes, originalHashes) + assert.Equal(t, dataRetriever.HashArrayType, rd.Type) + + b := &batch.Batch{} + _ = args.Marshaller.Unmarshal(b, rd.Value) + assert.Equal(t, providedHashes, b.Data) + assert.Equal(t, providedEpoch, rd.Epoch) + + return nil + }, + } + + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + err := res.RequestDataFromHashArray(providedHashes, providedEpoch) + assert.Nil(t, err) + }) +} + +func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { + t.Parallel() + + t.Run("nil message should error", func(t *testing.T) { + t.Parallel() + + res, _ := resolvers.NewValidatorInfoResolver(createMockArgValidatorInfoResolver()) + require.False(t, check.IfNil(res)) + + err := res.ProcessReceivedMessage(nil, fromConnectedPeer) + assert.Equal(t, dataRetriever.ErrNilMessage, err) + }) + t.Run("canProcessMessage due to antiflood handler error", func(t *testing.T) { + t.Parallel() + + expectedErr := errors.New("expected err") + args := createMockArgValidatorInfoResolver() + args.AntifloodHandler = &mock.P2PAntifloodHandlerStub{ + CanProcessMessageCalled: func(message p2p.MessageP2P, fromConnectedPeer core.PeerID) error { + return expectedErr + }, + } + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashType, nil), fromConnectedPeer) + assert.True(t, errors.Is(err, expectedErr)) + assert.False(t, args.Throttler.(*mock.ThrottlerStub).StartWasCalled) + assert.False(t, args.Throttler.(*mock.ThrottlerStub).EndWasCalled) + }) + t.Run("parseReceivedMessage returns error due to marshalizer error", func(t *testing.T) { + t.Parallel() + + expectedErr := errors.New("expected err") + args := createMockArgValidatorInfoResolver() + args.Marshaller = &mock.MarshalizerStub{ + UnmarshalCalled: func(obj interface{}, buff []byte) error { + return expectedErr + }, + } + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashType, nil), fromConnectedPeer) + assert.True(t, errors.Is(err, 
expectedErr)) + }) + + t.Run("invalid request type should error", func(t *testing.T) { + t.Parallel() + + res, _ := resolvers.NewValidatorInfoResolver(createMockArgValidatorInfoResolver()) + require.False(t, check.IfNil(res)) + + err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.NonceType, []byte("hash")), fromConnectedPeer) + assert.True(t, errors.Is(err, dataRetriever.ErrRequestTypeNotImplemented)) + }) + + // resolveHashRequest + t.Run("data not found in cache and fetchValidatorInfoByteSlice fails when getting data from storage", func(t *testing.T) { + t.Parallel() + + expectedErr := errors.New("expected err") + args := createMockArgValidatorInfoResolver() + args.ValidatorInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + return nil, false + }, + } + args.ValidatorInfoStorage = &storage.StorerStub{ + SearchFirstCalled: func(key []byte) ([]byte, error) { + return nil, expectedErr + }, + } + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashType, []byte("hash")), fromConnectedPeer) + assert.Equal(t, expectedErr, err) + }) + t.Run("data found in cache but marshal fails", func(t *testing.T) { + t.Parallel() + + expectedErr := errors.New("expected err") + marshallerMock := testscommon.MarshalizerMock{} + args := createMockArgValidatorInfoResolver() + args.ValidatorInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + return []byte("some value"), true + }, + } + args.Marshaller = &testscommon.MarshalizerStub{ + MarshalCalled: func(obj interface{}) ([]byte, error) { + return nil, expectedErr + }, + UnmarshalCalled: func(obj interface{}, buff []byte) error { + return marshallerMock.Unmarshal(obj, buff) + }, + } + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashType, []byte("hash")), fromConnectedPeer) + assert.NotNil(t, err) + }) + t.Run("data found in storage but marshal fails", func(t *testing.T) { + t.Parallel() + + expectedErr := errors.New("expected err") + marshallerMock := testscommon.MarshalizerMock{} + args := createMockArgValidatorInfoResolver() + args.ValidatorInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + return nil, false + }, + } + args.ValidatorInfoStorage = &storage.StorerStub{ + SearchFirstCalled: func(key []byte) ([]byte, error) { + return []byte("some value"), nil + }, + } + args.Marshaller = &testscommon.MarshalizerStub{ + MarshalCalled: func(obj interface{}) ([]byte, error) { + return nil, expectedErr + }, + UnmarshalCalled: func(obj interface{}, buff []byte) error { + return marshallerMock.Unmarshal(obj, buff) + }, + } + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashType, []byte("hash")), fromConnectedPeer) + assert.NotNil(t, err) + }) + t.Run("should work, data from cache", func(t *testing.T) { + t.Parallel() + + wasCalled := false + providedValue := createMockValidatorInfo([]byte("provided pk")) + args := createMockArgValidatorInfoResolver() + args.ValidatorInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + return providedValue, true + }, + } + args.SenderResolver = 
&mock.TopicResolverSenderStub{ + SendCalled: func(buff []byte, peer core.PeerID) error { + marshallerMock := testscommon.MarshalizerMock{} + b := &batch.Batch{} + _ = marshallerMock.Unmarshal(b, buff) + + vi := &state.ValidatorInfo{} + _ = marshallerMock.Unmarshal(vi, b.Data[0]) + + assert.Equal(t, &providedValue, vi) + wasCalled = true + + return nil + }, + } + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashType, []byte("hash")), fromConnectedPeer) + assert.Nil(t, err) + assert.True(t, wasCalled) + }) + t.Run("should work, data from storage", func(t *testing.T) { + t.Parallel() + + wasCalled := false + providedValue := createMockValidatorInfo([]byte("provided pk")) + args := createMockArgValidatorInfoResolver() + args.ValidatorInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + return nil, false + }, + } + args.ValidatorInfoStorage = &storage.StorerStub{ + SearchFirstCalled: func(key []byte) ([]byte, error) { + marshallerMock := testscommon.MarshalizerMock{} + return marshallerMock.Marshal(providedValue) + }, + } + args.SenderResolver = &mock.TopicResolverSenderStub{ + SendCalled: func(buff []byte, peer core.PeerID) error { + marshallerMock := testscommon.MarshalizerMock{} + b := &batch.Batch{} + _ = marshallerMock.Unmarshal(b, buff) + + vi := &state.ValidatorInfo{} + _ = marshallerMock.Unmarshal(vi, b.Data[0]) + + assert.Equal(t, &providedValue, vi) + wasCalled = true + + return nil + }, + } + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashType, []byte("hash")), fromConnectedPeer) + assert.Nil(t, err) + assert.True(t, wasCalled) + }) + + // resolveMultipleHashesRequest + t.Run("unmarshal fails", func(t *testing.T) { + t.Parallel() + + expectedErr := errors.New("expected err") + args := createMockArgValidatorInfoResolver() + args.Marshaller = &testscommon.MarshalizerStub{ + UnmarshalCalled: func(obj interface{}, buff []byte) error { + switch obj.(type) { + case *dataRetriever.RequestData: + return testscommon.MarshalizerMock{}.Unmarshal(obj, buff) + case *batch.Batch: + return expectedErr + } + return nil + }, + } + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashArrayType, []byte("hash")), fromConnectedPeer) + assert.Equal(t, expectedErr, err) + }) + t.Run("no hash found", func(t *testing.T) { + t.Parallel() + + args := createMockArgValidatorInfoResolver() + args.ValidatorInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + return nil, false + }, + } + args.ValidatorInfoStorage = &storage.StorerStub{ + SearchFirstCalled: func(key []byte) ([]byte, error) { + return nil, errors.New("not found") + }, + } + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + b := &batch.Batch{ + Data: [][]byte{[]byte("hash")}, + } + buff, _ := args.Marshaller.Marshal(b) + err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashArrayType, buff), fromConnectedPeer) + require.NotNil(t, err) + assert.True(t, strings.Contains(err.Error(), dataRetriever.ErrValidatorInfoNotFound.Error())) + }) + t.Run("pack data in chunks returns error", func(t *testing.T) { + t.Parallel() + + expectedErr := 
errors.New("expected err") + args := createMockArgValidatorInfoResolver() + args.ValidatorInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + return key, true + }, + } + args.ValidatorInfoStorage = &storage.StorerStub{ + SearchFirstCalled: func(key []byte) ([]byte, error) { + return nil, errors.New("not found") + }, + } + args.DataPacker = &mock.DataPackerStub{ + PackDataInChunksCalled: func(data [][]byte, limit int) ([][]byte, error) { + return nil, expectedErr + }, + } + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + b := &batch.Batch{ + Data: [][]byte{[]byte("hash")}, + } + buff, _ := args.Marshaller.Marshal(b) + err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashArrayType, buff), fromConnectedPeer) + assert.Equal(t, expectedErr, err) + }) + t.Run("all hashes in one chunk should work", func(t *testing.T) { + t.Parallel() + + wasCalled := false + numOfProvidedData := 3 + providedHashes := make([][]byte, 0) + providedData := make([]state.ValidatorInfo, 0) + for i := 0; i < numOfProvidedData; i++ { + hashStr := fmt.Sprintf("hash%d", i) + providedHashes = append(providedHashes, []byte(hashStr)) + pkStr := fmt.Sprintf("pk%d", i) + providedData = append(providedData, createMockValidatorInfo([]byte(pkStr))) + } + args := createMockArgValidatorInfoResolver() + numOfCalls := 0 + args.ValidatorInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + val := providedData[numOfCalls] + numOfCalls++ + return val, true + }, + } + args.SenderResolver = &mock.TopicResolverSenderStub{ + SendCalled: func(buff []byte, peer core.PeerID) error { + marshallerMock := testscommon.MarshalizerMock{} + b := &batch.Batch{} + _ = marshallerMock.Unmarshal(b, buff) + assert.Equal(t, numOfProvidedData, len(b.Data)) + + for i := 0; i < numOfProvidedData; i++ { + vi := &state.ValidatorInfo{} + _ = marshallerMock.Unmarshal(vi, b.Data[i]) + + assert.Equal(t, &providedData[i], vi) + } + + wasCalled = true + return nil + }, + } + args.DataPacker, _ = partitioning.NewSimpleDataPacker(args.Marshaller) + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + buff, _ := args.Marshaller.Marshal(&batch.Batch{Data: providedHashes}) + err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashArrayType, buff), fromConnectedPeer) + assert.Nil(t, err) + assert.True(t, wasCalled) + }) + t.Run("multiple chunks should work", func(t *testing.T) { + t.Parallel() + + args := createMockArgValidatorInfoResolver() + numOfProvidedData := 1000 + providedHashes := make([][]byte, 0) + providedData := make([]state.ValidatorInfo, 0) + testHasher := hashingMocks.HasherMock{} + testMarshaller := testscommon.MarshalizerMock{} + providedDataMap := make(map[string]struct{}, 0) + for i := 0; i < numOfProvidedData; i++ { + hashStr := fmt.Sprintf("hash%d", i) + providedHashes = append(providedHashes, []byte(hashStr)) + pkStr := fmt.Sprintf("pk%d", i) + newValidatorInfo := createMockValidatorInfo([]byte(pkStr)) + providedData = append(providedData, newValidatorInfo) + + buff, err := testMarshaller.Marshal(newValidatorInfo) + require.Nil(t, err) + hash := testHasher.Compute(string(buff)) + providedDataMap[string(hash)] = struct{}{} + } + numOfCalls := 0 + args.ValidatorInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + val := providedData[numOfCalls] + numOfCalls++ + 
return val, true + }, + } + numOfCallsSend := 0 + args.SenderResolver = &mock.TopicResolverSenderStub{ + SendCalled: func(buff []byte, peer core.PeerID) error { + marshallerMock := testscommon.MarshalizerMock{} + b := &batch.Batch{} + _ = marshallerMock.Unmarshal(b, buff) + + dataLen := len(b.Data) + for i := 0; i < dataLen; i++ { + vi := &state.ValidatorInfo{} + _ = marshallerMock.Unmarshal(vi, b.Data[i]) + + // remove this info from the provided map + buff, err := testMarshaller.Marshal(vi) + require.Nil(t, err) + hash := testHasher.Compute(string(buff)) + delete(providedDataMap, string(hash)) + } + + numOfCallsSend++ + return nil + }, + } + args.DataPacker, _ = partitioning.NewSimpleDataPacker(args.Marshaller) + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + buff, _ := args.Marshaller.Marshal(&batch.Batch{Data: providedHashes}) + err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashArrayType, buff), fromConnectedPeer) + assert.Nil(t, err) + assert.Equal(t, 2, numOfCallsSend) // ~677 messages in a chunk + assert.Equal(t, 0, len(providedDataMap)) // all items should have been deleted on Send + }) +} + +func TestValidatorInfoResolver_SetResolverDebugHandler(t *testing.T) { + t.Parallel() + + defer func() { + r := recover() + if r != nil { + assert.Fail(t, "should not panic") + } + }() + + res, _ := resolvers.NewValidatorInfoResolver(createMockArgValidatorInfoResolver()) + require.False(t, check.IfNil(res)) + + _ = res.SetResolverDebugHandler(nil) +} + +func TestValidatorInfoResolver_NumPeersToQuery(t *testing.T) { + t.Parallel() + + providedIntra, providedCross := 5, 10 + receivedIntra, receivedCross := 0, 0 + args := createMockArgValidatorInfoResolver() + args.SenderResolver = &mock.TopicResolverSenderStub{ + SetNumPeersToQueryCalled: func(intra int, cross int) { + assert.Equal(t, providedIntra, intra) + assert.Equal(t, providedCross, cross) + receivedIntra = intra + receivedCross = cross + }, + GetNumPeersToQueryCalled: func() (int, int) { + return receivedIntra, receivedCross + }, + } + + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + res.SetNumPeersToQuery(providedIntra, providedCross) + intra, cross := res.NumPeersToQuery() + assert.Equal(t, providedIntra, intra) + assert.Equal(t, providedCross, cross) +} diff --git a/epochStart/bootstrap/disabled/disabledEpochStartNotifier.go b/epochStart/bootstrap/disabled/disabledEpochStartNotifier.go index caf898fb359..01f413e9a28 100644 --- a/epochStart/bootstrap/disabled/disabledEpochStartNotifier.go +++ b/epochStart/bootstrap/disabled/disabledEpochStartNotifier.go @@ -23,7 +23,7 @@ func (desn *EpochStartNotifier) UnregisterHandler(_ epochStart.ActionHandler) { } // NotifyAllPrepare - -func (desn *EpochStartNotifier) NotifyAllPrepare(_ data.HeaderHandler, _ data.BodyHandler) { +func (desn *EpochStartNotifier) NotifyAllPrepare(_ data.HeaderHandler, _ data.BodyHandler, _ epochStart.ValidatorInfoCacher) { } // NotifyAll - diff --git a/epochStart/bootstrap/process.go b/epochStart/bootstrap/process.go index 066fd956736..0c98d890329 100644 --- a/epochStart/bootstrap/process.go +++ b/epochStart/bootstrap/process.go @@ -565,7 +565,7 @@ func (e *epochStartBootstrap) createSyncers() error { syncTxsArgs := updateSync.ArgsNewTransactionsSyncer{ DataPools: e.dataPool, Storages: disabled.NewChainStorer(), - Marshalizer: e.coreComponentsHolder.InternalMarshalizer(), + Marshaller: e.coreComponentsHolder.InternalMarshalizer(), RequestHandler: 
e.requestHandler, } diff --git a/epochStart/bootstrap/process_test.go b/epochStart/bootstrap/process_test.go index 22d3e9970b5..ed20437f2c2 100644 --- a/epochStart/bootstrap/process_test.go +++ b/epochStart/bootstrap/process_test.go @@ -41,6 +41,7 @@ import ( statusHandlerMock "github.com/ElrondNetwork/elrond-go/testscommon/statusHandler" storageMocks "github.com/ElrondNetwork/elrond-go/testscommon/storage" "github.com/ElrondNetwork/elrond-go/testscommon/syncer" + "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/ElrondNetwork/elrond-go/trie/factory" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -1754,6 +1755,9 @@ func TestRequestAndProcessing(t *testing.T) { HeadersCalled: func() dataRetriever.HeadersPool { return &mock.HeadersCacherStub{} }, + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &validatorInfoCacherStub.ValidatorInfoCacherStub{} + }, } epochStartProvider.requestHandler = &testscommon.RequestHandlerStub{} epochStartProvider.miniBlocksSyncer = &epochStartMocks.PendingMiniBlockSyncHandlerStub{} @@ -1821,6 +1825,9 @@ func TestRequestAndProcessing(t *testing.T) { HeadersCalled: func() dataRetriever.HeadersPool { return &mock.HeadersCacherStub{} }, + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &validatorInfoCacherStub.ValidatorInfoCacherStub{} + }, } epochStartProvider.requestHandler = &testscommon.RequestHandlerStub{} epochStartProvider.miniBlocksSyncer = &epochStartMocks.PendingMiniBlockSyncHandlerStub{} @@ -1976,6 +1983,9 @@ func TestEpochStartBootstrap_WithDisabledShardIDAsObserver(t *testing.T) { TrieNodesCalled: func() storage.Cacher { return testscommon.NewCacherStub() }, + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &validatorInfoCacherStub.ValidatorInfoCacherStub{} + }, } epochStartProvider.requestHandler = &testscommon.RequestHandlerStub{} epochStartProvider.epochStartMeta = &block.MetaBlock{Epoch: 0} diff --git a/epochStart/bootstrap/syncValidatorStatus.go b/epochStart/bootstrap/syncValidatorStatus.go index d4ca68c8225..23ac78a3841 100644 --- a/epochStart/bootstrap/syncValidatorStatus.go +++ b/epochStart/bootstrap/syncValidatorStatus.go @@ -18,19 +18,22 @@ import ( "github.com/ElrondNetwork/elrond-go/sharding/nodesCoordinator" "github.com/ElrondNetwork/elrond-go/storage" "github.com/ElrondNetwork/elrond-go/storage/lrucache" + "github.com/ElrondNetwork/elrond-go/update" "github.com/ElrondNetwork/elrond-go/update/sync" ) const consensusGroupCacheSize = 50 type syncValidatorStatus struct { - miniBlocksSyncer epochStart.PendingMiniBlocksSyncHandler - dataPool dataRetriever.PoolsHolder - marshalizer marshal.Marshalizer - requestHandler process.RequestHandler - nodeCoordinator StartInEpochNodesCoordinator - genesisNodesConfig sharding.GenesisNodesSetupHandler - memDB storage.Storer + miniBlocksSyncer epochStart.PendingMiniBlocksSyncHandler + transactionsSyncer update.TransactionsSyncHandler + dataPool dataRetriever.PoolsHolder + marshalizer marshal.Marshalizer + requestHandler process.RequestHandler + nodeCoordinator StartInEpochNodesCoordinator + genesisNodesConfig sharding.GenesisNodesSetupHandler + memDB storage.Storer + enableEpochsHandler common.EnableEpochsHandler } // ArgsNewSyncValidatorStatus holds the arguments needed for creating a new validator status process component @@ -57,23 +60,37 @@ func NewSyncValidatorStatus(args ArgsNewSyncValidatorStatus) (*syncValidatorStat } s := 
&syncValidatorStatus{ - dataPool: args.DataPool, - marshalizer: args.Marshalizer, - requestHandler: args.RequestHandler, - genesisNodesConfig: args.GenesisNodesConfig, + dataPool: args.DataPool, + marshalizer: args.Marshalizer, + requestHandler: args.RequestHandler, + genesisNodesConfig: args.GenesisNodesConfig, + enableEpochsHandler: args.EnableEpochsHandler, } + + var err error + syncMiniBlocksArgs := sync.ArgsNewPendingMiniBlocksSyncer{ Storage: disabled.CreateMemUnit(), Cache: s.dataPool.MiniBlocks(), Marshalizer: s.marshalizer, RequestHandler: s.requestHandler, } - var err error s.miniBlocksSyncer, err = sync.NewPendingMiniBlocksSyncer(syncMiniBlocksArgs) if err != nil { return nil, err } + syncTxsArgs := sync.ArgsNewTransactionsSyncer{ + DataPools: s.dataPool, + Storages: disabled.NewChainStorer(), + Marshaller: s.marshalizer, + RequestHandler: s.requestHandler, + } + s.transactionsSyncer, err = sync.NewTransactionsSyncer(syncTxsArgs) + if err != nil { + return nil, err + } + eligibleNodesInfo, waitingNodesInfo := args.GenesisNodesConfig.InitialNodesInfo() eligibleValidators, err := nodesCoordinator.NodesInfoToValidators(eligibleNodesInfo) @@ -112,6 +129,7 @@ func NewSyncValidatorStatus(args ArgsNewSyncValidatorStatus) (*syncValidatorStat NodeTypeProvider: args.NodeTypeProvider, IsFullArchive: args.IsFullArchive, EnableEpochsHandler: args.EnableEpochsHandler, + ValidatorInfoCacher: s.dataPool.CurrentEpochValidatorInfo(), } baseNodesCoordinator, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argsNodesCoordinator) if err != nil { @@ -209,6 +227,26 @@ func (s *syncValidatorStatus) getPeerBlockBodyForMeta( return nil, nil, err } + if metaBlock.GetEpoch() >= s.enableEpochsHandler.RefactorPeersMiniBlocksEnableEpoch() { + s.transactionsSyncer.ClearFields() + ctx, cancel = context.WithTimeout(context.Background(), time.Minute) + err = s.transactionsSyncer.SyncTransactionsFor(peerMiniBlocks, metaBlock.GetEpoch(), ctx) + cancel() + if err != nil { + return nil, nil, err + } + + validatorsInfo, err := s.transactionsSyncer.GetValidatorsInfo() + if err != nil { + return nil, nil, err + } + + currentEpochValidatorInfoPool := s.dataPool.CurrentEpochValidatorInfo() + for validatorInfoHash, validatorInfo := range validatorsInfo { + currentEpochValidatorInfoPool.AddValidatorInfo([]byte(validatorInfoHash), validatorInfo) + } + } + blockBody := &block.Body{MiniBlocks: make([]*block.MiniBlock, 0, len(peerMiniBlocks))} for _, mbHeader := range shardMBHeaders { blockBody.MiniBlocks = append(blockBody.MiniBlocks, peerMiniBlocks[string(mbHeader.GetHash())]) diff --git a/epochStart/bootstrap/syncValidatorStatus_test.go b/epochStart/bootstrap/syncValidatorStatus_test.go index 8e96c4864b7..1244022e01b 100644 --- a/epochStart/bootstrap/syncValidatorStatus_test.go +++ b/epochStart/bootstrap/syncValidatorStatus_test.go @@ -9,6 +9,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/data" "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go-core/data/endProcess" + "github.com/ElrondNetwork/elrond-go/dataRetriever" "github.com/ElrondNetwork/elrond-go/epochStart/mock" "github.com/ElrondNetwork/elrond-go/sharding/nodesCoordinator" "github.com/ElrondNetwork/elrond-go/storage" @@ -18,6 +19,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" "github.com/ElrondNetwork/elrond-go/testscommon/shardingMocks" + vic "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" 
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -248,6 +250,9 @@ func getSyncValidatorStatusArgs() ArgsNewSyncValidatorStatus { MiniBlocksCalled: func() storage.Cacher { return testscommon.NewCacherStub() }, + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &vic.ValidatorInfoCacherStub{} + }, }, Marshalizer: &mock.MarshalizerMock{}, Hasher: &hashingMocks.HasherMock{}, diff --git a/epochStart/errors.go b/epochStart/errors.go index 37e10e88622..e2b8499618b 100644 --- a/epochStart/errors.go +++ b/epochStart/errors.go @@ -128,8 +128,17 @@ var ErrValidatorInfoMiniBlocksNumDoesNotMatch = errors.New("number of created an // ErrNilValidatorInfo signals that a nil value for the validatorInfo has been provided var ErrNilValidatorInfo = errors.New("validator info is nil") -// ErrNilMetaBlock signals that a nil metablock has been provided -var ErrNilMetaBlock = errors.New("nil metablock") +// ErrNilValidatorsInfoPool signals that a nil value for the validatorsInfoPool has been provided +var ErrNilValidatorsInfoPool = errors.New("validators info pool is nil") + +// ErrNilCurrentEpochValidatorsInfoPool signals that a nil value for the currentEpochValidatorsInfoPool has been provided +var ErrNilCurrentEpochValidatorsInfoPool = errors.New("current epoch validators info pool is nil") + +// ErrNilMetaBlock signals that a nil meta block has been provided +var ErrNilMetaBlock = errors.New("nil meta block") + +// ErrNilBlockBody signals that a nil block body has been provided +var ErrNilBlockBody = errors.New("nil block body") // ErrNilMiniBlockPool signals that a nil mini blocks pool was used var ErrNilMiniBlockPool = errors.New("nil mini block pool") @@ -310,3 +319,6 @@ var ErrNilScheduledDataSyncerFactory = errors.New("nil scheduled data syncer fac // ErrNilEnableEpochsHandler signals that a nil enable epochs handler has been provided var ErrNilEnableEpochsHandler = errors.New("nil enable epochs handler") + +// ErrNilValidatorInfoStorage signals that nil validator info storage has been provided +var ErrNilValidatorInfoStorage = errors.New("nil validator info storage") diff --git a/epochStart/interface.go b/epochStart/interface.go index 45c5cab69cc..f02f0b39bca 100644 --- a/epochStart/interface.go +++ b/epochStart/interface.go @@ -58,6 +58,8 @@ type RequestHandler interface { RequestInterval() time.Duration SetNumPeersToQuery(key string, intra int, cross int) error GetNumPeersToQuery(key string) (int, int, error) + RequestValidatorInfo(hash []byte) + RequestValidatorsInfo(hashes [][]byte) IsInterfaceNil() bool } @@ -147,12 +149,19 @@ type ManualEpochStartNotifier interface { IsInterfaceNil() bool } -// TransactionCacher defines the methods for the local cacher, info for current round +// TransactionCacher defines the methods for the local transaction cacher, needed for the current block type TransactionCacher interface { GetTx(txHash []byte) (data.TransactionHandler, error) IsInterfaceNil() bool } +// ValidatorInfoCacher defines the methods for the local validator info cacher, needed for the current epoch +type ValidatorInfoCacher interface { + GetValidatorInfo(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) + AddValidatorInfo(validatorInfoHash []byte, validatorInfo *state.ShardValidatorInfo) + IsInterfaceNil() bool +} + // StakingDataProvider is able to provide staking data from the system smart contracts type StakingDataProvider interface { GetTotalStakeEligibleNodes() *big.Int @@ -190,10 +199,10 @@ type RewardsCreator interface 
{ ) error GetProtocolSustainabilityRewards() *big.Int GetLocalTxCache() TransactionCacher - CreateMarshalizedData(body *block.Body) map[string][][]byte + CreateMarshalledData(body *block.Body) map[string][][]byte GetRewardsTxs(body *block.Body) map[string]data.TransactionHandler - SaveTxBlockToStorage(metaBlock data.MetaHeaderHandler, body *block.Body) - DeleteTxsFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) + SaveBlockDataToStorage(metaBlock data.MetaHeaderHandler, body *block.Body) + DeleteBlockDataFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) RemoveBlockDataFromPools(metaBlock data.MetaHeaderHandler, body *block.Body) IsInterfaceNil() bool } diff --git a/epochStart/metachain/baseRewards.go b/epochStart/metachain/baseRewards.go index 5cee2b12b14..8b8a96ddcf2 100644 --- a/epochStart/metachain/baseRewards.go +++ b/epochStart/metachain/baseRewards.go @@ -80,7 +80,7 @@ func NewBaseRewardsCreator(args BaseRewardsCreatorArgs) (*baseRewardsCreator, er return nil, epochStart.ErrProtocolSustainabilityAddressInMetachain } - currTxsCache := dataPool.NewCurrentBlockPool() + currTxsCache := dataPool.NewCurrentBlockTransactionsPool() brc := &baseRewardsCreator{ currTxs: currTxsCache, shardCoordinator: args.ShardCoordinator, @@ -115,13 +115,13 @@ func (brc *baseRewardsCreator) GetLocalTxCache() epochStart.TransactionCacher { return brc.currTxs } -// CreateMarshalizedData creates the marshalized data to be sent to shards -func (brc *baseRewardsCreator) CreateMarshalizedData(body *block.Body) map[string][][]byte { +// CreateMarshalledData creates the marshalled data to be sent to shards +func (brc *baseRewardsCreator) CreateMarshalledData(body *block.Body) map[string][][]byte { if check.IfNil(body) { return nil } - mrsTxs := make(map[string][][]byte) + marshalledRewardsTxs := make(map[string][][]byte) for _, miniBlock := range body.MiniBlocks { if miniBlock.Type != block.RewardsBlock { @@ -133,35 +133,35 @@ func (brc *baseRewardsCreator) CreateMarshalizedData(body *block.Body) map[strin } broadcastTopic := createBroadcastTopic(brc.shardCoordinator, miniBlock.ReceiverShardID) - if _, ok := mrsTxs[broadcastTopic]; !ok { - mrsTxs[broadcastTopic] = make([][]byte, 0, len(miniBlock.TxHashes)) + if _, ok := marshalledRewardsTxs[broadcastTopic]; !ok { + marshalledRewardsTxs[broadcastTopic] = make([][]byte, 0, len(miniBlock.TxHashes)) } for _, txHash := range miniBlock.TxHashes { rwdTx, err := brc.currTxs.GetTx(txHash) if err != nil { - log.Error("rewardsCreator.CreateMarshalizedData.GetTx", "hash", txHash, "error", err) + log.Error("rewardsCreator.CreateMarshalledData.GetTx", "hash", txHash, "error", err) continue } - marshalizedData, err := brc.marshalizer.Marshal(rwdTx) + marshalledData, err := brc.marshalizer.Marshal(rwdTx) if err != nil { - log.Error("rewardsCreator.CreateMarshalizedData.Marshal", "hash", txHash, "error", err) + log.Error("rewardsCreator.CreateMarshalledData.Marshal", "hash", txHash, "error", err) continue } - mrsTxs[broadcastTopic] = append(mrsTxs[broadcastTopic], marshalizedData) + marshalledRewardsTxs[broadcastTopic] = append(marshalledRewardsTxs[broadcastTopic], marshalledData) } - if len(mrsTxs[broadcastTopic]) == 0 { - delete(mrsTxs, broadcastTopic) + if len(marshalledRewardsTxs[broadcastTopic]) == 0 { + delete(marshalledRewardsTxs, broadcastTopic) } } - return mrsTxs + return marshalledRewardsTxs } -// GetRewardsTxs will return rewards txs MUST be called before SaveTxBlockToStorage +// GetRewardsTxs returns rewards txs for the current block func 
(brc *baseRewardsCreator) GetRewardsTxs(body *block.Body) map[string]data.TransactionHandler { rewardsTxs := make(map[string]data.TransactionHandler) for _, miniBlock := range body.MiniBlocks { @@ -182,8 +182,8 @@ func (brc *baseRewardsCreator) GetRewardsTxs(body *block.Body) map[string]data.T return rewardsTxs } -// SaveTxBlockToStorage saves created data to storage -func (brc *baseRewardsCreator) SaveTxBlockToStorage(_ data.MetaHeaderHandler, body *block.Body) { +// SaveBlockDataToStorage saves block data to storage +func (brc *baseRewardsCreator) SaveBlockDataToStorage(_ data.MetaHeaderHandler, body *block.Body) { if check.IfNil(body) { return } @@ -217,8 +217,8 @@ func (brc *baseRewardsCreator) SaveTxBlockToStorage(_ data.MetaHeaderHandler, bo } } -// DeleteTxsFromStorage deletes data from storage -func (brc *baseRewardsCreator) DeleteTxsFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { +// DeleteBlockDataFromStorage deletes block data from storage +func (brc *baseRewardsCreator) DeleteBlockDataFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { if check.IfNil(metaBlock) || check.IfNil(body) { return } @@ -240,7 +240,7 @@ func (brc *baseRewardsCreator) DeleteTxsFromStorage(metaBlock data.MetaHeaderHan } } -// RemoveBlockDataFromPools removes block info from pools +// RemoveBlockDataFromPools removes block data from pools func (brc *baseRewardsCreator) RemoveBlockDataFromPools(metaBlock data.MetaHeaderHandler, body *block.Body) { if check.IfNil(metaBlock) || check.IfNil(body) { return diff --git a/epochStart/metachain/baseRewards_test.go b/epochStart/metachain/baseRewards_test.go index 8f080fc891f..3787a9af4c1 100644 --- a/epochStart/metachain/baseRewards_test.go +++ b/epochStart/metachain/baseRewards_test.go @@ -290,7 +290,7 @@ func TestBaseRewardsCreator_CreateMarshalizedDataNilMiniblocksEmptyMap(t *testin require.Nil(t, err) require.NotNil(t, rwd) - result := rwd.CreateMarshalizedData(nil) + result := rwd.CreateMarshalledData(nil) require.Equal(t, 0, len(result)) } @@ -302,7 +302,7 @@ func TestBaseRewardsCreator_CreateMarshalizedDataEmptyMiniblocksEmptyMap(t *test require.Nil(t, err) require.NotNil(t, rwd) - result := rwd.CreateMarshalizedData(&block.Body{}) + result := rwd.CreateMarshalledData(&block.Body{}) require.Equal(t, 0, len(result)) } @@ -327,7 +327,7 @@ func TestBaseRewardsCreator_CreateMarshalizedDataOnlyRewardsMiniblocksGetMarshal for _, mbType := range miniBlockTypes { dummyMiniBlock.Type = mbType - result := rwd.CreateMarshalizedData(&block.Body{ + result := rwd.CreateMarshalledData(&block.Body{ MiniBlocks: block.MiniBlockSlice{ dummyMiniBlock, }, @@ -336,7 +336,7 @@ func TestBaseRewardsCreator_CreateMarshalizedDataOnlyRewardsMiniblocksGetMarshal } dummyMiniBlock.Type = block.RewardsBlock - result := rwd.CreateMarshalizedData(&block.Body{ + result := rwd.CreateMarshalledData(&block.Body{ MiniBlocks: block.MiniBlockSlice{ dummyMiniBlock, }, @@ -367,7 +367,7 @@ func TestBaseRewardsCreator_CreateMarshalizedDataWrongSenderNotIncluded(t *testi dummyMiniBlock := createDummyRewardTxMiniblock(rwd) dummyMiniBlock.Type = block.RewardsBlock dummyMiniBlock.SenderShardID = args.ShardCoordinator.SelfId() + 1 - result := rwd.CreateMarshalizedData(&block.Body{ + result := rwd.CreateMarshalledData(&block.Body{ MiniBlocks: block.MiniBlockSlice{ dummyMiniBlock, }, @@ -386,7 +386,7 @@ func TestBaseRewardsCreator_CreateMarshalizedDataNotFoundTxHashIgnored(t *testin dummyMiniBlock := createDummyRewardTxMiniblock(rwd) dummyMiniBlock.Type = block.RewardsBlock 
dummyMiniBlock.TxHashes = [][]byte{[]byte("not found txHash")} - result := rwd.CreateMarshalizedData(&block.Body{ + result := rwd.CreateMarshalledData(&block.Body{ MiniBlocks: block.MiniBlockSlice{ dummyMiniBlock, }, @@ -460,7 +460,7 @@ func TestBaseRewardsCreator_SaveTxBlockToStorageNilBodyNoPanic(t *testing.T) { require.Nil(t, err) require.NotNil(t, rwd) - rwd.SaveTxBlockToStorage(nil, nil) + rwd.SaveBlockDataToStorage(nil, nil) } func TestBaseRewardsCreator_SaveTxBlockToStorageNonRewardsMiniBlocksAreIgnored(t *testing.T) { @@ -486,7 +486,7 @@ func TestBaseRewardsCreator_SaveTxBlockToStorageNonRewardsMiniBlocksAreIgnored(t for _, mbType := range miniBlockTypes { dummyMiniBlock.Type = mbType - rwd.SaveTxBlockToStorage(nil, &block.Body{ + rwd.SaveBlockDataToStorage(nil, &block.Body{ MiniBlocks: block.MiniBlockSlice{ dummyMiniBlock, }, @@ -501,7 +501,7 @@ func TestBaseRewardsCreator_SaveTxBlockToStorageNonRewardsMiniBlocksAreIgnored(t } dummyMiniBlock.Type = block.RewardsBlock - rwd.SaveTxBlockToStorage(nil, &block.Body{ + rwd.SaveBlockDataToStorage(nil, &block.Body{ MiniBlocks: block.MiniBlockSlice{ dummyMiniBlock, }, @@ -530,7 +530,7 @@ func TestBaseRewardsCreator_SaveTxBlockToStorageNotFoundTxIgnored(t *testing.T) dummyMb := createDummyRewardTxMiniblock(rwd) dummyMb.TxHashes = [][]byte{rwTxHash} - rwd.SaveTxBlockToStorage(nil, &block.Body{MiniBlocks: block.MiniBlockSlice{dummyMb}}) + rwd.SaveBlockDataToStorage(nil, &block.Body{MiniBlocks: block.MiniBlockSlice{dummyMb}}) mmb, err := args.Marshalizer.Marshal(dummyMb) require.Nil(t, err) @@ -558,7 +558,7 @@ func TestBaseRewardsCreator_DeleteTxsFromStorageNilMetablockNoPanic(t *testing.T require.NotNil(t, rwd) dummyMb := createDummyRewardTxMiniblock(rwd) - rwd.DeleteTxsFromStorage(nil, &block.Body{MiniBlocks: block.MiniBlockSlice{dummyMb}}) + rwd.DeleteBlockDataFromStorage(nil, &block.Body{MiniBlocks: block.MiniBlockSlice{dummyMb}}) } func TestBaseRewardsCreator_DeleteTxsFromStorageNilBlockBodyNoPanic(t *testing.T) { @@ -579,7 +579,7 @@ func TestBaseRewardsCreator_DeleteTxsFromStorageNilBlockBodyNoPanic(t *testing.T DevFeesInEpoch: big.NewInt(0), } - rwd.DeleteTxsFromStorage(metaBlk, nil) + rwd.DeleteBlockDataFromStorage(metaBlk, nil) } func TestBaseRewardsCreator_DeleteTxsFromStorageNonRewardsMiniBlocksIgnored(t *testing.T) { @@ -623,7 +623,7 @@ func TestBaseRewardsCreator_DeleteTxsFromStorageNonRewardsMiniBlocksIgnored(t *t dummyMbMarshalled, _ := args.Marshalizer.Marshal(dummyMb) _ = rwd.miniBlockStorage.Put(mbHash, dummyMbMarshalled) - rwd.DeleteTxsFromStorage(metaBlk, &block.Body{MiniBlocks: block.MiniBlockSlice{dummyMb}}) + rwd.DeleteBlockDataFromStorage(metaBlk, &block.Body{MiniBlocks: block.MiniBlockSlice{dummyMb}}) tx, err = rwd.rewardsStorage.Get(rwTxHash) require.Nil(t, err) require.NotNil(t, tx) @@ -665,7 +665,7 @@ func TestBaseRewardsCreator_DeleteTxsFromStorage(t *testing.T) { dummyMbMarshalled, _ := args.Marshalizer.Marshal(dummyMb) _ = rwd.miniBlockStorage.Put(mbHash, dummyMbMarshalled) - rwd.DeleteTxsFromStorage(metaBlk, &block.Body{MiniBlocks: block.MiniBlockSlice{dummyMb}}) + rwd.DeleteBlockDataFromStorage(metaBlk, &block.Body{MiniBlocks: block.MiniBlockSlice{dummyMb}}) tx, err := rwd.rewardsStorage.Get(rwTxHash) require.NotNil(t, err) require.Nil(t, tx) @@ -710,7 +710,7 @@ func TestBaseRewardsCreator_RemoveBlockDataFromPoolsNilBlockBodyNoPanic(t *testi DevFeesInEpoch: big.NewInt(0), } - rwd.DeleteTxsFromStorage(metaBlk, nil) + rwd.DeleteBlockDataFromStorage(metaBlk, nil) } func 
TestBaseRewardsCreator_RemoveBlockDataFromPoolsNonRewardsMiniBlocksIgnored(t *testing.T) { diff --git a/epochStart/metachain/rewardsCreatorProxy.go b/epochStart/metachain/rewardsCreatorProxy.go index 1e9d643834d..2a43479aed5 100644 --- a/epochStart/metachain/rewardsCreatorProxy.go +++ b/epochStart/metachain/rewardsCreatorProxy.go @@ -96,9 +96,9 @@ func (rcp *rewardsCreatorProxy) GetLocalTxCache() epochStart.TransactionCacher { return rcp.rc.GetLocalTxCache() } -// CreateMarshalizedData proxies the same method of the configured rewardsCreator instance -func (rcp *rewardsCreatorProxy) CreateMarshalizedData(body *block.Body) map[string][][]byte { - return rcp.rc.CreateMarshalizedData(body) +// CreateMarshalledData proxies the same method of the configured rewardsCreator instance +func (rcp *rewardsCreatorProxy) CreateMarshalledData(body *block.Body) map[string][][]byte { + return rcp.rc.CreateMarshalledData(body) } // GetRewardsTxs proxies the same method of the configured rewardsCreator instance @@ -106,14 +106,14 @@ func (rcp *rewardsCreatorProxy) GetRewardsTxs(body *block.Body) map[string]data. return rcp.rc.GetRewardsTxs(body) } -// SaveTxBlockToStorage proxies the same method of the configured rewardsCreator instance -func (rcp *rewardsCreatorProxy) SaveTxBlockToStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { - rcp.rc.SaveTxBlockToStorage(metaBlock, body) +// SaveBlockDataToStorage proxies the same method of the configured rewardsCreator instance +func (rcp *rewardsCreatorProxy) SaveBlockDataToStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { + rcp.rc.SaveBlockDataToStorage(metaBlock, body) } -// DeleteTxsFromStorage proxies the same method of the configured rewardsCreator instance -func (rcp *rewardsCreatorProxy) DeleteTxsFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { - rcp.rc.DeleteTxsFromStorage(metaBlock, body) +// DeleteBlockDataFromStorage proxies the same method of the configured rewardsCreator instance +func (rcp *rewardsCreatorProxy) DeleteBlockDataFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { + rcp.rc.DeleteBlockDataFromStorage(metaBlock, body) } // RemoveBlockDataFromPools proxies the same method of the configured rewardsCreator instance diff --git a/epochStart/metachain/rewardsCreatorProxy_test.go b/epochStart/metachain/rewardsCreatorProxy_test.go index 79453cc65e9..5d00c2ac38f 100644 --- a/epochStart/metachain/rewardsCreatorProxy_test.go +++ b/epochStart/metachain/rewardsCreatorProxy_test.go @@ -217,7 +217,7 @@ func TestRewardsCreatorProxy_CreateMarshalizedData(t *testing.T) { blockBody := createDefaultBlockBody() rewardCreatorV1 := &mock.RewardsCreatorStub{ - CreateMarshalizedDataCalled: func(body *block.Body) map[string][][]byte { + CreateMarshalledDataCalled: func(body *block.Body) map[string][][]byte { if blockBody == body { return expectedValue } @@ -227,7 +227,7 @@ func TestRewardsCreatorProxy_CreateMarshalizedData(t *testing.T) { rewardsCreatorProxy, _, _ := createTestData(rewardCreatorV1, rCreatorV1) - protocolSustainabilityRewards := rewardsCreatorProxy.CreateMarshalizedData(blockBody) + protocolSustainabilityRewards := rewardsCreatorProxy.CreateMarshalledData(blockBody) require.Equal(t, expectedValue, protocolSustainabilityRewards) } @@ -262,14 +262,14 @@ func TestRewardsCreatorProxy_SaveTxBlockToStorage(t *testing.T) { functionCalled := false rewardCreatorV1 := &mock.RewardsCreatorStub{ - SaveTxBlockToStorageCalled: func(metaBlock data.MetaHeaderHandler, body *block.Body) { + 
SaveBlockDataToStorageCalled: func(metaBlock data.MetaHeaderHandler, body *block.Body) { functionCalled = true }, } rewardsCreatorProxy, _, metaBlock := createTestData(rewardCreatorV1, rCreatorV1) - rewardsCreatorProxy.SaveTxBlockToStorage(metaBlock, blockBody) + rewardsCreatorProxy.SaveBlockDataToStorage(metaBlock, blockBody) require.Equal(t, true, functionCalled) } @@ -280,14 +280,14 @@ func TestRewardsCreatorProxy_DeleteTxsFromStorage(t *testing.T) { functionCalled := false rewardCreatorV1 := &mock.RewardsCreatorStub{ - DeleteTxsFromStorageCalled: func(metaBlock data.MetaHeaderHandler, body *block.Body) { + DeleteBlockDataFromStorageCalled: func(metaBlock data.MetaHeaderHandler, body *block.Body) { functionCalled = true }, } rewardsCreatorProxy, _, metaBlock := createTestData(rewardCreatorV1, rCreatorV1) - rewardsCreatorProxy.DeleteTxsFromStorage(metaBlock, blockBody) + rewardsCreatorProxy.DeleteBlockDataFromStorage(metaBlock, blockBody) require.Equal(t, true, functionCalled) } diff --git a/epochStart/metachain/rewards_test.go b/epochStart/metachain/rewards_test.go index ec30f0d96d0..517ccc7eb03 100644 --- a/epochStart/metachain/rewards_test.go +++ b/epochStart/metachain/rewards_test.go @@ -514,7 +514,7 @@ func TestRewardsCreator_CreateMarshalizedData(t *testing.T) { }, }, } - res := rwd.CreateMarshalizedData(&bdy) + res := rwd.CreateMarshalledData(&bdy) assert.NotNil(t, res) } @@ -579,7 +579,7 @@ func TestRewardsCreator_SaveTxBlockToStorage(t *testing.T) { }, }, } - rwd.SaveTxBlockToStorage(&mb2, &bdy) + rwd.SaveBlockDataToStorage(&mb2, &bdy) assert.True(t, putRwdTxWasCalled) assert.True(t, putMbWasCalled) diff --git a/epochStart/metachain/systemSCs_test.go b/epochStart/metachain/systemSCs_test.go index 2c10e2d020b..ddcf47f874f 100644 --- a/epochStart/metachain/systemSCs_test.go +++ b/epochStart/metachain/systemSCs_test.go @@ -1130,7 +1130,7 @@ func TestSystemSCProcessor_ProcessDelegationRewardsNothingToExecute(t *testing.T }, createMemUnit()) s, _ := NewSystemSCProcessor(args) - localCache := dataPool.NewCurrentBlockPool() + localCache := dataPool.NewCurrentBlockTransactionsPool() miniBlocks := []*block.MiniBlock{ { SenderShardID: 0, @@ -1151,7 +1151,7 @@ func TestSystemSCProcessor_ProcessDelegationRewardsErrors(t *testing.T) { }, createMemUnit()) s, _ := NewSystemSCProcessor(args) - localCache := dataPool.NewCurrentBlockPool() + localCache := dataPool.NewCurrentBlockTransactionsPool() miniBlocks := []*block.MiniBlock{ { SenderShardID: core.MetachainShardId, @@ -1198,7 +1198,7 @@ func TestSystemSCProcessor_ProcessDelegationRewards(t *testing.T) { }, createMemUnit()) s, _ := NewSystemSCProcessor(args) - localCache := dataPool.NewCurrentBlockPool() + localCache := dataPool.NewCurrentBlockTransactionsPool() miniBlocks := []*block.MiniBlock{ { SenderShardID: core.MetachainShardId, diff --git a/epochStart/metachain/trigger.go b/epochStart/metachain/trigger.go index 394b4c0a5e1..7bb0f66c6c3 100644 --- a/epochStart/metachain/trigger.go +++ b/epochStart/metachain/trigger.go @@ -46,6 +46,7 @@ type ArgsNewMetaEpochStartTrigger struct { Hasher hashing.Hasher Storage dataRetriever.StorageService AppStatusHandler core.AppStatusHandler + DataPool dataRetriever.PoolsHolder } type trigger struct { @@ -69,6 +70,7 @@ type trigger struct { marshaller marshal.Marshalizer hasher hashing.Hasher appStatusHandler core.AppStatusHandler + validatorInfoPool epochStart.ValidatorInfoCacher } // NewEpochStartTrigger creates a trigger for start of epoch @@ -103,6 +105,12 @@ func NewEpochStartTrigger(args 
*ArgsNewMetaEpochStartTrigger) (*trigger, error) if check.IfNil(args.AppStatusHandler) { return nil, epochStart.ErrNilStatusHandler } + if check.IfNil(args.DataPool) { + return nil, epochStart.ErrNilDataPoolsHolder + } + if check.IfNil(args.DataPool.CurrentEpochValidatorInfo()) { + return nil, epochStart.ErrNilCurrentEpochValidatorsInfoPool + } triggerStorage, err := args.Storage.GetStorer(dataRetriever.BootstrapUnit) if err != nil { @@ -133,6 +141,7 @@ func NewEpochStartTrigger(args *ArgsNewMetaEpochStartTrigger) (*trigger, error) epochStartMeta: &block.MetaBlock{}, appStatusHandler: args.AppStatusHandler, nextEpochStartRound: disabledRoundForForceEpochStart, + validatorInfoPool: args.DataPool.CurrentEpochValidatorInfo(), } err = trig.saveState(trig.triggerStateKey) diff --git a/epochStart/metachain/triggerRegistry_test.go b/epochStart/metachain/triggerRegistry_test.go index 2c51d9782bc..70c1a8f26d0 100644 --- a/epochStart/metachain/triggerRegistry_test.go +++ b/epochStart/metachain/triggerRegistry_test.go @@ -35,6 +35,7 @@ func cloneTrigger(t *trigger) *trigger { rt.hasher = t.hasher rt.appStatusHandler = t.appStatusHandler rt.nextEpochStartRound = t.nextEpochStartRound + rt.validatorInfoPool = t.validatorInfoPool return rt } diff --git a/epochStart/metachain/trigger_test.go b/epochStart/metachain/trigger_test.go index 1bfff13338b..d06eca1f8ad 100644 --- a/epochStart/metachain/trigger_test.go +++ b/epochStart/metachain/trigger_test.go @@ -16,9 +16,11 @@ import ( "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/epochStart/mock" "github.com/ElrondNetwork/elrond-go/storage" + dataRetrieverMock "github.com/ElrondNetwork/elrond-go/testscommon/dataRetriever" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" statusHandlerMock "github.com/ElrondNetwork/elrond-go/testscommon/statusHandler" storageStubs "github.com/ElrondNetwork/elrond-go/testscommon/storage" + vic "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -53,6 +55,11 @@ func createMockEpochStartTriggerArguments() *ArgsNewMetaEpochStartTrigger { }, nil }, }, + DataPool: &dataRetrieverMock.PoolsHolderStub{ + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &vic.ValidatorInfoCacherStub{} + }, + }, } } diff --git a/epochStart/metachain/validators.go b/epochStart/metachain/validators.go index eea1720ca65..4963adb2426 100644 --- a/epochStart/metachain/validators.go +++ b/epochStart/metachain/validators.go @@ -10,6 +10,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go-core/hashing" "github.com/ElrondNetwork/elrond-go-core/marshal" + "github.com/ElrondNetwork/elrond-go/common" "github.com/ElrondNetwork/elrond-go/dataRetriever" "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/process" @@ -22,19 +23,23 @@ var _ process.EpochStartValidatorInfoCreator = (*validatorInfoCreator)(nil) // ArgsNewValidatorInfoCreator defines the arguments structure needed to create a new validatorInfo creator type ArgsNewValidatorInfoCreator struct { - ShardCoordinator sharding.Coordinator - MiniBlockStorage storage.Storer - Hasher hashing.Hasher - Marshalizer marshal.Marshalizer - DataPool dataRetriever.PoolsHolder + ShardCoordinator sharding.Coordinator + ValidatorInfoStorage storage.Storer + MiniBlockStorage storage.Storer + Hasher hashing.Hasher + Marshalizer marshal.Marshalizer + DataPool 
dataRetriever.PoolsHolder + EnableEpochsHandler common.EnableEpochsHandler } type validatorInfoCreator struct { - shardCoordinator sharding.Coordinator - miniBlockStorage storage.Storer - hasher hashing.Hasher - marshalizer marshal.Marshalizer - dataPool dataRetriever.PoolsHolder + shardCoordinator sharding.Coordinator + validatorInfoStorage storage.Storer + miniBlockStorage storage.Storer + hasher hashing.Hasher + marshalizer marshal.Marshalizer + dataPool dataRetriever.PoolsHolder + enableEpochsHandler common.EnableEpochsHandler } // NewValidatorInfoCreator creates a new validatorInfo creator object @@ -48,31 +53,44 @@ func NewValidatorInfoCreator(args ArgsNewValidatorInfoCreator) (*validatorInfoCr if check.IfNil(args.Hasher) { return nil, epochStart.ErrNilHasher } + if check.IfNil(args.ValidatorInfoStorage) { + return nil, epochStart.ErrNilValidatorInfoStorage + } if check.IfNil(args.MiniBlockStorage) { return nil, epochStart.ErrNilStorage } if check.IfNil(args.DataPool) { return nil, epochStart.ErrNilDataPoolsHolder } + if check.IfNil(args.DataPool.CurrentEpochValidatorInfo()) { + return nil, epochStart.ErrNilCurrentEpochValidatorsInfoPool + } + if check.IfNil(args.EnableEpochsHandler) { + return nil, epochStart.ErrNilEnableEpochsHandler + } vic := &validatorInfoCreator{ - shardCoordinator: args.ShardCoordinator, - hasher: args.Hasher, - marshalizer: args.Marshalizer, - miniBlockStorage: args.MiniBlockStorage, - dataPool: args.DataPool, + shardCoordinator: args.ShardCoordinator, + hasher: args.Hasher, + marshalizer: args.Marshalizer, + validatorInfoStorage: args.ValidatorInfoStorage, + miniBlockStorage: args.MiniBlockStorage, + dataPool: args.DataPool, + enableEpochsHandler: args.EnableEpochsHandler, } return vic, nil } -// CreateValidatorInfoMiniBlocks creates the validatorInfo miniblocks according to the provided validatorInfo map +// CreateValidatorInfoMiniBlocks creates the validatorInfo mini blocks according to the provided validatorInfo map func (vic *validatorInfoCreator) CreateValidatorInfoMiniBlocks(validatorsInfo map[uint32][]*state.ValidatorInfo) (block.MiniBlockSlice, error) { if validatorsInfo == nil { return nil, epochStart.ErrNilValidatorInfo } - miniblocks := make([]*block.MiniBlock, 0) + vic.clean() + + miniBlocks := make([]*block.MiniBlock, 0) for shardId := uint32(0); shardId < vic.shardCoordinator.NumberOfShards(); shardId++ { validators := validatorsInfo[shardId] @@ -85,12 +103,12 @@ func (vic *validatorInfoCreator) CreateValidatorInfoMiniBlocks(validatorsInfo ma return nil, err } - miniblocks = append(miniblocks, miniBlock) + miniBlocks = append(miniBlocks, miniBlock) } validators := validatorsInfo[core.MetachainShardId] if len(validators) == 0 { - return miniblocks, nil + return miniBlocks, nil } miniBlock, err := vic.createMiniBlock(validators) @@ -98,9 +116,9 @@ func (vic *validatorInfoCreator) CreateValidatorInfoMiniBlocks(validatorsInfo ma return nil, err } - miniblocks = append(miniblocks, miniBlock) + miniBlocks = append(miniBlocks, miniBlock) - return miniblocks, nil + return miniBlocks, nil } func (vic *validatorInfoCreator) createMiniBlock(validatorsInfo []*state.ValidatorInfo) (*block.MiniBlock, error) { @@ -118,17 +136,42 @@ func (vic *validatorInfoCreator) createMiniBlock(validatorsInfo []*state.Validat for index, validator := range validatorCopy { shardValidatorInfo := createShardValidatorInfo(validator) - marshalizedShardValidatorInfo, err := vic.marshalizer.Marshal(shardValidatorInfo) + + shardValidatorInfoData, err := 
vic.getShardValidatorInfoData(shardValidatorInfo) if err != nil { return nil, err } - miniBlock.TxHashes[index] = marshalizedShardValidatorInfo + miniBlock.TxHashes[index] = shardValidatorInfoData } return miniBlock, nil } +func (vic *validatorInfoCreator) getShardValidatorInfoData(shardValidatorInfo *state.ShardValidatorInfo) ([]byte, error) { + if vic.enableEpochsHandler.IsRefactorPeersMiniBlocksFlagEnabled() { + return vic.getShardValidatorInfoHash(shardValidatorInfo) + } + + marshalledShardValidatorInfo, err := vic.marshalizer.Marshal(shardValidatorInfo) + if err != nil { + return nil, err + } + + return marshalledShardValidatorInfo, nil +} + +func (vic *validatorInfoCreator) getShardValidatorInfoHash(shardValidatorInfo *state.ShardValidatorInfo) ([]byte, error) { + shardValidatorInfoHash, err := core.CalculateHash(vic.marshalizer, vic.hasher, shardValidatorInfo) + if err != nil { + return nil, err + } + + validatorInfoCacher := vic.dataPool.CurrentEpochValidatorInfo() + validatorInfoCacher.AddValidatorInfo(shardValidatorInfoHash, shardValidatorInfo) + return shardValidatorInfoHash, nil +} + func createShardValidatorInfo(validator *state.ValidatorInfo) *state.ShardValidatorInfo { return &state.ShardValidatorInfo{ PublicKey: validator.PublicKey, @@ -139,12 +182,9 @@ func createShardValidatorInfo(validator *state.ValidatorInfo) *state.ShardValida } } -// VerifyValidatorInfoMiniBlocks verifies if received validatorinfo miniblocks are correct -func (vic *validatorInfoCreator) VerifyValidatorInfoMiniBlocks( - miniblocks []*block.MiniBlock, - validatorsInfo map[uint32][]*state.ValidatorInfo, -) error { - if len(miniblocks) == 0 { +// VerifyValidatorInfoMiniBlocks verifies if received validator info mini blocks are correct +func (vic *validatorInfoCreator) VerifyValidatorInfoMiniBlocks(miniBlocks []*block.MiniBlock, validatorsInfo map[uint32][]*state.ValidatorInfo) error { + if len(miniBlocks) == 0 { return epochStart.ErrNilMiniblocks } @@ -165,7 +205,7 @@ func (vic *validatorInfoCreator) VerifyValidatorInfoMiniBlocks( numReceivedValidatorInfoMBs := 0 var receivedMbHash []byte - for _, receivedMb := range miniblocks { + for _, receivedMb := range miniBlocks { if receivedMb == nil { return epochStart.ErrNilMiniblock } @@ -182,7 +222,7 @@ func (vic *validatorInfoCreator) VerifyValidatorInfoMiniBlocks( _, ok := hashesToMiniBlocks[string(receivedMbHash)] if !ok { - // TODO: add display debug prints of miniblocks contents + // TODO: add display debug prints of mini blocks contents return epochStart.ErrValidatorMiniBlockHashDoesNotMatch } } @@ -194,8 +234,115 @@ func (vic *validatorInfoCreator) VerifyValidatorInfoMiniBlocks( return nil } -// SaveValidatorInfoBlocksToStorage saves created data to storage -func (vic *validatorInfoCreator) SaveValidatorInfoBlocksToStorage(_ data.HeaderHandler, body *block.Body) { +// GetLocalValidatorInfoCache returns the local validator info cache which holds all the validator info for the current epoch +func (vic *validatorInfoCreator) GetLocalValidatorInfoCache() epochStart.ValidatorInfoCacher { + return vic.dataPool.CurrentEpochValidatorInfo() +} + +// CreateMarshalledData creates the marshalled data to be sent to shards +func (vic *validatorInfoCreator) CreateMarshalledData(body *block.Body) map[string][][]byte { + if !vic.enableEpochsHandler.IsRefactorPeersMiniBlocksFlagEnabled() { + return nil + } + + if check.IfNil(body) { + return nil + } + + marshalledValidatorInfoTxs := make([][]byte, 0) + for _, miniBlock := range body.MiniBlocks { + if miniBlock.Type != 
block.PeerBlock { + continue + } + isCrossMiniBlockFromMe := miniBlock.SenderShardID == vic.shardCoordinator.SelfId() && + miniBlock.ReceiverShardID != vic.shardCoordinator.SelfId() + if !isCrossMiniBlockFromMe { + continue + } + + marshalledValidatorInfoTxs = append(marshalledValidatorInfoTxs, vic.getMarshalledValidatorInfoTxs(miniBlock)...) + } + + mapMarshalledValidatorInfoTxs := make(map[string][][]byte) + if len(marshalledValidatorInfoTxs) > 0 { + mapMarshalledValidatorInfoTxs[common.ValidatorInfoTopic] = marshalledValidatorInfoTxs + } + + return mapMarshalledValidatorInfoTxs +} + +func (vic *validatorInfoCreator) getMarshalledValidatorInfoTxs(miniBlock *block.MiniBlock) [][]byte { + validatorInfoCacher := vic.dataPool.CurrentEpochValidatorInfo() + + marshalledValidatorInfoTxs := make([][]byte, 0) + for _, txHash := range miniBlock.TxHashes { + validatorInfoTx, err := validatorInfoCacher.GetValidatorInfo(txHash) + if err != nil { + log.Error("validatorInfoCreator.getMarshalledValidatorInfoTxs.GetValidatorInfo", "hash", txHash, "error", err) + continue + } + + marshalledData, err := vic.marshalizer.Marshal(validatorInfoTx) + if err != nil { + log.Error("validatorInfoCreator.getMarshalledValidatorInfoTxs.Marshal", "hash", txHash, "error", err) + continue + } + + marshalledValidatorInfoTxs = append(marshalledValidatorInfoTxs, marshalledData) + } + + return marshalledValidatorInfoTxs +} + +// GetValidatorInfoTxs returns validator info txs for the current epoch +func (vic *validatorInfoCreator) GetValidatorInfoTxs(body *block.Body) map[string]*state.ShardValidatorInfo { + mapShardValidatorInfo := make(map[string]*state.ShardValidatorInfo) + + for _, miniBlock := range body.MiniBlocks { + if miniBlock.Type != block.PeerBlock { + continue + } + + vic.setMapShardValidatorInfo(miniBlock, mapShardValidatorInfo) + } + + return mapShardValidatorInfo +} + +func (vic *validatorInfoCreator) setMapShardValidatorInfo(miniBlock *block.MiniBlock, mapShardValidatorInfo map[string]*state.ShardValidatorInfo) { + for _, txHash := range miniBlock.TxHashes { + shardValidatorInfo, err := vic.getShardValidatorInfo(txHash) + if err != nil { + log.Error("validatorInfoCreator.setMapShardValidatorInfo", "hash", txHash, "error", err) + continue + } + + mapShardValidatorInfo[string(txHash)] = shardValidatorInfo + } +} + +func (vic *validatorInfoCreator) getShardValidatorInfo(txHash []byte) (*state.ShardValidatorInfo, error) { + if vic.enableEpochsHandler.IsRefactorPeersMiniBlocksFlagEnabled() { + validatorInfoCacher := vic.dataPool.CurrentEpochValidatorInfo() + shardValidatorInfo, err := validatorInfoCacher.GetValidatorInfo(txHash) + if err != nil { + return nil, err + } + + return shardValidatorInfo, nil + } + + shardValidatorInfo := &state.ShardValidatorInfo{} + err := vic.marshalizer.Unmarshal(shardValidatorInfo, txHash) + if err != nil { + return nil, err + } + + return shardValidatorInfo, nil +} + +// SaveBlockDataToStorage saves block data to storage +func (vic *validatorInfoCreator) SaveBlockDataToStorage(_ data.HeaderHandler, body *block.Body) { if check.IfNil(body) { return } @@ -205,40 +352,96 @@ func (vic *validatorInfoCreator) SaveValidatorInfoBlocksToStorage(_ data.HeaderH continue } - marshalizedData, err := vic.marshalizer.Marshal(miniBlock) + if vic.enableEpochsHandler.IsRefactorPeersMiniBlocksFlagEnabled() { + vic.saveValidatorInfo(miniBlock) + } + + marshalledData, err := vic.marshalizer.Marshal(miniBlock) if err != nil { + log.Error("validatorInfoCreator.SaveBlockDataToStorage.Marshal", "error", 
err) continue } - mbHash := vic.hasher.Compute(string(marshalizedData)) - _ = vic.miniBlockStorage.Put(mbHash, marshalizedData) + mbHash := vic.hasher.Compute(string(marshalledData)) + err = vic.miniBlockStorage.Put(mbHash, marshalledData) + if err != nil { + log.Debug("validatorInfoCreator.SaveBlockDataToStorage.Put", "hash", mbHash, "error", err) + } } } -// DeleteValidatorInfoBlocksFromStorage deletes data from storage -func (vic *validatorInfoCreator) DeleteValidatorInfoBlocksFromStorage(metaBlock data.HeaderHandler) { - if check.IfNil(metaBlock) { +func (vic *validatorInfoCreator) saveValidatorInfo(miniBlock *block.MiniBlock) { + validatorInfoCacher := vic.dataPool.CurrentEpochValidatorInfo() + + for _, validatorInfoHash := range miniBlock.TxHashes { + validatorInfo, err := validatorInfoCacher.GetValidatorInfo(validatorInfoHash) + if err != nil { + log.Error("validatorInfoCreator.saveValidatorInfo.GetValidatorInfo", "hash", validatorInfoHash, "error", err) + continue + } + + marshalledData, err := vic.marshalizer.Marshal(validatorInfo) + if err != nil { + log.Error("validatorInfoCreator.saveValidatorInfo.Marshal", "hash", validatorInfoHash, "error", err) + continue + } + + err = vic.validatorInfoStorage.Put(validatorInfoHash, marshalledData) + if err != nil { + log.Debug("validatorInfoCreator.saveValidatorInfo.Put", "hash", validatorInfoHash, "error", err) + } + } +} + +// DeleteBlockDataFromStorage deletes block data from storage +func (vic *validatorInfoCreator) DeleteBlockDataFromStorage(metaBlock data.HeaderHandler, body *block.Body) { + if check.IfNil(metaBlock) || check.IfNil(body) { return } + if vic.enableEpochsHandler.IsRefactorPeersMiniBlocksFlagEnabled() { + vic.removeValidatorInfo(body) + } + for _, mbHeader := range metaBlock.GetMiniBlockHeaderHandlers() { if mbHeader.GetTypeInt32() == int32(block.PeerBlock) { - _ = vic.miniBlockStorage.Remove(mbHeader.GetHash()) + err := vic.miniBlockStorage.Remove(mbHeader.GetHash()) + if err != nil { + log.Debug("validatorInfoCreator.DeleteBlockDataFromStorage.Remove", "hash", mbHeader.GetHash(), "error", err) + } } } } -// IsInterfaceNil return true if underlying object is nil -func (vic *validatorInfoCreator) IsInterfaceNil() bool { - return vic == nil +func (vic *validatorInfoCreator) removeValidatorInfo(body *block.Body) { + for _, miniBlock := range body.MiniBlocks { + if miniBlock.Type != block.PeerBlock { + continue + } + + vic.removeValidatorInfoFromStorage(miniBlock) + } +} + +func (vic *validatorInfoCreator) removeValidatorInfoFromStorage(miniBlock *block.MiniBlock) { + for _, txHash := range miniBlock.TxHashes { + err := vic.validatorInfoStorage.Remove(txHash) + if err != nil { + log.Debug("validatorInfoCreator.removeValidatorInfoFromStorage.Remove", "hash", txHash, "error", err) + } + } } -// RemoveBlockDataFromPools removes block info from pools -func (vic *validatorInfoCreator) RemoveBlockDataFromPools(metaBlock data.HeaderHandler, _ *block.Body) { +// RemoveBlockDataFromPools removes block data from pools +func (vic *validatorInfoCreator) RemoveBlockDataFromPools(metaBlock data.HeaderHandler, body *block.Body) { if check.IfNil(metaBlock) { return } + if vic.enableEpochsHandler.IsRefactorPeersMiniBlocksFlagEnabled() { + vic.removeValidatorInfoFromPool(body) + } + miniBlocksPool := vic.dataPool.MiniBlocks() for _, mbHeader := range metaBlock.GetMiniBlockHeaderHandlers() { @@ -256,3 +459,27 @@ func (vic *validatorInfoCreator) RemoveBlockDataFromPools(metaBlock data.HeaderH "num txs", mbHeader.GetTxCount()) } } + +func (vic 
*validatorInfoCreator) removeValidatorInfoFromPool(body *block.Body) { + validatorInfoPool := vic.dataPool.ValidatorsInfo() + + for _, miniBlock := range body.MiniBlocks { + if miniBlock.Type != block.PeerBlock { + continue + } + + for _, txHash := range miniBlock.TxHashes { + validatorInfoPool.RemoveDataFromAllShards(txHash) + } + } +} + +func (vic *validatorInfoCreator) clean() { + currentEpochValidatorInfo := vic.dataPool.CurrentEpochValidatorInfo() + currentEpochValidatorInfo.Clean() +} + +// IsInterfaceNil returns true if underlying object is nil +func (vic *validatorInfoCreator) IsInterfaceNil() bool { + return vic == nil +} diff --git a/epochStart/metachain/validators_test.go b/epochStart/metachain/validators_test.go index c65c0a2ecbb..739f367b8ad 100644 --- a/epochStart/metachain/validators_test.go +++ b/epochStart/metachain/validators_test.go @@ -11,6 +11,8 @@ import ( "github.com/ElrondNetwork/elrond-go-core/core" "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go-core/marshal" + "github.com/ElrondNetwork/elrond-go/common" + "github.com/ElrondNetwork/elrond-go/dataRetriever" "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/process/mock" "github.com/ElrondNetwork/elrond-go/state" @@ -18,6 +20,8 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon" dataRetrieverMock "github.com/ElrondNetwork/elrond-go/testscommon/dataRetriever" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" + vics "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -112,22 +116,29 @@ func createMockEpochValidatorInfoCreatorsArguments() ArgsNewValidatorInfoCreator _ = shardCoordinator.SetSelfId(core.MetachainShardId) argsNewEpochEconomics := ArgsNewValidatorInfoCreator{ - ShardCoordinator: shardCoordinator, - MiniBlockStorage: createMemUnit(), - Hasher: &hashingMocks.HasherMock{}, - Marshalizer: &mock.MarshalizerMock{}, + ShardCoordinator: shardCoordinator, + ValidatorInfoStorage: createMemUnit(), + MiniBlockStorage: createMemUnit(), + Hasher: &hashingMocks.HasherMock{}, + Marshalizer: &mock.MarshalizerMock{}, DataPool: &dataRetrieverMock.PoolsHolderStub{ MiniBlocksCalled: func() storage.Cacher { return &testscommon.CacherStub{ RemoveCalled: func(key []byte) {}, } }, + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &vics.ValidatorInfoCacherStub{} + }, + }, + EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, }, } return argsNewEpochEconomics } -func verifyMiniBlocks(bl *block.MiniBlock, infos []*state.ValidatorInfo, marshalizer marshal.Marshalizer) bool { +func verifyMiniBlocks(bl *block.MiniBlock, infos []*state.ValidatorInfo, marshalledShardValidatorsInfo [][]byte, marshalizer marshal.Marshalizer) bool { if bl.SenderShardID != core.MetachainShardId || bl.ReceiverShardID != core.AllShardId || len(bl.TxHashes) == 0 || @@ -141,10 +152,10 @@ func verifyMiniBlocks(bl *block.MiniBlock, infos []*state.ValidatorInfo, marshal return bytes.Compare(validatorCopy[a].PublicKey, validatorCopy[b].PublicKey) < 0 }) - for i, txHash := range bl.TxHashes { - vi := createShardValidatorInfo(validatorCopy[i]) + for i, marshalledShardValidatorInfo := range marshalledShardValidatorsInfo { + vi := createShardValidatorInfo(infos[i]) unmarshaledVi := &state.ShardValidatorInfo{} - _ = marshalizer.Unmarshal(unmarshaledVi, txHash) + _ = 
marshalizer.Unmarshal(unmarshaledVi, marshalledShardValidatorInfo) if !reflect.DeepEqual(unmarshaledVi, vi) { return false } @@ -208,6 +219,17 @@ func TestEpochValidatorInfoCreator_NewValidatorInfoCreatorNilDataPool(t *testing require.Equal(t, epochStart.ErrNilDataPoolsHolder, err) } +func TestEpochValidatorInfoCreator_NewValidatorInfoCreatorNilEnableEpochsHandler(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + arguments.EnableEpochsHandler = nil + vic, err := NewValidatorInfoCreator(arguments) + + require.Nil(t, vic) + require.Equal(t, epochStart.ErrNilEnableEpochsHandler, err) +} + func TestEpochValidatorInfoCreator_NewValidatorInfoCreatorShouldWork(t *testing.T) { t.Parallel() @@ -264,9 +286,22 @@ func TestEpochValidatorInfoCreator_CreateValidatorInfoMiniBlocksShouldBeCorrect( vic, _ := NewValidatorInfoCreator(arguments) mbs, _ := vic.CreateValidatorInfoMiniBlocks(validatorInfo) - correctMB0 := verifyMiniBlocks(mbs[0], validatorInfo[0], arguments.Marshalizer) + shardValidatorInfo := make([]*state.ShardValidatorInfo, len(validatorInfo[0])) + marshalledShardValidatorInfo := make([][]byte, len(validatorInfo[0])) + for i := 0; i < len(validatorInfo[0]); i++ { + shardValidatorInfo[i] = createShardValidatorInfo(validatorInfo[0][i]) + marshalledShardValidatorInfo[i], _ = arguments.Marshalizer.Marshal(shardValidatorInfo[i]) + } + correctMB0 := verifyMiniBlocks(mbs[0], validatorInfo[0], marshalledShardValidatorInfo, arguments.Marshalizer) require.True(t, correctMB0) - correctMbMeta := verifyMiniBlocks(mbs[1], validatorInfo[core.MetachainShardId], arguments.Marshalizer) + + shardValidatorInfo = make([]*state.ShardValidatorInfo, len(validatorInfo[core.MetachainShardId])) + marshalledShardValidatorInfo = make([][]byte, len(validatorInfo[core.MetachainShardId])) + for i := 0; i < len(validatorInfo[core.MetachainShardId]); i++ { + shardValidatorInfo[i] = createShardValidatorInfo(validatorInfo[core.MetachainShardId][i]) + marshalledShardValidatorInfo[i], _ = arguments.Marshalizer.Marshal(shardValidatorInfo[i]) + } + correctMbMeta := verifyMiniBlocks(mbs[1], validatorInfo[core.MetachainShardId], marshalledShardValidatorInfo, arguments.Marshalizer) require.True(t, correctMbMeta) } @@ -367,9 +402,9 @@ func createValidatorInfoMiniBlocks( }) for index, validator := range validatorCopy { - shardValidator := createShardValidatorInfo(validator) - marshalizedValidator, _ := arguments.Marshalizer.Marshal(shardValidator) - miniBlock.TxHashes[index] = marshalizedValidator + shardValidatorInfo := createShardValidatorInfo(validator) + shardValidatorInfoHash, _ := core.CalculateHash(arguments.Marshalizer, arguments.Hasher, shardValidatorInfo) + miniBlock.TxHashes[index] = shardValidatorInfoHash } miniblocks = append(miniblocks, miniBlock) @@ -377,7 +412,7 @@ func createValidatorInfoMiniBlocks( return miniblocks } -func TestEpochValidatorInfoCreator_SaveValidatorInfoBlocksToStorage(t *testing.T) { +func TestEpochValidatorInfoCreator_SaveValidatorInfoBlockDataToStorage(t *testing.T) { validatorInfo := createMockValidatorInfo() arguments := createMockEpochValidatorInfoCreatorsArguments() arguments.MiniBlockStorage = mock.NewStorerMock() @@ -427,7 +462,7 @@ func TestEpochValidatorInfoCreator_SaveValidatorInfoBlocksToStorage(t *testing.T } body := &block.Body{MiniBlocks: miniblocks} - vic.SaveValidatorInfoBlocksToStorage(meta, body) + vic.SaveBlockDataToStorage(meta, body) for i, mbHeader := range meta.MiniBlockHeaders { mb, err := miniBlockStorage.Get(mbHeader.Hash) @@ 
-440,15 +475,15 @@ func TestEpochValidatorInfoCreator_SaveValidatorInfoBlocksToStorage(t *testing.T } } -func TestEpochValidatorInfoCreator_DeleteValidatorInfoBlocksFromStorage(t *testing.T) { - testDeleteValidatorInfoBlock(t, block.PeerBlock, false) +func TestEpochValidatorInfoCreator_DeleteValidatorInfoBlockDataFromStorage(t *testing.T) { + testDeleteValidatorInfoBlockData(t, block.PeerBlock, false) } -func TestEpochValidatorInfoCreator_DeleteValidatorInfoBlocksFromStorageDoesDeleteOnlyPeerBlocks(t *testing.T) { - testDeleteValidatorInfoBlock(t, block.TxBlock, true) +func TestEpochValidatorInfoCreator_DeleteValidatorInfoBlockDataFromStorageDoesDeleteOnlyPeerBlocks(t *testing.T) { + testDeleteValidatorInfoBlockData(t, block.TxBlock, true) } -func testDeleteValidatorInfoBlock(t *testing.T, blockType block.Type, shouldExist bool) { +func testDeleteValidatorInfoBlockData(t *testing.T, blockType block.Type, shouldExist bool) { validatorInfo := createMockValidatorInfo() arguments := createMockEpochValidatorInfoCreatorsArguments() arguments.MiniBlockStorage = mock.NewStorerMock() @@ -504,7 +539,8 @@ func testDeleteValidatorInfoBlock(t *testing.T, blockType block.Type, shouldExis require.Nil(t, err) } - vic.DeleteValidatorInfoBlocksFromStorage(meta) + body := &block.Body{} + vic.DeleteBlockDataFromStorage(meta, body) for _, mbHeader := range meta.MiniBlockHeaders { mb, err := mbStorage.Get(mbHeader.Hash) @@ -525,3 +561,500 @@ func TestEpochValidatorInfoCreator_IsInterfaceNil(t *testing.T) { vic, _ := NewValidatorInfoCreator(arguments) require.False(t, vic.IsInterfaceNil()) } + +func TestEpochValidatorInfoCreator_GetShardValidatorInfoData(t *testing.T) { + t.Parallel() + + t.Run("get shard validator info data before refactor peers mini block activation flag is set", func(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + arguments.EnableEpochsHandler = &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: false, + } + vic, _ := NewValidatorInfoCreator(arguments) + + shardValidatorInfo := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + marshalledShardValidatorInfo, _ := arguments.Marshalizer.Marshal(shardValidatorInfo) + shardValidatorInfoData, _ := vic.getShardValidatorInfoData(shardValidatorInfo) + assert.Equal(t, marshalledShardValidatorInfo, shardValidatorInfoData) + }) + + t.Run("get shard validator info data after refactor peers mini block activation flag is set", func(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + arguments.EnableEpochsHandler = &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + } + vic, _ := NewValidatorInfoCreator(arguments) + + shardValidatorInfo := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + shardValidatorInfoHash, _ := core.CalculateHash(arguments.Marshalizer, arguments.Hasher, shardValidatorInfo) + shardValidatorInfoData, _ := vic.getShardValidatorInfoData(shardValidatorInfo) + assert.Equal(t, shardValidatorInfoHash, shardValidatorInfoData) + }) +} + +func TestEpochValidatorInfoCreator_CreateMarshalledData(t *testing.T) { + t.Parallel() + + t.Run("CreateMarshalledData should return nil before refactor peers mini block activation flag is set", func(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + arguments.EnableEpochsHandler = &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: false, + } + vic, _ := 
NewValidatorInfoCreator(arguments) + + body := createMockBlockBody(0, 1, block.TxBlock) + marshalledData := vic.CreateMarshalledData(body) + assert.Nil(t, marshalledData) + }) + + t.Run("CreateMarshalledData should return nil body is nil", func(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + arguments.EnableEpochsHandler = &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + } + vic, _ := NewValidatorInfoCreator(arguments) + + marshalledData := vic.CreateMarshalledData(nil) + assert.Nil(t, marshalledData) + }) + + t.Run("CreateMarshalledData should return empty slice when there is no peer mini block in body", func(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + arguments.EnableEpochsHandler = &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + } + vic, _ := NewValidatorInfoCreator(arguments) + + body := createMockBlockBody(0, 1, block.TxBlock) + marshalledData := vic.CreateMarshalledData(body) + assert.Equal(t, make(map[string][][]byte), marshalledData) + }) + + t.Run("CreateMarshalledData should return empty slice when sender or receiver do not match", func(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + arguments.EnableEpochsHandler = &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + } + vic, _ := NewValidatorInfoCreator(arguments) + + body := createMockBlockBody(0, 1, block.PeerBlock) + marshalledData := vic.CreateMarshalledData(body) + assert.Equal(t, make(map[string][][]byte), marshalledData) + }) + + t.Run("CreateMarshalledData should return empty slice when tx hash does not exist in validator info cacher", func(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + arguments.EnableEpochsHandler = &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + } + arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &vics.ValidatorInfoCacherStub{ + GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + return nil, errors.New("error") + }, + } + }, + } + vic, _ := NewValidatorInfoCreator(arguments) + + body := createMockBlockBody(core.MetachainShardId, 0, block.PeerBlock) + marshalledData := vic.CreateMarshalledData(body) + assert.Equal(t, make(map[string][][]byte), marshalledData) + }) + + t.Run("CreateMarshalledData should work", func(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + + svi1 := &state.ShardValidatorInfo{PublicKey: []byte("x")} + marshalledSVI1, _ := arguments.Marshalizer.Marshal(svi1) + + svi2 := &state.ShardValidatorInfo{PublicKey: []byte("y")} + marshalledSVI2, _ := arguments.Marshalizer.Marshal(svi2) + + svi3 := &state.ShardValidatorInfo{PublicKey: []byte("z")} + marshalledSVI3, _ := arguments.Marshalizer.Marshal(svi3) + + arguments.EnableEpochsHandler = &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + } + arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &vics.ValidatorInfoCacherStub{ + GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + if bytes.Equal(validatorInfoHash, []byte("a")) { + 
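// Illustrative sketch only: the tests above exercise the two representations a
// peer mini block entry can take. Before the RefactorPeersMiniBlocks activation
// epoch each TxHashes element holds the marshalled ShardValidatorInfo itself;
// once the flag is enabled it holds only the hash, and the full record is kept
// in the current-epoch validator info cacher (getShardValidatorInfoData /
// getShardValidatorInfoHash above). The stand-ins below (JSON marshalling,
// sha256, a plain map) replace the elrond-go marshalizer, hasher and
// ValidatorInfoCacher and are assumptions made for this sketch, not production API.
package main

import (
	"crypto/sha256"
	"encoding/json"
	"fmt"
)

type shardValidatorInfo struct {
	PublicKey []byte `json:"publicKey"`
}

type currentEpochCache map[string]*shardValidatorInfo

// encodeEntry mirrors getShardValidatorInfoData: hash + cache when the flag is
// enabled, the marshalled payload itself otherwise.
func encodeEntry(svi *shardValidatorInfo, refactorEnabled bool, cache currentEpochCache) ([]byte, error) {
	marshalled, err := json.Marshal(svi)
	if err != nil {
		return nil, err
	}
	if !refactorEnabled {
		return marshalled, nil
	}
	hash := sha256.Sum256(marshalled)
	cache[string(hash[:])] = svi
	return hash[:], nil
}

// decodeEntry mirrors getShardValidatorInfo: cache lookup when the flag is
// enabled, unmarshalling of the embedded payload otherwise.
func decodeEntry(entry []byte, refactorEnabled bool, cache currentEpochCache) (*shardValidatorInfo, error) {
	if refactorEnabled {
		svi, ok := cache[string(entry)]
		if !ok {
			return nil, fmt.Errorf("validator info not found for hash %x", entry)
		}
		return svi, nil
	}
	svi := &shardValidatorInfo{}
	err := json.Unmarshal(entry, svi)
	return svi, err
}

func main() {
	cache := currentEpochCache{}
	svi := &shardValidatorInfo{PublicKey: []byte("x")}
	for _, refactorEnabled := range []bool{false, true} {
		entry, _ := encodeEntry(svi, refactorEnabled, cache)
		decoded, _ := decodeEntry(entry, refactorEnabled, cache)
		fmt.Printf("refactor enabled=%v, entry size=%d bytes, decoded pk=%s\n", refactorEnabled, len(entry), decoded.PublicKey)
	}
}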
return svi1, nil + } + if bytes.Equal(validatorInfoHash, []byte("b")) { + return svi2, nil + } + if bytes.Equal(validatorInfoHash, []byte("c")) { + return svi3, nil + } + return nil, errors.New("error") + }, + } + }, + } + vic, _ := NewValidatorInfoCreator(arguments) + + body := createMockBlockBody(core.MetachainShardId, 0, block.PeerBlock) + marshalledData := vic.CreateMarshalledData(body) + require.Equal(t, 1, len(marshalledData)) + require.Equal(t, 3, len(marshalledData[common.ValidatorInfoTopic])) + assert.Equal(t, marshalledSVI1, marshalledData[common.ValidatorInfoTopic][0]) + assert.Equal(t, marshalledSVI2, marshalledData[common.ValidatorInfoTopic][1]) + assert.Equal(t, marshalledSVI3, marshalledData[common.ValidatorInfoTopic][2]) + }) +} + +func TestEpochValidatorInfoCreator_SetMarshalledValidatorInfoTxsShouldWork(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + + svi1 := &state.ShardValidatorInfo{PublicKey: []byte("x")} + marshalledSVI1, _ := arguments.Marshalizer.Marshal(svi1) + + svi2 := &state.ShardValidatorInfo{PublicKey: []byte("y")} + marshalledSVI2, _ := arguments.Marshalizer.Marshal(svi2) + + arguments.EnableEpochsHandler = &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + } + arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &vics.ValidatorInfoCacherStub{ + GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + if bytes.Equal(validatorInfoHash, []byte("a")) { + return svi1, nil + } + if bytes.Equal(validatorInfoHash, []byte("c")) { + return svi2, nil + } + return nil, errors.New("error") + }, + } + }, + } + vic, _ := NewValidatorInfoCreator(arguments) + + miniBlock := createMockMiniBlock(core.MetachainShardId, 0, block.PeerBlock) + marshalledValidatorInfoTxs := vic.getMarshalledValidatorInfoTxs(miniBlock) + + require.Equal(t, 2, len(marshalledValidatorInfoTxs)) + assert.Equal(t, marshalledSVI1, marshalledValidatorInfoTxs[0]) + assert.Equal(t, marshalledSVI2, marshalledValidatorInfoTxs[1]) +} + +func TestEpochValidatorInfoCreator_GetValidatorInfoTxsShouldWork(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + + svi1 := &state.ShardValidatorInfo{PublicKey: []byte("x")} + svi2 := &state.ShardValidatorInfo{PublicKey: []byte("y")} + svi3 := &state.ShardValidatorInfo{PublicKey: []byte("z")} + + arguments.EnableEpochsHandler = &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + } + arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &vics.ValidatorInfoCacherStub{ + GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + if bytes.Equal(validatorInfoHash, []byte("a")) { + return svi1, nil + } + if bytes.Equal(validatorInfoHash, []byte("b")) { + return svi2, nil + } + if bytes.Equal(validatorInfoHash, []byte("c")) { + return svi3, nil + } + return nil, errors.New("error") + }, + } + }, + } + vic, _ := NewValidatorInfoCreator(arguments) + + body := &block.Body{} + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.TxBlock)) + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) + mapValidatorInfoTxs := vic.GetValidatorInfoTxs(body) + + require.Equal(t, 3, 
len(mapValidatorInfoTxs)) + require.Equal(t, svi1, mapValidatorInfoTxs["a"]) + require.Equal(t, svi2, mapValidatorInfoTxs["b"]) + require.Equal(t, svi3, mapValidatorInfoTxs["c"]) +} + +func TestEpochValidatorInfoCreator_SetMapShardValidatorInfoShouldWork(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + + svi1 := &state.ShardValidatorInfo{PublicKey: []byte("x")} + svi2 := &state.ShardValidatorInfo{PublicKey: []byte("y")} + + arguments.EnableEpochsHandler = &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + } + arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &vics.ValidatorInfoCacherStub{ + GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + if bytes.Equal(validatorInfoHash, []byte("a")) { + return svi1, nil + } + if bytes.Equal(validatorInfoHash, []byte("b")) { + return svi2, nil + } + return nil, errors.New("error") + }, + } + }, + } + vic, _ := NewValidatorInfoCreator(arguments) + + miniBlock := createMockMiniBlock(core.MetachainShardId, 0, block.TxBlock) + mapShardValidatorInfo := make(map[string]*state.ShardValidatorInfo) + vic.setMapShardValidatorInfo(miniBlock, mapShardValidatorInfo) + + require.Equal(t, 2, len(mapShardValidatorInfo)) + require.Equal(t, svi1, mapShardValidatorInfo["a"]) + require.Equal(t, svi2, mapShardValidatorInfo["b"]) +} + +func TestEpochValidatorInfoCreator_GetShardValidatorInfoShouldWork(t *testing.T) { + t.Parallel() + + t.Run("get shard validator info before refactor peers mini block activation flag is set", func(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + + svi := &state.ShardValidatorInfo{PublicKey: []byte("x")} + marshalledSVI, _ := arguments.Marshalizer.Marshal(svi) + + arguments.EnableEpochsHandler = &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: false, + } + arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &vics.ValidatorInfoCacherStub{ + GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + if bytes.Equal(validatorInfoHash, []byte("a")) { + return svi, nil + } + return nil, errors.New("error") + }, + } + }, + } + vic, _ := NewValidatorInfoCreator(arguments) + + shardValidatorInfo, _ := vic.getShardValidatorInfo(marshalledSVI) + require.Equal(t, svi, shardValidatorInfo) + }) + + t.Run("get shard validator info after refactor peers mini block activation flag is set", func(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + + svi := &state.ShardValidatorInfo{PublicKey: []byte("x")} + + arguments.EnableEpochsHandler = &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + } + arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &vics.ValidatorInfoCacherStub{ + GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + if bytes.Equal(validatorInfoHash, []byte("a")) { + return svi, nil + } + return nil, errors.New("error") + }, + } + }, + } + vic, _ := NewValidatorInfoCreator(arguments) + + shardValidatorInfo, _ := vic.getShardValidatorInfo([]byte("a")) + require.Equal(t, svi, shardValidatorInfo) + }) +} + +func 
TestEpochValidatorInfoCreator_SaveValidatorInfoShouldWork(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + + svi1 := &state.ShardValidatorInfo{PublicKey: []byte("x")} + marshalledSVI1, _ := arguments.Marshalizer.Marshal(svi1) + + svi2 := &state.ShardValidatorInfo{PublicKey: []byte("y")} + marshalledSVI2, _ := arguments.Marshalizer.Marshal(svi2) + + storer := createMemUnit() + arguments.ValidatorInfoStorage = storer + arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &vics.ValidatorInfoCacherStub{ + GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + if bytes.Equal(validatorInfoHash, []byte("a")) { + return svi1, nil + } + if bytes.Equal(validatorInfoHash, []byte("b")) { + return svi2, nil + } + return nil, errors.New("error") + }, + } + }, + } + vic, _ := NewValidatorInfoCreator(arguments) + + miniBlock := createMockMiniBlock(core.MetachainShardId, 0, block.TxBlock) + vic.saveValidatorInfo(miniBlock) + + msvi1, err := storer.Get([]byte("a")) + assert.Nil(t, err) + assert.Equal(t, marshalledSVI1, msvi1) + + msvi2, err := storer.Get([]byte("b")) + assert.Nil(t, err) + assert.Equal(t, marshalledSVI2, msvi2) + + msvi3, err := storer.Get([]byte("c")) + assert.NotNil(t, err) + assert.Nil(t, msvi3) +} + +func TestEpochValidatorInfoCreator_RemoveValidatorInfoShouldWork(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + + storer := createMemUnit() + arguments.ValidatorInfoStorage = storer + vic, _ := NewValidatorInfoCreator(arguments) + + body := &block.Body{} + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.TxBlock)) + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) + + _ = storer.Put([]byte("a"), []byte("aa")) + _ = storer.Put([]byte("b"), []byte("bb")) + _ = storer.Put([]byte("c"), []byte("cc")) + _ = storer.Put([]byte("d"), []byte("dd")) + + vic.removeValidatorInfo(body) + + msvi, err := storer.Get([]byte("a")) + assert.NotNil(t, err) + assert.Nil(t, msvi) + + msvi, err = storer.Get([]byte("b")) + assert.NotNil(t, err) + assert.Nil(t, msvi) + + msvi, err = storer.Get([]byte("c")) + assert.NotNil(t, err) + assert.Nil(t, msvi) + + msvi, err = storer.Get([]byte("d")) + assert.Nil(t, err) + assert.Equal(t, []byte("dd"), msvi) +} + +func TestEpochValidatorInfoCreator_RemoveValidatorInfoFromPoolShouldWork(t *testing.T) { + t.Parallel() + + shardedDataCacheNotifierMock := testscommon.NewShardedDataCacheNotifierMock() + arguments := createMockEpochValidatorInfoCreatorsArguments() + arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &vics.ValidatorInfoCacherStub{} + }, + ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { + return shardedDataCacheNotifierMock + }, + } + + vic, _ := NewValidatorInfoCreator(arguments) + + body := &block.Body{} + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.TxBlock)) + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) + + svi1 := &state.ShardValidatorInfo{PublicKey: []byte("aa")} + svi2 := &state.ShardValidatorInfo{PublicKey: []byte("bb")} + svi3 := &state.ShardValidatorInfo{PublicKey: []byte("cc")} + svi4 := 
&state.ShardValidatorInfo{PublicKey: []byte("dd")} + + shardedDataCacheNotifierMock.AddData([]byte("a"), svi1, svi1.Size(), "x") + shardedDataCacheNotifierMock.AddData([]byte("b"), svi2, svi2.Size(), "x") + shardedDataCacheNotifierMock.AddData([]byte("c"), svi3, svi3.Size(), "x") + shardedDataCacheNotifierMock.AddData([]byte("d"), svi4, svi4.Size(), "x") + + vic.removeValidatorInfoFromPool(body) + + svi, found := shardedDataCacheNotifierMock.SearchFirstData([]byte("a")) + assert.False(t, found) + assert.Nil(t, svi) + + svi, found = shardedDataCacheNotifierMock.SearchFirstData([]byte("b")) + assert.False(t, found) + assert.Nil(t, svi) + + svi, found = shardedDataCacheNotifierMock.SearchFirstData([]byte("c")) + assert.False(t, found) + assert.Nil(t, svi) + + svi, found = shardedDataCacheNotifierMock.SearchFirstData([]byte("d")) + assert.True(t, found) + assert.Equal(t, svi4, svi) +} + +func createMockBlockBody(senderShardID, receiverShardID uint32, blockType block.Type) *block.Body { + return &block.Body{ + MiniBlocks: []*block.MiniBlock{createMockMiniBlock(senderShardID, receiverShardID, blockType)}, + } +} + +func createMockMiniBlock(senderShardID, receiverShardID uint32, blockType block.Type) *block.MiniBlock { + return &block.MiniBlock{ + SenderShardID: senderShardID, + ReceiverShardID: receiverShardID, + Type: blockType, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + } +} diff --git a/epochStart/mock/epochStartNotifierStub.go b/epochStart/mock/epochStartNotifierStub.go index c8599c685f2..0fe06e739bc 100644 --- a/epochStart/mock/epochStartNotifierStub.go +++ b/epochStart/mock/epochStartNotifierStub.go @@ -1,6 +1,8 @@ package mock -import "github.com/ElrondNetwork/elrond-go-core/data" +import ( + "github.com/ElrondNetwork/elrond-go-core/data" +) // EpochStartNotifierStub - type EpochStartNotifierStub struct { diff --git a/epochStart/mock/rewardsCreatorStub.go b/epochStart/mock/rewardsCreatorStub.go index 3be87ced58a..5fa99e8df4e 100644 --- a/epochStart/mock/rewardsCreatorStub.go +++ b/epochStart/mock/rewardsCreatorStub.go @@ -19,10 +19,10 @@ type RewardsCreatorStub struct { ) error GetProtocolSustainabilityRewardsCalled func() *big.Int GetLocalTxCacheCalled func() epochStart.TransactionCacher - CreateMarshalizedDataCalled func(body *block.Body) map[string][][]byte + CreateMarshalledDataCalled func(body *block.Body) map[string][][]byte GetRewardsTxsCalled func(body *block.Body) map[string]data.TransactionHandler - SaveTxBlockToStorageCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) - DeleteTxsFromStorageCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) + SaveBlockDataToStorageCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) + DeleteBlockDataFromStorageCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) RemoveBlockDataFromPoolsCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) } @@ -68,10 +68,10 @@ func (rcs *RewardsCreatorStub) GetLocalTxCache() epochStart.TransactionCacher { return nil } -// CreateMarshalizedData - -func (rcs *RewardsCreatorStub) CreateMarshalizedData(body *block.Body) map[string][][]byte { - if rcs.CreateMarshalizedDataCalled != nil { - return rcs.CreateMarshalizedDataCalled(body) +// CreateMarshalledData - +func (rcs *RewardsCreatorStub) CreateMarshalledData(body *block.Body) map[string][][]byte { + if rcs.CreateMarshalledDataCalled != nil { + return rcs.CreateMarshalledDataCalled(body) } return nil } @@ -84,17 +84,17 @@ func (rcs *RewardsCreatorStub) GetRewardsTxs(body 
*block.Body) map[string]data.T return nil } -// SaveTxBlockToStorage - -func (rcs *RewardsCreatorStub) SaveTxBlockToStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { - if rcs.SaveTxBlockToStorageCalled != nil { - rcs.SaveTxBlockToStorageCalled(metaBlock, body) +// SaveBlockDataToStorage - +func (rcs *RewardsCreatorStub) SaveBlockDataToStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { + if rcs.SaveBlockDataToStorageCalled != nil { + rcs.SaveBlockDataToStorageCalled(metaBlock, body) } } -// DeleteTxsFromStorage - -func (rcs *RewardsCreatorStub) DeleteTxsFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { - if rcs.DeleteTxsFromStorageCalled != nil { - rcs.DeleteTxsFromStorageCalled(metaBlock, body) +// DeleteBlockDataFromStorage - +func (rcs *RewardsCreatorStub) DeleteBlockDataFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { + if rcs.DeleteBlockDataFromStorageCalled != nil { + rcs.DeleteBlockDataFromStorageCalled(metaBlock, body) } } diff --git a/epochStart/mock/validatorInfoSyncerStub.go b/epochStart/mock/validatorInfoSyncerStub.go index 29a006317c4..3dedd99d58e 100644 --- a/epochStart/mock/validatorInfoSyncerStub.go +++ b/epochStart/mock/validatorInfoSyncerStub.go @@ -2,6 +2,7 @@ package mock import ( "github.com/ElrondNetwork/elrond-go-core/data" + "github.com/ElrondNetwork/elrond-go/state" ) // ValidatorInfoSyncerStub - @@ -13,6 +14,11 @@ func (vip *ValidatorInfoSyncerStub) SyncMiniBlocks(_ data.HeaderHandler) ([][]by return nil, nil, nil } +// SyncValidatorsInfo - +func (vip *ValidatorInfoSyncerStub) SyncValidatorsInfo(_ data.BodyHandler) ([][]byte, map[string]*state.ShardValidatorInfo, error) { + return nil, nil, nil +} + // IsInterfaceNil - func (vip *ValidatorInfoSyncerStub) IsInterfaceNil() bool { return vip == nil diff --git a/epochStart/shardchain/peerMiniBlocksSyncer.go b/epochStart/shardchain/peerMiniBlocksSyncer.go index c3cabc0432e..d1f320227f9 100644 --- a/epochStart/shardchain/peerMiniBlocksSyncer.go +++ b/epochStart/shardchain/peerMiniBlocksSyncer.go @@ -1,7 +1,6 @@ package shardchain import ( - "fmt" "sync" "time" @@ -9,8 +8,10 @@ import ( "github.com/ElrondNetwork/elrond-go-core/core/check" "github.com/ElrondNetwork/elrond-go-core/data" "github.com/ElrondNetwork/elrond-go-core/data/block" + "github.com/ElrondNetwork/elrond-go/dataRetriever" "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/process" + "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/storage" ) @@ -21,19 +22,25 @@ const waitTime = 5 * time.Second // ArgPeerMiniBlockSyncer holds all dependencies required to create a peerMiniBlockSyncer type ArgPeerMiniBlockSyncer struct { - MiniBlocksPool storage.Cacher - Requesthandler epochStart.RequestHandler + MiniBlocksPool storage.Cacher + ValidatorsInfoPool dataRetriever.ShardedDataCacherNotifier + RequestHandler epochStart.RequestHandler } -// peerMiniBlockSyncer implements validator info processing for miniblocks of type peerMiniblock +// peerMiniBlockSyncer implements validator info processing for mini blocks of type PeerMiniBlock type peerMiniBlockSyncer struct { - miniBlocksPool storage.Cacher - requestHandler epochStart.RequestHandler - - mapAllPeerMiniblocks map[string]*block.MiniBlock - chRcvAllMiniblocks chan struct{} - mutMiniBlocksForBlock sync.RWMutex - numMissingPeerMiniblocks uint32 + miniBlocksPool storage.Cacher + validatorsInfoPool dataRetriever.ShardedDataCacherNotifier + requestHandler epochStart.RequestHandler + + 
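// Illustrative sketch only: the refactored syncer above keeps two parallel sets
// of bookkeeping fields (for peer mini blocks and for validators info), each
// made of a map of hash -> item where nil means "still missing", a counter of
// missing entries, a mutex and a channel signalled when the last missing item
// arrives. The type below is a local stand-in that shows that pattern in
// isolation; it uses a buffered channel so the example runs sequentially,
// whereas the production code sends on an unbuffered channel to a concurrent waiter.
package main

import (
	"fmt"
	"sync"
)

type missingTracker struct {
	mut        sync.RWMutex
	items      map[string][]byte // nil value => requested but not yet received
	numMissing uint32
	chAllRecvd chan struct{}
}

func newMissingTracker(hashes [][]byte) *missingTracker {
	t := &missingTracker{
		items:      make(map[string][]byte),
		chAllRecvd: make(chan struct{}, 1),
	}
	for _, h := range hashes {
		t.items[string(h)] = nil
		t.numMissing++
	}
	return t
}

// received mirrors receivedMiniBlock / receivedValidatorInfo: ignore items that
// were never requested or already arrived, otherwise store the item, decrement
// the missing counter and signal once nothing is missing anymore.
func (t *missingTracker) received(hash []byte, item []byte) {
	t.mut.Lock()
	existing, requested := t.items[string(hash)]
	if !requested || existing != nil {
		t.mut.Unlock()
		return
	}
	t.items[string(hash)] = item
	t.numMissing--
	remaining := t.numMissing
	t.mut.Unlock()

	if remaining == 0 {
		t.chAllRecvd <- struct{}{}
	}
}

func main() {
	tracker := newMissingTracker([][]byte{[]byte("a"), []byte("b")})
	tracker.received([]byte("a"), []byte("payload-a"))
	tracker.received([]byte("b"), []byte("payload-b"))
	<-tracker.chAllRecvd
	fmt.Println("all missing items received")
}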
mapAllPeerMiniBlocks map[string]*block.MiniBlock + mapAllValidatorsInfo map[string]*state.ShardValidatorInfo + chRcvAllMiniBlocks chan struct{} + chRcvAllValidatorsInfo chan struct{} + mutMiniBlocksForBlock sync.RWMutex + mutValidatorsInfoForBlock sync.RWMutex + numMissingPeerMiniBlocks uint32 + numMissingValidatorsInfo uint32 } // NewPeerMiniBlockSyncer creates a new peerMiniBlockSyncer object @@ -41,71 +48,143 @@ func NewPeerMiniBlockSyncer(arguments ArgPeerMiniBlockSyncer) (*peerMiniBlockSyn if check.IfNil(arguments.MiniBlocksPool) { return nil, epochStart.ErrNilMiniBlockPool } - if check.IfNil(arguments.Requesthandler) { + if check.IfNil(arguments.ValidatorsInfoPool) { + return nil, epochStart.ErrNilValidatorsInfoPool + } + if check.IfNil(arguments.RequestHandler) { return nil, epochStart.ErrNilRequestHandler } p := &peerMiniBlockSyncer{ - miniBlocksPool: arguments.MiniBlocksPool, - requestHandler: arguments.Requesthandler, + miniBlocksPool: arguments.MiniBlocksPool, + validatorsInfoPool: arguments.ValidatorsInfoPool, + requestHandler: arguments.RequestHandler, } //TODO: change the registerHandler for the miniblockPool to call //directly with hash and value - like func (sp *shardProcessor) receivedMetaBlock p.miniBlocksPool.RegisterHandler(p.receivedMiniBlock, core.UniqueIdentifier()) + p.validatorsInfoPool.RegisterOnAdded(p.receivedValidatorInfo) return p, nil } -func (p *peerMiniBlockSyncer) init() { +func (p *peerMiniBlockSyncer) initMiniBlocks() { p.mutMiniBlocksForBlock.Lock() - p.mapAllPeerMiniblocks = make(map[string]*block.MiniBlock) - p.chRcvAllMiniblocks = make(chan struct{}) + p.mapAllPeerMiniBlocks = make(map[string]*block.MiniBlock) + p.chRcvAllMiniBlocks = make(chan struct{}) p.mutMiniBlocksForBlock.Unlock() } -// SyncMiniBlocks processes an epochstart block asyncrhonous, processing the PeerMiniblocks -func (p *peerMiniBlockSyncer) SyncMiniBlocks(metaBlock data.HeaderHandler) ([][]byte, data.BodyHandler, error) { - if check.IfNil(metaBlock) { +func (p *peerMiniBlockSyncer) initValidatorsInfo() { + p.mutValidatorsInfoForBlock.Lock() + p.mapAllValidatorsInfo = make(map[string]*state.ShardValidatorInfo) + p.chRcvAllValidatorsInfo = make(chan struct{}) + p.mutValidatorsInfoForBlock.Unlock() +} + +// SyncMiniBlocks synchronizes peers mini blocks from an epoch start meta block +func (p *peerMiniBlockSyncer) SyncMiniBlocks(headerHandler data.HeaderHandler) ([][]byte, data.BodyHandler, error) { + if check.IfNil(headerHandler) { return nil, nil, epochStart.ErrNilMetaBlock } - p.init() + p.initMiniBlocks() - p.computeMissingPeerBlocks(metaBlock) + p.computeMissingPeerBlocks(headerHandler) - allMissingPeerMiniblocksHashes, err := p.retrieveMissingBlocks() + allMissingPeerMiniBlocksHashes, err := p.retrieveMissingMiniBlocks() if err != nil { - return allMissingPeerMiniblocksHashes, nil, err + return allMissingPeerMiniBlocksHashes, nil, err } - peerBlockBody := p.getAllPeerMiniBlocks(metaBlock) + peerBlockBody := p.getAllPeerMiniBlocks(headerHandler) return nil, peerBlockBody, nil } +// SyncValidatorsInfo synchronizes validators info from a block body of an epoch start meta block +func (p *peerMiniBlockSyncer) SyncValidatorsInfo(bodyHandler data.BodyHandler) ([][]byte, map[string]*state.ShardValidatorInfo, error) { + if check.IfNil(bodyHandler) { + return nil, nil, epochStart.ErrNilBlockBody + } + + body, ok := bodyHandler.(*block.Body) + if !ok { + return nil, nil, epochStart.ErrWrongTypeAssertion + } + + p.initValidatorsInfo() + + p.computeMissingValidatorsInfo(body) + + 
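// Illustrative sketch only: retrieveMissingValidatorsInfo (and its mini block
// counterpart) follows a request-and-wait pattern: fire the request handler on
// a separate goroutine, then select between the "all received" channel and a
// timeout, returning the hashes still missing together with a time-out error.
// waitTime, errTimeIsOut and the request callback below are stand-ins for the
// production waitTime (5 seconds), process.ErrTimeIsOut and
// requestHandler.RequestValidatorsInfo.
package main

import (
	"errors"
	"fmt"
	"time"
)

var errTimeIsOut = errors.New("time is out")

const waitTime = 100 * time.Millisecond

func waitForMissing(missingHashes [][]byte, request func([][]byte), chAllReceived chan struct{}) ([][]byte, error) {
	if len(missingHashes) == 0 {
		return nil, nil
	}

	// the request is issued asynchronously, exactly like
	// go p.requestHandler.RequestValidatorsInfo(missingValidatorsInfo)
	go request(missingHashes)

	select {
	case <-chAllReceived:
		return nil, nil
	case <-time.After(waitTime):
		// the production code recomputes which hashes are still missing here;
		// this sketch simply hands back the original request list
		return missingHashes, errTimeIsOut
	}
}

func main() {
	chAllReceived := make(chan struct{})
	request := func(hashes [][]byte) {
		fmt.Printf("requested %d validator info entries\n", len(hashes))
		chAllReceived <- struct{}{} // pretend everything arrived in time
	}

	stillMissing, err := waitForMissing([][]byte{[]byte("a"), []byte("b")}, request, chAllReceived)
	fmt.Println("still missing:", len(stillMissing), "err:", err)
}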
allMissingValidatorsInfoHashes, err := p.retrieveMissingValidatorsInfo() + if err != nil { + return allMissingValidatorsInfoHashes, nil, err + } + + validatorsInfo := p.getAllValidatorsInfo(body) + + return nil, validatorsInfo, nil +} + func (p *peerMiniBlockSyncer) receivedMiniBlock(key []byte, val interface{}) { - peerMb, ok := val.(*block.MiniBlock) - if !ok || peerMb.Type != block.PeerBlock { + peerMiniBlock, ok := val.(*block.MiniBlock) + if !ok { + log.Error("receivedMiniBlock", "key", key, "error", epochStart.ErrWrongTypeAssertion) return } - log.Trace(fmt.Sprintf("received miniblock of type %s", peerMb.Type)) + if peerMiniBlock.Type != block.PeerBlock { + return + } + + log.Debug("peerMiniBlockSyncer.receivedMiniBlock", "mb type", peerMiniBlock.Type) p.mutMiniBlocksForBlock.Lock() - havingPeerMb, ok := p.mapAllPeerMiniblocks[string(key)] + havingPeerMb, ok := p.mapAllPeerMiniBlocks[string(key)] if !ok || havingPeerMb != nil { p.mutMiniBlocksForBlock.Unlock() return } - p.mapAllPeerMiniblocks[string(key)] = peerMb - p.numMissingPeerMiniblocks-- - numMissingPeerMiniblocks := p.numMissingPeerMiniblocks + p.mapAllPeerMiniBlocks[string(key)] = peerMiniBlock + p.numMissingPeerMiniBlocks-- + numMissingPeerMiniBlocks := p.numMissingPeerMiniBlocks p.mutMiniBlocksForBlock.Unlock() - if numMissingPeerMiniblocks == 0 { - p.chRcvAllMiniblocks <- struct{}{} + log.Debug("peerMiniBlockSyncer.receivedMiniBlock", "mb hash", key, "num missing peer mini blocks", numMissingPeerMiniBlocks) + + if numMissingPeerMiniBlocks == 0 { + p.chRcvAllMiniBlocks <- struct{}{} + } +} + +func (p *peerMiniBlockSyncer) receivedValidatorInfo(key []byte, val interface{}) { + validatorInfo, ok := val.(*state.ShardValidatorInfo) + if !ok { + log.Error("receivedValidatorInfo", "key", key, "error", epochStart.ErrWrongTypeAssertion) + return + } + + log.Debug("peerMiniBlockSyncer.receivedValidatorInfo", "pk", validatorInfo.PublicKey) + + p.mutValidatorsInfoForBlock.Lock() + havingValidatorInfo, ok := p.mapAllValidatorsInfo[string(key)] + if !ok || havingValidatorInfo != nil { + p.mutValidatorsInfoForBlock.Unlock() + return + } + + p.mapAllValidatorsInfo[string(key)] = validatorInfo + p.numMissingValidatorsInfo-- + numMissingValidatorsInfo := p.numMissingValidatorsInfo + p.mutValidatorsInfoForBlock.Unlock() + + log.Debug("peerMiniBlockSyncer.receivedValidatorInfo", "tx hash", key, "num missing validators info", numMissingValidatorsInfo) + + if numMissingValidatorsInfo == 0 { + p.chRcvAllValidatorsInfo <- struct{}{} } } @@ -121,74 +200,157 @@ func (p *peerMiniBlockSyncer) getAllPeerMiniBlocks(metaBlock data.HeaderHandler) continue } - mb := p.mapAllPeerMiniblocks[string(peerMiniBlock.GetHash())] + mb := p.mapAllPeerMiniBlocks[string(peerMiniBlock.GetHash())] peerBlockBody.MiniBlocks = append(peerBlockBody.MiniBlocks, mb) } return peerBlockBody } +func (p *peerMiniBlockSyncer) getAllValidatorsInfo(body *block.Body) map[string]*state.ShardValidatorInfo { + p.mutValidatorsInfoForBlock.Lock() + defer p.mutValidatorsInfoForBlock.Unlock() + + validatorsInfo := make(map[string]*state.ShardValidatorInfo) + for _, mb := range body.MiniBlocks { + if mb.Type != block.PeerBlock { + continue + } + + for _, txHash := range mb.TxHashes { + validatorInfo := p.mapAllValidatorsInfo[string(txHash)] + validatorsInfo[string(txHash)] = validatorInfo + } + } + + return validatorsInfo +} + func (p *peerMiniBlockSyncer) computeMissingPeerBlocks(metaBlock data.HeaderHandler) { - numMissingPeerMiniblocks := uint32(0) p.mutMiniBlocksForBlock.Lock() + 
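// Illustrative sketch only: the syncer above subscribes to the validators info
// pool through RegisterOnAdded and its callback must defensively type-assert
// the received value, as receivedValidatorInfo does. notifierPool and
// shardValidatorInfo below are local stand-ins, not the elrond-go
// ShardedDataCacherNotifier and state.ShardValidatorInfo.
package main

import "fmt"

type shardValidatorInfo struct {
	publicKey []byte
}

type notifierPool struct {
	handlers []func(key []byte, value interface{})
	data     map[string]interface{}
}

func newNotifierPool() *notifierPool {
	return &notifierPool{data: make(map[string]interface{})}
}

// RegisterOnAdded mimics the subscription done via validatorsInfoPool.RegisterOnAdded.
func (np *notifierPool) RegisterOnAdded(handler func(key []byte, value interface{})) {
	np.handlers = append(np.handlers, handler)
}

// AddData stores the value and notifies every registered handler.
func (np *notifierPool) AddData(key []byte, value interface{}) {
	np.data[string(key)] = value
	for _, handler := range np.handlers {
		handler(key, value)
	}
}

func main() {
	pool := newNotifierPool()
	pool.RegisterOnAdded(func(key []byte, value interface{}) {
		svi, ok := value.(*shardValidatorInfo)
		if !ok {
			// mirrors the wrong-type guard in receivedValidatorInfo
			fmt.Printf("ignoring value with unexpected type for key %s\n", key)
			return
		}
		fmt.Printf("received validator info for key %s, pk %s\n", key, svi.publicKey)
	})

	pool.AddData([]byte("a"), &shardValidatorInfo{publicKey: []byte("x")})
	pool.AddData([]byte("b"), "not a validator info")
}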
defer p.mutMiniBlocksForBlock.Unlock() + numMissingPeerMiniBlocks := uint32(0) for _, mb := range metaBlock.GetMiniBlockHeaderHandlers() { if mb.GetTypeInt32() != int32(block.PeerBlock) { continue } - p.mapAllPeerMiniblocks[string(mb.GetHash())] = nil + p.mapAllPeerMiniBlocks[string(mb.GetHash())] = nil mbObjectFound, ok := p.miniBlocksPool.Peek(mb.GetHash()) if !ok { - numMissingPeerMiniblocks++ + numMissingPeerMiniBlocks++ continue } mbFound, ok := mbObjectFound.(*block.MiniBlock) if !ok { - numMissingPeerMiniblocks++ + numMissingPeerMiniBlocks++ continue } - p.mapAllPeerMiniblocks[string(mb.GetHash())] = mbFound + p.mapAllPeerMiniBlocks[string(mb.GetHash())] = mbFound } - p.numMissingPeerMiniblocks = numMissingPeerMiniblocks - p.mutMiniBlocksForBlock.Unlock() + p.numMissingPeerMiniBlocks = numMissingPeerMiniBlocks } -func (p *peerMiniBlockSyncer) retrieveMissingBlocks() ([][]byte, error) { +func (p *peerMiniBlockSyncer) computeMissingValidatorsInfo(body *block.Body) { + p.mutValidatorsInfoForBlock.Lock() + defer p.mutValidatorsInfoForBlock.Unlock() + + numMissingValidatorsInfo := uint32(0) + for _, miniBlock := range body.MiniBlocks { + if miniBlock.Type != block.PeerBlock { + continue + } + + numMissingValidatorsInfo += p.setMissingValidatorsInfo(miniBlock) + } + + p.numMissingValidatorsInfo = numMissingValidatorsInfo +} + +func (p *peerMiniBlockSyncer) setMissingValidatorsInfo(miniBlock *block.MiniBlock) uint32 { + numMissingValidatorsInfo := uint32(0) + for _, txHash := range miniBlock.TxHashes { + p.mapAllValidatorsInfo[string(txHash)] = nil + + validatorInfoObjectFound, ok := p.validatorsInfoPool.SearchFirstData(txHash) + if !ok { + numMissingValidatorsInfo++ + continue + } + + validatorInfo, ok := validatorInfoObjectFound.(*state.ShardValidatorInfo) + if !ok { + numMissingValidatorsInfo++ + continue + } + + p.mapAllValidatorsInfo[string(txHash)] = validatorInfo + } + + return numMissingValidatorsInfo +} + +func (p *peerMiniBlockSyncer) retrieveMissingMiniBlocks() ([][]byte, error) { p.mutMiniBlocksForBlock.Lock() - missingMiniblocks := make([][]byte, 0) - for mbHash, mb := range p.mapAllPeerMiniblocks { + missingMiniBlocks := make([][]byte, 0) + for mbHash, mb := range p.mapAllPeerMiniBlocks { if mb == nil { - missingMiniblocks = append(missingMiniblocks, []byte(mbHash)) + missingMiniBlocks = append(missingMiniBlocks, []byte(mbHash)) } } - p.numMissingPeerMiniblocks = uint32(len(missingMiniblocks)) + p.numMissingPeerMiniBlocks = uint32(len(missingMiniBlocks)) p.mutMiniBlocksForBlock.Unlock() - if len(missingMiniblocks) == 0 { + if len(missingMiniBlocks) == 0 { + return nil, nil + } + + go p.requestHandler.RequestMiniBlocks(core.MetachainShardId, missingMiniBlocks) + + select { + case <-p.chRcvAllMiniBlocks: + return nil, nil + case <-time.After(waitTime): + return p.getAllMissingPeerMiniBlocksHashes(), process.ErrTimeIsOut + } +} + +func (p *peerMiniBlockSyncer) retrieveMissingValidatorsInfo() ([][]byte, error) { + p.mutValidatorsInfoForBlock.Lock() + missingValidatorsInfo := make([][]byte, 0) + for validatorInfoHash, validatorInfo := range p.mapAllValidatorsInfo { + if validatorInfo == nil { + missingValidatorsInfo = append(missingValidatorsInfo, []byte(validatorInfoHash)) + } + } + p.numMissingValidatorsInfo = uint32(len(missingValidatorsInfo)) + p.mutValidatorsInfoForBlock.Unlock() + + if len(missingValidatorsInfo) == 0 { return nil, nil } - go p.requestHandler.RequestMiniBlocks(core.MetachainShardId, missingMiniblocks) + go 
p.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) select { - case <-p.chRcvAllMiniblocks: + case <-p.chRcvAllValidatorsInfo: return nil, nil case <-time.After(waitTime): - return p.getAllMissingPeerMiniblocksHashes(), process.ErrTimeIsOut + return p.getAllMissingValidatorsInfoHashes(), process.ErrTimeIsOut } } -func (p *peerMiniBlockSyncer) getAllMissingPeerMiniblocksHashes() [][]byte { +func (p *peerMiniBlockSyncer) getAllMissingPeerMiniBlocksHashes() [][]byte { p.mutMiniBlocksForBlock.RLock() defer p.mutMiniBlocksForBlock.RUnlock() missingPeerMiniBlocksHashes := make([][]byte, 0) - for hash, mb := range p.mapAllPeerMiniblocks { + for hash, mb := range p.mapAllPeerMiniBlocks { if mb == nil { missingPeerMiniBlocksHashes = append(missingPeerMiniBlocksHashes, []byte(hash)) } @@ -197,6 +359,20 @@ func (p *peerMiniBlockSyncer) getAllMissingPeerMiniblocksHashes() [][]byte { return missingPeerMiniBlocksHashes } +func (p *peerMiniBlockSyncer) getAllMissingValidatorsInfoHashes() [][]byte { + p.mutValidatorsInfoForBlock.RLock() + defer p.mutValidatorsInfoForBlock.RUnlock() + + missingValidatorsInfoHashes := make([][]byte, 0) + for validatorInfoHash, validatorInfo := range p.mapAllValidatorsInfo { + if validatorInfo == nil { + missingValidatorsInfoHashes = append(missingValidatorsInfoHashes, []byte(validatorInfoHash)) + } + } + + return missingValidatorsInfoHashes +} + // IsInterfaceNil returns true if underlying object is nil func (p *peerMiniBlockSyncer) IsInterfaceNil() bool { return p == nil diff --git a/epochStart/shardchain/peerMiniBlocksSyncer_test.go b/epochStart/shardchain/peerMiniBlocksSyncer_test.go index a72b794ab18..57ebcd61291 100644 --- a/epochStart/shardchain/peerMiniBlocksSyncer_test.go +++ b/epochStart/shardchain/peerMiniBlocksSyncer_test.go @@ -6,6 +6,7 @@ import ( "time" "github.com/ElrondNetwork/elrond-go-core/core" + "github.com/ElrondNetwork/elrond-go-core/core/atomic" "github.com/ElrondNetwork/elrond-go-core/core/check" "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go/epochStart" @@ -13,13 +14,15 @@ import ( "github.com/ElrondNetwork/elrond-go/process" "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/testscommon" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func createDefaultArguments() ArgPeerMiniBlockSyncer { defaultArgs := ArgPeerMiniBlockSyncer{ - MiniBlocksPool: testscommon.NewCacherStub(), - Requesthandler: &testscommon.RequestHandlerStub{}, + MiniBlocksPool: testscommon.NewCacherStub(), + ValidatorsInfoPool: testscommon.NewShardedDataStub(), + RequestHandler: &testscommon.RequestHandlerStub{}, } return defaultArgs @@ -36,11 +39,22 @@ func TestNewValidatorInfoProcessor_NilMiniBlocksPoolErr(t *testing.T) { require.Equal(t, epochStart.ErrNilMiniBlockPool, err) } +func TestNewValidatorInfoProcessor_NilValidatorsInfoPoolShouldErr(t *testing.T) { + t.Parallel() + + args := createDefaultArguments() + args.ValidatorsInfoPool = nil + syncer, err := NewPeerMiniBlockSyncer(args) + + require.Nil(t, syncer) + require.Equal(t, epochStart.ErrNilValidatorsInfoPool, err) +} + func TestNewValidatorInfoProcessor_NilRequestHandlerShouldErr(t *testing.T) { t.Parallel() args := createDefaultArguments() - args.Requesthandler = nil + args.RequestHandler = nil syncer, err := NewPeerMiniBlockSyncer(args) require.Nil(t, syncer) @@ -253,7 +267,7 @@ func TestValidatorInfoProcessor_ProcesStartOfEpochWithMissinPeerMiniblocksShould }, } - args.Requesthandler = 
&testscommon.RequestHandlerStub{ + args.RequestHandler = &testscommon.RequestHandlerStub{ RequestMiniBlocksHandlerCalled: func(destShardID uint32, miniblockHashes [][]byte) { if destShardID == core.MetachainShardId && bytes.Equal(miniblockHashes[0], peerMiniBlockHash) { @@ -317,7 +331,7 @@ func TestValidatorInfoProcessor_ProcesStartOfEpochWithMissinPeerMiniblocksTimeou }, } - args.Requesthandler = &testscommon.RequestHandlerStub{ + args.RequestHandler = &testscommon.RequestHandlerStub{ RequestMiniBlocksHandlerCalled: func(destShardID uint32, miniblockHashes [][]byte) { if destShardID == core.MetachainShardId && bytes.Equal(miniblockHashes[0], peerMiniBlockHash) { @@ -332,3 +346,383 @@ func TestValidatorInfoProcessor_ProcesStartOfEpochWithMissinPeerMiniblocksTimeou require.Equal(t, process.ErrTimeIsOut, processError) } + +func TestValidatorInfoProcessor_SyncValidatorsInfo(t *testing.T) { + t.Parallel() + + t.Run("sync validators info with nil block body", func(t *testing.T) { + t.Parallel() + + args := createDefaultArguments() + syncer, _ := NewPeerMiniBlockSyncer(args) + + missingValidatorsInfoHashes, validatorsInfo, err := syncer.SyncValidatorsInfo(nil) + assert.Nil(t, missingValidatorsInfoHashes) + assert.Nil(t, validatorsInfo) + assert.Equal(t, epochStart.ErrNilBlockBody, err) + }) + + t.Run("sync validators info with missing validators info", func(t *testing.T) { + t.Parallel() + + args := createDefaultArguments() + args.ValidatorsInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + return nil, false + }, + } + syncer, _ := NewPeerMiniBlockSyncer(args) + + body := &block.Body{} + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.TxBlock)) + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) + missingValidatorsInfoHashes, validatorsInfo, err := syncer.SyncValidatorsInfo(body) + + assert.Equal(t, 3, len(missingValidatorsInfoHashes)) + assert.Nil(t, validatorsInfo) + assert.Equal(t, process.ErrTimeIsOut, err) + }) + + t.Run("sync validators info without missing validators info", func(t *testing.T) { + t.Parallel() + + svi1 := &state.ShardValidatorInfo{PublicKey: []byte("x")} + svi2 := &state.ShardValidatorInfo{PublicKey: []byte("y")} + svi3 := &state.ShardValidatorInfo{PublicKey: []byte("z")} + + args := createDefaultArguments() + args.ValidatorsInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + if bytes.Equal(key, []byte("a")) { + return svi1, true + } + if bytes.Equal(key, []byte("b")) { + return svi2, true + } + if bytes.Equal(key, []byte("c")) { + return svi3, true + } + return nil, false + }, + } + syncer, _ := NewPeerMiniBlockSyncer(args) + + body := &block.Body{} + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.TxBlock)) + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) + missingValidatorsInfoHashes, validatorsInfo, err := syncer.SyncValidatorsInfo(body) + + assert.Nil(t, err) + assert.Nil(t, missingValidatorsInfoHashes) + assert.Equal(t, 3, len(validatorsInfo)) + assert.Equal(t, svi1, validatorsInfo["a"]) + assert.Equal(t, svi2, validatorsInfo["b"]) + assert.Equal(t, svi3, validatorsInfo["c"]) + }) +} + +func TestValidatorInfoProcessor_ReceivedValidatorInfo(t *testing.T) { + t.Parallel() + + t.Run("received validators info with wrong type assertion", 
func(t *testing.T) { + t.Parallel() + + args := createDefaultArguments() + syncer, _ := NewPeerMiniBlockSyncer(args) + syncer.initValidatorsInfo() + + syncer.mutValidatorsInfoForBlock.Lock() + syncer.mapAllValidatorsInfo["a"] = nil + syncer.numMissingValidatorsInfo = 1 + syncer.mutValidatorsInfoForBlock.Unlock() + + syncer.receivedValidatorInfo([]byte("a"), nil) + + syncer.mutValidatorsInfoForBlock.RLock() + numMissingValidatorsInfo := syncer.numMissingValidatorsInfo + syncer.mutValidatorsInfoForBlock.RUnlock() + + assert.Equal(t, uint32(1), numMissingValidatorsInfo) + }) + + t.Run("received validators info with not requested validator info", func(t *testing.T) { + t.Parallel() + + svi := &state.ShardValidatorInfo{PublicKey: []byte("x")} + + args := createDefaultArguments() + syncer, _ := NewPeerMiniBlockSyncer(args) + syncer.initValidatorsInfo() + + syncer.mutValidatorsInfoForBlock.Lock() + syncer.mapAllValidatorsInfo["a"] = nil + syncer.numMissingValidatorsInfo = 1 + syncer.mutValidatorsInfoForBlock.Unlock() + + syncer.receivedValidatorInfo([]byte("b"), svi) + + syncer.mutValidatorsInfoForBlock.RLock() + numMissingValidatorsInfo := syncer.numMissingValidatorsInfo + syncer.mutValidatorsInfoForBlock.RUnlock() + + assert.Equal(t, uint32(1), numMissingValidatorsInfo) + }) + + t.Run("received validators info with already received validator info", func(t *testing.T) { + t.Parallel() + + svi := &state.ShardValidatorInfo{PublicKey: []byte("x")} + + args := createDefaultArguments() + syncer, _ := NewPeerMiniBlockSyncer(args) + syncer.initValidatorsInfo() + + syncer.mutValidatorsInfoForBlock.Lock() + syncer.mapAllValidatorsInfo["a"] = svi + syncer.numMissingValidatorsInfo = 1 + syncer.mutValidatorsInfoForBlock.Unlock() + + syncer.receivedValidatorInfo([]byte("a"), svi) + + syncer.mutValidatorsInfoForBlock.RLock() + numMissingValidatorsInfo := syncer.numMissingValidatorsInfo + syncer.mutValidatorsInfoForBlock.RUnlock() + + assert.Equal(t, uint32(1), numMissingValidatorsInfo) + }) + + t.Run("received validators info with missing validator info", func(t *testing.T) { + t.Parallel() + + svi := &state.ShardValidatorInfo{PublicKey: []byte("x")} + + args := createDefaultArguments() + syncer, _ := NewPeerMiniBlockSyncer(args) + syncer.initValidatorsInfo() + + syncer.mutValidatorsInfoForBlock.Lock() + syncer.mapAllValidatorsInfo["a"] = nil + syncer.numMissingValidatorsInfo = 1 + syncer.mutValidatorsInfoForBlock.Unlock() + + wasWithTimeOut := atomic.Flag{} + go func() { + select { + case <-syncer.chRcvAllValidatorsInfo: + return + case <-time.After(time.Second): + wasWithTimeOut.SetValue(true) + return + } + }() + + syncer.receivedValidatorInfo([]byte("a"), svi) + + syncer.mutValidatorsInfoForBlock.RLock() + numMissingValidatorsInfo := syncer.numMissingValidatorsInfo + syncer.mutValidatorsInfoForBlock.RUnlock() + + assert.False(t, wasWithTimeOut.IsSet()) + assert.Equal(t, uint32(0), numMissingValidatorsInfo) + }) +} + +func TestValidatorInfoProcessor_GetAllValidatorsInfoShouldWork(t *testing.T) { + t.Parallel() + + svi1 := &state.ShardValidatorInfo{PublicKey: []byte("x")} + svi2 := &state.ShardValidatorInfo{PublicKey: []byte("y")} + svi3 := &state.ShardValidatorInfo{PublicKey: []byte("z")} + + args := createDefaultArguments() + syncer, _ := NewPeerMiniBlockSyncer(args) + syncer.initValidatorsInfo() + + syncer.mutValidatorsInfoForBlock.Lock() + syncer.mapAllValidatorsInfo["a"] = svi1 + syncer.mapAllValidatorsInfo["b"] = svi2 + syncer.mapAllValidatorsInfo["c"] = svi3 + 
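// Illustrative sketch only: the tests above check that the "all received"
// channel gets signalled without ever blocking the test goroutine, by running
// a watcher that selects between the channel and a timeout and records a
// timeout in an atomic flag. The standard library atomic.Bool is used here as
// a stand-in for elrond-go-core's atomic.Flag.
package main

import (
	"fmt"
	"sync/atomic"
	"time"
)

func main() {
	chAllReceived := make(chan struct{}, 1)
	var timedOut atomic.Bool
	done := make(chan struct{})

	// watcher: either the signal arrives or a timeout is recorded
	go func() {
		defer close(done)
		select {
		case <-chAllReceived:
		case <-time.After(time.Second):
			timedOut.Store(true)
		}
	}()

	// the code under test performs this send when the last missing item arrives
	chAllReceived <- struct{}{}

	<-done
	fmt.Println("timed out:", timedOut.Load())
}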
syncer.mutValidatorsInfoForBlock.Unlock() + + body := &block.Body{} + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.TxBlock)) + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) + validatorsInfo := syncer.getAllValidatorsInfo(body) + + assert.Equal(t, 3, len(validatorsInfo)) + assert.Equal(t, svi1, validatorsInfo["a"]) + assert.Equal(t, svi2, validatorsInfo["b"]) + assert.Equal(t, svi3, validatorsInfo["c"]) +} + +func TestValidatorInfoProcessor_ComputeMissingValidatorsInfoShouldWork(t *testing.T) { + t.Parallel() + + svi1 := &state.ShardValidatorInfo{PublicKey: []byte("x")} + svi2 := &state.ShardValidatorInfo{PublicKey: []byte("y")} + + args := createDefaultArguments() + args.ValidatorsInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + if bytes.Equal(key, []byte("a")) { + return svi1, true + } + if bytes.Equal(key, []byte("b")) { + return svi2, true + } + return nil, false + }, + } + syncer, _ := NewPeerMiniBlockSyncer(args) + syncer.initValidatorsInfo() + + body := &block.Body{} + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.TxBlock)) + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) + syncer.computeMissingValidatorsInfo(body) + + syncer.mutValidatorsInfoForBlock.RLock() + assert.Equal(t, uint32(1), syncer.numMissingValidatorsInfo) + assert.Equal(t, 3, len(syncer.mapAllValidatorsInfo)) + assert.Equal(t, svi1, syncer.mapAllValidatorsInfo["a"]) + assert.Equal(t, svi2, syncer.mapAllValidatorsInfo["b"]) + assert.Nil(t, syncer.mapAllValidatorsInfo["c"]) + syncer.mutValidatorsInfoForBlock.RUnlock() +} + +func TestValidatorInfoProcessor_RetrieveMissingValidatorsInfo(t *testing.T) { + t.Parallel() + + t.Run("retrieve missing validators info without missing validators info", func(t *testing.T) { + t.Parallel() + + args := createDefaultArguments() + syncer, _ := NewPeerMiniBlockSyncer(args) + + missingValidatorsInfoHashes, err := syncer.retrieveMissingValidatorsInfo() + assert.Nil(t, missingValidatorsInfoHashes) + assert.Nil(t, err) + }) + + t.Run("retrieve missing validators info with not all validators info received", func(t *testing.T) { + t.Parallel() + + svi1 := &state.ShardValidatorInfo{PublicKey: []byte("x")} + svi2 := &state.ShardValidatorInfo{PublicKey: []byte("y")} + svi3 := &state.ShardValidatorInfo{PublicKey: []byte("z")} + + args := createDefaultArguments() + syncer, _ := NewPeerMiniBlockSyncer(args) + syncer.initValidatorsInfo() + syncer.requestHandler = &testscommon.RequestHandlerStub{ + RequestValidatorsInfoCalled: func(hashes [][]byte) { + syncer.mutValidatorsInfoForBlock.Lock() + for _, hash := range hashes { + if bytes.Equal(hash, []byte("a")) { + syncer.mapAllValidatorsInfo["a"] = svi1 + } + if bytes.Equal(hash, []byte("b")) { + syncer.mapAllValidatorsInfo["b"] = svi2 + } + if bytes.Equal(hash, []byte("c")) { + syncer.mapAllValidatorsInfo["c"] = svi3 + } + } + syncer.mutValidatorsInfoForBlock.Unlock() + }, + } + + syncer.mutValidatorsInfoForBlock.Lock() + syncer.mapAllValidatorsInfo["a"] = nil + syncer.mapAllValidatorsInfo["b"] = nil + syncer.mapAllValidatorsInfo["c"] = nil + syncer.mapAllValidatorsInfo["d"] = nil + syncer.mutValidatorsInfoForBlock.Unlock() + + missingValidatorsInfoHashes, err := syncer.retrieveMissingValidatorsInfo() + assert.Equal(t, process.ErrTimeIsOut, err) + require.Equal(t, 1, 
len(missingValidatorsInfoHashes)) + assert.Equal(t, []byte("d"), missingValidatorsInfoHashes[0]) + }) + + t.Run("retrieve missing validators info with all validators info received", func(t *testing.T) { + t.Parallel() + + svi1 := &state.ShardValidatorInfo{PublicKey: []byte("x")} + svi2 := &state.ShardValidatorInfo{PublicKey: []byte("y")} + svi3 := &state.ShardValidatorInfo{PublicKey: []byte("z")} + + args := createDefaultArguments() + syncer, _ := NewPeerMiniBlockSyncer(args) + syncer.initValidatorsInfo() + syncer.requestHandler = &testscommon.RequestHandlerStub{ + RequestValidatorsInfoCalled: func(hashes [][]byte) { + syncer.mutValidatorsInfoForBlock.Lock() + for _, hash := range hashes { + if bytes.Equal(hash, []byte("a")) { + syncer.mapAllValidatorsInfo["a"] = svi1 + } + if bytes.Equal(hash, []byte("b")) { + syncer.mapAllValidatorsInfo["b"] = svi2 + } + if bytes.Equal(hash, []byte("c")) { + syncer.mapAllValidatorsInfo["c"] = svi3 + } + } + syncer.mutValidatorsInfoForBlock.Unlock() + }, + } + + syncer.mutValidatorsInfoForBlock.Lock() + syncer.mapAllValidatorsInfo["a"] = nil + syncer.mapAllValidatorsInfo["b"] = nil + syncer.mapAllValidatorsInfo["c"] = nil + syncer.mutValidatorsInfoForBlock.Unlock() + + go func() { + time.Sleep(waitTime / 2) + syncer.chRcvAllValidatorsInfo <- struct{}{} + }() + + missingValidatorsInfoHashes, err := syncer.retrieveMissingValidatorsInfo() + + assert.Nil(t, err) + assert.Nil(t, missingValidatorsInfoHashes) + }) +} + +func TestValidatorInfoProcessor_GetAllMissingValidatorsInfoHashesShouldWork(t *testing.T) { + t.Parallel() + + svi1 := &state.ShardValidatorInfo{PublicKey: []byte("x")} + svi2 := &state.ShardValidatorInfo{PublicKey: []byte("y")} + svi3 := &state.ShardValidatorInfo{PublicKey: []byte("z")} + + args := createDefaultArguments() + syncer, _ := NewPeerMiniBlockSyncer(args) + syncer.initValidatorsInfo() + + syncer.mutValidatorsInfoForBlock.Lock() + syncer.mapAllValidatorsInfo["a"] = svi1 + syncer.mapAllValidatorsInfo["b"] = svi2 + syncer.mapAllValidatorsInfo["c"] = svi3 + syncer.mapAllValidatorsInfo["d"] = nil + syncer.mutValidatorsInfoForBlock.Unlock() + + missingValidatorsInfoHashes := syncer.getAllMissingValidatorsInfoHashes() + require.Equal(t, 1, len(missingValidatorsInfoHashes)) + assert.Equal(t, []byte("d"), missingValidatorsInfoHashes[0]) +} + +func createMockMiniBlock(senderShardID, receiverShardID uint32, blockType block.Type) *block.MiniBlock { + return &block.MiniBlock{ + SenderShardID: senderShardID, + ReceiverShardID: receiverShardID, + Type: blockType, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + } +} diff --git a/epochStart/shardchain/trigger.go b/epochStart/shardchain/trigger.go index acd31cf5de7..b19340c0c6d 100644 --- a/epochStart/shardchain/trigger.go +++ b/epochStart/shardchain/trigger.go @@ -36,7 +36,7 @@ var _ process.EpochStartTriggerHandler = (*trigger)(nil) var _ process.EpochBootstrapper = (*trigger)(nil) var _ closing.Closer = (*trigger)(nil) -// sleepTime defines the time in milliseconds between each iteration made in requestMissingMiniblocks method +// sleepTime defines the time in milliseconds between each iteration made in requestMissingMiniBlocks method const sleepTime = 1 * time.Second // ArgsShardEpochStartTrigger struct { defines the arguments needed for new start of epoch trigger @@ -54,6 +54,7 @@ type ArgsShardEpochStartTrigger struct { PeerMiniBlocksSyncer process.ValidatorInfoSyncer RoundHandler process.RoundHandler AppStatusHandler core.AppStatusHandler + EnableEpochsHandler 
common.EnableEpochsHandler Epoch uint32 Validity uint64 @@ -77,12 +78,14 @@ type trigger struct { mapEpochStartHdrs map[string]data.HeaderHandler mapFinalizedEpochs map[uint32]string - headersPool dataRetriever.HeadersPool - miniBlocksPool storage.Cacher - shardHdrStorage storage.Storer - metaHdrStorage storage.Storer - triggerStorage storage.Storer - metaNonceHdrStorage storage.Storer + headersPool dataRetriever.HeadersPool + miniBlocksPool storage.Cacher + validatorInfoPool dataRetriever.ShardedDataCacherNotifier + currentEpochValidatorInfoPool epochStart.ValidatorInfoCacher + shardHdrStorage storage.Storer + metaHdrStorage storage.Storer + triggerStorage storage.Storer + metaNonceHdrStorage storage.Storer uint64Converter typeConverters.Uint64ByteSliceConverter @@ -102,11 +105,14 @@ type trigger struct { peerMiniBlocksSyncer process.ValidatorInfoSyncer - appStatusHandler core.AppStatusHandler + appStatusHandler core.AppStatusHandler + enableEpochsHandler common.EnableEpochsHandler - mapMissingMiniblocks map[string]uint32 - mutMissingMiniblocks sync.RWMutex - cancelFunc func() + mapMissingMiniBlocks map[string]uint32 + mapMissingValidatorsInfo map[string]uint32 + mutMissingMiniBlocks sync.RWMutex + mutMissingValidatorsInfo sync.RWMutex + cancelFunc func() } type metaInfo struct { @@ -162,6 +168,15 @@ func NewEpochStartTrigger(args *ArgsShardEpochStartTrigger) (*trigger, error) { if check.IfNil(args.DataPool.Headers()) { return nil, epochStart.ErrNilMetaBlocksPool } + if check.IfNil(args.DataPool.MiniBlocks()) { + return nil, epochStart.ErrNilMiniBlockPool + } + if check.IfNil(args.DataPool.ValidatorsInfo()) { + return nil, epochStart.ErrNilValidatorsInfoPool + } + if check.IfNil(args.DataPool.CurrentEpochValidatorInfo()) { + return nil, epochStart.ErrNilCurrentEpochValidatorsInfoPool + } if check.IfNil(args.PeerMiniBlocksSyncer) { return nil, epochStart.ErrNilValidatorInfoProcessor } @@ -177,6 +192,9 @@ func NewEpochStartTrigger(args *ArgsShardEpochStartTrigger) (*trigger, error) { if check.IfNil(args.AppStatusHandler) { return nil, epochStart.ErrNilStatusHandler } + if check.IfNil(args.EnableEpochsHandler) { + return nil, epochStart.ErrNilEnableEpochsHandler + } metaHdrStorage, err := args.Storage.GetStorer(dataRetriever.MetaBlockUnit) if err != nil { @@ -201,39 +219,42 @@ func NewEpochStartTrigger(args *ArgsShardEpochStartTrigger) (*trigger, error) { trigggerStateKey := common.TriggerRegistryInitialKeyPrefix + fmt.Sprintf("%d", args.Epoch) t := &trigger{ - triggerStateKey: []byte(trigggerStateKey), - epoch: args.Epoch, - metaEpoch: args.Epoch, - currentRoundIndex: 0, - epochStartRound: 0, - epochFinalityAttestingRound: 0, - isEpochStart: false, - validity: args.Validity, - finality: args.Finality, - newEpochHdrReceived: false, - mutTrigger: sync.RWMutex{}, - mapHashHdr: make(map[string]data.HeaderHandler), - mapNonceHashes: make(map[uint64][]string), - mapEpochStartHdrs: make(map[string]data.HeaderHandler), - mapFinalizedEpochs: make(map[uint32]string), - headersPool: args.DataPool.Headers(), - miniBlocksPool: args.DataPool.MiniBlocks(), - metaHdrStorage: metaHdrStorage, - shardHdrStorage: shardHdrStorage, - triggerStorage: triggerStorage, - metaNonceHdrStorage: metaHdrNoncesStorage, - uint64Converter: args.Uint64Converter, - marshaller: args.Marshalizer, - hasher: args.Hasher, - headerValidator: args.HeaderValidator, - requestHandler: args.RequestHandler, - epochMetaBlockHash: nil, - epochStartNotifier: args.EpochStartNotifier, - epochStartMeta: &block.MetaBlock{}, - 
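NewEpochStartTrigger now validates the extra dependencies up front: the mini blocks pool, the validators info pool, the current-epoch validator info cacher and the enable-epochs handler must all be non-nil before the trigger is built. A hedged sketch of that fail-fast style, using placeholder interfaces and error values instead of the real epochStart ones:

package main

import (
	"errors"
	"fmt"
)

// Placeholder interfaces standing in for the real pool and handler types.
type cacher interface{ Has(key []byte) bool }
type shardedData interface{ SearchFirstData(key []byte) (interface{}, bool) }
type validatorInfoCacher interface{ GetValidatorInfo(hash []byte) (interface{}, error) }
type epochsHandler interface{ RefactorPeersMiniBlocksEnableEpoch() uint32 }

type triggerArgs struct {
	MiniBlocksPool            cacher
	ValidatorsInfoPool        shardedData
	CurrentEpochValidatorInfo validatorInfoCacher
	EnableEpochsHandler       epochsHandler
}

// newTrigger keeps only the fail-fast dependency checks: a mis-wired
// component should surface at construction time, not at epoch start.
func newTrigger(args triggerArgs) error {
	if args.MiniBlocksPool == nil {
		return errors.New("nil mini block pool")
	}
	if args.ValidatorsInfoPool == nil {
		return errors.New("nil validators info pool")
	}
	if args.CurrentEpochValidatorInfo == nil {
		return errors.New("nil current epoch validators info pool")
	}
	if args.EnableEpochsHandler == nil {
		return errors.New("nil enable epochs handler")
	}
	return nil
}

func main() {
	fmt.Println(newTrigger(triggerArgs{})) // nil mini block pool
}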
epochStartShardHeader: &block.Header{}, - peerMiniBlocksSyncer: args.PeerMiniBlocksSyncer, - appStatusHandler: args.AppStatusHandler, - roundHandler: args.RoundHandler, + triggerStateKey: []byte(trigggerStateKey), + epoch: args.Epoch, + metaEpoch: args.Epoch, + currentRoundIndex: 0, + epochStartRound: 0, + epochFinalityAttestingRound: 0, + isEpochStart: false, + validity: args.Validity, + finality: args.Finality, + newEpochHdrReceived: false, + mutTrigger: sync.RWMutex{}, + mapHashHdr: make(map[string]data.HeaderHandler), + mapNonceHashes: make(map[uint64][]string), + mapEpochStartHdrs: make(map[string]data.HeaderHandler), + mapFinalizedEpochs: make(map[uint32]string), + headersPool: args.DataPool.Headers(), + miniBlocksPool: args.DataPool.MiniBlocks(), + validatorInfoPool: args.DataPool.ValidatorsInfo(), + currentEpochValidatorInfoPool: args.DataPool.CurrentEpochValidatorInfo(), + metaHdrStorage: metaHdrStorage, + shardHdrStorage: shardHdrStorage, + triggerStorage: triggerStorage, + metaNonceHdrStorage: metaHdrNoncesStorage, + uint64Converter: args.Uint64Converter, + marshaller: args.Marshalizer, + hasher: args.Hasher, + headerValidator: args.HeaderValidator, + requestHandler: args.RequestHandler, + epochMetaBlockHash: nil, + epochStartNotifier: args.EpochStartNotifier, + epochStartMeta: &block.MetaBlock{}, + epochStartShardHeader: &block.Header{}, + peerMiniBlocksSyncer: args.PeerMiniBlocksSyncer, + appStatusHandler: args.AppStatusHandler, + roundHandler: args.RoundHandler, + enableEpochsHandler: args.EnableEpochsHandler, } t.headersPool.RegisterHandler(t.receivedMetaBlock) @@ -243,73 +264,154 @@ func NewEpochStartTrigger(args *ArgsShardEpochStartTrigger) (*trigger, error) { return nil, err } - t.mapMissingMiniblocks = make(map[string]uint32) + t.mapMissingMiniBlocks = make(map[string]uint32) + t.mapMissingValidatorsInfo = make(map[string]uint32) var ctx context.Context ctx, t.cancelFunc = context.WithCancel(context.Background()) - go t.requestMissingMiniblocks(ctx) + go t.requestMissingMiniBlocks(ctx) + go t.requestMissingValidatorsInfo(ctx) return t, nil } -func (t *trigger) clearMissingMiniblocksMap(epoch uint32) { - t.mutMissingMiniblocks.Lock() - defer t.mutMissingMiniblocks.Unlock() +func (t *trigger) clearMissingMiniBlocksMap(epoch uint32) { + t.mutMissingMiniBlocks.Lock() + defer t.mutMissingMiniBlocks.Unlock() - for hash, epochOfMissingMb := range t.mapMissingMiniblocks { + for hash, epochOfMissingMb := range t.mapMissingMiniBlocks { if epochOfMissingMb <= epoch { - delete(t.mapMissingMiniblocks, hash) + delete(t.mapMissingMiniBlocks, hash) + } + } +} + +func (t *trigger) clearMissingValidatorsInfoMap(epoch uint32) { + t.mutMissingValidatorsInfo.Lock() + defer t.mutMissingValidatorsInfo.Unlock() + + for hash, epochOfMissingValidatorInfo := range t.mapMissingValidatorsInfo { + if epochOfMissingValidatorInfo <= epoch { + delete(t.mapMissingValidatorsInfo, hash) + } + } +} + +func (t *trigger) requestMissingMiniBlocks(ctx context.Context) { + timer := time.NewTimer(sleepTime) + defer timer.Stop() + + for { + timer.Reset(sleepTime) + + select { + case <-ctx.Done(): + log.Debug("requestMissingMiniBlocks: trigger's go routine is stopping...") + return + case <-timer.C: + } + + t.mutMissingMiniBlocks.RLock() + if len(t.mapMissingMiniBlocks) == 0 { + t.mutMissingMiniBlocks.RUnlock() + continue + } + + missingMiniBlocks := make([][]byte, 0, len(t.mapMissingMiniBlocks)) + for hash, epoch := range t.mapMissingMiniBlocks { + missingMiniBlocks = append(missingMiniBlocks, []byte(hash)) + 
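Both request loops above share the same shape: sleep on a reusable timer, snapshot the missing hashes under a read lock, fire the request from a separate goroutine, wait a bounded interval, then reconcile against the pool. Resetting a single time.Timer instead of calling time.After on every iteration avoids allocating a fresh timer per round. A standalone sketch of that loop, with stand-in request and reconcile functions and placeholder intervals:

package main

import (
	"context"
	"fmt"
	"sync"
	"time"
)

const (
	pollInterval = 100 * time.Millisecond // stand-in for sleepTime
	waitInterval = 200 * time.Millisecond // stand-in for waitTime
)

type requester struct {
	mut     sync.RWMutex
	missing map[string]uint32 // hash -> epoch it was requested for
	request func(hashes [][]byte)
	update  func()
}

func (r *requester) run(ctx context.Context) {
	timer := time.NewTimer(pollInterval)
	defer timer.Stop()

	for {
		timer.Reset(pollInterval)
		select {
		case <-ctx.Done():
			return
		case <-timer.C:
		}

		r.mut.RLock()
		if len(r.missing) == 0 {
			r.mut.RUnlock()
			continue
		}
		hashes := make([][]byte, 0, len(r.missing))
		for hash := range r.missing {
			hashes = append(hashes, []byte(hash))
		}
		r.mut.RUnlock()

		go r.request(hashes)

		// give the network a bounded amount of time to answer, then reconcile
		timer.Reset(waitInterval)
		select {
		case <-ctx.Done():
			return
		case <-timer.C:
		}

		r.update()
	}
}

func main() {
	r := &requester{
		missing: map[string]uint32{"a": 1},
		request: func(hashes [][]byte) { fmt.Printf("requested %d hashes\n", len(hashes)) },
		update:  func() { fmt.Println("reconciled missing entries") },
	}
	ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond)
	defer cancel()
	r.run(ctx)
}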
log.Debug("trigger.requestMissingMiniBlocks", "epoch", epoch, "hash", []byte(hash)) + } + t.mutMissingMiniBlocks.RUnlock() + + go t.requestHandler.RequestMiniBlocks(core.MetachainShardId, missingMiniBlocks) + + timer.Reset(waitTime) + + select { + case <-ctx.Done(): + log.Debug("requestMissingMiniBlocks: trigger's go routine is stopping...") + return + case <-timer.C: } + + t.updateMissingMiniBlocks() } } -func (t *trigger) requestMissingMiniblocks(ctx context.Context) { +func (t *trigger) requestMissingValidatorsInfo(ctx context.Context) { + timer := time.NewTimer(sleepTime) + defer timer.Stop() + for { + timer.Reset(sleepTime) + select { case <-ctx.Done(): - log.Debug("trigger's go routine is stopping...") + log.Debug("requestMissingValidatorsInfo: trigger's go routine is stopping...") return - case <-time.After(sleepTime): + case <-timer.C: } - t.mutMissingMiniblocks.RLock() - if len(t.mapMissingMiniblocks) == 0 { - t.mutMissingMiniblocks.RUnlock() + t.mutMissingValidatorsInfo.RLock() + if len(t.mapMissingValidatorsInfo) == 0 { + t.mutMissingValidatorsInfo.RUnlock() continue } - missingMiniblocks := make([][]byte, 0, len(t.mapMissingMiniblocks)) - for hash := range t.mapMissingMiniblocks { - missingMiniblocks = append(missingMiniblocks, []byte(hash)) - log.Debug("trigger.requestMissingMiniblocks", "hash", []byte(hash)) + missingValidatorsInfo := make([][]byte, 0, len(t.mapMissingValidatorsInfo)) + for hash, epoch := range t.mapMissingValidatorsInfo { + missingValidatorsInfo = append(missingValidatorsInfo, []byte(hash)) + log.Debug("trigger.requestMissingValidatorsInfo", "epoch", epoch, "hash", []byte(hash)) } - t.mutMissingMiniblocks.RUnlock() + t.mutMissingValidatorsInfo.RUnlock() - go t.requestHandler.RequestMiniBlocks(core.MetachainShardId, missingMiniblocks) + go t.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) + + timer.Reset(waitTime) select { case <-ctx.Done(): - log.Debug("trigger's go routine is stopping...") + log.Debug("requestMissingValidatorsInfo: trigger's go routine is stopping...") return - case <-time.After(waitTime): + case <-timer.C: } - t.updateMissingMiniblocks() + t.updateMissingValidatorsInfo() } } -func (t *trigger) updateMissingMiniblocks() { - t.mutMissingMiniblocks.Lock() - for hash := range t.mapMissingMiniblocks { +func (t *trigger) updateMissingMiniBlocks() { + t.mutMissingMiniBlocks.Lock() + for hash := range t.mapMissingMiniBlocks { if t.miniBlocksPool.Has([]byte(hash)) { - delete(t.mapMissingMiniblocks, hash) + delete(t.mapMissingMiniBlocks, hash) } } - numMissingMiniblocks := len(t.mapMissingMiniblocks) - t.mutMissingMiniblocks.Unlock() + numMissingMiniBlocks := len(t.mapMissingMiniBlocks) + t.mutMissingMiniBlocks.Unlock() - if numMissingMiniblocks == 0 { - log.Debug("trigger.updateMissingMiniblocks -> updateTriggerFromMeta") + if numMissingMiniBlocks == 0 { + log.Debug("trigger.updateMissingMiniBlocks -> updateTriggerFromMeta") + t.mutTrigger.Lock() + t.updateTriggerFromMeta() + t.mutTrigger.Unlock() + } +} + +func (t *trigger) updateMissingValidatorsInfo() { + t.mutMissingValidatorsInfo.Lock() + for hash := range t.mapMissingValidatorsInfo { + _, isValidatorInfoFound := t.validatorInfoPool.SearchFirstData([]byte(hash)) + if isValidatorInfoFound { + delete(t.mapMissingValidatorsInfo, hash) + } + } + numMissingValidatorsInfo := len(t.mapMissingValidatorsInfo) + t.mutMissingValidatorsInfo.Unlock() + + if numMissingValidatorsInfo == 0 { + log.Debug("trigger.updateMissingValidatorsInfo -> updateTriggerFromMeta") t.mutTrigger.Lock() 
t.updateTriggerFromMeta() t.mutTrigger.Unlock() @@ -549,7 +651,8 @@ func (t *trigger) updateTriggerFromMeta() { log.Debug(display.Headline(msg, "", "#")) log.Debug("trigger.updateTriggerFromMeta", "isEpochStart", t.isEpochStart) logger.SetCorrelationEpoch(t.metaEpoch) - t.clearMissingMiniblocksMap(t.metaEpoch) + t.clearMissingMiniBlocksMap(t.metaEpoch) + t.clearMissingValidatorsInfoMap(t.metaEpoch) } // save all final-valid epoch start blocks @@ -645,26 +748,49 @@ func (t *trigger) checkIfTriggerCanBeActivated(hash string, metaHdr data.HeaderH return false, 0 } - missingMiniblocksHashes, blockBody, err := t.peerMiniBlocksSyncer.SyncMiniBlocks(metaHdr) + missingMiniBlocksHashes, blockBody, err := t.peerMiniBlocksSyncer.SyncMiniBlocks(metaHdr) if err != nil { - t.addMissingMiniblocks(metaHdr.GetEpoch(), missingMiniblocksHashes) - log.Warn("processMetablock failed", "error", err) + t.addMissingMiniBlocks(metaHdr.GetEpoch(), missingMiniBlocksHashes) + log.Debug("checkIfTriggerCanBeActivated.SyncMiniBlocks", "num missing mini blocks", len(missingMiniBlocksHashes), "error", err) return false, 0 } + if metaHdr.GetEpoch() >= t.enableEpochsHandler.RefactorPeersMiniBlocksEnableEpoch() { + missingValidatorsInfoHashes, validatorsInfo, err := t.peerMiniBlocksSyncer.SyncValidatorsInfo(blockBody) + if err != nil { + t.addMissingValidatorsInfo(metaHdr.GetEpoch(), missingValidatorsInfoHashes) + log.Debug("checkIfTriggerCanBeActivated.SyncValidatorsInfo", "num missing validators info", len(missingValidatorsInfoHashes), "error", err) + return false, 0 + } + + for validatorInfoHash, validatorInfo := range validatorsInfo { + t.currentEpochValidatorInfoPool.AddValidatorInfo([]byte(validatorInfoHash), validatorInfo) + } + } + t.epochStartNotifier.NotifyAllPrepare(metaHdr, blockBody) isMetaHdrFinal, finalityAttestingRound := t.isMetaBlockFinal(hash, metaHdr) return isMetaHdrFinal, finalityAttestingRound } -func (t *trigger) addMissingMiniblocks(epoch uint32, missingMiniblocksHashes [][]byte) { - t.mutMissingMiniblocks.Lock() - defer t.mutMissingMiniblocks.Unlock() +func (t *trigger) addMissingMiniBlocks(epoch uint32, missingMiniBlocksHashes [][]byte) { + t.mutMissingMiniBlocks.Lock() + defer t.mutMissingMiniBlocks.Unlock() + + for _, hash := range missingMiniBlocksHashes { + t.mapMissingMiniBlocks[string(hash)] = epoch + log.Debug("trigger.addMissingMiniBlocks", "epoch", epoch, "hash", hash) + } +} + +func (t *trigger) addMissingValidatorsInfo(epoch uint32, missingValidatorsInfoHashes [][]byte) { + t.mutMissingValidatorsInfo.Lock() + defer t.mutMissingValidatorsInfo.Unlock() - for _, hash := range missingMiniblocksHashes { - t.mapMissingMiniblocks[string(hash)] = epoch - log.Debug("trigger.addMissingMiniblocks", "epoch", epoch, "hash", hash) + for _, hash := range missingValidatorsInfoHashes { + t.mapMissingValidatorsInfo[string(hash)] = epoch + log.Debug("trigger.addMissingValidatorsInfo", "epoch", epoch, "hash", hash) } } diff --git a/epochStart/shardchain/triggerRegistry_test.go b/epochStart/shardchain/triggerRegistry_test.go index 6edcfe8283a..ed1cef599a4 100644 --- a/epochStart/shardchain/triggerRegistry_test.go +++ b/epochStart/shardchain/triggerRegistry_test.go @@ -48,9 +48,13 @@ func cloneTrigger(t *trigger) *trigger { rt.peerMiniBlocksSyncer = t.peerMiniBlocksSyncer rt.appStatusHandler = t.appStatusHandler rt.miniBlocksPool = t.miniBlocksPool - rt.mapMissingMiniblocks = t.mapMissingMiniblocks + rt.currentEpochValidatorInfoPool = t.currentEpochValidatorInfoPool + rt.validatorInfoPool = t.validatorInfoPool 
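checkIfTriggerCanBeActivated above is now two-phased: the peer mini blocks are synced exactly as before, and only from RefactorPeersMiniBlocksEnableEpoch onwards the validator info entries referenced by those mini blocks are synced as well and copied into the current-epoch cache before the prepare notification goes out. A simplified sketch of that gating, with illustrative mock types rather than the production interfaces:

package main

import (
	"fmt"
)

type mockHeader struct{ epoch uint32 }

func (m mockHeader) GetEpoch() uint32 { return m.epoch }

type body struct{ miniBlockHashes [][]byte }

// mockSyncer is a stand-in for the peer mini block syncer used by the trigger.
type mockSyncer struct{}

func (mockSyncer) SyncMiniBlocks(_ mockHeader) ([][]byte, *body, error) {
	return nil, &body{miniBlockHashes: [][]byte{[]byte("mb1")}}, nil
}

func (mockSyncer) SyncValidatorsInfo(_ *body) ([][]byte, map[string][]byte, error) {
	return nil, map[string][]byte{"viHash": []byte("serialized validator info")}, nil
}

type mapCache map[string][]byte

func (c mapCache) AddValidatorInfo(hash []byte, vi []byte) { c[string(hash)] = vi }

// canActivate mirrors the two-phase check: sync the peer mini blocks first
// and, only when the refactor is enabled for the header's epoch, also sync
// the referenced validator info entries into the current-epoch cache.
func canActivate(hdr mockHeader, s mockSyncer, cache mapCache, refactorEnableEpoch uint32) (bool, error) {
	_, blockBody, err := s.SyncMiniBlocks(hdr)
	if err != nil {
		return false, err
	}

	if hdr.GetEpoch() >= refactorEnableEpoch {
		_, validatorsInfo, err := s.SyncValidatorsInfo(blockBody)
		if err != nil {
			return false, err
		}
		for hash, vi := range validatorsInfo {
			cache.AddValidatorInfo([]byte(hash), vi)
		}
	}

	return true, nil
}

func main() {
	cache := mapCache{}
	ok, err := canActivate(mockHeader{epoch: 5}, mockSyncer{}, cache, 5)
	fmt.Println(ok, err, len(cache)) // true <nil> 1
}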
+ rt.mapMissingValidatorsInfo = t.mapMissingValidatorsInfo + rt.mapMissingMiniBlocks = t.mapMissingMiniBlocks rt.mapFinalizedEpochs = t.mapFinalizedEpochs rt.roundHandler = t.roundHandler + rt.enableEpochsHandler = t.enableEpochsHandler return rt } diff --git a/epochStart/shardchain/trigger_test.go b/epochStart/shardchain/trigger_test.go index e6ab2c12c30..04f761071a4 100644 --- a/epochStart/shardchain/trigger_test.go +++ b/epochStart/shardchain/trigger_test.go @@ -14,12 +14,14 @@ import ( "github.com/ElrondNetwork/elrond-go/dataRetriever" "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/epochStart/mock" + "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/storage" "github.com/ElrondNetwork/elrond-go/testscommon" dataRetrieverMock "github.com/ElrondNetwork/elrond-go/testscommon/dataRetriever" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" statusHandlerMock "github.com/ElrondNetwork/elrond-go/testscommon/statusHandler" storageStubs "github.com/ElrondNetwork/elrond-go/testscommon/storage" + vic "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -41,6 +43,9 @@ func createMockShardEpochStartTriggerArguments() *ArgsShardEpochStartTrigger { MiniBlocksCalled: func() storage.Cacher { return testscommon.NewCacherStub() }, + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &vic.ValidatorInfoCacherStub{} + }, }, Storage: &storageStubs.ChainStorerStub{ GetStorerCalled: func(unitType dataRetriever.UnitType) (storage.Storer, error) { @@ -59,6 +64,7 @@ func createMockShardEpochStartTriggerArguments() *ArgsShardEpochStartTrigger { PeerMiniBlocksSyncer: &mock.ValidatorInfoSyncerStub{}, RoundHandler: &mock.RoundHandlerStub{}, AppStatusHandler: &statusHandlerMock.AppStatusHandlerStub{}, + EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, } } @@ -229,6 +235,17 @@ func TestNewEpochStartTrigger_NilRoundHandlerShouldErr(t *testing.T) { assert.Equal(t, epochStart.ErrNilRoundHandler, err) } +func TestNewEpochStartTrigger_NilEnableEpochsHandlerShouldErr(t *testing.T) { + t.Parallel() + + args := createMockShardEpochStartTriggerArguments() + args.EnableEpochsHandler = nil + epochStartTrigger, err := NewEpochStartTrigger(args) + + assert.Nil(t, epochStartTrigger) + assert.Equal(t, epochStart.ErrNilEnableEpochsHandler, err) +} + func TestNewEpochStartTrigger_ShouldOk(t *testing.T) { t.Parallel() @@ -354,6 +371,9 @@ func TestTrigger_ReceivedHeaderIsEpochStartTrueWithPeerMiniblocks(t *testing.T) }, } }, + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &vic.ValidatorInfoCacherStub{} + }, } args.Uint64Converter = &mock.Uint64ByteSliceConverterMock{ ToByteSliceCalled: func(u uint64) []byte { @@ -586,3 +606,121 @@ func TestTrigger_ReceivedHeaderChangeEpochFinalityAttestingRound(t *testing.T) { epochStartTrigger.receivedMetaBlock(header103, hash103) require.Equal(t, uint64(102), epochStartTrigger.EpochFinalityAttestingRound()) } + +func TestTrigger_ClearMissingValidatorsInfoMapShouldWork(t *testing.T) { + t.Parallel() + + args := createMockShardEpochStartTriggerArguments() + epochStartTrigger, _ := NewEpochStartTrigger(args) + + epochStartTrigger.mutMissingValidatorsInfo.Lock() + epochStartTrigger.mapMissingValidatorsInfo["a"] = 0 + epochStartTrigger.mapMissingValidatorsInfo["b"] = 0 + epochStartTrigger.mapMissingValidatorsInfo["c"] = 1 + 
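The trigger tests that follow exercise the epoch-keyed bookkeeping: addMissingValidatorsInfo records each hash with the epoch that requested it, clearMissingValidatorsInfoMap drops everything requested for that epoch or an earlier one, and updateMissingValidatorsInfo removes whatever has meanwhile appeared in the pool. A compact stand-alone sketch of those three operations, with a plain lookup function standing in for the sharded data pool:

package main

import (
	"fmt"
	"sync"
)

// missingTracker is a simplified stand-in for the trigger's epoch-keyed maps.
type missingTracker struct {
	mut     sync.RWMutex
	missing map[string]uint32 // hash -> epoch
	lookup  func(hash []byte) (interface{}, bool)
}

func (m *missingTracker) add(epoch uint32, hashes [][]byte) {
	m.mut.Lock()
	defer m.mut.Unlock()
	for _, hash := range hashes {
		m.missing[string(hash)] = epoch
	}
}

// clear drops every entry requested for the given epoch or an earlier one.
func (m *missingTracker) clear(epoch uint32) {
	m.mut.Lock()
	defer m.mut.Unlock()
	for hash, e := range m.missing {
		if e <= epoch {
			delete(m.missing, hash)
		}
	}
}

// reconcile removes entries that have meanwhile arrived in the pool and
// reports how many are still missing.
func (m *missingTracker) reconcile() int {
	m.mut.Lock()
	defer m.mut.Unlock()
	for hash := range m.missing {
		if _, found := m.lookup([]byte(hash)); found {
			delete(m.missing, hash)
		}
	}
	return len(m.missing)
}

func main() {
	mt := &missingTracker{
		missing: make(map[string]uint32),
		lookup: func(hash []byte) (interface{}, bool) {
			return nil, string(hash) == "a" // only "a" is already in the pool
		},
	}
	mt.add(1, [][]byte{[]byte("a"), []byte("b")})
	fmt.Println(mt.reconcile()) // 1 ("b" is still missing)
	mt.clear(1)
	fmt.Println(len(mt.missing)) // 0
}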
epochStartTrigger.mapMissingValidatorsInfo["d"] = 1 + epochStartTrigger.mutMissingValidatorsInfo.Unlock() + + epochStartTrigger.mutMissingValidatorsInfo.RLock() + numMissingValidatorsInfo := len(epochStartTrigger.mapMissingValidatorsInfo) + epochStartTrigger.mutMissingValidatorsInfo.RUnlock() + assert.Equal(t, 4, numMissingValidatorsInfo) + + epochStartTrigger.clearMissingValidatorsInfoMap(0) + + epochStartTrigger.mutMissingValidatorsInfo.RLock() + numMissingValidatorsInfo = len(epochStartTrigger.mapMissingValidatorsInfo) + epochStartTrigger.mutMissingValidatorsInfo.RUnlock() + assert.Equal(t, 2, numMissingValidatorsInfo) + + assert.Equal(t, uint32(1), epochStartTrigger.mapMissingValidatorsInfo["c"]) + assert.Equal(t, uint32(1), epochStartTrigger.mapMissingValidatorsInfo["d"]) +} + +func TestTrigger_UpdateMissingValidatorsInfo(t *testing.T) { + t.Parallel() + + t.Run("update missing validators when there are no missing validators", func(t *testing.T) { + t.Parallel() + + args := createMockShardEpochStartTriggerArguments() + epochStartTrigger, _ := NewEpochStartTrigger(args) + + epochStartTrigger.updateMissingValidatorsInfo() + + epochStartTrigger.mutMissingValidatorsInfo.RLock() + assert.Equal(t, 0, len(epochStartTrigger.mapMissingValidatorsInfo)) + epochStartTrigger.mutMissingValidatorsInfo.RUnlock() + }) + + t.Run("update missing validators when there are missing validators", func(t *testing.T) { + t.Parallel() + + svi1 := &state.ShardValidatorInfo{PublicKey: []byte("x")} + svi2 := &state.ShardValidatorInfo{PublicKey: []byte("y")} + + args := createMockShardEpochStartTriggerArguments() + + args.DataPool = &dataRetrieverMock.PoolsHolderStub{ + HeadersCalled: func() dataRetriever.HeadersPool { + return &mock.HeadersCacherStub{} + }, + MiniBlocksCalled: func() storage.Cacher { + return testscommon.NewCacherStub() + }, + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &vic.ValidatorInfoCacherStub{} + }, + ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { + return &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + if bytes.Equal(key, []byte("a")) { + return svi1, true + } + if bytes.Equal(key, []byte("b")) { + return svi2, true + } + + return nil, false + }, + } + }, + } + + epochStartTrigger, _ := NewEpochStartTrigger(args) + + epochStartTrigger.mutMissingValidatorsInfo.Lock() + epochStartTrigger.mapMissingValidatorsInfo["a"] = 1 + epochStartTrigger.mapMissingValidatorsInfo["b"] = 1 + epochStartTrigger.mapMissingValidatorsInfo["c"] = 1 + epochStartTrigger.mutMissingValidatorsInfo.Unlock() + + epochStartTrigger.updateMissingValidatorsInfo() + + epochStartTrigger.mutMissingValidatorsInfo.RLock() + assert.Equal(t, 1, len(epochStartTrigger.mapMissingValidatorsInfo)) + assert.Equal(t, uint32(1), epochStartTrigger.mapMissingValidatorsInfo["c"]) + epochStartTrigger.mutMissingValidatorsInfo.RUnlock() + }) +} + +func TestTrigger_AddMissingValidatorsInfo(t *testing.T) { + t.Parallel() + + args := createMockShardEpochStartTriggerArguments() + epochStartTrigger, _ := NewEpochStartTrigger(args) + + missingValidatorsInfoHashes := [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + } + + epochStartTrigger.addMissingValidatorsInfo(1, missingValidatorsInfoHashes) + + epochStartTrigger.mutMissingValidatorsInfo.RLock() + assert.Equal(t, 3, len(epochStartTrigger.mapMissingValidatorsInfo)) + assert.Equal(t, uint32(1), epochStartTrigger.mapMissingValidatorsInfo["a"]) + assert.Equal(t, uint32(1), 
epochStartTrigger.mapMissingValidatorsInfo["b"]) + assert.Equal(t, uint32(1), epochStartTrigger.mapMissingValidatorsInfo["c"]) + epochStartTrigger.mutMissingValidatorsInfo.RUnlock() +} diff --git a/factory/blockProcessorCreator.go b/factory/blockProcessorCreator.go index 2babc8f0227..d9fcfd9b7ca 100644 --- a/factory/blockProcessorCreator.go +++ b/factory/blockProcessorCreator.go @@ -756,12 +756,18 @@ func (pcf *processComponentsFactory) newMetaBlockProcessor( return nil, err } + validatorInfoStorage, err := pcf.data.StorageService().GetStorer(dataRetriever.UnsignedTransactionUnit) + if err != nil { + return nil, err + } argsEpochValidatorInfo := metachainEpochStart.ArgsNewValidatorInfoCreator{ - ShardCoordinator: pcf.bootstrapComponents.ShardCoordinator(), - MiniBlockStorage: miniBlockStorage, - Hasher: pcf.coreData.Hasher(), - Marshalizer: pcf.coreData.InternalMarshalizer(), - DataPool: pcf.data.Datapool(), + ShardCoordinator: pcf.bootstrapComponents.ShardCoordinator(), + ValidatorInfoStorage: validatorInfoStorage, + MiniBlockStorage: miniBlockStorage, + Hasher: pcf.coreData.Hasher(), + Marshalizer: pcf.coreData.InternalMarshalizer(), + DataPool: pcf.data.Datapool(), + EnableEpochsHandler: pcf.coreData.EnableEpochsHandler(), } validatorInfoCreator, err := metachainEpochStart.NewValidatorInfoCreator(argsEpochValidatorInfo) if err != nil { diff --git a/factory/processComponents.go b/factory/processComponents.go index 804aac60b59..072fff453ce 100644 --- a/factory/processComponents.go +++ b/factory/processComponents.go @@ -734,8 +734,9 @@ func (pcf *processComponentsFactory) newEpochStartTrigger(requestHandler epochSt } argsPeerMiniBlockSyncer := shardchain.ArgPeerMiniBlockSyncer{ - MiniBlocksPool: pcf.data.Datapool().MiniBlocks(), - Requesthandler: requestHandler, + MiniBlocksPool: pcf.data.Datapool().MiniBlocks(), + ValidatorsInfoPool: pcf.data.Datapool().ValidatorsInfo(), + RequestHandler: requestHandler, } peerMiniBlockSyncer, err := shardchain.NewPeerMiniBlockSyncer(argsPeerMiniBlockSyncer) @@ -758,6 +759,7 @@ func (pcf *processComponentsFactory) newEpochStartTrigger(requestHandler epochSt PeerMiniBlocksSyncer: peerMiniBlockSyncer, RoundHandler: pcf.coreData.RoundHandler(), AppStatusHandler: pcf.coreData.StatusHandler(), + EnableEpochsHandler: pcf.coreData.EnableEpochsHandler(), } epochStartTrigger, err := shardchain.NewEpochStartTrigger(argEpochStart) if err != nil { @@ -778,6 +780,7 @@ func (pcf *processComponentsFactory) newEpochStartTrigger(requestHandler epochSt Marshalizer: pcf.coreData.InternalMarshalizer(), Hasher: pcf.coreData.Hasher(), AppStatusHandler: pcf.coreData.StatusHandler(), + DataPool: pcf.data.Datapool(), } epochStartTrigger, err := metachain.NewEpochStartTrigger(argEpochStart) if err != nil { diff --git a/factory/shardingFactory.go b/factory/shardingFactory.go index 60dbb89ad39..9dc44483fd6 100644 --- a/factory/shardingFactory.go +++ b/factory/shardingFactory.go @@ -105,6 +105,7 @@ func CreateNodesCoordinator( chanNodeStop chan endProcess.ArgEndProcess, nodeTypeProvider core.NodeTypeProviderHandler, enableEpochsHandler common.EnableEpochsHandler, + validatorInfoCacher epochStart.ValidatorInfoCacher, ) (nodesCoordinator.NodesCoordinator, error) { if chanNodeStop == nil { return nil, nodesCoordinator.ErrNilNodeStopChannel @@ -194,6 +195,7 @@ func CreateNodesCoordinator( NodeTypeProvider: nodeTypeProvider, IsFullArchive: prefsConfig.FullArchive, EnableEpochsHandler: enableEpochsHandler, + ValidatorInfoCacher: validatorInfoCacher, } baseNodesCoordinator, err := 
nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/genesis/process/disabled/requestHandler.go b/genesis/process/disabled/requestHandler.go index 2265f19ff37..64f110205ac 100644 --- a/genesis/process/disabled/requestHandler.go +++ b/genesis/process/disabled/requestHandler.go @@ -86,6 +86,14 @@ func (r *RequestHandler) RequestPeerAuthenticationsChunk(_ uint32, _ uint32) { func (r *RequestHandler) RequestPeerAuthenticationsByHashes(_ uint32, _ [][]byte) { } +// RequestValidatorInfo does nothing +func (r *RequestHandler) RequestValidatorInfo(_ []byte) { +} + +// RequestValidatorsInfo does nothing +func (r *RequestHandler) RequestValidatorsInfo(_ [][]byte) { +} + // IsInterfaceNil returns true if there is no value under the interface func (r *RequestHandler) IsInterfaceNil() bool { return r == nil diff --git a/genesis/process/shardGenesisBlockCreator.go b/genesis/process/shardGenesisBlockCreator.go index 92663ff80b2..965fee00387 100644 --- a/genesis/process/shardGenesisBlockCreator.go +++ b/genesis/process/shardGenesisBlockCreator.go @@ -123,6 +123,7 @@ func createGenesisConfig() config.EnableEpochs { HeartbeatDisableEpoch: unreachableEpoch, MiniBlockPartialExecutionEnableEpoch: unreachableEpoch, ESDTMetadataContinuousCleanupEnableEpoch: unreachableEpoch, + RefactorPeersMiniBlocksEnableEpoch: unreachableEpoch, } } diff --git a/integrationTests/consensus/testInitializer.go b/integrationTests/consensus/testInitializer.go index 95636bf00c4..df0bfe6062d 100644 --- a/integrationTests/consensus/testInitializer.go +++ b/integrationTests/consensus/testInitializer.go @@ -52,6 +52,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" "github.com/ElrondNetwork/elrond-go/testscommon/shardingMocks" statusHandlerMock "github.com/ElrondNetwork/elrond-go/testscommon/statusHandler" + vic "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/ElrondNetwork/elrond-go/trie" "github.com/ElrondNetwork/elrond-go/trie/hashesHolder" vmcommon "github.com/ElrondNetwork/elrond-vm-common" @@ -335,6 +336,8 @@ func createConsensusOnlyNode( syncer, 0) + dataPool := dataRetrieverMock.CreatePoolsHolder(1, 0) + argsNewMetaEpochStart := &metachain.ArgsNewMetaEpochStartTrigger{ GenesisTime: time.Unix(startTime, 0), EpochStartNotifier: notifier.NewEpochStartSubscriptionHandler(), @@ -347,6 +350,7 @@ func createConsensusOnlyNode( Marshalizer: testMarshalizer, Hasher: testHasher, AppStatusHandler: &statusHandlerMock.AppStatusHandlerStub{}, + DataPool: dataPool, } epochStartTrigger, _ := metachain.NewEpochStartTrigger(argsNewMetaEpochStart) @@ -442,7 +446,7 @@ func createConsensusOnlyNode( dataComponents := integrationTests.GetDefaultDataComponents() dataComponents.BlockChain = blockChain - dataComponents.DataPool = dataRetrieverMock.CreatePoolsHolder(1, 0) + dataComponents.DataPool = dataPool dataComponents.Store = createTestStore() stateComponents := integrationTests.GetDefaultStateComponents() @@ -532,6 +536,7 @@ func createNodes( EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ IsWaitingListFixFlagEnabledField: true, }, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nodesCoord, _ := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/integrationTests/factory/consensusComponents/consensusComponents_test.go b/integrationTests/factory/consensusComponents/consensusComponents_test.go index d532e695691..826947140f6 100644 --- 
a/integrationTests/factory/consensusComponents/consensusComponents_test.go +++ b/integrationTests/factory/consensusComponents/consensusComponents_test.go @@ -67,6 +67,7 @@ func TestConsensusComponents_Close_ShouldWork(t *testing.T) { managedCoreComponents.ChanStopNodeProcess(), managedCoreComponents.NodeTypeProvider(), managedCoreComponents.EnableEpochsHandler(), + managedDataComponents.Datapool().CurrentEpochValidatorInfo(), ) require.Nil(t, err) managedStatusComponents, err := nr.CreateManagedStatusComponents( diff --git a/integrationTests/factory/processComponents/processComponents_test.go b/integrationTests/factory/processComponents/processComponents_test.go index c800775403a..6581c618da2 100644 --- a/integrationTests/factory/processComponents/processComponents_test.go +++ b/integrationTests/factory/processComponents/processComponents_test.go @@ -68,6 +68,7 @@ func TestProcessComponents_Close_ShouldWork(t *testing.T) { managedCoreComponents.ChanStopNodeProcess(), managedCoreComponents.NodeTypeProvider(), managedCoreComponents.EnableEpochsHandler(), + managedDataComponents.Datapool().CurrentEpochValidatorInfo(), ) require.Nil(t, err) managedStatusComponents, err := nr.CreateManagedStatusComponents( diff --git a/integrationTests/factory/statusComponents/statusComponents_test.go b/integrationTests/factory/statusComponents/statusComponents_test.go index 39bde603b0c..5f167c8291d 100644 --- a/integrationTests/factory/statusComponents/statusComponents_test.go +++ b/integrationTests/factory/statusComponents/statusComponents_test.go @@ -68,6 +68,7 @@ func TestStatusComponents_Create_Close_ShouldWork(t *testing.T) { managedCoreComponents.ChanStopNodeProcess(), managedCoreComponents.NodeTypeProvider(), managedCoreComponents.EnableEpochsHandler(), + managedDataComponents.Datapool().CurrentEpochValidatorInfo(), ) require.Nil(t, err) managedStatusComponents, err := nr.CreateManagedStatusComponents( diff --git a/integrationTests/mock/epochRewardsCreatorStub.go b/integrationTests/mock/epochRewardsCreatorStub.go index 5302875ec54..986b91158bf 100644 --- a/integrationTests/mock/epochRewardsCreatorStub.go +++ b/integrationTests/mock/epochRewardsCreatorStub.go @@ -17,13 +17,13 @@ type EpochRewardsCreatorStub struct { VerifyRewardsMiniBlocksCalled func( metaBlock data.MetaHeaderHandler, validatorsInfo map[uint32][]*state.ValidatorInfo, computedEconomics *block.Economics, ) error - CreateMarshalizedDataCalled func(body *block.Body) map[string][][]byte - SaveTxBlockToStorageCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) - DeleteTxsFromStorageCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) - RemoveBlockDataFromPoolsCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) - GetRewardsTxsCalled func(body *block.Body) map[string]data.TransactionHandler - GetProtocolSustainCalled func() *big.Int - GetLocalTxCacheCalled func() epochStart.TransactionCacher + CreateMarshalledDataCalled func(body *block.Body) map[string][][]byte + SaveBlockDataToStorageCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) + DeleteBlockDataFromStorageCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) + RemoveBlockDataFromPoolsCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) + GetRewardsTxsCalled func(body *block.Body) map[string]data.TransactionHandler + GetProtocolSustainCalled func() *big.Int + GetLocalTxCacheCalled func() epochStart.TransactionCacher } // GetProtocolSustainabilityRewards - @@ -54,7 +54,7 @@ func (e *EpochRewardsCreatorStub) 
CreateRewardsMiniBlocks( return nil, nil } -// GetRewardsTxs -- +// GetRewardsTxs - func (e *EpochRewardsCreatorStub) GetRewardsTxs(body *block.Body) map[string]data.TransactionHandler { if e.GetRewardsTxsCalled != nil { return e.GetRewardsTxsCalled(body) @@ -74,25 +74,25 @@ func (e *EpochRewardsCreatorStub) VerifyRewardsMiniBlocks( return nil } -// CreateMarshalizedData - -func (e *EpochRewardsCreatorStub) CreateMarshalizedData(body *block.Body) map[string][][]byte { - if e.CreateMarshalizedDataCalled != nil { - return e.CreateMarshalizedDataCalled(body) +// CreateMarshalledData - +func (e *EpochRewardsCreatorStub) CreateMarshalledData(body *block.Body) map[string][][]byte { + if e.CreateMarshalledDataCalled != nil { + return e.CreateMarshalledDataCalled(body) } return nil } -// SaveTxBlockToStorage - -func (e *EpochRewardsCreatorStub) SaveTxBlockToStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { - if e.SaveTxBlockToStorageCalled != nil { - e.SaveTxBlockToStorageCalled(metaBlock, body) +// SaveBlockDataToStorage - +func (e *EpochRewardsCreatorStub) SaveBlockDataToStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { + if e.SaveBlockDataToStorageCalled != nil { + e.SaveBlockDataToStorageCalled(metaBlock, body) } } -// DeleteTxsFromStorage - -func (e *EpochRewardsCreatorStub) DeleteTxsFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { - if e.DeleteTxsFromStorageCalled != nil { - e.DeleteTxsFromStorageCalled(metaBlock, body) +// DeleteBlockDataFromStorage - +func (e *EpochRewardsCreatorStub) DeleteBlockDataFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { + if e.DeleteBlockDataFromStorageCalled != nil { + e.DeleteBlockDataFromStorageCalled(metaBlock, body) } } diff --git a/integrationTests/mock/epochValidatorInfoCreatorStub.go b/integrationTests/mock/epochValidatorInfoCreatorStub.go index 3533131a117..6cf8318f6a1 100644 --- a/integrationTests/mock/epochValidatorInfoCreatorStub.go +++ b/integrationTests/mock/epochValidatorInfoCreatorStub.go @@ -3,6 +3,7 @@ package mock import ( "github.com/ElrondNetwork/elrond-go-core/data" "github.com/ElrondNetwork/elrond-go-core/data/block" + "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/state" ) @@ -10,9 +11,11 @@ import ( type EpochValidatorInfoCreatorStub struct { CreateValidatorInfoMiniBlocksCalled func(validatorsInfo map[uint32][]*state.ValidatorInfo) (block.MiniBlockSlice, error) VerifyValidatorInfoMiniBlocksCalled func(miniblocks []*block.MiniBlock, validatorsInfo map[uint32][]*state.ValidatorInfo) error - CreateMarshalizedDataCalled func(body block.Body) map[string][][]byte - SaveTxBlockToStorageCalled func(metaBlock data.HeaderHandler, body *block.Body) - DeleteTxsFromStorageCalled func(metaBlock data.HeaderHandler) + GetLocalValidatorInfoCacheCalled func() epochStart.ValidatorInfoCacher + CreateMarshalledDataCalled func(body *block.Body) map[string][][]byte + GetValidatorInfoTxsCalled func(body *block.Body) map[string]*state.ShardValidatorInfo + SaveBlockDataToStorageCalled func(metaBlock data.HeaderHandler, body *block.Body) + DeleteBlockDataFromStorageCalled func(metaBlock data.HeaderHandler, body *block.Body) RemoveBlockDataFromPoolsCalled func(metaBlock data.HeaderHandler, body *block.Body) } @@ -25,24 +28,48 @@ func (e *EpochValidatorInfoCreatorStub) CreateValidatorInfoMiniBlocks(validatorI } // VerifyValidatorInfoMiniBlocks - -func (e *EpochValidatorInfoCreatorStub) VerifyValidatorInfoMiniBlocks(miniblocks []*block.MiniBlock, validatorsInfo 
map[uint32][]*state.ValidatorInfo) error { +func (e *EpochValidatorInfoCreatorStub) VerifyValidatorInfoMiniBlocks(miniBlocks []*block.MiniBlock, validatorsInfo map[uint32][]*state.ValidatorInfo) error { if e.VerifyValidatorInfoMiniBlocksCalled != nil { - return e.VerifyValidatorInfoMiniBlocksCalled(miniblocks, validatorsInfo) + return e.VerifyValidatorInfoMiniBlocksCalled(miniBlocks, validatorsInfo) } return nil } -// SaveValidatorInfoBlocksToStorage - -func (e *EpochValidatorInfoCreatorStub) SaveValidatorInfoBlocksToStorage(metaBlock data.HeaderHandler, body *block.Body) { - if e.SaveTxBlockToStorageCalled != nil { - e.SaveTxBlockToStorageCalled(metaBlock, body) +// GetLocalValidatorInfoCache - +func (e *EpochValidatorInfoCreatorStub) GetLocalValidatorInfoCache() epochStart.ValidatorInfoCacher { + if e.GetLocalValidatorInfoCacheCalled != nil { + return e.GetLocalValidatorInfoCacheCalled() + } + return nil +} + +// CreateMarshalledData - +func (e *EpochValidatorInfoCreatorStub) CreateMarshalledData(body *block.Body) map[string][][]byte { + if e.CreateMarshalledDataCalled != nil { + return e.CreateMarshalledDataCalled(body) + } + return nil +} + +// GetValidatorInfoTxs - +func (e *EpochValidatorInfoCreatorStub) GetValidatorInfoTxs(body *block.Body) map[string]*state.ShardValidatorInfo { + if e.GetValidatorInfoTxsCalled != nil { + return e.GetValidatorInfoTxsCalled(body) + } + return nil +} + +// SaveBlockDataToStorage - +func (e *EpochValidatorInfoCreatorStub) SaveBlockDataToStorage(metaBlock data.HeaderHandler, body *block.Body) { + if e.SaveBlockDataToStorageCalled != nil { + e.SaveBlockDataToStorageCalled(metaBlock, body) } } -// DeleteValidatorInfoBlocksFromStorage - -func (e *EpochValidatorInfoCreatorStub) DeleteValidatorInfoBlocksFromStorage(metaBlock data.HeaderHandler) { - if e.DeleteTxsFromStorageCalled != nil { - e.DeleteTxsFromStorageCalled(metaBlock) +// DeleteBlockDataFromStorage - +func (e *EpochValidatorInfoCreatorStub) DeleteBlockDataFromStorage(metaBlock data.HeaderHandler, body *block.Body) { + if e.DeleteBlockDataFromStorageCalled != nil { + e.DeleteBlockDataFromStorageCalled(metaBlock, body) } } diff --git a/integrationTests/mock/intermediateTransactionHandlerMock.go b/integrationTests/mock/intermediateTransactionHandlerMock.go index 66c9b858a85..36eff30f3bd 100644 --- a/integrationTests/mock/intermediateTransactionHandlerMock.go +++ b/integrationTests/mock/intermediateTransactionHandlerMock.go @@ -13,7 +13,7 @@ type IntermediateTransactionHandlerMock struct { VerifyInterMiniBlocksCalled func(body *block.Body) error SaveCurrentIntermediateTxToStorageCalled func() CreateBlockStartedCalled func() - CreateMarshalizedDataCalled func(txHashes [][]byte) ([][]byte, error) + CreateMarshalledDataCalled func(txHashes [][]byte) ([][]byte, error) GetAllCurrentFinishedTxsCalled func() map[string]data.TransactionHandler RemoveProcessedResultsCalled func(key []byte) [][]byte InitProcessedResultsCalled func(key []byte) @@ -48,12 +48,12 @@ func (ith *IntermediateTransactionHandlerMock) GetAllCurrentFinishedTxs() map[st return nil } -// CreateMarshalizedData - -func (ith *IntermediateTransactionHandlerMock) CreateMarshalizedData(txHashes [][]byte) ([][]byte, error) { - if ith.CreateMarshalizedDataCalled == nil { +// CreateMarshalledData - +func (ith *IntermediateTransactionHandlerMock) CreateMarshalledData(txHashes [][]byte) ([][]byte, error) { + if ith.CreateMarshalledDataCalled == nil { return nil, nil } - return ith.CreateMarshalizedDataCalled(txHashes) + return 
ith.CreateMarshalledDataCalled(txHashes) } // AddIntermediateTransactions - diff --git a/integrationTests/multiShard/endOfEpoch/startInEpoch/startInEpoch_test.go b/integrationTests/multiShard/endOfEpoch/startInEpoch/startInEpoch_test.go index 7c20d389ded..296bcc634b6 100644 --- a/integrationTests/multiShard/endOfEpoch/startInEpoch/startInEpoch_test.go +++ b/integrationTests/multiShard/endOfEpoch/startInEpoch/startInEpoch_test.go @@ -63,6 +63,7 @@ func testNodeStartsInEpoch(t *testing.T, shardID uint32, expectedHighestRound ui StakingV2EnableEpoch: integrationTests.UnreachableEpoch, ScheduledMiniBlocksEnableEpoch: integrationTests.UnreachableEpoch, MiniBlockPartialExecutionEnableEpoch: integrationTests.UnreachableEpoch, + RefactorPeersMiniBlocksEnableEpoch: integrationTests.UnreachableEpoch, } nodes := integrationTests.CreateNodesWithEnableEpochs( diff --git a/integrationTests/nodesCoordinatorFactory.go b/integrationTests/nodesCoordinatorFactory.go index 518bb535e14..0a3cc6193e3 100644 --- a/integrationTests/nodesCoordinatorFactory.go +++ b/integrationTests/nodesCoordinatorFactory.go @@ -11,6 +11,7 @@ import ( "github.com/ElrondNetwork/elrond-go/storage" "github.com/ElrondNetwork/elrond-go/testscommon" "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" + vic "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" ) // ArgIndexHashedNodesCoordinatorFactory - @@ -69,7 +70,10 @@ func (tpn *IndexHashedNodesCoordinatorFactory) CreateNodesCoordinator(arg ArgInd ChanStopNode: endProcess.GetDummyEndProcessChannel(), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, - EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, + EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ + RefactorPeersMiniBlocksEnableEpochField: UnreachableEpoch, + }, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) if err != nil { @@ -124,8 +128,10 @@ func (ihncrf *IndexHashedNodesCoordinatorWithRaterFactory) CreateNodesCoordinato NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ - IsWaitingListFixFlagEnabledField: true, + IsWaitingListFixFlagEnabledField: true, + RefactorPeersMiniBlocksEnableEpochField: UnreachableEpoch, }, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } baseCoordinator, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/integrationTests/testHeartbeatNode.go b/integrationTests/testHeartbeatNode.go index b0828accbd2..5ea1c37fadd 100644 --- a/integrationTests/testHeartbeatNode.go +++ b/integrationTests/testHeartbeatNode.go @@ -46,6 +46,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon/p2pmocks" "github.com/ElrondNetwork/elrond-go/testscommon/shardingMocks" trieMock "github.com/ElrondNetwork/elrond-go/testscommon/trie" + vic "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/ElrondNetwork/elrond-go/update" "github.com/stretchr/testify/require" ) @@ -296,6 +297,7 @@ func CreateNodesWithTestHeartbeatNode( NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nodesCoordinatorInstance, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) log.LogIfError(err) @@ -341,6 +343,7 
@@ func CreateNodesWithTestHeartbeatNode( NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nodesCoordinatorInstance, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) log.LogIfError(err) diff --git a/integrationTests/testP2PNode.go b/integrationTests/testP2PNode.go index a2201126e6c..b3354b1af61 100644 --- a/integrationTests/testP2PNode.go +++ b/integrationTests/testP2PNode.go @@ -32,6 +32,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" "github.com/ElrondNetwork/elrond-go/testscommon/p2pmocks" "github.com/ElrondNetwork/elrond-go/testscommon/shardingMocks" + vic "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/ElrondNetwork/elrond-go/update/trigger" ) @@ -331,6 +332,7 @@ func CreateNodesWithTestP2PNodes( nodesMap := make(map[uint32][]*TestP2PNode) cacherCfg := storageUnit.CacheConfig{Capacity: 10000, Type: storageUnit.LRUCache, Shards: 1} cache, _ := storageUnit.NewCache(cacherCfg) + for shardId, validatorList := range validatorsMap { argumentsNodesCoordinator := nodesCoordinator.ArgNodesCoordinator{ ShardConsensusGroupSize: shardConsensusGroupSize, @@ -352,6 +354,7 @@ func CreateNodesWithTestP2PNodes( NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) log.LogIfError(err) @@ -397,6 +400,7 @@ func CreateNodesWithTestP2PNodes( NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) log.LogIfError(err) diff --git a/integrationTests/testProcessorNode.go b/integrationTests/testProcessorNode.go index c751d2e6a65..7ee8c8c22c0 100644 --- a/integrationTests/testProcessorNode.go +++ b/integrationTests/testProcessorNode.go @@ -1106,6 +1106,7 @@ func (tpn *TestProcessorNode) initInterceptors(heartbeatPk string) { Marshalizer: TestMarshalizer, Hasher: TestHasher, AppStatusHandler: &statusHandlerMock.AppStatusHandlerStub{}, + DataPool: tpn.DataPool, } epochStartTrigger, _ := metachain.NewEpochStartTrigger(argsEpochStart) tpn.EpochStartTrigger = &metachain.TestTrigger{} @@ -1150,8 +1151,9 @@ func (tpn *TestProcessorNode) initInterceptors(heartbeatPk string) { } } else { argsPeerMiniBlocksSyncer := shardchain.ArgPeerMiniBlockSyncer{ - MiniBlocksPool: tpn.DataPool.MiniBlocks(), - Requesthandler: tpn.RequestHandler, + MiniBlocksPool: tpn.DataPool.MiniBlocks(), + ValidatorsInfoPool: tpn.DataPool.ValidatorsInfo(), + RequestHandler: tpn.RequestHandler, } peerMiniBlockSyncer, _ := shardchain.NewPeerMiniBlockSyncer(argsPeerMiniBlocksSyncer) argsShardEpochStart := &shardchain.ArgsShardEpochStartTrigger{ @@ -1169,6 +1171,7 @@ func (tpn *TestProcessorNode) initInterceptors(heartbeatPk string) { PeerMiniBlocksSyncer: peerMiniBlockSyncer, RoundHandler: tpn.RoundHandler, AppStatusHandler: &statusHandlerMock.AppStatusHandlerStub{}, + EnableEpochsHandler: tpn.EnableEpochsHandler, } epochStartTrigger, _ := shardchain.NewEpochStartTrigger(argsShardEpochStart) tpn.EpochStartTrigger = 
&shardchain.TestTrigger{} @@ -1970,6 +1973,7 @@ func (tpn *TestProcessorNode) initBlockProcessor(stateCheckpointModulus uint) { Marshalizer: TestMarshalizer, Hasher: TestHasher, AppStatusHandler: &statusHandlerMock.AppStatusHandlerStub{}, + DataPool: tpn.DataPool, } epochStartTrigger, _ := metachain.NewEpochStartTrigger(argsEpochStart) tpn.EpochStartTrigger = &metachain.TestTrigger{} @@ -2050,12 +2054,15 @@ func (tpn *TestProcessorNode) initBlockProcessor(stateCheckpointModulus uint) { } epochStartRewards, _ := metachain.NewRewardsCreatorProxy(argsEpochRewards) + validatorInfoStorage, _ := tpn.Storage.GetStorer(dataRetriever.UnsignedTransactionUnit) argsEpochValidatorInfo := metachain.ArgsNewValidatorInfoCreator{ - ShardCoordinator: tpn.ShardCoordinator, - MiniBlockStorage: miniBlockStorage, - Hasher: TestHasher, - Marshalizer: TestMarshalizer, - DataPool: tpn.DataPool, + ShardCoordinator: tpn.ShardCoordinator, + ValidatorInfoStorage: validatorInfoStorage, + MiniBlockStorage: miniBlockStorage, + Hasher: TestHasher, + Marshalizer: TestMarshalizer, + DataPool: tpn.DataPool, + EnableEpochsHandler: tpn.EnableEpochsHandler, } epochStartValidatorInfo, _ := metachain.NewValidatorInfoCreator(argsEpochValidatorInfo) argsEpochSystemSC := metachain.ArgsNewEpochStartSystemSCProcessing{ @@ -2095,8 +2102,9 @@ func (tpn *TestProcessorNode) initBlockProcessor(stateCheckpointModulus uint) { } else { if check.IfNil(tpn.EpochStartTrigger) { argsPeerMiniBlocksSyncer := shardchain.ArgPeerMiniBlockSyncer{ - MiniBlocksPool: tpn.DataPool.MiniBlocks(), - Requesthandler: tpn.RequestHandler, + MiniBlocksPool: tpn.DataPool.MiniBlocks(), + ValidatorsInfoPool: tpn.DataPool.ValidatorsInfo(), + RequestHandler: tpn.RequestHandler, } peerMiniBlocksSyncer, _ := shardchain.NewPeerMiniBlockSyncer(argsPeerMiniBlocksSyncer) argsShardEpochStart := &shardchain.ArgsShardEpochStartTrigger{ @@ -2114,6 +2122,7 @@ func (tpn *TestProcessorNode) initBlockProcessor(stateCheckpointModulus uint) { PeerMiniBlocksSyncer: peerMiniBlocksSyncer, RoundHandler: tpn.RoundHandler, AppStatusHandler: &statusHandlerMock.AppStatusHandlerStub{}, + EnableEpochsHandler: tpn.EnableEpochsHandler, } epochStartTrigger, _ := shardchain.NewEpochStartTrigger(argsShardEpochStart) tpn.EpochStartTrigger = &shardchain.TestTrigger{} @@ -2952,6 +2961,7 @@ func CreateEnableEpochsConfig() config.EnableEpochs { CheckCorrectTokenIDForTransferRoleEnableEpoch: UnreachableEpoch, HeartbeatDisableEpoch: UnreachableEpoch, MiniBlockPartialExecutionEnableEpoch: UnreachableEpoch, + RefactorPeersMiniBlocksEnableEpoch: UnreachableEpoch, } } diff --git a/integrationTests/testProcessorNodeWithCoordinator.go b/integrationTests/testProcessorNodeWithCoordinator.go index 0c08dc19cdd..e573900ef86 100644 --- a/integrationTests/testProcessorNodeWithCoordinator.go +++ b/integrationTests/testProcessorNodeWithCoordinator.go @@ -15,6 +15,7 @@ import ( "github.com/ElrondNetwork/elrond-go/sharding/nodesCoordinator" "github.com/ElrondNetwork/elrond-go/storage/lrucache" "github.com/ElrondNetwork/elrond-go/testscommon" + vic "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" ) type nodeKeys struct { @@ -74,6 +75,7 @@ func CreateProcessorNodesWithNodesCoordinator( ChanStopNode: endProcess.GetDummyEndProcessChannel(), IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nodesCoordinatorInstance, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git 
a/integrationTests/testProcessorNodeWithMultisigner.go b/integrationTests/testProcessorNodeWithMultisigner.go index 68f89112870..f8964412285 100644 --- a/integrationTests/testProcessorNodeWithMultisigner.go +++ b/integrationTests/testProcessorNodeWithMultisigner.go @@ -31,6 +31,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon" "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" "github.com/ElrondNetwork/elrond-go/testscommon/shardingMocks" + vic "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" ) // CreateNodesWithNodesCoordinator returns a map with nodes per shard each using a real nodes coordinator @@ -174,6 +175,7 @@ func CreateNodeWithBLSAndTxKeys( DelegationSmartContractEnableEpoch: 1, ScheduledMiniBlocksEnableEpoch: UnreachableEpoch, MiniBlockPartialExecutionEnableEpoch: UnreachableEpoch, + RefactorPeersMiniBlocksEnableEpoch: UnreachableEpoch, } return CreateNode( @@ -230,6 +232,7 @@ func CreateNodesWithNodesCoordinatorFactory( StakingV2EnableEpoch: UnreachableEpoch, ScheduledMiniBlocksEnableEpoch: UnreachableEpoch, MiniBlockPartialExecutionEnableEpoch: UnreachableEpoch, + RefactorPeersMiniBlocksEnableEpoch: UnreachableEpoch, } nodesMap := make(map[uint32][]*TestProcessorNode) @@ -426,6 +429,7 @@ func CreateNodesWithNodesCoordinatorAndHeaderSigVerifier( NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nodesCoordinatorInstance, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) @@ -540,6 +544,7 @@ func CreateNodesWithNodesCoordinatorKeygenAndSingleSigner( NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/integrationTests/testSyncNode.go b/integrationTests/testSyncNode.go index 68480aea03f..054d2e4e9a8 100644 --- a/integrationTests/testSyncNode.go +++ b/integrationTests/testSyncNode.go @@ -43,7 +43,9 @@ func (tpn *TestProcessorNode) initBlockProcessorWithSync() { coreComponents.HasherField = TestHasher coreComponents.Uint64ByteSliceConverterField = TestUint64Converter coreComponents.EpochNotifierField = tpn.EpochNotifier - coreComponents.EnableEpochsHandlerField = &testscommon.EnableEpochsHandlerStub{} + coreComponents.EnableEpochsHandlerField = &testscommon.EnableEpochsHandlerStub{ + RefactorPeersMiniBlocksEnableEpochField: UnreachableEpoch, + } dataComponents := GetDefaultDataComponents() dataComponents.Store = tpn.Storage @@ -88,7 +90,7 @@ func (tpn *TestProcessorNode) initBlockProcessorWithSync() { GasHandler: tpn.GasHandler, ScheduledTxsExecutionHandler: &testscommon.ScheduledTxsExecutionStub{}, ProcessedMiniBlocksTracker: &testscommon.ProcessedMiniBlocksTrackerStub{}, - ReceiptsRepository: &testscommon.ReceiptsRepositoryStub{}, + ReceiptsRepository: &testscommon.ReceiptsRepositoryStub{}, } if tpn.ShardCoordinator.SelfId() == core.MetachainShardId { diff --git a/integrationTests/vm/delegation/delegationScenarios_test.go b/integrationTests/vm/delegation/delegationScenarios_test.go index 693d772b386..cf772a496a8 100644 --- a/integrationTests/vm/delegation/delegationScenarios_test.go +++ b/integrationTests/vm/delegation/delegationScenarios_test.go @@ -1246,7 +1246,7 @@ func 
addRewardsToDelegation(tpn *integrationTests.TestProcessorNode, recvAddr [] }, } - txCacher := dataPool.NewCurrentBlockPool() + txCacher := dataPool.NewCurrentBlockTransactionsPool() txCacher.AddTx(rewardTxHash, tx) _ = tpn.EpochStartSystemSCProcessor.ProcessDelegationRewards(mbSlice, txCacher) diff --git a/node/nodeMemoryConfig_test.go b/node/nodeMemoryConfig_test.go index ac353de4917..2ac27f707c9 100644 --- a/node/nodeMemoryConfig_test.go +++ b/node/nodeMemoryConfig_test.go @@ -44,6 +44,7 @@ func TestMemoryConfig(t *testing.T) { plannedMemory += nodeConfig.UnsignedTransactionDataPool.SizeInBytes * uint64(numShardsIncludingMeta*(numShardsIncludingMeta-1)) / 2 // One cache for each pair (meta, shard) plannedMemory += nodeConfig.RewardTransactionDataPool.SizeInBytes * uint64(numShards) + plannedMemory += nodeConfig.ValidatorInfoPool.SizeInBytes require.LessOrEqual(t, int(plannedMemory), 3000*core.MegabyteSize) } diff --git a/node/nodeRunner.go b/node/nodeRunner.go index 20221ca91ec..d66a5acc8da 100644 --- a/node/nodeRunner.go +++ b/node/nodeRunner.go @@ -183,6 +183,7 @@ func printEnableEpochs(configs *config.Configs) { log.Debug(readEpochFor("disable heartbeat v1"), "epoch", enableEpochs.HeartbeatDisableEpoch) log.Debug(readEpochFor("mini block partial execution"), "epoch", enableEpochs.MiniBlockPartialExecutionEnableEpoch) log.Debug(readEpochFor("set sender in eei output transfer"), "epoch", enableEpochs.SetSenderInEeiOutputTransferEnableEpoch) + log.Debug(readEpochFor("refactor peers mini blocks"), "epoch", enableEpochs.RefactorPeersMiniBlocksEnableEpoch) gasSchedule := configs.EpochConfig.GasSchedule log.Debug(readEpochFor("gas schedule directories paths"), "epoch", gasSchedule.GasScheduleByEpochs) @@ -343,6 +344,7 @@ func (nr *nodeRunner) executeOneComponentCreationCycle( managedCoreComponents.ChanStopNodeProcess(), managedCoreComponents.NodeTypeProvider(), managedCoreComponents.EnableEpochsHandler(), + managedDataComponents.Datapool().CurrentEpochValidatorInfo(), ) if err != nil { return true, err diff --git a/process/block/export_test.go b/process/block/export_test.go index 8c58c102f79..f1616c4182c 100644 --- a/process/block/export_test.go +++ b/process/block/export_test.go @@ -147,15 +147,15 @@ func NewShardProcessorEmptyWith3shards( return nil }, }, - BlockTracker: mock.NewBlockTrackerMock(shardCoordinator, genesisBlocks), - BlockSizeThrottler: &mock.BlockSizeThrottlerStub{}, - Version: "softwareVersion", - HistoryRepository: &dblookupext.HistoryRepositoryStub{}, + BlockTracker: mock.NewBlockTrackerMock(shardCoordinator, genesisBlocks), + BlockSizeThrottler: &mock.BlockSizeThrottlerStub{}, + Version: "softwareVersion", + HistoryRepository: &dblookupext.HistoryRepositoryStub{}, EnableRoundsHandler: &testscommon.EnableRoundsHandlerStub{}, - GasHandler: &mock.GasHandlerMock{}, - ScheduledTxsExecutionHandler: &testscommon.ScheduledTxsExecutionStub{}, - ProcessedMiniBlocksTracker: &testscommon.ProcessedMiniBlocksTrackerStub{}, - ReceiptsRepository: &testscommon.ReceiptsRepositoryStub{}, + GasHandler: &mock.GasHandlerMock{}, + ScheduledTxsExecutionHandler: &testscommon.ScheduledTxsExecutionStub{}, + ProcessedMiniBlocksTracker: &testscommon.ProcessedMiniBlocksTrackerStub{}, + ReceiptsRepository: &testscommon.ReceiptsRepositoryStub{}, }, } shardProc, err := NewShardProcessor(arguments) @@ -537,3 +537,7 @@ func (sp *shardProcessor) RollBackProcessedMiniBlocksInfo(headerHandler data.Hea func (bp *baseProcessor) CheckConstructionStateAndIndexesCorrectness(mbh data.MiniBlockHeaderHandler) error 
{ return checkConstructionStateAndIndexesCorrectness(mbh) } + +func (mp *metaProcessor) GetAllMarshalledTxs(body *block.Body) map[string][][]byte { + return mp.getAllMarshalledTxs(body) +} diff --git a/process/block/metablock.go b/process/block/metablock.go index 254899e2d22..73141649284 100644 --- a/process/block/metablock.go +++ b/process/block/metablock.go @@ -97,45 +97,45 @@ func NewMetaProcessor(arguments ArgMetaProcessor) (*metaProcessor, error) { genesisHdr := arguments.DataComponents.Blockchain().GetGenesisHeader() base := &baseProcessor{ - accountsDB: arguments.AccountsDB, - blockSizeThrottler: arguments.BlockSizeThrottler, - forkDetector: arguments.ForkDetector, - hasher: arguments.CoreComponents.Hasher(), - marshalizer: arguments.CoreComponents.InternalMarshalizer(), - store: arguments.DataComponents.StorageService(), - shardCoordinator: arguments.BootstrapComponents.ShardCoordinator(), - feeHandler: arguments.FeeHandler, - nodesCoordinator: arguments.NodesCoordinator, - uint64Converter: arguments.CoreComponents.Uint64ByteSliceConverter(), - requestHandler: arguments.RequestHandler, - appStatusHandler: arguments.CoreComponents.StatusHandler(), - blockChainHook: arguments.BlockChainHook, - txCoordinator: arguments.TxCoordinator, - epochStartTrigger: arguments.EpochStartTrigger, - headerValidator: arguments.HeaderValidator, - roundHandler: arguments.CoreComponents.RoundHandler(), - bootStorer: arguments.BootStorer, - blockTracker: arguments.BlockTracker, - dataPool: arguments.DataComponents.Datapool(), - blockChain: arguments.DataComponents.Blockchain(), - stateCheckpointModulus: arguments.Config.StateTriesConfig.CheckpointRoundsModulus, - outportHandler: arguments.StatusComponents.OutportHandler(), - genesisNonce: genesisHdr.GetNonce(), - versionedHeaderFactory: arguments.BootstrapComponents.VersionedHeaderFactory(), - headerIntegrityVerifier: arguments.BootstrapComponents.HeaderIntegrityVerifier(), - historyRepo: arguments.HistoryRepository, + accountsDB: arguments.AccountsDB, + blockSizeThrottler: arguments.BlockSizeThrottler, + forkDetector: arguments.ForkDetector, + hasher: arguments.CoreComponents.Hasher(), + marshalizer: arguments.CoreComponents.InternalMarshalizer(), + store: arguments.DataComponents.StorageService(), + shardCoordinator: arguments.BootstrapComponents.ShardCoordinator(), + feeHandler: arguments.FeeHandler, + nodesCoordinator: arguments.NodesCoordinator, + uint64Converter: arguments.CoreComponents.Uint64ByteSliceConverter(), + requestHandler: arguments.RequestHandler, + appStatusHandler: arguments.CoreComponents.StatusHandler(), + blockChainHook: arguments.BlockChainHook, + txCoordinator: arguments.TxCoordinator, + epochStartTrigger: arguments.EpochStartTrigger, + headerValidator: arguments.HeaderValidator, + roundHandler: arguments.CoreComponents.RoundHandler(), + bootStorer: arguments.BootStorer, + blockTracker: arguments.BlockTracker, + dataPool: arguments.DataComponents.Datapool(), + blockChain: arguments.DataComponents.Blockchain(), + stateCheckpointModulus: arguments.Config.StateTriesConfig.CheckpointRoundsModulus, + outportHandler: arguments.StatusComponents.OutportHandler(), + genesisNonce: genesisHdr.GetNonce(), + versionedHeaderFactory: arguments.BootstrapComponents.VersionedHeaderFactory(), + headerIntegrityVerifier: arguments.BootstrapComponents.HeaderIntegrityVerifier(), + historyRepo: arguments.HistoryRepository, epochNotifier: arguments.CoreComponents.EpochNotifier(), enableEpochsHandler: arguments.CoreComponents.EnableEpochsHandler(), - 
enableRoundsHandler: arguments.EnableRoundsHandler, - vmContainerFactory: arguments.VMContainersFactory, - vmContainer: arguments.VmContainer, - processDataTriesOnCommitEpoch: arguments.Config.Debug.EpochStart.ProcessDataTrieOnCommitEpoch, - gasConsumedProvider: arguments.GasHandler, - economicsData: arguments.CoreComponents.EconomicsData(), - scheduledTxsExecutionHandler: arguments.ScheduledTxsExecutionHandler, - pruningDelay: pruningDelay, - processedMiniBlocksTracker: arguments.ProcessedMiniBlocksTracker, - receiptsRepository: arguments.ReceiptsRepository, + enableRoundsHandler: arguments.EnableRoundsHandler, + vmContainerFactory: arguments.VMContainersFactory, + vmContainer: arguments.VmContainer, + processDataTriesOnCommitEpoch: arguments.Config.Debug.EpochStart.ProcessDataTrieOnCommitEpoch, + gasConsumedProvider: arguments.GasHandler, + economicsData: arguments.CoreComponents.EconomicsData(), + scheduledTxsExecutionHandler: arguments.ScheduledTxsExecutionHandler, + pruningDelay: pruningDelay, + processedMiniBlocksTracker: arguments.ProcessedMiniBlocksTracker, + receiptsRepository: arguments.ReceiptsRepository, } mp := metaProcessor{ @@ -1326,6 +1326,7 @@ func (mp *metaProcessor) CommitBlock( mp.blockTracker.CleanupInvalidCrossHeaders(header.Epoch, header.Round) } + // TODO: Should be sent also validatorInfoTxs alongside rewardsTxs -> mp.validatorInfoCreator.GetValidatorInfoTxs(body) ? mp.indexBlock(header, headerHash, body, lastMetaBlock, notarizedHeadersHashes, rewardsTxs) mp.recordBlockInHistory(headerHash, headerHandler, bodyHandler) @@ -1592,8 +1593,8 @@ func (mp *metaProcessor) getRewardsTxs(header *block.MetaBlock, body *block.Body func (mp *metaProcessor) commitEpochStart(header *block.MetaBlock, body *block.Body) { if header.IsStartOfEpochBlock() { mp.epochStartTrigger.SetProcessed(header, body) - go mp.epochRewardsCreator.SaveTxBlockToStorage(header, body) - go mp.validatorInfoCreator.SaveValidatorInfoBlocksToStorage(header, body) + go mp.epochRewardsCreator.SaveBlockDataToStorage(header, body) + go mp.validatorInfoCreator.SaveBlockDataToStorage(header, body) } else { currentHeader := mp.blockChain.GetCurrentBlockHeader() if !check.IfNil(currentHeader) && currentHeader.IsStartOfEpochBlock() { @@ -2394,7 +2395,7 @@ func (mp *metaProcessor) MarshalizedDataToBroadcast( var mrsTxs map[string][][]byte if hdr.IsStartOfEpochBlock() { - mrsTxs = mp.epochRewardsCreator.CreateMarshalizedData(body) + mrsTxs = mp.getAllMarshalledTxs(body) } else { mrsTxs = mp.txCoordinator.CreateMarshalizedData(body) } @@ -2421,6 +2422,25 @@ func (mp *metaProcessor) MarshalizedDataToBroadcast( return mrsData, mrsTxs, nil } +func (mp *metaProcessor) getAllMarshalledTxs(body *block.Body) map[string][][]byte { + allMarshalledTxs := make(map[string][][]byte) + + marshalledRewardsTxs := mp.epochRewardsCreator.CreateMarshalledData(body) + marshalledValidatorInfoTxs := mp.validatorInfoCreator.CreateMarshalledData(body) + + for topic, marshalledTxs := range marshalledRewardsTxs { + allMarshalledTxs[topic] = append(allMarshalledTxs[topic], marshalledTxs...) + log.Trace("metaProcessor.getAllMarshalledTxs", "topic", topic, "num rewards txs", len(marshalledTxs)) + } + + for topic, marshalledTxs := range marshalledValidatorInfoTxs { + allMarshalledTxs[topic] = append(allMarshalledTxs[topic], marshalledTxs...) 
+ log.Trace("metaProcessor.getAllMarshalledTxs", "topic", topic, "num validator info txs", len(marshalledTxs)) + } + + return allMarshalledTxs +} + func getTxCount(shardInfo []data.ShardDataHandler) uint32 { txs := uint32(0) for i := 0; i < len(shardInfo); i++ { diff --git a/process/block/metablock_test.go b/process/block/metablock_test.go index d8441337bd1..2b683516ed7 100644 --- a/process/block/metablock_test.go +++ b/process/block/metablock_test.go @@ -130,13 +130,13 @@ func createMockMetaArguments( return nil }, }, - BlockTracker: mock.NewBlockTrackerMock(bootstrapComponents.ShardCoordinator(), startHeaders), - BlockSizeThrottler: &mock.BlockSizeThrottlerStub{}, - HistoryRepository: &dblookupext.HistoryRepositoryStub{}, + BlockTracker: mock.NewBlockTrackerMock(bootstrapComponents.ShardCoordinator(), startHeaders), + BlockSizeThrottler: &mock.BlockSizeThrottlerStub{}, + HistoryRepository: &dblookupext.HistoryRepositoryStub{}, EnableRoundsHandler: &testscommon.EnableRoundsHandlerStub{}, - ScheduledTxsExecutionHandler: &testscommon.ScheduledTxsExecutionStub{}, - ProcessedMiniBlocksTracker: &testscommon.ProcessedMiniBlocksTrackerStub{}, - ReceiptsRepository: &testscommon.ReceiptsRepositoryStub{}, + ScheduledTxsExecutionHandler: &testscommon.ScheduledTxsExecutionStub{}, + ProcessedMiniBlocksTracker: &testscommon.ProcessedMiniBlocksTrackerStub{}, + ReceiptsRepository: &testscommon.ReceiptsRepositoryStub{}, }, SCToProtocol: &mock.SCToProtocolStub{}, PendingMiniBlocksHandler: &mock.PendingMiniBlocksHandlerStub{}, @@ -3547,3 +3547,87 @@ func TestMetaProcessor_getFinalMiniBlockHashes(t *testing.T) { assert.Equal(t, expectedMbHeaders, retMbHeaders) }) } + +func TestMetaProcessor_getAllMarshalledTxs(t *testing.T) { + t.Parallel() + + arguments := createMockMetaArguments(createMockComponentHolders()) + + arguments.EpochRewardsCreator = &mock.EpochRewardsCreatorStub{ + CreateMarshalledDataCalled: func(body *block.Body) map[string][][]byte { + marshalledData := make(map[string][][]byte) + for _, miniBlock := range body.MiniBlocks { + if miniBlock.Type != block.RewardsBlock { + continue + } + marshalledData["rewards"] = append(marshalledData["rewards"], miniBlock.TxHashes...) + } + return marshalledData + }, + } + + arguments.EpochValidatorInfoCreator = &mock.EpochValidatorInfoCreatorStub{ + CreateMarshalledDataCalled: func(body *block.Body) map[string][][]byte { + marshalledData := make(map[string][][]byte) + for _, miniBlock := range body.MiniBlocks { + if miniBlock.Type != block.PeerBlock { + continue + } + marshalledData["validatorInfo"] = append(marshalledData["validatorInfo"], miniBlock.TxHashes...) 
+ } + return marshalledData + }, + } + + mp, _ := blproc.NewMetaProcessor(arguments) + + body := &block.Body{ + MiniBlocks: []*block.MiniBlock{ + { + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + Type: block.TxBlock, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + }, + { + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + Type: block.RewardsBlock, + TxHashes: [][]byte{ + []byte("d"), + []byte("e"), + []byte("f"), + }, + }, + { + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + Type: block.PeerBlock, + TxHashes: [][]byte{ + []byte("g"), + []byte("h"), + []byte("i"), + }, + }, + }, + } + + allMarshalledTxs := mp.GetAllMarshalledTxs(body) + + require.Equal(t, 2, len(allMarshalledTxs)) + + require.Equal(t, 3, len(allMarshalledTxs["rewards"])) + require.Equal(t, 3, len(allMarshalledTxs["validatorInfo"])) + + assert.Equal(t, []byte("d"), allMarshalledTxs["rewards"][0]) + assert.Equal(t, []byte("e"), allMarshalledTxs["rewards"][1]) + assert.Equal(t, []byte("f"), allMarshalledTxs["rewards"][2]) + + assert.Equal(t, []byte("g"), allMarshalledTxs["validatorInfo"][0]) + assert.Equal(t, []byte("h"), allMarshalledTxs["validatorInfo"][1]) + assert.Equal(t, []byte("i"), allMarshalledTxs["validatorInfo"][2]) +} diff --git a/process/block/postprocess/basePostProcess.go b/process/block/postprocess/basePostProcess.go index 9c5ff8fd2a0..1418bed12ca 100644 --- a/process/block/postprocess/basePostProcess.go +++ b/process/block/postprocess/basePostProcess.go @@ -80,8 +80,8 @@ func (bpp *basePostProcessor) CreateBlockStarted() { bpp.mutInterResultsForBlock.Unlock() } -// CreateMarshalizedData creates the marshalized data for broadcasting purposes -func (bpp *basePostProcessor) CreateMarshalizedData(txHashes [][]byte) ([][]byte, error) { +// CreateMarshalledData creates the marshalled data for broadcasting purposes +func (bpp *basePostProcessor) CreateMarshalledData(txHashes [][]byte) ([][]byte, error) { bpp.mutInterResultsForBlock.Lock() defer bpp.mutInterResultsForBlock.Unlock() @@ -89,7 +89,7 @@ func (bpp *basePostProcessor) CreateMarshalizedData(txHashes [][]byte) ([][]byte for _, txHash := range txHashes { txInfoObject := bpp.interResultsForBlock[string(txHash)] if txInfoObject == nil || check.IfNil(txInfoObject.tx) { - log.Warn("basePostProcessor.CreateMarshalizedData: tx not found", "hash", txHash) + log.Warn("basePostProcessor.CreateMarshalledData: tx not found", "hash", txHash) continue } diff --git a/process/block/postprocess/intermediateResults_test.go b/process/block/postprocess/intermediateResults_test.go index a2d62e610be..ae02241f385 100644 --- a/process/block/postprocess/intermediateResults_test.go +++ b/process/block/postprocess/intermediateResults_test.go @@ -900,12 +900,12 @@ func TestIntermediateResultsProcessor_CreateMarshalizedDataNothingToMarshal(t *t assert.Nil(t, err) // nothing to marshal - mrsTxs, err := irp.CreateMarshalizedData(nil) + mrsTxs, err := irp.CreateMarshalledData(nil) assert.Nil(t, err) assert.Equal(t, 0, len(mrsTxs)) // nothing saved in local cacher to marshal - mrsTxs, err = irp.CreateMarshalizedData(nil) + mrsTxs, err = irp.CreateMarshalledData(nil) assert.Nil(t, err) assert.Equal(t, 0, len(mrsTxs)) } @@ -966,7 +966,7 @@ func TestIntermediateResultsProcessor_CreateMarshalizedData(t *testing.T) { err = irp.AddIntermediateTransactions(txs) assert.Nil(t, err) - mrsTxs, err := irp.CreateMarshalizedData(txHashes) + mrsTxs, err := irp.CreateMarshalledData(txHashes) assert.Nil(t, err) assert.Equal(t, len(txs), 
len(mrsTxs)) diff --git a/process/block/preprocess/basePreProcess.go b/process/block/preprocess/basePreProcess.go index f40b4da67ec..8c50ab8a359 100644 --- a/process/block/preprocess/basePreProcess.go +++ b/process/block/preprocess/basePreProcess.go @@ -201,7 +201,7 @@ func (bpp *basePreProcess) removeMiniBlocksFromPools( return nil } -func (bpp *basePreProcess) createMarshalizedData(txHashes [][]byte, forBlock *txsForBlock) ([][]byte, error) { +func (bpp *basePreProcess) createMarshalledData(txHashes [][]byte, forBlock *txsForBlock) ([][]byte, error) { mrsTxs := make([][]byte, 0, len(txHashes)) for _, txHash := range txHashes { forBlock.mutTxsForBlock.RLock() @@ -209,7 +209,7 @@ func (bpp *basePreProcess) createMarshalizedData(txHashes [][]byte, forBlock *tx forBlock.mutTxsForBlock.RUnlock() if txInfoFromMap == nil || check.IfNil(txInfoFromMap.tx) { - log.Warn("basePreProcess.createMarshalizedData: tx not found", "hash", txHash) + log.Warn("basePreProcess.createMarshalledData: tx not found", "hash", txHash) continue } @@ -220,7 +220,7 @@ func (bpp *basePreProcess) createMarshalizedData(txHashes [][]byte, forBlock *tx mrsTxs = append(mrsTxs, txMrs) } - log.Trace("basePreProcess.createMarshalizedData", + log.Trace("basePreProcess.createMarshalledData", "num txs", len(mrsTxs), ) diff --git a/process/block/preprocess/rewardTxPreProcessor.go b/process/block/preprocess/rewardTxPreProcessor.go index 8f3da4d0449..aafb42dab67 100644 --- a/process/block/preprocess/rewardTxPreProcessor.go +++ b/process/block/preprocess/rewardTxPreProcessor.go @@ -5,7 +5,6 @@ import ( "github.com/ElrondNetwork/elrond-go-core/core" "github.com/ElrondNetwork/elrond-go-core/core/check" - "github.com/ElrondNetwork/elrond-go-core/core/sliceUtil" "github.com/ElrondNetwork/elrond-go-core/data" "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go-core/data/rewardTx" @@ -176,28 +175,11 @@ func (rtp *rewardTxPreprocessor) RestoreBlockDataIntoPools( continue } - strCache := process.ShardCacherIdentifier(miniBlock.SenderShardID, miniBlock.ReceiverShardID) - rewardTxBuff, err := rtp.storage.GetAll(dataRetriever.RewardTransactionUnit, miniBlock.TxHashes) + err := rtp.restoreRewardTxsIntoPool(miniBlock) if err != nil { - log.Debug("reward tx from mini block was not found in RewardTransactionUnit", - "sender shard ID", miniBlock.SenderShardID, - "receiver shard ID", miniBlock.ReceiverShardID, - "num txs", len(miniBlock.TxHashes), - ) - return rewardTxsRestored, err } - for txHash, txBuff := range rewardTxBuff { - tx := rewardTx.RewardTx{} - err = rtp.marshalizer.Unmarshal(&tx, txBuff) - if err != nil { - return rewardTxsRestored, err - } - - rtp.rewardTxPool.AddData([]byte(txHash), &tx, tx.Size(), strCache) - } - miniBlockHash, err := core.CalculateHash(rtp.marshalizer, rtp.hasher, miniBlock) if err != nil { return rewardTxsRestored, err @@ -211,6 +193,32 @@ func (rtp *rewardTxPreprocessor) RestoreBlockDataIntoPools( return rewardTxsRestored, nil } +func (rtp *rewardTxPreprocessor) restoreRewardTxsIntoPool(miniBlock *block.MiniBlock) error { + strCache := process.ShardCacherIdentifier(miniBlock.SenderShardID, miniBlock.ReceiverShardID) + rewardTxsBuff, err := rtp.storage.GetAll(dataRetriever.RewardTransactionUnit, miniBlock.TxHashes) + if err != nil { + log.Debug("reward txs from mini block were not found in RewardTransactionUnit", + "sender shard ID", miniBlock.SenderShardID, + "receiver shard ID", miniBlock.ReceiverShardID, + "num txs", len(miniBlock.TxHashes), + ) + + return err + } + + for 
txHash, txBuff := range rewardTxsBuff { + tx := rewardTx.RewardTx{} + err = rtp.marshalizer.Unmarshal(&tx, txBuff) + if err != nil { + return err + } + + rtp.rewardTxPool.AddData([]byte(txHash), &tx, tx.Size(), strCache) + } + + return nil +} + // ProcessBlockTransactions processes all the reward transactions from the block.Body, updates the state func (rtp *rewardTxPreprocessor) ProcessBlockTransactions( headerHandler data.HeaderHandler, @@ -330,7 +338,7 @@ func (rtp *rewardTxPreprocessor) RequestBlockTransactions(body *block.Body) int // computeExistingAndRequestMissingRewardTxsForShards calculates what reward transactions are available and requests // what are missing from block.Body func (rtp *rewardTxPreprocessor) computeExistingAndRequestMissingRewardTxsForShards(body *block.Body) int { - rewardTxs := block.Body{} + rewardTxsBody := block.Body{} for _, mb := range body.MiniBlocks { if mb.Type != block.RewardsBlock { continue @@ -339,11 +347,11 @@ func (rtp *rewardTxPreprocessor) computeExistingAndRequestMissingRewardTxsForSha continue } - rewardTxs.MiniBlocks = append(rewardTxs.MiniBlocks, mb) + rewardTxsBody.MiniBlocks = append(rewardTxsBody.MiniBlocks, mb) } numMissingTxsForShards := rtp.computeExistingAndRequestMissing( - &rewardTxs, + &rewardTxsBody, &rtp.rewardTxsForBlock, rtp.chReceivedAllRewardTxs, rtp.isMiniBlockCorrect, @@ -360,19 +368,20 @@ func (rtp *rewardTxPreprocessor) RequestTransactionsForMiniBlock(miniBlock *bloc return 0 } - missingRewardTxsForMiniBlock := rtp.computeMissingRewardTxsForMiniBlock(miniBlock) - if len(missingRewardTxsForMiniBlock) > 0 { - rtp.onRequestRewardTx(miniBlock.SenderShardID, missingRewardTxsForMiniBlock) + missingRewardTxsHashesForMiniBlock := rtp.computeMissingRewardTxsHashesForMiniBlock(miniBlock) + if len(missingRewardTxsHashesForMiniBlock) > 0 { + rtp.onRequestRewardTx(miniBlock.SenderShardID, missingRewardTxsHashesForMiniBlock) } - return len(missingRewardTxsForMiniBlock) + return len(missingRewardTxsHashesForMiniBlock) } -// computeMissingRewardTxsForMiniBlock computes missing reward transactions for a certain miniblock -func (rtp *rewardTxPreprocessor) computeMissingRewardTxsForMiniBlock(miniBlock *block.MiniBlock) [][]byte { - missingRewardTxs := make([][]byte, 0, len(miniBlock.TxHashes)) +// computeMissingRewardTxsHashesForMiniBlock computes missing reward transactions hashes for a certain miniblock +func (rtp *rewardTxPreprocessor) computeMissingRewardTxsHashesForMiniBlock(miniBlock *block.MiniBlock) [][]byte { + missingRewardTxsHashes := make([][]byte, 0) + if miniBlock.Type != block.RewardsBlock { - return missingRewardTxs + return missingRewardTxsHashes } for _, txHash := range miniBlock.TxHashes { @@ -384,12 +393,12 @@ func (rtp *rewardTxPreprocessor) computeMissingRewardTxsForMiniBlock(miniBlock * false, ) - if tx == nil { - missingRewardTxs = append(missingRewardTxs, txHash) + if check.IfNil(tx) { + missingRewardTxsHashes = append(missingRewardTxsHashes, txHash) } } - return sliceUtil.TrimSliceSliceByte(missingRewardTxs) + return missingRewardTxsHashes } // getAllRewardTxsFromMiniBlock gets all the reward transactions from a miniblock into a new structure @@ -439,8 +448,7 @@ func (rtp *rewardTxPreprocessor) CreateAndProcessMiniBlocks( return make(block.MiniBlockSlice, 0), nil } -// ProcessMiniBlock processes all the reward transactions from a miniblock and saves the processed reward transactions -// in local cache +// ProcessMiniBlock processes all the reward transactions from the given miniblock and saves the processed ones 
in a local cache func (rtp *rewardTxPreprocessor) ProcessMiniBlock( miniBlock *block.MiniBlock, haveTime func() bool, @@ -513,26 +521,26 @@ func (rtp *rewardTxPreprocessor) ProcessMiniBlock( return nil, txIndex - 1, false, err } -// CreateMarshalizedData marshalizes reward transaction hashes and and saves them into a new structure -func (rtp *rewardTxPreprocessor) CreateMarshalizedData(txHashes [][]byte) ([][]byte, error) { - marshaledRewardTxs, err := rtp.createMarshalizedData(txHashes, &rtp.rewardTxsForBlock) +// CreateMarshalledData marshals reward transactions hashes and saves them into a new structure +func (rtp *rewardTxPreprocessor) CreateMarshalledData(txHashes [][]byte) ([][]byte, error) { + marshalledRewardTxs, err := rtp.createMarshalledData(txHashes, &rtp.rewardTxsForBlock) if err != nil { return nil, err } - return marshaledRewardTxs, nil + return marshalledRewardTxs, nil } // GetAllCurrentUsedTxs returns all the reward transactions used at current creation / processing func (rtp *rewardTxPreprocessor) GetAllCurrentUsedTxs() map[string]data.TransactionHandler { rtp.rewardTxsForBlock.mutTxsForBlock.RLock() - rewardTxPool := make(map[string]data.TransactionHandler, len(rtp.rewardTxsForBlock.txHashAndInfo)) + rewardTxsPool := make(map[string]data.TransactionHandler, len(rtp.rewardTxsForBlock.txHashAndInfo)) for txHash, txData := range rtp.rewardTxsForBlock.txHashAndInfo { - rewardTxPool[txHash] = txData.tx + rewardTxsPool[txHash] = txData.tx } rtp.rewardTxsForBlock.mutTxsForBlock.RUnlock() - return rewardTxPool + return rewardTxsPool } // AddTxsFromMiniBlocks does nothing diff --git a/process/block/preprocess/rewardTxPreProcessor_test.go b/process/block/preprocess/rewardTxPreProcessor_test.go index 660d5e92060..f24c3ed6955 100644 --- a/process/block/preprocess/rewardTxPreProcessor_test.go +++ b/process/block/preprocess/rewardTxPreProcessor_test.go @@ -380,7 +380,7 @@ func TestRewardTxPreprocessor_CreateMarshalizedDataShouldWork(t *testing.T) { txs := []data.TransactionHandler{&rewardTx.RewardTx{}} rtp.AddTxs(txHashes, txs) - res, err := rtp.CreateMarshalizedData(txHashes) + res, err := rtp.CreateMarshalledData(txHashes) assert.Nil(t, err) assert.Equal(t, 1, len(res)) diff --git a/process/block/preprocess/smartContractResults.go b/process/block/preprocess/smartContractResults.go index 6a5fd4e1d63..36c2d52d447 100644 --- a/process/block/preprocess/smartContractResults.go +++ b/process/block/preprocess/smartContractResults.go @@ -186,28 +186,11 @@ func (scr *smartContractResults) RestoreBlockDataIntoPools( continue } - strCache := process.ShardCacherIdentifier(miniBlock.SenderShardID, miniBlock.ReceiverShardID) - scrBuff, err := scr.storage.GetAll(dataRetriever.UnsignedTransactionUnit, miniBlock.TxHashes) + err := scr.restoreSmartContractResultsIntoPool(miniBlock) if err != nil { - log.Debug("unsigned tx from mini block was not found in UnsignedTransactionUnit", - "sender shard ID", miniBlock.SenderShardID, - "receiver shard ID", miniBlock.ReceiverShardID, - "num txs", len(miniBlock.TxHashes), - ) - return scrRestored, err } - for txHash, txBuff := range scrBuff { - tx := smartContractResult.SmartContractResult{} - err = scr.marshalizer.Unmarshal(&tx, txBuff) - if err != nil { - return scrRestored, err - } - - scr.scrPool.AddData([]byte(txHash), &tx, tx.Size(), strCache) - } - // TODO: Should be analyzed if restoring into pool only cross-shard miniblocks with destination in self shard, // would create problems or not if miniBlock.SenderShardID != scr.shardCoordinator.SelfId() { @@ 
-225,6 +208,32 @@ func (scr *smartContractResults) RestoreBlockDataIntoPools( return scrRestored, nil } +func (scr *smartContractResults) restoreSmartContractResultsIntoPool(miniBlock *block.MiniBlock) error { + strCache := process.ShardCacherIdentifier(miniBlock.SenderShardID, miniBlock.ReceiverShardID) + scrsBuff, err := scr.storage.GetAll(dataRetriever.UnsignedTransactionUnit, miniBlock.TxHashes) + if err != nil { + log.Debug("smart contract results from mini block were not found in UnsignedTransactionUnit", + "sender shard ID", miniBlock.SenderShardID, + "receiver shard ID", miniBlock.ReceiverShardID, + "num txs", len(miniBlock.TxHashes), + ) + + return err + } + + for txHash, txBuff := range scrsBuff { + tx := smartContractResult.SmartContractResult{} + err = scr.marshalizer.Unmarshal(&tx, txBuff) + if err != nil { + return err + } + + scr.scrPool.AddData([]byte(txHash), &tx, tx.Size(), strCache) + } + + return nil +} + // ProcessBlockTransactions processes all the smartContractResult from the block.Body, updates the state func (scr *smartContractResults) ProcessBlockTransactions( headerHandler data.HeaderHandler, @@ -427,21 +436,22 @@ func (scr *smartContractResults) RequestTransactionsForMiniBlock(miniBlock *bloc return 0 } - missingScrsForMiniBlock := scr.computeMissingScrsForMiniBlock(miniBlock) - if len(missingScrsForMiniBlock) > 0 { - scr.onRequestSmartContractResult(miniBlock.SenderShardID, missingScrsForMiniBlock) + missingScrsHashesForMiniBlock := scr.computeMissingScrsHashesForMiniBlock(miniBlock) + if len(missingScrsHashesForMiniBlock) > 0 { + scr.onRequestSmartContractResult(miniBlock.SenderShardID, missingScrsHashesForMiniBlock) } - return len(missingScrsForMiniBlock) + return len(missingScrsHashesForMiniBlock) } -// computeMissingScrsForMiniBlock computes missing smartContractResults for a certain miniblock -func (scr *smartContractResults) computeMissingScrsForMiniBlock(miniBlock *block.MiniBlock) [][]byte { +// computeMissingScrsHashesForMiniBlock computes missing smart contract results hashes for a certain miniblock +func (scr *smartContractResults) computeMissingScrsHashesForMiniBlock(miniBlock *block.MiniBlock) [][]byte { + missingSmartContractResultsHashes := make([][]byte, 0) + if miniBlock.Type != block.SmartContractResultBlock { - return [][]byte{} + return missingSmartContractResultsHashes } - missingSmartContractResults := make([][]byte, 0, len(miniBlock.TxHashes)) for _, txHash := range miniBlock.TxHashes { tx, _ := process.GetTransactionHandlerFromPool( miniBlock.SenderShardID, @@ -451,11 +461,11 @@ func (scr *smartContractResults) computeMissingScrsForMiniBlock(miniBlock *block false) if check.IfNil(tx) { - missingSmartContractResults = append(missingSmartContractResults, txHash) + missingSmartContractResultsHashes = append(missingSmartContractResultsHashes, txHash) } } - return sliceUtil.TrimSliceSliceByte(missingSmartContractResults) + return missingSmartContractResultsHashes } // getAllScrsFromMiniBlock gets all the smartContractResults from a miniblock into a new structure @@ -501,7 +511,7 @@ func (scr *smartContractResults) CreateAndProcessMiniBlocks(_ func() bool, _ []b return make(block.MiniBlockSlice, 0), nil } -// ProcessMiniBlock processes all the smartContractResults from a and saves the processed smartContractResults in local cache complete miniblock +// ProcessMiniBlock processes all the smart contract results from the given miniblock and saves the processed ones in a local cache func (scr *smartContractResults) ProcessMiniBlock( miniBlock 
*block.MiniBlock, haveTime func() bool, @@ -627,26 +637,26 @@ func (scr *smartContractResults) ProcessMiniBlock( return nil, txIndex - 1, false, err } -// CreateMarshalizedData marshalizes smartContractResults and creates and saves them into a new structure -func (scr *smartContractResults) CreateMarshalizedData(txHashes [][]byte) ([][]byte, error) { - mrsScrs, err := scr.createMarshalizedData(txHashes, &scr.scrForBlock) +// CreateMarshalledData marshals smart contract results hashes and saves them into a new structure +func (scr *smartContractResults) CreateMarshalledData(txHashes [][]byte) ([][]byte, error) { + marshalledScrs, err := scr.createMarshalledData(txHashes, &scr.scrForBlock) if err != nil { return nil, err } - return mrsScrs, nil + return marshalledScrs, nil } // GetAllCurrentUsedTxs returns all the smartContractResults used at current creation / processing func (scr *smartContractResults) GetAllCurrentUsedTxs() map[string]data.TransactionHandler { scr.scrForBlock.mutTxsForBlock.RLock() - scrPool := make(map[string]data.TransactionHandler, len(scr.scrForBlock.txHashAndInfo)) + scrsPool := make(map[string]data.TransactionHandler, len(scr.scrForBlock.txHashAndInfo)) for txHash, txInfoFromMap := range scr.scrForBlock.txHashAndInfo { - scrPool[txHash] = txInfoFromMap.tx + scrsPool[txHash] = txInfoFromMap.tx } scr.scrForBlock.mutTxsForBlock.RUnlock() - return scrPool + return scrsPool } // AddTxsFromMiniBlocks does nothing diff --git a/process/block/preprocess/transactions.go b/process/block/preprocess/transactions.go index 090af1dad79..22ec9ee3374 100644 --- a/process/block/preprocess/transactions.go +++ b/process/block/preprocess/transactions.go @@ -10,7 +10,6 @@ import ( "github.com/ElrondNetwork/elrond-go-core/core" "github.com/ElrondNetwork/elrond-go-core/core/check" - "github.com/ElrondNetwork/elrond-go-core/core/sliceUtil" "github.com/ElrondNetwork/elrond-go-core/data" "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go-core/data/transaction" @@ -244,29 +243,11 @@ func (txs *transactions) RestoreBlockDataIntoPools( continue } - miniBlockStrCache := process.ShardCacherIdentifier(miniBlock.SenderShardID, miniBlock.ReceiverShardID) - txsBuff, err := txs.storage.GetAll(dataRetriever.TransactionUnit, miniBlock.TxHashes) + err := txs.restoreTxsIntoPool(miniBlock) if err != nil { - log.Debug("tx from mini block was not found in TransactionUnit", - "sender shard ID", miniBlock.SenderShardID, - "receiver shard ID", miniBlock.ReceiverShardID, - "num txs", len(miniBlock.TxHashes), - ) - return txsRestored, err } - for txHash, txBuff := range txsBuff { - tx := transaction.Transaction{} - err = txs.marshalizer.Unmarshal(&tx, txBuff) - if err != nil { - return txsRestored, err - } - - strCache := txs.computeCacheIdentifier(miniBlockStrCache, &tx, miniBlock.Type) - txs.txPool.AddData([]byte(txHash), &tx, tx.Size(), strCache) - } - if miniBlock.SenderShardID != txs.shardCoordinator.SelfId() { miniBlockHash, errHash := core.CalculateHash(txs.marshalizer, txs.hasher, miniBlock) if errHash != nil { @@ -282,6 +263,33 @@ func (txs *transactions) RestoreBlockDataIntoPools( return txsRestored, nil } +func (txs *transactions) restoreTxsIntoPool(miniBlock *block.MiniBlock) error { + miniBlockStrCache := process.ShardCacherIdentifier(miniBlock.SenderShardID, miniBlock.ReceiverShardID) + txsBuff, err := txs.storage.GetAll(dataRetriever.TransactionUnit, miniBlock.TxHashes) + if err != nil { + log.Debug("txs from mini block were not found in TransactionUnit", + 
"sender shard ID", miniBlock.SenderShardID, + "receiver shard ID", miniBlock.ReceiverShardID, + "num txs", len(miniBlock.TxHashes), + ) + + return err + } + + for txHash, txBuff := range txsBuff { + tx := transaction.Transaction{} + err = txs.marshalizer.Unmarshal(&tx, txBuff) + if err != nil { + return err + } + + strCache := txs.computeCacheIdentifier(miniBlockStrCache, &tx, miniBlock.Type) + txs.txPool.AddData([]byte(txHash), &tx, tx.Size(), strCache) + } + + return nil +} + func (txs *transactions) computeCacheIdentifier(miniBlockStrCache string, tx *transaction.Transaction, miniBlockType block.Type) string { if miniBlockType != block.InvalidBlock { return miniBlockStrCache @@ -919,21 +927,22 @@ func (txs *transactions) RequestTransactionsForMiniBlock(miniBlock *block.MiniBl return 0 } - missingTxsForMiniBlock := txs.computeMissingTxsForMiniBlock(miniBlock) - if len(missingTxsForMiniBlock) > 0 { - txs.onRequestTransaction(miniBlock.SenderShardID, missingTxsForMiniBlock) + missingTxsHashesForMiniBlock := txs.computeMissingTxsHashesForMiniBlock(miniBlock) + if len(missingTxsHashesForMiniBlock) > 0 { + txs.onRequestTransaction(miniBlock.SenderShardID, missingTxsHashesForMiniBlock) } - return len(missingTxsForMiniBlock) + return len(missingTxsHashesForMiniBlock) } -// computeMissingTxsForMiniBlock computes missing transactions for a certain miniblock -func (txs *transactions) computeMissingTxsForMiniBlock(miniBlock *block.MiniBlock) [][]byte { +// computeMissingTxsHashesForMiniBlock computes missing transactions hashes for a certain miniblock +func (txs *transactions) computeMissingTxsHashesForMiniBlock(miniBlock *block.MiniBlock) [][]byte { + missingTransactionsHashes := make([][]byte, 0) + if miniBlock.Type != txs.blockType { - return nil + return missingTransactionsHashes } - missingTransactions := make([][]byte, 0, len(miniBlock.TxHashes)) searchFirst := txs.blockType == block.InvalidBlock for _, txHash := range miniBlock.TxHashes { @@ -944,12 +953,12 @@ func (txs *transactions) computeMissingTxsForMiniBlock(miniBlock *block.MiniBloc txs.txPool, searchFirst) - if tx == nil || tx.IsInterfaceNil() { - missingTransactions = append(missingTransactions, txHash) + if check.IfNil(tx) { + missingTransactionsHashes = append(missingTransactionsHashes, txHash) } } - return sliceUtil.TrimSliceSliceByte(missingTransactions) + return missingTransactionsHashes } // getAllTxsFromMiniBlock gets all the transactions from a miniblock into a new structure @@ -1424,7 +1433,7 @@ func (txs *transactions) computeSortedTxs( return selectedTxs, remainingTxs, nil } -// ProcessMiniBlock processes all the transactions from a and saves the processed transactions in local cache complete miniblock +// ProcessMiniBlock processes all the transactions from the given miniblock and saves the processed ones in a local cache func (txs *transactions) ProcessMiniBlock( miniBlock *block.MiniBlock, haveTime func() bool, @@ -1611,27 +1620,26 @@ func (txs *transactions) processInNormalMode( return nil } -// CreateMarshalizedData marshalizes transactions and creates and saves them into a new structure -func (txs *transactions) CreateMarshalizedData(txHashes [][]byte) ([][]byte, error) { - mrsScrs, err := txs.createMarshalizedData(txHashes, &txs.txsForCurrBlock) +// CreateMarshalledData marshals transactions hashes and saves them into a new structure +func (txs *transactions) CreateMarshalledData(txHashes [][]byte) ([][]byte, error) { + marshalledTxs, err := txs.createMarshalledData(txHashes, &txs.txsForCurrBlock) if err != nil { 
return nil, err } - return mrsScrs, nil + return marshalledTxs, nil } // GetAllCurrentUsedTxs returns all the transactions used at current creation / processing func (txs *transactions) GetAllCurrentUsedTxs() map[string]data.TransactionHandler { - txPool := make(map[string]data.TransactionHandler, len(txs.txsForCurrBlock.txHashAndInfo)) txs.txsForCurrBlock.mutTxsForBlock.RLock() + txsPool := make(map[string]data.TransactionHandler, len(txs.txsForCurrBlock.txHashAndInfo)) for txHash, txInfoFromMap := range txs.txsForCurrBlock.txHashAndInfo { - txPool[txHash] = txInfoFromMap.tx + txsPool[txHash] = txInfoFromMap.tx } txs.txsForCurrBlock.mutTxsForBlock.RUnlock() - return txPool + return txsPool } // IsInterfaceNil returns true if there is no value under the interface diff --git a/process/block/preprocess/transactions_test.go b/process/block/preprocess/transactions_test.go index 0e2e566a753..d684332eaaa 100644 --- a/process/block/preprocess/transactions_test.go +++ b/process/block/preprocess/transactions_test.go @@ -146,6 +146,12 @@ func initDataPool() *dataRetrieverMock.PoolsHolderStub { }, } }, + ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { + return &testscommon.ShardedDataStub{ + RemoveSetOfDataFromPoolCalled: func(keys [][]byte, destCacheID string) { + }, + } + }, MetaBlocksCalled: func() storage.Cacher { return &testscommon.CacherStub{ GetCalled: func(key []byte) (value interface{}, ok bool) { diff --git a/process/block/preprocess/validatorInfoPreProcessor.go b/process/block/preprocess/validatorInfoPreProcessor.go index 70d29f876fe..8fb47ed9615 100644 --- a/process/block/preprocess/validatorInfoPreProcessor.go +++ b/process/block/preprocess/validatorInfoPreProcessor.go @@ -9,7 +9,10 @@ import ( "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go-core/hashing" "github.com/ElrondNetwork/elrond-go-core/marshal" + "github.com/ElrondNetwork/elrond-go/common" + "github.com/ElrondNetwork/elrond-go/dataRetriever" "github.com/ElrondNetwork/elrond-go/process" + "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/storage" ) @@ -17,9 +20,12 @@ var _ process.DataMarshalizer = (*validatorInfoPreprocessor)(nil) var _ process.PreProcessor = (*validatorInfoPreprocessor)(nil) type validatorInfoPreprocessor struct { - hasher hashing.Hasher - marshalizer marshal.Marshalizer - blockSizeComputation BlockSizeComputationHandler + *basePreProcess + chReceivedAllValidatorsInfo chan bool + validatorsInfoForBlock txsForBlock + validatorsInfoPool dataRetriever.ShardedDataCacherNotifier + storage dataRetriever.StorageService + enableEpochsHandler common.EnableEpochsHandler } // NewValidatorInfoPreprocessor creates a new validatorInfo preprocessor object @@ -27,7 +33,11 @@ func NewValidatorInfoPreprocessor( hasher hashing.Hasher, marshalizer marshal.Marshalizer, blockSizeComputation BlockSizeComputationHandler, + validatorsInfoPool dataRetriever.ShardedDataCacherNotifier, + store dataRetriever.StorageService, + enableEpochsHandler common.EnableEpochsHandler, ) (*validatorInfoPreprocessor, error) { + if check.IfNil(hasher) { return nil, process.ErrNilHasher } @@ -37,49 +47,48 @@ func NewValidatorInfoPreprocessor( if check.IfNil(blockSizeComputation) { return nil, process.ErrNilBlockSizeComputationHandler } + if check.IfNil(validatorsInfoPool) { + return nil, process.ErrNilValidatorInfoPool + } + if check.IfNil(store) { + return nil, process.ErrNilStorage + } + if check.IfNil(enableEpochsHandler) { + return nil,
process.ErrNilEnableEpochsHandler + } - rtp := &validatorInfoPreprocessor{ + bpp := &basePreProcess{ hasher: hasher, marshalizer: marshalizer, blockSizeComputation: blockSizeComputation, } - return rtp, nil + + vip := &validatorInfoPreprocessor{ + basePreProcess: bpp, + storage: store, + validatorsInfoPool: validatorsInfoPool, + enableEpochsHandler: enableEpochsHandler, + } + + vip.chReceivedAllValidatorsInfo = make(chan bool) + vip.validatorsInfoForBlock.txHashAndInfo = make(map[string]*txInfo) + + return vip, nil } -// IsDataPrepared does nothing +// IsDataPrepared returns non error if all the requested validators info arrived and were saved into the pool func (vip *validatorInfoPreprocessor) IsDataPrepared(_ int, _ func() time.Duration) error { return nil } // RemoveBlockDataFromPools removes the peer miniblocks from pool func (vip *validatorInfoPreprocessor) RemoveBlockDataFromPools(body *block.Body, miniBlockPool storage.Cacher) error { - if check.IfNil(body) { - return process.ErrNilBlockBody - } - if check.IfNil(miniBlockPool) { - return process.ErrNilMiniBlockPool - } - - for i := 0; i < len(body.MiniBlocks); i++ { - currentMiniBlock := body.MiniBlocks[i] - if currentMiniBlock.Type != block.PeerBlock { - continue - } - - miniBlockHash, err := core.CalculateHash(vip.marshalizer, vip.hasher, currentMiniBlock) - if err != nil { - return err - } - - miniBlockPool.Remove(miniBlockHash) - } - - return nil + return vip.removeBlockDataFromPools(body, miniBlockPool, vip.validatorsInfoPool, vip.isMiniBlockCorrect) } -// RemoveTxsFromPools does nothing for validatorInfoPreprocessor implementation -func (vip *validatorInfoPreprocessor) RemoveTxsFromPools(_ *block.Body) error { - return nil +// RemoveTxsFromPools removes validators info from associated pools +func (vip *validatorInfoPreprocessor) RemoveTxsFromPools(body *block.Body) error { + return vip.removeTxsFromPools(body, vip.validatorsInfoPool, vip.isMiniBlockCorrect) } // RestoreBlockDataIntoPools restores the peer miniblocks to the pool @@ -101,6 +110,13 @@ func (vip *validatorInfoPreprocessor) RestoreBlockDataIntoPools( continue } + if vip.enableEpochsHandler.IsRefactorPeersMiniBlocksFlagEnabled() { + err := vip.restoreValidatorsInfo(miniBlock) + if err != nil { + return validatorsInfoRestored, err + } + } + miniBlockHash, err := core.CalculateHash(vip.marshalizer, vip.hasher, miniBlock) if err != nil { return validatorsInfoRestored, err @@ -114,6 +130,32 @@ func (vip *validatorInfoPreprocessor) RestoreBlockDataIntoPools( return validatorsInfoRestored, nil } +func (vip *validatorInfoPreprocessor) restoreValidatorsInfo(miniBlock *block.MiniBlock) error { + strCache := process.ShardCacherIdentifier(miniBlock.SenderShardID, miniBlock.ReceiverShardID) + validatorsInfoBuff, err := vip.storage.GetAll(dataRetriever.UnsignedTransactionUnit, miniBlock.TxHashes) + if err != nil { + log.Debug("validators info from mini block were not found in UnsignedTransactionUnit", + "sender shard ID", miniBlock.SenderShardID, + "receiver shard ID", miniBlock.ReceiverShardID, + "num txs", len(miniBlock.TxHashes), + ) + + return err + } + + for validatorInfoHash, validatorInfoBuff := range validatorsInfoBuff { + shardValidatorInfo := &state.ShardValidatorInfo{} + err = vip.marshalizer.Unmarshal(shardValidatorInfo, validatorInfoBuff) + if err != nil { + return err + } + + vip.validatorsInfoPool.AddData([]byte(validatorInfoHash), shardValidatorInfo, shardValidatorInfo.Size(), strCache) + } + + return nil +} + // ProcessBlockTransactions does nothing func (vip 
*validatorInfoPreprocessor) ProcessBlockTransactions( _ data.HeaderHandler, @@ -128,8 +170,14 @@ func (vip *validatorInfoPreprocessor) SaveTxsToStorage(_ *block.Body) error { return nil } -// CreateBlockStarted does nothing +// CreateBlockStarted cleans the local cache map for processed/created validators info at this round func (vip *validatorInfoPreprocessor) CreateBlockStarted() { + _ = core.EmptyChannel(vip.chReceivedAllValidatorsInfo) + + vip.validatorsInfoForBlock.mutTxsForBlock.Lock() + vip.validatorsInfoForBlock.missingTxs = 0 + vip.validatorsInfoForBlock.txHashAndInfo = make(map[string]*txInfo) + vip.validatorsInfoForBlock.mutTxsForBlock.Unlock() } // RequestBlockTransactions does nothing @@ -165,8 +213,6 @@ func (vip *validatorInfoPreprocessor) ProcessMiniBlock( return nil, indexOfLastTxProcessed, false, process.ErrValidatorInfoMiniBlockNotFromMeta } - //TODO: We need another function in the BlockSizeComputationHandler implementation that will better handle - //the PeerBlock miniblocks as those are not hashes if vip.blockSizeComputation.IsMaxBlockSizeWithoutThrottleReached(1, len(miniBlock.TxHashes)) { return nil, indexOfLastTxProcessed, false, process.ErrMaxBlockSizeReached } @@ -177,16 +223,14 @@ func (vip *validatorInfoPreprocessor) ProcessMiniBlock( return nil, len(miniBlock.TxHashes) - 1, false, nil } -// CreateMarshalizedData does nothing -func (vip *validatorInfoPreprocessor) CreateMarshalizedData(_ [][]byte) ([][]byte, error) { - marshalized := make([][]byte, 0) - return marshalized, nil +// CreateMarshalledData does nothing +func (vip *validatorInfoPreprocessor) CreateMarshalledData(_ [][]byte) ([][]byte, error) { + return make([][]byte, 0), nil } // GetAllCurrentUsedTxs does nothing func (vip *validatorInfoPreprocessor) GetAllCurrentUsedTxs() map[string]data.TransactionHandler { - validatorInfoTxPool := make(map[string]data.TransactionHandler) - return validatorInfoTxPool + return make(map[string]data.TransactionHandler) } // AddTxsFromMiniBlocks does nothing @@ -197,7 +241,11 @@ func (vip *validatorInfoPreprocessor) AddTxsFromMiniBlocks(_ block.MiniBlockSlic func (vip *validatorInfoPreprocessor) AddTransactions(_ []data.TransactionHandler) { } -// IsInterfaceNil does nothing +// IsInterfaceNil returns true if there is no value under the interface func (vip *validatorInfoPreprocessor) IsInterfaceNil() bool { return vip == nil } + +func (vip *validatorInfoPreprocessor) isMiniBlockCorrect(mbType block.Type) bool { + return mbType == block.PeerBlock +} diff --git a/process/block/preprocess/validatorInfoPreProcessor_test.go b/process/block/preprocess/validatorInfoPreProcessor_test.go index 88d71c708a8..a52ed8bb04a 100644 --- a/process/block/preprocess/validatorInfoPreProcessor_test.go +++ b/process/block/preprocess/validatorInfoPreProcessor_test.go @@ -1,23 +1,33 @@ package preprocess import ( + "bytes" + "errors" "testing" "github.com/ElrondNetwork/elrond-go-core/core" "github.com/ElrondNetwork/elrond-go-core/data/block" + "github.com/ElrondNetwork/elrond-go/dataRetriever" "github.com/ElrondNetwork/elrond-go/process" + "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/testscommon" + "github.com/ElrondNetwork/elrond-go/testscommon/genericMocks" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" + "github.com/ElrondNetwork/elrond-go/testscommon/storage" "github.com/stretchr/testify/assert" ) func TestNewValidatorInfoPreprocessor_NilHasherShouldErr(t *testing.T) { t.Parallel() + tdp := initDataPool() rtp, err := 
NewValidatorInfoPreprocessor( nil, &testscommon.MarshalizerMock{}, &testscommon.BlockSizeComputationStub{}, + tdp.ValidatorsInfo(), + genericMocks.NewChainStorerMock(0), + &testscommon.EnableEpochsHandlerStub{}, ) assert.Nil(t, rtp) @@ -27,10 +37,14 @@ func TestNewValidatorInfoPreprocessor_NilHasherShouldErr(t *testing.T) { func TestNewValidatorInfoPreprocessor_NilMarshalizerShouldErr(t *testing.T) { t.Parallel() + tdp := initDataPool() rtp, err := NewValidatorInfoPreprocessor( &hashingMocks.HasherMock{}, nil, &testscommon.BlockSizeComputationStub{}, + tdp.ValidatorsInfo(), + genericMocks.NewChainStorerMock(0), + &testscommon.EnableEpochsHandlerStub{}, ) assert.Nil(t, rtp) @@ -40,23 +54,81 @@ func TestNewValidatorInfoPreprocessor_NilMarshalizerShouldErr(t *testing.T) { func TestNewValidatorInfoPreprocessor_NilBlockSizeComputationHandlerShouldErr(t *testing.T) { t.Parallel() + tdp := initDataPool() rtp, err := NewValidatorInfoPreprocessor( &hashingMocks.HasherMock{}, &testscommon.MarshalizerMock{}, nil, + tdp.ValidatorsInfo(), + genericMocks.NewChainStorerMock(0), + &testscommon.EnableEpochsHandlerStub{}, ) assert.Nil(t, rtp) assert.Equal(t, process.ErrNilBlockSizeComputationHandler, err) } +func TestNewValidatorInfoPreprocessor_NilValidatorInfoPoolShouldErr(t *testing.T) { + t.Parallel() + + rtp, err := NewValidatorInfoPreprocessor( + &hashingMocks.HasherMock{}, + &testscommon.MarshalizerMock{}, + &testscommon.BlockSizeComputationStub{}, + nil, + genericMocks.NewChainStorerMock(0), + &testscommon.EnableEpochsHandlerStub{}, + ) + + assert.Nil(t, rtp) + assert.Equal(t, process.ErrNilValidatorInfoPool, err) +} + +func TestNewValidatorInfoPreprocessor_NilStoreShouldErr(t *testing.T) { + t.Parallel() + + tdp := initDataPool() + rtp, err := NewValidatorInfoPreprocessor( + &hashingMocks.HasherMock{}, + &testscommon.MarshalizerMock{}, + &testscommon.BlockSizeComputationStub{}, + tdp.ValidatorsInfo(), + nil, + &testscommon.EnableEpochsHandlerStub{}, + ) + + assert.Nil(t, rtp) + assert.Equal(t, process.ErrNilStorage, err) +} + +func TestNewValidatorInfoPreprocessor_NilEnableEpochHandlerShouldErr(t *testing.T) { + t.Parallel() + + tdp := initDataPool() + rtp, err := NewValidatorInfoPreprocessor( + &hashingMocks.HasherMock{}, + &testscommon.MarshalizerMock{}, + &testscommon.BlockSizeComputationStub{}, + tdp.ValidatorsInfo(), + genericMocks.NewChainStorerMock(0), + nil, + ) + + assert.Nil(t, rtp) + assert.Equal(t, process.ErrNilEnableEpochsHandler, err) +} + func TestNewValidatorInfoPreprocessor_OkValsShouldWork(t *testing.T) { t.Parallel() + tdp := initDataPool() rtp, err := NewValidatorInfoPreprocessor( &hashingMocks.HasherMock{}, &testscommon.MarshalizerMock{}, &testscommon.BlockSizeComputationStub{}, + tdp.ValidatorsInfo(), + genericMocks.NewChainStorerMock(0), + &testscommon.EnableEpochsHandlerStub{}, ) assert.Nil(t, err) assert.NotNil(t, rtp) @@ -65,14 +137,18 @@ func TestNewValidatorInfoPreprocessor_OkValsShouldWork(t *testing.T) { func TestNewValidatorInfoPreprocessor_CreateMarshalizedDataShouldWork(t *testing.T) { t.Parallel() + tdp := initDataPool() rtp, _ := NewValidatorInfoPreprocessor( &hashingMocks.HasherMock{}, &testscommon.MarshalizerMock{}, &testscommon.BlockSizeComputationStub{}, + tdp.ValidatorsInfo(), + genericMocks.NewChainStorerMock(0), + &testscommon.EnableEpochsHandlerStub{}, ) hash := make([][]byte, 0) - res, err := rtp.CreateMarshalizedData(hash) + res, err := rtp.CreateMarshalledData(hash) assert.Nil(t, err) assert.Equal(t, 0, len(res)) @@ -81,10 +157,14 @@ func 
TestNewValidatorInfoPreprocessor_CreateMarshalizedDataShouldWork(t *testing func TestNewValidatorInfoPreprocessor_ProcessMiniBlockInvalidMiniBlockTypeShouldErr(t *testing.T) { t.Parallel() + tdp := initDataPool() rtp, _ := NewValidatorInfoPreprocessor( &hashingMocks.HasherMock{}, &testscommon.MarshalizerMock{}, &testscommon.BlockSizeComputationStub{}, + tdp.ValidatorsInfo(), + genericMocks.NewChainStorerMock(0), + &testscommon.EnableEpochsHandlerStub{}, ) txHashes := make([][]byte, 0) @@ -106,10 +186,14 @@ func TestNewValidatorInfoPreprocessor_ProcessMiniBlockInvalidMiniBlockTypeShould func TestNewValidatorInfoPreprocessor_ProcessMiniBlockShouldWork(t *testing.T) { t.Parallel() + tdp := initDataPool() rtp, _ := NewValidatorInfoPreprocessor( &hashingMocks.HasherMock{}, &testscommon.MarshalizerMock{}, &testscommon.BlockSizeComputationStub{}, + tdp.ValidatorsInfo(), + genericMocks.NewChainStorerMock(0), + &testscommon.EnableEpochsHandlerStub{}, ) txHashes := make([][]byte, 0) @@ -131,10 +215,14 @@ func TestNewValidatorInfoPreprocessor_ProcessMiniBlockShouldWork(t *testing.T) { func TestNewValidatorInfoPreprocessor_ProcessMiniBlockNotFromMeta(t *testing.T) { t.Parallel() + tdp := initDataPool() rtp, _ := NewValidatorInfoPreprocessor( &hashingMocks.HasherMock{}, &testscommon.MarshalizerMock{}, &testscommon.BlockSizeComputationStub{}, + tdp.ValidatorsInfo(), + genericMocks.NewChainStorerMock(0), + &testscommon.EnableEpochsHandlerStub{}, ) txHashes := make([][]byte, 0) @@ -160,10 +248,14 @@ func TestNewValidatorInfoPreprocessor_RestorePeerBlockIntoPools(t *testing.T) { marshalizer := &testscommon.MarshalizerMock{} blockSizeComputation := &testscommon.BlockSizeComputationStub{} + tdp := initDataPool() rtp, _ := NewValidatorInfoPreprocessor( hasher, marshalizer, blockSizeComputation, + tdp.ValidatorsInfo(), + genericMocks.NewChainStorerMock(0), + &testscommon.EnableEpochsHandlerStub{}, ) txHashes := [][]byte{[]byte("tx_hash1")} @@ -201,10 +293,14 @@ func TestNewValidatorInfoPreprocessor_RestoreOtherBlockTypeIntoPoolsShouldNotRes marshalizer := &testscommon.MarshalizerMock{} blockSizeComputation := &testscommon.BlockSizeComputationStub{} + tdp := initDataPool() rtp, _ := NewValidatorInfoPreprocessor( hasher, marshalizer, blockSizeComputation, + tdp.ValidatorsInfo(), + genericMocks.NewChainStorerMock(0), + &testscommon.EnableEpochsHandlerStub{}, ) txHashes := [][]byte{[]byte("tx_hash1")} @@ -242,10 +338,14 @@ func TestNewValidatorInfoPreprocessor_RemovePeerBlockFromPool(t *testing.T) { marshalizer := &testscommon.MarshalizerMock{} blockSizeComputation := &testscommon.BlockSizeComputationStub{} + tdp := initDataPool() rtp, _ := NewValidatorInfoPreprocessor( hasher, marshalizer, blockSizeComputation, + tdp.ValidatorsInfo(), + genericMocks.NewChainStorerMock(0), + &testscommon.EnableEpochsHandlerStub{}, ) txHashes := [][]byte{[]byte("tx_hash1")} @@ -283,10 +383,14 @@ func TestNewValidatorInfoPreprocessor_RemoveOtherBlockTypeFromPoolShouldNotRemov marshalizer := &testscommon.MarshalizerMock{} blockSizeComputation := &testscommon.BlockSizeComputationStub{} + tdp := initDataPool() rtp, _ := NewValidatorInfoPreprocessor( hasher, marshalizer, blockSizeComputation, + tdp.ValidatorsInfo(), + genericMocks.NewChainStorerMock(0), + &testscommon.EnableEpochsHandlerStub{}, ) txHashes := [][]byte{[]byte("tx_hash1")} @@ -316,3 +420,78 @@ func TestNewValidatorInfoPreprocessor_RemoveOtherBlockTypeFromPoolShouldNotRemov assert.NotNil(t, foundMb) assert.True(t, ok) } + +func 
TestNewValidatorInfoPreprocessor_RestoreValidatorsInfo(t *testing.T) { + t.Parallel() + + t.Run("restore validators info with not all txs found in storage", func(t *testing.T) { + t.Parallel() + + expectedErr := errors.New("error") + hasher := &hashingMocks.HasherMock{} + marshalizer := &testscommon.MarshalizerMock{} + blockSizeComputation := &testscommon.BlockSizeComputationStub{} + storer := &storage.ChainStorerStub{ + GetAllCalled: func(unitType dataRetriever.UnitType, keys [][]byte) (map[string][]byte, error) { + return nil, expectedErr + }, + } + tdp := initDataPool() + rtp, _ := NewValidatorInfoPreprocessor( + hasher, + marshalizer, + blockSizeComputation, + tdp.ValidatorsInfo(), + storer, + &testscommon.EnableEpochsHandlerStub{}, + ) + + miniBlock := &block.MiniBlock{} + err := rtp.restoreValidatorsInfo(miniBlock) + assert.Equal(t, expectedErr, err) + }) + + t.Run("restore validators info with all txs found in storage", func(t *testing.T) { + t.Parallel() + + hasher := &hashingMocks.HasherMock{} + marshalizer := &testscommon.MarshalizerMock{} + blockSizeComputation := &testscommon.BlockSizeComputationStub{} + shardValidatorInfoHash := []byte("hash") + shardValidatorInfo := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + marshalledShardValidatorInfo, _ := marshalizer.Marshal(shardValidatorInfo) + storer := &storage.ChainStorerStub{ + GetAllCalled: func(unitType dataRetriever.UnitType, keys [][]byte) (map[string][]byte, error) { + allShardValidatorsInfo := make(map[string][]byte) + allShardValidatorsInfo[string(shardValidatorInfoHash)] = marshalledShardValidatorInfo + return allShardValidatorsInfo, nil + }, + } + tdp := initDataPool() + wasCalledWithExpectedKey := false + tdp.ValidatorsInfoCalled = func() dataRetriever.ShardedDataCacherNotifier { + return &testscommon.ShardedDataStub{ + AddDataCalled: func(key []byte, data interface{}, sizeInBytes int, cacheID string) { + if bytes.Equal(key, shardValidatorInfoHash) { + wasCalledWithExpectedKey = true + } + }, + } + } + rtp, _ := NewValidatorInfoPreprocessor( + hasher, + marshalizer, + blockSizeComputation, + tdp.ValidatorsInfo(), + storer, + &testscommon.EnableEpochsHandlerStub{}, + ) + + miniBlock := &block.MiniBlock{} + err := rtp.restoreValidatorsInfo(miniBlock) + assert.Nil(t, err) + assert.True(t, wasCalledWithExpectedKey) + }) +} diff --git a/process/coordinator/process.go b/process/coordinator/process.go index 72f1fae1732..6d895e68cf4 100644 --- a/process/coordinator/process.go +++ b/process/coordinator/process.go @@ -1005,7 +1005,7 @@ func (tc *transactionCoordinator) CreateMarshalizedData(body *block.Body) map[st dataMarshalizer, ok := preproc.(process.DataMarshalizer) if ok { // preproc supports marshalizing items - tc.appendMarshalizedItems( + tc.appendMarshalledItems( dataMarshalizer, miniBlock.TxHashes, mrsTxs, @@ -1019,7 +1019,7 @@ func (tc *transactionCoordinator) CreateMarshalizedData(body *block.Body) map[st dataMarshalizer, ok := interimProc.(process.DataMarshalizer) if ok { // interimProc supports marshalizing items - tc.appendMarshalizedItems( + tc.appendMarshalledItems( dataMarshalizer, miniBlock.TxHashes, mrsTxs, @@ -1032,15 +1032,15 @@ func (tc *transactionCoordinator) CreateMarshalizedData(body *block.Body) map[st return mrsTxs } -func (tc *transactionCoordinator) appendMarshalizedItems( +func (tc *transactionCoordinator) appendMarshalledItems( dataMarshalizer process.DataMarshalizer, txHashes [][]byte, mrsTxs map[string][][]byte, broadcastTopic string, ) { - currMrsTxs, err := 
dataMarshalizer.CreateMarshalizedData(txHashes) + currMrsTxs, err := dataMarshalizer.CreateMarshalledData(txHashes) if err != nil { - log.Debug("appendMarshalizedItems.CreateMarshalizedData", "error", err.Error()) + log.Debug("appendMarshalledItems.CreateMarshalledData", "error", err.Error()) return } diff --git a/process/errors.go b/process/errors.go index eec91791776..1ba7f0e317f 100644 --- a/process/errors.go +++ b/process/errors.go @@ -221,9 +221,21 @@ var ErrNilShardedDataCacherNotifier = errors.New("nil sharded data cacher notifi // ErrInvalidTxInPool signals an invalid transaction in the transactions pool var ErrInvalidTxInPool = errors.New("invalid transaction in the transactions pool") +// ErrInvalidValidatorInfoInPool signals an invalid validator info in the validators info pool +var ErrInvalidValidatorInfoInPool = errors.New("invalid validator info in the validators info pool") + // ErrTxNotFound signals that a transaction has not found var ErrTxNotFound = errors.New("transaction not found") +// ErrValidatorInfoNotFound signals that a validator info has not found +var ErrValidatorInfoNotFound = errors.New("validator info not found") + +// ErrNilHeadersStorage signals that a nil header storage has been provided +var ErrNilHeadersStorage = errors.New("nil headers storage") + +// ErrNilHeadersNonceHashStorage signals that a nil header nonce hash storage has been provided +var ErrNilHeadersNonceHashStorage = errors.New("nil headers nonce hash storage") + // ErrNilTransactionPool signals that a nil transaction pool was used var ErrNilTransactionPool = errors.New("nil transaction pool") @@ -1062,12 +1074,6 @@ var ErrNilDoubleTransactionsDetector = errors.New("nil double transactions detec // ErrNoTxToProcess signals that no transaction were sent for processing var ErrNoTxToProcess = errors.New("no transaction to process") -// ErrPropertyTooLong signals that a heartbeat property was too long -var ErrPropertyTooLong = errors.New("property too long") - -// ErrPropertyTooShort signals that a heartbeat property was too short -var ErrPropertyTooShort = errors.New("property too short") - // ErrInvalidPeerSubType signals that an invalid peer subtype was provided var ErrInvalidPeerSubType = errors.New("invalid peer subtype") @@ -1127,3 +1133,12 @@ var ErrNilEnableEpochsHandler = errors.New("nil enable epochs handler") // ErrNilCrawlerAllowedAddress signals that no crawler allowed address was found var ErrNilCrawlerAllowedAddress = errors.New("nil crawler allowed address") + +// ErrNilValidatorInfoPool signals that a nil validator info pool has been provided +var ErrNilValidatorInfoPool = errors.New("nil validator info pool") + +// ErrPropertyTooLong signals that a heartbeat property was too long +var ErrPropertyTooLong = errors.New("property too long") + +// ErrPropertyTooShort signals that a heartbeat property was too short +var ErrPropertyTooShort = errors.New("property too short") diff --git a/process/factory/interceptorscontainer/baseInterceptorsContainerFactory.go b/process/factory/interceptorscontainer/baseInterceptorsContainerFactory.go index 0f712dd35f8..7aeecea423f 100644 --- a/process/factory/interceptorscontainer/baseInterceptorsContainerFactory.go +++ b/process/factory/interceptorscontainer/baseInterceptorsContainerFactory.go @@ -240,11 +240,11 @@ func (bicf *baseInterceptorsContainerFactory) createOneTxInterceptor(topic strin return nil, err } - internalMarshalizer := bicf.argInterceptorFactory.CoreComponents.InternalMarshalizer() + internalMarshaller := 
bicf.argInterceptorFactory.CoreComponents.InternalMarshalizer() interceptor, err := interceptors.NewMultiDataInterceptor( interceptors.ArgMultiDataInterceptor{ Topic: topic, - Marshalizer: internalMarshalizer, + Marshalizer: internalMarshaller, DataFactory: txFactory, Processor: txProcessor, Throttler: bicf.globalThrottler, @@ -283,11 +283,11 @@ func (bicf *baseInterceptorsContainerFactory) createOneUnsignedTxInterceptor(top return nil, err } - internalMarshalizer := bicf.argInterceptorFactory.CoreComponents.InternalMarshalizer() + internalMarshaller := bicf.argInterceptorFactory.CoreComponents.InternalMarshalizer() interceptor, err := interceptors.NewMultiDataInterceptor( interceptors.ArgMultiDataInterceptor{ Topic: topic, - Marshalizer: internalMarshalizer, + Marshalizer: internalMarshaller, DataFactory: txFactory, Processor: txProcessor, Throttler: bicf.globalThrottler, @@ -326,11 +326,11 @@ func (bicf *baseInterceptorsContainerFactory) createOneRewardTxInterceptor(topic return nil, err } - internalMarshalizer := bicf.argInterceptorFactory.CoreComponents.InternalMarshalizer() + internalMarshaller := bicf.argInterceptorFactory.CoreComponents.InternalMarshalizer() interceptor, err := interceptors.NewMultiDataInterceptor( interceptors.ArgMultiDataInterceptor{ Topic: topic, - Marshalizer: internalMarshalizer, + Marshalizer: internalMarshaller, DataFactory: txFactory, Processor: txProcessor, Throttler: bicf.globalThrottler, @@ -438,11 +438,11 @@ func (bicf *baseInterceptorsContainerFactory) generateMiniBlocksInterceptors() e } func (bicf *baseInterceptorsContainerFactory) createOneMiniBlocksInterceptor(topic string) (process.Interceptor, error) { - internalMarshalizer := bicf.argInterceptorFactory.CoreComponents.InternalMarshalizer() + internalMarshaller := bicf.argInterceptorFactory.CoreComponents.InternalMarshalizer() hasher := bicf.argInterceptorFactory.CoreComponents.Hasher() argProcessor := &processor.ArgMiniblockInterceptorProcessor{ MiniblockCache: bicf.dataPool.MiniBlocks(), - Marshalizer: internalMarshalizer, + Marshalizer: internalMarshaller, Hasher: hasher, ShardCoordinator: bicf.shardCoordinator, WhiteListHandler: bicf.whiteListHandler, @@ -460,7 +460,7 @@ func (bicf *baseInterceptorsContainerFactory) createOneMiniBlocksInterceptor(top interceptor, err := interceptors.NewMultiDataInterceptor( interceptors.ArgMultiDataInterceptor{ Topic: topic, - Marshalizer: internalMarshalizer, + Marshalizer: internalMarshaller, DataFactory: miniblockFactory, Processor: miniblockProcessor, Throttler: bicf.globalThrottler, @@ -532,11 +532,11 @@ func (bicf *baseInterceptorsContainerFactory) createOneTrieNodesInterceptor(topi return nil, err } - internalMarshalizer := bicf.argInterceptorFactory.CoreComponents.InternalMarshalizer() + internalMarshaller := bicf.argInterceptorFactory.CoreComponents.InternalMarshalizer() interceptor, err := interceptors.NewMultiDataInterceptor( interceptors.ArgMultiDataInterceptor{ Topic: topic, - Marshalizer: internalMarshalizer, + Marshalizer: internalMarshaller, DataFactory: trieNodesFactory, Processor: trieNodesProcessor, Throttler: bicf.globalThrottler, @@ -735,3 +735,46 @@ func (bicf *baseInterceptorsContainerFactory) generateDirectConnectionInfoInterc return bicf.container.Add(identifier, interceptor) } + +func (bicf *baseInterceptorsContainerFactory) generateValidatorInfoInterceptor() error { + identifier := common.ValidatorInfoTopic + + interceptedValidatorInfoFactory, err := 
interceptorFactory.NewInterceptedValidatorInfoDataFactory(*bicf.argInterceptorFactory) + if err != nil { + return err + } + + internalMarshaller := bicf.argInterceptorFactory.CoreComponents.InternalMarshalizer() + argProcessor := processor.ArgValidatorInfoInterceptorProcessor{ + ValidatorInfoPool: bicf.dataPool.ValidatorsInfo(), + } + + validatorInfoProcessor, err := processor.NewValidatorInfoInterceptorProcessor(argProcessor) + if err != nil { + return err + } + + mdInterceptor, err := interceptors.NewMultiDataInterceptor( + interceptors.ArgMultiDataInterceptor{ + Topic: identifier, + Marshalizer: internalMarshaller, + DataFactory: interceptedValidatorInfoFactory, + Processor: validatorInfoProcessor, + Throttler: bicf.globalThrottler, + AntifloodHandler: bicf.antifloodHandler, + WhiteListRequest: bicf.whiteListHandler, + PreferredPeersHolder: bicf.preferredPeersHolder, + CurrentPeerId: bicf.messenger.ID(), + }, + ) + if err != nil { + return err + } + + interceptor, err := bicf.createTopicAndAssignHandler(identifier, mdInterceptor, true) + if err != nil { + return err + } + + return bicf.container.Add(identifier, interceptor) +} diff --git a/process/factory/interceptorscontainer/metaInterceptorsContainerFactory.go b/process/factory/interceptorscontainer/metaInterceptorsContainerFactory.go index 1978d7c01e3..0b16b580511 100644 --- a/process/factory/interceptorscontainer/metaInterceptorsContainerFactory.go +++ b/process/factory/interceptorscontainer/metaInterceptorsContainerFactory.go @@ -185,6 +185,11 @@ func (micf *metaInterceptorsContainerFactory) Create() (process.InterceptorsCont return nil, err } + err = micf.generateValidatorInfoInterceptor() + if err != nil { + return nil, err + } + return micf.container, nil } diff --git a/process/factory/interceptorscontainer/metaInterceptorsContainerFactory_test.go b/process/factory/interceptorscontainer/metaInterceptorsContainerFactory_test.go index e47dbf12a08..c42c2bf76c7 100644 --- a/process/factory/interceptorscontainer/metaInterceptorsContainerFactory_test.go +++ b/process/factory/interceptorscontainer/metaInterceptorsContainerFactory_test.go @@ -602,10 +602,11 @@ func TestMetaInterceptorsContainerFactory_With4ShardsShouldWork(t *testing.T) { numInterceptorsPeerAuthForMetachain := 1 numInterceptorsHeartbeatForMetachain := 1 numInterceptorsShardValidatorInfoForMetachain := 1 + numInterceptorValidatorInfo := 1 totalInterceptors := numInterceptorsMetablock + numInterceptorsShardHeadersForMetachain + numInterceptorsTrieNodes + numInterceptorsTransactionsForMetachain + numInterceptorsUnsignedTxsForMetachain + numInterceptorsMiniBlocksForMetachain + numInterceptorsRewardsTxsForMetachain + numInterceptorsPeerAuthForMetachain + numInterceptorsHeartbeatForMetachain + - numInterceptorsShardValidatorInfoForMetachain + numInterceptorsShardValidatorInfoForMetachain + numInterceptorValidatorInfo assert.Nil(t, err) assert.Equal(t, totalInterceptors, container.Len()) diff --git a/process/factory/interceptorscontainer/shardInterceptorsContainerFactory.go b/process/factory/interceptorscontainer/shardInterceptorsContainerFactory.go index e3208f58c6e..9c11c8fc5ae 100644 --- a/process/factory/interceptorscontainer/shardInterceptorsContainerFactory.go +++ b/process/factory/interceptorscontainer/shardInterceptorsContainerFactory.go @@ -184,6 +184,11 @@ func (sicf *shardInterceptorsContainerFactory) Create() (process.InterceptorsCon return nil, err } + err = sicf.generateValidatorInfoInterceptor() + if err != nil { + return nil, err + } + return sicf.container, 
nil } diff --git a/process/factory/interceptorscontainer/shardInterceptorsContainerFactory_test.go b/process/factory/interceptorscontainer/shardInterceptorsContainerFactory_test.go index 7fc52df3389..127f53421b3 100644 --- a/process/factory/interceptorscontainer/shardInterceptorsContainerFactory_test.go +++ b/process/factory/interceptorscontainer/shardInterceptorsContainerFactory_test.go @@ -659,9 +659,10 @@ func TestShardInterceptorsContainerFactory_With4ShardsShouldWork(t *testing.T) { numInterceptorPeerAuth := 1 numInterceptorHeartbeat := 1 numInterceptorsShardValidatorInfo := 1 + numInterceptorValidatorInfo := 1 totalInterceptors := numInterceptorTxs + numInterceptorsUnsignedTxs + numInterceptorsRewardTxs + numInterceptorHeaders + numInterceptorMiniBlocks + numInterceptorMetachainHeaders + numInterceptorTrieNodes + - numInterceptorPeerAuth + numInterceptorHeartbeat + numInterceptorsShardValidatorInfo + numInterceptorPeerAuth + numInterceptorHeartbeat + numInterceptorsShardValidatorInfo + numInterceptorValidatorInfo assert.Nil(t, err) assert.Equal(t, totalInterceptors, container.Len()) diff --git a/process/factory/shard/preProcessorsContainerFactory.go b/process/factory/shard/preProcessorsContainerFactory.go index a237428efc4..049471dbbd1 100644 --- a/process/factory/shard/preProcessorsContainerFactory.go +++ b/process/factory/shard/preProcessorsContainerFactory.go @@ -277,6 +277,9 @@ func (ppcm *preProcessorsContainerFactory) createValidatorInfoPreProcessor() (pr ppcm.hasher, ppcm.marshalizer, ppcm.blockSizeComputation, + ppcm.dataPool.ValidatorsInfo(), + ppcm.store, + ppcm.enableEpochsHandler, ) return validatorInfoPreprocessor, err diff --git a/process/interceptors/factory/interceptedDirectConnectionInfoFactory.go b/process/interceptors/factory/interceptedDirectConnectionInfoFactory.go index de81b20cb45..9a3b447f016 100644 --- a/process/interceptors/factory/interceptedDirectConnectionInfoFactory.go +++ b/process/interceptors/factory/interceptedDirectConnectionInfoFactory.go @@ -15,7 +15,7 @@ type interceptedDirectConnectionInfoFactory struct { // NewInterceptedDirectConnectionInfoFactory creates an instance of interceptedDirectConnectionInfoFactory func NewInterceptedDirectConnectionInfoFactory(args ArgInterceptedDataFactory) (*interceptedDirectConnectionInfoFactory, error) { - err := checkArgs(args) + err := checkInterceptedDirectConnectionInfoFactoryArgs(args) if err != nil { return nil, err } @@ -26,7 +26,7 @@ func NewInterceptedDirectConnectionInfoFactory(args ArgInterceptedDataFactory) ( }, nil } -func checkArgs(args ArgInterceptedDataFactory) error { +func checkInterceptedDirectConnectionInfoFactoryArgs(args ArgInterceptedDataFactory) error { if check.IfNil(args.CoreComponents) { return process.ErrNilCoreComponentsHolder } diff --git a/process/interceptors/factory/interceptedValidatorInfoDataFactory.go b/process/interceptors/factory/interceptedValidatorInfoDataFactory.go new file mode 100644 index 00000000000..62247d91e17 --- /dev/null +++ b/process/interceptors/factory/interceptedValidatorInfoDataFactory.go @@ -0,0 +1,57 @@ +package factory + +import ( + "github.com/ElrondNetwork/elrond-go-core/core/check" + "github.com/ElrondNetwork/elrond-go-core/hashing" + "github.com/ElrondNetwork/elrond-go-core/marshal" + "github.com/ElrondNetwork/elrond-go/process" + "github.com/ElrondNetwork/elrond-go/process/peer" +) + +type interceptedValidatorInfoDataFactory struct { + marshaller marshal.Marshalizer + hasher hashing.Hasher +} + +// NewInterceptedValidatorInfoDataFactory creates an 
instance of interceptedValidatorInfoDataFactory +func NewInterceptedValidatorInfoDataFactory(args ArgInterceptedDataFactory) (*interceptedValidatorInfoDataFactory, error) { + err := checkInterceptedValidatorInfoDataFactoryArgs(args) + if err != nil { + return nil, err + } + + return &interceptedValidatorInfoDataFactory{ + marshaller: args.CoreComponents.InternalMarshalizer(), + hasher: args.CoreComponents.Hasher(), + }, nil +} + +func checkInterceptedValidatorInfoDataFactoryArgs(args ArgInterceptedDataFactory) error { + if check.IfNil(args.CoreComponents) { + return process.ErrNilCoreComponentsHolder + } + if check.IfNil(args.CoreComponents.InternalMarshalizer()) { + return process.ErrNilMarshalizer + } + if check.IfNil(args.CoreComponents.Hasher()) { + return process.ErrNilHasher + } + + return nil +} + +// Create creates instances of InterceptedData by unmarshalling provided buffer +func (ividf *interceptedValidatorInfoDataFactory) Create(buff []byte) (process.InterceptedData, error) { + args := peer.ArgInterceptedValidatorInfo{ + DataBuff: buff, + Marshalizer: ividf.marshaller, + Hasher: ividf.hasher, + } + + return peer.NewInterceptedValidatorInfo(args) +} + +// IsInterfaceNil returns true if there is no value under the interface +func (ividf *interceptedValidatorInfoDataFactory) IsInterfaceNil() bool { + return ividf == nil +} diff --git a/process/interceptors/factory/interceptedValidatorInfoDataFactory_test.go b/process/interceptors/factory/interceptedValidatorInfoDataFactory_test.go new file mode 100644 index 00000000000..3fcf3021e7c --- /dev/null +++ b/process/interceptors/factory/interceptedValidatorInfoDataFactory_test.go @@ -0,0 +1,95 @@ +package factory + +import ( + "testing" + + "github.com/ElrondNetwork/elrond-go-core/core/check" + "github.com/ElrondNetwork/elrond-go/common" + "github.com/ElrondNetwork/elrond-go/process" + "github.com/ElrondNetwork/elrond-go/state" + "github.com/ElrondNetwork/elrond-go/testscommon" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func createMockValidatorInfoBuff() []byte { + vi := &state.ValidatorInfo{ + PublicKey: []byte("provided pk"), + ShardId: 123, + List: string(common.EligibleList), + Index: 10, + Rating: 10, + } + + marshalizerMock := testscommon.MarshalizerMock{} + buff, _ := marshalizerMock.Marshal(vi) + + return buff +} + +func TestNewInterceptedValidatorInfoDataFactory(t *testing.T) { + t.Parallel() + + t.Run("nil core components should error", func(t *testing.T) { + t.Parallel() + + _, cryptoComponents := createMockComponentHolders() + args := createMockArgument(nil, cryptoComponents) + + ividf, err := NewInterceptedValidatorInfoDataFactory(*args) + assert.Equal(t, process.ErrNilCoreComponentsHolder, err) + assert.True(t, check.IfNil(ividf)) + }) + t.Run("nil marshaller should error", func(t *testing.T) { + t.Parallel() + + coreComponents, cryptoComponents := createMockComponentHolders() + coreComponents.IntMarsh = nil + args := createMockArgument(coreComponents, cryptoComponents) + + ividf, err := NewInterceptedValidatorInfoDataFactory(*args) + assert.Equal(t, process.ErrNilMarshalizer, err) + assert.True(t, check.IfNil(ividf)) + }) + t.Run("nil hasher should error", func(t *testing.T) { + t.Parallel() + + coreComponents, cryptoComponents := createMockComponentHolders() + coreComponents.Hash = nil + args := createMockArgument(coreComponents, cryptoComponents) + + ividf, err := NewInterceptedValidatorInfoDataFactory(*args) + assert.Equal(t, process.ErrNilHasher, err) + assert.True(t, 
check.IfNil(ividf))
+ })
+ t.Run("should work", func(t *testing.T) {
+ t.Parallel()
+
+ ividf, err := NewInterceptedValidatorInfoDataFactory(*createMockArgument(createMockComponentHolders()))
+ assert.Nil(t, err)
+ assert.False(t, check.IfNil(ividf))
+ })
+}
+
+func TestInterceptedValidatorInfoDataFactory_Create(t *testing.T) {
+ t.Parallel()
+
+ t.Run("nil buff should error", func(t *testing.T) {
+ t.Parallel()
+ ividf, _ := NewInterceptedValidatorInfoDataFactory(*createMockArgument(createMockComponentHolders()))
+ require.False(t, check.IfNil(ividf))
+
+ ivi, err := ividf.Create(nil)
+ assert.NotNil(t, err)
+ assert.True(t, check.IfNil(ivi))
+ })
+ t.Run("should work", func(t *testing.T) {
+ t.Parallel()
+ ividf, _ := NewInterceptedValidatorInfoDataFactory(*createMockArgument(createMockComponentHolders()))
+ require.False(t, check.IfNil(ividf))
+
+ ivi, err := ividf.Create(createMockValidatorInfoBuff())
+ assert.Nil(t, err)
+ assert.False(t, check.IfNil(ivi))
+ })
+}
diff --git a/process/interceptors/processor/interface.go b/process/interceptors/processor/interface.go
index e4f8a818a5f..3aa8e15bce1 100644
--- a/process/interceptors/processor/interface.go
+++ b/process/interceptors/processor/interface.go
@@ -4,6 +4,7 @@ import (
 "math/big"

 "github.com/ElrondNetwork/elrond-go-core/data"
+ "github.com/ElrondNetwork/elrond-go/state"
 )

 // InterceptedTransactionHandler defines an intercepted data wrapper over transaction handler that has
@@ -37,3 +38,8 @@ type interceptedPeerAuthenticationMessageHandler interface {
 Payload() []byte
 Pubkey() []byte
 }
+
+type interceptedValidatorInfo interface {
+ Hash() []byte
+ ValidatorInfo() *state.ShardValidatorInfo
+}
diff --git a/process/interceptors/processor/validatorInfoInterceptorProcessor.go b/process/interceptors/processor/validatorInfoInterceptorProcessor.go
new file mode 100644
index 00000000000..d3d70b6a70d
--- /dev/null
+++ b/process/interceptors/processor/validatorInfoInterceptorProcessor.go
@@ -0,0 +1,70 @@
+package processor
+
+import (
+ "github.com/ElrondNetwork/elrond-go-core/core"
+ "github.com/ElrondNetwork/elrond-go-core/core/check"
+ "github.com/ElrondNetwork/elrond-go/dataRetriever"
+ "github.com/ElrondNetwork/elrond-go/process"
+)
+
+// ArgValidatorInfoInterceptorProcessor is the argument structure used to create a new validator info interceptor processor
+type ArgValidatorInfoInterceptorProcessor struct {
+ ValidatorInfoPool dataRetriever.ShardedDataCacherNotifier
+}
+
+type validatorInfoInterceptorProcessor struct {
+ validatorInfoPool dataRetriever.ShardedDataCacherNotifier
+}
+
+// NewValidatorInfoInterceptorProcessor creates a new validator info interceptor processor
+func NewValidatorInfoInterceptorProcessor(args ArgValidatorInfoInterceptorProcessor) (*validatorInfoInterceptorProcessor, error) {
+ err := checkArgs(args)
+ if err != nil {
+ return nil, err
+ }
+
+ return &validatorInfoInterceptorProcessor{
+ validatorInfoPool: args.ValidatorInfoPool,
+ }, nil
+}
+
+func checkArgs(args ArgValidatorInfoInterceptorProcessor) error {
+ if check.IfNil(args.ValidatorInfoPool) {
+ return process.ErrNilValidatorInfoPool
+ }
+
+ return nil
+}
+
+// Validate returns nil as validation is done on Save
+func (viip *validatorInfoInterceptorProcessor) Validate(_ process.InterceptedData, _ core.PeerID) error {
+ return nil
+}
+
+// Save will save the intercepted validator info into the cache
+func (viip *validatorInfoInterceptorProcessor) Save(data process.InterceptedData, _ core.PeerID, _ string) error {
+ ivi, ok := data.(interceptedValidatorInfo)
+ if !ok {
+ return process.ErrWrongTypeAssertion
+ }
+
+ validatorInfo := ivi.ValidatorInfo()
+ hash := ivi.Hash()
+
+ log.Trace("validatorInfoInterceptorProcessor.Save", "tx hash", hash, "pk", validatorInfo.PublicKey)
+
+ strCache := process.ShardCacherIdentifier(core.MetachainShardId, core.AllShardId)
+ viip.validatorInfoPool.AddData(hash, validatorInfo, validatorInfo.Size(), strCache)
+
+ return nil
+}
+
+// RegisterHandler registers a callback function to be notified of incoming validator info
+func (viip *validatorInfoInterceptorProcessor) RegisterHandler(_ func(topic string, hash []byte, data interface{})) {
+ log.Error("validatorInfoInterceptorProcessor.RegisterHandler", "error", "not implemented")
+}
+
+// IsInterfaceNil returns true if there is no value under the interface
+func (viip *validatorInfoInterceptorProcessor) IsInterfaceNil() bool {
+ return viip == nil
+}
diff --git a/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go b/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go
new file mode 100644
index 00000000000..bad90439cdf
--- /dev/null
+++ b/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go
@@ -0,0 +1,155 @@
+package processor_test
+
+import (
+ "fmt"
+ "testing"
+
+ "github.com/ElrondNetwork/elrond-go-core/core/check"
+ "github.com/ElrondNetwork/elrond-go/common"
+ "github.com/ElrondNetwork/elrond-go/epochStart/mock"
+ "github.com/ElrondNetwork/elrond-go/process"
+ "github.com/ElrondNetwork/elrond-go/process/interceptors/processor"
+ "github.com/ElrondNetwork/elrond-go/process/peer"
+ "github.com/ElrondNetwork/elrond-go/state"
+ "github.com/ElrondNetwork/elrond-go/testscommon"
+ "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func createMockValidatorInfo() state.ValidatorInfo {
+ return state.ValidatorInfo{
+ PublicKey: []byte("provided pk"),
+ ShardId: 123,
+ List: string(common.EligibleList),
+ Index: 10,
+ Rating: 10,
+ }
+}
+
+func createMockInterceptedValidatorInfo() process.InterceptedData {
+ args := peer.ArgInterceptedValidatorInfo{
+ Marshalizer: testscommon.MarshalizerMock{},
+ Hasher: &hashingMocks.HasherMock{},
+ }
+ args.DataBuff, _ = args.Marshalizer.Marshal(createMockValidatorInfo())
+ ivi, _ := peer.NewInterceptedValidatorInfo(args)
+
+ return ivi
+}
+
+func createMockArgValidatorInfoInterceptorProcessor() processor.ArgValidatorInfoInterceptorProcessor {
+ return processor.ArgValidatorInfoInterceptorProcessor{
+ ValidatorInfoPool: testscommon.NewShardedDataStub(),
+ }
+}
+
+func TestNewValidatorInfoInterceptorProcessor(t *testing.T) {
+ t.Parallel()
+
+ t.Run("nil cache should error", func(t *testing.T) {
+ t.Parallel()
+
+ args := createMockArgValidatorInfoInterceptorProcessor()
+ args.ValidatorInfoPool = nil
+
+ proc, err := processor.NewValidatorInfoInterceptorProcessor(args)
+ assert.Equal(t, process.ErrNilValidatorInfoPool, err)
+ assert.True(t, check.IfNil(proc))
+ })
+ t.Run("should work", func(t *testing.T) {
+ t.Parallel()
+
+ proc, err := processor.NewValidatorInfoInterceptorProcessor(createMockArgValidatorInfoInterceptorProcessor())
+ assert.Nil(t, err)
+ assert.False(t, check.IfNil(proc))
+ })
+}
+
+func TestValidatorInfoInterceptorProcessor_Save(t *testing.T) {
+ t.Parallel()
+
+ t.Run("invalid data should error", func(t *testing.T) {
+ t.Parallel()
+
+ proc, err :=
processor.NewValidatorInfoInterceptorProcessor(createMockArgValidatorInfoInterceptorProcessor()) + assert.Nil(t, err) + assert.Equal(t, process.ErrWrongTypeAssertion, proc.Save(nil, "", "")) + }) + t.Run("invalid validator info should error", func(t *testing.T) { + t.Parallel() + + providedData := mock.NewInterceptedMetaBlockMock(nil, []byte("hash")) // unable to cast to intercepted validator info + wasCalled := false + args := createMockArgValidatorInfoInterceptorProcessor() + args.ValidatorInfoPool = &testscommon.ShardedDataStub{ + AddDataCalled: func(key []byte, data interface{}, sizeInBytes int, cacheID string) { + wasCalled = true + }, + } + + proc, _ := processor.NewValidatorInfoInterceptorProcessor(args) + require.False(t, check.IfNil(proc)) + + assert.Equal(t, process.ErrWrongTypeAssertion, proc.Save(providedData, "", "")) + assert.False(t, wasCalled) + }) + t.Run("should work", func(t *testing.T) { + t.Parallel() + + providedEpoch := uint32(15) + providedEpochStr := fmt.Sprintf("%d", providedEpoch) + providedData := createMockInterceptedValidatorInfo() + wasHasOrAddCalled := false + args := createMockArgValidatorInfoInterceptorProcessor() + providedBuff, _ := testscommon.MarshalizerMock{}.Marshal(createMockValidatorInfo()) + hasher := hashingMocks.HasherMock{} + providedHash := hasher.Compute(string(providedBuff)) + + args.ValidatorInfoPool = &testscommon.ShardedDataStub{ + AddDataCalled: func(key []byte, data interface{}, sizeInBytes int, cacheID string) { + assert.Equal(t, providedHash, key) + wasHasOrAddCalled = true + }, + } + + proc, _ := processor.NewValidatorInfoInterceptorProcessor(args) + require.False(t, check.IfNil(proc)) + + assert.Nil(t, proc.Save(providedData, "", providedEpochStr)) + assert.True(t, wasHasOrAddCalled) + }) +} + +func TestValidatorInfoInterceptorProcessor_Validate(t *testing.T) { + t.Parallel() + + defer func() { + r := recover() + if r != nil { + assert.Fail(t, "should not panic") + } + }() + + args := createMockArgValidatorInfoInterceptorProcessor() + proc, _ := processor.NewValidatorInfoInterceptorProcessor(args) + require.False(t, check.IfNil(proc)) + + assert.Nil(t, proc.Validate(createMockInterceptedValidatorInfo(), "")) +} + +func TestValidatorInfoInterceptorProcessor_RegisterHandler(t *testing.T) { + t.Parallel() + + defer func() { + if r := recover(); r != nil { + assert.Fail(t, "should not panic") + } + }() + + proc, err := processor.NewValidatorInfoInterceptorProcessor(createMockArgValidatorInfoInterceptorProcessor()) + assert.Nil(t, err) + assert.False(t, check.IfNil(proc)) + + proc.RegisterHandler(nil) +} diff --git a/process/interface.go b/process/interface.go index bad4ba73f89..f14e3e39ea2 100644 --- a/process/interface.go +++ b/process/interface.go @@ -24,6 +24,7 @@ import ( "github.com/ElrondNetwork/elrond-go/process/block/bootstrapStorage" "github.com/ElrondNetwork/elrond-go/process/block/processedMb" "github.com/ElrondNetwork/elrond-go/sharding" + "github.com/ElrondNetwork/elrond-go/sharding/nodesCoordinator" "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/storage" vmcommon "github.com/ElrondNetwork/elrond-vm-common" @@ -182,7 +183,7 @@ type IntermediateTransactionHandler interface { // DataMarshalizer defines the behavior of a structure that is able to marshalize containing data type DataMarshalizer interface { - CreateMarshalizedData(txHashes [][]byte) ([][]byte, error) + CreateMarshalledData(txHashes [][]byte) ([][]byte, error) } // TransactionVerifier interface validates if the transaction is good 
and if it should be processed @@ -557,6 +558,8 @@ type RequestHandler interface { CreateTrieNodeIdentifier(requestHash []byte, chunkIndex uint32) []byte RequestPeerAuthenticationsChunk(destShardID uint32, chunkIndex uint32) RequestPeerAuthenticationsByHashes(destShardID uint32, hashes [][]byte) + RequestValidatorInfo(hash []byte) + RequestValidatorsInfo(hashes [][]byte) IsInterfaceNil() bool } @@ -902,10 +905,10 @@ type RewardsCreator interface { ) error GetProtocolSustainabilityRewards() *big.Int GetLocalTxCache() epochStart.TransactionCacher - CreateMarshalizedData(body *block.Body) map[string][][]byte + CreateMarshalledData(body *block.Body) map[string][][]byte GetRewardsTxs(body *block.Body) map[string]data.TransactionHandler - SaveTxBlockToStorage(metaBlock data.MetaHeaderHandler, body *block.Body) - DeleteTxsFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) + SaveBlockDataToStorage(metaBlock data.MetaHeaderHandler, body *block.Body) + DeleteBlockDataFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) RemoveBlockDataFromPools(metaBlock data.MetaHeaderHandler, body *block.Body) IsInterfaceNil() bool } @@ -913,9 +916,12 @@ type RewardsCreator interface { // EpochStartValidatorInfoCreator defines the functionality for the metachain to create validator statistics at end of epoch type EpochStartValidatorInfoCreator interface { CreateValidatorInfoMiniBlocks(validatorInfo map[uint32][]*state.ValidatorInfo) (block.MiniBlockSlice, error) - VerifyValidatorInfoMiniBlocks(miniblocks []*block.MiniBlock, validatorsInfo map[uint32][]*state.ValidatorInfo) error - SaveValidatorInfoBlocksToStorage(metaBlock data.HeaderHandler, body *block.Body) - DeleteValidatorInfoBlocksFromStorage(metaBlock data.HeaderHandler) + VerifyValidatorInfoMiniBlocks(miniBlocks []*block.MiniBlock, validatorsInfo map[uint32][]*state.ValidatorInfo) error + GetLocalValidatorInfoCache() epochStart.ValidatorInfoCacher + CreateMarshalledData(body *block.Body) map[string][][]byte + GetValidatorInfoTxs(body *block.Body) map[string]*state.ShardValidatorInfo + SaveBlockDataToStorage(metaBlock data.HeaderHandler, body *block.Body) + DeleteBlockDataFromStorage(metaBlock data.HeaderHandler, body *block.Body) RemoveBlockDataFromPools(metaBlock data.HeaderHandler, body *block.Body) IsInterfaceNil() bool } @@ -988,7 +994,8 @@ type RatingsStepHandler interface { // ValidatorInfoSyncer defines the method needed for validatorInfoProcessing type ValidatorInfoSyncer interface { - SyncMiniBlocks(metaBlock data.HeaderHandler) ([][]byte, data.BodyHandler, error) + SyncMiniBlocks(headerHandler data.HeaderHandler) ([][]byte, data.BodyHandler, error) + SyncValidatorsInfo(bodyHandler data.BodyHandler) ([][]byte, map[string]*state.ShardValidatorInfo, error) IsInterfaceNil() bool } @@ -1054,6 +1061,7 @@ type EpochStartEventNotifier interface { // NodesCoordinator provides Validator methods needed for the peer processing type NodesCoordinator interface { + GetValidatorWithPublicKey(publicKey []byte) (validator nodesCoordinator.Validator, shardId uint32, err error) GetAllEligibleValidatorsPublicKeys(epoch uint32) (map[uint32][][]byte, error) GetAllWaitingValidatorsPublicKeys(epoch uint32) (map[uint32][][]byte, error) GetAllLeavingValidatorsPublicKeys(epoch uint32) (map[uint32][][]byte, error) diff --git a/process/mock/epochRewardsCreatorStub.go b/process/mock/epochRewardsCreatorStub.go index e465ef2bdf9..e67aa17b1da 100644 --- a/process/mock/epochRewardsCreatorStub.go +++ b/process/mock/epochRewardsCreatorStub.go @@ -17,13 +17,13 
@@ type EpochRewardsCreatorStub struct { VerifyRewardsMiniBlocksCalled func( metaBlock data.MetaHeaderHandler, validatorsInfo map[uint32][]*state.ValidatorInfo, computedEconomics *block.Economics, ) error - CreateMarshalizedDataCalled func(body *block.Body) map[string][][]byte - SaveTxBlockToStorageCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) - DeleteTxsFromStorageCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) - RemoveBlockDataFromPoolsCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) - GetRewardsTxsCalled func(body *block.Body) map[string]data.TransactionHandler - GetProtocolSustainCalled func() *big.Int - GetLocalTxCacheCalled func() epochStart.TransactionCacher + CreateMarshalledDataCalled func(body *block.Body) map[string][][]byte + SaveBlockDataToStorageCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) + DeleteBlockDataFromStorageCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) + RemoveBlockDataFromPoolsCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) + GetRewardsTxsCalled func(body *block.Body) map[string]data.TransactionHandler + GetProtocolSustainCalled func() *big.Int + GetLocalTxCacheCalled func() epochStart.TransactionCacher } // GetProtocolSustainabilityRewards - @@ -66,15 +66,15 @@ func (e *EpochRewardsCreatorStub) VerifyRewardsMiniBlocks( return nil } -// CreateMarshalizedData - -func (e *EpochRewardsCreatorStub) CreateMarshalizedData(body *block.Body) map[string][][]byte { - if e.CreateMarshalizedDataCalled != nil { - return e.CreateMarshalizedDataCalled(body) +// CreateMarshalledData - +func (e *EpochRewardsCreatorStub) CreateMarshalledData(body *block.Body) map[string][][]byte { + if e.CreateMarshalledDataCalled != nil { + return e.CreateMarshalledDataCalled(body) } return nil } -// GetRewardsTxs -- +// GetRewardsTxs - func (e *EpochRewardsCreatorStub) GetRewardsTxs(body *block.Body) map[string]data.TransactionHandler { if e.GetRewardsTxsCalled != nil { return e.GetRewardsTxsCalled(body) @@ -82,17 +82,17 @@ func (e *EpochRewardsCreatorStub) GetRewardsTxs(body *block.Body) map[string]dat return nil } -// SaveTxBlockToStorage - -func (e *EpochRewardsCreatorStub) SaveTxBlockToStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { - if e.SaveTxBlockToStorageCalled != nil { - e.SaveTxBlockToStorageCalled(metaBlock, body) +// SaveBlockDataToStorage - +func (e *EpochRewardsCreatorStub) SaveBlockDataToStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { + if e.SaveBlockDataToStorageCalled != nil { + e.SaveBlockDataToStorageCalled(metaBlock, body) } } -// DeleteTxsFromStorage - -func (e *EpochRewardsCreatorStub) DeleteTxsFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { - if e.DeleteTxsFromStorageCalled != nil { - e.DeleteTxsFromStorageCalled(metaBlock, body) +// DeleteBlockDataFromStorage - +func (e *EpochRewardsCreatorStub) DeleteBlockDataFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { + if e.DeleteBlockDataFromStorageCalled != nil { + e.DeleteBlockDataFromStorageCalled(metaBlock, body) } } diff --git a/process/mock/epochValidatorInfoCreatorStub.go b/process/mock/epochValidatorInfoCreatorStub.go index 3533131a117..6cf8318f6a1 100644 --- a/process/mock/epochValidatorInfoCreatorStub.go +++ b/process/mock/epochValidatorInfoCreatorStub.go @@ -3,6 +3,7 @@ package mock import ( "github.com/ElrondNetwork/elrond-go-core/data" "github.com/ElrondNetwork/elrond-go-core/data/block" + "github.com/ElrondNetwork/elrond-go/epochStart" 
"github.com/ElrondNetwork/elrond-go/state" ) @@ -10,9 +11,11 @@ import ( type EpochValidatorInfoCreatorStub struct { CreateValidatorInfoMiniBlocksCalled func(validatorsInfo map[uint32][]*state.ValidatorInfo) (block.MiniBlockSlice, error) VerifyValidatorInfoMiniBlocksCalled func(miniblocks []*block.MiniBlock, validatorsInfo map[uint32][]*state.ValidatorInfo) error - CreateMarshalizedDataCalled func(body block.Body) map[string][][]byte - SaveTxBlockToStorageCalled func(metaBlock data.HeaderHandler, body *block.Body) - DeleteTxsFromStorageCalled func(metaBlock data.HeaderHandler) + GetLocalValidatorInfoCacheCalled func() epochStart.ValidatorInfoCacher + CreateMarshalledDataCalled func(body *block.Body) map[string][][]byte + GetValidatorInfoTxsCalled func(body *block.Body) map[string]*state.ShardValidatorInfo + SaveBlockDataToStorageCalled func(metaBlock data.HeaderHandler, body *block.Body) + DeleteBlockDataFromStorageCalled func(metaBlock data.HeaderHandler, body *block.Body) RemoveBlockDataFromPoolsCalled func(metaBlock data.HeaderHandler, body *block.Body) } @@ -25,24 +28,48 @@ func (e *EpochValidatorInfoCreatorStub) CreateValidatorInfoMiniBlocks(validatorI } // VerifyValidatorInfoMiniBlocks - -func (e *EpochValidatorInfoCreatorStub) VerifyValidatorInfoMiniBlocks(miniblocks []*block.MiniBlock, validatorsInfo map[uint32][]*state.ValidatorInfo) error { +func (e *EpochValidatorInfoCreatorStub) VerifyValidatorInfoMiniBlocks(miniBlocks []*block.MiniBlock, validatorsInfo map[uint32][]*state.ValidatorInfo) error { if e.VerifyValidatorInfoMiniBlocksCalled != nil { - return e.VerifyValidatorInfoMiniBlocksCalled(miniblocks, validatorsInfo) + return e.VerifyValidatorInfoMiniBlocksCalled(miniBlocks, validatorsInfo) } return nil } -// SaveValidatorInfoBlocksToStorage - -func (e *EpochValidatorInfoCreatorStub) SaveValidatorInfoBlocksToStorage(metaBlock data.HeaderHandler, body *block.Body) { - if e.SaveTxBlockToStorageCalled != nil { - e.SaveTxBlockToStorageCalled(metaBlock, body) +// GetLocalValidatorInfoCache - +func (e *EpochValidatorInfoCreatorStub) GetLocalValidatorInfoCache() epochStart.ValidatorInfoCacher { + if e.GetLocalValidatorInfoCacheCalled != nil { + return e.GetLocalValidatorInfoCacheCalled() + } + return nil +} + +// CreateMarshalledData - +func (e *EpochValidatorInfoCreatorStub) CreateMarshalledData(body *block.Body) map[string][][]byte { + if e.CreateMarshalledDataCalled != nil { + return e.CreateMarshalledDataCalled(body) + } + return nil +} + +// GetValidatorInfoTxs - +func (e *EpochValidatorInfoCreatorStub) GetValidatorInfoTxs(body *block.Body) map[string]*state.ShardValidatorInfo { + if e.GetValidatorInfoTxsCalled != nil { + return e.GetValidatorInfoTxsCalled(body) + } + return nil +} + +// SaveBlockDataToStorage - +func (e *EpochValidatorInfoCreatorStub) SaveBlockDataToStorage(metaBlock data.HeaderHandler, body *block.Body) { + if e.SaveBlockDataToStorageCalled != nil { + e.SaveBlockDataToStorageCalled(metaBlock, body) } } -// DeleteValidatorInfoBlocksFromStorage - -func (e *EpochValidatorInfoCreatorStub) DeleteValidatorInfoBlocksFromStorage(metaBlock data.HeaderHandler) { - if e.DeleteTxsFromStorageCalled != nil { - e.DeleteTxsFromStorageCalled(metaBlock) +// DeleteBlockDataFromStorage - +func (e *EpochValidatorInfoCreatorStub) DeleteBlockDataFromStorage(metaBlock data.HeaderHandler, body *block.Body) { + if e.DeleteBlockDataFromStorageCalled != nil { + e.DeleteBlockDataFromStorageCalled(metaBlock, body) } } diff --git a/process/mock/intermProcessorStub.go 
b/process/mock/intermProcessorStub.go index b4818bdc14f..4a03fe40291 100644 --- a/process/mock/intermProcessorStub.go +++ b/process/mock/intermProcessorStub.go @@ -13,7 +13,7 @@ type IntermediateTransactionHandlerStub struct { VerifyInterMiniBlocksCalled func(body *block.Body) error SaveCurrentIntermediateTxToStorageCalled func() CreateBlockStartedCalled func() - CreateMarshalizedDataCalled func(txHashes [][]byte) ([][]byte, error) + CreateMarshalledDataCalled func(txHashes [][]byte) ([][]byte, error) GetAllCurrentFinishedTxsCalled func() map[string]data.TransactionHandler RemoveProcessedResultsCalled func(key []byte) [][]byte InitProcessedResultsCalled func(key []byte) @@ -35,12 +35,12 @@ func (ith *IntermediateTransactionHandlerStub) InitProcessedResults(key []byte) } } -// CreateMarshalizedData - -func (ith *IntermediateTransactionHandlerStub) CreateMarshalizedData(txHashes [][]byte) ([][]byte, error) { - if ith.CreateMarshalizedDataCalled == nil { +// CreateMarshalledData - +func (ith *IntermediateTransactionHandlerStub) CreateMarshalledData(txHashes [][]byte) ([][]byte, error) { + if ith.CreateMarshalledDataCalled == nil { return nil, nil } - return ith.CreateMarshalizedDataCalled(txHashes) + return ith.CreateMarshalledDataCalled(txHashes) } // AddIntermediateTransactions - diff --git a/process/mock/intermediateTransactionHandlerMock.go b/process/mock/intermediateTransactionHandlerMock.go index c2eee768622..eeb923a1004 100644 --- a/process/mock/intermediateTransactionHandlerMock.go +++ b/process/mock/intermediateTransactionHandlerMock.go @@ -13,7 +13,7 @@ type IntermediateTransactionHandlerMock struct { VerifyInterMiniBlocksCalled func(body *block.Body) error SaveCurrentIntermediateTxToStorageCalled func() CreateBlockStartedCalled func() - CreateMarshalizedDataCalled func(txHashes [][]byte) ([][]byte, error) + CreateMarshalledDataCalled func(txHashes [][]byte) ([][]byte, error) GetAllCurrentFinishedTxsCalled func() map[string]data.TransactionHandler RemoveProcessedResultsCalled func(key []byte) [][]byte InitProcessedResultsCalled func(key []byte) @@ -36,12 +36,12 @@ func (ith *IntermediateTransactionHandlerMock) InitProcessedResults(key []byte) } } -// CreateMarshalizedData - -func (ith *IntermediateTransactionHandlerMock) CreateMarshalizedData(txHashes [][]byte) ([][]byte, error) { - if ith.CreateMarshalizedDataCalled == nil { +// CreateMarshalledData - +func (ith *IntermediateTransactionHandlerMock) CreateMarshalledData(txHashes [][]byte) ([][]byte, error) { + if ith.CreateMarshalledDataCalled == nil { return nil, nil } - return ith.CreateMarshalizedDataCalled(txHashes) + return ith.CreateMarshalledDataCalled(txHashes) } // AddIntermediateTransactions - diff --git a/process/mock/preprocessorMock.go b/process/mock/preprocessorMock.go index 7f08bb7e21e..9f14d457b6a 100644 --- a/process/mock/preprocessorMock.go +++ b/process/mock/preprocessorMock.go @@ -19,7 +19,7 @@ type PreProcessorMock struct { SaveTxsToStorageCalled func(body *block.Body) error ProcessBlockTransactionsCalled func(header data.HeaderHandler, body *block.Body, haveTime func() bool) error RequestBlockTransactionsCalled func(body *block.Body) int - CreateMarshalizedDataCalled func(txHashes [][]byte) ([][]byte, error) + CreateMarshalledDataCalled func(txHashes [][]byte) ([][]byte, error) RequestTransactionsForMiniBlockCalled func(miniBlock *block.MiniBlock) int ProcessMiniBlockCalled func(miniBlock *block.MiniBlock, haveTime func() bool, haveAdditionalTime func() bool, scheduledMode bool, partialMbExecutionMode bool, 
indexOfLastTxProcessed int, preProcessorExecutionInfoHandler process.PreProcessorExecutionInfoHandler) ([][]byte, int, bool, error)
CreateAndProcessMiniBlocksCalled func(haveTime func() bool) (block.MiniBlockSlice, error)
@@ -92,12 +92,12 @@ func (ppm *PreProcessorMock) RequestBlockTransactions(body *block.Body) int {
 return ppm.RequestBlockTransactionsCalled(body)
 }

-// CreateMarshalizedData -
-func (ppm *PreProcessorMock) CreateMarshalizedData(txHashes [][]byte) ([][]byte, error) {
- if ppm.CreateMarshalizedDataCalled == nil {
+// CreateMarshalledData -
+func (ppm *PreProcessorMock) CreateMarshalledData(txHashes [][]byte) ([][]byte, error) {
+ if ppm.CreateMarshalledDataCalled == nil {
 return nil, nil
 }
- return ppm.CreateMarshalizedDataCalled(txHashes)
+ return ppm.CreateMarshalledDataCalled(txHashes)
 }

 // RequestTransactionsForMiniBlock -
diff --git a/process/peer/constants.go b/process/peer/constants.go
new file mode 100644
index 00000000000..217c63d387f
--- /dev/null
+++ b/process/peer/constants.go
@@ -0,0 +1,10 @@
+package peer
+
+const (
+ minSizeInBytes = 1
+ maxSizeInBytes = 128
+ interceptedValidatorInfoType = "intercepted validator info"
+ publicKeyProperty = "public key"
+ publicKeyPropertyRequiredBytesLen = 96
+ listProperty = "list"
+)
diff --git a/process/peer/interceptedValidatorInfo.go b/process/peer/interceptedValidatorInfo.go
new file mode 100644
index 00000000000..1924cb536eb
--- /dev/null
+++ b/process/peer/interceptedValidatorInfo.go
@@ -0,0 +1,143 @@
+package peer
+
+import (
+ "fmt"
+
+ "github.com/ElrondNetwork/elrond-go-core/core/check"
+ "github.com/ElrondNetwork/elrond-go-core/hashing"
+ "github.com/ElrondNetwork/elrond-go-core/marshal"
+ logger "github.com/ElrondNetwork/elrond-go-logger"
+ "github.com/ElrondNetwork/elrond-go/process"
+ "github.com/ElrondNetwork/elrond-go/state"
+)
+
+// ArgInterceptedValidatorInfo is the argument used to create a new intercepted validator info
+type ArgInterceptedValidatorInfo struct {
+ DataBuff []byte
+ Marshalizer marshal.Marshalizer
+ Hasher hashing.Hasher
+}
+
+// interceptedValidatorInfo is a wrapper over validatorInfo
+type interceptedValidatorInfo struct {
+ shardValidatorInfo *state.ShardValidatorInfo
+ hash []byte
+}
+
+// NewInterceptedValidatorInfo creates a new intercepted validator info instance
+func NewInterceptedValidatorInfo(args ArgInterceptedValidatorInfo) (*interceptedValidatorInfo, error) {
+ err := checkArgs(args)
+ if err != nil {
+ return nil, err
+ }
+
+ shardValidatorInfo, err := createShardValidatorInfo(args.Marshalizer, args.DataBuff)
+ if err != nil {
+ return nil, err
+ }
+
+ return &interceptedValidatorInfo{
+ shardValidatorInfo: shardValidatorInfo,
+ hash: args.Hasher.Compute(string(args.DataBuff)),
+ }, nil
+}
+
+func checkArgs(args ArgInterceptedValidatorInfo) error {
+ if len(args.DataBuff) == 0 {
+ return process.ErrNilBuffer
+ }
+ if check.IfNil(args.Marshalizer) {
+ return process.ErrNilMarshalizer
+ }
+ if check.IfNil(args.Hasher) {
+ return process.ErrNilHasher
+ }
+
+ return nil
+}
+
+func createShardValidatorInfo(marshalizer marshal.Marshalizer, buff []byte) (*state.ShardValidatorInfo, error) {
+ shardValidatorInfo := &state.ShardValidatorInfo{}
+ err := marshalizer.Unmarshal(shardValidatorInfo, buff)
+ if err != nil {
+ return nil, err
+ }
+
+ return shardValidatorInfo, nil
+}
+
+// CheckValidity checks the validity of the received validator info
+func (ivi *interceptedValidatorInfo) CheckValidity() error {
+ // Verify string properties len
+ err := verifyPropertyLen(publicKeyProperty, ivi.shardValidatorInfo.PublicKey, publicKeyPropertyRequiredBytesLen, minSizeInBytes, maxSizeInBytes)
+ if err != nil {
+ return err
+ }
+ err = verifyPropertyLen(listProperty, []byte(ivi.shardValidatorInfo.List), 0, minSizeInBytes, maxSizeInBytes)
+ if err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// IsForCurrentShard always returns true
+func (ivi *interceptedValidatorInfo) IsForCurrentShard() bool {
+ return true
+}
+
+// ValidatorInfo returns the current validator info structure
+func (ivi *interceptedValidatorInfo) ValidatorInfo() *state.ShardValidatorInfo {
+ return ivi.shardValidatorInfo
+}
+
+// Hash returns the hash of this validator info
+func (ivi *interceptedValidatorInfo) Hash() []byte {
+ return ivi.hash
+}
+
+// Type returns the type of this intercepted data
+func (ivi *interceptedValidatorInfo) Type() string {
+ return interceptedValidatorInfoType
+}
+
+// Identifiers returns the identifiers used in requests
+func (ivi *interceptedValidatorInfo) Identifiers() [][]byte {
+ return [][]byte{ivi.hash}
+}
+
+// String returns the validator's info most important fields as string
+func (ivi *interceptedValidatorInfo) String() string {
+ return fmt.Sprintf("pk=%s, shard=%d, list=%s, index=%d, tempRating=%d",
+ logger.DisplayByteSlice(ivi.shardValidatorInfo.PublicKey),
+ ivi.shardValidatorInfo.ShardId,
+ ivi.shardValidatorInfo.List,
+ ivi.shardValidatorInfo.Index,
+ ivi.shardValidatorInfo.TempRating,
+ )
+}
+
+// verifyPropertyLen returns an error if the provided value is longer than accepted by the network
+func verifyPropertyLen(property string, value []byte, requiredLen, minSize, maxSize int) error {
+ hasRequiredLen := requiredLen != 0
+ isOverLimit := len(value) > maxSize
+ isOverRequiredLen := len(value) > requiredLen
+ isTooLong := isOverLimit || (hasRequiredLen && isOverRequiredLen)
+ if isTooLong {
+ return fmt.Errorf("%w for %s", process.ErrPropertyTooLong, property)
+ }
+
+ isUnderLimit := len(value) < minSize
+ isUnderRequiredLen := len(value) < requiredLen
+ isTooShort := isUnderLimit || (hasRequiredLen && isUnderRequiredLen)
+ if isTooShort {
+ return fmt.Errorf("%w for %s", process.ErrPropertyTooShort, property)
+ }
+
+ return nil
+}
+
+// IsInterfaceNil returns true if there is no value under the interface
+func (ivi *interceptedValidatorInfo) IsInterfaceNil() bool {
+ return ivi == nil
+}
diff --git a/process/peer/interceptedValidatorInfo_test.go b/process/peer/interceptedValidatorInfo_test.go
new file mode 100644
index 00000000000..4993fc774c9
--- /dev/null
+++ b/process/peer/interceptedValidatorInfo_test.go
@@ -0,0 +1,159 @@
+package peer
+
+import (
+ "errors"
+ "fmt"
+ "strings"
+ "testing"
+
+ "github.com/ElrondNetwork/elrond-go-core/core/check"
+ logger "github.com/ElrondNetwork/elrond-go-logger"
+ "github.com/ElrondNetwork/elrond-go/process"
+ "github.com/ElrondNetwork/elrond-go/testscommon"
+ "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func createMockArgInterceptedValidatorInfo() ArgInterceptedValidatorInfo {
+ args := ArgInterceptedValidatorInfo{
+ Marshalizer: testscommon.MarshalizerMock{},
+ Hasher: &hashingMocks.HasherMock{},
+ }
+ args.DataBuff, _ = args.Marshalizer.Marshal(createMockShardValidatorInfo())
+
+ return args
+}
+
+func TestNewInterceptedValidatorInfo(t *testing.T) {
+ t.Parallel()
+
+ t.Run("nil data buff should error", func(t *testing.T) {
+ t.Parallel()
+
+ args :=
createMockArgInterceptedValidatorInfo() + args.DataBuff = nil + + ivi, err := NewInterceptedValidatorInfo(args) + assert.Equal(t, process.ErrNilBuffer, err) + assert.True(t, check.IfNil(ivi)) + }) + t.Run("nil marshaller should error", func(t *testing.T) { + t.Parallel() + + args := createMockArgInterceptedValidatorInfo() + args.Marshalizer = nil + + ivi, err := NewInterceptedValidatorInfo(args) + assert.Equal(t, process.ErrNilMarshalizer, err) + assert.True(t, check.IfNil(ivi)) + }) + t.Run("nil hasher should error", func(t *testing.T) { + t.Parallel() + + args := createMockArgInterceptedValidatorInfo() + args.Hasher = nil + + ivi, err := NewInterceptedValidatorInfo(args) + assert.Equal(t, process.ErrNilHasher, err) + assert.True(t, check.IfNil(ivi)) + }) + t.Run("unmarshal returns error", func(t *testing.T) { + t.Parallel() + + expectedErr := errors.New("expected err") + args := createMockArgInterceptedValidatorInfo() + args.Marshalizer = &testscommon.MarshalizerStub{ + UnmarshalCalled: func(obj interface{}, buff []byte) error { + return expectedErr + }, + } + + ivi, err := NewInterceptedValidatorInfo(args) + assert.Equal(t, expectedErr, err) + assert.True(t, check.IfNil(ivi)) + }) + t.Run("should work", func(t *testing.T) { + t.Parallel() + + ivi, err := NewInterceptedValidatorInfo(createMockArgInterceptedValidatorInfo()) + assert.Nil(t, err) + assert.False(t, check.IfNil(ivi)) + }) +} + +func TestInterceptedValidatorInfo_CheckValidity(t *testing.T) { + t.Parallel() + + t.Run("publicKeyProperty too short", testInterceptedValidatorInfoPropertyLen(publicKeyProperty, false)) + t.Run("publicKeyProperty too long", testInterceptedValidatorInfoPropertyLen(publicKeyProperty, true)) + + t.Run("listProperty too short", testInterceptedValidatorInfoPropertyLen(listProperty, false)) + t.Run("listProperty too long", testInterceptedValidatorInfoPropertyLen(listProperty, true)) + + t.Run("should work", func(t *testing.T) { + t.Parallel() + + args := createMockArgInterceptedValidatorInfo() + ivi, _ := NewInterceptedValidatorInfo(args) + require.False(t, check.IfNil(ivi)) + assert.Nil(t, ivi.CheckValidity()) + }) +} + +func testInterceptedValidatorInfoPropertyLen(property string, tooLong bool) func(t *testing.T) { + return func(t *testing.T) { + t.Parallel() + + value := []byte("") + expectedError := process.ErrPropertyTooShort + if tooLong { + value = make([]byte, 130) + expectedError = process.ErrPropertyTooLong + } + + args := createMockArgInterceptedValidatorInfo() + ivi, _ := NewInterceptedValidatorInfo(args) + require.False(t, check.IfNil(ivi)) + + switch property { + case publicKeyProperty: + ivi.shardValidatorInfo.PublicKey = value + case listProperty: + ivi.shardValidatorInfo.List = string(value) + default: + assert.True(t, false) + } + + err := ivi.CheckValidity() + assert.True(t, strings.Contains(err.Error(), expectedError.Error())) + } +} + +func TestInterceptedValidatorInfo_Getters(t *testing.T) { + t.Parallel() + + args := createMockArgInterceptedValidatorInfo() + ivi, _ := NewInterceptedValidatorInfo(args) + require.False(t, check.IfNil(ivi)) + + validatorInfo := createMockShardValidatorInfo() + validatorInfoBuff, _ := args.Marshalizer.Marshal(validatorInfo) + hash := args.Hasher.Compute(string(validatorInfoBuff)) + + assert.True(t, ivi.IsForCurrentShard()) + assert.Equal(t, validatorInfo, ivi.ValidatorInfo()) + assert.Equal(t, hash, ivi.Hash()) + assert.Equal(t, interceptedValidatorInfoType, ivi.Type()) + + identifiers := ivi.Identifiers() + assert.Equal(t, 1, len(identifiers)) + 
assert.Equal(t, hash, identifiers[0]) + + str := ivi.String() + assert.True(t, strings.Contains(str, fmt.Sprintf("pk=%s", logger.DisplayByteSlice(ivi.shardValidatorInfo.PublicKey)))) + assert.True(t, strings.Contains(str, fmt.Sprintf("shard=%d", validatorInfo.ShardId))) + assert.True(t, strings.Contains(str, fmt.Sprintf("list=%s", validatorInfo.List))) + assert.True(t, strings.Contains(str, fmt.Sprintf("index=%d", validatorInfo.Index))) + assert.True(t, strings.Contains(str, fmt.Sprintf("tempRating=%d", validatorInfo.TempRating))) +} diff --git a/process/peer/validatorsProvider_test.go b/process/peer/validatorsProvider_test.go index d23b3fa282a..ba307e79b8e 100644 --- a/process/peer/validatorsProvider_test.go +++ b/process/peer/validatorsProvider_test.go @@ -624,7 +624,7 @@ func TestValidatorsProvider_DoesntCallUpdateUpdateCacheWithoutRequests(t *testin } func createMockValidatorInfo() *state.ValidatorInfo { initialInfo := &state.ValidatorInfo{ - PublicKey: []byte("a1"), + PublicKey: []byte("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), ShardId: 0, List: "eligible", Index: 1, @@ -645,6 +645,17 @@ func createMockValidatorInfo() *state.ValidatorInfo { return initialInfo } +func createMockShardValidatorInfo() *state.ShardValidatorInfo { + initialInfo := &state.ShardValidatorInfo{ + PublicKey: bytes.Repeat([]byte("a"), 96), + ShardId: 0, + List: "eligible", + Index: 1, + TempRating: 100, + } + return initialInfo +} + func createDefaultValidatorsProviderArg() ArgValidatorsProvider { return ArgValidatorsProvider{ NodesCoordinator: &shardingMocks.NodesCoordinatorMock{}, diff --git a/sharding/mock/enableEpochsHandlerMock.go b/sharding/mock/enableEpochsHandlerMock.go index d20816b4a2f..4eca2905de3 100644 --- a/sharding/mock/enableEpochsHandlerMock.go +++ b/sharding/mock/enableEpochsHandlerMock.go @@ -2,7 +2,9 @@ package mock // EnableEpochsHandlerMock - type EnableEpochsHandlerMock struct { - WaitingListFixEnableEpochField uint32 + WaitingListFixEnableEpochField uint32 + RefactorPeersMiniBlocksEnableEpochField uint32 + IsRefactorPeersMiniBlocksFlagEnabledField bool } // BlockGasAndFeesReCheckEnableEpoch returns 0 @@ -90,6 +92,11 @@ func (mock *EnableEpochsHandlerMock) MiniBlockPartialExecutionEnableEpoch() uint return 0 } +// RefactorPeersMiniBlocksEnableEpoch returns 0 +func (mock *EnableEpochsHandlerMock) RefactorPeersMiniBlocksEnableEpoch() uint32 { + return mock.RefactorPeersMiniBlocksEnableEpochField +} + // IsSCDeployFlagEnabled returns false func (mock *EnableEpochsHandlerMock) IsSCDeployFlagEnabled() bool { return false @@ -524,6 +531,11 @@ func (mock *EnableEpochsHandlerMock) IsChangeDelegationOwnerFlagEnabled() bool { return false } +// IsRefactorPeersMiniBlocksFlagEnabled returns false +func (mock *EnableEpochsHandlerMock) IsRefactorPeersMiniBlocksFlagEnabled() bool { + return mock.IsRefactorPeersMiniBlocksFlagEnabledField +} + // IsInterfaceNil returns true if there is no value under the interface func (mock *EnableEpochsHandlerMock) IsInterfaceNil() bool { return mock == nil diff --git a/sharding/mock/epochStartNotifierStub.go b/sharding/mock/epochStartNotifierStub.go index c8cd1f3278b..2ddd42f2bbb 100644 --- a/sharding/mock/epochStartNotifierStub.go +++ b/sharding/mock/epochStartNotifierStub.go @@ -9,7 +9,7 @@ import ( type EpochStartNotifierStub struct { RegisterHandlerCalled func(handler epochStart.ActionHandler) UnregisterHandlerCalled func(handler epochStart.ActionHandler) - NotifyAllPrepareCalled func(hdr data.HeaderHandler, 
body data.BodyHandler) + NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) NotifyAllCalled func(hdr data.HeaderHandler) } @@ -28,9 +28,9 @@ func (esnm *EpochStartNotifierStub) UnregisterHandler(handler epochStart.ActionH } // NotifyAllPrepare - -func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) { +func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) { if esnm.NotifyAllPrepareCalled != nil { - esnm.NotifyAllPrepareCalled(metaHdr, body) + esnm.NotifyAllPrepareCalled(metaHdr, body, validatorInfoCacher) } } diff --git a/sharding/nodesCoordinator/errors.go b/sharding/nodesCoordinator/errors.go index e9f210ecdd9..daedcf07f86 100644 --- a/sharding/nodesCoordinator/errors.go +++ b/sharding/nodesCoordinator/errors.go @@ -108,3 +108,6 @@ var ErrNilNodeTypeProvider = errors.New("nil node type provider") // ErrNilEnableEpochsHandler signals that a nil enable epochs handler has been provided var ErrNilEnableEpochsHandler = errors.New("nil enable epochs handler") + +// ErrNilValidatorInfoCacher signals that a nil value for the validator info cacher has been provided +var ErrNilValidatorInfoCacher = errors.New("validator info cacher is nil") diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinator.go b/sharding/nodesCoordinator/indexHashedNodesCoordinator.go index 6fe44a92db5..b94155958e6 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinator.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinator.go @@ -17,6 +17,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/marshal" logger "github.com/ElrondNetwork/elrond-go-logger" "github.com/ElrondNetwork/elrond-go/common" + "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/errors" "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/storage" @@ -94,6 +95,7 @@ type indexHashedNodesCoordinator struct { flagWaitingListFix atomicFlags.Flag nodeTypeProvider NodeTypeProviderHandler enableEpochsHandler common.EnableEpochsHandler + validatorInfoCacher epochStart.ValidatorInfoCacher } // NewIndexHashedNodesCoordinator creates a new index hashed group selector @@ -138,6 +140,7 @@ func NewIndexHashedNodesCoordinator(arguments ArgNodesCoordinator) (*indexHashed nodeTypeProvider: arguments.NodeTypeProvider, isFullArchive: arguments.IsFullArchive, enableEpochsHandler: arguments.EnableEpochsHandler, + validatorInfoCacher: arguments.ValidatorInfoCacher, } ihnc.loadingFromDisk.Store(false) @@ -217,6 +220,9 @@ func checkArguments(arguments ArgNodesCoordinator) error { if check.IfNil(arguments.EnableEpochsHandler) { return ErrNilEnableEpochsHandler } + if check.IfNil(arguments.ValidatorInfoCacher) { + return ErrNilValidatorInfoCacher + } return nil } @@ -558,14 +564,14 @@ func (ihnc *indexHashedNodesCoordinator) EpochStartPrepare(metaHdr data.HeaderHa return } - allValidatorInfo, err := createValidatorInfoFromBody(body, ihnc.marshalizer, ihnc.numTotalEligible) + ihnc.updateEpochFlags(newEpoch) + + allValidatorInfo, err := ihnc.createValidatorInfoFromBody(body, ihnc.numTotalEligible, newEpoch) if err != nil { - log.Error("could not create validator info from body - do nothing on nodesCoordinator epochStartPrepare") + log.Error("could not create validator info from body - do nothing on nodesCoordinator epochStartPrepare", "error", err.Error()) return } - 
ihnc.updateEpochFlags(newEpoch) - ihnc.mutNodesConfig.RLock() previousConfig := ihnc.nodesConfig[ihnc.currentEpoch] if previousConfig == nil { @@ -1170,10 +1176,10 @@ func selectValidators( } // createValidatorInfoFromBody unmarshalls body data to create validator info -func createValidatorInfoFromBody( +func (ihnc *indexHashedNodesCoordinator) createValidatorInfoFromBody( body data.BodyHandler, - marshalizer marshal.Marshalizer, previousTotal uint64, + epoch uint32, ) ([]*state.ShardValidatorInfo, error) { if check.IfNil(body) { return nil, ErrNilBlockBody @@ -1191,19 +1197,37 @@ func createValidatorInfoFromBody( } for _, txHash := range peerMiniBlock.TxHashes { - vid := &state.ShardValidatorInfo{} - err := marshalizer.Unmarshal(vid, txHash) + shardValidatorInfo, err := ihnc.getShardValidatorInfoData(txHash, epoch) if err != nil { return nil, err } - allValidatorInfo = append(allValidatorInfo, vid) + allValidatorInfo = append(allValidatorInfo, shardValidatorInfo) } } return allValidatorInfo, nil } +func (ihnc *indexHashedNodesCoordinator) getShardValidatorInfoData(txHash []byte, epoch uint32) (*state.ShardValidatorInfo, error) { + if epoch >= ihnc.enableEpochsHandler.RefactorPeersMiniBlocksEnableEpoch() { + shardValidatorInfo, err := ihnc.validatorInfoCacher.GetValidatorInfo(txHash) + if err != nil { + return nil, err + } + + return shardValidatorInfo, nil + } + + shardValidatorInfo := &state.ShardValidatorInfo{} + err := ihnc.marshalizer.Unmarshal(shardValidatorInfo, txHash) + if err != nil { + return nil, err + } + + return shardValidatorInfo, nil +} + func (ihnc *indexHashedNodesCoordinator) updateEpochFlags(epoch uint32) { ihnc.flagWaitingListFix.SetValue(epoch >= ihnc.enableEpochsHandler.WaitingListFixEnableEpoch()) log.Debug("indexHashedNodesCoordinator: waiting list fix", "enabled", ihnc.flagWaitingListFix.IsSet()) diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinatorLite_test.go b/sharding/nodesCoordinator/indexHashedNodesCoordinatorLite_test.go index b54cf4ae6b2..39d92d908da 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinatorLite_test.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinatorLite_test.go @@ -6,6 +6,7 @@ import ( "testing" "github.com/ElrondNetwork/elrond-go-core/core" + "github.com/ElrondNetwork/elrond-go/dataRetriever/dataPool" "github.com/ElrondNetwork/elrond-go/sharding/mock" "github.com/ElrondNetwork/elrond-go/state" "github.com/stretchr/testify/assert" @@ -158,15 +159,17 @@ func TestIndexHashedNodesCoordinator_IsEpochInConfig(t *testing.T) { t.Parallel() arguments := createArguments() - + arguments.ValidatorInfoCacher = dataPool.NewCurrentEpochValidatorInfoPool() ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(t, err) epoch := uint32(1) ihnc.nodesConfig[epoch] = ihnc.nodesConfig[0] - body := createBlockBodyFromNodesCoordinator(ihnc, epoch) - validatorsInfo, _ := createValidatorInfoFromBody(body, arguments.Marshalizer, 10) + ihnc.updateEpochFlags(epoch) + + body := createBlockBodyFromNodesCoordinator(ihnc, epoch, ihnc.validatorInfoCacher) + validatorsInfo, _ := ihnc.createValidatorInfoFromBody(body, 10, epoch) err = ihnc.SetNodesConfigFromValidatorsInfo(epoch, []byte{}, validatorsInfo) require.Nil(t, err) diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinatorWithRater_test.go b/sharding/nodesCoordinator/indexHashedNodesCoordinatorWithRater_test.go index 03162efa65e..b86400d2369 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinatorWithRater_test.go +++ 
b/sharding/nodesCoordinator/indexHashedNodesCoordinatorWithRater_test.go @@ -19,6 +19,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon/genericMocks" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" + vic "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -95,6 +96,7 @@ func TestIndexHashedGroupSelectorWithRater_OkValShouldWork(t *testing.T) { NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nc, err := NewIndexHashedNodesCoordinator(arguments) assert.Nil(t, err) @@ -190,6 +192,7 @@ func BenchmarkIndexHashedGroupSelectorWithRater_ComputeValidatorsGroup63of400(b NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(b, err) @@ -264,6 +267,7 @@ func Test_ComputeValidatorsGroup63of400(t *testing.T) { NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) numRounds := uint64(1000000) @@ -338,6 +342,7 @@ func TestIndexHashedGroupSelectorWithRater_GetValidatorWithPublicKeyShouldReturn NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nc, _ := NewIndexHashedNodesCoordinator(arguments) ihnc, _ := NewIndexHashedNodesCoordinatorWithRater(nc, &mock.RaterMock{}) @@ -391,6 +396,7 @@ func TestIndexHashedGroupSelectorWithRater_GetValidatorWithPublicKeyShouldReturn NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nc, _ := NewIndexHashedNodesCoordinator(arguments) ihnc, _ := NewIndexHashedNodesCoordinatorWithRater(nc, &mock.RaterMock{}) @@ -458,6 +464,7 @@ func TestIndexHashedGroupSelectorWithRater_GetValidatorWithPublicKeyShouldWork(t NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nc, _ := NewIndexHashedNodesCoordinator(arguments) ihnc, _ := NewIndexHashedNodesCoordinatorWithRater(nc, &mock.RaterMock{}) @@ -542,6 +549,7 @@ func TestIndexHashedGroupSelectorWithRater_GetAllEligibleValidatorsPublicKeys(t NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -847,6 +855,7 @@ func BenchmarkIndexHashedWithRaterGroupSelector_ComputeValidatorsGroup21of400(b NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(b, err) diff --git 
a/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go b/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go index 39265ac7816..709e170ba8f 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go @@ -1,6 +1,7 @@ package nodesCoordinator import ( + "bytes" "encoding/hex" "errors" "fmt" @@ -16,15 +17,19 @@ import ( "github.com/ElrondNetwork/elrond-go-core/core/check" "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go-core/data/endProcess" + "github.com/ElrondNetwork/elrond-go-core/hashing" "github.com/ElrondNetwork/elrond-go-core/hashing/sha256" "github.com/ElrondNetwork/elrond-go-core/marshal" "github.com/ElrondNetwork/elrond-go/common" + "github.com/ElrondNetwork/elrond-go/dataRetriever/dataPool" + "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/sharding/mock" "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/storage/lrucache" "github.com/ElrondNetwork/elrond-go/testscommon/genericMocks" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" + vic "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -111,7 +116,10 @@ func createArguments() ArgNodesCoordinator { IsFullArchive: false, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + EnableEpochsHandler: &mock.EnableEpochsHandlerMock{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + }, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } return arguments } @@ -191,6 +199,15 @@ func TestNewIndexHashedNodesCoordinator_NilCacherShouldErr(t *testing.T) { require.Nil(t, ihnc) } +func TestNewIndexHashedNodesCoordinator_NilEnableEpochsHandlerShouldErr(t *testing.T) { + arguments := createArguments() + arguments.EnableEpochsHandler = nil + ihnc, err := NewIndexHashedNodesCoordinator(arguments) + + require.Equal(t, ErrNilEnableEpochsHandler, err) + require.Nil(t, ihnc) +} + func TestNewIndexHashedGroupSelector_OkValsShouldWork(t *testing.T) { t.Parallel() @@ -261,6 +278,7 @@ func TestIndexHashedNodesCoordinator_OkValShouldWork(t *testing.T) { ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -320,6 +338,7 @@ func TestIndexHashedNodesCoordinator_NewCoordinatorTooFewNodesShouldErr(t *testi ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -393,6 +412,7 @@ func TestIndexHashedNodesCoordinator_ComputeValidatorsGroup1ValidatorShouldRetur ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) list2, err := ihnc.ComputeConsensusGroup([]byte("randomness"), 0, 0, 0) @@ -452,6 
+472,7 @@ func TestIndexHashedNodesCoordinator_ComputeValidatorsGroup400of400For10locksNoM ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -539,6 +560,7 @@ func TestIndexHashedNodesCoordinator_ComputeValidatorsGroup400of400For10BlocksMe ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -609,6 +631,7 @@ func TestIndexHashedNodesCoordinator_ComputeValidatorsGroup63of400TestEqualSameP ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -672,6 +695,7 @@ func BenchmarkIndexHashedGroupSelector_ComputeValidatorsGroup21of400(b *testing. ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -744,6 +768,7 @@ func runBenchmark(consensusGroupCache Cacher, consensusGroupSize int, nodesMap m ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -793,6 +818,7 @@ func computeMemoryRequirements(consensusGroupCache Cacher, consensusGroupSize in ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(b, err) @@ -932,6 +958,7 @@ func TestIndexHashedNodesCoordinator_GetValidatorWithPublicKeyShouldWork(t *test ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -1015,6 +1042,7 @@ func TestIndexHashedGroupSelector_GetAllEligibleValidatorsPublicKeys(t *testing. 
ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -1093,6 +1121,7 @@ func TestIndexHashedGroupSelector_GetAllWaitingValidatorsPublicKeys(t *testing.T ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -1102,27 +1131,34 @@ func TestIndexHashedGroupSelector_GetAllWaitingValidatorsPublicKeys(t *testing.T require.Nil(t, err) } -func createBlockBodyFromNodesCoordinator(ihnc *indexHashedNodesCoordinator, epoch uint32) *block.Body { +func createBlockBodyFromNodesCoordinator(ihnc *indexHashedNodesCoordinator, epoch uint32, validatorInfoCacher epochStart.ValidatorInfoCacher) *block.Body { body := &block.Body{MiniBlocks: make([]*block.MiniBlock, 0)} - mbs := createMiniBlocksForNodesMap(ihnc.nodesConfig[epoch].eligibleMap, string(common.EligibleList), ihnc.marshalizer) + mbs := createMiniBlocksForNodesMap(ihnc.nodesConfig[epoch].eligibleMap, string(common.EligibleList), ihnc.marshalizer, ihnc.hasher, validatorInfoCacher) body.MiniBlocks = append(body.MiniBlocks, mbs...) - mbs = createMiniBlocksForNodesMap(ihnc.nodesConfig[epoch].waitingMap, string(common.WaitingList), ihnc.marshalizer) + mbs = createMiniBlocksForNodesMap(ihnc.nodesConfig[epoch].waitingMap, string(common.WaitingList), ihnc.marshalizer, ihnc.hasher, validatorInfoCacher) body.MiniBlocks = append(body.MiniBlocks, mbs...) - mbs = createMiniBlocksForNodesMap(ihnc.nodesConfig[epoch].leavingMap, string(common.LeavingList), ihnc.marshalizer) + mbs = createMiniBlocksForNodesMap(ihnc.nodesConfig[epoch].leavingMap, string(common.LeavingList), ihnc.marshalizer, ihnc.hasher, validatorInfoCacher) body.MiniBlocks = append(body.MiniBlocks, mbs...) 
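// Illustrative sketch only — plain Go, not the elrond-go types: it mimics the epoch gate that
// getShardValidatorInfoData applies above. Before RefactorPeersMiniBlocksEnableEpoch the peer
// mini block "tx hash" still carries the marshalled ShardValidatorInfo itself; from that epoch
// on it is a real hash resolved through the current-epoch validator info cacher (the same
// cacher this test helper populates via AddValidatorInfo). encoding/json stands in for the
// project's marshaller and a map stands in for the ValidatorInfoCacher.
package main

import (
	"encoding/json"
	"errors"
	"fmt"
)

type shardValidatorInfo struct {
	PublicKey []byte
	ShardId   uint32
	List      string
}

type validatorInfoCache map[string]*shardValidatorInfo

func (c validatorInfoCache) getValidatorInfo(hash []byte) (*shardValidatorInfo, error) {
	svi, ok := c[string(hash)]
	if !ok {
		return nil, errors.New("validator info not found in cache")
	}
	return svi, nil
}

// getShardValidatorInfoData mirrors the dispatch rule: cacher lookup once the refactor epoch
// is reached, legacy unmarshal of the bytes carried in the mini block before that.
func getShardValidatorInfoData(txHash []byte, epoch, activationEpoch uint32, cache validatorInfoCache) (*shardValidatorInfo, error) {
	if epoch >= activationEpoch {
		return cache.getValidatorInfo(txHash)
	}

	svi := &shardValidatorInfo{}
	if err := json.Unmarshal(txHash, svi); err != nil {
		return nil, err
	}
	return svi, nil
}

func main() {
	const activationEpoch = 5
	cache := validatorInfoCache{}
	svi := &shardValidatorInfo{PublicKey: []byte("pk"), ShardId: 0, List: "eligible"}

	// post-activation: the mini block carries only a hash, the cacher holds the payload
	hash := []byte("hash-of-validator-info")
	cache[string(hash)] = svi
	fromCache, err := getShardValidatorInfoData(hash, activationEpoch, activationEpoch, cache)
	fmt.Println(string(fromCache.PublicKey), err)

	// pre-activation: the mini block carries the marshalled payload directly
	marshalled, _ := json.Marshal(svi)
	fromBytes, err := getShardValidatorInfoData(marshalled, activationEpoch-1, activationEpoch, cache)
	fmt.Println(string(fromBytes.PublicKey), err)
}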
return body } -func createMiniBlocksForNodesMap(nodesMap map[uint32][]Validator, list string, marshalizer marshal.Marshalizer) []*block.MiniBlock { +func createMiniBlocksForNodesMap( + nodesMap map[uint32][]Validator, + list string, + marshaller marshal.Marshalizer, + hasher hashing.Hasher, + validatorInfoCacher epochStart.ValidatorInfoCacher, +) []*block.MiniBlock { + miniBlocks := make([]*block.MiniBlock, 0) for shId, eligibleList := range nodesMap { miniBlock := &block.MiniBlock{Type: block.PeerBlock} for index, eligible := range eligibleList { - shardVInfo := &state.ShardValidatorInfo{ + shardValidatorInfo := &state.ShardValidatorInfo{ PublicKey: eligible.PubKey(), ShardId: shId, List: list, @@ -1130,8 +1166,10 @@ func createMiniBlocksForNodesMap(nodesMap map[uint32][]Validator, list string, m TempRating: 10, } - marshaledData, _ := marshalizer.Marshal(shardVInfo) - miniBlock.TxHashes = append(miniBlock.TxHashes, marshaledData) + shardValidatorInfoHash, _ := core.CalculateHash(marshaller, hasher, shardValidatorInfo) + + miniBlock.TxHashes = append(miniBlock.TxHashes, shardValidatorInfoHash) + validatorInfoCacher.AddValidatorInfo(shardValidatorInfoHash, shardValidatorInfo) } miniBlocks = append(miniBlocks, miniBlock) } @@ -1142,7 +1180,7 @@ func TestIndexHashedNodesCoordinator_EpochStart(t *testing.T) { t.Parallel() arguments := createArguments() - + arguments.ValidatorInfoCacher = dataPool.NewCurrentEpochValidatorInfoPool() ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(t, err) epoch := uint32(1) @@ -1155,7 +1193,7 @@ func TestIndexHashedNodesCoordinator_EpochStart(t *testing.T) { ihnc.nodesConfig[epoch] = ihnc.nodesConfig[0] - body := createBlockBodyFromNodesCoordinator(ihnc, epoch) + body := createBlockBodyFromNodesCoordinator(ihnc, epoch, ihnc.validatorInfoCacher) ihnc.EpochStartPrepare(header, body) ihnc.EpochStartAction(header) @@ -1288,6 +1326,7 @@ func TestIndexHashedNodesCoordinator_EpochStartInEligible(t *testing.T) { t.Parallel() arguments := createArguments() + arguments.ValidatorInfoCacher = dataPool.NewCurrentEpochValidatorInfoPool() pk := []byte("pk") arguments.SelfPublicKey = pk ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -1309,7 +1348,7 @@ func TestIndexHashedNodesCoordinator_EpochStartInEligible(t *testing.T) { }, }, } - body := createBlockBodyFromNodesCoordinator(ihnc, epoch) + body := createBlockBodyFromNodesCoordinator(ihnc, epoch, ihnc.validatorInfoCacher) ihnc.EpochStartPrepare(header, body) ihnc.EpochStartAction(header) @@ -1323,6 +1362,7 @@ func TestIndexHashedNodesCoordinator_EpochStartInWaiting(t *testing.T) { t.Parallel() arguments := createArguments() + arguments.ValidatorInfoCacher = dataPool.NewCurrentEpochValidatorInfoPool() pk := []byte("pk") arguments.SelfPublicKey = pk ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -1344,7 +1384,7 @@ func TestIndexHashedNodesCoordinator_EpochStartInWaiting(t *testing.T) { }, }, } - body := createBlockBodyFromNodesCoordinator(ihnc, epoch) + body := createBlockBodyFromNodesCoordinator(ihnc, epoch, ihnc.validatorInfoCacher) ihnc.EpochStartPrepare(header, body) ihnc.EpochStartAction(header) @@ -1357,6 +1397,7 @@ func TestIndexHashedNodesCoordinator_EpochStartInLeaving(t *testing.T) { t.Parallel() arguments := createArguments() + arguments.ValidatorInfoCacher = dataPool.NewCurrentEpochValidatorInfoPool() pk := []byte("pk") arguments.SelfPublicKey = pk ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -1383,7 +1424,7 @@ func 
TestIndexHashedNodesCoordinator_EpochStartInLeaving(t *testing.T) { }, }, } - body := createBlockBodyFromNodesCoordinator(ihnc, epoch) + body := createBlockBodyFromNodesCoordinator(ihnc, epoch, ihnc.validatorInfoCacher) ihnc.EpochStartPrepare(header, body) ihnc.EpochStartAction(header) @@ -1438,7 +1479,10 @@ func TestIndexHashedNodesCoordinator_EpochStart_EligibleSortedAscendingByIndex(t ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + EnableEpochsHandler: &mock.EnableEpochsHandlerMock{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + }, + ValidatorInfoCacher: dataPool.NewCurrentEpochValidatorInfoPool(), } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -1453,7 +1497,7 @@ func TestIndexHashedNodesCoordinator_EpochStart_EligibleSortedAscendingByIndex(t ihnc.nodesConfig[epoch] = ihnc.nodesConfig[0] - body := createBlockBodyFromNodesCoordinator(ihnc, epoch) + body := createBlockBodyFromNodesCoordinator(ihnc, epoch, ihnc.validatorInfoCacher) ihnc.EpochStartPrepare(header, body) newNodesConfig := ihnc.nodesConfig[1] @@ -1535,6 +1579,7 @@ func TestIndexHashedNodesCoordinator_GetSavedStateKey(t *testing.T) { t.Parallel() args := createArguments() + args.ValidatorInfoCacher = dataPool.NewCurrentEpochValidatorInfoPool() ihnc, err := NewIndexHashedNodesCoordinator(args) require.Nil(t, err) @@ -1544,7 +1589,7 @@ func TestIndexHashedNodesCoordinator_GetSavedStateKey(t *testing.T) { Epoch: 1, } - body := createBlockBodyFromNodesCoordinator(ihnc, 0) + body := createBlockBodyFromNodesCoordinator(ihnc, 0, ihnc.validatorInfoCacher) ihnc.EpochStartPrepare(header, body) ihnc.EpochStartAction(header) @@ -1618,7 +1663,7 @@ func TestIndexHashedNodesCoordinator_GetConsensusWhitelistedNodesEpoch1(t *testi t.Parallel() arguments := createArguments() - + arguments.ValidatorInfoCacher = dataPool.NewCurrentEpochValidatorInfoPool() ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(t, err) @@ -1628,7 +1673,7 @@ func TestIndexHashedNodesCoordinator_GetConsensusWhitelistedNodesEpoch1(t *testi Epoch: 1, } - body := createBlockBodyFromNodesCoordinator(ihnc, 0) + body := createBlockBodyFromNodesCoordinator(ihnc, 0, ihnc.validatorInfoCacher) ihnc.EpochStartPrepare(header, body) ihnc.EpochStartAction(header) @@ -1660,6 +1705,7 @@ func TestIndexHashedNodesCoordinator_GetConsensusWhitelistedNodesAfterRevertToEp t.Parallel() arguments := createArguments() + arguments.ValidatorInfoCacher = dataPool.NewCurrentEpochValidatorInfoPool() ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(t, err) @@ -1669,11 +1715,11 @@ func TestIndexHashedNodesCoordinator_GetConsensusWhitelistedNodesAfterRevertToEp Epoch: 1, } - body := createBlockBodyFromNodesCoordinator(ihnc, 0) + body := createBlockBodyFromNodesCoordinator(ihnc, 0, ihnc.validatorInfoCacher) ihnc.EpochStartPrepare(header, body) ihnc.EpochStartAction(header) - body = createBlockBodyFromNodesCoordinator(ihnc, 1) + body = createBlockBodyFromNodesCoordinator(ihnc, 1, ihnc.validatorInfoCacher) header = &block.MetaBlock{ PrevRandSeed: []byte("rand seed"), EpochStart: block.EpochStart{LastFinalizedHeaders: []block.EpochStartShardData{{}}}, @@ -1682,7 +1728,7 @@ func TestIndexHashedNodesCoordinator_GetConsensusWhitelistedNodesAfterRevertToEp ihnc.EpochStartPrepare(header, body) ihnc.EpochStartAction(header) - body = createBlockBodyFromNodesCoordinator(ihnc, 2) + body = 
createBlockBodyFromNodesCoordinator(ihnc, 2, ihnc.validatorInfoCacher) header = &block.MetaBlock{ PrevRandSeed: []byte("rand seed"), EpochStart: block.EpochStart{LastFinalizedHeaders: []block.EpochStartShardData{{}}}, @@ -1691,7 +1737,7 @@ func TestIndexHashedNodesCoordinator_GetConsensusWhitelistedNodesAfterRevertToEp ihnc.EpochStartPrepare(header, body) ihnc.EpochStartAction(header) - body = createBlockBodyFromNodesCoordinator(ihnc, 3) + body = createBlockBodyFromNodesCoordinator(ihnc, 3, ihnc.validatorInfoCacher) header = &block.MetaBlock{ PrevRandSeed: []byte("rand seed"), EpochStart: block.EpochStart{LastFinalizedHeaders: []block.EpochStartShardData{{}}}, @@ -2332,3 +2378,56 @@ func TestIndexHashedNodesCoordinator_IsInterfaceNil(t *testing.T) { require.Nil(t, err) require.False(t, check.IfNil(ihnc3)) } + +func TestIndexHashedNodesCoordinator_GetShardValidatorInfoData(t *testing.T) { + t.Parallel() + + t.Run("get shard validator info data before refactor peers mini block activation flag is set", func(t *testing.T) { + t.Parallel() + + txHash := []byte("txHash") + svi := &state.ShardValidatorInfo{PublicKey: []byte("x")} + + arguments := createArguments() + arguments.EnableEpochsHandler = &mock.EnableEpochsHandlerMock{ + RefactorPeersMiniBlocksEnableEpochField: 1, + } + arguments.ValidatorInfoCacher = &vic.ValidatorInfoCacherStub{ + GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + if bytes.Equal(validatorInfoHash, txHash) { + return svi, nil + } + return nil, errors.New("error") + }, + } + ihnc, _ := NewIndexHashedNodesCoordinator(arguments) + + marshalledSVI, _ := arguments.Marshalizer.Marshal(svi) + shardValidatorInfo, _ := ihnc.getShardValidatorInfoData(marshalledSVI, 0) + require.Equal(t, svi, shardValidatorInfo) + }) + + t.Run("get shard validator info data after refactor peers mini block activation flag is set", func(t *testing.T) { + t.Parallel() + + txHash := []byte("txHash") + svi := &state.ShardValidatorInfo{PublicKey: []byte("x")} + + arguments := createArguments() + arguments.EnableEpochsHandler = &mock.EnableEpochsHandlerMock{ + RefactorPeersMiniBlocksEnableEpochField: 0, + } + arguments.ValidatorInfoCacher = &vic.ValidatorInfoCacherStub{ + GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + if bytes.Equal(validatorInfoHash, txHash) { + return svi, nil + } + return nil, errors.New("error") + }, + } + ihnc, _ := NewIndexHashedNodesCoordinator(arguments) + + shardValidatorInfo, _ := ihnc.getShardValidatorInfoData(txHash, 0) + require.Equal(t, svi, shardValidatorInfo) + }) +} diff --git a/sharding/nodesCoordinator/shardingArgs.go b/sharding/nodesCoordinator/shardingArgs.go index e9c5076027a..5071c434976 100644 --- a/sharding/nodesCoordinator/shardingArgs.go +++ b/sharding/nodesCoordinator/shardingArgs.go @@ -5,6 +5,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/hashing" "github.com/ElrondNetwork/elrond-go-core/marshal" "github.com/ElrondNetwork/elrond-go/common" + "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/storage" ) @@ -30,4 +31,5 @@ type ArgNodesCoordinator struct { NodeTypeProvider NodeTypeProviderHandler IsFullArchive bool EnableEpochsHandler common.EnableEpochsHandler + ValidatorInfoCacher epochStart.ValidatorInfoCacher } diff --git a/testscommon/dataRetriever/poolFactory.go b/testscommon/dataRetriever/poolFactory.go index 6fb460426f4..165d202c102 100644 --- a/testscommon/dataRetriever/poolFactory.go +++ 
b/testscommon/dataRetriever/poolFactory.go @@ -126,20 +126,30 @@ func CreatePoolsHolder(numShards uint32, selfShard uint32) dataRetriever.PoolsHo heartbeatPool, err := storageUnit.NewCache(cacherConfig) panicIfError("CreatePoolsHolder", err) - currentTx := dataPool.NewCurrentBlockPool() + validatorsInfo, err := shardedData.NewShardedData("validatorsInfoPool", storageUnit.CacheConfig{ + Capacity: 300, + SizeInBytes: 300000, + Shards: 1, + }) + panicIfError("CreatePoolsHolder", err) + + currentBlockTransactions := dataPool.NewCurrentBlockTransactionsPool() + currentEpochValidatorInfo := dataPool.NewCurrentEpochValidatorInfoPool() dataPoolArgs := dataPool.DataPoolArgs{ - Transactions: txPool, - UnsignedTransactions: unsignedTxPool, - RewardTransactions: rewardsTxPool, - Headers: headersPool, - MiniBlocks: txBlockBody, - PeerChangesBlocks: peerChangeBlockBody, - TrieNodes: adaptedTrieNodesStorage, - TrieNodesChunks: trieNodesChunks, - CurrentBlockTransactions: currentTx, - SmartContracts: smartContracts, - PeerAuthentications: peerAuthPool, - Heartbeats: heartbeatPool, + Transactions: txPool, + UnsignedTransactions: unsignedTxPool, + RewardTransactions: rewardsTxPool, + Headers: headersPool, + MiniBlocks: txBlockBody, + PeerChangesBlocks: peerChangeBlockBody, + TrieNodes: adaptedTrieNodesStorage, + TrieNodesChunks: trieNodesChunks, + CurrentBlockTransactions: currentBlockTransactions, + CurrentEpochValidatorInfo: currentEpochValidatorInfo, + SmartContracts: smartContracts, + PeerAuthentications: peerAuthPool, + Heartbeats: heartbeatPool, + ValidatorsInfo: validatorsInfo, } holder, err := dataPool.NewDataPool(dataPoolArgs) panicIfError("CreatePoolsHolder", err) @@ -190,6 +200,13 @@ func CreatePoolsHolderWithTxPool(txPool dataRetriever.ShardedDataCacherNotifier) smartContracts, err := storageUnit.NewCache(cacherConfig) panicIfError("CreatePoolsHolderWithTxPool", err) + validatorsInfo, err := shardedData.NewShardedData("validatorsInfoPool", storageUnit.CacheConfig{ + Capacity: 300, + SizeInBytes: 300000, + Shards: 1, + }) + panicIfError("CreatePoolsHolderWithTxPool", err) + peerAuthPool, err := timecache.NewTimeCacher(timecache.ArgTimeCacher{ DefaultSpan: peerAuthDuration, CacheExpiry: peerAuthDuration, @@ -200,20 +217,23 @@ func CreatePoolsHolderWithTxPool(txPool dataRetriever.ShardedDataCacherNotifier) heartbeatPool, err := storageUnit.NewCache(cacherConfig) panicIfError("CreatePoolsHolderWithTxPool", err) - currentTx := dataPool.NewCurrentBlockPool() + currentBlockTransactions := dataPool.NewCurrentBlockTransactionsPool() + currentEpochValidatorInfo := dataPool.NewCurrentEpochValidatorInfoPool() dataPoolArgs := dataPool.DataPoolArgs{ - Transactions: txPool, - UnsignedTransactions: unsignedTxPool, - RewardTransactions: rewardsTxPool, - Headers: headersPool, - MiniBlocks: txBlockBody, - PeerChangesBlocks: peerChangeBlockBody, - TrieNodes: trieNodes, - TrieNodesChunks: trieNodesChunks, - CurrentBlockTransactions: currentTx, - SmartContracts: smartContracts, - PeerAuthentications: peerAuthPool, - Heartbeats: heartbeatPool, + Transactions: txPool, + UnsignedTransactions: unsignedTxPool, + RewardTransactions: rewardsTxPool, + Headers: headersPool, + MiniBlocks: txBlockBody, + PeerChangesBlocks: peerChangeBlockBody, + TrieNodes: trieNodes, + TrieNodesChunks: trieNodesChunks, + CurrentBlockTransactions: currentBlockTransactions, + CurrentEpochValidatorInfo: currentEpochValidatorInfo, + SmartContracts: smartContracts, + PeerAuthentications: peerAuthPool, + Heartbeats: heartbeatPool, + ValidatorsInfo: 
validatorsInfo, } holder, err := dataPool.NewDataPool(dataPoolArgs) panicIfError("CreatePoolsHolderWithTxPool", err) diff --git a/testscommon/dataRetriever/poolsHolderMock.go b/testscommon/dataRetriever/poolsHolderMock.go index 468421bef9c..29bae65f787 100644 --- a/testscommon/dataRetriever/poolsHolderMock.go +++ b/testscommon/dataRetriever/poolsHolderMock.go @@ -18,18 +18,20 @@ import ( // PoolsHolderMock - type PoolsHolderMock struct { - transactions dataRetriever.ShardedDataCacherNotifier - unsignedTransactions dataRetriever.ShardedDataCacherNotifier - rewardTransactions dataRetriever.ShardedDataCacherNotifier - headers dataRetriever.HeadersPool - miniBlocks storage.Cacher - peerChangesBlocks storage.Cacher - trieNodes storage.Cacher - trieNodesChunks storage.Cacher - smartContracts storage.Cacher - currBlockTxs dataRetriever.TransactionCacher - peerAuthentications storage.Cacher - heartbeats storage.Cacher + transactions dataRetriever.ShardedDataCacherNotifier + unsignedTransactions dataRetriever.ShardedDataCacherNotifier + rewardTransactions dataRetriever.ShardedDataCacherNotifier + headers dataRetriever.HeadersPool + miniBlocks storage.Cacher + peerChangesBlocks storage.Cacher + trieNodes storage.Cacher + trieNodesChunks storage.Cacher + smartContracts storage.Cacher + currBlockTxs dataRetriever.TransactionCacher + currEpochValidatorInfo dataRetriever.ValidatorInfoCacher + peerAuthentications storage.Cacher + heartbeats storage.Cacher + validatorsInfo dataRetriever.ShardedDataCacherNotifier } // NewPoolsHolderMock - @@ -79,7 +81,8 @@ func NewPoolsHolderMock() *PoolsHolderMock { holder.peerChangesBlocks, err = storageUnit.NewCache(storageUnit.CacheConfig{Type: storageUnit.LRUCache, Capacity: 10000, Shards: 1, SizeInBytes: 0}) panicIfError("NewPoolsHolderMock", err) - holder.currBlockTxs = dataPool.NewCurrentBlockPool() + holder.currBlockTxs = dataPool.NewCurrentBlockTransactionsPool() + holder.currEpochValidatorInfo = dataPool.NewCurrentEpochValidatorInfoPool() holder.trieNodes, err = storageUnit.NewCache(storageUnit.CacheConfig{Type: storageUnit.SizeLRUCache, Capacity: 900000, Shards: 1, SizeInBytes: 314572800}) panicIfError("NewPoolsHolderMock", err) @@ -99,6 +102,13 @@ func NewPoolsHolderMock() *PoolsHolderMock { holder.heartbeats, err = storageUnit.NewCache(storageUnit.CacheConfig{Type: storageUnit.LRUCache, Capacity: 10000, Shards: 1, SizeInBytes: 0}) panicIfError("NewPoolsHolderMock", err) + holder.validatorsInfo, err = shardedData.NewShardedData("validatorsInfoPool", storageUnit.CacheConfig{ + Capacity: 100, + SizeInBytes: 100000, + Shards: 1, + }) + panicIfError("NewPoolsHolderMock", err) + return holder } @@ -107,6 +117,11 @@ func (holder *PoolsHolderMock) CurrentBlockTxs() dataRetriever.TransactionCacher return holder.currBlockTxs } +// CurrentEpochValidatorInfo - +func (holder *PoolsHolderMock) CurrentEpochValidatorInfo() dataRetriever.ValidatorInfoCacher { + return holder.currEpochValidatorInfo +} + // Transactions - func (holder *PoolsHolderMock) Transactions() dataRetriever.ShardedDataCacherNotifier { return holder.transactions @@ -172,6 +187,11 @@ func (holder *PoolsHolderMock) Heartbeats() storage.Cacher { return holder.heartbeats } +// ValidatorsInfo - +func (holder *PoolsHolderMock) ValidatorsInfo() dataRetriever.ShardedDataCacherNotifier { + return holder.validatorsInfo +} + // Close - func (holder *PoolsHolderMock) Close() error { var lastError error diff --git a/testscommon/dataRetriever/poolsHolderStub.go b/testscommon/dataRetriever/poolsHolderStub.go index 
a8dd89a04c5..dbc22b6e564 100644 --- a/testscommon/dataRetriever/poolsHolderStub.go +++ b/testscommon/dataRetriever/poolsHolderStub.go @@ -8,20 +8,22 @@ import ( // PoolsHolderStub - type PoolsHolderStub struct { - HeadersCalled func() dataRetriever.HeadersPool - TransactionsCalled func() dataRetriever.ShardedDataCacherNotifier - UnsignedTransactionsCalled func() dataRetriever.ShardedDataCacherNotifier - RewardTransactionsCalled func() dataRetriever.ShardedDataCacherNotifier - MiniBlocksCalled func() storage.Cacher - MetaBlocksCalled func() storage.Cacher - CurrBlockTxsCalled func() dataRetriever.TransactionCacher - TrieNodesCalled func() storage.Cacher - TrieNodesChunksCalled func() storage.Cacher - PeerChangesBlocksCalled func() storage.Cacher - SmartContractsCalled func() storage.Cacher - PeerAuthenticationsCalled func() storage.Cacher - HeartbeatsCalled func() storage.Cacher - CloseCalled func() error + HeadersCalled func() dataRetriever.HeadersPool + TransactionsCalled func() dataRetriever.ShardedDataCacherNotifier + UnsignedTransactionsCalled func() dataRetriever.ShardedDataCacherNotifier + RewardTransactionsCalled func() dataRetriever.ShardedDataCacherNotifier + MiniBlocksCalled func() storage.Cacher + MetaBlocksCalled func() storage.Cacher + CurrBlockTxsCalled func() dataRetriever.TransactionCacher + CurrEpochValidatorInfoCalled func() dataRetriever.ValidatorInfoCacher + TrieNodesCalled func() storage.Cacher + TrieNodesChunksCalled func() storage.Cacher + PeerChangesBlocksCalled func() storage.Cacher + SmartContractsCalled func() storage.Cacher + PeerAuthenticationsCalled func() storage.Cacher + HeartbeatsCalled func() storage.Cacher + ValidatorsInfoCalled func() dataRetriever.ShardedDataCacherNotifier + CloseCalled func() error } // NewPoolsHolderStub - @@ -92,6 +94,15 @@ func (holder *PoolsHolderStub) CurrentBlockTxs() dataRetriever.TransactionCacher return nil } +// CurrentEpochValidatorInfo - +func (holder *PoolsHolderStub) CurrentEpochValidatorInfo() dataRetriever.ValidatorInfoCacher { + if holder.CurrEpochValidatorInfoCalled != nil { + return holder.CurrEpochValidatorInfoCalled() + } + + return nil +} + // TrieNodes - func (holder *PoolsHolderStub) TrieNodes() storage.Cacher { if holder.TrieNodesCalled != nil { @@ -146,6 +157,15 @@ func (holder *PoolsHolderStub) Heartbeats() storage.Cacher { return testscommon.NewCacherStub() } +// ValidatorsInfo - +func (holder *PoolsHolderStub) ValidatorsInfo() dataRetriever.ShardedDataCacherNotifier { + if holder.ValidatorsInfoCalled != nil { + return holder.ValidatorsInfoCalled() + } + + return testscommon.NewShardedDataStub() +} + // Close - func (holder *PoolsHolderStub) Close() error { if holder.CloseCalled != nil { diff --git a/testscommon/enableEpochsHandlerStub.go b/testscommon/enableEpochsHandlerStub.go index 3633527f7d3..7359a639451 100644 --- a/testscommon/enableEpochsHandlerStub.go +++ b/testscommon/enableEpochsHandlerStub.go @@ -20,6 +20,7 @@ type EnableEpochsHandlerStub struct { CheckExecuteReadOnlyEnableEpochField uint32 StorageAPICostOptimizationEnableEpochField uint32 MiniBlockPartialExecutionEnableEpochField uint32 + RefactorPeersMiniBlocksEnableEpochField uint32 IsSCDeployFlagEnabledField bool IsBuiltInFunctionsFlagEnabledField bool IsRelayedTransactionsFlagEnabledField bool @@ -106,6 +107,7 @@ type EnableEpochsHandlerStub struct { IsESDTNFTImprovementV1FlagEnabledField bool IsSetSenderInEeiOutputTransferFlagEnabledField bool IsChangeDelegationOwnerFlagEnabledField bool + IsRefactorPeersMiniBlocksFlagEnabledField bool } // 
ResetPenalizedTooMuchGasFlag - @@ -200,6 +202,11 @@ func (stub *EnableEpochsHandlerStub) MiniBlockPartialExecutionEnableEpoch() uint return stub.MiniBlockPartialExecutionEnableEpochField } +// RefactorPeersMiniBlocksEnableEpoch - +func (stub *EnableEpochsHandlerStub) RefactorPeersMiniBlocksEnableEpoch() uint32 { + return stub.RefactorPeersMiniBlocksEnableEpochField +} + // IsSCDeployFlagEnabled - func (stub *EnableEpochsHandlerStub) IsSCDeployFlagEnabled() bool { return stub.IsSCDeployFlagEnabledField @@ -630,6 +637,11 @@ func (stub *EnableEpochsHandlerStub) IsChangeDelegationOwnerFlagEnabled() bool { return stub.IsChangeDelegationOwnerFlagEnabledField } +// IsRefactorPeersMiniBlocksFlagEnabled - +func (stub *EnableEpochsHandlerStub) IsRefactorPeersMiniBlocksFlagEnabled() bool { + return stub.IsRefactorPeersMiniBlocksFlagEnabledField +} + // IsInterfaceNil - func (stub *EnableEpochsHandlerStub) IsInterfaceNil() bool { return stub == nil diff --git a/testscommon/generalConfig.go b/testscommon/generalConfig.go index eba72a3d4ef..64f3aae11d2 100644 --- a/testscommon/generalConfig.go +++ b/testscommon/generalConfig.go @@ -152,6 +152,11 @@ func GetGeneralConfig() config.Config { SizeInBytes: 1000000000, Shards: 1, }, + ValidatorInfoPool: config.CacheConfig{ + Capacity: 10000, + SizeInBytes: 1000000000, + Shards: 1, + }, HeadersPoolConfig: config.HeadersPoolConfig{ MaxHeadersPerShard: 100, NumElementsToRemoveOnEviction: 1, diff --git a/testscommon/genericMocks/actionHandlerStub.go b/testscommon/genericMocks/actionHandlerStub.go index 09aaa2d6324..8595b4cfa8d 100644 --- a/testscommon/genericMocks/actionHandlerStub.go +++ b/testscommon/genericMocks/actionHandlerStub.go @@ -1,6 +1,8 @@ package genericMocks -import "github.com/ElrondNetwork/elrond-go-core/data" +import ( + "github.com/ElrondNetwork/elrond-go-core/data" +) // ActionHandlerStub - type ActionHandlerStub struct { diff --git a/testscommon/marshalizerStub.go b/testscommon/marshalizerStub.go index 18b42297b1e..8281a41278a 100644 --- a/testscommon/marshalizerStub.go +++ b/testscommon/marshalizerStub.go @@ -11,7 +11,7 @@ func (ms *MarshalizerStub) Marshal(obj interface{}) ([]byte, error) { if ms.MarshalCalled != nil { return ms.MarshalCalled(obj) } - return nil, nil + return make([]byte, 0), nil } // Unmarshal - diff --git a/testscommon/requestHandlerStub.go b/testscommon/requestHandlerStub.go index a5bc8b19901..3ff19202845 100644 --- a/testscommon/requestHandlerStub.go +++ b/testscommon/requestHandlerStub.go @@ -21,6 +21,8 @@ type RequestHandlerStub struct { CreateTrieNodeIdentifierCalled func(requestHash []byte, chunkIndex uint32) []byte RequestPeerAuthenticationsChunkCalled func(destShardID uint32, chunkIndex uint32) RequestPeerAuthenticationsByHashesCalled func(destShardID uint32, hashes [][]byte) + RequestValidatorInfoCalled func(hash []byte) + RequestValidatorsInfoCalled func(hashes [][]byte) } // SetNumPeersToQuery - @@ -168,6 +170,20 @@ func (rhs *RequestHandlerStub) RequestPeerAuthenticationsByHashes(destShardID ui } } +// RequestValidatorInfo - +func (rhs *RequestHandlerStub) RequestValidatorInfo(hash []byte) { + if rhs.RequestValidatorInfoCalled != nil { + rhs.RequestValidatorInfoCalled(hash) + } +} + +// RequestValidatorsInfo - +func (rhs *RequestHandlerStub) RequestValidatorsInfo(hashes [][]byte) { + if rhs.RequestValidatorsInfoCalled != nil { + rhs.RequestValidatorsInfoCalled(hashes) + } +} + // IsInterfaceNil returns true if there is no value under the interface func (rhs *RequestHandlerStub) IsInterfaceNil() bool { return rhs 
== nil diff --git a/testscommon/shardedDataStub.go b/testscommon/shardedDataStub.go index 31713ef0d60..4a076aa0bd6 100644 --- a/testscommon/shardedDataStub.go +++ b/testscommon/shardedDataStub.go @@ -38,7 +38,10 @@ func (sd *ShardedDataStub) RegisterOnAdded(handler func(key []byte, value interf // ShardDataStore - func (sd *ShardedDataStub) ShardDataStore(cacheID string) storage.Cacher { - return sd.ShardDataStoreCalled(cacheID) + if sd.ShardDataStoreCalled != nil { + return sd.ShardDataStoreCalled(cacheID) + } + return nil } // AddData - @@ -50,37 +53,52 @@ func (sd *ShardedDataStub) AddData(key []byte, data interface{}, sizeInBytes int // SearchFirstData - func (sd *ShardedDataStub) SearchFirstData(key []byte) (value interface{}, ok bool) { - return sd.SearchFirstDataCalled(key) + if sd.SearchFirstDataCalled != nil { + return sd.SearchFirstDataCalled(key) + } + return nil, false } // RemoveData - func (sd *ShardedDataStub) RemoveData(key []byte, cacheID string) { - sd.RemoveDataCalled(key, cacheID) + if sd.RemoveDataCalled != nil { + sd.RemoveDataCalled(key, cacheID) + } } // RemoveDataFromAllShards - func (sd *ShardedDataStub) RemoveDataFromAllShards(key []byte) { - sd.RemoveDataFromAllShardsCalled(key) + if sd.RemoveDataFromAllShardsCalled != nil { + sd.RemoveDataFromAllShardsCalled(key) + } } // MergeShardStores - func (sd *ShardedDataStub) MergeShardStores(sourceCacheID, destCacheID string) { - sd.MergeShardStoresCalled(sourceCacheID, destCacheID) + if sd.MergeShardStoresCalled != nil { + sd.MergeShardStoresCalled(sourceCacheID, destCacheID) + } } // Clear - func (sd *ShardedDataStub) Clear() { - sd.ClearCalled() + if sd.ClearCalled != nil { + sd.ClearCalled() + } } // ClearShardStore - func (sd *ShardedDataStub) ClearShardStore(cacheID string) { - sd.ClearShardStoreCalled(cacheID) + if sd.ClearShardStoreCalled != nil { + sd.ClearShardStoreCalled(cacheID) + } } // RemoveSetOfDataFromPool - func (sd *ShardedDataStub) RemoveSetOfDataFromPool(keys [][]byte, cacheID string) { - sd.RemoveSetOfDataFromPoolCalled(keys, cacheID) + if sd.RemoveSetOfDataFromPoolCalled != nil { + sd.RemoveSetOfDataFromPoolCalled(keys, cacheID) + } } // ImmunizeSetOfDataAgainstEviction - diff --git a/testscommon/syncer/transactionsSyncHandlerMock.go b/testscommon/syncer/transactionsSyncHandlerMock.go index c044176703e..7995e7206e8 100644 --- a/testscommon/syncer/transactionsSyncHandlerMock.go +++ b/testscommon/syncer/transactionsSyncHandlerMock.go @@ -3,6 +3,7 @@ package syncer import ( "github.com/ElrondNetwork/elrond-go-core/data" "github.com/ElrondNetwork/elrond-go-core/data/block" + "github.com/ElrondNetwork/elrond-go/state" "golang.org/x/net/context" ) @@ -10,6 +11,8 @@ import ( type TransactionsSyncHandlerMock struct { SyncTransactionsForCalled func(miniBlocks map[string]*block.MiniBlock, epoch uint32, ctx context.Context) error GetTransactionsCalled func() (map[string]data.TransactionHandler, error) + GetValidatorsInfoCalled func() (map[string]*state.ShardValidatorInfo, error) + ClearFieldsCalled func() } // SyncTransactionsFor - @@ -28,6 +31,21 @@ func (et *TransactionsSyncHandlerMock) GetTransactions() (map[string]data.Transa return nil, nil } +// GetValidatorsInfo - +func (et *TransactionsSyncHandlerMock) GetValidatorsInfo() (map[string]*state.ShardValidatorInfo, error) { + if et.GetValidatorsInfoCalled != nil { + return et.GetValidatorsInfoCalled() + } + return nil, nil +} + +// ClearFields - +func (et *TransactionsSyncHandlerMock) ClearFields() { + if et.ClearFieldsCalled != nil { + 
et.ClearFieldsCalled() + } +} + // IsInterfaceNil - func (et *TransactionsSyncHandlerMock) IsInterfaceNil() bool { return et == nil diff --git a/testscommon/validatorInfoCacher/validatorInfoCacherStub.go b/testscommon/validatorInfoCacher/validatorInfoCacherStub.go new file mode 100644 index 00000000000..c8d9ad5b443 --- /dev/null +++ b/testscommon/validatorInfoCacher/validatorInfoCacherStub.go @@ -0,0 +1,38 @@ +package validatorInfoCacherStub + +import "github.com/ElrondNetwork/elrond-go/state" + +// ValidatorInfoCacherStub - +type ValidatorInfoCacherStub struct { + CleanCalled func() + AddValidatorInfoCalled func(validatorInfoHash []byte, validatorInfo *state.ShardValidatorInfo) + GetValidatorInfoCalled func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) +} + +// Clean - +func (vics *ValidatorInfoCacherStub) Clean() { + if vics.CleanCalled != nil { + vics.CleanCalled() + } +} + +// GetValidatorInfo - +func (vics *ValidatorInfoCacherStub) GetValidatorInfo(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + if vics.GetValidatorInfoCalled != nil { + return vics.GetValidatorInfoCalled(validatorInfoHash) + } + + return nil, nil +} + +// AddValidatorInfo - +func (vics *ValidatorInfoCacherStub) AddValidatorInfo(validatorInfoHash []byte, validatorInfo *state.ShardValidatorInfo) { + if vics.AddValidatorInfoCalled != nil { + vics.AddValidatorInfoCalled(validatorInfoHash, validatorInfo) + } +} + +// IsInterfaceNil returns true if there is no value under the interface +func (vics *ValidatorInfoCacherStub) IsInterfaceNil() bool { + return vics == nil +} diff --git a/update/factory/exportHandlerFactory.go b/update/factory/exportHandlerFactory.go index f4aa63375fe..cec8db108a2 100644 --- a/update/factory/exportHandlerFactory.go +++ b/update/factory/exportHandlerFactory.go @@ -271,8 +271,9 @@ func (e *exportHandlerFactory) Create() (update.ExportHandler, error) { } argsPeerMiniBlocksSyncer := shardchain.ArgPeerMiniBlockSyncer{ - MiniBlocksPool: e.dataPool.MiniBlocks(), - Requesthandler: e.requestHandler, + MiniBlocksPool: e.dataPool.MiniBlocks(), + ValidatorsInfoPool: e.dataPool.ValidatorsInfo(), + RequestHandler: e.requestHandler, } peerMiniBlocksSyncer, err := shardchain.NewPeerMiniBlockSyncer(argsPeerMiniBlocksSyncer) if err != nil { @@ -293,6 +294,7 @@ func (e *exportHandlerFactory) Create() (update.ExportHandler, error) { PeerMiniBlocksSyncer: peerMiniBlocksSyncer, RoundHandler: e.roundHandler, AppStatusHandler: e.CoreComponents.StatusHandler(), + EnableEpochsHandler: e.CoreComponents.EnableEpochsHandler(), } epochHandler, err := shardchain.NewEpochStartTrigger(&argsEpochTrigger) if err != nil { @@ -422,7 +424,7 @@ func (e *exportHandlerFactory) Create() (update.ExportHandler, error) { argsPendingTransactions := sync.ArgsNewTransactionsSyncer{ DataPools: e.dataPool, Storages: e.storageService, - Marshalizer: e.CoreComponents.InternalMarshalizer(), + Marshaller: e.CoreComponents.InternalMarshalizer(), RequestHandler: e.requestHandler, } epochStartTransactionsSyncer, err := sync.NewTransactionsSyncer(argsPendingTransactions) diff --git a/update/genesis/base.go b/update/genesis/base.go index 7fdd41b2c9c..9336554b3e7 100644 --- a/update/genesis/base.go +++ b/update/genesis/base.go @@ -25,6 +25,9 @@ const UnFinishedMetaBlocksIdentifier = "unFinishedMetaBlocks" // TransactionsIdentifier is the constant which defines the export/import identifier for transactions const TransactionsIdentifier = "transactions" +// ValidatorsInfoIdentifier is the constant which defines the 
export/import identifier for validators info +const ValidatorsInfoIdentifier = "validatorsInfo" + // MiniBlocksIdentifier is the constant which defines the export/import identifier for miniBlocks const MiniBlocksIdentifier = "miniBlocks" @@ -249,3 +252,8 @@ func CreateTransactionKey(key string, tx data.TransactionHandler) string { return "tx" + atSep + "ukw" + atSep + hex.EncodeToString([]byte(key)) } } + +// CreateValidatorInfoKey returns a validator info key +func CreateValidatorInfoKey(key string) string { + return "vi" + atSep + hex.EncodeToString([]byte(key)) +} diff --git a/update/genesis/export.go b/update/genesis/export.go index 900f2cd967f..ec3fde205eb 100644 --- a/update/genesis/export.go +++ b/update/genesis/export.go @@ -134,6 +134,11 @@ func (se *stateExport) ExportAll(epoch uint32) error { return err } + err = se.exportAllValidatorsInfo() + if err != nil { + return err + } + return nil } @@ -154,6 +159,23 @@ func (se *stateExport) exportAllTransactions() error { return se.hardforkStorer.FinishedIdentifier(TransactionsIdentifier) } +func (se *stateExport) exportAllValidatorsInfo() error { + toExportValidatorsInfo, err := se.stateSyncer.GetAllValidatorsInfo() + if err != nil { + return err + } + + log.Debug("Starting export for validators info", "len", len(toExportValidatorsInfo)) + for key, validatorInfo := range toExportValidatorsInfo { + errExport := se.exportValidatorInfo(key, validatorInfo) + if errExport != nil { + return errExport + } + } + + return se.hardforkStorer.FinishedIdentifier(ValidatorsInfoIdentifier) +} + func (se *stateExport) exportAllMiniBlocks() error { toExportMBs, err := se.stateSyncer.GetAllMiniBlocks() if err != nil { @@ -393,6 +415,22 @@ func (se *stateExport) exportTx(key string, tx data.TransactionHandler) error { return nil } +func (se *stateExport) exportValidatorInfo(key string, validatorInfo *state.ShardValidatorInfo) error { + marshaledData, err := json.Marshal(validatorInfo) + if err != nil { + return err + } + + keyToSave := CreateValidatorInfoKey(key) + + err = se.hardforkStorer.Write(ValidatorsInfoIdentifier, []byte(keyToSave), marshaledData) + if err != nil { + return err + } + + return nil +} + func (se *stateExport) exportNodesSetupJson(validators map[uint32][]*state.ValidatorInfo) error { acceptedListsForExport := []common.PeerType{common.EligibleList, common.WaitingList, common.JailedList} initialNodes := make([]*sharding.InitialNode, 0) diff --git a/update/genesis/export_test.go b/update/genesis/export_test.go index 5da226caa7b..2f903fce291 100644 --- a/update/genesis/export_test.go +++ b/update/genesis/export_test.go @@ -260,18 +260,9 @@ func TestExportAll(t *testing.T) { }, } - args := ArgsNewStateExporter{ - ShardCoordinator: mock.NewOneShardCoordinatorMock(), - Marshalizer: &mock.MarshalizerMock{}, - StateSyncer: stateSyncer, - HardforkStorer: hs, - Hasher: &hashingMocks.HasherMock{}, - AddressPubKeyConverter: &mock.PubkeyConverterStub{}, - ValidatorPubKeyConverter: &mock.PubkeyConverterStub{}, - ExportFolder: "test", - GenesisNodesSetupHandler: &mock.GenesisNodesSetupHandlerStub{}, - } - + args := getDefaultStateExporterArgs() + args.StateSyncer = stateSyncer + args.HardforkStorer = hs stateExporter, _ := NewStateExporter(args) require.False(t, check.IfNil(stateExporter)) @@ -301,17 +292,11 @@ func TestStateExport_ExportTrieShouldExportNodesSetupJson(t *testing.T) { }, } - args := ArgsNewStateExporter{ - ShardCoordinator: mock.NewOneShardCoordinatorMock(), - Marshalizer: &mock.MarshalizerMock{}, - StateSyncer: 
&mock.StateSyncStub{}, - HardforkStorer: hs, - Hasher: &hashingMocks.HasherMock{}, - ExportFolder: testFolderName, - AddressPubKeyConverter: pubKeyConv, - ValidatorPubKeyConverter: pubKeyConv, - GenesisNodesSetupHandler: &mock.GenesisNodesSetupHandlerStub{}, - } + args := getDefaultStateExporterArgs() + args.HardforkStorer = hs + args.ExportFolder = testFolderName + args.AddressPubKeyConverter = pubKeyConv + args.ValidatorPubKeyConverter = pubKeyConv trie := &trieMock.TrieStub{ RootCalled: func() ([]byte, error) { @@ -357,17 +342,11 @@ func TestStateExport_ExportNodesSetupJsonShouldExportKeysInAlphabeticalOrder(t * }, } - args := ArgsNewStateExporter{ - ShardCoordinator: mock.NewOneShardCoordinatorMock(), - Marshalizer: &mock.MarshalizerMock{}, - StateSyncer: &mock.StateSyncStub{}, - HardforkStorer: hs, - Hasher: &hashingMocks.HasherMock{}, - ExportFolder: testFolderName, - AddressPubKeyConverter: pubKeyConv, - ValidatorPubKeyConverter: pubKeyConv, - GenesisNodesSetupHandler: &mock.GenesisNodesSetupHandlerStub{}, - } + args := getDefaultStateExporterArgs() + args.HardforkStorer = hs + args.ExportFolder = testFolderName + args.AddressPubKeyConverter = pubKeyConv + args.ValidatorPubKeyConverter = pubKeyConv stateExporter, err := NewStateExporter(args) require.NoError(t, err) @@ -428,17 +407,9 @@ func TestStateExport_ExportUnfinishedMetaBlocksShouldWork(t *testing.T) { }, } - args := ArgsNewStateExporter{ - ShardCoordinator: mock.NewOneShardCoordinatorMock(), - Marshalizer: &mock.MarshalizerMock{}, - StateSyncer: stateSyncer, - HardforkStorer: hs, - Hasher: &hashingMocks.HasherMock{}, - AddressPubKeyConverter: &mock.PubkeyConverterStub{}, - ValidatorPubKeyConverter: &mock.PubkeyConverterStub{}, - ExportFolder: "test", - GenesisNodesSetupHandler: &mock.GenesisNodesSetupHandlerStub{}, - } + args := getDefaultStateExporterArgs() + args.StateSyncer = stateSyncer + args.HardforkStorer = hs stateExporter, _ := NewStateExporter(args) require.False(t, check.IfNil(stateExporter)) @@ -448,3 +419,121 @@ func TestStateExport_ExportUnfinishedMetaBlocksShouldWork(t *testing.T) { assert.True(t, unFinishedMetablocksWereWrote) } + +func TestStateExport_ExportAllValidatorsInfo(t *testing.T) { + t.Parallel() + + t.Run("export all validators info with state syncer error", func(t *testing.T) { + t.Parallel() + + expectedStateSyncerErr := errors.New("state syncer error") + args := getDefaultStateExporterArgs() + args.StateSyncer = &mock.StateSyncStub{ + GetAllValidatorsInfoCalled: func() (map[string]*state.ShardValidatorInfo, error) { + return nil, expectedStateSyncerErr + }, + } + + stateExporter, _ := NewStateExporter(args) + err := stateExporter.exportAllValidatorsInfo() + assert.Equal(t, expectedStateSyncerErr, err) + }) + + t.Run("export all validators info with hardfork storer error", func(t *testing.T) { + t.Parallel() + + expectedHardforkStorerErr := errors.New("hardfork storer error") + args := getDefaultStateExporterArgs() + args.StateSyncer = &mock.StateSyncStub{ + GetAllValidatorsInfoCalled: func() (map[string]*state.ShardValidatorInfo, error) { + mapShardValidatorInfo := make(map[string]*state.ShardValidatorInfo) + shardValidatorInfo := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + mapShardValidatorInfo["key"] = shardValidatorInfo + return mapShardValidatorInfo, nil + }, + } + args.HardforkStorer = &mock.HardforkStorerStub{ + WriteCalled: func(identifier string, key []byte, value []byte) error { + return expectedHardforkStorerErr + }, + } + + stateExporter, _ := NewStateExporter(args) + 
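// For reference (a hedged reading, assuming atSep is the package separator constant already
// used by CreateTransactionKey): for the "key" entry returned by the stubbed state syncer
// above, exportValidatorInfo from update/genesis/export.go would json.Marshal the
// ShardValidatorInfo and write it under ValidatorsInfoIdentifier ("validatorsInfo") with the
// key CreateValidatorInfoKey("key"), i.e. "vi" + atSep + "6b6579" (hex of "key"); here the
// stubbed Write fails, which is exactly the error path this subtest asserts on.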
err := stateExporter.exportAllValidatorsInfo() + assert.Equal(t, expectedHardforkStorerErr, err) + }) + + t.Run("export all validators info without error", func(t *testing.T) { + t.Parallel() + + finishedIdentifierWasCalled := false + args := getDefaultStateExporterArgs() + args.HardforkStorer = &mock.HardforkStorerStub{ + FinishedIdentifierCalled: func(identifier string) error { + finishedIdentifierWasCalled = true + return nil + }, + } + + stateExporter, _ := NewStateExporter(args) + err := stateExporter.exportAllValidatorsInfo() + assert.Nil(t, err) + assert.True(t, finishedIdentifierWasCalled) + }) +} + +func TestStateExport_ExportValidatorInfo(t *testing.T) { + t.Parallel() + + t.Run("export validator info with error", func(t *testing.T) { + t.Parallel() + + expectedErr := errors.New("error") + args := getDefaultStateExporterArgs() + args.HardforkStorer = &mock.HardforkStorerStub{ + WriteCalled: func(identifier string, key []byte, value []byte) error { + return expectedErr + }, + } + + stateExporter, _ := NewStateExporter(args) + key := "key" + shardValidatorInfo := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + + err := stateExporter.exportValidatorInfo(key, shardValidatorInfo) + assert.Equal(t, expectedErr, err) + }) + + t.Run("export validator info without error", func(t *testing.T) { + t.Parallel() + + args := getDefaultStateExporterArgs() + + stateExporter, _ := NewStateExporter(args) + key := "key" + shardValidatorInfo := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + + err := stateExporter.exportValidatorInfo(key, shardValidatorInfo) + assert.Nil(t, err) + }) +} + +func getDefaultStateExporterArgs() ArgsNewStateExporter { + return ArgsNewStateExporter{ + ShardCoordinator: mock.NewOneShardCoordinatorMock(), + Marshalizer: &mock.MarshalizerMock{}, + StateSyncer: &mock.StateSyncStub{}, + HardforkStorer: &mock.HardforkStorerStub{}, + Hasher: &hashingMocks.HasherMock{}, + AddressPubKeyConverter: &mock.PubkeyConverterStub{}, + ValidatorPubKeyConverter: &mock.PubkeyConverterStub{}, + ExportFolder: "test", + GenesisNodesSetupHandler: &mock.GenesisNodesSetupHandlerStub{}, + } +} diff --git a/update/interface.go b/update/interface.go index e2c42116a79..79a1760c82d 100644 --- a/update/interface.go +++ b/update/interface.go @@ -20,6 +20,7 @@ type StateSyncer interface { SyncAllState(epoch uint32) error GetAllTries() (map[string]common.Trie, error) GetAllTransactions() (map[string]data.TransactionHandler, error) + GetAllValidatorsInfo() (map[string]*state.ShardValidatorInfo, error) GetAllMiniBlocks() (map[string]*block.MiniBlock, error) IsInterfaceNil() bool } @@ -143,6 +144,8 @@ type EpochStartPendingMiniBlocksSyncHandler interface { type TransactionsSyncHandler interface { SyncTransactionsFor(miniBlocks map[string]*block.MiniBlock, epoch uint32, ctx context.Context) error GetTransactions() (map[string]data.TransactionHandler, error) + GetValidatorsInfo() (map[string]*state.ShardValidatorInfo, error) + ClearFields() IsInterfaceNil() bool } diff --git a/update/mock/stateSyncStub.go b/update/mock/stateSyncStub.go index 329bbff75aa..cff23e6447d 100644 --- a/update/mock/stateSyncStub.go +++ b/update/mock/stateSyncStub.go @@ -4,6 +4,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/data" "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go/common" + "github.com/ElrondNetwork/elrond-go/state" ) // StateSyncStub - @@ -13,6 +14,7 @@ type StateSyncStub struct { SyncAllStateCalled func(epoch uint32) error GetAllTriesCalled func() 
(map[string]common.Trie, error) GetAllTransactionsCalled func() (map[string]data.TransactionHandler, error) + GetAllValidatorsInfoCalled func() (map[string]*state.ShardValidatorInfo, error) GetAllMiniBlocksCalled func() (map[string]*block.MiniBlock, error) } @@ -56,6 +58,14 @@ func (sss *StateSyncStub) GetAllTransactions() (map[string]data.TransactionHandl return nil, nil } +// GetAllValidatorsInfo - +func (sss *StateSyncStub) GetAllValidatorsInfo() (map[string]*state.ShardValidatorInfo, error) { + if sss.GetAllValidatorsInfoCalled != nil { + return sss.GetAllValidatorsInfoCalled() + } + return nil, nil +} + // GetAllMiniBlocks - func (sss *StateSyncStub) GetAllMiniBlocks() (map[string]*block.MiniBlock, error) { if sss.GetAllMiniBlocksCalled != nil { diff --git a/update/process/baseProcess.go b/update/process/baseProcess.go index 6c9fd27ba88..3fb6b28b953 100644 --- a/update/process/baseProcess.go +++ b/update/process/baseProcess.go @@ -230,12 +230,8 @@ func (b *baseProcessor) saveAllBlockDataToStorageForSelfShard( func (b *baseProcessor) saveMiniBlocks(headerHandler data.HeaderHandler, body *block.Body) { miniBlockHeadersHashes := headerHandler.GetMiniBlockHeadersHashes() - mapBlockTypesTxs := make(map[block.Type]map[string]data.TransactionHandler) for i := 0; i < len(body.MiniBlocks); i++ { miniBlock := body.MiniBlocks[i] - if _, ok := mapBlockTypesTxs[miniBlock.Type]; !ok { - mapBlockTypesTxs[miniBlock.Type] = b.txCoordinator.GetAllCurrentUsedTxs(miniBlock.Type) - } marshalizedMiniBlock, errNotCritical := b.marshalizer.Marshal(miniBlock) if errNotCritical != nil { diff --git a/update/sync/coordinator.go b/update/sync/coordinator.go index 5e746affa74..ffcdc1027be 100644 --- a/update/sync/coordinator.go +++ b/update/sync/coordinator.go @@ -11,6 +11,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/data/block" logger "github.com/ElrondNetwork/elrond-go-logger" "github.com/ElrondNetwork/elrond-go/common" + "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/update" ) @@ -212,6 +213,11 @@ func (ss *syncState) GetAllTransactions() (map[string]data.TransactionHandler, e return ss.transactions.GetTransactions() } +// GetAllValidatorsInfo returns the synced validators info +func (ss *syncState) GetAllValidatorsInfo() (map[string]*state.ShardValidatorInfo, error) { + return ss.transactions.GetValidatorsInfo() +} + // GetAllMiniBlocks returns the synced miniblocks func (ss *syncState) GetAllMiniBlocks() (map[string]*block.MiniBlock, error) { return ss.miniBlocks.GetMiniBlocks() diff --git a/update/sync/syncTransactions.go b/update/sync/syncTransactions.go index b9b89d96a2f..00a2584a2a7 100644 --- a/update/sync/syncTransactions.go +++ b/update/sync/syncTransactions.go @@ -15,6 +15,8 @@ import ( "github.com/ElrondNetwork/elrond-go-core/marshal" "github.com/ElrondNetwork/elrond-go/dataRetriever" "github.com/ElrondNetwork/elrond-go/process" + "github.com/ElrondNetwork/elrond-go/state" + "github.com/ElrondNetwork/elrond-go/storage/txcache" "github.com/ElrondNetwork/elrond-go/update" ) @@ -23,12 +25,13 @@ var _ update.TransactionsSyncHandler = (*transactionsSync)(nil) type transactionsSync struct { mutPendingTx sync.Mutex mapTransactions map[string]data.TransactionHandler - mapHashes map[string]*block.MiniBlock + mapTxsToMiniBlocks map[string]*block.MiniBlock + mapValidatorsInfo map[string]*state.ShardValidatorInfo txPools map[block.Type]dataRetriever.ShardedDataCacherNotifier storage map[block.Type]update.HistoryStorer chReceivedAll chan bool requestHandler 
process.RequestHandler - marshalizer marshal.Marshalizer + marshaller marshal.Marshalizer epochToSync uint32 stopSync bool syncedAll bool @@ -39,7 +42,7 @@ type transactionsSync struct { type ArgsNewTransactionsSyncer struct { DataPools dataRetriever.PoolsHolder Storages dataRetriever.StorageService - Marshalizer marshal.Marshalizer + Marshaller marshal.Marshalizer RequestHandler process.RequestHandler } @@ -51,7 +54,7 @@ func NewTransactionsSyncer(args ArgsNewTransactionsSyncer) (*transactionsSync, e if check.IfNil(args.DataPools) { return nil, dataRetriever.ErrNilDataPoolHolder } - if check.IfNil(args.Marshalizer) { + if check.IfNil(args.Marshaller) { return nil, dataRetriever.ErrNilMarshalizer } if check.IfNil(args.RequestHandler) { @@ -61,10 +64,11 @@ func NewTransactionsSyncer(args ArgsNewTransactionsSyncer) (*transactionsSync, e ts := &transactionsSync{ mutPendingTx: sync.Mutex{}, mapTransactions: make(map[string]data.TransactionHandler), - mapHashes: make(map[string]*block.MiniBlock), + mapTxsToMiniBlocks: make(map[string]*block.MiniBlock), + mapValidatorsInfo: make(map[string]*state.ShardValidatorInfo), chReceivedAll: make(chan bool), requestHandler: args.RequestHandler, - marshalizer: args.Marshalizer, + marshaller: args.Marshaller, stopSync: true, syncedAll: true, waitTimeBetweenRequests: args.RequestHandler.RequestInterval(), @@ -74,6 +78,7 @@ func NewTransactionsSyncer(args ArgsNewTransactionsSyncer) (*transactionsSync, e ts.txPools[block.TxBlock] = args.DataPools.Transactions() ts.txPools[block.SmartContractResultBlock] = args.DataPools.UnsignedTransactions() ts.txPools[block.RewardsBlock] = args.DataPools.RewardTransactions() + ts.txPools[block.PeerBlock] = args.DataPools.ValidatorsInfo() var err error ts.storage = make(map[block.Type]update.HistoryStorer) @@ -92,7 +97,17 @@ func NewTransactionsSyncer(args ArgsNewTransactionsSyncer) (*transactionsSync, e return nil, err } - for _, pool := range ts.txPools { + ts.storage[block.PeerBlock], err = args.Storages.GetStorer(dataRetriever.UnsignedTransactionUnit) + if err != nil { + return nil, err + } + + for poolType, pool := range ts.txPools { + if poolType == block.PeerBlock { + pool.RegisterOnAdded(ts.receivedValidatorInfo) + continue + } + pool.RegisterOnAdded(ts.receivedTransaction) } @@ -112,7 +127,7 @@ func (ts *transactionsSync) SyncTransactionsFor(miniBlocks map[string]*block.Min numRequestedTxs := 0 for _, miniBlock := range miniBlocks { for _, txHash := range miniBlock.TxHashes { - ts.mapHashes[string(txHash)] = miniBlock + ts.mapTxsToMiniBlocks[string(txHash)] = miniBlock log.Debug("transactionsSync.SyncTransactionsFor", "mb type", miniBlock.Type, "mb sender", miniBlock.SenderShardID, "mb receiver", miniBlock.ReceiverShardID, "tx hash needed", txHash) } numRequestedTxs += ts.requestTransactionsFor(miniBlock) @@ -136,7 +151,7 @@ func (ts *transactionsSync) SyncTransactionsFor(miniBlocks map[string]*block.Min return nil case <-time.After(ts.waitTimeBetweenRequests): ts.mutPendingTx.Lock() - log.Debug("transactionsSync.SyncTransactionsFor", "num txs needed", len(ts.mapHashes), "num txs got", len(ts.mapTransactions)) + log.Debug("transactionsSync.SyncTransactionsFor", "num txs needed", len(ts.mapTxsToMiniBlocks), "num txs got", len(ts.mapTransactions)) ts.mutPendingTx.Unlock() continue case <-ctx.Done(): @@ -149,6 +164,15 @@ func (ts *transactionsSync) SyncTransactionsFor(miniBlocks map[string]*block.Min } func (ts *transactionsSync) requestTransactionsFor(miniBlock *block.MiniBlock) int { + if miniBlock.Type == 
block.PeerBlock { + return ts.requestTransactionsForPeerMiniBlock(miniBlock) + } + + return ts.requestTransactionsForNonPeerMiniBlock(miniBlock) + +} + +func (ts *transactionsSync) requestTransactionsForNonPeerMiniBlock(miniBlock *block.MiniBlock) int { missingTxs := make([][]byte, 0) for _, txHash := range miniBlock.TxHashes { if _, ok := ts.mapTransactions[string(txHash)]; ok { @@ -165,8 +189,8 @@ func (ts *transactionsSync) requestTransactionsFor(miniBlock *block.MiniBlock) i } for _, txHash := range missingTxs { - ts.mapHashes[string(txHash)] = miniBlock - log.Debug("transactionsSync.requestTransactionsFor", "mb type", miniBlock.Type, "mb sender", miniBlock.SenderShardID, "mb receiver", miniBlock.ReceiverShardID, "tx hash missing", txHash) + ts.mapTxsToMiniBlocks[string(txHash)] = miniBlock + log.Debug("transactionsSync.requestTransactionsForNonPeerMiniBlock", "mb type", miniBlock.Type, "mb sender", miniBlock.SenderShardID, "mb receiver", miniBlock.ReceiverShardID, "tx hash missing", txHash) } mbType := miniBlock.Type @@ -189,6 +213,33 @@ func (ts *transactionsSync) requestTransactionsFor(miniBlock *block.MiniBlock) i return len(missingTxs) } +func (ts *transactionsSync) requestTransactionsForPeerMiniBlock(miniBlock *block.MiniBlock) int { + missingValidatorsInfo := make([][]byte, 0) + for _, txHash := range miniBlock.TxHashes { + _, isValidatorInfoFound := ts.mapValidatorsInfo[string(txHash)] + if isValidatorInfoFound { + continue + } + + validatorInfo, ok := ts.getValidatorInfoFromPoolOrStorage(txHash) + if ok { + ts.mapValidatorsInfo[string(txHash)] = validatorInfo + continue + } + + missingValidatorsInfo = append(missingValidatorsInfo, txHash) + } + + for _, txHash := range missingValidatorsInfo { + ts.mapTxsToMiniBlocks[string(txHash)] = miniBlock + log.Debug("transactionsSync.requestTransactionsForPeerMiniBlock", "mb type", miniBlock.Type, "mb sender", miniBlock.SenderShardID, "mb receiver", miniBlock.ReceiverShardID, "tx hash missing", txHash) + } + + go ts.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) + + return len(missingValidatorsInfo) +} + func (ts *transactionsSync) receivedTransaction(txHash []byte, val interface{}) { ts.mutPendingTx.Lock() if ts.stopSync { @@ -196,27 +247,73 @@ func (ts *transactionsSync) receivedTransaction(txHash []byte, val interface{}) return } - miniBlock, ok := ts.mapHashes[string(txHash)] + miniBlock, foundInMap := ts.mapTxsToMiniBlocks[string(txHash)] + if !foundInMap { + ts.mutPendingTx.Unlock() + return + } + _, foundInMap = ts.mapTransactions[string(txHash)] + if foundInMap { + ts.mutPendingTx.Unlock() + return + } + + var tx data.TransactionHandler + var wrappedTx *txcache.WrappedTransaction + var ok bool + + tx, ok = val.(data.TransactionHandler) + if !ok { + wrappedTx, ok = val.(*txcache.WrappedTransaction) + if !ok { + ts.mutPendingTx.Unlock() + log.Error("transactionsSync.receivedTransaction", "tx hash", txHash, "error", update.ErrWrongTypeAssertion) + return + } + + tx = wrappedTx.Tx + } + + log.Debug("transactionsSync.receivedTransaction", "mb type", miniBlock.Type, "mb sender", miniBlock.SenderShardID, "mb receiver", miniBlock.ReceiverShardID, "tx hash got", txHash) + + ts.mapTransactions[string(txHash)] = tx + receivedAllMissing := len(ts.mapTxsToMiniBlocks) == len(ts.mapTransactions)+len(ts.mapValidatorsInfo) + ts.mutPendingTx.Unlock() + + if receivedAllMissing { + ts.chReceivedAll <- true + } +} + +func (ts *transactionsSync) receivedValidatorInfo(txHash []byte, val interface{}) { + ts.mutPendingTx.Lock() + if 
ts.stopSync { + ts.mutPendingTx.Unlock() + return + } + + miniBlock, ok := ts.mapTxsToMiniBlocks[string(txHash)] if !ok { ts.mutPendingTx.Unlock() return } - _, ok = ts.mapTransactions[string(txHash)] + _, ok = ts.mapValidatorsInfo[string(txHash)] if ok { ts.mutPendingTx.Unlock() return } - tx, ok := val.(data.TransactionHandler) + validatorInfo, ok := val.(*state.ShardValidatorInfo) if !ok { ts.mutPendingTx.Unlock() + log.Error("transactionsSync.receivedValidatorInfo", "tx hash", txHash, "error", update.ErrWrongTypeAssertion) return } - log.Debug("transactionsSync.receivedTransaction", "mb type", miniBlock.Type, "mb sender", miniBlock.SenderShardID, "mb receiver", miniBlock.ReceiverShardID, "tx hash got", txHash) + log.Debug("transactionsSync.receivedValidatorInfo", "mb type", miniBlock.Type, "mb sender", miniBlock.SenderShardID, "mb receiver", miniBlock.ReceiverShardID, "tx hash got", txHash) - ts.mapTransactions[string(txHash)] = tx - receivedAllMissing := len(ts.mapHashes) == len(ts.mapTransactions) + ts.mapValidatorsInfo[string(txHash)] = validatorInfo + receivedAllMissing := len(ts.mapTxsToMiniBlocks) == len(ts.mapValidatorsInfo)+len(ts.mapTransactions) ts.mutPendingTx.Unlock() if receivedAllMissing { @@ -225,7 +322,7 @@ func (ts *transactionsSync) receivedTransaction(txHash []byte, val interface{}) } func (ts *transactionsSync) getTransactionFromPool(txHash []byte) (data.TransactionHandler, bool) { - mb, ok := ts.mapHashes[string(txHash)] + mb, ok := ts.mapTxsToMiniBlocks[string(txHash)] if !ok { return nil, false } @@ -259,12 +356,50 @@ func (ts *transactionsSync) getTransactionFromPool(txHash []byte) (data.Transact tx, ok := val.(data.TransactionHandler) if !ok { + log.Error("transactionsSync.getTransactionFromPool", "tx hash", txHash, "error", update.ErrWrongTypeAssertion) return nil, false } return tx, true } +func (ts *transactionsSync) getValidatorInfoFromPool(txHash []byte) (*state.ShardValidatorInfo, bool) { + mb, ok := ts.mapTxsToMiniBlocks[string(txHash)] + if !ok { + return nil, false + } + + if _, ok = ts.txPools[block.PeerBlock]; !ok { + log.Debug("transactionsSync.getValidatorInfoFromPool: missing mini block type from sharded data cacher notifier map", + "tx hash", txHash, + "original mb type", mb.Type, + "mb type", block.PeerBlock, + "mb sender shard", mb.SenderShardID, + "mb receiver shard", mb.ReceiverShardID, + "mb num txs", len(mb.TxHashes)) + return nil, false + } + + storeId := process.ShardCacherIdentifier(mb.SenderShardID, mb.ReceiverShardID) + shardTxStore := ts.txPools[block.PeerBlock].ShardDataStore(storeId) + if check.IfNil(shardTxStore) { + return nil, false + } + + val, ok := shardTxStore.Peek(txHash) + if !ok { + return nil, false + } + + validatorInfo, ok := val.(*state.ShardValidatorInfo) + if !ok { + log.Error("transactionsSync.getValidatorInfoFromPool", "tx hash", txHash, "error", update.ErrWrongTypeAssertion) + return nil, false + } + + return validatorInfo, true +} + func (ts *transactionsSync) getTransactionFromPoolWithSearchFirst( txHash []byte, cacher dataRetriever.ShardedDataCacherNotifier, @@ -282,13 +417,30 @@ func (ts *transactionsSync) getTransactionFromPoolWithSearchFirst( return tx, true } +func (ts *transactionsSync) getValidatorInfoFromPoolWithSearchFirst( + txHash []byte, + cacher dataRetriever.ShardedDataCacherNotifier, +) (*state.ShardValidatorInfo, bool) { + val, ok := cacher.SearchFirstData(txHash) + if !ok { + return nil, false + } + + validatorInfo, ok := val.(*state.ShardValidatorInfo) + if !ok { + return nil, false + } + + 
return validatorInfo, true +} + func (ts *transactionsSync) getTransactionFromPoolOrStorage(hash []byte) (data.TransactionHandler, bool) { txFromPool, ok := ts.getTransactionFromPool(hash) if ok { return txFromPool, true } - miniBlock, ok := ts.mapHashes[string(hash)] + miniBlock, ok := ts.mapTxsToMiniBlocks[string(hash)] if !ok { return nil, false } @@ -319,7 +471,7 @@ func (ts *transactionsSync) getTransactionFromPoolOrStorage(hash []byte) (data.T tx = &rewardTx.RewardTx{} } - err = ts.marshalizer.Unmarshal(tx, txData) + err = ts.marshaller.Unmarshal(tx, txData) if err != nil { return nil, false } @@ -327,6 +479,38 @@ func (ts *transactionsSync) getTransactionFromPoolOrStorage(hash []byte) (data.T return tx, true } +func (ts *transactionsSync) getValidatorInfoFromPoolOrStorage(hash []byte) (*state.ShardValidatorInfo, bool) { + validatorInfoFromPool, ok := ts.getValidatorInfoFromPool(hash) + if ok { + return validatorInfoFromPool, true + } + + miniBlock, ok := ts.mapTxsToMiniBlocks[string(hash)] + if !ok { + return nil, false + } + + validatorInfoFromPoolWithSearchFirst, ok := ts.getValidatorInfoFromPoolWithSearchFirst(hash, ts.txPools[block.PeerBlock]) + if ok { + log.Debug("transactionsSync.getValidatorInfoFromPoolOrStorage: found transaction using search first", "mb type", miniBlock.Type, "mb sender", miniBlock.SenderShardID, "mb receiver", miniBlock.ReceiverShardID, "tx hash", hash) + return validatorInfoFromPoolWithSearchFirst, true + } + + validatorInfoData, err := GetDataFromStorage(hash, ts.storage[block.PeerBlock]) + if err != nil { + return nil, false + } + + validatorInfo := &state.ShardValidatorInfo{} + + err = ts.marshaller.Unmarshal(validatorInfo, validatorInfoData) + if err != nil { + return nil, false + } + + return validatorInfo, true +} + // GetTransactions returns the synced transactions func (ts *transactionsSync) GetTransactions() (map[string]data.TransactionHandler, error) { ts.mutPendingTx.Lock() @@ -338,6 +522,26 @@ func (ts *transactionsSync) GetTransactions() (map[string]data.TransactionHandle return ts.mapTransactions, nil } +// GetValidatorsInfo returns the synced validators info +func (ts *transactionsSync) GetValidatorsInfo() (map[string]*state.ShardValidatorInfo, error) { + ts.mutPendingTx.Lock() + defer ts.mutPendingTx.Unlock() + if !ts.syncedAll { + return nil, update.ErrNotSynced + } + + return ts.mapValidatorsInfo, nil +} + +// ClearFields will clear all the maps +func (ts *transactionsSync) ClearFields() { + ts.mutPendingTx.Lock() + ts.mapTransactions = make(map[string]data.TransactionHandler) + ts.mapTxsToMiniBlocks = make(map[string]*block.MiniBlock) + ts.mapValidatorsInfo = make(map[string]*state.ShardValidatorInfo) + ts.mutPendingTx.Unlock() +} + // IsInterfaceNil returns true if underlying object is nil func (ts *transactionsSync) IsInterfaceNil() bool { return ts == nil diff --git a/update/sync/syncTransactions_test.go b/update/sync/syncTransactions_test.go index 9187cadc6ca..6adbe65ac71 100644 --- a/update/sync/syncTransactions_test.go +++ b/update/sync/syncTransactions_test.go @@ -1,23 +1,28 @@ package sync import ( + "bytes" "context" "encoding/json" + "errors" "fmt" "math/big" "strings" "testing" "time" + "github.com/ElrondNetwork/elrond-go-core/core/atomic" "github.com/ElrondNetwork/elrond-go-core/core/check" "github.com/ElrondNetwork/elrond-go-core/data/block" dataTransaction "github.com/ElrondNetwork/elrond-go-core/data/transaction" "github.com/ElrondNetwork/elrond-go/dataRetriever" "github.com/ElrondNetwork/elrond-go/process" + 
"github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/storage" "github.com/ElrondNetwork/elrond-go/testscommon" dataRetrieverMock "github.com/ElrondNetwork/elrond-go/testscommon/dataRetriever" storageStubs "github.com/ElrondNetwork/elrond-go/testscommon/storage" + "github.com/ElrondNetwork/elrond-go/update" "github.com/ElrondNetwork/elrond-go/update/mock" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -31,7 +36,7 @@ func createMockArgs() ArgsNewTransactionsSyncer { return &storageStubs.StorerStub{}, nil }, }, - Marshalizer: &mock.MarshalizerFake{}, + Marshaller: &mock.MarshalizerFake{}, RequestHandler: &testscommon.RequestHandlerStub{}, } } @@ -73,7 +78,7 @@ func TestNewPendingTransactionsSyncer_NilMarshalizer(t *testing.T) { t.Parallel() args := createMockArgs() - args.Marshalizer = nil + args.Marshaller = nil pendingTxsSyncer, err := NewTransactionsSyncer(args) require.Nil(t, pendingTxsSyncer) @@ -341,3 +346,520 @@ func TestSyncPendingTransactionsFor_ReceiveMissingTx(t *testing.T) { cancel() require.Nil(t, err) } + +func TestTransactionsSync_RequestTransactionsForPeerMiniBlockShouldWork(t *testing.T) { + t.Parallel() + + svi1 := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + svi2 := &state.ShardValidatorInfo{ + PublicKey: []byte("y"), + } + + args := createMockArgs() + args.DataPools = getDataPoolsWithShardValidatorInfoAndTxHash(svi2, []byte("b")) + transactionsSyncer, _ := NewTransactionsSyncer(args) + + miniBlock := &block.MiniBlock{ + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + } + transactionsSyncer.mutPendingTx.Lock() + transactionsSyncer.mapValidatorsInfo["a"] = svi1 + transactionsSyncer.mapTxsToMiniBlocks["b"] = miniBlock + numMissingValidatorsInfo := transactionsSyncer.requestTransactionsForPeerMiniBlock(miniBlock) + + assert.Equal(t, 1, numMissingValidatorsInfo) + assert.Equal(t, 2, len(transactionsSyncer.mapValidatorsInfo)) + assert.Equal(t, 2, len(transactionsSyncer.mapTxsToMiniBlocks)) + assert.Equal(t, svi2, transactionsSyncer.mapValidatorsInfo["b"]) + assert.Equal(t, miniBlock, transactionsSyncer.mapTxsToMiniBlocks["c"]) + transactionsSyncer.mutPendingTx.Unlock() +} + +func TestTransactionsSync_ReceivedValidatorInfo(t *testing.T) { + t.Parallel() + + txHash := []byte("hash") + svi := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + + args := createMockArgs() + transactionsSyncer, _ := NewTransactionsSyncer(args) + + // stop sync is true + transactionsSyncer.receivedValidatorInfo(txHash, svi) + transactionsSyncer.mutPendingTx.Lock() + assert.Equal(t, 0, len(transactionsSyncer.mapValidatorsInfo)) + transactionsSyncer.mutPendingTx.Unlock() + + // txHash does not exist in mapTxsToMiniBlocks + transactionsSyncer.stopSync = false + transactionsSyncer.receivedValidatorInfo(txHash, svi) + transactionsSyncer.mutPendingTx.Lock() + assert.Equal(t, 0, len(transactionsSyncer.mapValidatorsInfo)) + transactionsSyncer.mutPendingTx.Unlock() + + miniBlock := &block.MiniBlock{ + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + } + transactionsSyncer.mutPendingTx.Lock() + transactionsSyncer.mapTxsToMiniBlocks[string(txHash)] = miniBlock + transactionsSyncer.mutPendingTx.Unlock() + + // value received is not of type *state.ShardValidatorInfo + transactionsSyncer.receivedValidatorInfo(txHash, nil) + transactionsSyncer.mutPendingTx.Lock() + assert.Equal(t, 0, len(transactionsSyncer.mapValidatorsInfo)) + transactionsSyncer.mutPendingTx.Unlock() + + wasReceivedAll := 
atomic.Flag{} + go func() { + select { + case <-transactionsSyncer.chReceivedAll: + wasReceivedAll.SetValue(true) + return + case <-time.After(time.Second): + } + }() + + // received all missing validators info with success + transactionsSyncer.receivedValidatorInfo(txHash, svi) + transactionsSyncer.mutPendingTx.Lock() + assert.Equal(t, 1, len(transactionsSyncer.mapValidatorsInfo)) + transactionsSyncer.mutPendingTx.Unlock() + assert.True(t, wasReceivedAll.IsSet()) +} + +func TestTransactionsSync_GetValidatorInfoFromPoolShouldWork(t *testing.T) { + t.Parallel() + + t.Run("get validator info from pool when tx hash does not exist in mapTxsToMiniBlocks", func(t *testing.T) { + t.Parallel() + + txHash := []byte("hash") + + args := createMockArgs() + transactionsSyncer, _ := NewTransactionsSyncer(args) + + shardValidatorInfo, bFound := transactionsSyncer.getValidatorInfoFromPool(txHash) + assert.Nil(t, shardValidatorInfo) + assert.False(t, bFound) + }) + + t.Run("get validator info from pool when shard data store is missing", func(t *testing.T) { + t.Parallel() + + txHash := []byte("hash") + + args := createMockArgs() + args.DataPools = &dataRetrieverMock.PoolsHolderStub{ + ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { + return &testscommon.ShardedDataStub{ + ShardDataStoreCalled: func(cacheID string) storage.Cacher { + return nil + }, + } + }, + } + transactionsSyncer, _ := NewTransactionsSyncer(args) + + miniBlock := &block.MiniBlock{ + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + } + transactionsSyncer.mutPendingTx.Lock() + transactionsSyncer.mapTxsToMiniBlocks[string(txHash)] = miniBlock + transactionsSyncer.mutPendingTx.Unlock() + + shardValidatorInfo, bFound := transactionsSyncer.getValidatorInfoFromPool(txHash) + assert.Nil(t, shardValidatorInfo) + assert.False(t, bFound) + }) + + t.Run("get validator info from pool when tx hash is not found in shard data store", func(t *testing.T) { + t.Parallel() + + txHash := []byte("hash") + + args := createMockArgs() + args.DataPools = getDataPoolsWithShardValidatorInfoAndTxHash(nil, nil) + transactionsSyncer, _ := NewTransactionsSyncer(args) + + miniBlock := &block.MiniBlock{ + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + } + transactionsSyncer.mutPendingTx.Lock() + transactionsSyncer.mapTxsToMiniBlocks[string(txHash)] = miniBlock + transactionsSyncer.mutPendingTx.Unlock() + + shardValidatorInfo, bFound := transactionsSyncer.getValidatorInfoFromPool(txHash) + assert.Nil(t, shardValidatorInfo) + assert.False(t, bFound) + }) + + t.Run("get validator info from pool when value received from pool is not of type *state.ShardValidatorInfo", func(t *testing.T) { + t.Parallel() + + txHash := []byte("hash") + + args := createMockArgs() + args.DataPools = &dataRetrieverMock.PoolsHolderStub{ + ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { + return &testscommon.ShardedDataStub{ + ShardDataStoreCalled: func(cacheID string) storage.Cacher { + return &testscommon.CacherStub{ + PeekCalled: func(key []byte) (value interface{}, ok bool) { + if bytes.Equal(key, txHash) { + return nil, true + } + return nil, false + }, + } + }, + } + }, + } + transactionsSyncer, _ := NewTransactionsSyncer(args) + + miniBlock := &block.MiniBlock{ + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + } + transactionsSyncer.mutPendingTx.Lock() + transactionsSyncer.mapTxsToMiniBlocks[string(txHash)] = miniBlock + transactionsSyncer.mutPendingTx.Unlock() + + 
shardValidatorInfo, bFound := transactionsSyncer.getValidatorInfoFromPool(txHash) + assert.Nil(t, shardValidatorInfo) + assert.False(t, bFound) + }) + + t.Run("get validator info from pool should work", func(t *testing.T) { + t.Parallel() + + txHash := []byte("hash") + svi := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + + args := createMockArgs() + args.DataPools = getDataPoolsWithShardValidatorInfoAndTxHash(svi, txHash) + transactionsSyncer, _ := NewTransactionsSyncer(args) + + miniBlock := &block.MiniBlock{ + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + } + transactionsSyncer.mutPendingTx.Lock() + transactionsSyncer.mapTxsToMiniBlocks[string(txHash)] = miniBlock + transactionsSyncer.mutPendingTx.Unlock() + + shardValidatorInfo, bFound := transactionsSyncer.getValidatorInfoFromPool(txHash) + assert.Equal(t, svi, shardValidatorInfo) + assert.True(t, bFound) + }) +} + +func TestTransactionsSync_GetValidatorInfoFromPoolWithSearchFirstShouldWork(t *testing.T) { + t.Parallel() + + txHash := []byte("hash") + svi := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + + args := createMockArgs() + transactionsSyncer, _ := NewTransactionsSyncer(args) + + // txHash is not found in validatorInfoPool + validatorsInfoPool := &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + return nil, false + }, + } + shardValidatorInfo, bFound := transactionsSyncer.getValidatorInfoFromPoolWithSearchFirst(txHash, validatorsInfoPool) + assert.Nil(t, shardValidatorInfo) + assert.False(t, bFound) + + // value received from validatorInfoPool is not of type *state.ShardValidatorInfo + validatorsInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + return nil, true + }, + } + shardValidatorInfo, bFound = transactionsSyncer.getValidatorInfoFromPoolWithSearchFirst(txHash, validatorsInfoPool) + assert.Nil(t, shardValidatorInfo) + assert.False(t, bFound) + + // get validator info from pool with search first should work + validatorsInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + if bytes.Equal(key, txHash) { + return svi, true + } + return nil, false + }, + } + shardValidatorInfo, bFound = transactionsSyncer.getValidatorInfoFromPoolWithSearchFirst(txHash, validatorsInfoPool) + assert.Equal(t, svi, shardValidatorInfo) + assert.True(t, bFound) +} + +func TestTransactionsSync_GetValidatorInfoFromPoolOrStorage(t *testing.T) { + t.Parallel() + + t.Run("get validator info from pool or storage should work from pool", func(t *testing.T) { + t.Parallel() + + txHash := []byte("hash") + svi := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + + args := createMockArgs() + args.DataPools = getDataPoolsWithShardValidatorInfoAndTxHash(svi, txHash) + transactionsSyncer, _ := NewTransactionsSyncer(args) + + miniBlock := &block.MiniBlock{ + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + } + transactionsSyncer.mutPendingTx.Lock() + transactionsSyncer.mapTxsToMiniBlocks[string(txHash)] = miniBlock + transactionsSyncer.mutPendingTx.Unlock() + + shardValidatorInfo, bFound := transactionsSyncer.getValidatorInfoFromPoolOrStorage(txHash) + assert.Equal(t, svi, shardValidatorInfo) + assert.True(t, bFound) + }) + + t.Run("get validator info from pool or storage when txHash does not exist in mapTxsToMiniBlocks", func(t *testing.T) { + t.Parallel() + + txHash := []byte("hash") + + args := 
createMockArgs() + transactionsSyncer, _ := NewTransactionsSyncer(args) + + shardValidatorInfo, bFound := transactionsSyncer.getValidatorInfoFromPoolOrStorage(txHash) + assert.Nil(t, shardValidatorInfo) + assert.False(t, bFound) + }) + + t.Run("get validator info from pool or storage should work using search first", func(t *testing.T) { + t.Parallel() + + txHash := []byte("hash") + svi := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + + args := createMockArgs() + args.DataPools = &dataRetrieverMock.PoolsHolderStub{ + ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { + return &testscommon.ShardedDataStub{ + ShardDataStoreCalled: func(cacheID string) storage.Cacher { + return &testscommon.CacherStub{ + PeekCalled: func(key []byte) (value interface{}, ok bool) { + return nil, false + }, + } + }, + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + return svi, true + }, + } + }, + } + transactionsSyncer, _ := NewTransactionsSyncer(args) + + miniBlock := &block.MiniBlock{ + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + } + transactionsSyncer.mutPendingTx.Lock() + transactionsSyncer.mapTxsToMiniBlocks[string(txHash)] = miniBlock + transactionsSyncer.mutPendingTx.Unlock() + + shardValidatorInfo, bFound := transactionsSyncer.getValidatorInfoFromPoolOrStorage(txHash) + assert.Equal(t, svi, shardValidatorInfo) + assert.True(t, bFound) + }) + + t.Run("get validator info from pool or storage when txHash does not exist in storage", func(t *testing.T) { + t.Parallel() + + txHash := []byte("hash") + + args := createMockArgs() + args.Storages = &storageStubs.ChainStorerStub{ + GetStorerCalled: func(unitType dataRetriever.UnitType) (storage.Storer, error) { + return &storageStubs.StorerStub{ + GetCalled: func(key []byte) ([]byte, error) { + return nil, errors.New("error") + }, + }, nil + }, + } + args.DataPools = getDataPoolsWithShardValidatorInfoAndTxHash(nil, nil) + transactionsSyncer, _ := NewTransactionsSyncer(args) + + miniBlock := &block.MiniBlock{ + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + } + transactionsSyncer.mutPendingTx.Lock() + transactionsSyncer.mapTxsToMiniBlocks[string(txHash)] = miniBlock + transactionsSyncer.mutPendingTx.Unlock() + + shardValidatorInfo, bFound := transactionsSyncer.getValidatorInfoFromPoolOrStorage(txHash) + assert.Nil(t, shardValidatorInfo) + assert.False(t, bFound) + }) + + t.Run("get validator info from pool or storage should work from storage", func(t *testing.T) { + t.Parallel() + + txHash := []byte("hash") + svi := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + + args := createMockArgs() + marshalledSVI, _ := args.Marshaller.Marshal(svi) + args.Storages = &storageStubs.ChainStorerStub{ + GetStorerCalled: func(unitType dataRetriever.UnitType) (storage.Storer, error) { + return &storageStubs.StorerStub{ + GetCalled: func(key []byte) ([]byte, error) { + if bytes.Equal(key, txHash) { + return marshalledSVI, nil + } + return nil, errors.New("error") + }, + }, nil + }, + } + args.DataPools = getDataPoolsWithShardValidatorInfoAndTxHash(nil, nil) + transactionsSyncer, _ := NewTransactionsSyncer(args) + + miniBlock := &block.MiniBlock{ + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + } + transactionsSyncer.mutPendingTx.Lock() + transactionsSyncer.mapTxsToMiniBlocks[string(txHash)] = miniBlock + transactionsSyncer.mutPendingTx.Unlock() + + shardValidatorInfo, bFound := transactionsSyncer.getValidatorInfoFromPoolOrStorage(txHash) 
+		assert.Equal(t, svi, shardValidatorInfo)
+		assert.True(t, bFound)
+	})
+}
+
+func TestTransactionsSync_GetValidatorsInfoShouldWork(t *testing.T) {
+	t.Parallel()
+
+	args := createMockArgs()
+	transactionsSyncer, _ := NewTransactionsSyncer(args)
+
+	transactionsSyncer.syncedAll = false
+	mapShardValidatorInfo, err := transactionsSyncer.GetValidatorsInfo()
+	assert.Nil(t, mapShardValidatorInfo)
+	assert.Equal(t, update.ErrNotSynced, err)
+
+	txHash1 := []byte("hash1")
+	svi1 := &state.ShardValidatorInfo{
+		PublicKey: []byte("x"),
+	}
+	txHash2 := []byte("hash2")
+	svi2 := &state.ShardValidatorInfo{
+		PublicKey: []byte("y"),
+	}
+	transactionsSyncer.mapValidatorsInfo[string(txHash1)] = svi1
+	transactionsSyncer.mapValidatorsInfo[string(txHash2)] = svi2
+
+	transactionsSyncer.syncedAll = true
+	mapShardValidatorInfo, err = transactionsSyncer.GetValidatorsInfo()
+	assert.Equal(t, 2, len(mapShardValidatorInfo))
+	assert.Equal(t, svi1, mapShardValidatorInfo[string(txHash1)])
+	assert.Equal(t, svi2, mapShardValidatorInfo[string(txHash2)])
+	assert.Nil(t, err)
+}
+
+func TestTransactionsSync_ClearFieldsShouldWork(t *testing.T) {
+	t.Parallel()
+
+	args := createMockArgs()
+	transactionsSyncer, _ := NewTransactionsSyncer(args)
+
+	transactionsSyncer.mapTransactions["a"] = &dataTransaction.Transaction{}
+	transactionsSyncer.mapTxsToMiniBlocks["b"] = &block.MiniBlock{}
+	transactionsSyncer.mapValidatorsInfo["c"] = &state.ShardValidatorInfo{}
+
+	assert.Equal(t, 1, len(transactionsSyncer.mapTransactions))
+	assert.Equal(t, 1, len(transactionsSyncer.mapTxsToMiniBlocks))
+	assert.Equal(t, 1, len(transactionsSyncer.mapValidatorsInfo))
+
+	transactionsSyncer.ClearFields()
+
+	assert.Equal(t, 0, len(transactionsSyncer.mapTransactions))
+	assert.Equal(t, 0, len(transactionsSyncer.mapTxsToMiniBlocks))
+	assert.Equal(t, 0, len(transactionsSyncer.mapValidatorsInfo))
+}
+
+func getDataPoolsWithShardValidatorInfoAndTxHash(svi *state.ShardValidatorInfo, txHash []byte) dataRetriever.PoolsHolder {
+	return &dataRetrieverMock.PoolsHolderStub{
+		ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier {
+			return &testscommon.ShardedDataStub{
+				ShardDataStoreCalled: func(cacheID string) storage.Cacher {
+					return &testscommon.CacherStub{
+						PeekCalled: func(key []byte) (value interface{}, ok bool) {
+							if bytes.Equal(key, txHash) {
+								return svi, true
+							}
+							return nil, false
+						},
+					}
+				},
+				SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) {
+					return nil, false
+				},
+			}
+		},
+	}
+}
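
A minimal end-to-end usage sketch of the syncer API touched by this diff (NewTransactionsSyncer, SyncTransactionsFor, GetValidatorsInfo, ClearFields), kept separate from the change itself. It assumes the caller already holds a wired PoolsHolder (exposing ValidatorsInfo()), a StorageService containing the transaction/unsigned/reward units the constructor looks up, a marshaller and a request handler; the package name, helper name, map key and epoch value are illustrative only.

package sketch

import (
	"context"

	"github.com/ElrondNetwork/elrond-go-core/data/block"
	"github.com/ElrondNetwork/elrond-go-core/marshal"
	"github.com/ElrondNetwork/elrond-go/dataRetriever"
	"github.com/ElrondNetwork/elrond-go/process"
	"github.com/ElrondNetwork/elrond-go/state"
	updateSync "github.com/ElrondNetwork/elrond-go/update/sync"
)

// syncPeerMiniBlockSketch is a hypothetical helper, not part of this change. It drives the
// refactored syncer for a single peer-type mini block: missing hashes are requested via
// RequestValidatorsInfo, resolved entries are returned as ShardValidatorInfo, and the
// internal maps are cleared so the syncer instance can be reused.
func syncPeerMiniBlockSketch(
	ctx context.Context,
	pools dataRetriever.PoolsHolder,
	storages dataRetriever.StorageService,
	marshaller marshal.Marshalizer,
	requestHandler process.RequestHandler,
	peerMiniBlock *block.MiniBlock, // expected to have Type == block.PeerBlock
	epoch uint32,
) (map[string]*state.ShardValidatorInfo, error) {
	syncer, err := updateSync.NewTransactionsSyncer(updateSync.ArgsNewTransactionsSyncer{
		DataPools:      pools,
		Storages:       storages,
		Marshaller:     marshaller,
		RequestHandler: requestHandler,
	})
	if err != nil {
		return nil, err
	}

	// The map key is illustrative; callers normally use the mini block hash.
	miniBlocks := map[string]*block.MiniBlock{
		"peerMiniBlockHash": peerMiniBlock,
	}

	// Peer mini blocks are resolved through the validator-info path (receivedValidatorInfo),
	// all other mini block types through the transaction path (receivedTransaction).
	err = syncer.SyncTransactionsFor(miniBlocks, epoch, ctx)
	if err != nil {
		return nil, err
	}

	validatorsInfo, err := syncer.GetValidatorsInfo()
	if err != nil {
		return nil, err
	}

	// ClearFields resets mapTransactions, mapTxsToMiniBlocks and mapValidatorsInfo,
	// allowing the same syncer to be used for a later sync round.
	syncer.ClearFields()

	return validatorsInfo, nil
}

The split in the sketch mirrors the pool registration in the constructor above, where the PeerBlock pool is registered with receivedValidatorInfo while the remaining pools keep the receivedTransaction callback.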