From 0b98fe40749f9d6cc8aa641d2b8a4adfa28ab6e0 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Thu, 28 Apr 2022 18:16:07 +0300 Subject: [PATCH 01/70] * Added peer tx hash instead peer tx inside peers mini block --- epochStart/metachain/validators.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/epochStart/metachain/validators.go b/epochStart/metachain/validators.go index eea1720ca65..9f5a7f71e54 100644 --- a/epochStart/metachain/validators.go +++ b/epochStart/metachain/validators.go @@ -118,12 +118,12 @@ func (vic *validatorInfoCreator) createMiniBlock(validatorsInfo []*state.Validat for index, validator := range validatorCopy { shardValidatorInfo := createShardValidatorInfo(validator) - marshalizedShardValidatorInfo, err := vic.marshalizer.Marshal(shardValidatorInfo) + shardValidatorInfoHash, err := core.CalculateHash(vic.marshalizer, vic.hasher, shardValidatorInfo) if err != nil { return nil, err } - miniBlock.TxHashes[index] = marshalizedShardValidatorInfo + miniBlock.TxHashes[index] = shardValidatorInfoHash } return miniBlock, nil From 79fcf0474eb091642a498cb939510d9dc09eba6d Mon Sep 17 00:00:00 2001 From: Sorin Stanculeanu Date: Fri, 29 Apr 2022 22:41:57 +0300 Subject: [PATCH 02/70] added interceptedValidatorInfo, interceptedValidatorInfoDataFactory, validatorInfoInterceptorProcessor, validatorInfoResolver --- common/constants.go | 3 + dataRetriever/errors.go | 6 + .../resolvers/validatorInfoResolver.go | 186 ++++++++++++++++++ process/errors.go | 9 + .../interceptedValidatorInfoDataFactory.go | 63 ++++++ process/interceptors/processor/interface.go | 6 + .../validatorInfoInterceptorProcessor.go | 79 ++++++++ process/interface.go | 2 + process/peer/constants.go | 10 + process/peer/interceptedValidatorInfo.go | 148 ++++++++++++++ 10 files changed, 512 insertions(+) create mode 100644 dataRetriever/resolvers/validatorInfoResolver.go create mode 100644 process/interceptors/factory/interceptedValidatorInfoDataFactory.go create mode 
100644 process/interceptors/processor/validatorInfoInterceptorProcessor.go create mode 100644 process/peer/constants.go create mode 100644 process/peer/interceptedValidatorInfo.go diff --git a/common/constants.go b/common/constants.go index bc0dd506877..bd5b929d2d5 100644 --- a/common/constants.go +++ b/common/constants.go @@ -63,6 +63,9 @@ const GenesisTxSignatureString = "GENESISGENESISGENESISGENESISGENESISGENESISGENE // HeartbeatTopic is the topic used for heartbeat signaling const HeartbeatTopic = "heartbeat" +// ValidatorInfoTopic is the topic used for validatorInfo signaling +const ValidatorInfoTopic = "validatorInfo" + // PathShardPlaceholder represents the placeholder for the shard ID in paths const PathShardPlaceholder = "[S]" diff --git a/dataRetriever/errors.go b/dataRetriever/errors.go index c5a810d3dca..a6d9cefb732 100644 --- a/dataRetriever/errors.go +++ b/dataRetriever/errors.go @@ -220,3 +220,9 @@ var ErrNilPathManager = errors.New("nil path manager") // ErrNilEpochNotifier signals that the provided EpochNotifier is nil var ErrNilEpochNotifier = errors.New("nil EpochNotifier") + +// ErrNilValidatorInfoPool signals that a nil validator info pool has been provided +var ErrNilValidatorInfoPool = errors.New("nil validator info pool") + +// ErrNilValidatorInfoStorage signals that a nil validator info storage has been provided +var ErrNilValidatorInfoStorage = errors.New("nil validator info storage") diff --git a/dataRetriever/resolvers/validatorInfoResolver.go b/dataRetriever/resolvers/validatorInfoResolver.go new file mode 100644 index 00000000000..04a06045915 --- /dev/null +++ b/dataRetriever/resolvers/validatorInfoResolver.go @@ -0,0 +1,186 @@ +package resolvers + +import ( + "fmt" + + "github.com/ElrondNetwork/elrond-go-core/core" + "github.com/ElrondNetwork/elrond-go-core/core/check" + "github.com/ElrondNetwork/elrond-go-core/data/batch" + "github.com/ElrondNetwork/elrond-go-core/marshal" + logger "github.com/ElrondNetwork/elrond-go-logger" + 
"github.com/ElrondNetwork/elrond-go/dataRetriever" + "github.com/ElrondNetwork/elrond-go/p2p" + "github.com/ElrondNetwork/elrond-go/storage" +) + +// ArgValidatorInfoResolver is the argument structure used to create a new validator info resolver instance +type ArgValidatorInfoResolver struct { + SenderResolver dataRetriever.TopicResolverSender + Marshalizer marshal.Marshalizer + AntifloodHandler dataRetriever.P2PAntifloodHandler + Throttler dataRetriever.ResolverThrottler + ValidatorInfoPool storage.Cacher + ValidatorInfoStorage storage.Storer +} + +// validatorInfoResolver is a wrapper over Resolver that is specialized in resolving validator info requests +type validatorInfoResolver struct { + dataRetriever.TopicResolverSender + messageProcessor + baseStorageResolver + validatorInfoPool storage.Cacher + validatorInfoStorage storage.Storer +} + +// NewValidatorInfoResolver creates a validator info resolver +func NewValidatorInfoResolver(args ArgValidatorInfoResolver) (*validatorInfoResolver, error) { + err := checkArgs(args) + if err != nil { + return nil, err + } + + return &validatorInfoResolver{ + TopicResolverSender: args.SenderResolver, + messageProcessor: messageProcessor{ + marshalizer: args.Marshalizer, + antifloodHandler: args.AntifloodHandler, + throttler: args.Throttler, + topic: args.SenderResolver.RequestTopic(), + }, + validatorInfoPool: args.ValidatorInfoPool, + validatorInfoStorage: args.ValidatorInfoStorage, + }, nil +} + +func checkArgs(args ArgValidatorInfoResolver) error { + if check.IfNil(args.SenderResolver) { + return dataRetriever.ErrNilResolverSender + } + if check.IfNil(args.Marshalizer) { + return dataRetriever.ErrNilMarshalizer + } + if check.IfNil(args.AntifloodHandler) { + return dataRetriever.ErrNilAntifloodHandler + } + if check.IfNil(args.Throttler) { + return dataRetriever.ErrNilThrottler + } + if check.IfNil(args.ValidatorInfoPool) { + return dataRetriever.ErrNilValidatorInfoPool + } + if check.IfNil(args.ValidatorInfoStorage) { + 
return dataRetriever.ErrNilValidatorInfoStorage + } + + return nil +} + +// RequestDataFromHash requests validator info from other peers by hash +func (res *validatorInfoResolver) RequestDataFromHash(hash []byte, epoch uint32) error { + return res.SendOnRequestTopic( + &dataRetriever.RequestData{ + Type: dataRetriever.HashType, + Value: hash, + Epoch: epoch, + }, + [][]byte{hash}, + ) +} + +// ProcessReceivedMessage represents the callback func from the p2p.Messenger that is called each time a new message is received +// (for the topic this validator was registered to, usually a request topic) +func (res *validatorInfoResolver) ProcessReceivedMessage(message p2p.MessageP2P, fromConnectedPeer core.PeerID) error { + err := res.canProcessMessage(message, fromConnectedPeer) + if err != nil { + return err + } + + res.throttler.StartProcessing() + defer res.throttler.EndProcessing() + + rd, err := res.parseReceivedMessage(message, fromConnectedPeer) + if err != nil { + return err + } + + switch rd.Type { + case dataRetriever.HashType: + return res.resolveHashRequest(rd.Value, rd.Epoch, fromConnectedPeer) + default: + err = dataRetriever.ErrRequestTypeNotImplemented + } + + if err != nil { + err = fmt.Errorf("%w for value %s", err, logger.DisplayByteSlice(rd.Value)) + } + + return err +} + +// resolveHashRequest sends the response for a hash request +func (res *validatorInfoResolver) resolveHashRequest(hash []byte, epoch uint32, pid core.PeerID) error { + data, err := res.fetchValidatorInfoByteSlice(hash, epoch) + if err != nil { + return err + } + + return res.marshalAndSend(data, pid) +} + +func (res *validatorInfoResolver) fetchValidatorInfoByteSlice(hash []byte, epoch uint32) ([]byte, error) { + data, ok := res.validatorInfoPool.Get(hash) + if ok { + return res.marshalizer.Marshal(data) + } + + buff, err := res.getFromStorage(hash, epoch) + if err != nil { + res.ResolverDebugHandler().LogFailedToResolveData( + res.topic, + hash, + err, + ) + return nil, err + } + + 
res.ResolverDebugHandler().LogSucceededToResolveData(res.topic, hash) + + return buff, nil +} + +func (res *validatorInfoResolver) marshalAndSend(data []byte, pid core.PeerID) error { + b := &batch.Batch{ + Data: [][]byte{data}, + } + buff, err := res.marshalizer.Marshal(b) + if err != nil { + return err + } + + return res.Send(buff, pid) +} + +// SetResolverDebugHandler sets a resolver debug handler +func (res *validatorInfoResolver) SetResolverDebugHandler(handler dataRetriever.ResolverDebugHandler) error { + return res.TopicResolverSender.SetResolverDebugHandler(handler) +} + +// SetNumPeersToQuery sets the number of intra shard and cross shard peers to query +func (res *validatorInfoResolver) SetNumPeersToQuery(intra int, cross int) { + res.TopicResolverSender.SetNumPeersToQuery(intra, cross) +} + +// NumPeersToQuery returns the number of intra shard and cross shard peers to query +func (res *validatorInfoResolver) NumPeersToQuery() (int, int) { + return res.TopicResolverSender.NumPeersToQuery() +} + +// Close returns nil +func (res *validatorInfoResolver) Close() error { + return nil +} + +// IsInterfaceNil returns true if there is no value under the interface +func (res *validatorInfoResolver) IsInterfaceNil() bool { + return res == nil +} diff --git a/process/errors.go b/process/errors.go index fd71c776246..3030a5ee6ed 100644 --- a/process/errors.go +++ b/process/errors.go @@ -1079,3 +1079,12 @@ var ErrNoTxToProcess = errors.New("no transaction to process") // ErrInvalidProcessWaitTime signals that an invalid process wait time was provided var ErrInvalidProcessWaitTime = errors.New("invalid process wait time") + +// ErrNilValidatorInfoPool signals that a nil validator info pool has been provided +var ErrNilValidatorInfoPool = errors.New("nil validator info pool") + +// ErrPropertyTooLong signals that a heartbeat property was too long +var ErrPropertyTooLong = errors.New("property too long") + +// ErrPropertyTooShort signals that a heartbeat property was too 
short +var ErrPropertyTooShort = errors.New("property too short") diff --git a/process/interceptors/factory/interceptedValidatorInfoDataFactory.go b/process/interceptors/factory/interceptedValidatorInfoDataFactory.go new file mode 100644 index 00000000000..fc4fa6dca8d --- /dev/null +++ b/process/interceptors/factory/interceptedValidatorInfoDataFactory.go @@ -0,0 +1,63 @@ +package factory + +import ( + "github.com/ElrondNetwork/elrond-go-core/core/check" + "github.com/ElrondNetwork/elrond-go-core/hashing" + "github.com/ElrondNetwork/elrond-go-core/marshal" + "github.com/ElrondNetwork/elrond-go/process" + "github.com/ElrondNetwork/elrond-go/process/peer" +) + +type interceptedValidatorInfoDataFactory struct { + marshalizer marshal.Marshalizer + hasher hashing.Hasher + nodesCoordinator process.NodesCoordinator +} + +// NewInterceptedValidatorInfoDataFactory creates an instance of interceptedValidatorInfoDataFactory +func NewInterceptedValidatorInfoDataFactory(args ArgInterceptedDataFactory) (*interceptedValidatorInfoDataFactory, error) { + err := checkArgs(args) + if err != nil { + return nil, err + } + + return &interceptedValidatorInfoDataFactory{ + marshalizer: args.CoreComponents.InternalMarshalizer(), + hasher: args.CoreComponents.Hasher(), + nodesCoordinator: args.NodesCoordinator, + }, nil +} + +func checkArgs(args ArgInterceptedDataFactory) error { + if check.IfNil(args.CoreComponents) { + return process.ErrNilCoreComponentsHolder + } + if check.IfNil(args.CoreComponents.InternalMarshalizer()) { + return process.ErrNilMarshalizer + } + if check.IfNil(args.CoreComponents.Hasher()) { + return process.ErrNilHasher + } + if check.IfNil(args.NodesCoordinator) { + return process.ErrNilNodesCoordinator + } + + return nil +} + +// Create creates instances of InterceptedData by unmarshalling provided buffer +func (ividf *interceptedValidatorInfoDataFactory) Create(buff []byte) (process.InterceptedData, error) { + args := peer.ArgInterceptedValidatorInfo{ + DataBuff: 
buff, + Marshalizer: ividf.marshalizer, + Hasher: ividf.hasher, + NodesCoordinator: ividf.nodesCoordinator, + } + + return peer.NewInterceptedValidatorInfo(args) +} + +// IsInterfaceNil returns true if there is no value under the interface +func (ividf *interceptedValidatorInfoDataFactory) IsInterfaceNil() bool { + return ividf == nil +} diff --git a/process/interceptors/processor/interface.go b/process/interceptors/processor/interface.go index 435c97df887..3419df9da3f 100644 --- a/process/interceptors/processor/interface.go +++ b/process/interceptors/processor/interface.go @@ -4,6 +4,7 @@ import ( "math/big" "github.com/ElrondNetwork/elrond-go-core/data" + "github.com/ElrondNetwork/elrond-go/state" ) // InterceptedTransactionHandler defines an intercepted data wrapper over transaction handler that has @@ -21,3 +22,8 @@ type InterceptedTransactionHandler interface { type ShardedPool interface { AddData(key []byte, data interface{}, sizeInBytes int, cacheID string) } + +type interceptedValidatorInfo interface { + Hash() []byte + ValidatorInfo() state.ValidatorInfo +} diff --git a/process/interceptors/processor/validatorInfoInterceptorProcessor.go b/process/interceptors/processor/validatorInfoInterceptorProcessor.go new file mode 100644 index 00000000000..a2b7e0ba598 --- /dev/null +++ b/process/interceptors/processor/validatorInfoInterceptorProcessor.go @@ -0,0 +1,79 @@ +package processor + +import ( + "github.com/ElrondNetwork/elrond-go-core/core" + "github.com/ElrondNetwork/elrond-go-core/core/check" + "github.com/ElrondNetwork/elrond-go-core/marshal" + "github.com/ElrondNetwork/elrond-go/process" + "github.com/ElrondNetwork/elrond-go/storage" +) + +// ArgValidatorInfoInterceptorProcessor is the argument structure used to create a new validator info interceptor processor +type ArgValidatorInfoInterceptorProcessor struct { + Marshalizer marshal.Marshalizer + ValidatorInfoPool storage.Cacher +} + +type validatorInfoInterceptorProcessor struct { + marshalizer 
marshal.Marshalizer + validatorInfoPool storage.Cacher +} + +// NewValidatorInfoInterceptorProcessor creates a new validator info interceptor processor +func NewValidatorInfoInterceptorProcessor(args ArgValidatorInfoInterceptorProcessor) (*validatorInfoInterceptorProcessor, error) { + err := checkArgs(args) + if err != nil { + return nil, err + } + + return &validatorInfoInterceptorProcessor{ + marshalizer: args.Marshalizer, + validatorInfoPool: args.ValidatorInfoPool, + }, nil +} + +func checkArgs(args ArgValidatorInfoInterceptorProcessor) error { + if check.IfNil(args.Marshalizer) { + return process.ErrNilMarshalizer + } + if check.IfNil(args.ValidatorInfoPool) { + return process.ErrNilValidatorInfoPool + } + + return nil +} + +// Validate checks if the intercepted data can be processed +func (viip *validatorInfoInterceptorProcessor) Validate(data process.InterceptedData, _ core.PeerID) error { + _, ok := data.(interceptedValidatorInfo) + if !ok { + return process.ErrWrongTypeAssertion + } + + return nil +} + +// Save will save the intercepted validator info into the cache +func (viip *validatorInfoInterceptorProcessor) Save(data process.InterceptedData, _ core.PeerID, _ string) error { + ivi, ok := data.(interceptedValidatorInfo) + if !ok { + return process.ErrWrongTypeAssertion + } + + validatorInfo := ivi.ValidatorInfo() + hash := ivi.Hash() + + viip.validatorInfoPool.Put(hash, validatorInfo, validatorInfo.Size()) + + return nil +} + +// RegisterHandler registers a callback function to be notified of incoming validator info +func (viip *validatorInfoInterceptorProcessor) RegisterHandler(_ func(topic string, hash []byte, data interface{})) { + log.Error("validatorInfoInterceptorProcessor.RegisterHandler", "error", "not implemented") +} + +// IsInterfaceNil returns true if there is no value under the interface +func (viip *validatorInfoInterceptorProcessor) IsInterfaceNil() bool { + return viip == nil +} diff --git a/process/interface.go b/process/interface.go 
index 691cf2f43da..dcb4684cdbb 100644 --- a/process/interface.go +++ b/process/interface.go @@ -24,6 +24,7 @@ import ( "github.com/ElrondNetwork/elrond-go/process/block/bootstrapStorage" "github.com/ElrondNetwork/elrond-go/process/block/processedMb" "github.com/ElrondNetwork/elrond-go/sharding" + "github.com/ElrondNetwork/elrond-go/sharding/nodesCoordinator" "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/storage" vmcommon "github.com/ElrondNetwork/elrond-vm-common" @@ -1041,6 +1042,7 @@ type EpochStartEventNotifier interface { // NodesCoordinator provides Validator methods needed for the peer processing type NodesCoordinator interface { + GetValidatorWithPublicKey(publicKey []byte) (validator nodesCoordinator.Validator, shardId uint32, err error) GetAllEligibleValidatorsPublicKeys(epoch uint32) (map[uint32][][]byte, error) GetAllWaitingValidatorsPublicKeys(epoch uint32) (map[uint32][][]byte, error) GetAllLeavingValidatorsPublicKeys(epoch uint32) (map[uint32][][]byte, error) diff --git a/process/peer/constants.go b/process/peer/constants.go new file mode 100644 index 00000000000..d1fe9f2bde5 --- /dev/null +++ b/process/peer/constants.go @@ -0,0 +1,10 @@ +package peer + +const ( + minSizeInBytes = 1 + maxSizeInBytes = 128 + interceptedValidatorInfoType = "intercepted validator info" + publicKeyProperty = "public key" + listProperty = "list" + rewardAddressProperty = "reward address" +) diff --git a/process/peer/interceptedValidatorInfo.go b/process/peer/interceptedValidatorInfo.go new file mode 100644 index 00000000000..11686b743b0 --- /dev/null +++ b/process/peer/interceptedValidatorInfo.go @@ -0,0 +1,148 @@ +package peer + +import ( + "fmt" + + "github.com/ElrondNetwork/elrond-go-core/core/check" + "github.com/ElrondNetwork/elrond-go-core/hashing" + "github.com/ElrondNetwork/elrond-go-core/marshal" + logger "github.com/ElrondNetwork/elrond-go-logger" + "github.com/ElrondNetwork/elrond-go/process" + 
"github.com/ElrondNetwork/elrond-go/state" +) + +// ArgInterceptedValidatorInfo is the argument used to create a new intercepted validator info +type ArgInterceptedValidatorInfo struct { + DataBuff []byte + Marshalizer marshal.Marshalizer + Hasher hashing.Hasher + NodesCoordinator process.NodesCoordinator +} + +// interceptedValidatorInfo is a wrapper over validatorInfo +type interceptedValidatorInfo struct { + validatorInfo state.ValidatorInfo + nodesCoordinator process.NodesCoordinator + hash []byte +} + +// NewInterceptedValidatorInfo creates a new intercepted validator info instance +func NewInterceptedValidatorInfo(args ArgInterceptedValidatorInfo) (*interceptedValidatorInfo, error) { + err := checkArgs(args) + if err != nil { + return nil, err + } + + validatorInfo, err := createValidatorInfo(args.Marshalizer, args.DataBuff) + if err != nil { + return nil, err + } + + return &interceptedValidatorInfo{ + validatorInfo: *validatorInfo, + nodesCoordinator: args.NodesCoordinator, + hash: args.Hasher.Compute(string(args.DataBuff)), + }, nil +} + +func checkArgs(args ArgInterceptedValidatorInfo) error { + if len(args.DataBuff) == 0 { + return process.ErrNilBuffer + } + if check.IfNil(args.Marshalizer) { + return process.ErrNilMarshalizer + } + if check.IfNil(args.Hasher) { + return process.ErrNilHasher + } + if check.IfNil(args.NodesCoordinator) { + return process.ErrNilNodesCoordinator + } + + return nil +} + +func createValidatorInfo(marshalizer marshal.Marshalizer, buff []byte) (*state.ValidatorInfo, error) { + validatorInfo := &state.ValidatorInfo{} + err := marshalizer.Unmarshal(validatorInfo, buff) + if err != nil { + return nil, err + } + + return validatorInfo, nil +} + +// CheckValidity checks the validity of the received validator info +func (ivi *interceptedValidatorInfo) CheckValidity() error { + // Verify string properties len + err := verifyPropertyLen(publicKeyProperty, ivi.validatorInfo.PublicKey) + if err != nil { + return err + } + err = 
verifyPropertyLen(listProperty, []byte(ivi.validatorInfo.List)) + if err != nil { + return err + } + err = verifyPropertyLen(rewardAddressProperty, ivi.validatorInfo.RewardAddress) + if err != nil { + return err + } + + // Check if the public key is a validator + _, _, err = ivi.nodesCoordinator.GetValidatorWithPublicKey(ivi.validatorInfo.PublicKey) + return err +} + +// IsForCurrentShard always returns true +func (ivi *interceptedValidatorInfo) IsForCurrentShard() bool { + return true +} + +// ValidatorInfo returns the current validator info structure +func (ivi *interceptedValidatorInfo) ValidatorInfo() state.ValidatorInfo { + return ivi.validatorInfo +} + +// Hash returns the hash of this validator info +func (ivi *interceptedValidatorInfo) Hash() []byte { + return ivi.hash +} + +// Type returns the type of this intercepted data +func (ivi *interceptedValidatorInfo) Type() string { + return interceptedValidatorInfoType +} + +// Identifiers returns the identifiers used in requests +func (ivi *interceptedValidatorInfo) Identifiers() [][]byte { + return [][]byte{ivi.hash} +} + +// String returns the validator's info most important fields as string +func (ivi *interceptedValidatorInfo) String() string { + return fmt.Sprintf("pk=%s, shard=%d, list=%s, index=%d, tempRating=%d, rating=%d", + logger.DisplayByteSlice(ivi.validatorInfo.PublicKey), + ivi.validatorInfo.ShardId, + ivi.validatorInfo.List, + ivi.validatorInfo.Index, + ivi.validatorInfo.TempRating, + ivi.validatorInfo.Rating, + ) +} + +// verifyPropertyLen returns an error if the provided value is longer than accepted by the network +func verifyPropertyLen(property string, value []byte) error { + if len(value) > maxSizeInBytes { + return fmt.Errorf("%w for %s", process.ErrPropertyTooLong, property) + } + if len(value) < minSizeInBytes { + return fmt.Errorf("%w for %s", process.ErrPropertyTooShort, property) + } + + return nil +} + +// IsInterfaceNil returns true if there is no value under the interface +func 
(ivi *interceptedValidatorInfo) IsInterfaceNil() bool { + return ivi == nil +} From 488887ce8c730c6e4e001fcb76deaab85b21cb11 Mon Sep 17 00:00:00 2001 From: Sorin Stanculeanu Date: Mon, 2 May 2022 08:46:52 +0300 Subject: [PATCH 03/70] use HasOrAdd instead of Put on validatorInfoInterceptorProcessor Save --- .../interceptors/processor/validatorInfoInterceptorProcessor.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/process/interceptors/processor/validatorInfoInterceptorProcessor.go b/process/interceptors/processor/validatorInfoInterceptorProcessor.go index a2b7e0ba598..ba0aabc75ef 100644 --- a/process/interceptors/processor/validatorInfoInterceptorProcessor.go +++ b/process/interceptors/processor/validatorInfoInterceptorProcessor.go @@ -63,7 +63,7 @@ func (viip *validatorInfoInterceptorProcessor) Save(data process.InterceptedData validatorInfo := ivi.ValidatorInfo() hash := ivi.Hash() - viip.validatorInfoPool.Put(hash, validatorInfo, validatorInfo.Size()) + viip.validatorInfoPool.HasOrAdd(hash, validatorInfo, validatorInfo.Size()) return nil } From aec8fdc2434204b176e2203884715781974161b4 Mon Sep 17 00:00:00 2001 From: Sorin Stanculeanu Date: Mon, 2 May 2022 09:58:48 +0300 Subject: [PATCH 04/70] create baseStorageResolver on ctor --- dataRetriever/resolvers/validatorInfoResolver.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/dataRetriever/resolvers/validatorInfoResolver.go b/dataRetriever/resolvers/validatorInfoResolver.go index 04a06045915..fed556c2d64 100644 --- a/dataRetriever/resolvers/validatorInfoResolver.go +++ b/dataRetriever/resolvers/validatorInfoResolver.go @@ -21,6 +21,7 @@ type ArgValidatorInfoResolver struct { Throttler dataRetriever.ResolverThrottler ValidatorInfoPool storage.Cacher ValidatorInfoStorage storage.Storer + IsFullHistoryNode bool } // validatorInfoResolver is a wrapper over Resolver that is specialized in resolving validator info requests @@ -47,6 +48,7 @@ func NewValidatorInfoResolver(args 
ArgValidatorInfoResolver) (*validatorInfoReso throttler: args.Throttler, topic: args.SenderResolver.RequestTopic(), }, + baseStorageResolver: createBaseStorageResolver(args.ValidatorInfoStorage, args.IsFullHistoryNode), validatorInfoPool: args.ValidatorInfoPool, validatorInfoStorage: args.ValidatorInfoStorage, }, nil From c8769e52aaf4090e68bedb8ed2361b258cb9cadb Mon Sep 17 00:00:00 2001 From: Sorin Stanculeanu Date: Mon, 2 May 2022 12:14:00 +0300 Subject: [PATCH 05/70] added unittests --- .../resolvers/validatorInfoResolver_test.go | 407 ++++++++++++++++++ ...nterceptedValidatorInfoDataFactory_test.go | 107 +++++ .../validatorInfoInterceptorProcessor_test.go | 184 ++++++++ process/peer/interceptedValidatorInfo_test.go | 221 ++++++++++ testscommon/marshalizerStub.go | 12 +- 5 files changed, 929 insertions(+), 2 deletions(-) create mode 100644 dataRetriever/resolvers/validatorInfoResolver_test.go create mode 100644 process/interceptors/factory/interceptedValidatorInfoDataFactory_test.go create mode 100644 process/interceptors/processor/validatorInfoInterceptorProcessor_test.go create mode 100644 process/peer/interceptedValidatorInfo_test.go diff --git a/dataRetriever/resolvers/validatorInfoResolver_test.go b/dataRetriever/resolvers/validatorInfoResolver_test.go new file mode 100644 index 00000000000..d4eff879314 --- /dev/null +++ b/dataRetriever/resolvers/validatorInfoResolver_test.go @@ -0,0 +1,407 @@ +package resolvers_test + +import ( + "errors" + "testing" + + "github.com/ElrondNetwork/elrond-go-core/core" + "github.com/ElrondNetwork/elrond-go-core/core/check" + "github.com/ElrondNetwork/elrond-go-core/data/batch" + "github.com/ElrondNetwork/elrond-go/common" + "github.com/ElrondNetwork/elrond-go/dataRetriever" + "github.com/ElrondNetwork/elrond-go/dataRetriever/mock" + "github.com/ElrondNetwork/elrond-go/dataRetriever/resolvers" + "github.com/ElrondNetwork/elrond-go/p2p" + "github.com/ElrondNetwork/elrond-go/state" + 
"github.com/ElrondNetwork/elrond-go/testscommon" + "github.com/ElrondNetwork/elrond-go/testscommon/storage" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func createMockArgValidatorInfoResolver() resolvers.ArgValidatorInfoResolver { + return resolvers.ArgValidatorInfoResolver{ + SenderResolver: &mock.TopicResolverSenderStub{}, + Marshalizer: &mock.MarshalizerMock{}, + AntifloodHandler: &mock.P2PAntifloodHandlerStub{}, + Throttler: &mock.ThrottlerStub{}, + ValidatorInfoPool: testscommon.NewCacherStub(), + ValidatorInfoStorage: &storage.StorerStub{}, + IsFullHistoryNode: false, + } +} + +func createMockValidatorInfo() state.ValidatorInfo { + return state.ValidatorInfo{ + PublicKey: []byte("provided pk"), + ShardId: 123, + List: string(common.EligibleList), + Index: 10, + Rating: 10, + } +} + +func TestNewValidatorInfoResolver(t *testing.T) { + t.Parallel() + + t.Run("nil SenderResolver should error", func(t *testing.T) { + t.Parallel() + + args := createMockArgValidatorInfoResolver() + args.SenderResolver = nil + + res, err := resolvers.NewValidatorInfoResolver(args) + assert.Equal(t, dataRetriever.ErrNilResolverSender, err) + assert.True(t, check.IfNil(res)) + }) + t.Run("nil Marshalizer should error", func(t *testing.T) { + t.Parallel() + + args := createMockArgValidatorInfoResolver() + args.Marshalizer = nil + + res, err := resolvers.NewValidatorInfoResolver(args) + assert.Equal(t, dataRetriever.ErrNilMarshalizer, err) + assert.True(t, check.IfNil(res)) + }) + t.Run("nil AntifloodHandler should error", func(t *testing.T) { + t.Parallel() + + args := createMockArgValidatorInfoResolver() + args.AntifloodHandler = nil + + res, err := resolvers.NewValidatorInfoResolver(args) + assert.Equal(t, dataRetriever.ErrNilAntifloodHandler, err) + assert.True(t, check.IfNil(res)) + }) + t.Run("nil Throttler should error", func(t *testing.T) { + t.Parallel() + + args := createMockArgValidatorInfoResolver() + args.Throttler = nil + + res, err 
:= resolvers.NewValidatorInfoResolver(args) + assert.Equal(t, dataRetriever.ErrNilThrottler, err) + assert.True(t, check.IfNil(res)) + }) + t.Run("nil ValidatorInfoPool should error", func(t *testing.T) { + t.Parallel() + + args := createMockArgValidatorInfoResolver() + args.ValidatorInfoPool = nil + + res, err := resolvers.NewValidatorInfoResolver(args) + assert.Equal(t, dataRetriever.ErrNilValidatorInfoPool, err) + assert.True(t, check.IfNil(res)) + }) + t.Run("nil ValidatorInfoStorage should error", func(t *testing.T) { + t.Parallel() + + args := createMockArgValidatorInfoResolver() + args.ValidatorInfoStorage = nil + + res, err := resolvers.NewValidatorInfoResolver(args) + assert.Equal(t, dataRetriever.ErrNilValidatorInfoStorage, err) + assert.True(t, check.IfNil(res)) + }) + t.Run("should work", func(t *testing.T) { + t.Parallel() + + res, err := resolvers.NewValidatorInfoResolver(createMockArgValidatorInfoResolver()) + assert.Nil(t, err) + assert.False(t, check.IfNil(res)) + + assert.Nil(t, res.Close()) + }) +} + +func TestValidatorInfoResolver_RequestDataFromHash(t *testing.T) { + t.Parallel() + + t.Run("should error", func(t *testing.T) { + t.Parallel() + + expectedErr := errors.New("expected err") + args := createMockArgValidatorInfoResolver() + args.SenderResolver = &mock.TopicResolverSenderStub{ + SendOnRequestTopicCalled: func(rd *dataRetriever.RequestData, originalHashes [][]byte) error { + return expectedErr + }, + } + + res, _ := resolvers.NewValidatorInfoResolver(args) + err := res.RequestDataFromHash(nil, 0) + assert.Equal(t, expectedErr, err) + }) + t.Run("should work", func(t *testing.T) { + t.Parallel() + + providedHash := []byte("provided hash") + providedEpoch := uint32(123) + args := createMockArgValidatorInfoResolver() + args.SenderResolver = &mock.TopicResolverSenderStub{ + SendOnRequestTopicCalled: func(rd *dataRetriever.RequestData, originalHashes [][]byte) error { + assert.Equal(t, providedHash, originalHashes[0]) + assert.Equal(t, 
dataRetriever.HashType, rd.Type) + assert.Equal(t, providedHash, rd.Value) + assert.Equal(t, providedEpoch, rd.Epoch) + + return nil + }, + } + + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + err := res.RequestDataFromHash(providedHash, providedEpoch) + assert.Nil(t, err) + }) +} + +func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { + t.Parallel() + + t.Run("nil message should error", func(t *testing.T) { + t.Parallel() + + res, _ := resolvers.NewValidatorInfoResolver(createMockArgValidatorInfoResolver()) + require.False(t, check.IfNil(res)) + + err := res.ProcessReceivedMessage(nil, fromConnectedPeer) + assert.Equal(t, dataRetriever.ErrNilMessage, err) + }) + t.Run("canProcessMessage due to antiflood handler error", func(t *testing.T) { + t.Parallel() + + expectedErr := errors.New("expected err") + args := createMockArgValidatorInfoResolver() + args.AntifloodHandler = &mock.P2PAntifloodHandlerStub{ + CanProcessMessageCalled: func(message p2p.MessageP2P, fromConnectedPeer core.PeerID) error { + return expectedErr + }, + } + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashType, nil), fromConnectedPeer) + assert.True(t, errors.Is(err, expectedErr)) + assert.False(t, args.Throttler.(*mock.ThrottlerStub).StartWasCalled) + assert.False(t, args.Throttler.(*mock.ThrottlerStub).EndWasCalled) + }) + t.Run("parseReceivedMessage returns error due to marshalizer error", func(t *testing.T) { + t.Parallel() + + expectedErr := errors.New("expected err") + args := createMockArgValidatorInfoResolver() + args.Marshalizer = &mock.MarshalizerStub{ + UnmarshalCalled: func(obj interface{}, buff []byte) error { + return expectedErr + }, + } + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashType, nil), 
fromConnectedPeer) + assert.True(t, errors.Is(err, expectedErr)) + }) + t.Run("invalid request type should error", func(t *testing.T) { + t.Parallel() + + res, _ := resolvers.NewValidatorInfoResolver(createMockArgValidatorInfoResolver()) + require.False(t, check.IfNil(res)) + + err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashArrayType, []byte("hash")), fromConnectedPeer) + assert.True(t, errors.Is(err, dataRetriever.ErrRequestTypeNotImplemented)) + }) + t.Run("data not found in cache and fetchValidatorInfoByteSlice fails when getting data from storage", func(t *testing.T) { + t.Parallel() + + expectedErr := errors.New("expected err") + args := createMockArgValidatorInfoResolver() + args.ValidatorInfoPool = &testscommon.CacherStub{ + GetCalled: func(key []byte) (value interface{}, ok bool) { + return nil, false + }, + } + args.ValidatorInfoStorage = &storage.StorerStub{ + SearchFirstCalled: func(key []byte) ([]byte, error) { + return nil, expectedErr + }, + } + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashType, []byte("hash")), fromConnectedPeer) + assert.Equal(t, expectedErr, err) + }) + t.Run("data found in cache but marshal fails", func(t *testing.T) { + t.Parallel() + + expectedErr := errors.New("expected err") + marshalizerMock := testscommon.MarshalizerMock{} + args := createMockArgValidatorInfoResolver() + args.ValidatorInfoPool = &testscommon.CacherStub{ + GetCalled: func(key []byte) (value interface{}, ok bool) { + return []byte("some value"), true + }, + } + args.Marshalizer = &testscommon.MarshalizerStub{ + MarshalCalled: func(obj interface{}) ([]byte, error) { + return nil, expectedErr + }, + UnmarshalCalled: func(obj interface{}, buff []byte) error { + return marshalizerMock.Unmarshal(obj, buff) + }, + } + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + err := 
res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashType, []byte("hash")), fromConnectedPeer) + assert.NotNil(t, err) + }) + t.Run("data found in storage but marshal fails", func(t *testing.T) { + t.Parallel() + + expectedErr := errors.New("expected err") + marshalizerMock := testscommon.MarshalizerMock{} + args := createMockArgValidatorInfoResolver() + args.ValidatorInfoPool = &testscommon.CacherStub{ + GetCalled: func(key []byte) (value interface{}, ok bool) { + return nil, false + }, + } + args.ValidatorInfoStorage = &storage.StorerStub{ + SearchFirstCalled: func(key []byte) ([]byte, error) { + return []byte("some value"), nil + }, + } + args.Marshalizer = &testscommon.MarshalizerStub{ + MarshalCalled: func(obj interface{}) ([]byte, error) { + return nil, expectedErr + }, + UnmarshalCalled: func(obj interface{}, buff []byte) error { + return marshalizerMock.Unmarshal(obj, buff) + }, + } + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashType, []byte("hash")), fromConnectedPeer) + assert.NotNil(t, err) + }) + t.Run("should work, data from cache", func(t *testing.T) { + t.Parallel() + + wasCalled := false + providedValue := createMockValidatorInfo() + args := createMockArgValidatorInfoResolver() + args.ValidatorInfoPool = &testscommon.CacherStub{ + GetCalled: func(key []byte) (value interface{}, ok bool) { + return providedValue, true + }, + } + args.SenderResolver = &mock.TopicResolverSenderStub{ + SendCalled: func(buff []byte, peer core.PeerID) error { + marshalizerMock := testscommon.MarshalizerMock{} + b := &batch.Batch{} + _ = marshalizerMock.Unmarshal(b, buff) + + vi := &state.ValidatorInfo{} + _ = marshalizerMock.Unmarshal(vi, b.Data[0]) + + assert.Equal(t, &providedValue, vi) + wasCalled = true + + return nil + }, + } + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + err := 
res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashType, []byte("hash")), fromConnectedPeer) + assert.Nil(t, err) + assert.True(t, wasCalled) + }) + t.Run("should work, data from storage", func(t *testing.T) { + t.Parallel() + + wasCalled := false + providedValue := createMockValidatorInfo() + args := createMockArgValidatorInfoResolver() + args.ValidatorInfoPool = &testscommon.CacherStub{ + GetCalled: func(key []byte) (value interface{}, ok bool) { + return nil, false + }, + } + args.ValidatorInfoStorage = &storage.StorerStub{ + SearchFirstCalled: func(key []byte) ([]byte, error) { + marshalizerMock := testscommon.MarshalizerMock{} + return marshalizerMock.Marshal(providedValue) + }, + } + args.SenderResolver = &mock.TopicResolverSenderStub{ + SendCalled: func(buff []byte, peer core.PeerID) error { + marshalizerMock := testscommon.MarshalizerMock{} + b := &batch.Batch{} + _ = marshalizerMock.Unmarshal(b, buff) + + vi := &state.ValidatorInfo{} + _ = marshalizerMock.Unmarshal(vi, b.Data[0]) + + assert.Equal(t, &providedValue, vi) + wasCalled = true + + return nil + }, + } + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashType, []byte("hash")), fromConnectedPeer) + assert.Nil(t, err) + assert.True(t, wasCalled) + }) +} + +func TestValidatorInfoResolver_SetResolverDebugHandler(t *testing.T) { + t.Parallel() + + defer func() { + r := recover() + if r != nil { + assert.Fail(t, "should not panic") + } + }() + + res, _ := resolvers.NewValidatorInfoResolver(createMockArgValidatorInfoResolver()) + require.False(t, check.IfNil(res)) + + _ = res.SetResolverDebugHandler(nil) +} + +func TestValidatorInfoResolver_NumPeersToQuery(t *testing.T) { + t.Parallel() + + providedIntra, providedCross := 5, 10 + receivedIntra, receivedCross := 0, 0 + args := createMockArgValidatorInfoResolver() + args.SenderResolver = &mock.TopicResolverSenderStub{ + 
SetNumPeersToQueryCalled: func(intra int, cross int) { + assert.Equal(t, providedIntra, intra) + assert.Equal(t, providedCross, cross) + receivedIntra = intra + receivedCross = cross + }, + GetNumPeersToQueryCalled: func() (int, int) { + return receivedIntra, receivedCross + }, + } + + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + res.SetNumPeersToQuery(providedIntra, providedCross) + intra, cross := res.NumPeersToQuery() + assert.Equal(t, providedIntra, intra) + assert.Equal(t, providedCross, cross) +} diff --git a/process/interceptors/factory/interceptedValidatorInfoDataFactory_test.go b/process/interceptors/factory/interceptedValidatorInfoDataFactory_test.go new file mode 100644 index 00000000000..70efa0486dc --- /dev/null +++ b/process/interceptors/factory/interceptedValidatorInfoDataFactory_test.go @@ -0,0 +1,107 @@ +package factory + +import ( + "testing" + + "github.com/ElrondNetwork/elrond-go-core/core/check" + "github.com/ElrondNetwork/elrond-go/common" + "github.com/ElrondNetwork/elrond-go/process" + "github.com/ElrondNetwork/elrond-go/state" + "github.com/ElrondNetwork/elrond-go/testscommon" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func createMockValidatorInfoBuff() []byte { + vi := &state.ValidatorInfo{ + PublicKey: []byte("provided pk"), + ShardId: 123, + List: string(common.EligibleList), + Index: 10, + Rating: 10, + } + + marshalizerMock := testscommon.MarshalizerMock{} + buff, _ := marshalizerMock.Marshal(vi) + + return buff +} + +func TestNewInterceptedValidatorInfoDataFactory(t *testing.T) { + t.Parallel() + + t.Run("nil core components should error", func(t *testing.T) { + t.Parallel() + + coreComponents, cryptoComponents := createMockComponentHolders() + coreComponents = nil + args := createMockArgument(coreComponents, cryptoComponents) + + ividf, err := NewInterceptedValidatorInfoDataFactory(*args) + assert.Equal(t, process.ErrNilCoreComponentsHolder, err) 
+ assert.True(t, check.IfNil(ividf)) + }) + t.Run("nil marshalizer should error", func(t *testing.T) { + t.Parallel() + + coreComponents, cryptoComponents := createMockComponentHolders() + coreComponents.IntMarsh = nil + args := createMockArgument(coreComponents, cryptoComponents) + + ividf, err := NewInterceptedValidatorInfoDataFactory(*args) + assert.Equal(t, process.ErrNilMarshalizer, err) + assert.True(t, check.IfNil(ividf)) + }) + t.Run("nil hasher should error", func(t *testing.T) { + t.Parallel() + + coreComponents, cryptoComponents := createMockComponentHolders() + coreComponents.Hash = nil + args := createMockArgument(coreComponents, cryptoComponents) + + ividf, err := NewInterceptedValidatorInfoDataFactory(*args) + assert.Equal(t, process.ErrNilHasher, err) + assert.True(t, check.IfNil(ividf)) + }) + t.Run("nil nodes coordinator should error", func(t *testing.T) { + t.Parallel() + + args := createMockArgument(createMockComponentHolders()) + args.NodesCoordinator = nil + + ividf, err := NewInterceptedValidatorInfoDataFactory(*args) + assert.Equal(t, process.ErrNilNodesCoordinator, err) + assert.True(t, check.IfNil(ividf)) + }) + t.Run("should work", func(t *testing.T) { + t.Parallel() + + ividf, err := NewInterceptedValidatorInfoDataFactory(*createMockArgument(createMockComponentHolders())) + assert.Nil(t, err) + assert.False(t, check.IfNil(ividf)) + }) + +} + +func TestInterceptedValidatorInfoDataFactory_Create(t *testing.T) { + t.Parallel() + + t.Run("nil buff should error", func(t *testing.T) { + t.Parallel() + ividf, _ := NewInterceptedValidatorInfoDataFactory(*createMockArgument(createMockComponentHolders())) + require.False(t, check.IfNil(ividf)) + + ivi, err := ividf.Create(nil) + assert.NotNil(t, err) + assert.True(t, check.IfNil(ivi)) + }) + t.Run("should work", func(t *testing.T) { + t.Parallel() + ividf, _ := NewInterceptedValidatorInfoDataFactory(*createMockArgument(createMockComponentHolders())) + require.False(t, check.IfNil(ividf)) + + ivi, 
err := ividf.Create(createMockValidatorInfoBuff()) + assert.Nil(t, err) + assert.False(t, check.IfNil(ivi)) + }) +} diff --git a/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go b/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go new file mode 100644 index 00000000000..ee8d945ccdc --- /dev/null +++ b/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go @@ -0,0 +1,184 @@ +package processor_test + +import ( + "testing" + + "github.com/ElrondNetwork/elrond-go-core/core/check" + "github.com/ElrondNetwork/elrond-go/common" + "github.com/ElrondNetwork/elrond-go/epochStart/mock" + "github.com/ElrondNetwork/elrond-go/process" + "github.com/ElrondNetwork/elrond-go/process/interceptors/processor" + "github.com/ElrondNetwork/elrond-go/process/peer" + "github.com/ElrondNetwork/elrond-go/state" + "github.com/ElrondNetwork/elrond-go/testscommon" + "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" + "github.com/ElrondNetwork/elrond-go/testscommon/shardingMocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func createMockValidatorInfo() state.ValidatorInfo { + return state.ValidatorInfo{ + PublicKey: []byte("provided pk"), + ShardId: 123, + List: string(common.EligibleList), + Index: 10, + Rating: 10, + } +} + +func createMockInterceptedValidatorInfo() process.InterceptedData { + args := peer.ArgInterceptedValidatorInfo{ + Marshalizer: testscommon.MarshalizerMock{}, + Hasher: &hashingMocks.HasherMock{}, + NodesCoordinator: &shardingMocks.NodesCoordinatorStub{}, + } + args.DataBuff, _ = args.Marshalizer.Marshal(createMockValidatorInfo()) + ivi, _ := peer.NewInterceptedValidatorInfo(args) + + return ivi +} + +func createMockArgValidatorInfoInterceptorProcessor() processor.ArgValidatorInfoInterceptorProcessor { + return processor.ArgValidatorInfoInterceptorProcessor{ + Marshalizer: testMarshalizer, + ValidatorInfoPool: testscommon.NewCacherStub(), + } +} + +func 
TestNewValidatorInfoInterceptorProcessor(t *testing.T) { + t.Parallel() + + t.Run("nil marshalizer should error", func(t *testing.T) { + t.Parallel() + + args := createMockArgValidatorInfoInterceptorProcessor() + args.Marshalizer = nil + + proc, err := processor.NewValidatorInfoInterceptorProcessor(args) + assert.Equal(t, process.ErrNilMarshalizer, err) + assert.True(t, check.IfNil(proc)) + }) + t.Run("nil cache should error", func(t *testing.T) { + t.Parallel() + + args := createMockArgValidatorInfoInterceptorProcessor() + args.ValidatorInfoPool = nil + + proc, err := processor.NewValidatorInfoInterceptorProcessor(args) + assert.Equal(t, process.ErrNilValidatorInfoPool, err) + assert.True(t, check.IfNil(proc)) + }) + t.Run("should work", func(t *testing.T) { + t.Parallel() + + proc, err := processor.NewValidatorInfoInterceptorProcessor(createMockArgValidatorInfoInterceptorProcessor()) + assert.Nil(t, err) + assert.False(t, check.IfNil(proc)) + }) +} + +func TestValidatorInfoInterceptorProcessor_Save(t *testing.T) { + t.Parallel() + + t.Run("invalid data should error", func(t *testing.T) { + t.Parallel() + + proc, err := processor.NewValidatorInfoInterceptorProcessor(createMockArgValidatorInfoInterceptorProcessor()) + assert.Nil(t, err) + assert.Equal(t, process.ErrWrongTypeAssertion, proc.Save(nil, "", "")) + }) + t.Run("invalid validator info should error", func(t *testing.T) { + t.Parallel() + + providedData := mock.NewInterceptedMetaBlockMock(nil, []byte("hash")) // unable to cast to intercepted peer auth + wasCalled := false + args := createMockArgValidatorInfoInterceptorProcessor() + args.ValidatorInfoPool = &testscommon.CacherStub{ + HasOrAddCalled: func(key []byte, value interface{}, sizeInBytes int) (has, added bool) { + wasCalled = true + + return false, false + }, + } + + proc, _ := processor.NewValidatorInfoInterceptorProcessor(args) + require.False(t, check.IfNil(proc)) + + assert.Equal(t, process.ErrWrongTypeAssertion, proc.Save(providedData, "", "")) 
+ assert.False(t, wasCalled) + }) + t.Run("should work", func(t *testing.T) { + t.Parallel() + + providedData := createMockInterceptedValidatorInfo() + wasCalled := false + args := createMockArgValidatorInfoInterceptorProcessor() + providedBuff, _ := args.Marshalizer.Marshal(createMockValidatorInfo()) + hasher := hashingMocks.HasherMock{} + providedHash := hasher.Compute(string(providedBuff)) + + args.ValidatorInfoPool = &testscommon.CacherStub{ + HasOrAddCalled: func(key []byte, value interface{}, sizeInBytes int) (has, added bool) { + assert.Equal(t, providedHash, key) + + wasCalled = true + + return false, false + }, + } + + proc, _ := processor.NewValidatorInfoInterceptorProcessor(args) + require.False(t, check.IfNil(proc)) + + assert.Nil(t, proc.Save(providedData, "", "")) + assert.True(t, wasCalled) + }) +} + +func TestValidatorInfoInterceptorProcessor_Validate(t *testing.T) { + t.Parallel() + + t.Run("nil data should error", func(t *testing.T) { + t.Parallel() + + proc, _ := processor.NewValidatorInfoInterceptorProcessor(createMockArgValidatorInfoInterceptorProcessor()) + require.False(t, check.IfNil(proc)) + + assert.Equal(t, process.ErrWrongTypeAssertion, proc.Validate(nil, "")) + }) + t.Run("invalid data should error", func(t *testing.T) { + t.Parallel() + + providedData := mock.NewInterceptedMetaBlockMock(nil, []byte("hash")) // unable to cast to intercepted peer auth + proc, _ := processor.NewValidatorInfoInterceptorProcessor(createMockArgValidatorInfoInterceptorProcessor()) + require.False(t, check.IfNil(proc)) + + assert.Equal(t, process.ErrWrongTypeAssertion, proc.Validate(providedData, "")) + }) + t.Run("should work", func(t *testing.T) { + t.Parallel() + + args := createMockArgValidatorInfoInterceptorProcessor() + proc, _ := processor.NewValidatorInfoInterceptorProcessor(args) + require.False(t, check.IfNil(proc)) + + assert.Nil(t, proc.Validate(createMockInterceptedValidatorInfo(), "")) + }) +} + +func 
TestValidatorInfoInterceptorProcessor_RegisterHandler(t *testing.T) { + t.Parallel() + + defer func() { + if r := recover(); r != nil { + assert.Fail(t, "should not panic") + } + }() + + proc, err := processor.NewValidatorInfoInterceptorProcessor(createMockArgValidatorInfoInterceptorProcessor()) + assert.Nil(t, err) + assert.False(t, check.IfNil(proc)) + + proc.RegisterHandler(nil) +} diff --git a/process/peer/interceptedValidatorInfo_test.go b/process/peer/interceptedValidatorInfo_test.go new file mode 100644 index 00000000000..47a043dfdbb --- /dev/null +++ b/process/peer/interceptedValidatorInfo_test.go @@ -0,0 +1,221 @@ +package peer + +import ( + "errors" + "fmt" + "strings" + "testing" + + "github.com/ElrondNetwork/elrond-go-core/core/check" + logger "github.com/ElrondNetwork/elrond-go-logger" + "github.com/ElrondNetwork/elrond-go/process" + "github.com/ElrondNetwork/elrond-go/sharding/nodesCoordinator" + "github.com/ElrondNetwork/elrond-go/testscommon" + "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" + "github.com/ElrondNetwork/elrond-go/testscommon/shardingMocks" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func createMockArgInterceptedValidatorInfo() ArgInterceptedValidatorInfo { + args := ArgInterceptedValidatorInfo{ + Marshalizer: testscommon.MarshalizerMock{}, + Hasher: &hashingMocks.HasherMock{}, + NodesCoordinator: &shardingMocks.NodesCoordinatorStub{}, + } + args.DataBuff, _ = args.Marshalizer.Marshal(createMockValidatorInfo()) + + return args +} + +func TestNewInterceptedValidatorInfo(t *testing.T) { + t.Parallel() + + t.Run("nil data buff should error", func(t *testing.T) { + t.Parallel() + + args := createMockArgInterceptedValidatorInfo() + args.DataBuff = nil + + ivi, err := NewInterceptedValidatorInfo(args) + assert.Equal(t, process.ErrNilBuffer, err) + assert.True(t, check.IfNil(ivi)) + }) + t.Run("nil marshalizer should error", func(t *testing.T) { + t.Parallel() + + args := 
createMockArgInterceptedValidatorInfo() + args.Marshalizer = nil + + ivi, err := NewInterceptedValidatorInfo(args) + assert.Equal(t, process.ErrNilMarshalizer, err) + assert.True(t, check.IfNil(ivi)) + }) + t.Run("nil hasher should error", func(t *testing.T) { + t.Parallel() + + args := createMockArgInterceptedValidatorInfo() + args.Hasher = nil + + ivi, err := NewInterceptedValidatorInfo(args) + assert.Equal(t, process.ErrNilHasher, err) + assert.True(t, check.IfNil(ivi)) + }) + t.Run("nil nodes coordinator should error", func(t *testing.T) { + t.Parallel() + + args := createMockArgInterceptedValidatorInfo() + args.NodesCoordinator = nil + + ivi, err := NewInterceptedValidatorInfo(args) + assert.Equal(t, process.ErrNilNodesCoordinator, err) + assert.True(t, check.IfNil(ivi)) + }) + t.Run("unmarshal returns error", func(t *testing.T) { + t.Parallel() + + expectedErr := errors.New("expected err") + args := createMockArgInterceptedValidatorInfo() + args.Marshalizer = &testscommon.MarshalizerStub{ + UnmarshalCalled: func(obj interface{}, buff []byte) error { + return expectedErr + }, + } + + ivi, err := NewInterceptedValidatorInfo(args) + assert.Equal(t, expectedErr, err) + assert.True(t, check.IfNil(ivi)) + }) + t.Run("should work", func(t *testing.T) { + t.Parallel() + + ivi, err := NewInterceptedValidatorInfo(createMockArgInterceptedValidatorInfo()) + assert.Nil(t, err) + assert.False(t, check.IfNil(ivi)) + }) +} + +func TestInterceptedValidatorInfo_CheckValidity(t *testing.T) { + t.Parallel() + + t.Run("publicKeyProperty too short", testInterceptedValidatorInfoPropertyLen(publicKeyProperty, false)) + t.Run("publicKeyProperty too long", testInterceptedValidatorInfoPropertyLen(publicKeyProperty, true)) + + t.Run("listProperty too short", testInterceptedValidatorInfoPropertyLen(listProperty, false)) + t.Run("listProperty too long", testInterceptedValidatorInfoPropertyLen(listProperty, true)) + + t.Run("rewardAddressProperty too short", 
testInterceptedValidatorInfoPropertyLen(rewardAddressProperty, false)) + t.Run("rewardAddressProperty too long", testInterceptedValidatorInfoPropertyLen(rewardAddressProperty, true)) + + t.Run("not validator should error", func(t *testing.T) { + t.Parallel() + + expectedErr := errors.New("expected err") + args := createMockArgInterceptedValidatorInfo() + args.NodesCoordinator = &shardingMocks.NodesCoordinatorStub{ + GetValidatorWithPublicKeyCalled: func(publicKey []byte) (validator nodesCoordinator.Validator, shardId uint32, err error) { + return nil, 0, expectedErr + }, + } + + ivi, _ := NewInterceptedValidatorInfo(args) + require.False(t, check.IfNil(ivi)) + + assert.Equal(t, expectedErr, ivi.CheckValidity()) + }) + t.Run("should work", func(t *testing.T) { + t.Parallel() + + args := createMockArgInterceptedValidatorInfo() + args.NodesCoordinator = &shardingMocks.NodesCoordinatorStub{ + GetValidatorWithPublicKeyCalled: func(publicKey []byte) (validator nodesCoordinator.Validator, shardId uint32, err error) { + return nil, 0, nil + }, + } + + ivi, _ := NewInterceptedValidatorInfo(args) + require.False(t, check.IfNil(ivi)) + + assert.Nil(t, ivi.CheckValidity()) + }) +} + +func testInterceptedValidatorInfoPropertyLen(property string, tooLong bool) func(t *testing.T) { + return func(t *testing.T) { + t.Parallel() + + value := []byte("") + expectedError := process.ErrPropertyTooShort + if tooLong { + value = make([]byte, 130) + expectedError = process.ErrPropertyTooLong + } + + args := createMockArgInterceptedValidatorInfo() + ivi, _ := NewInterceptedValidatorInfo(args) + require.False(t, check.IfNil(ivi)) + + switch property { + case publicKeyProperty: + ivi.validatorInfo.PublicKey = value + case listProperty: + ivi.validatorInfo.List = string(value) + case rewardAddressProperty: + ivi.validatorInfo.RewardAddress = value + default: + assert.True(t, false) + } + + err := ivi.CheckValidity() + assert.True(t, strings.Contains(err.Error(), expectedError.Error())) + } +} 
+ +func TestInterceptedValidatorInfo_Getters(t *testing.T) { + t.Parallel() + + args := createMockArgInterceptedValidatorInfo() + ivi, _ := NewInterceptedValidatorInfo(args) + require.False(t, check.IfNil(ivi)) + + validatorInfo := createMockValidatorInfo() + validatorInfoBuff, _ := args.Marshalizer.Marshal(validatorInfo) + hash := args.Hasher.Compute(string(validatorInfoBuff)) + + assert.True(t, ivi.IsForCurrentShard()) + assert.Equal(t, *validatorInfo, ivi.ValidatorInfo()) + assert.Equal(t, hash, ivi.Hash()) + assert.Equal(t, interceptedValidatorInfoType, ivi.Type()) + + identifiers := ivi.Identifiers() + assert.Equal(t, 1, len(identifiers)) + assert.Equal(t, hash, identifiers[0]) + + str := ivi.String() + assert.True(t, strings.Contains(str, fmt.Sprintf("pk=%s", logger.DisplayByteSlice(ivi.validatorInfo.PublicKey)))) + assert.True(t, strings.Contains(str, fmt.Sprintf("shard=%d", validatorInfo.ShardId))) + assert.True(t, strings.Contains(str, fmt.Sprintf("list=%s", validatorInfo.List))) + assert.True(t, strings.Contains(str, fmt.Sprintf("index=%d", validatorInfo.Index))) + assert.True(t, strings.Contains(str, fmt.Sprintf("tempRating=%d", validatorInfo.TempRating))) + assert.True(t, strings.Contains(str, fmt.Sprintf("rating=%d", validatorInfo.Rating))) +} + +func TestInterceptedValidatorInfo_Identifiers(t *testing.T) { + +} + +func TestInterceptedValidatorInfo_IsForCurrentShard(t *testing.T) { + +} + +func TestInterceptedValidatorInfo_String(t *testing.T) { + +} + +func TestInterceptedValidatorInfo_Type(t *testing.T) { + +} + +func TestInterceptedValidatorInfo_ValidatorInfo(t *testing.T) { + +} diff --git a/testscommon/marshalizerStub.go b/testscommon/marshalizerStub.go index b29904d02d6..2ebcd236d0b 100644 --- a/testscommon/marshalizerStub.go +++ b/testscommon/marshalizerStub.go @@ -8,12 +8,20 @@ type MarshalizerStub struct { // Marshal - func (ms *MarshalizerStub) Marshal(obj interface{}) ([]byte, error) { - return ms.MarshalCalled(obj) + if ms.MarshalCalled != 
nil { + return ms.MarshalCalled(obj) + } + + return make([]byte, 0), nil } // Unmarshal - func (ms *MarshalizerStub) Unmarshal(obj interface{}, buff []byte) error { - return ms.UnmarshalCalled(obj, buff) + if ms.UnmarshalCalled != nil { + return ms.UnmarshalCalled(obj, buff) + } + + return nil } // IsInterfaceNil - From 76daab01cb5d78bd7a9d6fe6c02d52dfa5bffd7b Mon Sep 17 00:00:00 2001 From: Sorin Stanculeanu Date: Mon, 2 May 2022 12:15:43 +0300 Subject: [PATCH 06/70] fixed ineffectual assignment in tests --- .../factory/interceptedValidatorInfoDataFactory_test.go | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/process/interceptors/factory/interceptedValidatorInfoDataFactory_test.go b/process/interceptors/factory/interceptedValidatorInfoDataFactory_test.go index 70efa0486dc..abab173bccf 100644 --- a/process/interceptors/factory/interceptedValidatorInfoDataFactory_test.go +++ b/process/interceptors/factory/interceptedValidatorInfoDataFactory_test.go @@ -33,9 +33,8 @@ func TestNewInterceptedValidatorInfoDataFactory(t *testing.T) { t.Run("nil core components should error", func(t *testing.T) { t.Parallel() - coreComponents, cryptoComponents := createMockComponentHolders() - coreComponents = nil - args := createMockArgument(coreComponents, cryptoComponents) + _, cryptoComponents := createMockComponentHolders() + args := createMockArgument(nil, cryptoComponents) ividf, err := NewInterceptedValidatorInfoDataFactory(*args) assert.Equal(t, process.ErrNilCoreComponentsHolder, err) From c4b52599901fb370c837aa7a9fe5e60b899370c7 Mon Sep 17 00:00:00 2001 From: Sorin Stanculeanu Date: Mon, 2 May 2022 13:19:21 +0300 Subject: [PATCH 07/70] fixed comments --- .../processor/validatorInfoInterceptorProcessor_test.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go b/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go index ee8d945ccdc..8bc17146d08 
100644 --- a/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go +++ b/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go @@ -91,7 +91,7 @@ func TestValidatorInfoInterceptorProcessor_Save(t *testing.T) { t.Run("invalid validator info should error", func(t *testing.T) { t.Parallel() - providedData := mock.NewInterceptedMetaBlockMock(nil, []byte("hash")) // unable to cast to intercepted peer auth + providedData := mock.NewInterceptedMetaBlockMock(nil, []byte("hash")) // unable to cast to intercepted validator info wasCalled := false args := createMockArgValidatorInfoInterceptorProcessor() args.ValidatorInfoPool = &testscommon.CacherStub{ @@ -150,7 +150,7 @@ func TestValidatorInfoInterceptorProcessor_Validate(t *testing.T) { t.Run("invalid data should error", func(t *testing.T) { t.Parallel() - providedData := mock.NewInterceptedMetaBlockMock(nil, []byte("hash")) // unable to cast to intercepted peer auth + providedData := mock.NewInterceptedMetaBlockMock(nil, []byte("hash")) // unable to cast to intercepted validator info proc, _ := processor.NewValidatorInfoInterceptorProcessor(createMockArgValidatorInfoInterceptorProcessor()) require.False(t, check.IfNil(proc)) From f65b12e4e8cf830853aabe69e33fb455c4da00a8 Mon Sep 17 00:00:00 2001 From: Sorin Stanculeanu Date: Tue, 3 May 2022 18:50:10 +0300 Subject: [PATCH 08/70] fixes after review --- .../resolvers/validatorInfoResolver.go | 14 +++--- .../resolvers/validatorInfoResolver_test.go | 14 +++--- .../interceptedValidatorInfoDataFactory.go | 6 +-- ...nterceptedValidatorInfoDataFactory_test.go | 2 +- .../validatorInfoInterceptorProcessor.go | 17 +++---- .../validatorInfoInterceptorProcessor_test.go | 45 ++++++------------- process/peer/constants.go | 14 +++--- process/peer/interceptedValidatorInfo.go | 20 ++++++--- process/peer/interceptedValidatorInfo_test.go | 24 +--------- process/peer/validatorsProvider_test.go | 2 +- 10 files changed, 60 insertions(+), 98 deletions(-) diff 
--git a/dataRetriever/resolvers/validatorInfoResolver.go b/dataRetriever/resolvers/validatorInfoResolver.go index fed556c2d64..4d8db5b701a 100644 --- a/dataRetriever/resolvers/validatorInfoResolver.go +++ b/dataRetriever/resolvers/validatorInfoResolver.go @@ -16,7 +16,7 @@ import ( // ArgValidatorInfoResolver is the argument structure used to create a new validator info resolver instance type ArgValidatorInfoResolver struct { SenderResolver dataRetriever.TopicResolverSender - Marshalizer marshal.Marshalizer + Marshaller marshal.Marshalizer AntifloodHandler dataRetriever.P2PAntifloodHandler Throttler dataRetriever.ResolverThrottler ValidatorInfoPool storage.Cacher @@ -43,7 +43,7 @@ func NewValidatorInfoResolver(args ArgValidatorInfoResolver) (*validatorInfoReso return &validatorInfoResolver{ TopicResolverSender: args.SenderResolver, messageProcessor: messageProcessor{ - marshalizer: args.Marshalizer, + marshalizer: args.Marshaller, antifloodHandler: args.AntifloodHandler, throttler: args.Throttler, topic: args.SenderResolver.RequestTopic(), @@ -58,7 +58,7 @@ func checkArgs(args ArgValidatorInfoResolver) error { if check.IfNil(args.SenderResolver) { return dataRetriever.ErrNilResolverSender } - if check.IfNil(args.Marshalizer) { + if check.IfNil(args.Marshaller) { return dataRetriever.ErrNilMarshalizer } if check.IfNil(args.AntifloodHandler) { @@ -105,18 +105,14 @@ func (res *validatorInfoResolver) ProcessReceivedMessage(message p2p.MessageP2P, return err } + // TODO: add support for HashArrayType switch rd.Type { case dataRetriever.HashType: return res.resolveHashRequest(rd.Value, rd.Epoch, fromConnectedPeer) default: - err = dataRetriever.ErrRequestTypeNotImplemented } - if err != nil { - err = fmt.Errorf("%w for value %s", err, logger.DisplayByteSlice(rd.Value)) - } - - return err + return fmt.Errorf("%w for value %s", dataRetriever.ErrRequestTypeNotImplemented, logger.DisplayByteSlice(rd.Value)) } // resolveHashRequest sends the response for a hash request diff 
--git a/dataRetriever/resolvers/validatorInfoResolver_test.go b/dataRetriever/resolvers/validatorInfoResolver_test.go index d4eff879314..b8ecd41239d 100644 --- a/dataRetriever/resolvers/validatorInfoResolver_test.go +++ b/dataRetriever/resolvers/validatorInfoResolver_test.go @@ -22,7 +22,7 @@ import ( func createMockArgValidatorInfoResolver() resolvers.ArgValidatorInfoResolver { return resolvers.ArgValidatorInfoResolver{ SenderResolver: &mock.TopicResolverSenderStub{}, - Marshalizer: &mock.MarshalizerMock{}, + Marshaller: &mock.MarshalizerMock{}, AntifloodHandler: &mock.P2PAntifloodHandlerStub{}, Throttler: &mock.ThrottlerStub{}, ValidatorInfoPool: testscommon.NewCacherStub(), @@ -37,7 +37,7 @@ func createMockValidatorInfo() state.ValidatorInfo { ShardId: 123, List: string(common.EligibleList), Index: 10, - Rating: 10, + Rating: 11, } } @@ -54,11 +54,11 @@ func TestNewValidatorInfoResolver(t *testing.T) { assert.Equal(t, dataRetriever.ErrNilResolverSender, err) assert.True(t, check.IfNil(res)) }) - t.Run("nil Marshalizer should error", func(t *testing.T) { + t.Run("nil marshaller should error", func(t *testing.T) { t.Parallel() args := createMockArgValidatorInfoResolver() - args.Marshalizer = nil + args.Marshaller = nil res, err := resolvers.NewValidatorInfoResolver(args) assert.Equal(t, dataRetriever.ErrNilMarshalizer, err) @@ -193,7 +193,7 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { expectedErr := errors.New("expected err") args := createMockArgValidatorInfoResolver() - args.Marshalizer = &mock.MarshalizerStub{ + args.Marshaller = &mock.MarshalizerStub{ UnmarshalCalled: func(obj interface{}, buff []byte) error { return expectedErr }, @@ -245,7 +245,7 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { return []byte("some value"), true }, } - args.Marshalizer = &testscommon.MarshalizerStub{ + args.Marshaller = &testscommon.MarshalizerStub{ MarshalCalled: func(obj interface{}) ([]byte, error) { return nil, 
expectedErr }, @@ -275,7 +275,7 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { return []byte("some value"), nil }, } - args.Marshalizer = &testscommon.MarshalizerStub{ + args.Marshaller = &testscommon.MarshalizerStub{ MarshalCalled: func(obj interface{}) ([]byte, error) { return nil, expectedErr }, diff --git a/process/interceptors/factory/interceptedValidatorInfoDataFactory.go b/process/interceptors/factory/interceptedValidatorInfoDataFactory.go index fc4fa6dca8d..345a0337eff 100644 --- a/process/interceptors/factory/interceptedValidatorInfoDataFactory.go +++ b/process/interceptors/factory/interceptedValidatorInfoDataFactory.go @@ -9,7 +9,7 @@ import ( ) type interceptedValidatorInfoDataFactory struct { - marshalizer marshal.Marshalizer + marshaller marshal.Marshalizer hasher hashing.Hasher nodesCoordinator process.NodesCoordinator } @@ -22,7 +22,7 @@ func NewInterceptedValidatorInfoDataFactory(args ArgInterceptedDataFactory) (*in } return &interceptedValidatorInfoDataFactory{ - marshalizer: args.CoreComponents.InternalMarshalizer(), + marshaller: args.CoreComponents.InternalMarshalizer(), hasher: args.CoreComponents.Hasher(), nodesCoordinator: args.NodesCoordinator, }, nil @@ -49,7 +49,7 @@ func checkArgs(args ArgInterceptedDataFactory) error { func (ividf *interceptedValidatorInfoDataFactory) Create(buff []byte) (process.InterceptedData, error) { args := peer.ArgInterceptedValidatorInfo{ DataBuff: buff, - Marshalizer: ividf.marshalizer, + Marshalizer: ividf.marshaller, Hasher: ividf.hasher, NodesCoordinator: ividf.nodesCoordinator, } diff --git a/process/interceptors/factory/interceptedValidatorInfoDataFactory_test.go b/process/interceptors/factory/interceptedValidatorInfoDataFactory_test.go index abab173bccf..2f6ed90e8ad 100644 --- a/process/interceptors/factory/interceptedValidatorInfoDataFactory_test.go +++ b/process/interceptors/factory/interceptedValidatorInfoDataFactory_test.go @@ -40,7 +40,7 @@ func 
TestNewInterceptedValidatorInfoDataFactory(t *testing.T) { assert.Equal(t, process.ErrNilCoreComponentsHolder, err) assert.True(t, check.IfNil(ividf)) }) - t.Run("nil marshalizer should error", func(t *testing.T) { + t.Run("nil marshaller should error", func(t *testing.T) { t.Parallel() coreComponents, cryptoComponents := createMockComponentHolders() diff --git a/process/interceptors/processor/validatorInfoInterceptorProcessor.go b/process/interceptors/processor/validatorInfoInterceptorProcessor.go index ba0aabc75ef..7bb1d40894d 100644 --- a/process/interceptors/processor/validatorInfoInterceptorProcessor.go +++ b/process/interceptors/processor/validatorInfoInterceptorProcessor.go @@ -10,12 +10,12 @@ import ( // ArgValidatorInfoInterceptorProcessor is the argument structure used to create a new validator info interceptor processor type ArgValidatorInfoInterceptorProcessor struct { - Marshalizer marshal.Marshalizer + Marshaller marshal.Marshalizer ValidatorInfoPool storage.Cacher } type validatorInfoInterceptorProcessor struct { - marshalizer marshal.Marshalizer + marshaller marshal.Marshalizer validatorInfoPool storage.Cacher } @@ -27,13 +27,13 @@ func NewValidatorInfoInterceptorProcessor(args ArgValidatorInfoInterceptorProces } return &validatorInfoInterceptorProcessor{ - marshalizer: args.Marshalizer, + marshaller: args.Marshaller, validatorInfoPool: args.ValidatorInfoPool, }, nil } func checkArgs(args ArgValidatorInfoInterceptorProcessor) error { - if check.IfNil(args.Marshalizer) { + if check.IfNil(args.Marshaller) { return process.ErrNilMarshalizer } if check.IfNil(args.ValidatorInfoPool) { @@ -43,13 +43,8 @@ func checkArgs(args ArgValidatorInfoInterceptorProcessor) error { return nil } -// Validate checks if the intercepted data can be processed -func (viip *validatorInfoInterceptorProcessor) Validate(data process.InterceptedData, _ core.PeerID) error { - _, ok := data.(interceptedValidatorInfo) - if !ok { - return process.ErrWrongTypeAssertion - } - +// 
Validate returns nil as validation is done on Save +func (viip *validatorInfoInterceptorProcessor) Validate(_ process.InterceptedData, _ core.PeerID) error { return nil } diff --git a/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go b/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go index 8bc17146d08..fa29e83e4c5 100644 --- a/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go +++ b/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go @@ -41,7 +41,7 @@ func createMockInterceptedValidatorInfo() process.InterceptedData { func createMockArgValidatorInfoInterceptorProcessor() processor.ArgValidatorInfoInterceptorProcessor { return processor.ArgValidatorInfoInterceptorProcessor{ - Marshalizer: testMarshalizer, + Marshaller: testMarshalizer, ValidatorInfoPool: testscommon.NewCacherStub(), } } @@ -49,11 +49,11 @@ func createMockArgValidatorInfoInterceptorProcessor() processor.ArgValidatorInfo func TestNewValidatorInfoInterceptorProcessor(t *testing.T) { t.Parallel() - t.Run("nil marshalizer should error", func(t *testing.T) { + t.Run("nil marshaller should error", func(t *testing.T) { t.Parallel() args := createMockArgValidatorInfoInterceptorProcessor() - args.Marshalizer = nil + args.Marshaller = nil proc, err := processor.NewValidatorInfoInterceptorProcessor(args) assert.Equal(t, process.ErrNilMarshalizer, err) @@ -97,7 +97,6 @@ func TestValidatorInfoInterceptorProcessor_Save(t *testing.T) { args.ValidatorInfoPool = &testscommon.CacherStub{ HasOrAddCalled: func(key []byte, value interface{}, sizeInBytes int) (has, added bool) { wasCalled = true - return false, false }, } @@ -114,16 +113,14 @@ func TestValidatorInfoInterceptorProcessor_Save(t *testing.T) { providedData := createMockInterceptedValidatorInfo() wasCalled := false args := createMockArgValidatorInfoInterceptorProcessor() - providedBuff, _ := args.Marshalizer.Marshal(createMockValidatorInfo()) + providedBuff, _ := 
args.Marshaller.Marshal(createMockValidatorInfo()) hasher := hashingMocks.HasherMock{} providedHash := hasher.Compute(string(providedBuff)) args.ValidatorInfoPool = &testscommon.CacherStub{ HasOrAddCalled: func(key []byte, value interface{}, sizeInBytes int) (has, added bool) { assert.Equal(t, providedHash, key) - wasCalled = true - return false, false }, } @@ -139,32 +136,18 @@ func TestValidatorInfoInterceptorProcessor_Save(t *testing.T) { func TestValidatorInfoInterceptorProcessor_Validate(t *testing.T) { t.Parallel() - t.Run("nil data should error", func(t *testing.T) { - t.Parallel() - - proc, _ := processor.NewValidatorInfoInterceptorProcessor(createMockArgValidatorInfoInterceptorProcessor()) - require.False(t, check.IfNil(proc)) - - assert.Equal(t, process.ErrWrongTypeAssertion, proc.Validate(nil, "")) - }) - t.Run("invalid data should error", func(t *testing.T) { - t.Parallel() - - providedData := mock.NewInterceptedMetaBlockMock(nil, []byte("hash")) // unable to cast to intercepted validator info - proc, _ := processor.NewValidatorInfoInterceptorProcessor(createMockArgValidatorInfoInterceptorProcessor()) - require.False(t, check.IfNil(proc)) - - assert.Equal(t, process.ErrWrongTypeAssertion, proc.Validate(providedData, "")) - }) - t.Run("should work", func(t *testing.T) { - t.Parallel() + defer func() { + r := recover() + if r != nil { + assert.Fail(t, "should not panic") + } + }() - args := createMockArgValidatorInfoInterceptorProcessor() - proc, _ := processor.NewValidatorInfoInterceptorProcessor(args) - require.False(t, check.IfNil(proc)) + args := createMockArgValidatorInfoInterceptorProcessor() + proc, _ := processor.NewValidatorInfoInterceptorProcessor(args) + require.False(t, check.IfNil(proc)) - assert.Nil(t, proc.Validate(createMockInterceptedValidatorInfo(), "")) - }) + assert.Nil(t, proc.Validate(createMockInterceptedValidatorInfo(), "")) } func TestValidatorInfoInterceptorProcessor_RegisterHandler(t *testing.T) { diff --git 
a/process/peer/constants.go b/process/peer/constants.go index d1fe9f2bde5..af3619369bd 100644 --- a/process/peer/constants.go +++ b/process/peer/constants.go @@ -1,10 +1,12 @@ package peer const ( - minSizeInBytes = 1 - maxSizeInBytes = 128 - interceptedValidatorInfoType = "intercepted validator info" - publicKeyProperty = "public key" - listProperty = "list" - rewardAddressProperty = "reward address" + minSizeInBytes = 1 + maxSizeInBytes = 128 + interceptedValidatorInfoType = "intercepted validator info" + publicKeyProperty = "public key" + publicKeyPropertyRequiredBytesLen = 96 + listProperty = "list" + rewardAddressProperty = "reward address" + rewardAddressPropertyMaxPropertyBytesLen = 32 ) diff --git a/process/peer/interceptedValidatorInfo.go b/process/peer/interceptedValidatorInfo.go index 11686b743b0..415fccf22af 100644 --- a/process/peer/interceptedValidatorInfo.go +++ b/process/peer/interceptedValidatorInfo.go @@ -75,15 +75,15 @@ func createValidatorInfo(marshalizer marshal.Marshalizer, buff []byte) (*state.V // CheckValidity checks the validity of the received validator info func (ivi *interceptedValidatorInfo) CheckValidity() error { // Verify string properties len - err := verifyPropertyLen(publicKeyProperty, ivi.validatorInfo.PublicKey) + err := verifyPropertyLen(publicKeyProperty, ivi.validatorInfo.PublicKey, publicKeyPropertyRequiredBytesLen, minSizeInBytes, maxSizeInBytes) if err != nil { return err } - err = verifyPropertyLen(listProperty, []byte(ivi.validatorInfo.List)) + err = verifyPropertyLen(listProperty, []byte(ivi.validatorInfo.List), 0, minSizeInBytes, maxSizeInBytes) if err != nil { return err } - err = verifyPropertyLen(rewardAddressProperty, ivi.validatorInfo.RewardAddress) + err = verifyPropertyLen(rewardAddressProperty, ivi.validatorInfo.RewardAddress, 0, minSizeInBytes, rewardAddressPropertyMaxPropertyBytesLen) if err != nil { return err } @@ -131,11 +131,19 @@ func (ivi *interceptedValidatorInfo) String() string { } // 
verifyPropertyLen returns an error if the provided value is longer than accepted by the network -func verifyPropertyLen(property string, value []byte) error { - if len(value) > maxSizeInBytes { +func verifyPropertyLen(property string, value []byte, requiredLen, minSize, maxSize int) error { + hasRequiredLen := requiredLen != 0 + isOverLimit := len(value) > maxSize + isOverRequiredLen := len(value) > requiredLen + isTooLong := isOverLimit || (hasRequiredLen && isOverRequiredLen) + if isTooLong { return fmt.Errorf("%w for %s", process.ErrPropertyTooLong, property) } - if len(value) < minSizeInBytes { + + isUnderLimit := len(value) < minSize + isUnderRequiredLen := len(value) < requiredLen + isTooShort := isUnderLimit || (hasRequiredLen && isUnderRequiredLen) + if isTooShort { return fmt.Errorf("%w for %s", process.ErrPropertyTooShort, property) } diff --git a/process/peer/interceptedValidatorInfo_test.go b/process/peer/interceptedValidatorInfo_test.go index 47a043dfdbb..db6d7f7299f 100644 --- a/process/peer/interceptedValidatorInfo_test.go +++ b/process/peer/interceptedValidatorInfo_test.go @@ -41,7 +41,7 @@ func TestNewInterceptedValidatorInfo(t *testing.T) { assert.Equal(t, process.ErrNilBuffer, err) assert.True(t, check.IfNil(ivi)) }) - t.Run("nil marshalizer should error", func(t *testing.T) { + t.Run("nil marshaller should error", func(t *testing.T) { t.Parallel() args := createMockArgInterceptedValidatorInfo() @@ -120,7 +120,6 @@ func TestInterceptedValidatorInfo_CheckValidity(t *testing.T) { ivi, _ := NewInterceptedValidatorInfo(args) require.False(t, check.IfNil(ivi)) - assert.Equal(t, expectedErr, ivi.CheckValidity()) }) t.Run("should work", func(t *testing.T) { @@ -135,7 +134,6 @@ func TestInterceptedValidatorInfo_CheckValidity(t *testing.T) { ivi, _ := NewInterceptedValidatorInfo(args) require.False(t, check.IfNil(ivi)) - assert.Nil(t, ivi.CheckValidity()) }) } @@ -199,23 +197,3 @@ func TestInterceptedValidatorInfo_Getters(t *testing.T) { assert.True(t, 
strings.Contains(str, fmt.Sprintf("tempRating=%d", validatorInfo.TempRating))) assert.True(t, strings.Contains(str, fmt.Sprintf("rating=%d", validatorInfo.Rating))) } - -func TestInterceptedValidatorInfo_Identifiers(t *testing.T) { - -} - -func TestInterceptedValidatorInfo_IsForCurrentShard(t *testing.T) { - -} - -func TestInterceptedValidatorInfo_String(t *testing.T) { - -} - -func TestInterceptedValidatorInfo_Type(t *testing.T) { - -} - -func TestInterceptedValidatorInfo_ValidatorInfo(t *testing.T) { - -} diff --git a/process/peer/validatorsProvider_test.go b/process/peer/validatorsProvider_test.go index d23b3fa282a..af1c814cf25 100644 --- a/process/peer/validatorsProvider_test.go +++ b/process/peer/validatorsProvider_test.go @@ -624,7 +624,7 @@ func TestValidatorsProvider_DoesntCallUpdateUpdateCacheWithoutRequests(t *testin } func createMockValidatorInfo() *state.ValidatorInfo { initialInfo := &state.ValidatorInfo{ - PublicKey: []byte("a1"), + PublicKey: []byte("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), ShardId: 0, List: "eligible", Index: 1, From b71abcfc8ea765354e7a0a270ac3886ea163b808 Mon Sep 17 00:00:00 2001 From: Sorin Stanculeanu Date: Wed, 4 May 2022 11:09:41 +0300 Subject: [PATCH 09/70] integrated resolver + interceptor --- cmd/node/config/config.toml | 6 ++ config/config.go | 1 + dataRetriever/dataPool/dataPool.go | 11 ++++ dataRetriever/dataPool/dataPool_test.go | 13 ++++ dataRetriever/factory/dataPoolFactory.go | 7 +++ dataRetriever/factory/dataPoolFactory_test.go | 8 +++ .../baseResolversContainerFactory.go | 30 +++++++++ .../metaResolversContainerFactory.go | 5 ++ .../shardResolversContainerFactory.go | 5 ++ .../shardResolversContainerFactory_test.go | 4 +- .../shardResolversContainerFactory_test.go | 4 +- dataRetriever/interface.go | 1 + .../requestHandlers/requestHandler.go | 31 ++++++++++ .../requestHandlers/requestHandler_test.go | 61 +++++++++++++++++++ epochStart/interface.go | 1 + 
genesis/process/disabled/requestHandler.go | 4 ++ .../baseInterceptorsContainerFactory.go | 44 +++++++++++++ .../metaInterceptorsContainerFactory.go | 5 ++ .../shardInterceptorsContainerFactory.go | 5 ++ .../shardInterceptorsContainerFactory_test.go | 4 +- process/interface.go | 1 + testscommon/dataRetriever/poolFactory.go | 10 +++ testscommon/dataRetriever/poolsHolderMock.go | 9 +++ testscommon/dataRetriever/poolsHolderStub.go | 10 +++ testscommon/generalConfig.go | 1 + testscommon/requestHandlerStub.go | 8 +++ 26 files changed, 286 insertions(+), 3 deletions(-) diff --git a/cmd/node/config/config.toml b/cmd/node/config/config.toml index 27227a3758b..46c6f5b77ad 100644 --- a/cmd/node/config/config.toml +++ b/cmd/node/config/config.toml @@ -439,6 +439,12 @@ SizeInBytes = 26214400 # 25MB per each pair (metachain, destinationShard) Shards = 4 +[ValidatorInfoPool] + Name = "ValidatorInfoPool" + Capacity = 100000 + Type = "SizeLRU" + SizeInBytes = 314572800 #300MB + #PublicKeyPeerId represents the main cache used to map Elrond block signing public keys to their associated peer id's. 
[PublicKeyPeerId] Name = "PublicKeyPeerId" diff --git a/config/config.go b/config/config.go index 4042c847f8c..f60402cd2de 100644 --- a/config/config.go +++ b/config/config.go @@ -142,6 +142,7 @@ type Config struct { WhiteListPool CacheConfig WhiteListerVerifiedTxs CacheConfig SmartContractDataPool CacheConfig + ValidatorInfoPool CacheConfig TrieSyncStorage TrieSyncStorageConfig EpochStartConfig EpochStartConfig AddressPubkeyConverter PubkeyConfig diff --git a/dataRetriever/dataPool/dataPool.go b/dataRetriever/dataPool/dataPool.go index baf78ae7156..c33e3a4dcce 100644 --- a/dataRetriever/dataPool/dataPool.go +++ b/dataRetriever/dataPool/dataPool.go @@ -19,6 +19,7 @@ type dataPool struct { trieNodesChunks storage.Cacher currBlockTxs dataRetriever.TransactionCacher smartContracts storage.Cacher + validatorsInfo storage.Cacher } // DataPoolArgs represents the data pool's constructor structure @@ -33,6 +34,7 @@ type DataPoolArgs struct { TrieNodesChunks storage.Cacher CurrentBlockTransactions dataRetriever.TransactionCacher SmartContracts storage.Cacher + ValidatorsInfo storage.Cacher } // NewDataPool creates a data pools holder object @@ -67,6 +69,9 @@ func NewDataPool(args DataPoolArgs) (*dataPool, error) { if check.IfNil(args.SmartContracts) { return nil, dataRetriever.ErrNilSmartContractsPool } + if check.IfNil(args.ValidatorsInfo) { + return nil, dataRetriever.ErrNilValidatorInfoPool + } return &dataPool{ transactions: args.Transactions, @@ -79,6 +84,7 @@ func NewDataPool(args DataPoolArgs) (*dataPool, error) { trieNodesChunks: args.TrieNodesChunks, currBlockTxs: args.CurrentBlockTransactions, smartContracts: args.SmartContracts, + validatorsInfo: args.ValidatorsInfo, }, nil } @@ -132,6 +138,11 @@ func (dp *dataPool) SmartContracts() storage.Cacher { return dp.smartContracts } +// ValidatorsInfo returns the holder for validators info +func (dp *dataPool) ValidatorsInfo() storage.Cacher { + return dp.validatorsInfo +} + // IsInterfaceNil returns true if there is no 
value under the interface func (dp *dataPool) IsInterfaceNil() bool { return dp == nil diff --git a/dataRetriever/dataPool/dataPool_test.go b/dataRetriever/dataPool/dataPool_test.go index bd0552b7fb1..64e7a48f969 100644 --- a/dataRetriever/dataPool/dataPool_test.go +++ b/dataRetriever/dataPool/dataPool_test.go @@ -25,6 +25,7 @@ func createMockDataPoolArgs() dataPool.DataPoolArgs { TrieNodesChunks: testscommon.NewCacherStub(), CurrentBlockTransactions: &mock.TxForCurrentBlockStub{}, SmartContracts: testscommon.NewCacherStub(), + ValidatorsInfo: testscommon.NewCacherStub(), } } @@ -116,6 +117,17 @@ func TestNewDataPool_NilSmartContractsShouldErr(t *testing.T) { assert.Nil(t, tdp) } +func TestNewDataPool_NilValidatorsInfoShouldErr(t *testing.T) { + t.Parallel() + + args := createMockDataPoolArgs() + args.ValidatorsInfo = nil + tdp, err := dataPool.NewDataPool(args) + + assert.Equal(t, dataRetriever.ErrNilValidatorInfoPool, err) + assert.Nil(t, tdp) +} + func TestNewDataPool_NilPeerBlocksShouldErr(t *testing.T) { t.Parallel() @@ -149,6 +161,7 @@ func TestNewDataPool_OkValsShouldWork(t *testing.T) { TrieNodesChunks: testscommon.NewCacherStub(), CurrentBlockTransactions: &mock.TxForCurrentBlockStub{}, SmartContracts: testscommon.NewCacherStub(), + ValidatorsInfo: testscommon.NewCacherStub(), } tdp, err := dataPool.NewDataPool(args) diff --git a/dataRetriever/factory/dataPoolFactory.go b/dataRetriever/factory/dataPoolFactory.go index c9cb7ed29bc..01ff3b172b5 100644 --- a/dataRetriever/factory/dataPoolFactory.go +++ b/dataRetriever/factory/dataPoolFactory.go @@ -124,6 +124,12 @@ func NewDataPoolFromConfig(args ArgsDataPool) (dataRetriever.PoolsHolder, error) return nil, fmt.Errorf("%w while creating the cache for the smartcontract results", err) } + cacherCfg = factory.GetCacherFromConfig(mainConfig.ValidatorInfoPool) + validatorsInfo, err := storageUnit.NewCache(cacherCfg) + if err != nil { + return nil, fmt.Errorf("%w while creating the cache for the validator info 
results", err) + } + currBlockTxs := dataPool.NewCurrentBlockPool() dataPoolArgs := dataPool.DataPoolArgs{ Transactions: txPool, @@ -136,6 +142,7 @@ func NewDataPoolFromConfig(args ArgsDataPool) (dataRetriever.PoolsHolder, error) TrieNodesChunks: trieNodesChunks, CurrentBlockTransactions: currBlockTxs, SmartContracts: smartContracts, + ValidatorsInfo: validatorsInfo, } return dataPool.NewDataPool(dataPoolArgs) } diff --git a/dataRetriever/factory/dataPoolFactory_test.go b/dataRetriever/factory/dataPoolFactory_test.go index 0acef55e241..84d8b94bca1 100644 --- a/dataRetriever/factory/dataPoolFactory_test.go +++ b/dataRetriever/factory/dataPoolFactory_test.go @@ -128,6 +128,14 @@ func TestNewDataPoolFromConfig_BadConfigShouldErr(t *testing.T) { fmt.Println(err) require.True(t, errors.Is(err, storage.ErrNotSupportedCacheType)) require.True(t, strings.Contains(err.Error(), "the cache for the smartcontract results")) + + args = getGoodArgs() + args.Config.ValidatorInfoPool.Type = "invalid cache type" + holder, err = NewDataPoolFromConfig(args) + require.Nil(t, holder) + fmt.Println(err) + require.True(t, errors.Is(err, storage.ErrNotSupportedCacheType)) + require.True(t, strings.Contains(err.Error(), "the cache for the validator info results")) } func getGoodArgs() ArgsDataPool { diff --git a/dataRetriever/factory/resolverscontainer/baseResolversContainerFactory.go b/dataRetriever/factory/resolverscontainer/baseResolversContainerFactory.go index 43b77538d6a..d78b7d0e53a 100644 --- a/dataRetriever/factory/resolverscontainer/baseResolversContainerFactory.go +++ b/dataRetriever/factory/resolverscontainer/baseResolversContainerFactory.go @@ -357,3 +357,33 @@ func (brcf *baseResolversContainerFactory) createTrieNodesResolver( return resolver, nil } + +func (brcf *baseResolversContainerFactory) generateValidatorInfoResolver() error { + identifierValidatorInfo := common.ValidatorInfoTopic + shardC := brcf.shardCoordinator + resolverSender, err := 
brcf.createOneResolverSender(identifierValidatorInfo, EmptyExcludePeersOnTopic, shardC.SelfId()) + if err != nil { + return err + } + + arg := resolvers.ArgValidatorInfoResolver{ + SenderResolver: resolverSender, + Marshaller: brcf.marshalizer, + AntifloodHandler: brcf.inputAntifloodHandler, + Throttler: brcf.throttler, + ValidatorInfoPool: brcf.dataPools.ValidatorsInfo(), + ValidatorInfoStorage: brcf.store.GetStorer(dataRetriever.UnsignedTransactionUnit), + IsFullHistoryNode: brcf.isFullHistoryNode, + } + validatorInfoResolver, err := resolvers.NewValidatorInfoResolver(arg) + if err != nil { + return err + } + + err = brcf.messenger.RegisterMessageProcessor(validatorInfoResolver.RequestTopic(), common.DefaultResolversIdentifier, validatorInfoResolver) + if err != nil { + return err + } + + return brcf.container.Add(identifierValidatorInfo, validatorInfoResolver) +} diff --git a/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory.go b/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory.go index c80c0544a1d..6158945a265 100644 --- a/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory.go +++ b/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory.go @@ -120,6 +120,11 @@ func (mrcf *metaResolversContainerFactory) Create() (dataRetriever.ResolversCont return nil, err } + err = mrcf.generateValidatorInfoResolver() + if err != nil { + return nil, err + } + return mrcf.container, nil } diff --git a/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory.go b/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory.go index 3102399912b..573819ff206 100644 --- a/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory.go +++ b/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory.go @@ -118,6 +118,11 @@ func (srcf *shardResolversContainerFactory) Create() (dataRetriever.ResolversCon return nil, err } + err = 
srcf.generateValidatorInfoResolver() + if err != nil { + return nil, err + } + return srcf.container, nil } diff --git a/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory_test.go b/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory_test.go index fb5a532033f..f81b526bdff 100644 --- a/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory_test.go +++ b/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory_test.go @@ -354,8 +354,10 @@ func TestShardResolversContainerFactory_With4ShardsShouldWork(t *testing.T) { numResolverMiniBlocks := noOfShards + 2 numResolverMetaBlockHeaders := 1 numResolverTrieNodes := 1 + numResolverValidatorInfo := 1 totalResolvers := numResolverTxs + numResolverHeaders + numResolverMiniBlocks + - numResolverMetaBlockHeaders + numResolverSCRs + numResolverRewardTxs + numResolverTrieNodes + numResolverMetaBlockHeaders + numResolverSCRs + numResolverRewardTxs + numResolverTrieNodes + + numResolverValidatorInfo assert.Equal(t, totalResolvers, container.Len()) } diff --git a/dataRetriever/factory/storageResolversContainer/shardResolversContainerFactory_test.go b/dataRetriever/factory/storageResolversContainer/shardResolversContainerFactory_test.go index d1ae4138732..08535c98dd3 100644 --- a/dataRetriever/factory/storageResolversContainer/shardResolversContainerFactory_test.go +++ b/dataRetriever/factory/storageResolversContainer/shardResolversContainerFactory_test.go @@ -173,8 +173,10 @@ func TestShardResolversContainerFactory_With4ShardsShouldWork(t *testing.T) { numResolverMiniBlocks := noOfShards + 2 numResolverMetaBlockHeaders := 1 numResolverTrieNodes := 1 + numResolverValidatorInfo := 1 totalResolvers := numResolverTxs + numResolverHeaders + numResolverMiniBlocks + - numResolverMetaBlockHeaders + numResolverSCRs + numResolverRewardTxs + numResolverTrieNodes + numResolverMetaBlockHeaders + numResolverSCRs + numResolverRewardTxs + numResolverTrieNodes + + 
numResolverValidatorInfo assert.Equal(t, totalResolvers, container.Len()) } diff --git a/dataRetriever/interface.go b/dataRetriever/interface.go index 21f42f35efd..f5f092f6f65 100644 --- a/dataRetriever/interface.go +++ b/dataRetriever/interface.go @@ -321,6 +321,7 @@ type PoolsHolder interface { TrieNodesChunks() storage.Cacher SmartContracts() storage.Cacher CurrentBlockTxs() TransactionCacher + ValidatorsInfo() storage.Cacher IsInterfaceNil() bool } diff --git a/dataRetriever/requestHandlers/requestHandler.go b/dataRetriever/requestHandlers/requestHandler.go index c4d3cc85908..b819a5ec1e2 100644 --- a/dataRetriever/requestHandlers/requestHandler.go +++ b/dataRetriever/requestHandlers/requestHandler.go @@ -11,6 +11,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/core/check" "github.com/ElrondNetwork/elrond-go-core/core/partitioning" "github.com/ElrondNetwork/elrond-go-logger" + "github.com/ElrondNetwork/elrond-go/common" "github.com/ElrondNetwork/elrond-go/dataRetriever" "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/process/factory" @@ -551,6 +552,36 @@ func (rrh *resolverRequestHandler) RequestMetaHeaderByNonce(nonce uint64) { rrh.addRequestedItems([][]byte{key}, uniqueMetaHeadersSuffix) } +// RequestValidatorInfo asks for the validator info associated with a specific hash from connected peers +func (rrh *resolverRequestHandler) RequestValidatorInfo(hash []byte) { + log.Debug("requesting validator info messages from network", + "topic", common.ValidatorInfoTopic, + "hash", hash, + "epoch", rrh.epoch, + ) + + resolver, err := rrh.resolversFinder.MetaChainResolver(common.ValidatorInfoTopic) + if err != nil { + log.Error("RequestValidatorInfo.MetaChainResolver", + "error", err.Error(), + "topic", common.ValidatorInfoTopic, + "hash", hash, + "epoch", rrh.epoch, + ) + return + } + + err = resolver.RequestDataFromHash(hash, rrh.epoch) + if err != nil { + log.Debug("RequestValidatorInfo.RequestDataFromHash", + "error", 
err.Error(), + "topic", common.ValidatorInfoTopic, + "hash", hash, + "epoch", rrh.epoch, + ) + } +} + func (rrh *resolverRequestHandler) testIfRequestIsNeeded(key []byte, suffix string) bool { rrh.sweepIfNeeded() diff --git a/dataRetriever/requestHandlers/requestHandler_test.go b/dataRetriever/requestHandlers/requestHandler_test.go index fa1e573006e..8ef34576a9a 100644 --- a/dataRetriever/requestHandlers/requestHandler_test.go +++ b/dataRetriever/requestHandlers/requestHandler_test.go @@ -1,6 +1,7 @@ package requestHandlers import ( + "bytes" "sync/atomic" "testing" "time" @@ -1205,3 +1206,63 @@ func TestResolverRequestHandler_RequestTrieNodeNotAValidResolver(t *testing.T) { rrh.RequestTrieNode([]byte("hash"), "topic", 1) assert.True(t, called) } + +func TestResolverRequestHandler_RequestValidatorInfo(t *testing.T) { + t.Parallel() + + t.Run("MetaChainResolver returns error", func(t *testing.T) { + providedHash := []byte("provided hash") + wasCalled := false + res := &mock.ResolverStub{ + RequestDataFromHashCalled: func(hash []byte, epoch uint32) error { + wasCalled = true + return nil + }, + } + + rrh, _ := NewResolverRequestHandler( + &mock.ResolversFinderStub{ + MetaChainResolverCalled: func(baseTopic string) (resolver dataRetriever.Resolver, e error) { + return res, errors.New("provided err") + }, + }, + &mock.RequestedItemsHandlerStub{}, + &mock.WhiteListHandlerStub{}, + 100, + 0, + time.Second, + ) + + rrh.RequestValidatorInfo(providedHash) + + assert.False(t, wasCalled) + }) + t.Run("should work", func(t *testing.T) { + providedHash := []byte("provided hash") + wasCalled := false + res := &mock.ResolverStub{ + RequestDataFromHashCalled: func(hash []byte, epoch uint32) error { + assert.True(t, bytes.Equal(providedHash, hash)) + wasCalled = true + return nil + }, + } + + rrh, _ := NewResolverRequestHandler( + &mock.ResolversFinderStub{ + MetaChainResolverCalled: func(baseTopic string) (resolver dataRetriever.Resolver, e error) { + return res, nil + }, + }, + 
&mock.RequestedItemsHandlerStub{}, + &mock.WhiteListHandlerStub{}, + 100, + 0, + time.Second, + ) + + rrh.RequestValidatorInfo(providedHash) + + assert.True(t, wasCalled) + }) +} diff --git a/epochStart/interface.go b/epochStart/interface.go index 45c5cab69cc..ba3e5972fc9 100644 --- a/epochStart/interface.go +++ b/epochStart/interface.go @@ -58,6 +58,7 @@ type RequestHandler interface { RequestInterval() time.Duration SetNumPeersToQuery(key string, intra int, cross int) error GetNumPeersToQuery(key string) (int, int, error) + RequestValidatorInfo(hash []byte) IsInterfaceNil() bool } diff --git a/genesis/process/disabled/requestHandler.go b/genesis/process/disabled/requestHandler.go index 2fa9d93fa5c..cfac6ba21c7 100644 --- a/genesis/process/disabled/requestHandler.go +++ b/genesis/process/disabled/requestHandler.go @@ -78,6 +78,10 @@ func (r *RequestHandler) CreateTrieNodeIdentifier(_ []byte, _ uint32) []byte { return make([]byte, 0) } +// RequestValidatorInfo does nothing +func (r *RequestHandler) RequestValidatorInfo(_ []byte) { +} + // IsInterfaceNil returns true if there is no value under the interface func (r *RequestHandler) IsInterfaceNil() bool { return r == nil diff --git a/process/factory/interceptorscontainer/baseInterceptorsContainerFactory.go b/process/factory/interceptorscontainer/baseInterceptorsContainerFactory.go index 2355400f349..32fc75df644 100644 --- a/process/factory/interceptorscontainer/baseInterceptorsContainerFactory.go +++ b/process/factory/interceptorscontainer/baseInterceptorsContainerFactory.go @@ -589,3 +589,47 @@ func (bicf *baseInterceptorsContainerFactory) generateUnsignedTxsInterceptors() return bicf.container.AddMultiple(keys, interceptorsSlice) } + +func (bicf *baseInterceptorsContainerFactory) generateValidatorInfoInterceptor() error { + identifier := common.ValidatorInfoTopic + + interceptedValidatorInfoFactory, err := interceptorFactory.NewInterceptedValidatorInfoDataFactory(*bicf.argInterceptorFactory) + if err != nil { + 
return err + } + + internalMarshalizer := bicf.argInterceptorFactory.CoreComponents.InternalMarshalizer() + argProcessor := processor.ArgValidatorInfoInterceptorProcessor{ + Marshaller: internalMarshalizer, + ValidatorInfoPool: bicf.dataPool.ValidatorsInfo(), + } + + validatorInfoProcessor, err := processor.NewValidatorInfoInterceptorProcessor(argProcessor) + if err != nil { + return err + } + + mdInterceptor, err := interceptors.NewMultiDataInterceptor( + interceptors.ArgMultiDataInterceptor{ + Topic: identifier, + Marshalizer: internalMarshalizer, + DataFactory: interceptedValidatorInfoFactory, + Processor: validatorInfoProcessor, + Throttler: bicf.globalThrottler, + AntifloodHandler: bicf.antifloodHandler, + WhiteListRequest: bicf.whiteListHandler, + PreferredPeersHolder: bicf.preferredPeersHolder, + CurrentPeerId: bicf.messenger.ID(), + }, + ) + if err != nil { + return err + } + + interceptor, err := bicf.createTopicAndAssignHandler(identifier, mdInterceptor, true) + if err != nil { + return err + } + + return bicf.container.Add(identifier, interceptor) +} diff --git a/process/factory/interceptorscontainer/metaInterceptorsContainerFactory.go b/process/factory/interceptorscontainer/metaInterceptorsContainerFactory.go index fe6a17c03bb..b79c1cd9b53 100644 --- a/process/factory/interceptorscontainer/metaInterceptorsContainerFactory.go +++ b/process/factory/interceptorscontainer/metaInterceptorsContainerFactory.go @@ -154,6 +154,11 @@ func (micf *metaInterceptorsContainerFactory) Create() (process.InterceptorsCont return nil, err } + err = micf.generateValidatorInfoInterceptor() + if err != nil { + return nil, err + } + return micf.container, nil } diff --git a/process/factory/interceptorscontainer/shardInterceptorsContainerFactory.go b/process/factory/interceptorscontainer/shardInterceptorsContainerFactory.go index 21be93af9c9..a739c1e9d5e 100644 --- a/process/factory/interceptorscontainer/shardInterceptorsContainerFactory.go +++ 
b/process/factory/interceptorscontainer/shardInterceptorsContainerFactory.go @@ -153,6 +153,11 @@ func (sicf *shardInterceptorsContainerFactory) Create() (process.InterceptorsCon return nil, err } + err = sicf.generateValidatorInfoInterceptor() + if err != nil { + return nil, err + } + return sicf.container, nil } diff --git a/process/factory/interceptorscontainer/shardInterceptorsContainerFactory_test.go b/process/factory/interceptorscontainer/shardInterceptorsContainerFactory_test.go index 9561370a58a..09b1fae612e 100644 --- a/process/factory/interceptorscontainer/shardInterceptorsContainerFactory_test.go +++ b/process/factory/interceptorscontainer/shardInterceptorsContainerFactory_test.go @@ -594,8 +594,10 @@ func TestShardInterceptorsContainerFactory_With4ShardsShouldWork(t *testing.T) { numInterceptorMiniBlocks := noOfShards + 2 numInterceptorMetachainHeaders := 1 numInterceptorTrieNodes := 1 + numInterceptorValidatorInfo := 1 totalInterceptors := numInterceptorTxs + numInterceptorsUnsignedTxs + numInterceptorsRewardTxs + - numInterceptorHeaders + numInterceptorMiniBlocks + numInterceptorMetachainHeaders + numInterceptorTrieNodes + numInterceptorHeaders + numInterceptorMiniBlocks + numInterceptorMetachainHeaders + numInterceptorTrieNodes + + numInterceptorValidatorInfo assert.Nil(t, err) assert.Equal(t, totalInterceptors, container.Len()) diff --git a/process/interface.go b/process/interface.go index dcb4684cdbb..bd88b1ed89c 100644 --- a/process/interface.go +++ b/process/interface.go @@ -549,6 +549,7 @@ type RequestHandler interface { GetNumPeersToQuery(key string) (int, int, error) RequestTrieNode(requestHash []byte, topic string, chunkIndex uint32) CreateTrieNodeIdentifier(requestHash []byte, chunkIndex uint32) []byte + RequestValidatorInfo(hash []byte) IsInterfaceNil() bool } diff --git a/testscommon/dataRetriever/poolFactory.go b/testscommon/dataRetriever/poolFactory.go index be7bd68578f..1ddfb66a845 100644 --- a/testscommon/dataRetriever/poolFactory.go 
+++ b/testscommon/dataRetriever/poolFactory.go @@ -112,6 +112,10 @@ func CreatePoolsHolder(numShards uint32, selfShard uint32) dataRetriever.PoolsHo smartContracts, err := storageUnit.NewCache(cacherConfig) panicIfError("CreatePoolsHolder", err) + cacherConfig = storageUnit.CacheConfig{Capacity: 50000, Type: storageUnit.LRUCache} + validatorsInfo, err := storageUnit.NewCache(cacherConfig) + panicIfError("CreatePoolsHolder", err) + currentTx := dataPool.NewCurrentBlockPool() dataPoolArgs := dataPool.DataPoolArgs{ Transactions: txPool, @@ -124,6 +128,7 @@ func CreatePoolsHolder(numShards uint32, selfShard uint32) dataRetriever.PoolsHo TrieNodesChunks: trieNodesChunks, CurrentBlockTransactions: currentTx, SmartContracts: smartContracts, + ValidatorsInfo: validatorsInfo, } holder, err := dataPool.NewDataPool(dataPoolArgs) panicIfError("CreatePoolsHolder", err) @@ -174,6 +179,10 @@ func CreatePoolsHolderWithTxPool(txPool dataRetriever.ShardedDataCacherNotifier) smartContracts, err := storageUnit.NewCache(cacherConfig) panicIfError("CreatePoolsHolderWithTxPool", err) + cacherConfig = storageUnit.CacheConfig{Capacity: 50000, Type: storageUnit.LRUCache} + validatorsInfo, err := storageUnit.NewCache(cacherConfig) + panicIfError("CreatePoolsHolderWithTxPool", err) + currentTx := dataPool.NewCurrentBlockPool() dataPoolArgs := dataPool.DataPoolArgs{ Transactions: txPool, @@ -186,6 +195,7 @@ func CreatePoolsHolderWithTxPool(txPool dataRetriever.ShardedDataCacherNotifier) TrieNodesChunks: trieNodesChunks, CurrentBlockTransactions: currentTx, SmartContracts: smartContracts, + ValidatorsInfo: validatorsInfo, } holder, err := dataPool.NewDataPool(dataPoolArgs) panicIfError("CreatePoolsHolderWithTxPool", err) diff --git a/testscommon/dataRetriever/poolsHolderMock.go b/testscommon/dataRetriever/poolsHolderMock.go index 112ada62273..94cce7167e7 100644 --- a/testscommon/dataRetriever/poolsHolderMock.go +++ b/testscommon/dataRetriever/poolsHolderMock.go @@ -24,6 +24,7 @@ type 
PoolsHolderMock struct { trieNodesChunks storage.Cacher smartContracts storage.Cacher currBlockTxs dataRetriever.TransactionCacher + validatorsInfo storage.Cacher } // NewPoolsHolderMock - @@ -84,6 +85,9 @@ func NewPoolsHolderMock() *PoolsHolderMock { holder.smartContracts, err = storageUnit.NewCache(storageUnit.CacheConfig{Type: storageUnit.LRUCache, Capacity: 10000, Shards: 1, SizeInBytes: 0}) panicIfError("NewPoolsHolderMock", err) + holder.validatorsInfo, err = storageUnit.NewCache(storageUnit.CacheConfig{Type: storageUnit.LRUCache, Capacity: 10000, Shards: 1, SizeInBytes: 0}) + panicIfError("NewPoolsHolderMock", err) + return holder } @@ -147,6 +151,11 @@ func (holder *PoolsHolderMock) SmartContracts() storage.Cacher { return holder.smartContracts } +// ValidatorsInfo - +func (holder *PoolsHolderMock) ValidatorsInfo() storage.Cacher { + return holder.validatorsInfo +} + // IsInterfaceNil returns true if there is no value under the interface func (holder *PoolsHolderMock) IsInterfaceNil() bool { return holder == nil diff --git a/testscommon/dataRetriever/poolsHolderStub.go b/testscommon/dataRetriever/poolsHolderStub.go index 7d6f7976f5e..8ee8a385a77 100644 --- a/testscommon/dataRetriever/poolsHolderStub.go +++ b/testscommon/dataRetriever/poolsHolderStub.go @@ -19,6 +19,7 @@ type PoolsHolderStub struct { TrieNodesChunksCalled func() storage.Cacher PeerChangesBlocksCalled func() storage.Cacher SmartContractsCalled func() storage.Cacher + ValidatorsInfoCalled func() storage.Cacher } // NewPoolsHolderStub - @@ -125,6 +126,15 @@ func (holder *PoolsHolderStub) SmartContracts() storage.Cacher { return testscommon.NewCacherStub() } +// ValidatorsInfo - +func (holder *PoolsHolderStub) ValidatorsInfo() storage.Cacher { + if holder.ValidatorsInfoCalled != nil { + return holder.ValidatorsInfoCalled() + } + + return testscommon.NewCacherStub() +} + // IsInterfaceNil returns true if there is no value under the interface func (holder *PoolsHolderStub) IsInterfaceNil() bool { 
return holder == nil diff --git a/testscommon/generalConfig.go b/testscommon/generalConfig.go index 907a543cee2..cbbb0bbaaed 100644 --- a/testscommon/generalConfig.go +++ b/testscommon/generalConfig.go @@ -145,6 +145,7 @@ func GetGeneralConfig() config.Config { SizeInBytes: 1000000000, Shards: 1, }, + ValidatorInfoPool: getLRUCacheConfig(), HeadersPoolConfig: config.HeadersPoolConfig{ MaxHeadersPerShard: 100, NumElementsToRemoveOnEviction: 1, diff --git a/testscommon/requestHandlerStub.go b/testscommon/requestHandlerStub.go index 6c2f90f0e5d..f4cf2b23d5f 100644 --- a/testscommon/requestHandlerStub.go +++ b/testscommon/requestHandlerStub.go @@ -19,6 +19,7 @@ type RequestHandlerStub struct { GetNumPeersToQueryCalled func(key string) (int, int, error) RequestTrieNodeCalled func(requestHash []byte, topic string, chunkIndex uint32) CreateTrieNodeIdentifierCalled func(requestHash []byte, chunkIndex uint32) []byte + RequestValidatorInfoCalled func(hash []byte) } // SetNumPeersToQuery - @@ -152,6 +153,13 @@ func (rhs *RequestHandlerStub) RequestTrieNode(requestHash []byte, topic string, } } +// RequestValidatorInfo - +func (rhs *RequestHandlerStub) RequestValidatorInfo(hash []byte) { + if rhs.RequestValidatorInfoCalled != nil { + rhs.RequestValidatorInfoCalled(hash) + } +} + // IsInterfaceNil returns true if there is no value under the interface func (rhs *RequestHandlerStub) IsInterfaceNil() bool { return rhs == nil From d68ce4dc5d5bc8cca1b0e1dabc4c6bd094f1c1ac Mon Sep 17 00:00:00 2001 From: Sorin Stanculeanu Date: Wed, 4 May 2022 11:12:04 +0300 Subject: [PATCH 10/70] remove useless default --- dataRetriever/resolvers/validatorInfoResolver.go | 1 - 1 file changed, 1 deletion(-) diff --git a/dataRetriever/resolvers/validatorInfoResolver.go b/dataRetriever/resolvers/validatorInfoResolver.go index 4d8db5b701a..587d2d463eb 100644 --- a/dataRetriever/resolvers/validatorInfoResolver.go +++ b/dataRetriever/resolvers/validatorInfoResolver.go @@ -109,7 +109,6 @@ func (res 
*validatorInfoResolver) ProcessReceivedMessage(message p2p.MessageP2P, switch rd.Type { case dataRetriever.HashType: return res.resolveHashRequest(rd.Value, rd.Epoch, fromConnectedPeer) - default: } return fmt.Errorf("%w for value %s", dataRetriever.ErrRequestTypeNotImplemented, logger.DisplayByteSlice(rd.Value)) From 17e4ae67869898be58c0d78eca0153c644cf3bf8 Mon Sep 17 00:00:00 2001 From: Sorin Stanculeanu Date: Wed, 4 May 2022 11:27:11 +0300 Subject: [PATCH 11/70] fixed tests --- .../resolverscontainer/metaResolversContainerFactory_test.go | 3 ++- .../shardResolversContainerFactory_test.go | 5 +---- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory_test.go b/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory_test.go index 299add48362..d648d214282 100644 --- a/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory_test.go +++ b/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory_test.go @@ -272,8 +272,9 @@ func TestMetaResolversContainerFactory_With4ShardsShouldWork(t *testing.T) { numResolversRewards := noOfShards numResolversTxs := noOfShards + 1 numResolversTrieNodes := 2 + numResolverValidatorInfo := 1 totalResolvers := numResolversShardHeadersForMetachain + numResolverMetablocks + numResolversMiniBlocks + - numResolversUnsigned + numResolversTxs + numResolversTrieNodes + numResolversRewards + numResolversUnsigned + numResolversTxs + numResolversTrieNodes + numResolversRewards + numResolverValidatorInfo assert.Equal(t, totalResolvers, container.Len()) diff --git a/dataRetriever/factory/storageResolversContainer/shardResolversContainerFactory_test.go b/dataRetriever/factory/storageResolversContainer/shardResolversContainerFactory_test.go index 08535c98dd3..f7451742625 100644 --- a/dataRetriever/factory/storageResolversContainer/shardResolversContainerFactory_test.go +++ 
b/dataRetriever/factory/storageResolversContainer/shardResolversContainerFactory_test.go @@ -172,11 +172,8 @@ func TestShardResolversContainerFactory_With4ShardsShouldWork(t *testing.T) { numResolverHeaders := 1 numResolverMiniBlocks := noOfShards + 2 numResolverMetaBlockHeaders := 1 - numResolverTrieNodes := 1 - numResolverValidatorInfo := 1 totalResolvers := numResolverTxs + numResolverHeaders + numResolverMiniBlocks + - numResolverMetaBlockHeaders + numResolverSCRs + numResolverRewardTxs + numResolverTrieNodes + - numResolverValidatorInfo + numResolverMetaBlockHeaders + numResolverSCRs + numResolverRewardTxs assert.Equal(t, totalResolvers, container.Len()) } From 9b8fc4aed89555208e9e84c5260bd77be4f95bca Mon Sep 17 00:00:00 2001 From: Sorin Stanculeanu Date: Wed, 4 May 2022 12:08:34 +0300 Subject: [PATCH 12/70] added back line removed by mistake --- .../shardResolversContainerFactory_test.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dataRetriever/factory/storageResolversContainer/shardResolversContainerFactory_test.go b/dataRetriever/factory/storageResolversContainer/shardResolversContainerFactory_test.go index f7451742625..d1ae4138732 100644 --- a/dataRetriever/factory/storageResolversContainer/shardResolversContainerFactory_test.go +++ b/dataRetriever/factory/storageResolversContainer/shardResolversContainerFactory_test.go @@ -172,8 +172,9 @@ func TestShardResolversContainerFactory_With4ShardsShouldWork(t *testing.T) { numResolverHeaders := 1 numResolverMiniBlocks := noOfShards + 2 numResolverMetaBlockHeaders := 1 + numResolverTrieNodes := 1 totalResolvers := numResolverTxs + numResolverHeaders + numResolverMiniBlocks + - numResolverMetaBlockHeaders + numResolverSCRs + numResolverRewardTxs + numResolverMetaBlockHeaders + numResolverSCRs + numResolverRewardTxs + numResolverTrieNodes assert.Equal(t, totalResolvers, container.Len()) } From 6d78ef416eca473c09364afe8cfa93cb0d3bc438 Mon Sep 17 00:00:00 2001 From: Sorin Stanculeanu Date: Wed, 
4 May 2022 12:26:04 +0300 Subject: [PATCH 13/70] fix tests --- .../metaInterceptorsContainerFactory_test.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/process/factory/interceptorscontainer/metaInterceptorsContainerFactory_test.go b/process/factory/interceptorscontainer/metaInterceptorsContainerFactory_test.go index 65c0c39bb51..e5209291ac8 100644 --- a/process/factory/interceptorscontainer/metaInterceptorsContainerFactory_test.go +++ b/process/factory/interceptorscontainer/metaInterceptorsContainerFactory_test.go @@ -539,9 +539,10 @@ func TestMetaInterceptorsContainerFactory_With4ShardsShouldWork(t *testing.T) { numInterceptorsUnsignedTxsForMetachain := noOfShards + 1 numInterceptorsRewardsTxsForMetachain := noOfShards numInterceptorsTrieNodes := 2 + numInterceptorValidatorInfo := 1 totalInterceptors := numInterceptorsMetablock + numInterceptorsShardHeadersForMetachain + numInterceptorsTrieNodes + numInterceptorsTransactionsForMetachain + numInterceptorsUnsignedTxsForMetachain + numInterceptorsMiniBlocksForMetachain + - numInterceptorsRewardsTxsForMetachain + numInterceptorsRewardsTxsForMetachain + numInterceptorValidatorInfo assert.Nil(t, err) assert.Equal(t, totalInterceptors, container.Len()) From d1bad30e4efc25f6c8c3b0a259289db75256b637 Mon Sep 17 00:00:00 2001 From: Sorin Stanculeanu Date: Wed, 4 May 2022 12:58:50 +0300 Subject: [PATCH 14/70] integrated storage into interceptor processor in order to write the received validator info --- process/errors.go | 3 + .../baseInterceptorsContainerFactory.go | 5 +- .../validatorInfoInterceptorProcessor.go | 44 ++++++++-- .../validatorInfoInterceptorProcessor_test.go | 83 +++++++++++++++++-- 4 files changed, 119 insertions(+), 16 deletions(-) diff --git a/process/errors.go b/process/errors.go index 3030a5ee6ed..30d4869c888 100644 --- a/process/errors.go +++ b/process/errors.go @@ -1083,6 +1083,9 @@ var ErrInvalidProcessWaitTime = errors.New("invalid process wait time") // ErrNilValidatorInfoPool 
signals that a nil validator info pool has been provided var ErrNilValidatorInfoPool = errors.New("nil validator info pool") +// ErrNilValidatorInfoStorage signals that a nil validator info storage has been provided +var ErrNilValidatorInfoStorage = errors.New("nil validator info storage") + // ErrPropertyTooLong signals that a heartbeat property was too long var ErrPropertyTooLong = errors.New("property too long") diff --git a/process/factory/interceptorscontainer/baseInterceptorsContainerFactory.go b/process/factory/interceptorscontainer/baseInterceptorsContainerFactory.go index 32fc75df644..399390dd595 100644 --- a/process/factory/interceptorscontainer/baseInterceptorsContainerFactory.go +++ b/process/factory/interceptorscontainer/baseInterceptorsContainerFactory.go @@ -600,8 +600,9 @@ func (bicf *baseInterceptorsContainerFactory) generateValidatorInfoInterceptor() internalMarshalizer := bicf.argInterceptorFactory.CoreComponents.InternalMarshalizer() argProcessor := processor.ArgValidatorInfoInterceptorProcessor{ - Marshaller: internalMarshalizer, - ValidatorInfoPool: bicf.dataPool.ValidatorsInfo(), + Marshaller: internalMarshalizer, + ValidatorInfoPool: bicf.dataPool.ValidatorsInfo(), + ValidatorInfoStorage: bicf.store.GetStorer(dataRetriever.UnsignedTransactionUnit), } validatorInfoProcessor, err := processor.NewValidatorInfoInterceptorProcessor(argProcessor) diff --git a/process/interceptors/processor/validatorInfoInterceptorProcessor.go b/process/interceptors/processor/validatorInfoInterceptorProcessor.go index 7bb1d40894d..c304d1f671b 100644 --- a/process/interceptors/processor/validatorInfoInterceptorProcessor.go +++ b/process/interceptors/processor/validatorInfoInterceptorProcessor.go @@ -1,22 +1,32 @@ package processor import ( + "strconv" + "github.com/ElrondNetwork/elrond-go-core/core" "github.com/ElrondNetwork/elrond-go-core/core/check" "github.com/ElrondNetwork/elrond-go-core/marshal" "github.com/ElrondNetwork/elrond-go/process" + 
"github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/storage" ) +const ( + epochBase = 10 + epochSize = 32 +) + // ArgValidatorInfoInterceptorProcessor is the argument structure used to create a new validator info interceptor processor type ArgValidatorInfoInterceptorProcessor struct { - Marshaller marshal.Marshalizer - ValidatorInfoPool storage.Cacher + Marshaller marshal.Marshalizer + ValidatorInfoPool storage.Cacher + ValidatorInfoStorage storage.Storer } type validatorInfoInterceptorProcessor struct { - marshaller marshal.Marshalizer - validatorInfoPool storage.Cacher + marshaller marshal.Marshalizer + validatorInfoPool storage.Cacher + validatorInfoStorage storage.Storer } // NewValidatorInfoInterceptorProcessor creates a new validator info interceptor processor @@ -27,8 +37,9 @@ func NewValidatorInfoInterceptorProcessor(args ArgValidatorInfoInterceptorProces } return &validatorInfoInterceptorProcessor{ - marshaller: args.Marshaller, - validatorInfoPool: args.ValidatorInfoPool, + marshaller: args.Marshaller, + validatorInfoPool: args.ValidatorInfoPool, + validatorInfoStorage: args.ValidatorInfoStorage, }, nil } @@ -39,6 +50,9 @@ func checkArgs(args ArgValidatorInfoInterceptorProcessor) error { if check.IfNil(args.ValidatorInfoPool) { return process.ErrNilValidatorInfoPool } + if check.IfNil(args.ValidatorInfoStorage) { + return process.ErrNilValidatorInfoStorage + } return nil } @@ -49,7 +63,7 @@ func (viip *validatorInfoInterceptorProcessor) Validate(_ process.InterceptedDat } // Save will save the intercepted validator info into the cache -func (viip *validatorInfoInterceptorProcessor) Save(data process.InterceptedData, _ core.PeerID, _ string) error { +func (viip *validatorInfoInterceptorProcessor) Save(data process.InterceptedData, _ core.PeerID, epoch string) error { ivi, ok := data.(interceptedValidatorInfo) if !ok { return process.ErrWrongTypeAssertion @@ -60,7 +74,21 @@ func (viip *validatorInfoInterceptorProcessor) Save(data 
process.InterceptedData viip.validatorInfoPool.HasOrAdd(hash, validatorInfo, validatorInfo.Size()) - return nil + return viip.updateStorage(hash, validatorInfo, epoch) +} + +func (viip *validatorInfoInterceptorProcessor) updateStorage(hash []byte, validatorInfo state.ValidatorInfo, epoch string) error { + buff, err := viip.marshaller.Marshal(&validatorInfo) + if err != nil { + return err + } + + epochUint, err := strconv.ParseUint(epoch, epochBase, epochSize) + if err != nil { + return err + } + + return viip.validatorInfoStorage.PutInEpoch(hash, buff, uint32(epochUint)) } // RegisterHandler registers a callback function to be notified of incoming validator info diff --git a/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go b/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go index fa29e83e4c5..9cf2bfc8f48 100644 --- a/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go +++ b/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go @@ -1,6 +1,8 @@ package processor_test import ( + "errors" + "fmt" "testing" "github.com/ElrondNetwork/elrond-go-core/core/check" @@ -13,6 +15,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" "github.com/ElrondNetwork/elrond-go/testscommon/shardingMocks" + "github.com/ElrondNetwork/elrond-go/testscommon/storage" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -41,8 +44,9 @@ func createMockInterceptedValidatorInfo() process.InterceptedData { func createMockArgValidatorInfoInterceptorProcessor() processor.ArgValidatorInfoInterceptorProcessor { return processor.ArgValidatorInfoInterceptorProcessor{ - Marshaller: testMarshalizer, - ValidatorInfoPool: testscommon.NewCacherStub(), + Marshaller: testMarshalizer, + ValidatorInfoPool: testscommon.NewCacherStub(), + ValidatorInfoStorage: &storage.StorerStub{}, } } @@ -69,6 +73,16 @@ func 
TestNewValidatorInfoInterceptorProcessor(t *testing.T) { assert.Equal(t, process.ErrNilValidatorInfoPool, err) assert.True(t, check.IfNil(proc)) }) + t.Run("nil storage should error", func(t *testing.T) { + t.Parallel() + + args := createMockArgValidatorInfoInterceptorProcessor() + args.ValidatorInfoStorage = nil + + proc, err := processor.NewValidatorInfoInterceptorProcessor(args) + assert.Equal(t, process.ErrNilValidatorInfoStorage, err) + assert.True(t, check.IfNil(proc)) + }) t.Run("should work", func(t *testing.T) { t.Parallel() @@ -107,12 +121,58 @@ func TestValidatorInfoInterceptorProcessor_Save(t *testing.T) { assert.Equal(t, process.ErrWrongTypeAssertion, proc.Save(providedData, "", "")) assert.False(t, wasCalled) }) - t.Run("should work", func(t *testing.T) { + t.Run("marshal returns error", func(t *testing.T) { + t.Parallel() + + expectedErr := errors.New("expected err") + providedData := createMockInterceptedValidatorInfo() + wasCalled := false + args := createMockArgValidatorInfoInterceptorProcessor() + args.ValidatorInfoStorage = &storage.StorerStub{ + PutInEpochCalled: func(key, data []byte, epoch uint32) error { + wasCalled = true + return nil + }, + } + args.Marshaller = &testscommon.MarshalizerStub{ + MarshalCalled: func(obj interface{}) ([]byte, error) { + return nil, expectedErr + }, + } + + proc, _ := processor.NewValidatorInfoInterceptorProcessor(args) + require.False(t, check.IfNil(proc)) + + assert.Equal(t, expectedErr, proc.Save(providedData, "", "")) + assert.False(t, wasCalled) + }) + t.Run("epoch uncastable to uint should error", func(t *testing.T) { t.Parallel() providedData := createMockInterceptedValidatorInfo() wasCalled := false args := createMockArgValidatorInfoInterceptorProcessor() + args.ValidatorInfoStorage = &storage.StorerStub{ + PutInEpochCalled: func(key, data []byte, epoch uint32) error { + wasCalled = true + return nil + }, + } + + proc, _ := processor.NewValidatorInfoInterceptorProcessor(args) + require.False(t, 
check.IfNil(proc)) + + assert.NotNil(t, proc.Save(providedData, "", "non uint epoch")) + assert.False(t, wasCalled) + }) + t.Run("should work", func(t *testing.T) { + t.Parallel() + + providedEpoch := uint32(15) + providedEpochStr := fmt.Sprintf("%d", providedEpoch) + providedData := createMockInterceptedValidatorInfo() + wasHasOrAddCalled := false + args := createMockArgValidatorInfoInterceptorProcessor() providedBuff, _ := args.Marshaller.Marshal(createMockValidatorInfo()) hasher := hashingMocks.HasherMock{} providedHash := hasher.Compute(string(providedBuff)) @@ -120,16 +180,27 @@ func TestValidatorInfoInterceptorProcessor_Save(t *testing.T) { args.ValidatorInfoPool = &testscommon.CacherStub{ HasOrAddCalled: func(key []byte, value interface{}, sizeInBytes int) (has, added bool) { assert.Equal(t, providedHash, key) - wasCalled = true + wasHasOrAddCalled = true return false, false }, } + wasPutInEpochCalled := false + args.ValidatorInfoStorage = &storage.StorerStub{ + PutInEpochCalled: func(key, data []byte, epoch uint32) error { + assert.Equal(t, providedHash, key) + assert.Equal(t, providedBuff, data) + assert.Equal(t, providedEpoch, epoch) + wasPutInEpochCalled = true + return nil + }, + } proc, _ := processor.NewValidatorInfoInterceptorProcessor(args) require.False(t, check.IfNil(proc)) - assert.Nil(t, proc.Save(providedData, "", "")) - assert.True(t, wasCalled) + assert.Nil(t, proc.Save(providedData, "", providedEpochStr)) + assert.True(t, wasHasOrAddCalled) + assert.True(t, wasPutInEpochCalled) }) } From 9f77bced0e4cb81de01c9b6a5c4bc1c48059e80d Mon Sep 17 00:00:00 2001 From: Sorin Stanculeanu Date: Wed, 4 May 2022 17:18:53 +0300 Subject: [PATCH 15/70] partial fixes after review --- cmd/node/config/config.toml | 4 ++-- dataRetriever/requestHandlers/requestHandler.go | 9 +++++++++ 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/cmd/node/config/config.toml b/cmd/node/config/config.toml index 46c6f5b77ad..50347d7a48d 100644 --- 
a/cmd/node/config/config.toml +++ b/cmd/node/config/config.toml @@ -441,9 +441,9 @@ [ValidatorInfoPool] Name = "ValidatorInfoPool" - Capacity = 100000 + Capacity = 10000 Type = "SizeLRU" - SizeInBytes = 314572800 #300MB + SizeInBytes = 31457280 #30MB #PublicKeyPeerId represents the main cache used to map Elrond block signing public keys to their associated peer id's. [PublicKeyPeerId] diff --git a/dataRetriever/requestHandlers/requestHandler.go b/dataRetriever/requestHandlers/requestHandler.go index b819a5ec1e2..304efbd91a0 100644 --- a/dataRetriever/requestHandlers/requestHandler.go +++ b/dataRetriever/requestHandlers/requestHandler.go @@ -30,6 +30,7 @@ const uniqueMiniblockSuffix = "mb" const uniqueHeadersSuffix = "hdr" const uniqueMetaHeadersSuffix = "mhdr" const uniqueTrieNodesSuffix = "tn" +const uniqueValidatorInfoSuffix = "vi" // TODO move the keys definitions that are whitelisted in core and use them in InterceptedData implementations, Identifiers() function @@ -554,6 +555,10 @@ func (rrh *resolverRequestHandler) RequestMetaHeaderByNonce(nonce uint64) { // RequestValidatorInfo asks for the validator info associated with a specific hash from connected peers func (rrh *resolverRequestHandler) RequestValidatorInfo(hash []byte) { + if !rrh.testIfRequestIsNeeded(hash, uniqueValidatorInfoSuffix) { + return + } + log.Debug("requesting validator info messages from network", "topic", common.ValidatorInfoTopic, "hash", hash, @@ -571,6 +576,8 @@ func (rrh *resolverRequestHandler) RequestValidatorInfo(hash []byte) { return } + rrh.whiteList.Add([][]byte{hash}) + err = resolver.RequestDataFromHash(hash, rrh.epoch) if err != nil { log.Debug("RequestValidatorInfo.RequestDataFromHash", @@ -580,6 +587,8 @@ func (rrh *resolverRequestHandler) RequestValidatorInfo(hash []byte) { "epoch", rrh.epoch, ) } + + rrh.addRequestedItems([][]byte{hash}, uniqueValidatorInfoSuffix) } func (rrh *resolverRequestHandler) testIfRequestIsNeeded(key []byte, suffix string) bool { From 
d2c90e2b163286c3602ac1199603b2099ee1d4ee Mon Sep 17 00:00:00 2001 From: Sorin Stanculeanu Date: Wed, 4 May 2022 18:52:55 +0300 Subject: [PATCH 16/70] fixes after review, data is not written to storage into interceptors processor anymore --- process/errors.go | 3 - .../baseInterceptorsContainerFactory.go | 30 ++++--- .../validatorInfoInterceptorProcessor.go | 45 ++-------- .../validatorInfoInterceptorProcessor_test.go | 83 +------------------ 4 files changed, 21 insertions(+), 140 deletions(-) diff --git a/process/errors.go b/process/errors.go index 30d4869c888..3030a5ee6ed 100644 --- a/process/errors.go +++ b/process/errors.go @@ -1083,9 +1083,6 @@ var ErrInvalidProcessWaitTime = errors.New("invalid process wait time") // ErrNilValidatorInfoPool signals that a nil validator info pool has been provided var ErrNilValidatorInfoPool = errors.New("nil validator info pool") -// ErrNilValidatorInfoStorage signals that a nil validator info storage has been provided -var ErrNilValidatorInfoStorage = errors.New("nil validator info storage") - // ErrPropertyTooLong signals that a heartbeat property was too long var ErrPropertyTooLong = errors.New("property too long") diff --git a/process/factory/interceptorscontainer/baseInterceptorsContainerFactory.go b/process/factory/interceptorscontainer/baseInterceptorsContainerFactory.go index 399390dd595..e69d270c320 100644 --- a/process/factory/interceptorscontainer/baseInterceptorsContainerFactory.go +++ b/process/factory/interceptorscontainer/baseInterceptorsContainerFactory.go @@ -228,11 +228,11 @@ func (bicf *baseInterceptorsContainerFactory) createOneTxInterceptor(topic strin return nil, err } - internalMarshalizer := bicf.argInterceptorFactory.CoreComponents.InternalMarshalizer() + internalMarshaller := bicf.argInterceptorFactory.CoreComponents.InternalMarshalizer() interceptor, err := interceptors.NewMultiDataInterceptor( interceptors.ArgMultiDataInterceptor{ Topic: topic, - Marshalizer: internalMarshalizer, + Marshalizer: 
internalMarshaller, DataFactory: txFactory, Processor: txProcessor, Throttler: bicf.globalThrottler, @@ -271,11 +271,11 @@ func (bicf *baseInterceptorsContainerFactory) createOneUnsignedTxInterceptor(top return nil, err } - internalMarshalizer := bicf.argInterceptorFactory.CoreComponents.InternalMarshalizer() + internalMarshaller := bicf.argInterceptorFactory.CoreComponents.InternalMarshalizer() interceptor, err := interceptors.NewMultiDataInterceptor( interceptors.ArgMultiDataInterceptor{ Topic: topic, - Marshalizer: internalMarshalizer, + Marshalizer: internalMarshaller, DataFactory: txFactory, Processor: txProcessor, Throttler: bicf.globalThrottler, @@ -314,11 +314,11 @@ func (bicf *baseInterceptorsContainerFactory) createOneRewardTxInterceptor(topic return nil, err } - internalMarshalizer := bicf.argInterceptorFactory.CoreComponents.InternalMarshalizer() + internalMarshaller := bicf.argInterceptorFactory.CoreComponents.InternalMarshalizer() interceptor, err := interceptors.NewMultiDataInterceptor( interceptors.ArgMultiDataInterceptor{ Topic: topic, - Marshalizer: internalMarshalizer, + Marshalizer: internalMarshaller, DataFactory: txFactory, Processor: txProcessor, Throttler: bicf.globalThrottler, @@ -426,11 +426,11 @@ func (bicf *baseInterceptorsContainerFactory) generateMiniBlocksInterceptors() e } func (bicf *baseInterceptorsContainerFactory) createOneMiniBlocksInterceptor(topic string) (process.Interceptor, error) { - internalMarshalizer := bicf.argInterceptorFactory.CoreComponents.InternalMarshalizer() + internalMarshaller := bicf.argInterceptorFactory.CoreComponents.InternalMarshalizer() hasher := bicf.argInterceptorFactory.CoreComponents.Hasher() argProcessor := &processor.ArgMiniblockInterceptorProcessor{ MiniblockCache: bicf.dataPool.MiniBlocks(), - Marshalizer: internalMarshalizer, + Marshalizer: internalMarshaller, Hasher: hasher, ShardCoordinator: bicf.shardCoordinator, WhiteListHandler: bicf.whiteListHandler, @@ -448,7 +448,7 @@ func (bicf 
*baseInterceptorsContainerFactory) createOneMiniBlocksInterceptor(top interceptor, err := interceptors.NewMultiDataInterceptor( interceptors.ArgMultiDataInterceptor{ Topic: topic, - Marshalizer: internalMarshalizer, + Marshalizer: internalMarshaller, DataFactory: miniblockFactory, Processor: miniblockProcessor, Throttler: bicf.globalThrottler, @@ -520,11 +520,11 @@ func (bicf *baseInterceptorsContainerFactory) createOneTrieNodesInterceptor(topi return nil, err } - internalMarshalizer := bicf.argInterceptorFactory.CoreComponents.InternalMarshalizer() + internalMarshaller := bicf.argInterceptorFactory.CoreComponents.InternalMarshalizer() interceptor, err := interceptors.NewMultiDataInterceptor( interceptors.ArgMultiDataInterceptor{ Topic: topic, - Marshalizer: internalMarshalizer, + Marshalizer: internalMarshaller, DataFactory: trieNodesFactory, Processor: trieNodesProcessor, Throttler: bicf.globalThrottler, @@ -598,11 +598,9 @@ func (bicf *baseInterceptorsContainerFactory) generateValidatorInfoInterceptor() return err } - internalMarshalizer := bicf.argInterceptorFactory.CoreComponents.InternalMarshalizer() + internalMarshaller := bicf.argInterceptorFactory.CoreComponents.InternalMarshalizer() argProcessor := processor.ArgValidatorInfoInterceptorProcessor{ - Marshaller: internalMarshalizer, - ValidatorInfoPool: bicf.dataPool.ValidatorsInfo(), - ValidatorInfoStorage: bicf.store.GetStorer(dataRetriever.UnsignedTransactionUnit), + ValidatorInfoPool: bicf.dataPool.ValidatorsInfo(), } validatorInfoProcessor, err := processor.NewValidatorInfoInterceptorProcessor(argProcessor) @@ -613,7 +611,7 @@ func (bicf *baseInterceptorsContainerFactory) generateValidatorInfoInterceptor() mdInterceptor, err := interceptors.NewMultiDataInterceptor( interceptors.ArgMultiDataInterceptor{ Topic: identifier, - Marshalizer: internalMarshalizer, + Marshalizer: internalMarshaller, DataFactory: interceptedValidatorInfoFactory, Processor: validatorInfoProcessor, Throttler: bicf.globalThrottler, 
diff --git a/process/interceptors/processor/validatorInfoInterceptorProcessor.go b/process/interceptors/processor/validatorInfoInterceptorProcessor.go index c304d1f671b..f30cd0d9625 100644 --- a/process/interceptors/processor/validatorInfoInterceptorProcessor.go +++ b/process/interceptors/processor/validatorInfoInterceptorProcessor.go @@ -1,32 +1,19 @@ package processor import ( - "strconv" - "github.com/ElrondNetwork/elrond-go-core/core" "github.com/ElrondNetwork/elrond-go-core/core/check" - "github.com/ElrondNetwork/elrond-go-core/marshal" "github.com/ElrondNetwork/elrond-go/process" - "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/storage" ) -const ( - epochBase = 10 - epochSize = 32 -) - // ArgValidatorInfoInterceptorProcessor is the argument structure used to create a new validator info interceptor processor type ArgValidatorInfoInterceptorProcessor struct { - Marshaller marshal.Marshalizer - ValidatorInfoPool storage.Cacher - ValidatorInfoStorage storage.Storer + ValidatorInfoPool storage.Cacher } type validatorInfoInterceptorProcessor struct { - marshaller marshal.Marshalizer - validatorInfoPool storage.Cacher - validatorInfoStorage storage.Storer + validatorInfoPool storage.Cacher } // NewValidatorInfoInterceptorProcessor creates a new validator info interceptor processor @@ -37,22 +24,14 @@ func NewValidatorInfoInterceptorProcessor(args ArgValidatorInfoInterceptorProces } return &validatorInfoInterceptorProcessor{ - marshaller: args.Marshaller, - validatorInfoPool: args.ValidatorInfoPool, - validatorInfoStorage: args.ValidatorInfoStorage, + validatorInfoPool: args.ValidatorInfoPool, }, nil } func checkArgs(args ArgValidatorInfoInterceptorProcessor) error { - if check.IfNil(args.Marshaller) { - return process.ErrNilMarshalizer - } if check.IfNil(args.ValidatorInfoPool) { return process.ErrNilValidatorInfoPool } - if check.IfNil(args.ValidatorInfoStorage) { - return process.ErrNilValidatorInfoStorage - } return nil } @@ -63,7 
+42,7 @@ func (viip *validatorInfoInterceptorProcessor) Validate(_ process.InterceptedDat } // Save will save the intercepted validator info into the cache -func (viip *validatorInfoInterceptorProcessor) Save(data process.InterceptedData, _ core.PeerID, epoch string) error { +func (viip *validatorInfoInterceptorProcessor) Save(data process.InterceptedData, _ core.PeerID, _ string) error { ivi, ok := data.(interceptedValidatorInfo) if !ok { return process.ErrWrongTypeAssertion @@ -74,21 +53,7 @@ func (viip *validatorInfoInterceptorProcessor) Save(data process.InterceptedData viip.validatorInfoPool.HasOrAdd(hash, validatorInfo, validatorInfo.Size()) - return viip.updateStorage(hash, validatorInfo, epoch) -} - -func (viip *validatorInfoInterceptorProcessor) updateStorage(hash []byte, validatorInfo state.ValidatorInfo, epoch string) error { - buff, err := viip.marshaller.Marshal(&validatorInfo) - if err != nil { - return err - } - - epochUint, err := strconv.ParseUint(epoch, epochBase, epochSize) - if err != nil { - return err - } - - return viip.validatorInfoStorage.PutInEpoch(hash, buff, uint32(epochUint)) + return nil } // RegisterHandler registers a callback function to be notified of incoming validator info diff --git a/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go b/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go index 9cf2bfc8f48..a65df5f4768 100644 --- a/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go +++ b/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go @@ -1,7 +1,6 @@ package processor_test import ( - "errors" "fmt" "testing" @@ -15,7 +14,6 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" "github.com/ElrondNetwork/elrond-go/testscommon/shardingMocks" - "github.com/ElrondNetwork/elrond-go/testscommon/storage" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ 
-44,25 +42,13 @@ func createMockInterceptedValidatorInfo() process.InterceptedData { func createMockArgValidatorInfoInterceptorProcessor() processor.ArgValidatorInfoInterceptorProcessor { return processor.ArgValidatorInfoInterceptorProcessor{ - Marshaller: testMarshalizer, - ValidatorInfoPool: testscommon.NewCacherStub(), - ValidatorInfoStorage: &storage.StorerStub{}, + ValidatorInfoPool: testscommon.NewCacherStub(), } } func TestNewValidatorInfoInterceptorProcessor(t *testing.T) { t.Parallel() - t.Run("nil marshaller should error", func(t *testing.T) { - t.Parallel() - - args := createMockArgValidatorInfoInterceptorProcessor() - args.Marshaller = nil - - proc, err := processor.NewValidatorInfoInterceptorProcessor(args) - assert.Equal(t, process.ErrNilMarshalizer, err) - assert.True(t, check.IfNil(proc)) - }) t.Run("nil cache should error", func(t *testing.T) { t.Parallel() @@ -73,16 +59,6 @@ func TestNewValidatorInfoInterceptorProcessor(t *testing.T) { assert.Equal(t, process.ErrNilValidatorInfoPool, err) assert.True(t, check.IfNil(proc)) }) - t.Run("nil storage should error", func(t *testing.T) { - t.Parallel() - - args := createMockArgValidatorInfoInterceptorProcessor() - args.ValidatorInfoStorage = nil - - proc, err := processor.NewValidatorInfoInterceptorProcessor(args) - assert.Equal(t, process.ErrNilValidatorInfoStorage, err) - assert.True(t, check.IfNil(proc)) - }) t.Run("should work", func(t *testing.T) { t.Parallel() @@ -121,50 +97,6 @@ func TestValidatorInfoInterceptorProcessor_Save(t *testing.T) { assert.Equal(t, process.ErrWrongTypeAssertion, proc.Save(providedData, "", "")) assert.False(t, wasCalled) }) - t.Run("marshal returns error", func(t *testing.T) { - t.Parallel() - - expectedErr := errors.New("expected err") - providedData := createMockInterceptedValidatorInfo() - wasCalled := false - args := createMockArgValidatorInfoInterceptorProcessor() - args.ValidatorInfoStorage = &storage.StorerStub{ - PutInEpochCalled: func(key, data []byte, epoch 
uint32) error { - wasCalled = true - return nil - }, - } - args.Marshaller = &testscommon.MarshalizerStub{ - MarshalCalled: func(obj interface{}) ([]byte, error) { - return nil, expectedErr - }, - } - - proc, _ := processor.NewValidatorInfoInterceptorProcessor(args) - require.False(t, check.IfNil(proc)) - - assert.Equal(t, expectedErr, proc.Save(providedData, "", "")) - assert.False(t, wasCalled) - }) - t.Run("epoch uncastable to uint should error", func(t *testing.T) { - t.Parallel() - - providedData := createMockInterceptedValidatorInfo() - wasCalled := false - args := createMockArgValidatorInfoInterceptorProcessor() - args.ValidatorInfoStorage = &storage.StorerStub{ - PutInEpochCalled: func(key, data []byte, epoch uint32) error { - wasCalled = true - return nil - }, - } - - proc, _ := processor.NewValidatorInfoInterceptorProcessor(args) - require.False(t, check.IfNil(proc)) - - assert.NotNil(t, proc.Save(providedData, "", "non uint epoch")) - assert.False(t, wasCalled) - }) t.Run("should work", func(t *testing.T) { t.Parallel() @@ -173,7 +105,7 @@ func TestValidatorInfoInterceptorProcessor_Save(t *testing.T) { providedData := createMockInterceptedValidatorInfo() wasHasOrAddCalled := false args := createMockArgValidatorInfoInterceptorProcessor() - providedBuff, _ := args.Marshaller.Marshal(createMockValidatorInfo()) + providedBuff, _ := testscommon.MarshalizerMock{}.Marshal(createMockValidatorInfo()) hasher := hashingMocks.HasherMock{} providedHash := hasher.Compute(string(providedBuff)) @@ -184,23 +116,12 @@ func TestValidatorInfoInterceptorProcessor_Save(t *testing.T) { return false, false }, } - wasPutInEpochCalled := false - args.ValidatorInfoStorage = &storage.StorerStub{ - PutInEpochCalled: func(key, data []byte, epoch uint32) error { - assert.Equal(t, providedHash, key) - assert.Equal(t, providedBuff, data) - assert.Equal(t, providedEpoch, epoch) - wasPutInEpochCalled = true - return nil - }, - } proc, _ := 
processor.NewValidatorInfoInterceptorProcessor(args) require.False(t, check.IfNil(proc)) assert.Nil(t, proc.Save(providedData, "", providedEpochStr)) assert.True(t, wasHasOrAddCalled) - assert.True(t, wasPutInEpochCalled) }) } From a8f0e48281497e25527a32e017aa0f6b518ece45 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Thu, 5 May 2022 00:20:45 +0300 Subject: [PATCH 17/70] * Added validator info new approach in epochStart/metachain --- ...Pool.go => currentBlockTransactionPool.go} | 10 +- ...go => currentBlockTransactionPool_test.go} | 3 +- .../dataPool/currentBlockValidatorInfoPool.go | 60 ++++++++++++ .../currentBlockValidatorInfoPool_test.go | 33 +++++++ dataRetriever/dataPool/dataPool.go | 77 ++++++++------- dataRetriever/dataPool/dataPool_test.go | 61 +++++++----- dataRetriever/errors.go | 12 ++- dataRetriever/factory/dataPoolFactory.go | 2 +- dataRetriever/interface.go | 12 ++- .../mock/validatorInfoForCurrentBlockMock.go | 39 ++++++++ epochStart/errors.go | 3 + epochStart/metachain/baseRewards.go | 2 +- epochStart/metachain/systemSCs_test.go | 6 +- epochStart/metachain/validators.go | 95 ++++++++++++++----- epochStart/metachain/validators_test.go | 26 ++--- factory/blockProcessorCreator.go | 12 ++- .../mock/epochValidatorInfoCreatorStub.go | 28 +++--- integrationTests/testProcessorNode.go | 12 ++- .../vm/delegation/delegationScenarios_test.go | 2 +- process/block/metablock.go | 2 +- process/interface.go | 4 +- process/mock/epochValidatorInfoCreatorStub.go | 28 +++--- testscommon/dataRetriever/poolFactory.go | 4 +- testscommon/dataRetriever/poolsHolderMock.go | 31 +++--- testscommon/dataRetriever/poolsHolderStub.go | 34 ++++--- 25 files changed, 423 insertions(+), 175 deletions(-) rename dataRetriever/dataPool/{currentBlockPool.go => currentBlockTransactionPool.go} (80%) rename dataRetriever/dataPool/{currentBlockPool_test.go => currentBlockTransactionPool_test.go} (94%) create mode 100644 dataRetriever/dataPool/currentBlockValidatorInfoPool.go create 
mode 100644 dataRetriever/dataPool/currentBlockValidatorInfoPool_test.go create mode 100644 dataRetriever/mock/validatorInfoForCurrentBlockMock.go diff --git a/dataRetriever/dataPool/currentBlockPool.go b/dataRetriever/dataPool/currentBlockTransactionPool.go similarity index 80% rename from dataRetriever/dataPool/currentBlockPool.go rename to dataRetriever/dataPool/currentBlockTransactionPool.go index f8f3ac2c4ef..5222e88ca3b 100644 --- a/dataRetriever/dataPool/currentBlockPool.go +++ b/dataRetriever/dataPool/currentBlockTransactionPool.go @@ -15,22 +15,22 @@ type transactionMapCacher struct { txsForBlock map[string]data.TransactionHandler } -// NewCurrentBlockPool returns a new pool to be used for current block -func NewCurrentBlockPool() *transactionMapCacher { +// NewCurrentBlockTransactionPool returns a new transaction pool to be used for the current block +func NewCurrentBlockTransactionPool() *transactionMapCacher { return &transactionMapCacher{ mutTxs: sync.RWMutex{}, txsForBlock: make(map[string]data.TransactionHandler), } } -// Clean creates a new pool +// Clean creates a new transaction pool func (tmc *transactionMapCacher) Clean() { tmc.mutTxs.Lock() tmc.txsForBlock = make(map[string]data.TransactionHandler) tmc.mutTxs.Unlock() } -// GetTx returns the element saved for the hash +// GetTx gets the transaction for the given hash func (tmc *transactionMapCacher) GetTx(txHash []byte) (data.TransactionHandler, error) { tmc.mutTxs.RLock() defer tmc.mutTxs.RUnlock() @@ -43,7 +43,7 @@ func (tmc *transactionMapCacher) GetTx(txHash []byte) (data.TransactionHandler, return tx, nil } -// AddTx writes the tx to the map +// AddTx adds the transaction for the given hash func (tmc *transactionMapCacher) AddTx(txHash []byte, tx data.TransactionHandler) { if check.IfNil(tx) { return diff --git a/dataRetriever/dataPool/currentBlockPool_test.go b/dataRetriever/dataPool/currentBlockTransactionPool_test.go similarity index 94% rename from 
dataRetriever/dataPool/currentBlockPool_test.go rename to dataRetriever/dataPool/currentBlockTransactionPool_test.go index 08d3ab82a15..720e4d547e4 100644 --- a/dataRetriever/dataPool/currentBlockPool_test.go +++ b/dataRetriever/dataPool/currentBlockTransactionPool_test.go @@ -13,7 +13,7 @@ func TestCurrentBlockPool_AddGetCleanTx(t *testing.T) { txHash := []byte("hash") tx := &transaction.Transaction{} - currentBlockPool := NewCurrentBlockPool() + currentBlockPool := NewCurrentBlockTransactionPool() require.False(t, currentBlockPool.IsInterfaceNil()) currentBlockPool.AddTx(txHash, tx) @@ -31,5 +31,4 @@ func TestCurrentBlockPool_AddGetCleanTx(t *testing.T) { txFromPool, err = currentBlockPool.GetTx(txHash) require.Nil(t, txFromPool) require.Equal(t, dataRetriever.ErrTxNotFoundInBlockPool, err) - } diff --git a/dataRetriever/dataPool/currentBlockValidatorInfoPool.go b/dataRetriever/dataPool/currentBlockValidatorInfoPool.go new file mode 100644 index 00000000000..bdae2de508c --- /dev/null +++ b/dataRetriever/dataPool/currentBlockValidatorInfoPool.go @@ -0,0 +1,60 @@ +package dataPool + +import ( + "github.com/ElrondNetwork/elrond-go/state" + "sync" + + "github.com/ElrondNetwork/elrond-go-core/core/check" + "github.com/ElrondNetwork/elrond-go/dataRetriever" +) + +var _ dataRetriever.ValidatorInfoCacher = (*validatorInfoMapCacher)(nil) + +type validatorInfoMapCacher struct { + mutValidatorInfo sync.RWMutex + validatorInfoForBlock map[string]*state.ShardValidatorInfo +} + +// NewCurrentBlockValidatorInfoPool returns a new validator info pool to be used for the current block +func NewCurrentBlockValidatorInfoPool() *validatorInfoMapCacher { + return &validatorInfoMapCacher{ + mutValidatorInfo: sync.RWMutex{}, + validatorInfoForBlock: make(map[string]*state.ShardValidatorInfo), + } +} + +// Clean creates a new validator info pool +func (vimc *validatorInfoMapCacher) Clean() { + vimc.mutValidatorInfo.Lock() + vimc.validatorInfoForBlock = 
make(map[string]*state.ShardValidatorInfo) + vimc.mutValidatorInfo.Unlock() +} + +// GetValidatorInfo gets the validator info for the given hash +func (vimc *validatorInfoMapCacher) GetValidatorInfo(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + vimc.mutValidatorInfo.RLock() + defer vimc.mutValidatorInfo.RUnlock() + + validatorInfo, ok := vimc.validatorInfoForBlock[string(validatorInfoHash)] + if !ok { + return nil, dataRetriever.ErrValidatorInfoNotFoundInBlockPool + } + + return validatorInfo, nil +} + +// AddValidatorInfo adds the validator info for the given hash +func (vimc *validatorInfoMapCacher) AddValidatorInfo(validatorInfoHash []byte, validatorInfo *state.ShardValidatorInfo) { + if check.IfNil(validatorInfo) { + return + } + + vimc.mutValidatorInfo.Lock() + vimc.validatorInfoForBlock[string(validatorInfoHash)] = validatorInfo + vimc.mutValidatorInfo.Unlock() +} + +// IsInterfaceNil returns true if underlying object is nil +func (vimc *validatorInfoMapCacher) IsInterfaceNil() bool { + return vimc == nil +} diff --git a/dataRetriever/dataPool/currentBlockValidatorInfoPool_test.go b/dataRetriever/dataPool/currentBlockValidatorInfoPool_test.go new file mode 100644 index 00000000000..b6389753ea8 --- /dev/null +++ b/dataRetriever/dataPool/currentBlockValidatorInfoPool_test.go @@ -0,0 +1,33 @@ +package dataPool + +import ( + "github.com/ElrondNetwork/elrond-go/dataRetriever" + "github.com/ElrondNetwork/elrond-go/state" + "github.com/stretchr/testify/require" + "testing" +) + +func TestCurrentBlockValidatorInfoPool_AddGetCleanTx(t *testing.T) { + t.Parallel() + + validatorInfoHash := []byte("hash") + validatorInfo := &state.ValidatorInfo{} + currentValidatorInfoPool := NewCurrentBlockValidatorInfoPool() + require.False(t, currentValidatorInfoPool.IsInterfaceNil()) + + currentValidatorInfoPool.AddValidatorInfo(validatorInfoHash, validatorInfo) + currentValidatorInfoPool.AddValidatorInfo(validatorInfoHash, nil) + + validatorInfoFromPool, err := 
currentValidatorInfoPool.GetValidatorInfo([]byte("wrong hash")) + require.Nil(t, validatorInfoFromPool) + require.Equal(t, dataRetriever.ErrValidatorInfoNotFoundInBlockPool, err) + + validatorInfoFromPool, err = currentValidatorInfoPool.GetValidatorInfo(validatorInfoHash) + require.Nil(t, err) + require.Equal(t, validatorInfo, validatorInfoFromPool) + + currentValidatorInfoPool.Clean() + validatorInfoFromPool, err = currentValidatorInfoPool.GetValidatorInfo(validatorInfoHash) + require.Nil(t, validatorInfoFromPool) + require.Equal(t, dataRetriever.ErrValidatorInfoNotFoundInBlockPool, err) +} diff --git a/dataRetriever/dataPool/dataPool.go b/dataRetriever/dataPool/dataPool.go index c33e3a4dcce..16a25abb43b 100644 --- a/dataRetriever/dataPool/dataPool.go +++ b/dataRetriever/dataPool/dataPool.go @@ -9,32 +9,34 @@ import ( var _ dataRetriever.PoolsHolder = (*dataPool)(nil) type dataPool struct { - transactions dataRetriever.ShardedDataCacherNotifier - unsignedTransactions dataRetriever.ShardedDataCacherNotifier - rewardTransactions dataRetriever.ShardedDataCacherNotifier - headers dataRetriever.HeadersPool - miniBlocks storage.Cacher - peerChangesBlocks storage.Cacher - trieNodes storage.Cacher - trieNodesChunks storage.Cacher - currBlockTxs dataRetriever.TransactionCacher - smartContracts storage.Cacher - validatorsInfo storage.Cacher + transactions dataRetriever.ShardedDataCacherNotifier + unsignedTransactions dataRetriever.ShardedDataCacherNotifier + rewardTransactions dataRetriever.ShardedDataCacherNotifier + headers dataRetriever.HeadersPool + miniBlocks storage.Cacher + peerChangesBlocks storage.Cacher + trieNodes storage.Cacher + trieNodesChunks storage.Cacher + currBlockTxs dataRetriever.TransactionCacher + currBlockValidatorInfo dataRetriever.ValidatorInfoCacher + smartContracts storage.Cacher + validatorsInfo storage.Cacher } // DataPoolArgs represents the data pool's constructor structure type DataPoolArgs struct { - Transactions 
dataRetriever.ShardedDataCacherNotifier - UnsignedTransactions dataRetriever.ShardedDataCacherNotifier - RewardTransactions dataRetriever.ShardedDataCacherNotifier - Headers dataRetriever.HeadersPool - MiniBlocks storage.Cacher - PeerChangesBlocks storage.Cacher - TrieNodes storage.Cacher - TrieNodesChunks storage.Cacher - CurrentBlockTransactions dataRetriever.TransactionCacher - SmartContracts storage.Cacher - ValidatorsInfo storage.Cacher + Transactions dataRetriever.ShardedDataCacherNotifier + UnsignedTransactions dataRetriever.ShardedDataCacherNotifier + RewardTransactions dataRetriever.ShardedDataCacherNotifier + Headers dataRetriever.HeadersPool + MiniBlocks storage.Cacher + PeerChangesBlocks storage.Cacher + TrieNodes storage.Cacher + TrieNodesChunks storage.Cacher + CurrentBlockTransactions dataRetriever.TransactionCacher + CurrentBlockValidatorInfo dataRetriever.ValidatorInfoCacher + SmartContracts storage.Cacher + ValidatorsInfo storage.Cacher } // NewDataPool creates a data pools holder object @@ -60,6 +62,9 @@ func NewDataPool(args DataPoolArgs) (*dataPool, error) { if check.IfNil(args.CurrentBlockTransactions) { return nil, dataRetriever.ErrNilCurrBlockTxs } + if check.IfNil(args.CurrentBlockValidatorInfo) { + return nil, dataRetriever.ErrNilCurrBlockValidatorInfo + } if check.IfNil(args.TrieNodes) { return nil, dataRetriever.ErrNilTrieNodesPool } @@ -74,17 +79,18 @@ func NewDataPool(args DataPoolArgs) (*dataPool, error) { } return &dataPool{ - transactions: args.Transactions, - unsignedTransactions: args.UnsignedTransactions, - rewardTransactions: args.RewardTransactions, - headers: args.Headers, - miniBlocks: args.MiniBlocks, - peerChangesBlocks: args.PeerChangesBlocks, - trieNodes: args.TrieNodes, - trieNodesChunks: args.TrieNodesChunks, - currBlockTxs: args.CurrentBlockTransactions, - smartContracts: args.SmartContracts, - validatorsInfo: args.ValidatorsInfo, + transactions: args.Transactions, + unsignedTransactions: args.UnsignedTransactions, + 
rewardTransactions: args.RewardTransactions, + headers: args.Headers, + miniBlocks: args.MiniBlocks, + peerChangesBlocks: args.PeerChangesBlocks, + trieNodes: args.TrieNodes, + trieNodesChunks: args.TrieNodesChunks, + currBlockTxs: args.CurrentBlockTransactions, + currBlockValidatorInfo: args.CurrentBlockValidatorInfo, + smartContracts: args.SmartContracts, + validatorsInfo: args.ValidatorsInfo, }, nil } @@ -93,6 +99,11 @@ func (dp *dataPool) CurrentBlockTxs() dataRetriever.TransactionCacher { return dp.currBlockTxs } +// CurrentBlockValidatorInfo returns the holder for current block validator info +func (dp *dataPool) CurrentBlockValidatorInfo() dataRetriever.ValidatorInfoCacher { + return dp.currBlockValidatorInfo +} + // Transactions returns the holder for transactions func (dp *dataPool) Transactions() dataRetriever.ShardedDataCacherNotifier { return dp.transactions diff --git a/dataRetriever/dataPool/dataPool_test.go b/dataRetriever/dataPool/dataPool_test.go index 64e7a48f969..f27acfbd367 100644 --- a/dataRetriever/dataPool/dataPool_test.go +++ b/dataRetriever/dataPool/dataPool_test.go @@ -15,17 +15,18 @@ import ( func createMockDataPoolArgs() dataPool.DataPoolArgs { return dataPool.DataPoolArgs{ - Transactions: testscommon.NewShardedDataStub(), - UnsignedTransactions: testscommon.NewShardedDataStub(), - RewardTransactions: testscommon.NewShardedDataStub(), - Headers: &mock.HeadersCacherStub{}, - MiniBlocks: testscommon.NewCacherStub(), - PeerChangesBlocks: testscommon.NewCacherStub(), - TrieNodes: testscommon.NewCacherStub(), - TrieNodesChunks: testscommon.NewCacherStub(), - CurrentBlockTransactions: &mock.TxForCurrentBlockStub{}, - SmartContracts: testscommon.NewCacherStub(), - ValidatorsInfo: testscommon.NewCacherStub(), + Transactions: testscommon.NewShardedDataStub(), + UnsignedTransactions: testscommon.NewShardedDataStub(), + RewardTransactions: testscommon.NewShardedDataStub(), + Headers: &mock.HeadersCacherStub{}, + MiniBlocks: 
testscommon.NewCacherStub(), + PeerChangesBlocks: testscommon.NewCacherStub(), + TrieNodes: testscommon.NewCacherStub(), + TrieNodesChunks: testscommon.NewCacherStub(), + CurrentBlockTransactions: &mock.TxForCurrentBlockStub{}, + CurrentBlockValidatorInfo: &mock.ValidatorInfoForCurrentBlockStub{}, + SmartContracts: testscommon.NewCacherStub(), + ValidatorsInfo: testscommon.NewCacherStub(), } } @@ -139,7 +140,8 @@ func TestNewDataPool_NilPeerBlocksShouldErr(t *testing.T) { assert.Nil(t, tdp) } -func TestNewDataPool_NilCurrBlockShouldErr(t *testing.T) { +func TestNewDataPool_NilCurrBlockTransactionsShouldErr(t *testing.T) { + t.Parallel() args := createMockDataPoolArgs() args.CurrentBlockTransactions = nil @@ -149,19 +151,31 @@ func TestNewDataPool_NilCurrBlockShouldErr(t *testing.T) { require.Equal(t, dataRetriever.ErrNilCurrBlockTxs, err) } +func TestNewDataPool_NilCurrBlockValidatorInfoShouldErr(t *testing.T) { + t.Parallel() + + args := createMockDataPoolArgs() + args.CurrentBlockValidatorInfo = nil + tdp, err := dataPool.NewDataPool(args) + + require.Nil(t, tdp) + require.Equal(t, dataRetriever.ErrNilCurrBlockValidatorInfo, err) +} + func TestNewDataPool_OkValsShouldWork(t *testing.T) { args := dataPool.DataPoolArgs{ - Transactions: testscommon.NewShardedDataStub(), - UnsignedTransactions: testscommon.NewShardedDataStub(), - RewardTransactions: testscommon.NewShardedDataStub(), - Headers: &mock.HeadersCacherStub{}, - MiniBlocks: testscommon.NewCacherStub(), - PeerChangesBlocks: testscommon.NewCacherStub(), - TrieNodes: testscommon.NewCacherStub(), - TrieNodesChunks: testscommon.NewCacherStub(), - CurrentBlockTransactions: &mock.TxForCurrentBlockStub{}, - SmartContracts: testscommon.NewCacherStub(), - ValidatorsInfo: testscommon.NewCacherStub(), + Transactions: testscommon.NewShardedDataStub(), + UnsignedTransactions: testscommon.NewShardedDataStub(), + RewardTransactions: testscommon.NewShardedDataStub(), + Headers: &mock.HeadersCacherStub{}, + MiniBlocks: 
testscommon.NewCacherStub(), + PeerChangesBlocks: testscommon.NewCacherStub(), + TrieNodes: testscommon.NewCacherStub(), + TrieNodesChunks: testscommon.NewCacherStub(), + CurrentBlockTransactions: &mock.TxForCurrentBlockStub{}, + CurrentBlockValidatorInfo: &mock.ValidatorInfoForCurrentBlockStub{}, + SmartContracts: testscommon.NewCacherStub(), + ValidatorsInfo: testscommon.NewCacherStub(), } tdp, err := dataPool.NewDataPool(args) @@ -176,6 +190,7 @@ func TestNewDataPool_OkValsShouldWork(t *testing.T) { assert.True(t, args.MiniBlocks == tdp.MiniBlocks()) assert.True(t, args.PeerChangesBlocks == tdp.PeerChangesBlocks()) assert.True(t, args.CurrentBlockTransactions == tdp.CurrentBlockTxs()) + assert.True(t, args.CurrentBlockValidatorInfo == tdp.CurrentBlockValidatorInfo()) assert.True(t, args.TrieNodes == tdp.TrieNodes()) assert.True(t, args.TrieNodesChunks == tdp.TrieNodesChunks()) assert.True(t, args.SmartContracts == tdp.SmartContracts()) diff --git a/dataRetriever/errors.go b/dataRetriever/errors.go index f66f33b24d7..f9c9e92e378 100644 --- a/dataRetriever/errors.go +++ b/dataRetriever/errors.go @@ -13,8 +13,11 @@ var ErrSendRequest = errors.New("cannot send request: peer list is empty or erro // ErrNilValue signals the value is nil var ErrNilValue = errors.New("nil value") -// ErrTxNotFoundInBlockPool signals the value is nil -var ErrTxNotFoundInBlockPool = errors.New("cannot find tx in current block pool") +// ErrTxNotFoundInBlockPool signals that transaction was not found in the current block pool +var ErrTxNotFoundInBlockPool = errors.New("transaction was not found in the current block pool") + +// ErrValidatorInfoNotFoundInBlockPool signals that validator info was not found in the current block pool +var ErrValidatorInfoNotFoundInBlockPool = errors.New("validator info was not found in the current block pool") // ErrNilMarshalizer signals that an operation has been attempted to or with a nil Marshalizer implementation var ErrNilMarshalizer = errors.New("nil 
Marshalizer") @@ -146,9 +149,12 @@ var ErrNilPeersRatingHandler = errors.New("nil peers rating handler") // ErrNilTrieDataGetter signals that a nil trie data getter has been provided var ErrNilTrieDataGetter = errors.New("nil trie data getter provided") -// ErrNilCurrBlockTxs signals that nil current blocks txs holder was provided +// ErrNilCurrBlockTxs signals that nil current block txs holder was provided var ErrNilCurrBlockTxs = errors.New("nil current block txs holder") +// ErrNilCurrBlockValidatorInfo signals that nil current block validator info holder was provided +var ErrNilCurrBlockValidatorInfo = errors.New("nil current block validator info holder") + // ErrNilRequestedItemsHandler signals that a nil requested items handler was provided var ErrNilRequestedItemsHandler = errors.New("nil requested items handler") diff --git a/dataRetriever/factory/dataPoolFactory.go b/dataRetriever/factory/dataPoolFactory.go index 01ff3b172b5..5d6fe671700 100644 --- a/dataRetriever/factory/dataPoolFactory.go +++ b/dataRetriever/factory/dataPoolFactory.go @@ -130,7 +130,7 @@ func NewDataPoolFromConfig(args ArgsDataPool) (dataRetriever.PoolsHolder, error) return nil, fmt.Errorf("%w while creating the cache for the validator info results", err) } - currBlockTxs := dataPool.NewCurrentBlockPool() + currBlockTxs := dataPool.NewCurrentBlockTransactionPool() dataPoolArgs := dataPool.DataPoolArgs{ Transactions: txPool, UnsignedTransactions: uTxPool, diff --git a/dataRetriever/interface.go b/dataRetriever/interface.go index f5f092f6f65..5c245715c91 100644 --- a/dataRetriever/interface.go +++ b/dataRetriever/interface.go @@ -8,6 +8,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/core/counting" "github.com/ElrondNetwork/elrond-go-core/data" "github.com/ElrondNetwork/elrond-go/p2p" + "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/storage" ) @@ -301,7 +302,7 @@ type HeadersPool interface { GetNumHeaders(shardId uint32) int } -// TransactionCacher 
defines the methods for the local cacher, info for current round +// TransactionCacher defines the methods for the local transaction cacher, needed for the current block type TransactionCacher interface { Clean() GetTx(txHash []byte) (data.TransactionHandler, error) @@ -309,6 +310,14 @@ type TransactionCacher interface { IsInterfaceNil() bool } +// ValidatorInfoCacher defines the methods for the local validator info cacher, needed for the current block +type ValidatorInfoCacher interface { + Clean() + GetValidatorInfo(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) + AddValidatorInfo(validatorInfoHash []byte, validatorInfo *state.ShardValidatorInfo) + IsInterfaceNil() bool +} + // PoolsHolder defines getters for data pools type PoolsHolder interface { Transactions() ShardedDataCacherNotifier @@ -321,6 +330,7 @@ type PoolsHolder interface { TrieNodesChunks() storage.Cacher SmartContracts() storage.Cacher CurrentBlockTxs() TransactionCacher + CurrentBlockValidatorInfo() ValidatorInfoCacher ValidatorsInfo() storage.Cacher IsInterfaceNil() bool } diff --git a/dataRetriever/mock/validatorInfoForCurrentBlockMock.go b/dataRetriever/mock/validatorInfoForCurrentBlockMock.go new file mode 100644 index 00000000000..3162cc1d612 --- /dev/null +++ b/dataRetriever/mock/validatorInfoForCurrentBlockMock.go @@ -0,0 +1,39 @@ +package mock + +import ( + "github.com/ElrondNetwork/elrond-go/state" +) + +// ValidatorInfoForCurrentBlockStub - +type ValidatorInfoForCurrentBlockStub struct { + CleanCalled func() + GetValidatorInfoCalled func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) + AddValidatorInfoCalled func(validatorInfoHash []byte, validatorInfo *state.ShardValidatorInfo) +} + +// Clean - +func (t *ValidatorInfoForCurrentBlockStub) Clean() { + if t.CleanCalled != nil { + t.CleanCalled() + } +} + +// GetValidatorInfo - +func (v *ValidatorInfoForCurrentBlockStub) GetValidatorInfo(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + if 
v.GetValidatorInfoCalled != nil { + return v.GetValidatorInfoCalled(validatorInfoHash) + } + return nil, nil +} + +// AddValidatorInfo - +func (v *ValidatorInfoForCurrentBlockStub) AddValidatorInfo(validatorInfoHash []byte, validatorInfo *state.ShardValidatorInfo) { + if v.AddValidatorInfoCalled != nil { + v.AddValidatorInfoCalled(validatorInfoHash, validatorInfo) + } +} + +// IsInterfaceNil - +func (v *ValidatorInfoForCurrentBlockStub) IsInterfaceNil() bool { + return v == nil +} diff --git a/epochStart/errors.go b/epochStart/errors.go index 9836200173d..122fee8c8cf 100644 --- a/epochStart/errors.go +++ b/epochStart/errors.go @@ -331,3 +331,6 @@ var ErrNilScheduledTxsHandler = errors.New("nil scheduled transactions handler") // ErrNilScheduledDataSyncerFactory signals that a nil scheduled data syncer factory was provided var ErrNilScheduledDataSyncerFactory = errors.New("nil scheduled data syncer factory") + +// ErrNilValidatorInfoStorage signals that nil validator info storage has been provided +var ErrNilValidatorInfoStorage = errors.New("nil validator info storage") diff --git a/epochStart/metachain/baseRewards.go b/epochStart/metachain/baseRewards.go index 8a152a9bd25..55e24d4b607 100644 --- a/epochStart/metachain/baseRewards.go +++ b/epochStart/metachain/baseRewards.go @@ -81,7 +81,7 @@ func NewBaseRewardsCreator(args BaseRewardsCreatorArgs) (*baseRewardsCreator, er return nil, epochStart.ErrProtocolSustainabilityAddressInMetachain } - currTxsCache := dataPool.NewCurrentBlockPool() + currTxsCache := dataPool.NewCurrentBlockTransactionPool() brc := &baseRewardsCreator{ currTxs: currTxsCache, shardCoordinator: args.ShardCoordinator, diff --git a/epochStart/metachain/systemSCs_test.go b/epochStart/metachain/systemSCs_test.go index 3ff9bee95dc..868e24d0b8e 100644 --- a/epochStart/metachain/systemSCs_test.go +++ b/epochStart/metachain/systemSCs_test.go @@ -1115,7 +1115,7 @@ func TestSystemSCProcessor_ProcessDelegationRewardsNothingToExecute(t *testing.T args, _ := 
createFullArgumentsForSystemSCProcessing(1000, createMemUnit()) s, _ := NewSystemSCProcessor(args) - localCache := dataPool.NewCurrentBlockPool() + localCache := dataPool.NewCurrentBlockTransactionPool() miniBlocks := []*block.MiniBlock{ { SenderShardID: 0, @@ -1134,7 +1134,7 @@ func TestSystemSCProcessor_ProcessDelegationRewardsErrors(t *testing.T) { args, _ := createFullArgumentsForSystemSCProcessing(1000, createMemUnit()) s, _ := NewSystemSCProcessor(args) - localCache := dataPool.NewCurrentBlockPool() + localCache := dataPool.NewCurrentBlockTransactionPool() miniBlocks := []*block.MiniBlock{ { SenderShardID: core.MetachainShardId, @@ -1179,7 +1179,7 @@ func TestSystemSCProcessor_ProcessDelegationRewards(t *testing.T) { args, scContainer := createFullArgumentsForSystemSCProcessing(1000, createMemUnit()) s, _ := NewSystemSCProcessor(args) - localCache := dataPool.NewCurrentBlockPool() + localCache := dataPool.NewCurrentBlockTransactionPool() miniBlocks := []*block.MiniBlock{ { SenderShardID: core.MetachainShardId, diff --git a/epochStart/metachain/validators.go b/epochStart/metachain/validators.go index 9f5a7f71e54..3c446b94136 100644 --- a/epochStart/metachain/validators.go +++ b/epochStart/metachain/validators.go @@ -3,6 +3,7 @@ package metachain import ( "bytes" "sort" + "sync" "github.com/ElrondNetwork/elrond-go-core/core" "github.com/ElrondNetwork/elrond-go-core/core/check" @@ -11,6 +12,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/hashing" "github.com/ElrondNetwork/elrond-go-core/marshal" "github.com/ElrondNetwork/elrond-go/dataRetriever" + "github.com/ElrondNetwork/elrond-go/dataRetriever/dataPool" "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/process" "github.com/ElrondNetwork/elrond-go/sharding" @@ -22,19 +24,23 @@ var _ process.EpochStartValidatorInfoCreator = (*validatorInfoCreator)(nil) // ArgsNewValidatorInfoCreator defines the arguments structure needed to create a new validatorInfo creator type 
ArgsNewValidatorInfoCreator struct { - ShardCoordinator sharding.Coordinator - MiniBlockStorage storage.Storer - Hasher hashing.Hasher - Marshalizer marshal.Marshalizer - DataPool dataRetriever.PoolsHolder + ShardCoordinator sharding.Coordinator + ValidatorInfoStorage storage.Storer + MiniBlockStorage storage.Storer + Hasher hashing.Hasher + Marshalizer marshal.Marshalizer + DataPool dataRetriever.PoolsHolder } type validatorInfoCreator struct { - shardCoordinator sharding.Coordinator - miniBlockStorage storage.Storer - hasher hashing.Hasher - marshalizer marshal.Marshalizer - dataPool dataRetriever.PoolsHolder + currValidatorInfo dataRetriever.ValidatorInfoCacher + shardCoordinator sharding.Coordinator + validatorInfoStorage storage.Storer + miniBlockStorage storage.Storer + hasher hashing.Hasher + marshalizer marshal.Marshalizer + dataPool dataRetriever.PoolsHolder + mutValidatorInfo sync.Mutex } // NewValidatorInfoCreator creates a new validatorInfo creator object @@ -48,6 +54,9 @@ func NewValidatorInfoCreator(args ArgsNewValidatorInfoCreator) (*validatorInfoCr if check.IfNil(args.Hasher) { return nil, epochStart.ErrNilHasher } + if check.IfNil(args.ValidatorInfoStorage) { + return nil, epochStart.ErrNilValidatorInfoStorage + } if check.IfNil(args.MiniBlockStorage) { return nil, epochStart.ErrNilStorage } @@ -55,12 +64,15 @@ func NewValidatorInfoCreator(args ArgsNewValidatorInfoCreator) (*validatorInfoCr return nil, epochStart.ErrNilDataPoolsHolder } + currValidatorInfoCache := dataPool.NewCurrentBlockValidatorInfoPool() vic := &validatorInfoCreator{ - shardCoordinator: args.ShardCoordinator, - hasher: args.Hasher, - marshalizer: args.Marshalizer, - miniBlockStorage: args.MiniBlockStorage, - dataPool: args.DataPool, + currValidatorInfo: currValidatorInfoCache, + shardCoordinator: args.ShardCoordinator, + hasher: args.Hasher, + marshalizer: args.Marshalizer, + validatorInfoStorage: args.ValidatorInfoStorage, + miniBlockStorage: args.MiniBlockStorage, + dataPool: 
args.DataPool, } return vic, nil @@ -72,6 +84,11 @@ func (vic *validatorInfoCreator) CreateValidatorInfoMiniBlocks(validatorsInfo ma return nil, epochStart.ErrNilValidatorInfo } + vic.mutValidatorInfo.Lock() + defer vic.mutValidatorInfo.Unlock() + + vic.clean() + miniblocks := make([]*block.MiniBlock, 0) for shardId := uint32(0); shardId < vic.shardCoordinator.NumberOfShards(); shardId++ { @@ -103,6 +120,10 @@ func (vic *validatorInfoCreator) CreateValidatorInfoMiniBlocks(validatorsInfo ma return miniblocks, nil } +func (vic *validatorInfoCreator) clean() { + vic.currValidatorInfo.Clean() +} + func (vic *validatorInfoCreator) createMiniBlock(validatorsInfo []*state.ValidatorInfo) (*block.MiniBlock, error) { miniBlock := &block.MiniBlock{} miniBlock.SenderShardID = vic.shardCoordinator.SelfId() @@ -194,33 +215,61 @@ func (vic *validatorInfoCreator) VerifyValidatorInfoMiniBlocks( return nil } -// SaveValidatorInfoBlocksToStorage saves created data to storage -func (vic *validatorInfoCreator) SaveValidatorInfoBlocksToStorage(_ data.HeaderHandler, body *block.Body) { +// SaveValidatorInfoBlockDataToStorage saves created data to storage +func (vic *validatorInfoCreator) SaveValidatorInfoBlockDataToStorage(_ data.HeaderHandler, body *block.Body) { if check.IfNil(body) { return } + var validatorInfo *state.ShardValidatorInfo + var marshalledData []byte + var err error + for _, miniBlock := range body.MiniBlocks { if miniBlock.Type != block.PeerBlock { continue } - marshalizedData, err := vic.marshalizer.Marshal(miniBlock) + for _, validatorInfoHash := range miniBlock.TxHashes { + validatorInfo, err = vic.currValidatorInfo.GetValidatorInfo(validatorInfoHash) + if err != nil { + continue + } + + marshalledData, err = vic.marshalizer.Marshal(validatorInfo) + if err != nil { + continue + } + + _ = vic.validatorInfoStorage.Put(validatorInfoHash, marshalledData) + } + + marshalledData, err = vic.marshalizer.Marshal(miniBlock) if err != nil { continue } - mbHash := 
vic.hasher.Compute(string(marshalizedData)) - _ = vic.miniBlockStorage.Put(mbHash, marshalizedData) + mbHash := vic.hasher.Compute(string(marshalledData)) + _ = vic.miniBlockStorage.Put(mbHash, marshalledData) } } -// DeleteValidatorInfoBlocksFromStorage deletes data from storage -func (vic *validatorInfoCreator) DeleteValidatorInfoBlocksFromStorage(metaBlock data.HeaderHandler) { - if check.IfNil(metaBlock) { +// DeleteValidatorInfoBlockDataFromStorage deletes data from storage +func (vic *validatorInfoCreator) DeleteValidatorInfoBlockDataFromStorage(metaBlock data.HeaderHandler, body *block.Body) { + if check.IfNil(metaBlock) || check.IfNil(body) { return } + for _, miniBlock := range body.MiniBlocks { + if miniBlock.Type != block.PeerBlock { + continue + } + + for _, txHash := range miniBlock.TxHashes { + _ = vic.validatorInfoStorage.Remove(txHash) + } + } + for _, mbHeader := range metaBlock.GetMiniBlockHeaderHandlers() { if mbHeader.GetTypeInt32() == int32(block.PeerBlock) { _ = vic.miniBlockStorage.Remove(mbHeader.GetHash()) diff --git a/epochStart/metachain/validators_test.go b/epochStart/metachain/validators_test.go index c65c0a2ecbb..cc85eb4fb1d 100644 --- a/epochStart/metachain/validators_test.go +++ b/epochStart/metachain/validators_test.go @@ -112,10 +112,11 @@ func createMockEpochValidatorInfoCreatorsArguments() ArgsNewValidatorInfoCreator _ = shardCoordinator.SetSelfId(core.MetachainShardId) argsNewEpochEconomics := ArgsNewValidatorInfoCreator{ - ShardCoordinator: shardCoordinator, - MiniBlockStorage: createMemUnit(), - Hasher: &hashingMocks.HasherMock{}, - Marshalizer: &mock.MarshalizerMock{}, + ShardCoordinator: shardCoordinator, + ValidatorInfoStorage: createMemUnit(), + MiniBlockStorage: createMemUnit(), + Hasher: &hashingMocks.HasherMock{}, + Marshalizer: &mock.MarshalizerMock{}, DataPool: &dataRetrieverMock.PoolsHolderStub{ MiniBlocksCalled: func() storage.Cacher { return &testscommon.CacherStub{ @@ -377,7 +378,7 @@ func 
createValidatorInfoMiniBlocks( return miniblocks } -func TestEpochValidatorInfoCreator_SaveValidatorInfoBlocksToStorage(t *testing.T) { +func TestEpochValidatorInfoCreator_SaveValidatorInfoBlockDataToStorage(t *testing.T) { validatorInfo := createMockValidatorInfo() arguments := createMockEpochValidatorInfoCreatorsArguments() arguments.MiniBlockStorage = mock.NewStorerMock() @@ -427,7 +428,7 @@ func TestEpochValidatorInfoCreator_SaveValidatorInfoBlocksToStorage(t *testing.T } body := &block.Body{MiniBlocks: miniblocks} - vic.SaveValidatorInfoBlocksToStorage(meta, body) + vic.SaveValidatorInfoBlockDataToStorage(meta, body) for i, mbHeader := range meta.MiniBlockHeaders { mb, err := miniBlockStorage.Get(mbHeader.Hash) @@ -440,15 +441,15 @@ func TestEpochValidatorInfoCreator_SaveValidatorInfoBlocksToStorage(t *testing.T } } -func TestEpochValidatorInfoCreator_DeleteValidatorInfoBlocksFromStorage(t *testing.T) { - testDeleteValidatorInfoBlock(t, block.PeerBlock, false) +func TestEpochValidatorInfoCreator_DeleteValidatorInfoBlockDataFromStorage(t *testing.T) { + testDeleteValidatorInfoBlockData(t, block.PeerBlock, false) } -func TestEpochValidatorInfoCreator_DeleteValidatorInfoBlocksFromStorageDoesDeleteOnlyPeerBlocks(t *testing.T) { - testDeleteValidatorInfoBlock(t, block.TxBlock, true) +func TestEpochValidatorInfoCreator_DeleteValidatorInfoBlockDataFromStorageDoesDeleteOnlyPeerBlocks(t *testing.T) { + testDeleteValidatorInfoBlockData(t, block.TxBlock, true) } -func testDeleteValidatorInfoBlock(t *testing.T, blockType block.Type, shouldExist bool) { +func testDeleteValidatorInfoBlockData(t *testing.T, blockType block.Type, shouldExist bool) { validatorInfo := createMockValidatorInfo() arguments := createMockEpochValidatorInfoCreatorsArguments() arguments.MiniBlockStorage = mock.NewStorerMock() @@ -504,7 +505,8 @@ func testDeleteValidatorInfoBlock(t *testing.T, blockType block.Type, shouldExis require.Nil(t, err) } - vic.DeleteValidatorInfoBlocksFromStorage(meta) + body 
:= &block.Body{} + vic.DeleteValidatorInfoBlockDataFromStorage(meta, body) for _, mbHeader := range meta.MiniBlockHeaders { mb, err := mbStorage.Get(mbHeader.Hash) diff --git a/factory/blockProcessorCreator.go b/factory/blockProcessorCreator.go index 797b45451f6..6be127ca50a 100644 --- a/factory/blockProcessorCreator.go +++ b/factory/blockProcessorCreator.go @@ -752,12 +752,14 @@ func (pcf *processComponentsFactory) newMetaBlockProcessor( return nil, err } + validatorInfoStorage := pcf.data.StorageService().GetStorer(dataRetriever.UnsignedTransactionUnit) argsEpochValidatorInfo := metachainEpochStart.ArgsNewValidatorInfoCreator{ - ShardCoordinator: pcf.bootstrapComponents.ShardCoordinator(), - MiniBlockStorage: miniBlockStorage, - Hasher: pcf.coreData.Hasher(), - Marshalizer: pcf.coreData.InternalMarshalizer(), - DataPool: pcf.data.Datapool(), + ShardCoordinator: pcf.bootstrapComponents.ShardCoordinator(), + ValidatorInfoStorage: validatorInfoStorage, + MiniBlockStorage: miniBlockStorage, + Hasher: pcf.coreData.Hasher(), + Marshalizer: pcf.coreData.InternalMarshalizer(), + DataPool: pcf.data.Datapool(), } validatorInfoCreator, err := metachainEpochStart.NewValidatorInfoCreator(argsEpochValidatorInfo) if err != nil { diff --git a/integrationTests/mock/epochValidatorInfoCreatorStub.go b/integrationTests/mock/epochValidatorInfoCreatorStub.go index 3533131a117..61d45987048 100644 --- a/integrationTests/mock/epochValidatorInfoCreatorStub.go +++ b/integrationTests/mock/epochValidatorInfoCreatorStub.go @@ -8,12 +8,12 @@ import ( // EpochValidatorInfoCreatorStub - type EpochValidatorInfoCreatorStub struct { - CreateValidatorInfoMiniBlocksCalled func(validatorsInfo map[uint32][]*state.ValidatorInfo) (block.MiniBlockSlice, error) - VerifyValidatorInfoMiniBlocksCalled func(miniblocks []*block.MiniBlock, validatorsInfo map[uint32][]*state.ValidatorInfo) error - CreateMarshalizedDataCalled func(body block.Body) map[string][][]byte - SaveTxBlockToStorageCalled func(metaBlock 
data.HeaderHandler, body *block.Body) - DeleteTxsFromStorageCalled func(metaBlock data.HeaderHandler) - RemoveBlockDataFromPoolsCalled func(metaBlock data.HeaderHandler, body *block.Body) + CreateValidatorInfoMiniBlocksCalled func(validatorsInfo map[uint32][]*state.ValidatorInfo) (block.MiniBlockSlice, error) + VerifyValidatorInfoMiniBlocksCalled func(miniblocks []*block.MiniBlock, validatorsInfo map[uint32][]*state.ValidatorInfo) error + CreateMarshalizedDataCalled func(body block.Body) map[string][][]byte + SaveValidatorInfoBlockDataToStorageCalled func(metaBlock data.HeaderHandler, body *block.Body) + DeleteValidatorInfoBlockDataFromStorageCalled func(metaBlock data.HeaderHandler, body *block.Body) + RemoveBlockDataFromPoolsCalled func(metaBlock data.HeaderHandler, body *block.Body) } // CreateValidatorInfoMiniBlocks - @@ -32,17 +32,17 @@ func (e *EpochValidatorInfoCreatorStub) VerifyValidatorInfoMiniBlocks(miniblocks return nil } -// SaveValidatorInfoBlocksToStorage - -func (e *EpochValidatorInfoCreatorStub) SaveValidatorInfoBlocksToStorage(metaBlock data.HeaderHandler, body *block.Body) { - if e.SaveTxBlockToStorageCalled != nil { - e.SaveTxBlockToStorageCalled(metaBlock, body) +// SaveValidatorInfoBlockDataToStorage - +func (e *EpochValidatorInfoCreatorStub) SaveValidatorInfoBlockDataToStorage(metaBlock data.HeaderHandler, body *block.Body) { + if e.SaveValidatorInfoBlockDataToStorageCalled != nil { + e.SaveValidatorInfoBlockDataToStorageCalled(metaBlock, body) } } -// DeleteValidatorInfoBlocksFromStorage - -func (e *EpochValidatorInfoCreatorStub) DeleteValidatorInfoBlocksFromStorage(metaBlock data.HeaderHandler) { - if e.DeleteTxsFromStorageCalled != nil { - e.DeleteTxsFromStorageCalled(metaBlock) +// DeleteValidatorInfoBlockDataFromStorage - +func (e *EpochValidatorInfoCreatorStub) DeleteValidatorInfoBlockDataFromStorage(metaBlock data.HeaderHandler, body *block.Body) { + if e.DeleteValidatorInfoBlockDataFromStorageCalled != nil { + 
e.DeleteValidatorInfoBlockDataFromStorageCalled(metaBlock, body) } } diff --git a/integrationTests/testProcessorNode.go b/integrationTests/testProcessorNode.go index bf1b72c3290..f84c2c12ede 100644 --- a/integrationTests/testProcessorNode.go +++ b/integrationTests/testProcessorNode.go @@ -2127,12 +2127,14 @@ func (tpn *TestProcessorNode) initBlockProcessor(stateCheckpointModulus uint) { } epochStartRewards, _ := metachain.NewRewardsCreatorProxy(argsEpochRewards) + validatorInfoStorage := tpn.Storage.GetStorer(dataRetriever.UnsignedTransactionUnit) argsEpochValidatorInfo := metachain.ArgsNewValidatorInfoCreator{ - ShardCoordinator: tpn.ShardCoordinator, - MiniBlockStorage: miniBlockStorage, - Hasher: TestHasher, - Marshalizer: TestMarshalizer, - DataPool: tpn.DataPool, + ShardCoordinator: tpn.ShardCoordinator, + ValidatorInfoStorage: validatorInfoStorage, + MiniBlockStorage: miniBlockStorage, + Hasher: TestHasher, + Marshalizer: TestMarshalizer, + DataPool: tpn.DataPool, } epochStartValidatorInfo, _ := metachain.NewValidatorInfoCreator(argsEpochValidatorInfo) diff --git a/integrationTests/vm/delegation/delegationScenarios_test.go b/integrationTests/vm/delegation/delegationScenarios_test.go index c2e44f77740..d259bd2284a 100644 --- a/integrationTests/vm/delegation/delegationScenarios_test.go +++ b/integrationTests/vm/delegation/delegationScenarios_test.go @@ -1133,7 +1133,7 @@ func addRewardsToDelegation(tpn *integrationTests.TestProcessorNode, recvAddr [] }, } - txCacher := dataPool.NewCurrentBlockPool() + txCacher := dataPool.NewCurrentBlockTransactionPool() txCacher.AddTx(rewardTxHash, tx) _ = tpn.EpochStartSystemSCProcessor.ProcessDelegationRewards(mbSlice, txCacher) diff --git a/process/block/metablock.go b/process/block/metablock.go index 4a0dc0deef1..b9237aed284 100644 --- a/process/block/metablock.go +++ b/process/block/metablock.go @@ -1580,7 +1580,7 @@ func (mp *metaProcessor) commitEpochStart(header *block.MetaBlock, body *block.B if 
header.IsStartOfEpochBlock() { mp.epochStartTrigger.SetProcessed(header, body) go mp.epochRewardsCreator.SaveTxBlockToStorage(header, body) - go mp.validatorInfoCreator.SaveValidatorInfoBlocksToStorage(header, body) + go mp.validatorInfoCreator.SaveValidatorInfoBlockDataToStorage(header, body) } else { currentHeader := mp.blockChain.GetCurrentBlockHeader() if !check.IfNil(currentHeader) && currentHeader.IsStartOfEpochBlock() { diff --git a/process/interface.go b/process/interface.go index bd88b1ed89c..bfe4cbc9966 100644 --- a/process/interface.go +++ b/process/interface.go @@ -903,8 +903,8 @@ type RewardsCreator interface { type EpochStartValidatorInfoCreator interface { CreateValidatorInfoMiniBlocks(validatorInfo map[uint32][]*state.ValidatorInfo) (block.MiniBlockSlice, error) VerifyValidatorInfoMiniBlocks(miniblocks []*block.MiniBlock, validatorsInfo map[uint32][]*state.ValidatorInfo) error - SaveValidatorInfoBlocksToStorage(metaBlock data.HeaderHandler, body *block.Body) - DeleteValidatorInfoBlocksFromStorage(metaBlock data.HeaderHandler) + SaveValidatorInfoBlockDataToStorage(metaBlock data.HeaderHandler, body *block.Body) + DeleteValidatorInfoBlockDataFromStorage(metaBlock data.HeaderHandler, body *block.Body) RemoveBlockDataFromPools(metaBlock data.HeaderHandler, body *block.Body) IsInterfaceNil() bool } diff --git a/process/mock/epochValidatorInfoCreatorStub.go b/process/mock/epochValidatorInfoCreatorStub.go index 3533131a117..61d45987048 100644 --- a/process/mock/epochValidatorInfoCreatorStub.go +++ b/process/mock/epochValidatorInfoCreatorStub.go @@ -8,12 +8,12 @@ import ( // EpochValidatorInfoCreatorStub - type EpochValidatorInfoCreatorStub struct { - CreateValidatorInfoMiniBlocksCalled func(validatorsInfo map[uint32][]*state.ValidatorInfo) (block.MiniBlockSlice, error) - VerifyValidatorInfoMiniBlocksCalled func(miniblocks []*block.MiniBlock, validatorsInfo map[uint32][]*state.ValidatorInfo) error - CreateMarshalizedDataCalled func(body block.Body) 
map[string][][]byte - SaveTxBlockToStorageCalled func(metaBlock data.HeaderHandler, body *block.Body) - DeleteTxsFromStorageCalled func(metaBlock data.HeaderHandler) - RemoveBlockDataFromPoolsCalled func(metaBlock data.HeaderHandler, body *block.Body) + CreateValidatorInfoMiniBlocksCalled func(validatorsInfo map[uint32][]*state.ValidatorInfo) (block.MiniBlockSlice, error) + VerifyValidatorInfoMiniBlocksCalled func(miniblocks []*block.MiniBlock, validatorsInfo map[uint32][]*state.ValidatorInfo) error + CreateMarshalizedDataCalled func(body block.Body) map[string][][]byte + SaveValidatorInfoBlockDataToStorageCalled func(metaBlock data.HeaderHandler, body *block.Body) + DeleteValidatorInfoBlockDataFromStorageCalled func(metaBlock data.HeaderHandler, body *block.Body) + RemoveBlockDataFromPoolsCalled func(metaBlock data.HeaderHandler, body *block.Body) } // CreateValidatorInfoMiniBlocks - @@ -32,17 +32,17 @@ func (e *EpochValidatorInfoCreatorStub) VerifyValidatorInfoMiniBlocks(miniblocks return nil } -// SaveValidatorInfoBlocksToStorage - -func (e *EpochValidatorInfoCreatorStub) SaveValidatorInfoBlocksToStorage(metaBlock data.HeaderHandler, body *block.Body) { - if e.SaveTxBlockToStorageCalled != nil { - e.SaveTxBlockToStorageCalled(metaBlock, body) +// SaveValidatorInfoBlockDataToStorage - +func (e *EpochValidatorInfoCreatorStub) SaveValidatorInfoBlockDataToStorage(metaBlock data.HeaderHandler, body *block.Body) { + if e.SaveValidatorInfoBlockDataToStorageCalled != nil { + e.SaveValidatorInfoBlockDataToStorageCalled(metaBlock, body) } } -// DeleteValidatorInfoBlocksFromStorage - -func (e *EpochValidatorInfoCreatorStub) DeleteValidatorInfoBlocksFromStorage(metaBlock data.HeaderHandler) { - if e.DeleteTxsFromStorageCalled != nil { - e.DeleteTxsFromStorageCalled(metaBlock) +// DeleteValidatorInfoBlockDataFromStorage - +func (e *EpochValidatorInfoCreatorStub) DeleteValidatorInfoBlockDataFromStorage(metaBlock data.HeaderHandler, body *block.Body) { + if 
e.DeleteValidatorInfoBlockDataFromStorageCalled != nil { + e.DeleteValidatorInfoBlockDataFromStorageCalled(metaBlock, body) } } diff --git a/testscommon/dataRetriever/poolFactory.go b/testscommon/dataRetriever/poolFactory.go index 1ddfb66a845..6e217612a0a 100644 --- a/testscommon/dataRetriever/poolFactory.go +++ b/testscommon/dataRetriever/poolFactory.go @@ -116,7 +116,7 @@ func CreatePoolsHolder(numShards uint32, selfShard uint32) dataRetriever.PoolsHo validatorsInfo, err := storageUnit.NewCache(cacherConfig) panicIfError("CreatePoolsHolder", err) - currentTx := dataPool.NewCurrentBlockPool() + currentTx := dataPool.NewCurrentBlockTransactionPool() dataPoolArgs := dataPool.DataPoolArgs{ Transactions: txPool, UnsignedTransactions: unsignedTxPool, @@ -183,7 +183,7 @@ func CreatePoolsHolderWithTxPool(txPool dataRetriever.ShardedDataCacherNotifier) validatorsInfo, err := storageUnit.NewCache(cacherConfig) panicIfError("CreatePoolsHolderWithTxPool", err) - currentTx := dataPool.NewCurrentBlockPool() + currentTx := dataPool.NewCurrentBlockTransactionPool() dataPoolArgs := dataPool.DataPoolArgs{ Transactions: txPool, UnsignedTransactions: unsignedTxPool, diff --git a/testscommon/dataRetriever/poolsHolderMock.go b/testscommon/dataRetriever/poolsHolderMock.go index 94cce7167e7..c8662c60972 100644 --- a/testscommon/dataRetriever/poolsHolderMock.go +++ b/testscommon/dataRetriever/poolsHolderMock.go @@ -14,17 +14,18 @@ import ( // PoolsHolderMock - type PoolsHolderMock struct { - transactions dataRetriever.ShardedDataCacherNotifier - unsignedTransactions dataRetriever.ShardedDataCacherNotifier - rewardTransactions dataRetriever.ShardedDataCacherNotifier - headers dataRetriever.HeadersPool - miniBlocks storage.Cacher - peerChangesBlocks storage.Cacher - trieNodes storage.Cacher - trieNodesChunks storage.Cacher - smartContracts storage.Cacher - currBlockTxs dataRetriever.TransactionCacher - validatorsInfo storage.Cacher + transactions dataRetriever.ShardedDataCacherNotifier + 
unsignedTransactions dataRetriever.ShardedDataCacherNotifier + rewardTransactions dataRetriever.ShardedDataCacherNotifier + headers dataRetriever.HeadersPool + miniBlocks storage.Cacher + peerChangesBlocks storage.Cacher + trieNodes storage.Cacher + trieNodesChunks storage.Cacher + smartContracts storage.Cacher + currBlockTxs dataRetriever.TransactionCacher + currBlockValidatorInfo dataRetriever.ValidatorInfoCacher + validatorsInfo storage.Cacher } // NewPoolsHolderMock - @@ -74,7 +75,8 @@ func NewPoolsHolderMock() *PoolsHolderMock { holder.peerChangesBlocks, err = storageUnit.NewCache(storageUnit.CacheConfig{Type: storageUnit.LRUCache, Capacity: 10000, Shards: 1, SizeInBytes: 0}) panicIfError("NewPoolsHolderMock", err) - holder.currBlockTxs = dataPool.NewCurrentBlockPool() + holder.currBlockTxs = dataPool.NewCurrentBlockTransactionPool() + holder.currBlockValidatorInfo = dataPool.NewCurrentBlockValidatorInfoPool() holder.trieNodes, err = storageUnit.NewCache(storageUnit.CacheConfig{Type: storageUnit.SizeLRUCache, Capacity: 900000, Shards: 1, SizeInBytes: 314572800}) panicIfError("NewPoolsHolderMock", err) @@ -96,6 +98,11 @@ func (holder *PoolsHolderMock) CurrentBlockTxs() dataRetriever.TransactionCacher return holder.currBlockTxs } +// CurrentBlockValidatorInfo - +func (holder *PoolsHolderMock) CurrentBlockValidatorInfo() dataRetriever.ValidatorInfoCacher { + return holder.currBlockValidatorInfo +} + // Transactions - func (holder *PoolsHolderMock) Transactions() dataRetriever.ShardedDataCacherNotifier { return holder.transactions diff --git a/testscommon/dataRetriever/poolsHolderStub.go b/testscommon/dataRetriever/poolsHolderStub.go index 8ee8a385a77..bdbd9df067f 100644 --- a/testscommon/dataRetriever/poolsHolderStub.go +++ b/testscommon/dataRetriever/poolsHolderStub.go @@ -8,18 +8,19 @@ import ( // PoolsHolderStub - type PoolsHolderStub struct { - HeadersCalled func() dataRetriever.HeadersPool - TransactionsCalled func() dataRetriever.ShardedDataCacherNotifier - 
UnsignedTransactionsCalled func() dataRetriever.ShardedDataCacherNotifier - RewardTransactionsCalled func() dataRetriever.ShardedDataCacherNotifier - MiniBlocksCalled func() storage.Cacher - MetaBlocksCalled func() storage.Cacher - CurrBlockTxsCalled func() dataRetriever.TransactionCacher - TrieNodesCalled func() storage.Cacher - TrieNodesChunksCalled func() storage.Cacher - PeerChangesBlocksCalled func() storage.Cacher - SmartContractsCalled func() storage.Cacher - ValidatorsInfoCalled func() storage.Cacher + HeadersCalled func() dataRetriever.HeadersPool + TransactionsCalled func() dataRetriever.ShardedDataCacherNotifier + UnsignedTransactionsCalled func() dataRetriever.ShardedDataCacherNotifier + RewardTransactionsCalled func() dataRetriever.ShardedDataCacherNotifier + MiniBlocksCalled func() storage.Cacher + MetaBlocksCalled func() storage.Cacher + CurrBlockTxsCalled func() dataRetriever.TransactionCacher + CurrBlockValidatorInfoCalled func() dataRetriever.ValidatorInfoCacher + TrieNodesCalled func() storage.Cacher + TrieNodesChunksCalled func() storage.Cacher + PeerChangesBlocksCalled func() storage.Cacher + SmartContractsCalled func() storage.Cacher + ValidatorsInfoCalled func() storage.Cacher } // NewPoolsHolderStub - @@ -90,6 +91,15 @@ func (holder *PoolsHolderStub) CurrentBlockTxs() dataRetriever.TransactionCacher return nil } +// CurrentBlockValidatorInfo - +func (holder *PoolsHolderStub) CurrentBlockValidatorInfo() dataRetriever.ValidatorInfoCacher { + if holder.CurrBlockValidatorInfoCalled != nil { + return holder.CurrBlockValidatorInfoCalled() + } + + return nil +} + // TrieNodes - func (holder *PoolsHolderStub) TrieNodes() storage.Cacher { if holder.TrieNodesCalled != nil { From fad699b1569c3ae811e1019084e4444e23992346 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Thu, 5 May 2022 00:56:26 +0300 Subject: [PATCH 18/70] * Added missing DataPoolArgs initialization --- .../dataPool/currentBlockTransactionPool.go | 4 +- 
.../currentBlockTransactionPool_test.go | 2 +- dataRetriever/factory/dataPoolFactory.go | 26 +++++----- epochStart/metachain/baseRewards.go | 2 +- epochStart/metachain/systemSCs_test.go | 6 +-- .../vm/delegation/delegationScenarios_test.go | 2 +- testscommon/dataRetriever/poolFactory.go | 52 ++++++++++--------- testscommon/dataRetriever/poolsHolderMock.go | 2 +- 8 files changed, 51 insertions(+), 45 deletions(-) diff --git a/dataRetriever/dataPool/currentBlockTransactionPool.go b/dataRetriever/dataPool/currentBlockTransactionPool.go index 5222e88ca3b..4ec1accf237 100644 --- a/dataRetriever/dataPool/currentBlockTransactionPool.go +++ b/dataRetriever/dataPool/currentBlockTransactionPool.go @@ -15,8 +15,8 @@ type transactionMapCacher struct { txsForBlock map[string]data.TransactionHandler } -// NewCurrentBlockTransactionPool returns a new transaction pool to be used for the current block -func NewCurrentBlockTransactionPool() *transactionMapCacher { +// NewCurrentBlockTransactionsPool returns a new transactions pool to be used for the current block +func NewCurrentBlockTransactionsPool() *transactionMapCacher { return &transactionMapCacher{ mutTxs: sync.RWMutex{}, txsForBlock: make(map[string]data.TransactionHandler), diff --git a/dataRetriever/dataPool/currentBlockTransactionPool_test.go b/dataRetriever/dataPool/currentBlockTransactionPool_test.go index 720e4d547e4..dc589ff1433 100644 --- a/dataRetriever/dataPool/currentBlockTransactionPool_test.go +++ b/dataRetriever/dataPool/currentBlockTransactionPool_test.go @@ -13,7 +13,7 @@ func TestCurrentBlockPool_AddGetCleanTx(t *testing.T) { txHash := []byte("hash") tx := &transaction.Transaction{} - currentBlockPool := NewCurrentBlockTransactionPool() + currentBlockPool := NewCurrentBlockTransactionsPool() require.False(t, currentBlockPool.IsInterfaceNil()) currentBlockPool.AddTx(txHash, tx) diff --git a/dataRetriever/factory/dataPoolFactory.go b/dataRetriever/factory/dataPoolFactory.go index 5d6fe671700..e8d48dd4be7 
100644 --- a/dataRetriever/factory/dataPoolFactory.go +++ b/dataRetriever/factory/dataPoolFactory.go @@ -130,19 +130,21 @@ func NewDataPoolFromConfig(args ArgsDataPool) (dataRetriever.PoolsHolder, error) return nil, fmt.Errorf("%w while creating the cache for the validator info results", err) } - currBlockTxs := dataPool.NewCurrentBlockTransactionPool() + currBlockTransactions := dataPool.NewCurrentBlockTransactionsPool() + currBlockValidatorInfo := dataPool.NewCurrentBlockValidatorInfoPool() dataPoolArgs := dataPool.DataPoolArgs{ - Transactions: txPool, - UnsignedTransactions: uTxPool, - RewardTransactions: rewardTxPool, - Headers: hdrPool, - MiniBlocks: txBlockBody, - PeerChangesBlocks: peerChangeBlockBody, - TrieNodes: adaptedTrieNodesStorage, - TrieNodesChunks: trieNodesChunks, - CurrentBlockTransactions: currBlockTxs, - SmartContracts: smartContracts, - ValidatorsInfo: validatorsInfo, + Transactions: txPool, + UnsignedTransactions: uTxPool, + RewardTransactions: rewardTxPool, + Headers: hdrPool, + MiniBlocks: txBlockBody, + PeerChangesBlocks: peerChangeBlockBody, + TrieNodes: adaptedTrieNodesStorage, + TrieNodesChunks: trieNodesChunks, + CurrentBlockTransactions: currBlockTransactions, + CurrentBlockValidatorInfo: currBlockValidatorInfo, + SmartContracts: smartContracts, + ValidatorsInfo: validatorsInfo, } return dataPool.NewDataPool(dataPoolArgs) } diff --git a/epochStart/metachain/baseRewards.go b/epochStart/metachain/baseRewards.go index 55e24d4b607..15bb63a1ad3 100644 --- a/epochStart/metachain/baseRewards.go +++ b/epochStart/metachain/baseRewards.go @@ -81,7 +81,7 @@ func NewBaseRewardsCreator(args BaseRewardsCreatorArgs) (*baseRewardsCreator, er return nil, epochStart.ErrProtocolSustainabilityAddressInMetachain } - currTxsCache := dataPool.NewCurrentBlockTransactionPool() + currTxsCache := dataPool.NewCurrentBlockTransactionsPool() brc := &baseRewardsCreator{ currTxs: currTxsCache, shardCoordinator: args.ShardCoordinator, diff --git 
a/epochStart/metachain/systemSCs_test.go b/epochStart/metachain/systemSCs_test.go index 868e24d0b8e..fdacab49656 100644 --- a/epochStart/metachain/systemSCs_test.go +++ b/epochStart/metachain/systemSCs_test.go @@ -1115,7 +1115,7 @@ func TestSystemSCProcessor_ProcessDelegationRewardsNothingToExecute(t *testing.T args, _ := createFullArgumentsForSystemSCProcessing(1000, createMemUnit()) s, _ := NewSystemSCProcessor(args) - localCache := dataPool.NewCurrentBlockTransactionPool() + localCache := dataPool.NewCurrentBlockTransactionsPool() miniBlocks := []*block.MiniBlock{ { SenderShardID: 0, @@ -1134,7 +1134,7 @@ func TestSystemSCProcessor_ProcessDelegationRewardsErrors(t *testing.T) { args, _ := createFullArgumentsForSystemSCProcessing(1000, createMemUnit()) s, _ := NewSystemSCProcessor(args) - localCache := dataPool.NewCurrentBlockTransactionPool() + localCache := dataPool.NewCurrentBlockTransactionsPool() miniBlocks := []*block.MiniBlock{ { SenderShardID: core.MetachainShardId, @@ -1179,7 +1179,7 @@ func TestSystemSCProcessor_ProcessDelegationRewards(t *testing.T) { args, scContainer := createFullArgumentsForSystemSCProcessing(1000, createMemUnit()) s, _ := NewSystemSCProcessor(args) - localCache := dataPool.NewCurrentBlockTransactionPool() + localCache := dataPool.NewCurrentBlockTransactionsPool() miniBlocks := []*block.MiniBlock{ { SenderShardID: core.MetachainShardId, diff --git a/integrationTests/vm/delegation/delegationScenarios_test.go b/integrationTests/vm/delegation/delegationScenarios_test.go index d259bd2284a..cb17f077e0b 100644 --- a/integrationTests/vm/delegation/delegationScenarios_test.go +++ b/integrationTests/vm/delegation/delegationScenarios_test.go @@ -1133,7 +1133,7 @@ func addRewardsToDelegation(tpn *integrationTests.TestProcessorNode, recvAddr [] }, } - txCacher := dataPool.NewCurrentBlockTransactionPool() + txCacher := dataPool.NewCurrentBlockTransactionsPool() txCacher.AddTx(rewardTxHash, tx) _ = 
tpn.EpochStartSystemSCProcessor.ProcessDelegationRewards(mbSlice, txCacher) diff --git a/testscommon/dataRetriever/poolFactory.go b/testscommon/dataRetriever/poolFactory.go index 6e217612a0a..ea27150be81 100644 --- a/testscommon/dataRetriever/poolFactory.go +++ b/testscommon/dataRetriever/poolFactory.go @@ -116,19 +116,21 @@ func CreatePoolsHolder(numShards uint32, selfShard uint32) dataRetriever.PoolsHo validatorsInfo, err := storageUnit.NewCache(cacherConfig) panicIfError("CreatePoolsHolder", err) - currentTx := dataPool.NewCurrentBlockTransactionPool() + currentBlockTransactions := dataPool.NewCurrentBlockTransactionsPool() + currentBlockValidatorInfo := dataPool.NewCurrentBlockValidatorInfoPool() dataPoolArgs := dataPool.DataPoolArgs{ - Transactions: txPool, - UnsignedTransactions: unsignedTxPool, - RewardTransactions: rewardsTxPool, - Headers: headersPool, - MiniBlocks: txBlockBody, - PeerChangesBlocks: peerChangeBlockBody, - TrieNodes: adaptedTrieNodesStorage, - TrieNodesChunks: trieNodesChunks, - CurrentBlockTransactions: currentTx, - SmartContracts: smartContracts, - ValidatorsInfo: validatorsInfo, + Transactions: txPool, + UnsignedTransactions: unsignedTxPool, + RewardTransactions: rewardsTxPool, + Headers: headersPool, + MiniBlocks: txBlockBody, + PeerChangesBlocks: peerChangeBlockBody, + TrieNodes: adaptedTrieNodesStorage, + TrieNodesChunks: trieNodesChunks, + CurrentBlockTransactions: currentBlockTransactions, + CurrentBlockValidatorInfo: currentBlockValidatorInfo, + SmartContracts: smartContracts, + ValidatorsInfo: validatorsInfo, } holder, err := dataPool.NewDataPool(dataPoolArgs) panicIfError("CreatePoolsHolder", err) @@ -183,19 +185,21 @@ func CreatePoolsHolderWithTxPool(txPool dataRetriever.ShardedDataCacherNotifier) validatorsInfo, err := storageUnit.NewCache(cacherConfig) panicIfError("CreatePoolsHolderWithTxPool", err) - currentTx := dataPool.NewCurrentBlockTransactionPool() + currentBlockTransactions := 
dataPool.NewCurrentBlockTransactionsPool() + currentBlockValidatorInfo := dataPool.NewCurrentBlockValidatorInfoPool() dataPoolArgs := dataPool.DataPoolArgs{ - Transactions: txPool, - UnsignedTransactions: unsignedTxPool, - RewardTransactions: rewardsTxPool, - Headers: headersPool, - MiniBlocks: txBlockBody, - PeerChangesBlocks: peerChangeBlockBody, - TrieNodes: trieNodes, - TrieNodesChunks: trieNodesChunks, - CurrentBlockTransactions: currentTx, - SmartContracts: smartContracts, - ValidatorsInfo: validatorsInfo, + Transactions: txPool, + UnsignedTransactions: unsignedTxPool, + RewardTransactions: rewardsTxPool, + Headers: headersPool, + MiniBlocks: txBlockBody, + PeerChangesBlocks: peerChangeBlockBody, + TrieNodes: trieNodes, + TrieNodesChunks: trieNodesChunks, + CurrentBlockTransactions: currentBlockTransactions, + CurrentBlockValidatorInfo: currentBlockValidatorInfo, + SmartContracts: smartContracts, + ValidatorsInfo: validatorsInfo, } holder, err := dataPool.NewDataPool(dataPoolArgs) panicIfError("CreatePoolsHolderWithTxPool", err) diff --git a/testscommon/dataRetriever/poolsHolderMock.go b/testscommon/dataRetriever/poolsHolderMock.go index c8662c60972..5bc0755bf89 100644 --- a/testscommon/dataRetriever/poolsHolderMock.go +++ b/testscommon/dataRetriever/poolsHolderMock.go @@ -75,7 +75,7 @@ func NewPoolsHolderMock() *PoolsHolderMock { holder.peerChangesBlocks, err = storageUnit.NewCache(storageUnit.CacheConfig{Type: storageUnit.LRUCache, Capacity: 10000, Shards: 1, SizeInBytes: 0}) panicIfError("NewPoolsHolderMock", err) - holder.currBlockTxs = dataPool.NewCurrentBlockTransactionPool() + holder.currBlockTxs = dataPool.NewCurrentBlockTransactionsPool() holder.currBlockValidatorInfo = dataPool.NewCurrentBlockValidatorInfoPool() holder.trieNodes, err = storageUnit.NewCache(storageUnit.CacheConfig{Type: storageUnit.SizeLRUCache, Capacity: 900000, Shards: 1, SizeInBytes: 314572800}) From 828db36549433870b1ccd31412bc4c5dde7d041f Mon Sep 17 00:00:00 2001 From: Sorin 
Stanculeanu Date: Thu, 5 May 2022 09:08:30 +0300 Subject: [PATCH 19/70] added RequestDataFromHashArray --- cmd/node/config/config.toml | 3 + config/config.go | 6 + dataRetriever/errors.go | 6 + .../factory/resolverscontainer/args.go | 35 +-- .../baseResolversContainerFactory.go | 64 ++-- .../metaResolversContainerFactory.go | 41 +-- .../metaResolversContainerFactory_test.go | 16 +- .../shardResolversContainerFactory.go | 41 +-- .../shardResolversContainerFactory_test.go | 15 +- dataRetriever/interface.go | 6 + .../requestHandlers/requestHandler.go | 45 +++ .../requestHandlers/requestHandler_test.go | 85 +++++- .../resolvers/validatorInfoResolver.go | 135 ++++++++- .../resolvers/validatorInfoResolver_test.go | 278 ++++++++++++++++-- epochStart/bootstrap/process.go | 33 ++- epochStart/bootstrap/process_test.go | 3 + factory/processComponents.go | 70 ++--- factory/stateComponents_test.go | 3 + genesis/process/disabled/requestHandler.go | 4 + integrationTests/testProcessorNode.go | 3 +- process/interface.go | 1 + testscommon/generalConfig.go | 3 + testscommon/requestHandlerStub.go | 8 + 23 files changed, 725 insertions(+), 179 deletions(-) diff --git a/cmd/node/config/config.toml b/cmd/node/config/config.toml index 50347d7a48d..ba882220b6b 100644 --- a/cmd/node/config/config.toml +++ b/cmd/node/config/config.toml @@ -897,3 +897,6 @@ NumCrossShardPeers = 2 NumIntraShardPeers = 1 NumFullHistoryPeers = 3 + +[ValidatorInfo] + MaxNumOfValidatorInfoInResponse = 10 diff --git a/config/config.go b/config/config.go index f60402cd2de..264855e97f6 100644 --- a/config/config.go +++ b/config/config.go @@ -187,6 +187,12 @@ type Config struct { VMOutputCacher CacheConfig PeersRatingConfig PeersRatingConfig + + ValidatorInfo ValidatorInfoConfig +} + +type ValidatorInfoConfig struct { + MaxNumOfValidatorInfoInResponse int } // PeersRatingConfig will hold settings related to peers rating diff --git a/dataRetriever/errors.go b/dataRetriever/errors.go index f66f33b24d7..3cfcf9ca488 100644 
--- a/dataRetriever/errors.go +++ b/dataRetriever/errors.go @@ -229,3 +229,9 @@ var ErrNilValidatorInfoPool = errors.New("nil validator info pool") // ErrNilValidatorInfoStorage signals that a nil validator info storage has been provided var ErrNilValidatorInfoStorage = errors.New("nil validator info storage") + +// ErrValidatorInfoNotFound signals that no validator info was found +var ErrValidatorInfoNotFound = errors.New("validator info not found") + +// ErrInvalidNumOfValidatorInfo signals that an invalid number of validator info was provided +var ErrInvalidNumOfValidatorInfo = errors.New("invalid num of validator info") diff --git a/dataRetriever/factory/resolverscontainer/args.go b/dataRetriever/factory/resolverscontainer/args.go index c0e3ad276cb..067dbab735c 100644 --- a/dataRetriever/factory/resolverscontainer/args.go +++ b/dataRetriever/factory/resolverscontainer/args.go @@ -12,21 +12,22 @@ import ( // FactoryArgs will hold the arguments for ResolversContainerFactory for both shard and meta type FactoryArgs struct { - ResolverConfig config.ResolverConfig - NumConcurrentResolvingJobs int32 - ShardCoordinator sharding.Coordinator - Messenger dataRetriever.TopicMessageHandler - Store dataRetriever.StorageService - Marshalizer marshal.Marshalizer - DataPools dataRetriever.PoolsHolder - Uint64ByteSliceConverter typeConverters.Uint64ByteSliceConverter - DataPacker dataRetriever.DataPacker - TriesContainer common.TriesHolder - InputAntifloodHandler dataRetriever.P2PAntifloodHandler - OutputAntifloodHandler dataRetriever.P2PAntifloodHandler - CurrentNetworkEpochProvider dataRetriever.CurrentNetworkEpochProviderHandler - PreferredPeersHolder p2p.PreferredPeersHolderHandler - PeersRatingHandler dataRetriever.PeersRatingHandler - SizeCheckDelta uint32 - IsFullHistoryNode bool + ResolverConfig config.ResolverConfig + NumConcurrentResolvingJobs int32 + ShardCoordinator sharding.Coordinator + Messenger dataRetriever.TopicMessageHandler + Store 
dataRetriever.StorageService + Marshalizer marshal.Marshalizer + DataPools dataRetriever.PoolsHolder + Uint64ByteSliceConverter typeConverters.Uint64ByteSliceConverter + DataPacker dataRetriever.DataPacker + TriesContainer common.TriesHolder + InputAntifloodHandler dataRetriever.P2PAntifloodHandler + OutputAntifloodHandler dataRetriever.P2PAntifloodHandler + CurrentNetworkEpochProvider dataRetriever.CurrentNetworkEpochProviderHandler + PreferredPeersHolder p2p.PreferredPeersHolderHandler + PeersRatingHandler dataRetriever.PeersRatingHandler + SizeCheckDelta uint32 + IsFullHistoryNode bool + MaxNumOfValidatorInfoInResponse int } diff --git a/dataRetriever/factory/resolverscontainer/baseResolversContainerFactory.go b/dataRetriever/factory/resolverscontainer/baseResolversContainerFactory.go index d78b7d0e53a..0b4fd793651 100644 --- a/dataRetriever/factory/resolverscontainer/baseResolversContainerFactory.go +++ b/dataRetriever/factory/resolverscontainer/baseResolversContainerFactory.go @@ -19,30 +19,33 @@ import ( // EmptyExcludePeersOnTopic is an empty topic const EmptyExcludePeersOnTopic = "" +const minNumOfValidatorInfoInResponse = 5 + var log = logger.GetOrCreate("dataRetriever/factory/resolverscontainer") type baseResolversContainerFactory struct { - container dataRetriever.ResolversContainer - shardCoordinator sharding.Coordinator - messenger dataRetriever.TopicMessageHandler - store dataRetriever.StorageService - marshalizer marshal.Marshalizer - dataPools dataRetriever.PoolsHolder - uint64ByteSliceConverter typeConverters.Uint64ByteSliceConverter - intRandomizer dataRetriever.IntRandomizer - dataPacker dataRetriever.DataPacker - triesContainer common.TriesHolder - inputAntifloodHandler dataRetriever.P2PAntifloodHandler - outputAntifloodHandler dataRetriever.P2PAntifloodHandler - throttler dataRetriever.ResolverThrottler - intraShardTopic string - isFullHistoryNode bool - currentNetworkEpochProvider dataRetriever.CurrentNetworkEpochProviderHandler - 
preferredPeersHolder dataRetriever.PreferredPeersHolderHandler - peersRatingHandler dataRetriever.PeersRatingHandler - numCrossShardPeers int - numIntraShardPeers int - numFullHistoryPeers int + container dataRetriever.ResolversContainer + shardCoordinator sharding.Coordinator + messenger dataRetriever.TopicMessageHandler + store dataRetriever.StorageService + marshalizer marshal.Marshalizer + dataPools dataRetriever.PoolsHolder + uint64ByteSliceConverter typeConverters.Uint64ByteSliceConverter + intRandomizer dataRetriever.IntRandomizer + dataPacker dataRetriever.DataPacker + triesContainer common.TriesHolder + inputAntifloodHandler dataRetriever.P2PAntifloodHandler + outputAntifloodHandler dataRetriever.P2PAntifloodHandler + throttler dataRetriever.ResolverThrottler + intraShardTopic string + isFullHistoryNode bool + currentNetworkEpochProvider dataRetriever.CurrentNetworkEpochProviderHandler + preferredPeersHolder dataRetriever.PreferredPeersHolderHandler + peersRatingHandler dataRetriever.PeersRatingHandler + numCrossShardPeers int + numIntraShardPeers int + numFullHistoryPeers int + maxNumOfValidatorInfoInResponse int } func (brcf *baseResolversContainerFactory) checkParams() error { @@ -97,6 +100,10 @@ func (brcf *baseResolversContainerFactory) checkParams() error { if brcf.numFullHistoryPeers <= 0 { return fmt.Errorf("%w for numFullHistoryPeers", dataRetriever.ErrInvalidValue) } + if brcf.maxNumOfValidatorInfoInResponse < minNumOfValidatorInfoInResponse { + return fmt.Errorf("%w for maxNumOfValidatorInfoInResponse, expected %d, received %d", + dataRetriever.ErrInvalidValue, minNumOfValidatorInfoInResponse, brcf.maxNumOfValidatorInfoInResponse) + } return nil } @@ -367,13 +374,14 @@ func (brcf *baseResolversContainerFactory) generateValidatorInfoResolver() error } arg := resolvers.ArgValidatorInfoResolver{ - SenderResolver: resolverSender, - Marshaller: brcf.marshalizer, - AntifloodHandler: brcf.inputAntifloodHandler, - Throttler: brcf.throttler, - 
ValidatorInfoPool: brcf.dataPools.ValidatorsInfo(), - ValidatorInfoStorage: brcf.store.GetStorer(dataRetriever.UnsignedTransactionUnit), - IsFullHistoryNode: brcf.isFullHistoryNode, + SenderResolver: resolverSender, + Marshaller: brcf.marshalizer, + AntifloodHandler: brcf.inputAntifloodHandler, + Throttler: brcf.throttler, + ValidatorInfoPool: brcf.dataPools.ValidatorsInfo(), + ValidatorInfoStorage: brcf.store.GetStorer(dataRetriever.UnsignedTransactionUnit), + IsFullHistoryNode: brcf.isFullHistoryNode, + MaxNumOfValidatorInfoInResponse: brcf.maxNumOfValidatorInfoInResponse, } validatorInfoResolver, err := resolvers.NewValidatorInfoResolver(arg) if err != nil { diff --git a/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory.go b/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory.go index 6158945a265..a0f017bf769 100644 --- a/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory.go +++ b/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory.go @@ -36,26 +36,27 @@ func NewMetaResolversContainerFactory( container := containers.NewResolversContainer() base := &baseResolversContainerFactory{ - container: container, - shardCoordinator: args.ShardCoordinator, - messenger: args.Messenger, - store: args.Store, - marshalizer: args.Marshalizer, - dataPools: args.DataPools, - uint64ByteSliceConverter: args.Uint64ByteSliceConverter, - intRandomizer: &random.ConcurrentSafeIntRandomizer{}, - dataPacker: args.DataPacker, - triesContainer: args.TriesContainer, - inputAntifloodHandler: args.InputAntifloodHandler, - outputAntifloodHandler: args.OutputAntifloodHandler, - throttler: thr, - isFullHistoryNode: args.IsFullHistoryNode, - currentNetworkEpochProvider: args.CurrentNetworkEpochProvider, - preferredPeersHolder: args.PreferredPeersHolder, - peersRatingHandler: args.PeersRatingHandler, - numCrossShardPeers: int(args.ResolverConfig.NumCrossShardPeers), - numIntraShardPeers: 
int(args.ResolverConfig.NumIntraShardPeers), - numFullHistoryPeers: int(args.ResolverConfig.NumFullHistoryPeers), + container: container, + shardCoordinator: args.ShardCoordinator, + messenger: args.Messenger, + store: args.Store, + marshalizer: args.Marshalizer, + dataPools: args.DataPools, + uint64ByteSliceConverter: args.Uint64ByteSliceConverter, + intRandomizer: &random.ConcurrentSafeIntRandomizer{}, + dataPacker: args.DataPacker, + triesContainer: args.TriesContainer, + inputAntifloodHandler: args.InputAntifloodHandler, + outputAntifloodHandler: args.OutputAntifloodHandler, + throttler: thr, + isFullHistoryNode: args.IsFullHistoryNode, + currentNetworkEpochProvider: args.CurrentNetworkEpochProvider, + preferredPeersHolder: args.PreferredPeersHolder, + peersRatingHandler: args.PeersRatingHandler, + numCrossShardPeers: int(args.ResolverConfig.NumCrossShardPeers), + numIntraShardPeers: int(args.ResolverConfig.NumIntraShardPeers), + numFullHistoryPeers: int(args.ResolverConfig.NumFullHistoryPeers), + maxNumOfValidatorInfoInResponse: args.MaxNumOfValidatorInfoInResponse, } err = base.checkParams() diff --git a/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory_test.go b/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory_test.go index d648d214282..b8344aba9cf 100644 --- a/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory_test.go +++ b/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory_test.go @@ -1,6 +1,7 @@ package resolverscontainer_test import ( + "errors" "strings" "testing" @@ -212,6 +213,18 @@ func TestNewMetaResolversContainerFactory_NilTrieDataGetterShouldErr(t *testing. 
assert.Equal(t, dataRetriever.ErrNilTrieDataGetter, err) } +func TestNewMetaResolversContainerFactory_InvalidMAxValidatorInfoShouldErr(t *testing.T) { + t.Parallel() + + args := getArgumentsMeta() + args.MaxNumOfValidatorInfoInResponse = 0 + rcf, err := resolverscontainer.NewMetaResolversContainerFactory(args) + + assert.Nil(t, rcf) + assert.True(t, errors.Is(err, dataRetriever.ErrInvalidValue)) + assert.True(t, strings.Contains(err.Error(), "maxNumOfValidatorInfoInResponse")) +} + func TestNewMetaResolversContainerFactory_ShouldWork(t *testing.T) { t.Parallel() @@ -304,6 +317,7 @@ func getArgumentsMeta() resolverscontainer.FactoryArgs { NumIntraShardPeers: 2, NumFullHistoryPeers: 3, }, - PeersRatingHandler: &p2pmocks.PeersRatingHandlerStub{}, + PeersRatingHandler: &p2pmocks.PeersRatingHandlerStub{}, + MaxNumOfValidatorInfoInResponse: 5, } } diff --git a/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory.go b/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory.go index 573819ff206..26bfe10e71d 100644 --- a/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory.go +++ b/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory.go @@ -34,26 +34,27 @@ func NewShardResolversContainerFactory( container := containers.NewResolversContainer() base := &baseResolversContainerFactory{ - container: container, - shardCoordinator: args.ShardCoordinator, - messenger: args.Messenger, - store: args.Store, - marshalizer: args.Marshalizer, - dataPools: args.DataPools, - uint64ByteSliceConverter: args.Uint64ByteSliceConverter, - intRandomizer: &random.ConcurrentSafeIntRandomizer{}, - dataPacker: args.DataPacker, - triesContainer: args.TriesContainer, - inputAntifloodHandler: args.InputAntifloodHandler, - outputAntifloodHandler: args.OutputAntifloodHandler, - throttler: thr, - isFullHistoryNode: args.IsFullHistoryNode, - currentNetworkEpochProvider: args.CurrentNetworkEpochProvider, - preferredPeersHolder: 
args.PreferredPeersHolder, - peersRatingHandler: args.PeersRatingHandler, - numCrossShardPeers: int(args.ResolverConfig.NumCrossShardPeers), - numIntraShardPeers: int(args.ResolverConfig.NumIntraShardPeers), - numFullHistoryPeers: int(args.ResolverConfig.NumFullHistoryPeers), + container: container, + shardCoordinator: args.ShardCoordinator, + messenger: args.Messenger, + store: args.Store, + marshalizer: args.Marshalizer, + dataPools: args.DataPools, + uint64ByteSliceConverter: args.Uint64ByteSliceConverter, + intRandomizer: &random.ConcurrentSafeIntRandomizer{}, + dataPacker: args.DataPacker, + triesContainer: args.TriesContainer, + inputAntifloodHandler: args.InputAntifloodHandler, + outputAntifloodHandler: args.OutputAntifloodHandler, + throttler: thr, + isFullHistoryNode: args.IsFullHistoryNode, + currentNetworkEpochProvider: args.CurrentNetworkEpochProvider, + preferredPeersHolder: args.PreferredPeersHolder, + peersRatingHandler: args.PeersRatingHandler, + numCrossShardPeers: int(args.ResolverConfig.NumCrossShardPeers), + numIntraShardPeers: int(args.ResolverConfig.NumIntraShardPeers), + numFullHistoryPeers: int(args.ResolverConfig.NumFullHistoryPeers), + maxNumOfValidatorInfoInResponse: args.MaxNumOfValidatorInfoInResponse, } err = base.checkParams() diff --git a/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory_test.go b/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory_test.go index f81b526bdff..58ef5b08b43 100644 --- a/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory_test.go +++ b/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory_test.go @@ -252,6 +252,18 @@ func TestNewShardResolversContainerFactory_InvalidNumFullHistoryPeersShouldErr(t assert.True(t, errors.Is(err, dataRetriever.ErrInvalidValue)) } +func TestNewShardResolversContainerFactory_InvalidMAxValidatorInfoShouldErr(t *testing.T) { + t.Parallel() + + args := getArgumentsShard() + 
args.MaxNumOfValidatorInfoInResponse = 0 + rcf, err := resolverscontainer.NewShardResolversContainerFactory(args) + + assert.Nil(t, rcf) + assert.True(t, errors.Is(err, dataRetriever.ErrInvalidValue)) + assert.True(t, strings.Contains(err.Error(), "maxNumOfValidatorInfoInResponse")) +} + func TestNewShardResolversContainerFactory_ShouldWork(t *testing.T) { t.Parallel() @@ -383,6 +395,7 @@ func getArgumentsShard() resolverscontainer.FactoryArgs { NumIntraShardPeers: 2, NumFullHistoryPeers: 3, }, - PeersRatingHandler: &p2pmocks.PeersRatingHandlerStub{}, + PeersRatingHandler: &p2pmocks.PeersRatingHandlerStub{}, + MaxNumOfValidatorInfoInResponse: 5, } } diff --git a/dataRetriever/interface.go b/dataRetriever/interface.go index f5f092f6f65..0367b1814f4 100644 --- a/dataRetriever/interface.go +++ b/dataRetriever/interface.go @@ -153,6 +153,12 @@ type MiniBlocksResolver interface { RequestDataFromHashArray(hashes [][]byte, epoch uint32) error } +// ValidatorInfoResolver defines what a validator info resolver should do +type ValidatorInfoResolver interface { + Resolver + RequestDataFromHashArray(hashes [][]byte, epoch uint32) error +} + // TopicResolverSender defines what sending operations are allowed for a topic resolver type TopicResolverSender interface { SendOnRequestTopic(rd *RequestData, originalHashes [][]byte) error diff --git a/dataRetriever/requestHandlers/requestHandler.go b/dataRetriever/requestHandlers/requestHandler.go index 304efbd91a0..46926001648 100644 --- a/dataRetriever/requestHandlers/requestHandler.go +++ b/dataRetriever/requestHandlers/requestHandler.go @@ -591,6 +591,51 @@ func (rrh *resolverRequestHandler) RequestValidatorInfo(hash []byte) { rrh.addRequestedItems([][]byte{hash}, uniqueValidatorInfoSuffix) } +// RequestValidatorsInfo asks for the validator info associated with the specified hashes from connected peers +func (rrh *resolverRequestHandler) RequestValidatorsInfo(hashes [][]byte) { + unrequestedHashes := rrh.getUnrequestedHashes(hashes, 
uniqueValidatorInfoSuffix) + if len(unrequestedHashes) == 0 { + return + } + + log.Debug("requesting validator info messages from network", + "topic", common.ValidatorInfoTopic, + "num hashes", len(unrequestedHashes), + "epoch", rrh.epoch, + ) + + resolver, err := rrh.resolversFinder.MetaChainResolver(common.ValidatorInfoTopic) + if err != nil { + log.Error("RequestValidatorInfo.MetaChainResolver", + "error", err.Error(), + "topic", common.ValidatorInfoTopic, + "num hashes", len(unrequestedHashes), + "epoch", rrh.epoch, + ) + return + } + + validatorInfoResolver, ok := resolver.(HashSliceResolver) + if !ok { + log.Warn("wrong assertion type when creating a validator info resolver") + return + } + + rrh.whiteList.Add(unrequestedHashes) + + err = validatorInfoResolver.RequestDataFromHashArray(unrequestedHashes, rrh.epoch) + if err != nil { + log.Debug("RequestValidatorInfo.RequestDataFromHash", + "error", err.Error(), + "topic", common.ValidatorInfoTopic, + "num hashes", len(unrequestedHashes), + "epoch", rrh.epoch, + ) + } + + rrh.addRequestedItems(unrequestedHashes, uniqueValidatorInfoSuffix) +} + func (rrh *resolverRequestHandler) testIfRequestIsNeeded(key []byte, suffix string) bool { rrh.sweepIfNeeded() diff --git a/dataRetriever/requestHandlers/requestHandler_test.go b/dataRetriever/requestHandlers/requestHandler_test.go index 8ef34576a9a..1702632a7c0 100644 --- a/dataRetriever/requestHandlers/requestHandler_test.go +++ b/dataRetriever/requestHandlers/requestHandler_test.go @@ -7,6 +7,7 @@ import ( "time" "github.com/ElrondNetwork/elrond-go-core/core" + "github.com/ElrondNetwork/elrond-go/common" "github.com/ElrondNetwork/elrond-go/dataRetriever" "github.com/ElrondNetwork/elrond-go/dataRetriever/mock" "github.com/ElrondNetwork/elrond-go/storage/timecache" @@ -1234,7 +1235,6 @@ func TestResolverRequestHandler_RequestValidatorInfo(t *testing.T) { ) rrh.RequestValidatorInfo(providedHash) - assert.False(t, wasCalled) }) t.Run("should work", func(t *testing.T) { @@ 
-1251,6 +1251,7 @@ func TestResolverRequestHandler_RequestValidatorInfo(t *testing.T) { rrh, _ := NewResolverRequestHandler( &mock.ResolversFinderStub{ MetaChainResolverCalled: func(baseTopic string) (resolver dataRetriever.Resolver, e error) { + assert.Equal(t, common.ValidatorInfoTopic, baseTopic) return res, nil }, }, @@ -1262,7 +1263,89 @@ func TestResolverRequestHandler_RequestValidatorInfo(t *testing.T) { ) rrh.RequestValidatorInfo(providedHash) + assert.True(t, wasCalled) + }) +} + +func TestResolverRequestHandler_RequestValidatorsInfo(t *testing.T) { + t.Parallel() + + t.Run("MetaChainResolver returns error", func(t *testing.T) { + providedHash := []byte("provided hash") + wasCalled := false + res := &mock.ResolverStub{ + RequestDataFromHashCalled: func(hash []byte, epoch uint32) error { + wasCalled = true + return nil + }, + } + + rrh, _ := NewResolverRequestHandler( + &mock.ResolversFinderStub{ + MetaChainResolverCalled: func(baseTopic string) (resolver dataRetriever.Resolver, e error) { + return res, errors.New("provided err") + }, + }, + &mock.RequestedItemsHandlerStub{}, + &mock.WhiteListHandlerStub{}, + 100, + 0, + time.Second, + ) + + rrh.RequestValidatorsInfo([][]byte{providedHash}) + assert.False(t, wasCalled) + }) + t.Run("cast fails", func(t *testing.T) { + providedHash := []byte("provided hash") + mbResolver := &mock.ResolverStub{} // uncastable to HashSliceResolver + wasCalled := false + rrh, _ := NewResolverRequestHandler( + &mock.ResolversFinderStub{ + MetaChainResolverCalled: func(baseTopic string) (resolver dataRetriever.Resolver, e error) { + return mbResolver, nil + }, + }, + &mock.RequestedItemsHandlerStub{}, + &mock.WhiteListHandlerStub{ + AddCalled: func(keys [][]byte) { + wasCalled = true + }, + }, + 100, + 0, + time.Second, + ) + + rrh.RequestValidatorsInfo([][]byte{providedHash}) + assert.False(t, wasCalled) + }) + t.Run("should work", func(t *testing.T) { + providedHashes := [][]byte{[]byte("provided hash 1"), []byte("provided hash 
2")} + wasCalled := false + res := &mock.HashSliceResolverStub{ + RequestDataFromHashArrayCalled: func(hashes [][]byte, epoch uint32) error { + assert.Equal(t, providedHashes, hashes) + wasCalled = true + return nil + }, + } + + rrh, _ := NewResolverRequestHandler( + &mock.ResolversFinderStub{ + MetaChainResolverCalled: func(baseTopic string) (resolver dataRetriever.Resolver, e error) { + assert.Equal(t, common.ValidatorInfoTopic, baseTopic) + return res, nil + }, + }, + &mock.RequestedItemsHandlerStub{}, + &mock.WhiteListHandlerStub{}, + 100, + 0, + time.Second, + ) + rrh.RequestValidatorsInfo(providedHashes) assert.True(t, wasCalled) }) } diff --git a/dataRetriever/resolvers/validatorInfoResolver.go b/dataRetriever/resolvers/validatorInfoResolver.go index 587d2d463eb..cf208f52d0d 100644 --- a/dataRetriever/resolvers/validatorInfoResolver.go +++ b/dataRetriever/resolvers/validatorInfoResolver.go @@ -13,15 +13,18 @@ import ( "github.com/ElrondNetwork/elrond-go/storage" ) +const minNumOfValidatorInfo = 5 + // ArgValidatorInfoResolver is the argument structure used to create a new validator info resolver instance type ArgValidatorInfoResolver struct { - SenderResolver dataRetriever.TopicResolverSender - Marshaller marshal.Marshalizer - AntifloodHandler dataRetriever.P2PAntifloodHandler - Throttler dataRetriever.ResolverThrottler - ValidatorInfoPool storage.Cacher - ValidatorInfoStorage storage.Storer - IsFullHistoryNode bool + SenderResolver dataRetriever.TopicResolverSender + Marshaller marshal.Marshalizer + AntifloodHandler dataRetriever.P2PAntifloodHandler + Throttler dataRetriever.ResolverThrottler + ValidatorInfoPool storage.Cacher + ValidatorInfoStorage storage.Storer + IsFullHistoryNode bool + MaxNumOfValidatorInfoInResponse int } // validatorInfoResolver is a wrapper over Resolver that is specialized in resolving validator info requests @@ -29,8 +32,9 @@ type validatorInfoResolver struct { dataRetriever.TopicResolverSender messageProcessor baseStorageResolver 
- validatorInfoPool storage.Cacher - validatorInfoStorage storage.Storer + validatorInfoPool storage.Cacher + validatorInfoStorage storage.Storer + maxNumOfValidatorInfoInResponse int } // NewValidatorInfoResolver creates a validator info resolver @@ -48,9 +52,10 @@ func NewValidatorInfoResolver(args ArgValidatorInfoResolver) (*validatorInfoReso throttler: args.Throttler, topic: args.SenderResolver.RequestTopic(), }, - baseStorageResolver: createBaseStorageResolver(args.ValidatorInfoStorage, args.IsFullHistoryNode), - validatorInfoPool: args.ValidatorInfoPool, - validatorInfoStorage: args.ValidatorInfoStorage, + baseStorageResolver: createBaseStorageResolver(args.ValidatorInfoStorage, args.IsFullHistoryNode), + validatorInfoPool: args.ValidatorInfoPool, + validatorInfoStorage: args.ValidatorInfoStorage, + maxNumOfValidatorInfoInResponse: args.MaxNumOfValidatorInfoInResponse, }, nil } @@ -73,6 +78,9 @@ func checkArgs(args ArgValidatorInfoResolver) error { if check.IfNil(args.ValidatorInfoStorage) { return dataRetriever.ErrNilValidatorInfoStorage } + if args.MaxNumOfValidatorInfoInResponse < minNumOfValidatorInfo { + return dataRetriever.ErrInvalidNumOfValidatorInfo + } return nil } @@ -89,6 +97,26 @@ func (res *validatorInfoResolver) RequestDataFromHash(hash []byte, epoch uint32) ) } +// RequestDataFromHashArray requests validator info from other peers by hash array +func (res *validatorInfoResolver) RequestDataFromHashArray(hashes [][]byte, epoch uint32) error { + b := &batch.Batch{ + Data: hashes, + } + buffHashes, err := res.marshalizer.Marshal(b) + if err != nil { + return err + } + + return res.SendOnRequestTopic( + &dataRetriever.RequestData{ + Type: dataRetriever.HashArrayType, + Value: buffHashes, + Epoch: epoch, + }, + hashes, + ) +} + // ProcessReceivedMessage represents the callback func from the p2p.Messenger that is called each time a new message is received // (for the topic this validator was registered to, usually a request topic) func (res 
*validatorInfoResolver) ProcessReceivedMessage(message p2p.MessageP2P, fromConnectedPeer core.PeerID) error { @@ -105,10 +133,11 @@ func (res *validatorInfoResolver) ProcessReceivedMessage(message p2p.MessageP2P, return err } - // TODO: add support for HashArrayType switch rd.Type { case dataRetriever.HashType: return res.resolveHashRequest(rd.Value, rd.Epoch, fromConnectedPeer) + case dataRetriever.HashArrayType: + return res.resolveMultipleHashesRequest(rd.Value, rd.Epoch, fromConnectedPeer) } return fmt.Errorf("%w for value %s", dataRetriever.ErrRequestTypeNotImplemented, logger.DisplayByteSlice(rd.Value)) @@ -121,7 +150,81 @@ func (res *validatorInfoResolver) resolveHashRequest(hash []byte, epoch uint32, return err } - return res.marshalAndSend(data, pid) + return res.marshalAndSend([][]byte{data}, pid) +} + +// resolveMultipleHashesRequest sends the response for a hash array type request +func (res *validatorInfoResolver) resolveMultipleHashesRequest(hashesBuff []byte, epoch uint32, pid core.PeerID) error { + b := batch.Batch{} + err := res.marshalizer.Unmarshal(&b, hashesBuff) + if err != nil { + return err + } + hashes := b.Data + + validatorInfoForHashes, err := res.fetchValidatorInfoForHashes(hashes, epoch) + if err != nil { + return fmt.Errorf("resolveMultipleHashesRequest error %w from buff %s", err, hashesBuff) + } + + return res.sendValidatorInfoForHashes(validatorInfoForHashes, pid) +} + +func (res *validatorInfoResolver) sendValidatorInfoForHashes(validatorInfoForHashes [][]byte, pid core.PeerID) error { + if len(validatorInfoForHashes) > res.maxNumOfValidatorInfoInResponse { + return res.sendLargeDataBuff(validatorInfoForHashes, pid) + } + + return res.marshalAndSend(validatorInfoForHashes, pid) +} + +func (res *validatorInfoResolver) sendLargeDataBuff(dataBuff [][]byte, pid core.PeerID) error { + chunksMap := res.splitDataBuffIntoChunks(dataBuff) + for _, chunk := range chunksMap { + err := res.marshalAndSend(chunk, pid) + if err != nil { + return 
err + } + } + + return nil +} + +func (res *validatorInfoResolver) splitDataBuffIntoChunks(dataBuff [][]byte) map[int][][]byte { + chunksMap := make(map[int][][]byte) + currentChunk := make([][]byte, 0) + currentChunkSize := 0 + chunkIndex := 0 + for _, data := range dataBuff { + if currentChunkSize == res.maxNumOfValidatorInfoInResponse { + chunksMap[chunkIndex] = currentChunk + chunkIndex++ + currentChunk = make([][]byte, 0) + currentChunkSize = 0 + } + + currentChunk = append(currentChunk, data) + currentChunkSize++ + } + chunksMap[chunkIndex] = currentChunk + + return chunksMap +} + +func (res *validatorInfoResolver) fetchValidatorInfoForHashes(hashes [][]byte, epoch uint32) ([][]byte, error) { + validatorInfos := make([][]byte, 0) + for _, hash := range hashes { + validatorInfoForHash, _ := res.fetchValidatorInfoByteSlice(hash, epoch) + if validatorInfoForHash != nil { + validatorInfos = append(validatorInfos, validatorInfoForHash) + } + } + + if len(validatorInfos) == 0 { + return nil, dataRetriever.ErrValidatorInfoNotFound + } + + return validatorInfos, nil } func (res *validatorInfoResolver) fetchValidatorInfoByteSlice(hash []byte, epoch uint32) ([]byte, error) { @@ -145,9 +248,9 @@ func (res *validatorInfoResolver) fetchValidatorInfoByteSlice(hash []byte, epoch return buff, nil } -func (res *validatorInfoResolver) marshalAndSend(data []byte, pid core.PeerID) error { +func (res *validatorInfoResolver) marshalAndSend(data [][]byte, pid core.PeerID) error { b := &batch.Batch{ - Data: [][]byte{data}, + Data: data, } buff, err := res.marshalizer.Marshal(b) if err != nil { diff --git a/dataRetriever/resolvers/validatorInfoResolver_test.go b/dataRetriever/resolvers/validatorInfoResolver_test.go index b8ecd41239d..993b1aced84 100644 --- a/dataRetriever/resolvers/validatorInfoResolver_test.go +++ b/dataRetriever/resolvers/validatorInfoResolver_test.go @@ -2,6 +2,8 @@ package resolvers_test import ( "errors" + "fmt" + "strings" "testing" 
"github.com/ElrondNetwork/elrond-go-core/core" @@ -21,19 +23,20 @@ import ( func createMockArgValidatorInfoResolver() resolvers.ArgValidatorInfoResolver { return resolvers.ArgValidatorInfoResolver{ - SenderResolver: &mock.TopicResolverSenderStub{}, - Marshaller: &mock.MarshalizerMock{}, - AntifloodHandler: &mock.P2PAntifloodHandlerStub{}, - Throttler: &mock.ThrottlerStub{}, - ValidatorInfoPool: testscommon.NewCacherStub(), - ValidatorInfoStorage: &storage.StorerStub{}, - IsFullHistoryNode: false, + SenderResolver: &mock.TopicResolverSenderStub{}, + Marshaller: &mock.MarshalizerMock{}, + AntifloodHandler: &mock.P2PAntifloodHandlerStub{}, + Throttler: &mock.ThrottlerStub{}, + ValidatorInfoPool: testscommon.NewCacherStub(), + ValidatorInfoStorage: &storage.StorerStub{}, + IsFullHistoryNode: false, + MaxNumOfValidatorInfoInResponse: 5, } } -func createMockValidatorInfo() state.ValidatorInfo { +func createMockValidatorInfo(pk []byte) state.ValidatorInfo { return state.ValidatorInfo{ - PublicKey: []byte("provided pk"), + PublicKey: pk, ShardId: 123, List: string(common.EligibleList), Index: 10, @@ -104,6 +107,16 @@ func TestNewValidatorInfoResolver(t *testing.T) { assert.Equal(t, dataRetriever.ErrNilValidatorInfoStorage, err) assert.True(t, check.IfNil(res)) }) + t.Run("invalid MaxNumOfValidatorInfoInResponse should error", func(t *testing.T) { + t.Parallel() + + args := createMockArgValidatorInfoResolver() + args.MaxNumOfValidatorInfoInResponse = 0 + + res, err := resolvers.NewValidatorInfoResolver(args) + assert.Equal(t, dataRetriever.ErrInvalidNumOfValidatorInfo, err) + assert.True(t, check.IfNil(res)) + }) t.Run("should work", func(t *testing.T) { t.Parallel() @@ -158,6 +171,67 @@ func TestValidatorInfoResolver_RequestDataFromHash(t *testing.T) { }) } +func TestValidatorInfoResolver_RequestDataFromHashArray(t *testing.T) { + t.Parallel() + + t.Run("marshal returns error", func(t *testing.T) { + t.Parallel() + + expectedErr := errors.New("expected err") + args := 
createMockArgValidatorInfoResolver() + args.Marshaller = &testscommon.MarshalizerStub{ + MarshalCalled: func(obj interface{}) ([]byte, error) { + return nil, expectedErr + }, + } + + res, _ := resolvers.NewValidatorInfoResolver(args) + err := res.RequestDataFromHashArray(nil, 0) + assert.Equal(t, expectedErr, err) + }) + t.Run("should error", func(t *testing.T) { + t.Parallel() + + expectedErr := errors.New("expected err") + args := createMockArgValidatorInfoResolver() + args.SenderResolver = &mock.TopicResolverSenderStub{ + SendOnRequestTopicCalled: func(rd *dataRetriever.RequestData, originalHashes [][]byte) error { + return expectedErr + }, + } + + res, _ := resolvers.NewValidatorInfoResolver(args) + err := res.RequestDataFromHashArray(nil, 0) + assert.Equal(t, expectedErr, err) + }) + t.Run("should work", func(t *testing.T) { + t.Parallel() + + providedHashes := [][]byte{[]byte("provided hash")} + providedEpoch := uint32(123) + args := createMockArgValidatorInfoResolver() + args.SenderResolver = &mock.TopicResolverSenderStub{ + SendOnRequestTopicCalled: func(rd *dataRetriever.RequestData, originalHashes [][]byte) error { + assert.Equal(t, providedHashes, originalHashes) + assert.Equal(t, dataRetriever.HashArrayType, rd.Type) + + b := &batch.Batch{} + _ = args.Marshaller.Unmarshal(b, rd.Value) + assert.Equal(t, providedHashes, b.Data) + assert.Equal(t, providedEpoch, rd.Epoch) + + return nil + }, + } + + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + err := res.RequestDataFromHashArray(providedHashes, providedEpoch) + assert.Nil(t, err) + }) +} + func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { t.Parallel() @@ -204,15 +278,18 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashType, nil), fromConnectedPeer) assert.True(t, errors.Is(err, expectedErr)) }) + t.Run("invalid request type should error", func(t 
*testing.T) { t.Parallel() res, _ := resolvers.NewValidatorInfoResolver(createMockArgValidatorInfoResolver()) require.False(t, check.IfNil(res)) - err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashArrayType, []byte("hash")), fromConnectedPeer) + err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.NonceType, []byte("hash")), fromConnectedPeer) assert.True(t, errors.Is(err, dataRetriever.ErrRequestTypeNotImplemented)) }) + + // resolveHashRequest t.Run("data not found in cache and fetchValidatorInfoByteSlice fails when getting data from storage", func(t *testing.T) { t.Parallel() @@ -238,7 +315,7 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { t.Parallel() expectedErr := errors.New("expected err") - marshalizerMock := testscommon.MarshalizerMock{} + marshallerMock := testscommon.MarshalizerMock{} args := createMockArgValidatorInfoResolver() args.ValidatorInfoPool = &testscommon.CacherStub{ GetCalled: func(key []byte) (value interface{}, ok bool) { @@ -250,7 +327,7 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { return nil, expectedErr }, UnmarshalCalled: func(obj interface{}, buff []byte) error { - return marshalizerMock.Unmarshal(obj, buff) + return marshallerMock.Unmarshal(obj, buff) }, } res, _ := resolvers.NewValidatorInfoResolver(args) @@ -263,7 +340,7 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { t.Parallel() expectedErr := errors.New("expected err") - marshalizerMock := testscommon.MarshalizerMock{} + marshallerMock := testscommon.MarshalizerMock{} args := createMockArgValidatorInfoResolver() args.ValidatorInfoPool = &testscommon.CacherStub{ GetCalled: func(key []byte) (value interface{}, ok bool) { @@ -280,7 +357,7 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { return nil, expectedErr }, UnmarshalCalled: func(obj interface{}, buff []byte) error { - return marshalizerMock.Unmarshal(obj, buff) + return 
marshallerMock.Unmarshal(obj, buff) }, } res, _ := resolvers.NewValidatorInfoResolver(args) @@ -293,7 +370,7 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { t.Parallel() wasCalled := false - providedValue := createMockValidatorInfo() + providedValue := createMockValidatorInfo([]byte("provided pk")) args := createMockArgValidatorInfoResolver() args.ValidatorInfoPool = &testscommon.CacherStub{ GetCalled: func(key []byte) (value interface{}, ok bool) { @@ -302,12 +379,12 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { } args.SenderResolver = &mock.TopicResolverSenderStub{ SendCalled: func(buff []byte, peer core.PeerID) error { - marshalizerMock := testscommon.MarshalizerMock{} + marshallerMock := testscommon.MarshalizerMock{} b := &batch.Batch{} - _ = marshalizerMock.Unmarshal(b, buff) + _ = marshallerMock.Unmarshal(b, buff) vi := &state.ValidatorInfo{} - _ = marshalizerMock.Unmarshal(vi, b.Data[0]) + _ = marshallerMock.Unmarshal(vi, b.Data[0]) assert.Equal(t, &providedValue, vi) wasCalled = true @@ -326,7 +403,7 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { t.Parallel() wasCalled := false - providedValue := createMockValidatorInfo() + providedValue := createMockValidatorInfo([]byte("provided pk")) args := createMockArgValidatorInfoResolver() args.ValidatorInfoPool = &testscommon.CacherStub{ GetCalled: func(key []byte) (value interface{}, ok bool) { @@ -335,18 +412,18 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { } args.ValidatorInfoStorage = &storage.StorerStub{ SearchFirstCalled: func(key []byte) ([]byte, error) { - marshalizerMock := testscommon.MarshalizerMock{} - return marshalizerMock.Marshal(providedValue) + marshallerMock := testscommon.MarshalizerMock{} + return marshallerMock.Marshal(providedValue) }, } args.SenderResolver = &mock.TopicResolverSenderStub{ SendCalled: func(buff []byte, peer core.PeerID) error { - marshalizerMock := 
testscommon.MarshalizerMock{} + marshallerMock := testscommon.MarshalizerMock{} b := &batch.Batch{} - _ = marshalizerMock.Unmarshal(b, buff) + _ = marshallerMock.Unmarshal(b, buff) vi := &state.ValidatorInfo{} - _ = marshalizerMock.Unmarshal(vi, b.Data[0]) + _ = marshallerMock.Unmarshal(vi, b.Data[0]) assert.Equal(t, &providedValue, vi) wasCalled = true @@ -361,6 +438,159 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { assert.Nil(t, err) assert.True(t, wasCalled) }) + + // resolveMultipleHashesRequest + t.Run("unmarshal fails", func(t *testing.T) { + t.Parallel() + + numCalls := 0 + expectedErr := errors.New("expected err") + args := createMockArgValidatorInfoResolver() + args.Marshaller = &testscommon.MarshalizerStub{ + UnmarshalCalled: func(obj interface{}, buff []byte) error { + marshallerMock := testscommon.MarshalizerMock{} + if numCalls < 1 { + numCalls++ + return marshallerMock.Unmarshal(obj, buff) + } + + return expectedErr + }, + } + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashArrayType, []byte("hash")), fromConnectedPeer) + assert.Equal(t, expectedErr, err) + }) + t.Run("no hash found", func(t *testing.T) { + t.Parallel() + + args := createMockArgValidatorInfoResolver() + args.ValidatorInfoPool = &testscommon.CacherStub{ + GetCalled: func(key []byte) (value interface{}, ok bool) { + return nil, false + }, + } + args.ValidatorInfoStorage = &storage.StorerStub{ + SearchFirstCalled: func(key []byte) ([]byte, error) { + return nil, errors.New("not found") + }, + } + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + b := &batch.Batch{ + Data: [][]byte{[]byte("hash")}, + } + buff, _ := args.Marshaller.Marshal(b) + err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashArrayType, buff), fromConnectedPeer) + assert.True(t, strings.Contains(err.Error(), 
dataRetriever.ErrValidatorInfoNotFound.Error())) + }) + t.Run("enough hashes for one chunk should work", func(t *testing.T) { + t.Parallel() + + wasCalled := false + numOfProvidedData := 3 + providedHashes := make([][]byte, 0) + providedData := make([]state.ValidatorInfo, 0) + for i := 0; i < numOfProvidedData; i++ { + hashStr := fmt.Sprintf("hash%d", i) + providedHashes = append(providedHashes, []byte(hashStr)) + pkStr := fmt.Sprintf("pk%d", i) + providedData = append(providedData, createMockValidatorInfo([]byte(pkStr))) + } + args := createMockArgValidatorInfoResolver() + numOfCalls := 0 + args.ValidatorInfoPool = &testscommon.CacherStub{ + GetCalled: func(key []byte) (value interface{}, ok bool) { + val := providedData[numOfCalls] + numOfCalls++ + return val, true + }, + } + args.SenderResolver = &mock.TopicResolverSenderStub{ + SendCalled: func(buff []byte, peer core.PeerID) error { + marshallerMock := testscommon.MarshalizerMock{} + b := &batch.Batch{} + _ = marshallerMock.Unmarshal(b, buff) + assert.Equal(t, numOfProvidedData, len(b.Data)) + + for i := 0; i < numOfProvidedData; i++ { + vi := &state.ValidatorInfo{} + _ = marshallerMock.Unmarshal(vi, b.Data[i]) + + assert.Equal(t, &providedData[i], vi) + } + + wasCalled = true + return nil + }, + } + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + buff, _ := args.Marshaller.Marshal(&batch.Batch{Data: providedHashes}) + err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashArrayType, buff), fromConnectedPeer) + assert.Nil(t, err) + assert.True(t, wasCalled) + }) + t.Run("multiple chunks should work", func(t *testing.T) { + t.Parallel() + + args := createMockArgValidatorInfoResolver() + numOfProvidedData := 2*args.MaxNumOfValidatorInfoInResponse + 2 // 2 chunks of 5 + 1 chunk of 2 + providedHashes := make([][]byte, 0) + providedData := make([]state.ValidatorInfo, 0) + for i := 0; i < numOfProvidedData; i++ { + hashStr := fmt.Sprintf("hash%d", i) + 
providedHashes = append(providedHashes, []byte(hashStr)) + pkStr := fmt.Sprintf("pk%d", i) + providedData = append(providedData, createMockValidatorInfo([]byte(pkStr))) + } + numOfCalls := 0 + args.ValidatorInfoPool = &testscommon.CacherStub{ + GetCalled: func(key []byte) (value interface{}, ok bool) { + val := providedData[numOfCalls] + numOfCalls++ + return val, true + }, + } + numOfCallsSend := 0 + args.SenderResolver = &mock.TopicResolverSenderStub{ + SendCalled: func(buff []byte, peer core.PeerID) error { + marshallerMock := testscommon.MarshalizerMock{} + b := &batch.Batch{} + _ = marshallerMock.Unmarshal(b, buff) + + expectedLen := args.MaxNumOfValidatorInfoInResponse + if numOfCallsSend == 2 { + expectedLen = numOfProvidedData % args.MaxNumOfValidatorInfoInResponse + } + dataLen := len(b.Data) + assert.Equal(t, expectedLen, dataLen) + + for i := 0; i < dataLen; i++ { + vi := &state.ValidatorInfo{} + _ = marshallerMock.Unmarshal(vi, b.Data[i]) + + indexInProvidedData := numOfCallsSend*args.MaxNumOfValidatorInfoInResponse + i + assert.Equal(t, &providedData[indexInProvidedData], vi) + } + + numOfCallsSend++ + return nil + }, + } + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + buff, _ := args.Marshaller.Marshal(&batch.Batch{Data: providedHashes}) + err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashArrayType, buff), fromConnectedPeer) + assert.Nil(t, err) + expectedNumOfCalls := numOfProvidedData/args.MaxNumOfValidatorInfoInResponse + 1 + assert.Equal(t, expectedNumOfCalls, numOfCallsSend) + }) } func TestValidatorInfoResolver_SetResolverDebugHandler(t *testing.T) { diff --git a/epochStart/bootstrap/process.go b/epochStart/bootstrap/process.go index d4da4fed0f6..81f0fd4f6a8 100644 --- a/epochStart/bootstrap/process.go +++ b/epochStart/bootstrap/process.go @@ -1126,22 +1126,23 @@ func (e *epochStartBootstrap) createRequestHandler() error { // this one should only be used before determining the 
correct shard where the node should reside log.Debug("epochStartBootstrap.createRequestHandler", "shard", e.shardCoordinator.SelfId()) resolversContainerArgs := resolverscontainer.FactoryArgs{ - ShardCoordinator: e.shardCoordinator, - Messenger: e.messenger, - Store: storageService, - Marshalizer: e.coreComponentsHolder.InternalMarshalizer(), - DataPools: e.dataPool, - Uint64ByteSliceConverter: uint64ByteSlice.NewBigEndianConverter(), - NumConcurrentResolvingJobs: 10, - DataPacker: dataPacker, - TriesContainer: e.trieContainer, - SizeCheckDelta: 0, - InputAntifloodHandler: disabled.NewAntiFloodHandler(), - OutputAntifloodHandler: disabled.NewAntiFloodHandler(), - CurrentNetworkEpochProvider: disabled.NewCurrentNetworkEpochProviderHandler(), - PreferredPeersHolder: disabled.NewPreferredPeersHolder(), - ResolverConfig: e.generalConfig.Resolvers, - PeersRatingHandler: disabled.NewDisabledPeersRatingHandler(), + ShardCoordinator: e.shardCoordinator, + Messenger: e.messenger, + Store: storageService, + Marshalizer: e.coreComponentsHolder.InternalMarshalizer(), + DataPools: e.dataPool, + Uint64ByteSliceConverter: uint64ByteSlice.NewBigEndianConverter(), + NumConcurrentResolvingJobs: 10, + DataPacker: dataPacker, + TriesContainer: e.trieContainer, + SizeCheckDelta: 0, + InputAntifloodHandler: disabled.NewAntiFloodHandler(), + OutputAntifloodHandler: disabled.NewAntiFloodHandler(), + CurrentNetworkEpochProvider: disabled.NewCurrentNetworkEpochProviderHandler(), + PreferredPeersHolder: disabled.NewPreferredPeersHolder(), + ResolverConfig: e.generalConfig.Resolvers, + PeersRatingHandler: disabled.NewDisabledPeersRatingHandler(), + MaxNumOfValidatorInfoInResponse: e.generalConfig.ValidatorInfo.MaxNumOfValidatorInfoInResponse, } resolverFactory, err := resolverscontainer.NewMetaResolversContainerFactory(resolversContainerArgs) if err != nil { diff --git a/epochStart/bootstrap/process_test.go b/epochStart/bootstrap/process_test.go index e879ed62e6b..6ae861e9171 100644 --- 
a/epochStart/bootstrap/process_test.go +++ b/epochStart/bootstrap/process_test.go @@ -179,6 +179,9 @@ func createMockEpochStartBootstrapArgs( Shards: 10, }, Resolvers: generalCfg.Resolvers, + ValidatorInfo: config.ValidatorInfoConfig{ + MaxNumOfValidatorInfoInResponse: 5, + }, }, EconomicsData: &economicsmocks.EconomicsHandlerStub{ MinGasPriceCalled: func() uint64 { diff --git a/factory/processComponents.go b/factory/processComponents.go index 74026921159..e7f2c65cd25 100644 --- a/factory/processComponents.go +++ b/factory/processComponents.go @@ -1025,23 +1025,24 @@ func (pcf *processComponentsFactory) newShardResolverContainerFactory( } resolversContainerFactoryArgs := resolverscontainer.FactoryArgs{ - ShardCoordinator: pcf.bootstrapComponents.ShardCoordinator(), - Messenger: pcf.network.NetworkMessenger(), - Store: pcf.data.StorageService(), - Marshalizer: pcf.coreData.InternalMarshalizer(), - DataPools: pcf.data.Datapool(), - Uint64ByteSliceConverter: pcf.coreData.Uint64ByteSliceConverter(), - DataPacker: dataPacker, - TriesContainer: pcf.state.TriesContainer(), - SizeCheckDelta: pcf.config.Marshalizer.SizeCheckDelta, - InputAntifloodHandler: pcf.network.InputAntiFloodHandler(), - OutputAntifloodHandler: pcf.network.OutputAntiFloodHandler(), - NumConcurrentResolvingJobs: pcf.config.Antiflood.NumConcurrentResolverJobs, - IsFullHistoryNode: pcf.prefConfigs.FullArchive, - CurrentNetworkEpochProvider: currentEpochProvider, - ResolverConfig: pcf.config.Resolvers, - PreferredPeersHolder: pcf.network.PreferredPeersHolderHandler(), - PeersRatingHandler: pcf.network.PeersRatingHandler(), + ShardCoordinator: pcf.bootstrapComponents.ShardCoordinator(), + Messenger: pcf.network.NetworkMessenger(), + Store: pcf.data.StorageService(), + Marshalizer: pcf.coreData.InternalMarshalizer(), + DataPools: pcf.data.Datapool(), + Uint64ByteSliceConverter: pcf.coreData.Uint64ByteSliceConverter(), + DataPacker: dataPacker, + TriesContainer: pcf.state.TriesContainer(), + SizeCheckDelta: 
pcf.config.Marshalizer.SizeCheckDelta, + InputAntifloodHandler: pcf.network.InputAntiFloodHandler(), + OutputAntifloodHandler: pcf.network.OutputAntiFloodHandler(), + NumConcurrentResolvingJobs: pcf.config.Antiflood.NumConcurrentResolverJobs, + IsFullHistoryNode: pcf.prefConfigs.FullArchive, + CurrentNetworkEpochProvider: currentEpochProvider, + ResolverConfig: pcf.config.Resolvers, + PreferredPeersHolder: pcf.network.PreferredPeersHolderHandler(), + PeersRatingHandler: pcf.network.PeersRatingHandler(), + MaxNumOfValidatorInfoInResponse: pcf.config.ValidatorInfo.MaxNumOfValidatorInfoInResponse, } resolversContainerFactory, err := resolverscontainer.NewShardResolversContainerFactory(resolversContainerFactoryArgs) if err != nil { @@ -1061,23 +1062,24 @@ func (pcf *processComponentsFactory) newMetaResolverContainerFactory( } resolversContainerFactoryArgs := resolverscontainer.FactoryArgs{ - ShardCoordinator: pcf.bootstrapComponents.ShardCoordinator(), - Messenger: pcf.network.NetworkMessenger(), - Store: pcf.data.StorageService(), - Marshalizer: pcf.coreData.InternalMarshalizer(), - DataPools: pcf.data.Datapool(), - Uint64ByteSliceConverter: pcf.coreData.Uint64ByteSliceConverter(), - DataPacker: dataPacker, - TriesContainer: pcf.state.TriesContainer(), - SizeCheckDelta: pcf.config.Marshalizer.SizeCheckDelta, - InputAntifloodHandler: pcf.network.InputAntiFloodHandler(), - OutputAntifloodHandler: pcf.network.OutputAntiFloodHandler(), - NumConcurrentResolvingJobs: pcf.config.Antiflood.NumConcurrentResolverJobs, - IsFullHistoryNode: pcf.prefConfigs.FullArchive, - CurrentNetworkEpochProvider: currentEpochProvider, - ResolverConfig: pcf.config.Resolvers, - PreferredPeersHolder: pcf.network.PreferredPeersHolderHandler(), - PeersRatingHandler: pcf.network.PeersRatingHandler(), + ShardCoordinator: pcf.bootstrapComponents.ShardCoordinator(), + Messenger: pcf.network.NetworkMessenger(), + Store: pcf.data.StorageService(), + Marshalizer: pcf.coreData.InternalMarshalizer(), + 
DataPools: pcf.data.Datapool(), + Uint64ByteSliceConverter: pcf.coreData.Uint64ByteSliceConverter(), + DataPacker: dataPacker, + TriesContainer: pcf.state.TriesContainer(), + SizeCheckDelta: pcf.config.Marshalizer.SizeCheckDelta, + InputAntifloodHandler: pcf.network.InputAntiFloodHandler(), + OutputAntifloodHandler: pcf.network.OutputAntiFloodHandler(), + NumConcurrentResolvingJobs: pcf.config.Antiflood.NumConcurrentResolverJobs, + IsFullHistoryNode: pcf.prefConfigs.FullArchive, + CurrentNetworkEpochProvider: currentEpochProvider, + ResolverConfig: pcf.config.Resolvers, + PreferredPeersHolder: pcf.network.PreferredPeersHolderHandler(), + PeersRatingHandler: pcf.network.PeersRatingHandler(), + MaxNumOfValidatorInfoInResponse: pcf.config.ValidatorInfo.MaxNumOfValidatorInfoInResponse, } resolversContainerFactory, err := resolverscontainer.NewMetaResolversContainerFactory(resolversContainerFactoryArgs) if err != nil { diff --git a/factory/stateComponents_test.go b/factory/stateComponents_test.go index 1928827e2d0..83041b8da8a 100644 --- a/factory/stateComponents_test.go +++ b/factory/stateComponents_test.go @@ -231,6 +231,9 @@ func getGeneralConfig() config.Config { Type: "LRU", Shards: 1, }, + ValidatorInfo: config.ValidatorInfoConfig{ + MaxNumOfValidatorInfoInResponse: 5, + }, } } diff --git a/genesis/process/disabled/requestHandler.go b/genesis/process/disabled/requestHandler.go index cfac6ba21c7..eec780cdc0a 100644 --- a/genesis/process/disabled/requestHandler.go +++ b/genesis/process/disabled/requestHandler.go @@ -82,6 +82,10 @@ func (r *RequestHandler) CreateTrieNodeIdentifier(_ []byte, _ uint32) []byte { func (r *RequestHandler) RequestValidatorInfo(_ []byte) { } +// RequestValidatorsInfo - +func (r *RequestHandler) RequestValidatorsInfo(_ [][]byte) { +} + // IsInterfaceNil returns true if there is no value under the interface func (r *RequestHandler) IsInterfaceNil() bool { return r == nil diff --git a/integrationTests/testProcessorNode.go 
b/integrationTests/testProcessorNode.go index bf1b72c3290..8c8f952dd26 100644 --- a/integrationTests/testProcessorNode.go +++ b/integrationTests/testProcessorNode.go @@ -1348,7 +1348,8 @@ func (tpn *TestProcessorNode) initResolvers() { NumIntraShardPeers: 1, NumFullHistoryPeers: 3, }, - PeersRatingHandler: tpn.PeersRatingHandler, + PeersRatingHandler: tpn.PeersRatingHandler, + MaxNumOfValidatorInfoInResponse: 5, } var err error diff --git a/process/interface.go b/process/interface.go index bd88b1ed89c..2353332b2a9 100644 --- a/process/interface.go +++ b/process/interface.go @@ -550,6 +550,7 @@ type RequestHandler interface { RequestTrieNode(requestHash []byte, topic string, chunkIndex uint32) CreateTrieNodeIdentifier(requestHash []byte, chunkIndex uint32) []byte RequestValidatorInfo(hash []byte) + RequestValidatorsInfo(hashes [][]byte) IsInterfaceNil() bool } diff --git a/testscommon/generalConfig.go b/testscommon/generalConfig.go index cbbb0bbaaed..cc43fcfe151 100644 --- a/testscommon/generalConfig.go +++ b/testscommon/generalConfig.go @@ -399,6 +399,9 @@ func GetGeneralConfig() config.Config { TopRatedCacheCapacity: 1000, BadRatedCacheCapacity: 1000, }, + ValidatorInfo: config.ValidatorInfoConfig{ + MaxNumOfValidatorInfoInResponse: 5, + }, } } diff --git a/testscommon/requestHandlerStub.go b/testscommon/requestHandlerStub.go index f4cf2b23d5f..8c1509d9cad 100644 --- a/testscommon/requestHandlerStub.go +++ b/testscommon/requestHandlerStub.go @@ -20,6 +20,7 @@ type RequestHandlerStub struct { RequestTrieNodeCalled func(requestHash []byte, topic string, chunkIndex uint32) CreateTrieNodeIdentifierCalled func(requestHash []byte, chunkIndex uint32) []byte RequestValidatorInfoCalled func(hash []byte) + RequestValidatorsInfoCalled func(hashes [][]byte) } // SetNumPeersToQuery - @@ -160,6 +161,13 @@ func (rhs *RequestHandlerStub) RequestValidatorInfo(hash []byte) { } } +// RequestValidatorsInfo - +func (rhs *RequestHandlerStub) RequestValidatorsInfo(hashes [][]byte) { + 
if rhs.RequestValidatorsInfoCalled != nil { + rhs.RequestValidatorsInfoCalled(hashes) + } +} + // IsInterfaceNil returns true if there is no value under the interface func (rhs *RequestHandlerStub) IsInterfaceNil() bool { return rhs == nil From d148cc5d382d78df30bfdf98456d6f85a2934234 Mon Sep 17 00:00:00 2001 From: Sorin Stanculeanu Date: Thu, 5 May 2022 10:27:52 +0300 Subject: [PATCH 20/70] fixed tests --- .../resolvers/validatorInfoResolver_test.go | 32 +++++++++++++------ 1 file changed, 23 insertions(+), 9 deletions(-) diff --git a/dataRetriever/resolvers/validatorInfoResolver_test.go b/dataRetriever/resolvers/validatorInfoResolver_test.go index 993b1aced84..470cf64710a 100644 --- a/dataRetriever/resolvers/validatorInfoResolver_test.go +++ b/dataRetriever/resolvers/validatorInfoResolver_test.go @@ -16,6 +16,7 @@ import ( "github.com/ElrondNetwork/elrond-go/p2p" "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/testscommon" + "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" "github.com/ElrondNetwork/elrond-go/testscommon/storage" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -542,11 +543,20 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { numOfProvidedData := 2*args.MaxNumOfValidatorInfoInResponse + 2 // 2 chunks of 5 + 1 chunk of 2 providedHashes := make([][]byte, 0) providedData := make([]state.ValidatorInfo, 0) + testHasher := hashingMocks.HasherMock{} + testMarshaller := testscommon.MarshalizerMock{} + providedDataMap := make(map[string]struct{}, 0) for i := 0; i < numOfProvidedData; i++ { hashStr := fmt.Sprintf("hash%d", i) providedHashes = append(providedHashes, []byte(hashStr)) pkStr := fmt.Sprintf("pk%d", i) - providedData = append(providedData, createMockValidatorInfo([]byte(pkStr))) + newValidatorInfo := createMockValidatorInfo([]byte(pkStr)) + providedData = append(providedData, newValidatorInfo) + + buff, err := testMarshaller.Marshal(newValidatorInfo) + 
require.Nil(t, err) + hash := testHasher.Compute(string(buff)) + providedDataMap[string(hash)] = struct{}{} } numOfCalls := 0 args.ValidatorInfoPool = &testscommon.CacherStub{ @@ -559,23 +569,23 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { numOfCallsSend := 0 args.SenderResolver = &mock.TopicResolverSenderStub{ SendCalled: func(buff []byte, peer core.PeerID) error { + println(numOfCallsSend) marshallerMock := testscommon.MarshalizerMock{} b := &batch.Batch{} _ = marshallerMock.Unmarshal(b, buff) - expectedLen := args.MaxNumOfValidatorInfoInResponse - if numOfCallsSend == 2 { - expectedLen = numOfProvidedData % args.MaxNumOfValidatorInfoInResponse - } dataLen := len(b.Data) - assert.Equal(t, expectedLen, dataLen) + assert.True(t, dataLen <= args.MaxNumOfValidatorInfoInResponse) for i := 0; i < dataLen; i++ { vi := &state.ValidatorInfo{} _ = marshallerMock.Unmarshal(vi, b.Data[i]) - indexInProvidedData := numOfCallsSend*args.MaxNumOfValidatorInfoInResponse + i - assert.Equal(t, &providedData[indexInProvidedData], vi) + // remove this info from the provided map + buff, err := testMarshaller.Marshal(vi) + require.Nil(t, err) + hash := testHasher.Compute(string(buff)) + delete(providedDataMap, string(hash)) } numOfCallsSend++ @@ -588,8 +598,12 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { buff, _ := args.Marshaller.Marshal(&batch.Batch{Data: providedHashes}) err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashArrayType, buff), fromConnectedPeer) assert.Nil(t, err) - expectedNumOfCalls := numOfProvidedData/args.MaxNumOfValidatorInfoInResponse + 1 + expectedNumOfCalls := numOfProvidedData / args.MaxNumOfValidatorInfoInResponse + if numOfProvidedData%args.MaxNumOfValidatorInfoInResponse != 0 { + expectedNumOfCalls++ + } assert.Equal(t, expectedNumOfCalls, numOfCallsSend) + assert.Equal(t, 0, len(providedDataMap)) // all items should have been deleted on Send }) } From 
8debf65829e79740258a0a545eb6ea56f777039d Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Fri, 6 May 2022 00:09:02 +0300 Subject: [PATCH 21/70] * Implemented missing methods in epoch validator info creator --- epochStart/interface.go | 14 +- epochStart/metachain/baseRewards.go | 36 ++--- epochStart/metachain/rewardsCreatorProxy.go | 18 +-- epochStart/metachain/validators.go | 135 ++++++++++++++---- epochStart/mock/rewardsCreatorStub.go | 30 ++-- .../mock/epochRewardsCreatorStub.go | 40 +++--- .../mock/epochValidatorInfoCreatorStub.go | 59 +++++--- process/block/metablock.go | 8 +- process/interface.go | 15 +- process/mock/epochRewardsCreatorStub.go | 40 +++--- process/mock/epochValidatorInfoCreatorStub.go | 59 +++++--- 11 files changed, 301 insertions(+), 153 deletions(-) diff --git a/epochStart/interface.go b/epochStart/interface.go index ba3e5972fc9..82c285a2630 100644 --- a/epochStart/interface.go +++ b/epochStart/interface.go @@ -148,12 +148,18 @@ type ManualEpochStartNotifier interface { IsInterfaceNil() bool } -// TransactionCacher defines the methods for the local cacher, info for current round +// TransactionCacher defines the methods for the local transaction cacher, needed for the current block type TransactionCacher interface { GetTx(txHash []byte) (data.TransactionHandler, error) IsInterfaceNil() bool } +// ValidatorInfoCacher defines the methods for the local validator info cacher, needed for the current block +type ValidatorInfoCacher interface { + GetValidatorInfo(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) + IsInterfaceNil() bool +} + // StakingDataProvider is able to provide staking data from the system smart contracts type StakingDataProvider interface { GetTotalStakeEligibleNodes() *big.Int @@ -191,10 +197,10 @@ type RewardsCreator interface { ) error GetProtocolSustainabilityRewards() *big.Int GetLocalTxCache() TransactionCacher - CreateMarshalizedData(body *block.Body) map[string][][]byte + CreateMarshalledData(body 
*block.Body) map[string][][]byte GetRewardsTxs(body *block.Body) map[string]data.TransactionHandler - SaveTxBlockToStorage(metaBlock data.MetaHeaderHandler, body *block.Body) - DeleteTxsFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) + SaveBlockDataToStorage(metaBlock data.MetaHeaderHandler, body *block.Body) + DeleteBlockDataFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) RemoveBlockDataFromPools(metaBlock data.MetaHeaderHandler, body *block.Body) IsInterfaceNil() bool } diff --git a/epochStart/metachain/baseRewards.go b/epochStart/metachain/baseRewards.go index 15bb63a1ad3..271e803cc15 100644 --- a/epochStart/metachain/baseRewards.go +++ b/epochStart/metachain/baseRewards.go @@ -117,13 +117,13 @@ func (brc *baseRewardsCreator) GetLocalTxCache() epochStart.TransactionCacher { return brc.currTxs } -// CreateMarshalizedData creates the marshalized data to be sent to shards -func (brc *baseRewardsCreator) CreateMarshalizedData(body *block.Body) map[string][][]byte { +// CreateMarshalledData creates the marshalled data to be sent to shards +func (brc *baseRewardsCreator) CreateMarshalledData(body *block.Body) map[string][][]byte { if check.IfNil(body) { return nil } - mrsTxs := make(map[string][][]byte) + marshalledRewardsTxs := make(map[string][][]byte) for _, miniBlock := range body.MiniBlocks { if miniBlock.Type != block.RewardsBlock { @@ -135,35 +135,35 @@ func (brc *baseRewardsCreator) CreateMarshalizedData(body *block.Body) map[strin } broadcastTopic := createBroadcastTopic(brc.shardCoordinator, miniBlock.ReceiverShardID) - if _, ok := mrsTxs[broadcastTopic]; !ok { - mrsTxs[broadcastTopic] = make([][]byte, 0, len(miniBlock.TxHashes)) + if _, ok := marshalledRewardsTxs[broadcastTopic]; !ok { + marshalledRewardsTxs[broadcastTopic] = make([][]byte, 0, len(miniBlock.TxHashes)) } for _, txHash := range miniBlock.TxHashes { rwdTx, err := brc.currTxs.GetTx(txHash) if err != nil { - log.Error("rewardsCreator.CreateMarshalizedData.GetTx", 
"hash", txHash, "error", err) + log.Error("rewardsCreator.CreateMarshalledData.GetTx", "hash", txHash, "error", err) continue } - marshalizedData, err := brc.marshalizer.Marshal(rwdTx) + marshalledData, err := brc.marshalizer.Marshal(rwdTx) if err != nil { - log.Error("rewardsCreator.CreateMarshalizedData.Marshal", "hash", txHash, "error", err) + log.Error("rewardsCreator.CreateMarshalledData.Marshal", "hash", txHash, "error", err) continue } - mrsTxs[broadcastTopic] = append(mrsTxs[broadcastTopic], marshalizedData) + marshalledRewardsTxs[broadcastTopic] = append(marshalledRewardsTxs[broadcastTopic], marshalledData) } - if len(mrsTxs[broadcastTopic]) == 0 { - delete(mrsTxs, broadcastTopic) + if len(marshalledRewardsTxs[broadcastTopic]) == 0 { + delete(marshalledRewardsTxs, broadcastTopic) } } - return mrsTxs + return marshalledRewardsTxs } -// GetRewardsTxs will return rewards txs MUST be called before SaveTxBlockToStorage +// GetRewardsTxs returns rewards txs for the current block func (brc *baseRewardsCreator) GetRewardsTxs(body *block.Body) map[string]data.TransactionHandler { rewardsTxs := make(map[string]data.TransactionHandler) for _, miniBlock := range body.MiniBlocks { @@ -184,8 +184,8 @@ func (brc *baseRewardsCreator) GetRewardsTxs(body *block.Body) map[string]data.T return rewardsTxs } -// SaveTxBlockToStorage saves created data to storage -func (brc *baseRewardsCreator) SaveTxBlockToStorage(_ data.MetaHeaderHandler, body *block.Body) { +// SaveBlockDataToStorage saves block data to storage +func (brc *baseRewardsCreator) SaveBlockDataToStorage(_ data.MetaHeaderHandler, body *block.Body) { if check.IfNil(body) { return } @@ -219,8 +219,8 @@ func (brc *baseRewardsCreator) SaveTxBlockToStorage(_ data.MetaHeaderHandler, bo } } -// DeleteTxsFromStorage deletes data from storage -func (brc *baseRewardsCreator) DeleteTxsFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { +// DeleteBlockDataFromStorage deletes block data from storage +func (brc 
*baseRewardsCreator) DeleteBlockDataFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { if check.IfNil(metaBlock) || check.IfNil(body) { return } @@ -242,7 +242,7 @@ func (brc *baseRewardsCreator) DeleteTxsFromStorage(metaBlock data.MetaHeaderHan } } -// RemoveBlockDataFromPools removes block info from pools +// RemoveBlockDataFromPools removes block data from pools func (brc *baseRewardsCreator) RemoveBlockDataFromPools(metaBlock data.MetaHeaderHandler, body *block.Body) { if check.IfNil(metaBlock) || check.IfNil(body) { return diff --git a/epochStart/metachain/rewardsCreatorProxy.go b/epochStart/metachain/rewardsCreatorProxy.go index 0fc7feebd75..33187c1fe09 100644 --- a/epochStart/metachain/rewardsCreatorProxy.go +++ b/epochStart/metachain/rewardsCreatorProxy.go @@ -101,9 +101,9 @@ func (rcp *rewardsCreatorProxy) GetLocalTxCache() epochStart.TransactionCacher { return rcp.rc.GetLocalTxCache() } -// CreateMarshalizedData proxies the same method of the configured rewardsCreator instance -func (rcp *rewardsCreatorProxy) CreateMarshalizedData(body *block.Body) map[string][][]byte { - return rcp.rc.CreateMarshalizedData(body) +// CreateMarshalledData proxies the same method of the configured rewardsCreator instance +func (rcp *rewardsCreatorProxy) CreateMarshalledData(body *block.Body) map[string][][]byte { + return rcp.rc.CreateMarshalledData(body) } // GetRewardsTxs proxies the same method of the configured rewardsCreator instance @@ -111,14 +111,14 @@ func (rcp *rewardsCreatorProxy) GetRewardsTxs(body *block.Body) map[string]data. 
return rcp.rc.GetRewardsTxs(body) } -// SaveTxBlockToStorage proxies the same method of the configured rewardsCreator instance -func (rcp *rewardsCreatorProxy) SaveTxBlockToStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { - rcp.rc.SaveTxBlockToStorage(metaBlock, body) +// SaveBlockDataToStorage proxies the same method of the configured rewardsCreator instance +func (rcp *rewardsCreatorProxy) SaveBlockDataToStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { + rcp.rc.SaveBlockDataToStorage(metaBlock, body) } -// DeleteTxsFromStorage proxies the same method of the configured rewardsCreator instance -func (rcp *rewardsCreatorProxy) DeleteTxsFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { - rcp.rc.DeleteTxsFromStorage(metaBlock, body) +// DeleteBlockDataFromStorage proxies the same method of the configured rewardsCreator instance +func (rcp *rewardsCreatorProxy) DeleteBlockDataFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { + rcp.rc.DeleteBlockDataFromStorage(metaBlock, body) } // RemoveBlockDataFromPools proxies the same method of the configured rewardsCreator instance diff --git a/epochStart/metachain/validators.go b/epochStart/metachain/validators.go index 3c446b94136..43de0d161a7 100644 --- a/epochStart/metachain/validators.go +++ b/epochStart/metachain/validators.go @@ -78,7 +78,7 @@ func NewValidatorInfoCreator(args ArgsNewValidatorInfoCreator) (*validatorInfoCr return vic, nil } -// CreateValidatorInfoMiniBlocks creates the validatorInfo miniblocks according to the provided validatorInfo map +// CreateValidatorInfoMiniBlocks creates the validatorInfo mini blocks according to the provided validatorInfo map func (vic *validatorInfoCreator) CreateValidatorInfoMiniBlocks(validatorsInfo map[uint32][]*state.ValidatorInfo) (block.MiniBlockSlice, error) { if validatorsInfo == nil { return nil, epochStart.ErrNilValidatorInfo @@ -89,7 +89,7 @@ func (vic *validatorInfoCreator) 
CreateValidatorInfoMiniBlocks(validatorsInfo ma vic.clean() - miniblocks := make([]*block.MiniBlock, 0) + miniBlocks := make([]*block.MiniBlock, 0) for shardId := uint32(0); shardId < vic.shardCoordinator.NumberOfShards(); shardId++ { validators := validatorsInfo[shardId] @@ -102,12 +102,12 @@ func (vic *validatorInfoCreator) CreateValidatorInfoMiniBlocks(validatorsInfo ma return nil, err } - miniblocks = append(miniblocks, miniBlock) + miniBlocks = append(miniBlocks, miniBlock) } validators := validatorsInfo[core.MetachainShardId] if len(validators) == 0 { - return miniblocks, nil + return miniBlocks, nil } miniBlock, err := vic.createMiniBlock(validators) @@ -115,13 +115,9 @@ func (vic *validatorInfoCreator) CreateValidatorInfoMiniBlocks(validatorsInfo ma return nil, err } - miniblocks = append(miniblocks, miniBlock) + miniBlocks = append(miniBlocks, miniBlock) - return miniblocks, nil -} - -func (vic *validatorInfoCreator) clean() { - vic.currValidatorInfo.Clean() + return miniBlocks, nil } func (vic *validatorInfoCreator) createMiniBlock(validatorsInfo []*state.ValidatorInfo) (*block.MiniBlock, error) { @@ -160,12 +156,12 @@ func createShardValidatorInfo(validator *state.ValidatorInfo) *state.ShardValida } } -// VerifyValidatorInfoMiniBlocks verifies if received validatorinfo miniblocks are correct +// VerifyValidatorInfoMiniBlocks verifies if received validator info mini blocks are correct func (vic *validatorInfoCreator) VerifyValidatorInfoMiniBlocks( - miniblocks []*block.MiniBlock, + miniBlocks []*block.MiniBlock, validatorsInfo map[uint32][]*state.ValidatorInfo, ) error { - if len(miniblocks) == 0 { + if len(miniBlocks) == 0 { return epochStart.ErrNilMiniblocks } @@ -186,7 +182,7 @@ func (vic *validatorInfoCreator) VerifyValidatorInfoMiniBlocks( numReceivedValidatorInfoMBs := 0 var receivedMbHash []byte - for _, receivedMb := range miniblocks { + for _, receivedMb := range miniBlocks { if receivedMb == nil { return epochStart.ErrNilMiniblock } @@ -203,7 
+199,7 @@ func (vic *validatorInfoCreator) VerifyValidatorInfoMiniBlocks( _, ok := hashesToMiniBlocks[string(receivedMbHash)] if !ok { - // TODO: add display debug prints of miniblocks contents + // TODO: add display debug prints of mini blocks contents return epochStart.ErrValidatorMiniBlockHashDoesNotMatch } } @@ -215,8 +211,80 @@ func (vic *validatorInfoCreator) VerifyValidatorInfoMiniBlocks( return nil } -// SaveValidatorInfoBlockDataToStorage saves created data to storage -func (vic *validatorInfoCreator) SaveValidatorInfoBlockDataToStorage(_ data.HeaderHandler, body *block.Body) { +// GetLocalValidatorInfoCache returns the local validator info cache which holds all the validator info for the current block +func (vic *validatorInfoCreator) GetLocalValidatorInfoCache() epochStart.ValidatorInfoCacher { + return vic.currValidatorInfo +} + +// CreateMarshalledData creates the marshalled data to be sent to shards +func (vic *validatorInfoCreator) CreateMarshalledData(body *block.Body) map[string][][]byte { + if check.IfNil(body) { + return nil + } + + marshalledValidatorInfoTxs := make(map[string][][]byte) + + for _, miniBlock := range body.MiniBlocks { + if miniBlock.Type != block.PeerBlock { + continue + } + if miniBlock.SenderShardID != vic.shardCoordinator.SelfId() || + miniBlock.ReceiverShardID == vic.shardCoordinator.SelfId() { + continue + } + + broadcastTopic := createBroadcastTopic(vic.shardCoordinator, miniBlock.ReceiverShardID) + if _, ok := marshalledValidatorInfoTxs[broadcastTopic]; !ok { + marshalledValidatorInfoTxs[broadcastTopic] = make([][]byte, 0, len(miniBlock.TxHashes)) + } + + for _, txHash := range miniBlock.TxHashes { + validatorInfoTx, err := vic.currValidatorInfo.GetValidatorInfo(txHash) + if err != nil { + log.Error("validatorInfoCreator.CreateMarshalledData.GetValidatorInfo", "hash", txHash, "error", err) + continue + } + + marshalledData, err := vic.marshalizer.Marshal(validatorInfoTx) + if err != nil { + 
log.Error("validatorInfoCreator.CreateMarshalledData.Marshal", "hash", txHash, "error", err) + continue + } + + marshalledValidatorInfoTxs[broadcastTopic] = append(marshalledValidatorInfoTxs[broadcastTopic], marshalledData) + } + + if len(marshalledValidatorInfoTxs[broadcastTopic]) == 0 { + delete(marshalledValidatorInfoTxs, broadcastTopic) + } + } + + return marshalledValidatorInfoTxs +} + +// GetValidatorInfoTxs returns validator info txs for the current block +func (vic *validatorInfoCreator) GetValidatorInfoTxs(body *block.Body) map[string]*state.ShardValidatorInfo { + validatorInfoTxs := make(map[string]*state.ShardValidatorInfo) + for _, miniBlock := range body.MiniBlocks { + if miniBlock.Type != block.PeerBlock { + continue + } + + for _, txHash := range miniBlock.TxHashes { + validatorInfoTx, err := vic.currValidatorInfo.GetValidatorInfo(txHash) + if err != nil { + continue + } + + validatorInfoTxs[string(txHash)] = validatorInfoTx + } + } + + return validatorInfoTxs +} + +// SaveBlockDataToStorage saves block data to storage +func (vic *validatorInfoCreator) SaveBlockDataToStorage(_ data.HeaderHandler, body *block.Body) { if check.IfNil(body) { return } @@ -254,8 +322,8 @@ func (vic *validatorInfoCreator) SaveValidatorInfoBlockDataToStorage(_ data.Head } } -// DeleteValidatorInfoBlockDataFromStorage deletes data from storage -func (vic *validatorInfoCreator) DeleteValidatorInfoBlockDataFromStorage(metaBlock data.HeaderHandler, body *block.Body) { +// DeleteBlockDataFromStorage deletes block data from storage +func (vic *validatorInfoCreator) DeleteBlockDataFromStorage(metaBlock data.HeaderHandler, body *block.Body) { if check.IfNil(metaBlock) || check.IfNil(body) { return } @@ -277,18 +345,24 @@ func (vic *validatorInfoCreator) DeleteValidatorInfoBlockDataFromStorage(metaBlo } } -// IsInterfaceNil return true if underlying object is nil -func (vic *validatorInfoCreator) IsInterfaceNil() bool { - return vic == nil -} - -// RemoveBlockDataFromPools removes 
block info from pools -func (vic *validatorInfoCreator) RemoveBlockDataFromPools(metaBlock data.HeaderHandler, _ *block.Body) { +// RemoveBlockDataFromPools removes block data from pools +func (vic *validatorInfoCreator) RemoveBlockDataFromPools(metaBlock data.HeaderHandler, body *block.Body) { if check.IfNil(metaBlock) { return } miniBlocksPool := vic.dataPool.MiniBlocks() + validatorInfoPool := vic.dataPool.ValidatorsInfo() + + for _, miniBlock := range body.MiniBlocks { + if miniBlock.Type != block.PeerBlock { + continue + } + + for _, txHash := range miniBlock.TxHashes { + validatorInfoPool.Remove(txHash) + } + } for _, mbHeader := range metaBlock.GetMiniBlockHeaderHandlers() { if mbHeader.GetTypeInt32() != int32(block.PeerBlock) { @@ -305,3 +379,12 @@ func (vic *validatorInfoCreator) RemoveBlockDataFromPools(metaBlock data.HeaderH "num txs", mbHeader.GetTxCount()) } } + +func (vic *validatorInfoCreator) clean() { + vic.currValidatorInfo.Clean() +} + +// IsInterfaceNil return true if underlying object is nil +func (vic *validatorInfoCreator) IsInterfaceNil() bool { + return vic == nil +} diff --git a/epochStart/mock/rewardsCreatorStub.go b/epochStart/mock/rewardsCreatorStub.go index 3be87ced58a..5fa99e8df4e 100644 --- a/epochStart/mock/rewardsCreatorStub.go +++ b/epochStart/mock/rewardsCreatorStub.go @@ -19,10 +19,10 @@ type RewardsCreatorStub struct { ) error GetProtocolSustainabilityRewardsCalled func() *big.Int GetLocalTxCacheCalled func() epochStart.TransactionCacher - CreateMarshalizedDataCalled func(body *block.Body) map[string][][]byte + CreateMarshalledDataCalled func(body *block.Body) map[string][][]byte GetRewardsTxsCalled func(body *block.Body) map[string]data.TransactionHandler - SaveTxBlockToStorageCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) - DeleteTxsFromStorageCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) + SaveBlockDataToStorageCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) + 
DeleteBlockDataFromStorageCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) RemoveBlockDataFromPoolsCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) } @@ -68,10 +68,10 @@ func (rcs *RewardsCreatorStub) GetLocalTxCache() epochStart.TransactionCacher { return nil } -// CreateMarshalizedData - -func (rcs *RewardsCreatorStub) CreateMarshalizedData(body *block.Body) map[string][][]byte { - if rcs.CreateMarshalizedDataCalled != nil { - return rcs.CreateMarshalizedDataCalled(body) +// CreateMarshalledData - +func (rcs *RewardsCreatorStub) CreateMarshalledData(body *block.Body) map[string][][]byte { + if rcs.CreateMarshalledDataCalled != nil { + return rcs.CreateMarshalledDataCalled(body) } return nil } @@ -84,17 +84,17 @@ func (rcs *RewardsCreatorStub) GetRewardsTxs(body *block.Body) map[string]data.T return nil } -// SaveTxBlockToStorage - -func (rcs *RewardsCreatorStub) SaveTxBlockToStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { - if rcs.SaveTxBlockToStorageCalled != nil { - rcs.SaveTxBlockToStorageCalled(metaBlock, body) +// SaveBlockDataToStorage - +func (rcs *RewardsCreatorStub) SaveBlockDataToStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { + if rcs.SaveBlockDataToStorageCalled != nil { + rcs.SaveBlockDataToStorageCalled(metaBlock, body) } } -// DeleteTxsFromStorage - -func (rcs *RewardsCreatorStub) DeleteTxsFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { - if rcs.DeleteTxsFromStorageCalled != nil { - rcs.DeleteTxsFromStorageCalled(metaBlock, body) +// DeleteBlockDataFromStorage - +func (rcs *RewardsCreatorStub) DeleteBlockDataFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { + if rcs.DeleteBlockDataFromStorageCalled != nil { + rcs.DeleteBlockDataFromStorageCalled(metaBlock, body) } } diff --git a/integrationTests/mock/epochRewardsCreatorStub.go b/integrationTests/mock/epochRewardsCreatorStub.go index 5302875ec54..986b91158bf 100644 --- 
a/integrationTests/mock/epochRewardsCreatorStub.go +++ b/integrationTests/mock/epochRewardsCreatorStub.go @@ -17,13 +17,13 @@ type EpochRewardsCreatorStub struct { VerifyRewardsMiniBlocksCalled func( metaBlock data.MetaHeaderHandler, validatorsInfo map[uint32][]*state.ValidatorInfo, computedEconomics *block.Economics, ) error - CreateMarshalizedDataCalled func(body *block.Body) map[string][][]byte - SaveTxBlockToStorageCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) - DeleteTxsFromStorageCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) - RemoveBlockDataFromPoolsCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) - GetRewardsTxsCalled func(body *block.Body) map[string]data.TransactionHandler - GetProtocolSustainCalled func() *big.Int - GetLocalTxCacheCalled func() epochStart.TransactionCacher + CreateMarshalledDataCalled func(body *block.Body) map[string][][]byte + SaveBlockDataToStorageCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) + DeleteBlockDataFromStorageCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) + RemoveBlockDataFromPoolsCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) + GetRewardsTxsCalled func(body *block.Body) map[string]data.TransactionHandler + GetProtocolSustainCalled func() *big.Int + GetLocalTxCacheCalled func() epochStart.TransactionCacher } // GetProtocolSustainabilityRewards - @@ -54,7 +54,7 @@ func (e *EpochRewardsCreatorStub) CreateRewardsMiniBlocks( return nil, nil } -// GetRewardsTxs -- +// GetRewardsTxs - func (e *EpochRewardsCreatorStub) GetRewardsTxs(body *block.Body) map[string]data.TransactionHandler { if e.GetRewardsTxsCalled != nil { return e.GetRewardsTxsCalled(body) @@ -74,25 +74,25 @@ func (e *EpochRewardsCreatorStub) VerifyRewardsMiniBlocks( return nil } -// CreateMarshalizedData - -func (e *EpochRewardsCreatorStub) CreateMarshalizedData(body *block.Body) map[string][][]byte { - if e.CreateMarshalizedDataCalled != nil { - return 
e.CreateMarshalizedDataCalled(body) +// CreateMarshalledData - +func (e *EpochRewardsCreatorStub) CreateMarshalledData(body *block.Body) map[string][][]byte { + if e.CreateMarshalledDataCalled != nil { + return e.CreateMarshalledDataCalled(body) } return nil } -// SaveTxBlockToStorage - -func (e *EpochRewardsCreatorStub) SaveTxBlockToStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { - if e.SaveTxBlockToStorageCalled != nil { - e.SaveTxBlockToStorageCalled(metaBlock, body) +// SaveBlockDataToStorage - +func (e *EpochRewardsCreatorStub) SaveBlockDataToStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { + if e.SaveBlockDataToStorageCalled != nil { + e.SaveBlockDataToStorageCalled(metaBlock, body) } } -// DeleteTxsFromStorage - -func (e *EpochRewardsCreatorStub) DeleteTxsFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { - if e.DeleteTxsFromStorageCalled != nil { - e.DeleteTxsFromStorageCalled(metaBlock, body) +// DeleteBlockDataFromStorage - +func (e *EpochRewardsCreatorStub) DeleteBlockDataFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { + if e.DeleteBlockDataFromStorageCalled != nil { + e.DeleteBlockDataFromStorageCalled(metaBlock, body) } } diff --git a/integrationTests/mock/epochValidatorInfoCreatorStub.go b/integrationTests/mock/epochValidatorInfoCreatorStub.go index 61d45987048..6cf8318f6a1 100644 --- a/integrationTests/mock/epochValidatorInfoCreatorStub.go +++ b/integrationTests/mock/epochValidatorInfoCreatorStub.go @@ -3,17 +3,20 @@ package mock import ( "github.com/ElrondNetwork/elrond-go-core/data" "github.com/ElrondNetwork/elrond-go-core/data/block" + "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/state" ) // EpochValidatorInfoCreatorStub - type EpochValidatorInfoCreatorStub struct { - CreateValidatorInfoMiniBlocksCalled func(validatorsInfo map[uint32][]*state.ValidatorInfo) (block.MiniBlockSlice, error) - VerifyValidatorInfoMiniBlocksCalled func(miniblocks 
[]*block.MiniBlock, validatorsInfo map[uint32][]*state.ValidatorInfo) error - CreateMarshalizedDataCalled func(body block.Body) map[string][][]byte - SaveValidatorInfoBlockDataToStorageCalled func(metaBlock data.HeaderHandler, body *block.Body) - DeleteValidatorInfoBlockDataFromStorageCalled func(metaBlock data.HeaderHandler, body *block.Body) - RemoveBlockDataFromPoolsCalled func(metaBlock data.HeaderHandler, body *block.Body) + CreateValidatorInfoMiniBlocksCalled func(validatorsInfo map[uint32][]*state.ValidatorInfo) (block.MiniBlockSlice, error) + VerifyValidatorInfoMiniBlocksCalled func(miniblocks []*block.MiniBlock, validatorsInfo map[uint32][]*state.ValidatorInfo) error + GetLocalValidatorInfoCacheCalled func() epochStart.ValidatorInfoCacher + CreateMarshalledDataCalled func(body *block.Body) map[string][][]byte + GetValidatorInfoTxsCalled func(body *block.Body) map[string]*state.ShardValidatorInfo + SaveBlockDataToStorageCalled func(metaBlock data.HeaderHandler, body *block.Body) + DeleteBlockDataFromStorageCalled func(metaBlock data.HeaderHandler, body *block.Body) + RemoveBlockDataFromPoolsCalled func(metaBlock data.HeaderHandler, body *block.Body) } // CreateValidatorInfoMiniBlocks - @@ -25,24 +28,48 @@ func (e *EpochValidatorInfoCreatorStub) CreateValidatorInfoMiniBlocks(validatorI } // VerifyValidatorInfoMiniBlocks - -func (e *EpochValidatorInfoCreatorStub) VerifyValidatorInfoMiniBlocks(miniblocks []*block.MiniBlock, validatorsInfo map[uint32][]*state.ValidatorInfo) error { +func (e *EpochValidatorInfoCreatorStub) VerifyValidatorInfoMiniBlocks(miniBlocks []*block.MiniBlock, validatorsInfo map[uint32][]*state.ValidatorInfo) error { if e.VerifyValidatorInfoMiniBlocksCalled != nil { - return e.VerifyValidatorInfoMiniBlocksCalled(miniblocks, validatorsInfo) + return e.VerifyValidatorInfoMiniBlocksCalled(miniBlocks, validatorsInfo) } return nil } -// SaveValidatorInfoBlockDataToStorage - -func (e *EpochValidatorInfoCreatorStub) 
SaveValidatorInfoBlockDataToStorage(metaBlock data.HeaderHandler, body *block.Body) { - if e.SaveValidatorInfoBlockDataToStorageCalled != nil { - e.SaveValidatorInfoBlockDataToStorageCalled(metaBlock, body) +// GetLocalValidatorInfoCache - +func (e *EpochValidatorInfoCreatorStub) GetLocalValidatorInfoCache() epochStart.ValidatorInfoCacher { + if e.GetLocalValidatorInfoCacheCalled != nil { + return e.GetLocalValidatorInfoCacheCalled() + } + return nil +} + +// CreateMarshalledData - +func (e *EpochValidatorInfoCreatorStub) CreateMarshalledData(body *block.Body) map[string][][]byte { + if e.CreateMarshalledDataCalled != nil { + return e.CreateMarshalledDataCalled(body) + } + return nil +} + +// GetValidatorInfoTxs - +func (e *EpochValidatorInfoCreatorStub) GetValidatorInfoTxs(body *block.Body) map[string]*state.ShardValidatorInfo { + if e.GetValidatorInfoTxsCalled != nil { + return e.GetValidatorInfoTxsCalled(body) + } + return nil +} + +// SaveBlockDataToStorage - +func (e *EpochValidatorInfoCreatorStub) SaveBlockDataToStorage(metaBlock data.HeaderHandler, body *block.Body) { + if e.SaveBlockDataToStorageCalled != nil { + e.SaveBlockDataToStorageCalled(metaBlock, body) } } -// DeleteValidatorInfoBlockDataFromStorage - -func (e *EpochValidatorInfoCreatorStub) DeleteValidatorInfoBlockDataFromStorage(metaBlock data.HeaderHandler, body *block.Body) { - if e.DeleteValidatorInfoBlockDataFromStorageCalled != nil { - e.DeleteValidatorInfoBlockDataFromStorageCalled(metaBlock, body) +// DeleteBlockDataFromStorage - +func (e *EpochValidatorInfoCreatorStub) DeleteBlockDataFromStorage(metaBlock data.HeaderHandler, body *block.Body) { + if e.DeleteBlockDataFromStorageCalled != nil { + e.DeleteBlockDataFromStorageCalled(metaBlock, body) } } diff --git a/process/block/metablock.go b/process/block/metablock.go index b9237aed284..fda3609f95c 100644 --- a/process/block/metablock.go +++ b/process/block/metablock.go @@ -1579,8 +1579,8 @@ func (mp *metaProcessor) getRewardsTxs(header 
*block.MetaBlock, body *block.Body func (mp *metaProcessor) commitEpochStart(header *block.MetaBlock, body *block.Body) { if header.IsStartOfEpochBlock() { mp.epochStartTrigger.SetProcessed(header, body) - go mp.epochRewardsCreator.SaveTxBlockToStorage(header, body) - go mp.validatorInfoCreator.SaveValidatorInfoBlockDataToStorage(header, body) + go mp.epochRewardsCreator.SaveBlockDataToStorage(header, body) + go mp.validatorInfoCreator.SaveBlockDataToStorage(header, body) } else { currentHeader := mp.blockChain.GetCurrentBlockHeader() if !check.IfNil(currentHeader) && currentHeader.IsStartOfEpochBlock() { @@ -2381,7 +2381,9 @@ func (mp *metaProcessor) MarshalizedDataToBroadcast( var mrsTxs map[string][][]byte if hdr.IsStartOfEpochBlock() { - mrsTxs = mp.epochRewardsCreator.CreateMarshalizedData(body) + mrsTxs = mp.epochRewardsCreator.CreateMarshalledData(body) + //TODO: Append also validator info txs to be broadcast + //mrsTxs = mp.validatorInfoCreator.CreateMarshalledData(body) } else { mrsTxs = mp.txCoordinator.CreateMarshalizedData(body) } diff --git a/process/interface.go b/process/interface.go index bfe4cbc9966..6d94bd6b99a 100644 --- a/process/interface.go +++ b/process/interface.go @@ -891,10 +891,10 @@ type RewardsCreator interface { ) error GetProtocolSustainabilityRewards() *big.Int GetLocalTxCache() epochStart.TransactionCacher - CreateMarshalizedData(body *block.Body) map[string][][]byte + CreateMarshalledData(body *block.Body) map[string][][]byte GetRewardsTxs(body *block.Body) map[string]data.TransactionHandler - SaveTxBlockToStorage(metaBlock data.MetaHeaderHandler, body *block.Body) - DeleteTxsFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) + SaveBlockDataToStorage(metaBlock data.MetaHeaderHandler, body *block.Body) + DeleteBlockDataFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) RemoveBlockDataFromPools(metaBlock data.MetaHeaderHandler, body *block.Body) IsInterfaceNil() bool } @@ -902,9 +902,12 @@ type 
RewardsCreator interface { // EpochStartValidatorInfoCreator defines the functionality for the metachain to create validator statistics at end of epoch type EpochStartValidatorInfoCreator interface { CreateValidatorInfoMiniBlocks(validatorInfo map[uint32][]*state.ValidatorInfo) (block.MiniBlockSlice, error) - VerifyValidatorInfoMiniBlocks(miniblocks []*block.MiniBlock, validatorsInfo map[uint32][]*state.ValidatorInfo) error - SaveValidatorInfoBlockDataToStorage(metaBlock data.HeaderHandler, body *block.Body) - DeleteValidatorInfoBlockDataFromStorage(metaBlock data.HeaderHandler, body *block.Body) + VerifyValidatorInfoMiniBlocks(miniBlocks []*block.MiniBlock, validatorsInfo map[uint32][]*state.ValidatorInfo) error + GetLocalValidatorInfoCache() epochStart.ValidatorInfoCacher + CreateMarshalledData(body *block.Body) map[string][][]byte + GetValidatorInfoTxs(body *block.Body) map[string]*state.ShardValidatorInfo + SaveBlockDataToStorage(metaBlock data.HeaderHandler, body *block.Body) + DeleteBlockDataFromStorage(metaBlock data.HeaderHandler, body *block.Body) RemoveBlockDataFromPools(metaBlock data.HeaderHandler, body *block.Body) IsInterfaceNil() bool } diff --git a/process/mock/epochRewardsCreatorStub.go b/process/mock/epochRewardsCreatorStub.go index e465ef2bdf9..e67aa17b1da 100644 --- a/process/mock/epochRewardsCreatorStub.go +++ b/process/mock/epochRewardsCreatorStub.go @@ -17,13 +17,13 @@ type EpochRewardsCreatorStub struct { VerifyRewardsMiniBlocksCalled func( metaBlock data.MetaHeaderHandler, validatorsInfo map[uint32][]*state.ValidatorInfo, computedEconomics *block.Economics, ) error - CreateMarshalizedDataCalled func(body *block.Body) map[string][][]byte - SaveTxBlockToStorageCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) - DeleteTxsFromStorageCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) - RemoveBlockDataFromPoolsCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) - GetRewardsTxsCalled func(body *block.Body) 
map[string]data.TransactionHandler - GetProtocolSustainCalled func() *big.Int - GetLocalTxCacheCalled func() epochStart.TransactionCacher + CreateMarshalledDataCalled func(body *block.Body) map[string][][]byte + SaveBlockDataToStorageCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) + DeleteBlockDataFromStorageCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) + RemoveBlockDataFromPoolsCalled func(metaBlock data.MetaHeaderHandler, body *block.Body) + GetRewardsTxsCalled func(body *block.Body) map[string]data.TransactionHandler + GetProtocolSustainCalled func() *big.Int + GetLocalTxCacheCalled func() epochStart.TransactionCacher } // GetProtocolSustainabilityRewards - @@ -66,15 +66,15 @@ func (e *EpochRewardsCreatorStub) VerifyRewardsMiniBlocks( return nil } -// CreateMarshalizedData - -func (e *EpochRewardsCreatorStub) CreateMarshalizedData(body *block.Body) map[string][][]byte { - if e.CreateMarshalizedDataCalled != nil { - return e.CreateMarshalizedDataCalled(body) +// CreateMarshalledData - +func (e *EpochRewardsCreatorStub) CreateMarshalledData(body *block.Body) map[string][][]byte { + if e.CreateMarshalledDataCalled != nil { + return e.CreateMarshalledDataCalled(body) } return nil } -// GetRewardsTxs -- +// GetRewardsTxs - func (e *EpochRewardsCreatorStub) GetRewardsTxs(body *block.Body) map[string]data.TransactionHandler { if e.GetRewardsTxsCalled != nil { return e.GetRewardsTxsCalled(body) @@ -82,17 +82,17 @@ func (e *EpochRewardsCreatorStub) GetRewardsTxs(body *block.Body) map[string]dat return nil } -// SaveTxBlockToStorage - -func (e *EpochRewardsCreatorStub) SaveTxBlockToStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { - if e.SaveTxBlockToStorageCalled != nil { - e.SaveTxBlockToStorageCalled(metaBlock, body) +// SaveBlockDataToStorage - +func (e *EpochRewardsCreatorStub) SaveBlockDataToStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { + if e.SaveBlockDataToStorageCalled != nil { + 
e.SaveBlockDataToStorageCalled(metaBlock, body) } } -// DeleteTxsFromStorage - -func (e *EpochRewardsCreatorStub) DeleteTxsFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { - if e.DeleteTxsFromStorageCalled != nil { - e.DeleteTxsFromStorageCalled(metaBlock, body) +// DeleteBlockDataFromStorage - +func (e *EpochRewardsCreatorStub) DeleteBlockDataFromStorage(metaBlock data.MetaHeaderHandler, body *block.Body) { + if e.DeleteBlockDataFromStorageCalled != nil { + e.DeleteBlockDataFromStorageCalled(metaBlock, body) } } diff --git a/process/mock/epochValidatorInfoCreatorStub.go b/process/mock/epochValidatorInfoCreatorStub.go index 61d45987048..6cf8318f6a1 100644 --- a/process/mock/epochValidatorInfoCreatorStub.go +++ b/process/mock/epochValidatorInfoCreatorStub.go @@ -3,17 +3,20 @@ package mock import ( "github.com/ElrondNetwork/elrond-go-core/data" "github.com/ElrondNetwork/elrond-go-core/data/block" + "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/state" ) // EpochValidatorInfoCreatorStub - type EpochValidatorInfoCreatorStub struct { - CreateValidatorInfoMiniBlocksCalled func(validatorsInfo map[uint32][]*state.ValidatorInfo) (block.MiniBlockSlice, error) - VerifyValidatorInfoMiniBlocksCalled func(miniblocks []*block.MiniBlock, validatorsInfo map[uint32][]*state.ValidatorInfo) error - CreateMarshalizedDataCalled func(body block.Body) map[string][][]byte - SaveValidatorInfoBlockDataToStorageCalled func(metaBlock data.HeaderHandler, body *block.Body) - DeleteValidatorInfoBlockDataFromStorageCalled func(metaBlock data.HeaderHandler, body *block.Body) - RemoveBlockDataFromPoolsCalled func(metaBlock data.HeaderHandler, body *block.Body) + CreateValidatorInfoMiniBlocksCalled func(validatorsInfo map[uint32][]*state.ValidatorInfo) (block.MiniBlockSlice, error) + VerifyValidatorInfoMiniBlocksCalled func(miniblocks []*block.MiniBlock, validatorsInfo map[uint32][]*state.ValidatorInfo) error + GetLocalValidatorInfoCacheCalled 
func() epochStart.ValidatorInfoCacher + CreateMarshalledDataCalled func(body *block.Body) map[string][][]byte + GetValidatorInfoTxsCalled func(body *block.Body) map[string]*state.ShardValidatorInfo + SaveBlockDataToStorageCalled func(metaBlock data.HeaderHandler, body *block.Body) + DeleteBlockDataFromStorageCalled func(metaBlock data.HeaderHandler, body *block.Body) + RemoveBlockDataFromPoolsCalled func(metaBlock data.HeaderHandler, body *block.Body) } // CreateValidatorInfoMiniBlocks - @@ -25,24 +28,48 @@ func (e *EpochValidatorInfoCreatorStub) CreateValidatorInfoMiniBlocks(validatorI } // VerifyValidatorInfoMiniBlocks - -func (e *EpochValidatorInfoCreatorStub) VerifyValidatorInfoMiniBlocks(miniblocks []*block.MiniBlock, validatorsInfo map[uint32][]*state.ValidatorInfo) error { +func (e *EpochValidatorInfoCreatorStub) VerifyValidatorInfoMiniBlocks(miniBlocks []*block.MiniBlock, validatorsInfo map[uint32][]*state.ValidatorInfo) error { if e.VerifyValidatorInfoMiniBlocksCalled != nil { - return e.VerifyValidatorInfoMiniBlocksCalled(miniblocks, validatorsInfo) + return e.VerifyValidatorInfoMiniBlocksCalled(miniBlocks, validatorsInfo) } return nil } -// SaveValidatorInfoBlockDataToStorage - -func (e *EpochValidatorInfoCreatorStub) SaveValidatorInfoBlockDataToStorage(metaBlock data.HeaderHandler, body *block.Body) { - if e.SaveValidatorInfoBlockDataToStorageCalled != nil { - e.SaveValidatorInfoBlockDataToStorageCalled(metaBlock, body) +// GetLocalValidatorInfoCache - +func (e *EpochValidatorInfoCreatorStub) GetLocalValidatorInfoCache() epochStart.ValidatorInfoCacher { + if e.GetLocalValidatorInfoCacheCalled != nil { + return e.GetLocalValidatorInfoCacheCalled() + } + return nil +} + +// CreateMarshalledData - +func (e *EpochValidatorInfoCreatorStub) CreateMarshalledData(body *block.Body) map[string][][]byte { + if e.CreateMarshalledDataCalled != nil { + return e.CreateMarshalledDataCalled(body) + } + return nil +} + +// GetValidatorInfoTxs - +func (e 
*EpochValidatorInfoCreatorStub) GetValidatorInfoTxs(body *block.Body) map[string]*state.ShardValidatorInfo { + if e.GetValidatorInfoTxsCalled != nil { + return e.GetValidatorInfoTxsCalled(body) + } + return nil +} + +// SaveBlockDataToStorage - +func (e *EpochValidatorInfoCreatorStub) SaveBlockDataToStorage(metaBlock data.HeaderHandler, body *block.Body) { + if e.SaveBlockDataToStorageCalled != nil { + e.SaveBlockDataToStorageCalled(metaBlock, body) } } -// DeleteValidatorInfoBlockDataFromStorage - -func (e *EpochValidatorInfoCreatorStub) DeleteValidatorInfoBlockDataFromStorage(metaBlock data.HeaderHandler, body *block.Body) { - if e.DeleteValidatorInfoBlockDataFromStorageCalled != nil { - e.DeleteValidatorInfoBlockDataFromStorageCalled(metaBlock, body) +// DeleteBlockDataFromStorage - +func (e *EpochValidatorInfoCreatorStub) DeleteBlockDataFromStorage(metaBlock data.HeaderHandler, body *block.Body) { + if e.DeleteBlockDataFromStorageCalled != nil { + e.DeleteBlockDataFromStorageCalled(metaBlock, body) } } From 357803bb0c98cc71dbc9304a6cfd81ef13e82b39 Mon Sep 17 00:00:00 2001 From: Sorin Stanculeanu Date: Fri, 6 May 2022 16:04:04 +0300 Subject: [PATCH 22/70] fixes after review-now using datapacker --- cmd/node/config/config.toml | 3 - config/config.go | 6 -- dataRetriever/errors.go | 3 - .../factory/resolverscontainer/args.go | 35 ++++----- .../baseResolversContainerFactory.go | 65 +++++++--------- .../metaResolversContainerFactory.go | 41 +++++----- .../metaResolversContainerFactory_test.go | 16 +--- .../shardResolversContainerFactory.go | 41 +++++----- .../shardResolversContainerFactory_test.go | 15 +--- .../resolvers/validatorInfoResolver.go | 78 +++++++------------ .../resolvers/validatorInfoResolver_test.go | 67 +++++++++++----- epochStart/bootstrap/process.go | 33 ++++---- epochStart/bootstrap/process_test.go | 3 - factory/processComponents.go | 70 ++++++++--------- factory/stateComponents_test.go | 3 - integrationTests/testProcessorNode.go | 3 +- 
testscommon/generalConfig.go | 3 - 17 files changed, 213 insertions(+), 272 deletions(-) diff --git a/cmd/node/config/config.toml b/cmd/node/config/config.toml index ba882220b6b..50347d7a48d 100644 --- a/cmd/node/config/config.toml +++ b/cmd/node/config/config.toml @@ -897,6 +897,3 @@ NumCrossShardPeers = 2 NumIntraShardPeers = 1 NumFullHistoryPeers = 3 - -[ValidatorInfo] - MaxNumOfValidatorInfoInResponse = 10 diff --git a/config/config.go b/config/config.go index 264855e97f6..f60402cd2de 100644 --- a/config/config.go +++ b/config/config.go @@ -187,12 +187,6 @@ type Config struct { VMOutputCacher CacheConfig PeersRatingConfig PeersRatingConfig - - ValidatorInfo ValidatorInfoConfig -} - -type ValidatorInfoConfig struct { - MaxNumOfValidatorInfoInResponse int } // PeersRatingConfig will hold settings related to peers rating diff --git a/dataRetriever/errors.go b/dataRetriever/errors.go index 3cfcf9ca488..da3ccd04f83 100644 --- a/dataRetriever/errors.go +++ b/dataRetriever/errors.go @@ -232,6 +232,3 @@ var ErrNilValidatorInfoStorage = errors.New("nil validator info storage") // ErrValidatorInfoNotFound signals that no validator info was found var ErrValidatorInfoNotFound = errors.New("validator info not found") - -// ErrInvalidNumOfValidatorInfo signals that an invalid number of validator info was provided -var ErrInvalidNumOfValidatorInfo = errors.New("invalid num of validator info") diff --git a/dataRetriever/factory/resolverscontainer/args.go b/dataRetriever/factory/resolverscontainer/args.go index 067dbab735c..c0e3ad276cb 100644 --- a/dataRetriever/factory/resolverscontainer/args.go +++ b/dataRetriever/factory/resolverscontainer/args.go @@ -12,22 +12,21 @@ import ( // FactoryArgs will hold the arguments for ResolversContainerFactory for both shard and meta type FactoryArgs struct { - ResolverConfig config.ResolverConfig - NumConcurrentResolvingJobs int32 - ShardCoordinator sharding.Coordinator - Messenger dataRetriever.TopicMessageHandler - Store 
dataRetriever.StorageService - Marshalizer marshal.Marshalizer - DataPools dataRetriever.PoolsHolder - Uint64ByteSliceConverter typeConverters.Uint64ByteSliceConverter - DataPacker dataRetriever.DataPacker - TriesContainer common.TriesHolder - InputAntifloodHandler dataRetriever.P2PAntifloodHandler - OutputAntifloodHandler dataRetriever.P2PAntifloodHandler - CurrentNetworkEpochProvider dataRetriever.CurrentNetworkEpochProviderHandler - PreferredPeersHolder p2p.PreferredPeersHolderHandler - PeersRatingHandler dataRetriever.PeersRatingHandler - SizeCheckDelta uint32 - IsFullHistoryNode bool - MaxNumOfValidatorInfoInResponse int + ResolverConfig config.ResolverConfig + NumConcurrentResolvingJobs int32 + ShardCoordinator sharding.Coordinator + Messenger dataRetriever.TopicMessageHandler + Store dataRetriever.StorageService + Marshalizer marshal.Marshalizer + DataPools dataRetriever.PoolsHolder + Uint64ByteSliceConverter typeConverters.Uint64ByteSliceConverter + DataPacker dataRetriever.DataPacker + TriesContainer common.TriesHolder + InputAntifloodHandler dataRetriever.P2PAntifloodHandler + OutputAntifloodHandler dataRetriever.P2PAntifloodHandler + CurrentNetworkEpochProvider dataRetriever.CurrentNetworkEpochProviderHandler + PreferredPeersHolder p2p.PreferredPeersHolderHandler + PeersRatingHandler dataRetriever.PeersRatingHandler + SizeCheckDelta uint32 + IsFullHistoryNode bool } diff --git a/dataRetriever/factory/resolverscontainer/baseResolversContainerFactory.go b/dataRetriever/factory/resolverscontainer/baseResolversContainerFactory.go index 0b4fd793651..c7c46e86061 100644 --- a/dataRetriever/factory/resolverscontainer/baseResolversContainerFactory.go +++ b/dataRetriever/factory/resolverscontainer/baseResolversContainerFactory.go @@ -19,33 +19,30 @@ import ( // EmptyExcludePeersOnTopic is an empty topic const EmptyExcludePeersOnTopic = "" -const minNumOfValidatorInfoInResponse = 5 - var log = logger.GetOrCreate("dataRetriever/factory/resolverscontainer") type 
baseResolversContainerFactory struct { - container dataRetriever.ResolversContainer - shardCoordinator sharding.Coordinator - messenger dataRetriever.TopicMessageHandler - store dataRetriever.StorageService - marshalizer marshal.Marshalizer - dataPools dataRetriever.PoolsHolder - uint64ByteSliceConverter typeConverters.Uint64ByteSliceConverter - intRandomizer dataRetriever.IntRandomizer - dataPacker dataRetriever.DataPacker - triesContainer common.TriesHolder - inputAntifloodHandler dataRetriever.P2PAntifloodHandler - outputAntifloodHandler dataRetriever.P2PAntifloodHandler - throttler dataRetriever.ResolverThrottler - intraShardTopic string - isFullHistoryNode bool - currentNetworkEpochProvider dataRetriever.CurrentNetworkEpochProviderHandler - preferredPeersHolder dataRetriever.PreferredPeersHolderHandler - peersRatingHandler dataRetriever.PeersRatingHandler - numCrossShardPeers int - numIntraShardPeers int - numFullHistoryPeers int - maxNumOfValidatorInfoInResponse int + container dataRetriever.ResolversContainer + shardCoordinator sharding.Coordinator + messenger dataRetriever.TopicMessageHandler + store dataRetriever.StorageService + marshalizer marshal.Marshalizer + dataPools dataRetriever.PoolsHolder + uint64ByteSliceConverter typeConverters.Uint64ByteSliceConverter + intRandomizer dataRetriever.IntRandomizer + dataPacker dataRetriever.DataPacker + triesContainer common.TriesHolder + inputAntifloodHandler dataRetriever.P2PAntifloodHandler + outputAntifloodHandler dataRetriever.P2PAntifloodHandler + throttler dataRetriever.ResolverThrottler + intraShardTopic string + isFullHistoryNode bool + currentNetworkEpochProvider dataRetriever.CurrentNetworkEpochProviderHandler + preferredPeersHolder dataRetriever.PreferredPeersHolderHandler + peersRatingHandler dataRetriever.PeersRatingHandler + numCrossShardPeers int + numIntraShardPeers int + numFullHistoryPeers int } func (brcf *baseResolversContainerFactory) checkParams() error { @@ -100,10 +97,6 @@ func (brcf 
*baseResolversContainerFactory) checkParams() error { if brcf.numFullHistoryPeers <= 0 { return fmt.Errorf("%w for numFullHistoryPeers", dataRetriever.ErrInvalidValue) } - if brcf.maxNumOfValidatorInfoInResponse < minNumOfValidatorInfoInResponse { - return fmt.Errorf("%w for maxNumOfValidatorInfoInResponse, expected %d, received %d", - dataRetriever.ErrInvalidValue, minNumOfValidatorInfoInResponse, brcf.maxNumOfValidatorInfoInResponse) - } return nil } @@ -374,14 +367,14 @@ func (brcf *baseResolversContainerFactory) generateValidatorInfoResolver() error } arg := resolvers.ArgValidatorInfoResolver{ - SenderResolver: resolverSender, - Marshaller: brcf.marshalizer, - AntifloodHandler: brcf.inputAntifloodHandler, - Throttler: brcf.throttler, - ValidatorInfoPool: brcf.dataPools.ValidatorsInfo(), - ValidatorInfoStorage: brcf.store.GetStorer(dataRetriever.UnsignedTransactionUnit), - IsFullHistoryNode: brcf.isFullHistoryNode, - MaxNumOfValidatorInfoInResponse: brcf.maxNumOfValidatorInfoInResponse, + SenderResolver: resolverSender, + Marshaller: brcf.marshalizer, + AntifloodHandler: brcf.inputAntifloodHandler, + Throttler: brcf.throttler, + ValidatorInfoPool: brcf.dataPools.ValidatorsInfo(), + ValidatorInfoStorage: brcf.store.GetStorer(dataRetriever.UnsignedTransactionUnit), + DataPacker: brcf.dataPacker, + IsFullHistoryNode: brcf.isFullHistoryNode, } validatorInfoResolver, err := resolvers.NewValidatorInfoResolver(arg) if err != nil { diff --git a/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory.go b/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory.go index a0f017bf769..6158945a265 100644 --- a/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory.go +++ b/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory.go @@ -36,27 +36,26 @@ func NewMetaResolversContainerFactory( container := containers.NewResolversContainer() base := &baseResolversContainerFactory{ - container: container, - 
shardCoordinator: args.ShardCoordinator, - messenger: args.Messenger, - store: args.Store, - marshalizer: args.Marshalizer, - dataPools: args.DataPools, - uint64ByteSliceConverter: args.Uint64ByteSliceConverter, - intRandomizer: &random.ConcurrentSafeIntRandomizer{}, - dataPacker: args.DataPacker, - triesContainer: args.TriesContainer, - inputAntifloodHandler: args.InputAntifloodHandler, - outputAntifloodHandler: args.OutputAntifloodHandler, - throttler: thr, - isFullHistoryNode: args.IsFullHistoryNode, - currentNetworkEpochProvider: args.CurrentNetworkEpochProvider, - preferredPeersHolder: args.PreferredPeersHolder, - peersRatingHandler: args.PeersRatingHandler, - numCrossShardPeers: int(args.ResolverConfig.NumCrossShardPeers), - numIntraShardPeers: int(args.ResolverConfig.NumIntraShardPeers), - numFullHistoryPeers: int(args.ResolverConfig.NumFullHistoryPeers), - maxNumOfValidatorInfoInResponse: args.MaxNumOfValidatorInfoInResponse, + container: container, + shardCoordinator: args.ShardCoordinator, + messenger: args.Messenger, + store: args.Store, + marshalizer: args.Marshalizer, + dataPools: args.DataPools, + uint64ByteSliceConverter: args.Uint64ByteSliceConverter, + intRandomizer: &random.ConcurrentSafeIntRandomizer{}, + dataPacker: args.DataPacker, + triesContainer: args.TriesContainer, + inputAntifloodHandler: args.InputAntifloodHandler, + outputAntifloodHandler: args.OutputAntifloodHandler, + throttler: thr, + isFullHistoryNode: args.IsFullHistoryNode, + currentNetworkEpochProvider: args.CurrentNetworkEpochProvider, + preferredPeersHolder: args.PreferredPeersHolder, + peersRatingHandler: args.PeersRatingHandler, + numCrossShardPeers: int(args.ResolverConfig.NumCrossShardPeers), + numIntraShardPeers: int(args.ResolverConfig.NumIntraShardPeers), + numFullHistoryPeers: int(args.ResolverConfig.NumFullHistoryPeers), } err = base.checkParams() diff --git a/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory_test.go 
b/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory_test.go index b8344aba9cf..d648d214282 100644 --- a/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory_test.go +++ b/dataRetriever/factory/resolverscontainer/metaResolversContainerFactory_test.go @@ -1,7 +1,6 @@ package resolverscontainer_test import ( - "errors" "strings" "testing" @@ -213,18 +212,6 @@ func TestNewMetaResolversContainerFactory_NilTrieDataGetterShouldErr(t *testing. assert.Equal(t, dataRetriever.ErrNilTrieDataGetter, err) } -func TestNewMetaResolversContainerFactory_InvalidMAxValidatorInfoShouldErr(t *testing.T) { - t.Parallel() - - args := getArgumentsMeta() - args.MaxNumOfValidatorInfoInResponse = 0 - rcf, err := resolverscontainer.NewMetaResolversContainerFactory(args) - - assert.Nil(t, rcf) - assert.True(t, errors.Is(err, dataRetriever.ErrInvalidValue)) - assert.True(t, strings.Contains(err.Error(), "maxNumOfValidatorInfoInResponse")) -} - func TestNewMetaResolversContainerFactory_ShouldWork(t *testing.T) { t.Parallel() @@ -317,7 +304,6 @@ func getArgumentsMeta() resolverscontainer.FactoryArgs { NumIntraShardPeers: 2, NumFullHistoryPeers: 3, }, - PeersRatingHandler: &p2pmocks.PeersRatingHandlerStub{}, - MaxNumOfValidatorInfoInResponse: 5, + PeersRatingHandler: &p2pmocks.PeersRatingHandlerStub{}, } } diff --git a/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory.go b/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory.go index 26bfe10e71d..573819ff206 100644 --- a/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory.go +++ b/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory.go @@ -34,27 +34,26 @@ func NewShardResolversContainerFactory( container := containers.NewResolversContainer() base := &baseResolversContainerFactory{ - container: container, - shardCoordinator: args.ShardCoordinator, - messenger: args.Messenger, - store: args.Store, - marshalizer: args.Marshalizer, - 
dataPools: args.DataPools, - uint64ByteSliceConverter: args.Uint64ByteSliceConverter, - intRandomizer: &random.ConcurrentSafeIntRandomizer{}, - dataPacker: args.DataPacker, - triesContainer: args.TriesContainer, - inputAntifloodHandler: args.InputAntifloodHandler, - outputAntifloodHandler: args.OutputAntifloodHandler, - throttler: thr, - isFullHistoryNode: args.IsFullHistoryNode, - currentNetworkEpochProvider: args.CurrentNetworkEpochProvider, - preferredPeersHolder: args.PreferredPeersHolder, - peersRatingHandler: args.PeersRatingHandler, - numCrossShardPeers: int(args.ResolverConfig.NumCrossShardPeers), - numIntraShardPeers: int(args.ResolverConfig.NumIntraShardPeers), - numFullHistoryPeers: int(args.ResolverConfig.NumFullHistoryPeers), - maxNumOfValidatorInfoInResponse: args.MaxNumOfValidatorInfoInResponse, + container: container, + shardCoordinator: args.ShardCoordinator, + messenger: args.Messenger, + store: args.Store, + marshalizer: args.Marshalizer, + dataPools: args.DataPools, + uint64ByteSliceConverter: args.Uint64ByteSliceConverter, + intRandomizer: &random.ConcurrentSafeIntRandomizer{}, + dataPacker: args.DataPacker, + triesContainer: args.TriesContainer, + inputAntifloodHandler: args.InputAntifloodHandler, + outputAntifloodHandler: args.OutputAntifloodHandler, + throttler: thr, + isFullHistoryNode: args.IsFullHistoryNode, + currentNetworkEpochProvider: args.CurrentNetworkEpochProvider, + preferredPeersHolder: args.PreferredPeersHolder, + peersRatingHandler: args.PeersRatingHandler, + numCrossShardPeers: int(args.ResolverConfig.NumCrossShardPeers), + numIntraShardPeers: int(args.ResolverConfig.NumIntraShardPeers), + numFullHistoryPeers: int(args.ResolverConfig.NumFullHistoryPeers), } err = base.checkParams() diff --git a/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory_test.go b/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory_test.go index 58ef5b08b43..f81b526bdff 100644 --- 
a/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory_test.go +++ b/dataRetriever/factory/resolverscontainer/shardResolversContainerFactory_test.go @@ -252,18 +252,6 @@ func TestNewShardResolversContainerFactory_InvalidNumFullHistoryPeersShouldErr(t assert.True(t, errors.Is(err, dataRetriever.ErrInvalidValue)) } -func TestNewShardResolversContainerFactory_InvalidMAxValidatorInfoShouldErr(t *testing.T) { - t.Parallel() - - args := getArgumentsShard() - args.MaxNumOfValidatorInfoInResponse = 0 - rcf, err := resolverscontainer.NewShardResolversContainerFactory(args) - - assert.Nil(t, rcf) - assert.True(t, errors.Is(err, dataRetriever.ErrInvalidValue)) - assert.True(t, strings.Contains(err.Error(), "maxNumOfValidatorInfoInResponse")) -} - func TestNewShardResolversContainerFactory_ShouldWork(t *testing.T) { t.Parallel() @@ -395,7 +383,6 @@ func getArgumentsShard() resolverscontainer.FactoryArgs { NumIntraShardPeers: 2, NumFullHistoryPeers: 3, }, - PeersRatingHandler: &p2pmocks.PeersRatingHandlerStub{}, - MaxNumOfValidatorInfoInResponse: 5, + PeersRatingHandler: &p2pmocks.PeersRatingHandlerStub{}, } } diff --git a/dataRetriever/resolvers/validatorInfoResolver.go b/dataRetriever/resolvers/validatorInfoResolver.go index cf208f52d0d..36aca2b90b2 100644 --- a/dataRetriever/resolvers/validatorInfoResolver.go +++ b/dataRetriever/resolvers/validatorInfoResolver.go @@ -13,18 +13,19 @@ import ( "github.com/ElrondNetwork/elrond-go/storage" ) -const minNumOfValidatorInfo = 5 +// maxBuffToSendValidatorsInfo represents max buffer size to send in bytes +const maxBuffToSendValidatorsInfo = 1 << 18 // 256KB // ArgValidatorInfoResolver is the argument structure used to create a new validator info resolver instance type ArgValidatorInfoResolver struct { - SenderResolver dataRetriever.TopicResolverSender - Marshaller marshal.Marshalizer - AntifloodHandler dataRetriever.P2PAntifloodHandler - Throttler dataRetriever.ResolverThrottler - ValidatorInfoPool storage.Cacher - 
ValidatorInfoStorage storage.Storer - IsFullHistoryNode bool - MaxNumOfValidatorInfoInResponse int + SenderResolver dataRetriever.TopicResolverSender + Marshaller marshal.Marshalizer + AntifloodHandler dataRetriever.P2PAntifloodHandler + Throttler dataRetriever.ResolverThrottler + ValidatorInfoPool storage.Cacher + ValidatorInfoStorage storage.Storer + DataPacker dataRetriever.DataPacker + IsFullHistoryNode bool } // validatorInfoResolver is a wrapper over Resolver that is specialized in resolving validator info requests @@ -32,9 +33,9 @@ type validatorInfoResolver struct { dataRetriever.TopicResolverSender messageProcessor baseStorageResolver - validatorInfoPool storage.Cacher - validatorInfoStorage storage.Storer - maxNumOfValidatorInfoInResponse int + validatorInfoPool storage.Cacher + validatorInfoStorage storage.Storer + dataPacker dataRetriever.DataPacker } // NewValidatorInfoResolver creates a validator info resolver @@ -52,10 +53,10 @@ func NewValidatorInfoResolver(args ArgValidatorInfoResolver) (*validatorInfoReso throttler: args.Throttler, topic: args.SenderResolver.RequestTopic(), }, - baseStorageResolver: createBaseStorageResolver(args.ValidatorInfoStorage, args.IsFullHistoryNode), - validatorInfoPool: args.ValidatorInfoPool, - validatorInfoStorage: args.ValidatorInfoStorage, - maxNumOfValidatorInfoInResponse: args.MaxNumOfValidatorInfoInResponse, + baseStorageResolver: createBaseStorageResolver(args.ValidatorInfoStorage, args.IsFullHistoryNode), + validatorInfoPool: args.ValidatorInfoPool, + validatorInfoStorage: args.ValidatorInfoStorage, + dataPacker: args.DataPacker, }, nil } @@ -78,8 +79,8 @@ func checkArgs(args ArgValidatorInfoResolver) error { if check.IfNil(args.ValidatorInfoStorage) { return dataRetriever.ErrNilValidatorInfoStorage } - if args.MaxNumOfValidatorInfoInResponse < minNumOfValidatorInfo { - return dataRetriever.ErrInvalidNumOfValidatorInfo + if check.IfNil(args.DataPacker) { + return dataRetriever.ErrNilDataPacker } return nil @@ 
-150,7 +151,7 @@ func (res *validatorInfoResolver) resolveHashRequest(hash []byte, epoch uint32, return err } - return res.marshalAndSend([][]byte{data}, pid) + return res.marshalAndSend(data, pid) } // resolveMultipleHashesRequest sends the response for a hash array type request @@ -171,17 +172,13 @@ func (res *validatorInfoResolver) resolveMultipleHashesRequest(hashesBuff []byte } func (res *validatorInfoResolver) sendValidatorInfoForHashes(validatorInfoForHashes [][]byte, pid core.PeerID) error { - if len(validatorInfoForHashes) > res.maxNumOfValidatorInfoInResponse { - return res.sendLargeDataBuff(validatorInfoForHashes, pid) + buffsToSend, err := res.dataPacker.PackDataInChunks(validatorInfoForHashes, maxBuffToSendValidatorsInfo) + if err != nil { + return err } - return res.marshalAndSend(validatorInfoForHashes, pid) -} - -func (res *validatorInfoResolver) sendLargeDataBuff(dataBuff [][]byte, pid core.PeerID) error { - chunksMap := res.splitDataBuffIntoChunks(dataBuff) - for _, chunk := range chunksMap { - err := res.marshalAndSend(chunk, pid) + for _, buff := range buffsToSend { + err = res.Send(buff, pid) if err != nil { return err } @@ -190,27 +187,6 @@ func (res *validatorInfoResolver) sendLargeDataBuff(dataBuff [][]byte, pid core. 
return nil } -func (res *validatorInfoResolver) splitDataBuffIntoChunks(dataBuff [][]byte) map[int][][]byte { - chunksMap := make(map[int][][]byte) - currentChunk := make([][]byte, 0) - currentChunkSize := 0 - chunkIndex := 0 - for _, data := range dataBuff { - if currentChunkSize == res.maxNumOfValidatorInfoInResponse { - chunksMap[chunkIndex] = currentChunk - chunkIndex++ - currentChunk = make([][]byte, 0) - currentChunkSize = 0 - } - - currentChunk = append(currentChunk, data) - currentChunkSize++ - } - chunksMap[chunkIndex] = currentChunk - - return chunksMap -} - func (res *validatorInfoResolver) fetchValidatorInfoForHashes(hashes [][]byte, epoch uint32) ([][]byte, error) { validatorInfos := make([][]byte, 0) for _, hash := range hashes { @@ -248,9 +224,9 @@ func (res *validatorInfoResolver) fetchValidatorInfoByteSlice(hash []byte, epoch return buff, nil } -func (res *validatorInfoResolver) marshalAndSend(data [][]byte, pid core.PeerID) error { +func (res *validatorInfoResolver) marshalAndSend(data []byte, pid core.PeerID) error { b := &batch.Batch{ - Data: data, + Data: [][]byte{data}, } buff, err := res.marshalizer.Marshal(b) if err != nil { diff --git a/dataRetriever/resolvers/validatorInfoResolver_test.go b/dataRetriever/resolvers/validatorInfoResolver_test.go index 470cf64710a..c43e3f512c6 100644 --- a/dataRetriever/resolvers/validatorInfoResolver_test.go +++ b/dataRetriever/resolvers/validatorInfoResolver_test.go @@ -8,6 +8,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/core" "github.com/ElrondNetwork/elrond-go-core/core/check" + "github.com/ElrondNetwork/elrond-go-core/core/partitioning" "github.com/ElrondNetwork/elrond-go-core/data/batch" "github.com/ElrondNetwork/elrond-go/common" "github.com/ElrondNetwork/elrond-go/dataRetriever" @@ -24,14 +25,14 @@ import ( func createMockArgValidatorInfoResolver() resolvers.ArgValidatorInfoResolver { return resolvers.ArgValidatorInfoResolver{ - SenderResolver: &mock.TopicResolverSenderStub{}, - Marshaller: 
&mock.MarshalizerMock{}, - AntifloodHandler: &mock.P2PAntifloodHandlerStub{}, - Throttler: &mock.ThrottlerStub{}, - ValidatorInfoPool: testscommon.NewCacherStub(), - ValidatorInfoStorage: &storage.StorerStub{}, - IsFullHistoryNode: false, - MaxNumOfValidatorInfoInResponse: 5, + SenderResolver: &mock.TopicResolverSenderStub{}, + Marshaller: &mock.MarshalizerMock{}, + AntifloodHandler: &mock.P2PAntifloodHandlerStub{}, + Throttler: &mock.ThrottlerStub{}, + ValidatorInfoPool: testscommon.NewCacherStub(), + ValidatorInfoStorage: &storage.StorerStub{}, + DataPacker: &mock.DataPackerStub{}, + IsFullHistoryNode: false, } } @@ -108,14 +109,14 @@ func TestNewValidatorInfoResolver(t *testing.T) { assert.Equal(t, dataRetriever.ErrNilValidatorInfoStorage, err) assert.True(t, check.IfNil(res)) }) - t.Run("invalid MaxNumOfValidatorInfoInResponse should error", func(t *testing.T) { + t.Run("nil DataPacker should error", func(t *testing.T) { t.Parallel() args := createMockArgValidatorInfoResolver() - args.MaxNumOfValidatorInfoInResponse = 0 + args.DataPacker = nil res, err := resolvers.NewValidatorInfoResolver(args) - assert.Equal(t, dataRetriever.ErrInvalidNumOfValidatorInfo, err) + assert.Equal(t, dataRetriever.ErrNilDataPacker, err) assert.True(t, check.IfNil(res)) }) t.Run("should work", func(t *testing.T) { @@ -488,7 +489,37 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashArrayType, buff), fromConnectedPeer) assert.True(t, strings.Contains(err.Error(), dataRetriever.ErrValidatorInfoNotFound.Error())) }) - t.Run("enough hashes for one chunk should work", func(t *testing.T) { + t.Run("pack data in chuncks returns error", func(t *testing.T) { + t.Parallel() + + expectedErr := errors.New("expected err") + args := createMockArgValidatorInfoResolver() + args.ValidatorInfoPool = &testscommon.CacherStub{ + GetCalled: func(key []byte) (value interface{}, ok bool) { + return key, true + }, + } 
+ args.ValidatorInfoStorage = &storage.StorerStub{ + SearchFirstCalled: func(key []byte) ([]byte, error) { + return nil, errors.New("not found") + }, + } + args.DataPacker = &mock.DataPackerStub{ + PackDataInChunksCalled: func(data [][]byte, limit int) ([][]byte, error) { + return nil, expectedErr + }, + } + res, _ := resolvers.NewValidatorInfoResolver(args) + require.False(t, check.IfNil(res)) + + b := &batch.Batch{ + Data: [][]byte{[]byte("hash")}, + } + buff, _ := args.Marshaller.Marshal(b) + err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashArrayType, buff), fromConnectedPeer) + assert.Equal(t, expectedErr, err) + }) + t.Run("all hashes in one chunk should work", func(t *testing.T) { t.Parallel() wasCalled := false @@ -528,6 +559,7 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { return nil }, } + args.DataPacker, _ = partitioning.NewSimpleDataPacker(args.Marshaller) res, _ := resolvers.NewValidatorInfoResolver(args) require.False(t, check.IfNil(res)) @@ -540,7 +572,7 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { t.Parallel() args := createMockArgValidatorInfoResolver() - numOfProvidedData := 2*args.MaxNumOfValidatorInfoInResponse + 2 // 2 chunks of 5 + 1 chunk of 2 + numOfProvidedData := 1000 providedHashes := make([][]byte, 0) providedData := make([]state.ValidatorInfo, 0) testHasher := hashingMocks.HasherMock{} @@ -575,8 +607,6 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { _ = marshallerMock.Unmarshal(b, buff) dataLen := len(b.Data) - assert.True(t, dataLen <= args.MaxNumOfValidatorInfoInResponse) - for i := 0; i < dataLen; i++ { vi := &state.ValidatorInfo{} _ = marshallerMock.Unmarshal(vi, b.Data[i]) @@ -592,17 +622,14 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { return nil }, } + args.DataPacker, _ = partitioning.NewSimpleDataPacker(args.Marshaller) res, _ := resolvers.NewValidatorInfoResolver(args) require.False(t, check.IfNil(res)) 
buff, _ := args.Marshaller.Marshal(&batch.Batch{Data: providedHashes}) err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashArrayType, buff), fromConnectedPeer) assert.Nil(t, err) - expectedNumOfCalls := numOfProvidedData / args.MaxNumOfValidatorInfoInResponse - if numOfProvidedData%args.MaxNumOfValidatorInfoInResponse != 0 { - expectedNumOfCalls++ - } - assert.Equal(t, expectedNumOfCalls, numOfCallsSend) + assert.Equal(t, 2, numOfCallsSend) // ~677 messages in a chunk assert.Equal(t, 0, len(providedDataMap)) // all items should have been deleted on Send }) } diff --git a/epochStart/bootstrap/process.go b/epochStart/bootstrap/process.go index 81f0fd4f6a8..d4da4fed0f6 100644 --- a/epochStart/bootstrap/process.go +++ b/epochStart/bootstrap/process.go @@ -1126,23 +1126,22 @@ func (e *epochStartBootstrap) createRequestHandler() error { // this one should only be used before determining the correct shard where the node should reside log.Debug("epochStartBootstrap.createRequestHandler", "shard", e.shardCoordinator.SelfId()) resolversContainerArgs := resolverscontainer.FactoryArgs{ - ShardCoordinator: e.shardCoordinator, - Messenger: e.messenger, - Store: storageService, - Marshalizer: e.coreComponentsHolder.InternalMarshalizer(), - DataPools: e.dataPool, - Uint64ByteSliceConverter: uint64ByteSlice.NewBigEndianConverter(), - NumConcurrentResolvingJobs: 10, - DataPacker: dataPacker, - TriesContainer: e.trieContainer, - SizeCheckDelta: 0, - InputAntifloodHandler: disabled.NewAntiFloodHandler(), - OutputAntifloodHandler: disabled.NewAntiFloodHandler(), - CurrentNetworkEpochProvider: disabled.NewCurrentNetworkEpochProviderHandler(), - PreferredPeersHolder: disabled.NewPreferredPeersHolder(), - ResolverConfig: e.generalConfig.Resolvers, - PeersRatingHandler: disabled.NewDisabledPeersRatingHandler(), - MaxNumOfValidatorInfoInResponse: e.generalConfig.ValidatorInfo.MaxNumOfValidatorInfoInResponse, + ShardCoordinator: e.shardCoordinator, + Messenger: e.messenger, 
+ Store: storageService, + Marshalizer: e.coreComponentsHolder.InternalMarshalizer(), + DataPools: e.dataPool, + Uint64ByteSliceConverter: uint64ByteSlice.NewBigEndianConverter(), + NumConcurrentResolvingJobs: 10, + DataPacker: dataPacker, + TriesContainer: e.trieContainer, + SizeCheckDelta: 0, + InputAntifloodHandler: disabled.NewAntiFloodHandler(), + OutputAntifloodHandler: disabled.NewAntiFloodHandler(), + CurrentNetworkEpochProvider: disabled.NewCurrentNetworkEpochProviderHandler(), + PreferredPeersHolder: disabled.NewPreferredPeersHolder(), + ResolverConfig: e.generalConfig.Resolvers, + PeersRatingHandler: disabled.NewDisabledPeersRatingHandler(), } resolverFactory, err := resolverscontainer.NewMetaResolversContainerFactory(resolversContainerArgs) if err != nil { diff --git a/epochStart/bootstrap/process_test.go b/epochStart/bootstrap/process_test.go index 6ae861e9171..e879ed62e6b 100644 --- a/epochStart/bootstrap/process_test.go +++ b/epochStart/bootstrap/process_test.go @@ -179,9 +179,6 @@ func createMockEpochStartBootstrapArgs( Shards: 10, }, Resolvers: generalCfg.Resolvers, - ValidatorInfo: config.ValidatorInfoConfig{ - MaxNumOfValidatorInfoInResponse: 5, - }, }, EconomicsData: &economicsmocks.EconomicsHandlerStub{ MinGasPriceCalled: func() uint64 { diff --git a/factory/processComponents.go b/factory/processComponents.go index e7f2c65cd25..74026921159 100644 --- a/factory/processComponents.go +++ b/factory/processComponents.go @@ -1025,24 +1025,23 @@ func (pcf *processComponentsFactory) newShardResolverContainerFactory( } resolversContainerFactoryArgs := resolverscontainer.FactoryArgs{ - ShardCoordinator: pcf.bootstrapComponents.ShardCoordinator(), - Messenger: pcf.network.NetworkMessenger(), - Store: pcf.data.StorageService(), - Marshalizer: pcf.coreData.InternalMarshalizer(), - DataPools: pcf.data.Datapool(), - Uint64ByteSliceConverter: pcf.coreData.Uint64ByteSliceConverter(), - DataPacker: dataPacker, - TriesContainer: pcf.state.TriesContainer(), - 
SizeCheckDelta: pcf.config.Marshalizer.SizeCheckDelta, - InputAntifloodHandler: pcf.network.InputAntiFloodHandler(), - OutputAntifloodHandler: pcf.network.OutputAntiFloodHandler(), - NumConcurrentResolvingJobs: pcf.config.Antiflood.NumConcurrentResolverJobs, - IsFullHistoryNode: pcf.prefConfigs.FullArchive, - CurrentNetworkEpochProvider: currentEpochProvider, - ResolverConfig: pcf.config.Resolvers, - PreferredPeersHolder: pcf.network.PreferredPeersHolderHandler(), - PeersRatingHandler: pcf.network.PeersRatingHandler(), - MaxNumOfValidatorInfoInResponse: pcf.config.ValidatorInfo.MaxNumOfValidatorInfoInResponse, + ShardCoordinator: pcf.bootstrapComponents.ShardCoordinator(), + Messenger: pcf.network.NetworkMessenger(), + Store: pcf.data.StorageService(), + Marshalizer: pcf.coreData.InternalMarshalizer(), + DataPools: pcf.data.Datapool(), + Uint64ByteSliceConverter: pcf.coreData.Uint64ByteSliceConverter(), + DataPacker: dataPacker, + TriesContainer: pcf.state.TriesContainer(), + SizeCheckDelta: pcf.config.Marshalizer.SizeCheckDelta, + InputAntifloodHandler: pcf.network.InputAntiFloodHandler(), + OutputAntifloodHandler: pcf.network.OutputAntiFloodHandler(), + NumConcurrentResolvingJobs: pcf.config.Antiflood.NumConcurrentResolverJobs, + IsFullHistoryNode: pcf.prefConfigs.FullArchive, + CurrentNetworkEpochProvider: currentEpochProvider, + ResolverConfig: pcf.config.Resolvers, + PreferredPeersHolder: pcf.network.PreferredPeersHolderHandler(), + PeersRatingHandler: pcf.network.PeersRatingHandler(), } resolversContainerFactory, err := resolverscontainer.NewShardResolversContainerFactory(resolversContainerFactoryArgs) if err != nil { @@ -1062,24 +1061,23 @@ func (pcf *processComponentsFactory) newMetaResolverContainerFactory( } resolversContainerFactoryArgs := resolverscontainer.FactoryArgs{ - ShardCoordinator: pcf.bootstrapComponents.ShardCoordinator(), - Messenger: pcf.network.NetworkMessenger(), - Store: pcf.data.StorageService(), - Marshalizer: 
pcf.coreData.InternalMarshalizer(), - DataPools: pcf.data.Datapool(), - Uint64ByteSliceConverter: pcf.coreData.Uint64ByteSliceConverter(), - DataPacker: dataPacker, - TriesContainer: pcf.state.TriesContainer(), - SizeCheckDelta: pcf.config.Marshalizer.SizeCheckDelta, - InputAntifloodHandler: pcf.network.InputAntiFloodHandler(), - OutputAntifloodHandler: pcf.network.OutputAntiFloodHandler(), - NumConcurrentResolvingJobs: pcf.config.Antiflood.NumConcurrentResolverJobs, - IsFullHistoryNode: pcf.prefConfigs.FullArchive, - CurrentNetworkEpochProvider: currentEpochProvider, - ResolverConfig: pcf.config.Resolvers, - PreferredPeersHolder: pcf.network.PreferredPeersHolderHandler(), - PeersRatingHandler: pcf.network.PeersRatingHandler(), - MaxNumOfValidatorInfoInResponse: pcf.config.ValidatorInfo.MaxNumOfValidatorInfoInResponse, + ShardCoordinator: pcf.bootstrapComponents.ShardCoordinator(), + Messenger: pcf.network.NetworkMessenger(), + Store: pcf.data.StorageService(), + Marshalizer: pcf.coreData.InternalMarshalizer(), + DataPools: pcf.data.Datapool(), + Uint64ByteSliceConverter: pcf.coreData.Uint64ByteSliceConverter(), + DataPacker: dataPacker, + TriesContainer: pcf.state.TriesContainer(), + SizeCheckDelta: pcf.config.Marshalizer.SizeCheckDelta, + InputAntifloodHandler: pcf.network.InputAntiFloodHandler(), + OutputAntifloodHandler: pcf.network.OutputAntiFloodHandler(), + NumConcurrentResolvingJobs: pcf.config.Antiflood.NumConcurrentResolverJobs, + IsFullHistoryNode: pcf.prefConfigs.FullArchive, + CurrentNetworkEpochProvider: currentEpochProvider, + ResolverConfig: pcf.config.Resolvers, + PreferredPeersHolder: pcf.network.PreferredPeersHolderHandler(), + PeersRatingHandler: pcf.network.PeersRatingHandler(), } resolversContainerFactory, err := resolverscontainer.NewMetaResolversContainerFactory(resolversContainerFactoryArgs) if err != nil { diff --git a/factory/stateComponents_test.go b/factory/stateComponents_test.go index 83041b8da8a..1928827e2d0 100644 --- 
a/factory/stateComponents_test.go +++ b/factory/stateComponents_test.go @@ -231,9 +231,6 @@ func getGeneralConfig() config.Config { Type: "LRU", Shards: 1, }, - ValidatorInfo: config.ValidatorInfoConfig{ - MaxNumOfValidatorInfoInResponse: 5, - }, } } diff --git a/integrationTests/testProcessorNode.go b/integrationTests/testProcessorNode.go index 8c8f952dd26..bf1b72c3290 100644 --- a/integrationTests/testProcessorNode.go +++ b/integrationTests/testProcessorNode.go @@ -1348,8 +1348,7 @@ func (tpn *TestProcessorNode) initResolvers() { NumIntraShardPeers: 1, NumFullHistoryPeers: 3, }, - PeersRatingHandler: tpn.PeersRatingHandler, - MaxNumOfValidatorInfoInResponse: 5, + PeersRatingHandler: tpn.PeersRatingHandler, } var err error diff --git a/testscommon/generalConfig.go b/testscommon/generalConfig.go index cc43fcfe151..cbbb0bbaaed 100644 --- a/testscommon/generalConfig.go +++ b/testscommon/generalConfig.go @@ -399,9 +399,6 @@ func GetGeneralConfig() config.Config { TopRatedCacheCapacity: 1000, BadRatedCacheCapacity: 1000, }, - ValidatorInfo: config.ValidatorInfoConfig{ - MaxNumOfValidatorInfoInResponse: 5, - }, } } From 664cd7cea7aa9533cd2cce4929097bed2cf23cbb Mon Sep 17 00:00:00 2001 From: Sorin Stanculeanu Date: Mon, 9 May 2022 13:43:21 +0300 Subject: [PATCH 23/70] fixes after review --- dataRetriever/requestHandlers/requestHandler.go | 2 +- dataRetriever/resolvers/validatorInfoResolver.go | 7 ++++++- .../resolvers/validatorInfoResolver_test.go | 15 +++++++-------- genesis/process/disabled/requestHandler.go | 2 +- 4 files changed, 15 insertions(+), 11 deletions(-) diff --git a/dataRetriever/requestHandlers/requestHandler.go b/dataRetriever/requestHandlers/requestHandler.go index 46926001648..f90400cf06b 100644 --- a/dataRetriever/requestHandlers/requestHandler.go +++ b/dataRetriever/requestHandlers/requestHandler.go @@ -591,7 +591,7 @@ func (rrh *resolverRequestHandler) RequestValidatorInfo(hash []byte) { rrh.addRequestedItems([][]byte{hash}, 
uniqueValidatorInfoSuffix) } -// RequestValidatorsInfo asks for the validator info associated with the specified hashes from connected peers +// RequestValidatorsInfo asks for the validators` info associated with the specified hashes from connected peers func (rrh *resolverRequestHandler) RequestValidatorsInfo(hashes [][]byte) { unrequestedHashes := rrh.getUnrequestedHashes(hashes, uniqueValidatorInfoSuffix) if len(unrequestedHashes) == 0 { diff --git a/dataRetriever/resolvers/validatorInfoResolver.go b/dataRetriever/resolvers/validatorInfoResolver.go index 36aca2b90b2..9462a903548 100644 --- a/dataRetriever/resolvers/validatorInfoResolver.go +++ b/dataRetriever/resolvers/validatorInfoResolver.go @@ -1,6 +1,7 @@ package resolvers import ( + "encoding/hex" "fmt" "github.com/ElrondNetwork/elrond-go-core/core" @@ -165,7 +166,11 @@ func (res *validatorInfoResolver) resolveMultipleHashesRequest(hashesBuff []byte validatorInfoForHashes, err := res.fetchValidatorInfoForHashes(hashes, epoch) if err != nil { - return fmt.Errorf("resolveMultipleHashesRequest error %w from buff %s", err, hashesBuff) + outputHashes := "" + for _, hash := range hashes { + outputHashes += hex.EncodeToString(hash) + " " + } + return fmt.Errorf("resolveMultipleHashesRequest error %w from buff %s", err, outputHashes) } return res.sendValidatorInfoForHashes(validatorInfoForHashes, pid) diff --git a/dataRetriever/resolvers/validatorInfoResolver_test.go b/dataRetriever/resolvers/validatorInfoResolver_test.go index c43e3f512c6..895e9827091 100644 --- a/dataRetriever/resolvers/validatorInfoResolver_test.go +++ b/dataRetriever/resolvers/validatorInfoResolver_test.go @@ -445,18 +445,17 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { t.Run("unmarshal fails", func(t *testing.T) { t.Parallel() - numCalls := 0 expectedErr := errors.New("expected err") args := createMockArgValidatorInfoResolver() args.Marshaller = &testscommon.MarshalizerStub{ UnmarshalCalled: func(obj interface{}, 
buff []byte) error { - marshallerMock := testscommon.MarshalizerMock{} - if numCalls < 1 { - numCalls++ - return marshallerMock.Unmarshal(obj, buff) + switch obj.(type) { + case *dataRetriever.RequestData: + return testscommon.MarshalizerMock{}.Unmarshal(obj, buff) + case *batch.Batch: + return expectedErr } - - return expectedErr + return nil }, } res, _ := resolvers.NewValidatorInfoResolver(args) @@ -487,6 +486,7 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { } buff, _ := args.Marshaller.Marshal(b) err := res.ProcessReceivedMessage(createRequestMsg(dataRetriever.HashArrayType, buff), fromConnectedPeer) + require.NotNil(t, err) assert.True(t, strings.Contains(err.Error(), dataRetriever.ErrValidatorInfoNotFound.Error())) }) t.Run("pack data in chuncks returns error", func(t *testing.T) { @@ -601,7 +601,6 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { numOfCallsSend := 0 args.SenderResolver = &mock.TopicResolverSenderStub{ SendCalled: func(buff []byte, peer core.PeerID) error { - println(numOfCallsSend) marshallerMock := testscommon.MarshalizerMock{} b := &batch.Batch{} _ = marshallerMock.Unmarshal(b, buff) diff --git a/genesis/process/disabled/requestHandler.go b/genesis/process/disabled/requestHandler.go index eec780cdc0a..26a07dc6c89 100644 --- a/genesis/process/disabled/requestHandler.go +++ b/genesis/process/disabled/requestHandler.go @@ -82,7 +82,7 @@ func (r *RequestHandler) CreateTrieNodeIdentifier(_ []byte, _ uint32) []byte { func (r *RequestHandler) RequestValidatorInfo(_ []byte) { } -// RequestValidatorsInfo - +// RequestValidatorsInfo does nothing func (r *RequestHandler) RequestValidatorsInfo(_ [][]byte) { } From d01844932f49155ed20366f6c46a5bec51fa1170 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Thu, 19 May 2022 09:43:18 +0300 Subject: [PATCH 24/70] * Prepared validator info preprocessor for peers min blocks new approach --- cmd/node/config/config.toml | 2 +- dataRetriever/constants.go | 3 + 
.../currentBlockValidatorInfoPool_test.go | 2 +- dataRetriever/dataPool/dataPool.go | 6 +- dataRetriever/dataPool/dataPool_test.go | 4 +- dataRetriever/factory/dataPoolFactory.go | 3 +- dataRetriever/factory/dataPoolFactory_test.go | 5 +- dataRetriever/interface.go | 2 +- .../resolvers/validatorInfoResolver.go | 6 +- .../resolvers/validatorInfoResolver_test.go | 38 +++---- epochStart/metachain/baseRewards.go | 6 +- epochStart/metachain/baseRewards_test.go | 32 +++--- .../metachain/rewardsCreatorProxy_test.go | 12 +-- epochStart/metachain/rewards_test.go | 4 +- epochStart/metachain/validators.go | 5 +- epochStart/metachain/validators_test.go | 35 ++++--- node/nodeMemoryConfig_test.go | 1 + process/block/metablock.go | 22 ++++- process/block/preprocess/transactions_test.go | 6 ++ .../preprocess/validatorInfoPreProcessor.go | 82 ++++++++++----- .../validatorInfoPreProcessor_test.go | 99 +++++++++++++++++++ .../shard/preProcessorsContainerFactory.go | 3 + .../validatorInfoInterceptorProcessor.go | 9 +- .../validatorInfoInterceptorProcessor_test.go | 12 +-- testscommon/dataRetriever/poolFactory.go | 14 ++- testscommon/dataRetriever/poolsHolderMock.go | 10 +- testscommon/dataRetriever/poolsHolderStub.go | 6 +- testscommon/generalConfig.go | 6 +- 28 files changed, 308 insertions(+), 127 deletions(-) diff --git a/cmd/node/config/config.toml b/cmd/node/config/config.toml index 50347d7a48d..f7b5a4a91b0 100644 --- a/cmd/node/config/config.toml +++ b/cmd/node/config/config.toml @@ -442,8 +442,8 @@ [ValidatorInfoPool] Name = "ValidatorInfoPool" Capacity = 10000 - Type = "SizeLRU" SizeInBytes = 31457280 #30MB + Shards = 4 #PublicKeyPeerId represents the main cache used to map Elrond block signing public keys to their associated peer id's. 
[PublicKeyPeerId] diff --git a/dataRetriever/constants.go b/dataRetriever/constants.go index 957c4a41cd2..926cffe11fa 100644 --- a/dataRetriever/constants.go +++ b/dataRetriever/constants.go @@ -11,3 +11,6 @@ const UnsignedTxPoolName = "uTxPool" // RewardTxPoolName defines the name of the reward transactions pool const RewardTxPoolName = "rewardTxPool" + +// ValidatorsInfoPoolName defines the name of the validators info pool +const ValidatorsInfoPoolName = "validatorsInfoPool" diff --git a/dataRetriever/dataPool/currentBlockValidatorInfoPool_test.go b/dataRetriever/dataPool/currentBlockValidatorInfoPool_test.go index b6389753ea8..6d4a40694c1 100644 --- a/dataRetriever/dataPool/currentBlockValidatorInfoPool_test.go +++ b/dataRetriever/dataPool/currentBlockValidatorInfoPool_test.go @@ -11,7 +11,7 @@ func TestCurrentBlockValidatorInfoPool_AddGetCleanTx(t *testing.T) { t.Parallel() validatorInfoHash := []byte("hash") - validatorInfo := &state.ValidatorInfo{} + validatorInfo := &state.ShardValidatorInfo{} currentValidatorInfoPool := NewCurrentBlockValidatorInfoPool() require.False(t, currentValidatorInfoPool.IsInterfaceNil()) diff --git a/dataRetriever/dataPool/dataPool.go b/dataRetriever/dataPool/dataPool.go index 16a25abb43b..38d54dded14 100644 --- a/dataRetriever/dataPool/dataPool.go +++ b/dataRetriever/dataPool/dataPool.go @@ -20,7 +20,7 @@ type dataPool struct { currBlockTxs dataRetriever.TransactionCacher currBlockValidatorInfo dataRetriever.ValidatorInfoCacher smartContracts storage.Cacher - validatorsInfo storage.Cacher + validatorsInfo dataRetriever.ShardedDataCacherNotifier } // DataPoolArgs represents the data pool's constructor structure @@ -36,7 +36,7 @@ type DataPoolArgs struct { CurrentBlockTransactions dataRetriever.TransactionCacher CurrentBlockValidatorInfo dataRetriever.ValidatorInfoCacher SmartContracts storage.Cacher - ValidatorsInfo storage.Cacher + ValidatorsInfo dataRetriever.ShardedDataCacherNotifier } // NewDataPool creates a data pools holder 
object @@ -150,7 +150,7 @@ func (dp *dataPool) SmartContracts() storage.Cacher { } // ValidatorsInfo returns the holder for validators info -func (dp *dataPool) ValidatorsInfo() storage.Cacher { +func (dp *dataPool) ValidatorsInfo() dataRetriever.ShardedDataCacherNotifier { return dp.validatorsInfo } diff --git a/dataRetriever/dataPool/dataPool_test.go b/dataRetriever/dataPool/dataPool_test.go index f27acfbd367..45a7ab852f3 100644 --- a/dataRetriever/dataPool/dataPool_test.go +++ b/dataRetriever/dataPool/dataPool_test.go @@ -26,7 +26,7 @@ func createMockDataPoolArgs() dataPool.DataPoolArgs { CurrentBlockTransactions: &mock.TxForCurrentBlockStub{}, CurrentBlockValidatorInfo: &mock.ValidatorInfoForCurrentBlockStub{}, SmartContracts: testscommon.NewCacherStub(), - ValidatorsInfo: testscommon.NewCacherStub(), + ValidatorsInfo: testscommon.NewShardedDataStub(), } } @@ -175,7 +175,7 @@ func TestNewDataPool_OkValsShouldWork(t *testing.T) { CurrentBlockTransactions: &mock.TxForCurrentBlockStub{}, CurrentBlockValidatorInfo: &mock.ValidatorInfoForCurrentBlockStub{}, SmartContracts: testscommon.NewCacherStub(), - ValidatorsInfo: testscommon.NewCacherStub(), + ValidatorsInfo: testscommon.NewShardedDataStub(), } tdp, err := dataPool.NewDataPool(args) diff --git a/dataRetriever/factory/dataPoolFactory.go b/dataRetriever/factory/dataPoolFactory.go index e8d48dd4be7..d14d1bebc0d 100644 --- a/dataRetriever/factory/dataPoolFactory.go +++ b/dataRetriever/factory/dataPoolFactory.go @@ -124,8 +124,7 @@ func NewDataPoolFromConfig(args ArgsDataPool) (dataRetriever.PoolsHolder, error) return nil, fmt.Errorf("%w while creating the cache for the smartcontract results", err) } - cacherCfg = factory.GetCacherFromConfig(mainConfig.ValidatorInfoPool) - validatorsInfo, err := storageUnit.NewCache(cacherCfg) + validatorsInfo, err := shardedData.NewShardedData(dataRetriever.ValidatorsInfoPoolName, factory.GetCacherFromConfig(mainConfig.ValidatorInfoPool)) if err != nil { return nil, fmt.Errorf("%w 
while creating the cache for the validator info results", err) } diff --git a/dataRetriever/factory/dataPoolFactory_test.go b/dataRetriever/factory/dataPoolFactory_test.go index 84d8b94bca1..e7207b4589a 100644 --- a/dataRetriever/factory/dataPoolFactory_test.go +++ b/dataRetriever/factory/dataPoolFactory_test.go @@ -130,11 +130,10 @@ func TestNewDataPoolFromConfig_BadConfigShouldErr(t *testing.T) { require.True(t, strings.Contains(err.Error(), "the cache for the smartcontract results")) args = getGoodArgs() - args.Config.ValidatorInfoPool.Type = "invalid cache type" + args.Config.ValidatorInfoPool.Capacity = 0 holder, err = NewDataPoolFromConfig(args) require.Nil(t, holder) - fmt.Println(err) - require.True(t, errors.Is(err, storage.ErrNotSupportedCacheType)) + require.True(t, errors.Is(err, storage.ErrInvalidConfig)) require.True(t, strings.Contains(err.Error(), "the cache for the validator info results")) } diff --git a/dataRetriever/interface.go b/dataRetriever/interface.go index d65265866e1..ee639fbf4f1 100644 --- a/dataRetriever/interface.go +++ b/dataRetriever/interface.go @@ -337,7 +337,7 @@ type PoolsHolder interface { SmartContracts() storage.Cacher CurrentBlockTxs() TransactionCacher CurrentBlockValidatorInfo() ValidatorInfoCacher - ValidatorsInfo() storage.Cacher + ValidatorsInfo() ShardedDataCacherNotifier IsInterfaceNil() bool } diff --git a/dataRetriever/resolvers/validatorInfoResolver.go b/dataRetriever/resolvers/validatorInfoResolver.go index 9462a903548..19b5a3e90d4 100644 --- a/dataRetriever/resolvers/validatorInfoResolver.go +++ b/dataRetriever/resolvers/validatorInfoResolver.go @@ -23,7 +23,7 @@ type ArgValidatorInfoResolver struct { Marshaller marshal.Marshalizer AntifloodHandler dataRetriever.P2PAntifloodHandler Throttler dataRetriever.ResolverThrottler - ValidatorInfoPool storage.Cacher + ValidatorInfoPool dataRetriever.ShardedDataCacherNotifier ValidatorInfoStorage storage.Storer DataPacker dataRetriever.DataPacker IsFullHistoryNode bool @@ 
-34,7 +34,7 @@ type validatorInfoResolver struct { dataRetriever.TopicResolverSender messageProcessor baseStorageResolver - validatorInfoPool storage.Cacher + validatorInfoPool dataRetriever.ShardedDataCacherNotifier validatorInfoStorage storage.Storer dataPacker dataRetriever.DataPacker } @@ -209,7 +209,7 @@ func (res *validatorInfoResolver) fetchValidatorInfoForHashes(hashes [][]byte, e } func (res *validatorInfoResolver) fetchValidatorInfoByteSlice(hash []byte, epoch uint32) ([]byte, error) { - data, ok := res.validatorInfoPool.Get(hash) + data, ok := res.validatorInfoPool.SearchFirstData(hash) if ok { return res.marshalizer.Marshal(data) } diff --git a/dataRetriever/resolvers/validatorInfoResolver_test.go b/dataRetriever/resolvers/validatorInfoResolver_test.go index 895e9827091..ac895edf0d5 100644 --- a/dataRetriever/resolvers/validatorInfoResolver_test.go +++ b/dataRetriever/resolvers/validatorInfoResolver_test.go @@ -29,7 +29,7 @@ func createMockArgValidatorInfoResolver() resolvers.ArgValidatorInfoResolver { Marshaller: &mock.MarshalizerMock{}, AntifloodHandler: &mock.P2PAntifloodHandlerStub{}, Throttler: &mock.ThrottlerStub{}, - ValidatorInfoPool: testscommon.NewCacherStub(), + ValidatorInfoPool: testscommon.NewShardedDataStub(), ValidatorInfoStorage: &storage.StorerStub{}, DataPacker: &mock.DataPackerStub{}, IsFullHistoryNode: false, @@ -297,8 +297,8 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { expectedErr := errors.New("expected err") args := createMockArgValidatorInfoResolver() - args.ValidatorInfoPool = &testscommon.CacherStub{ - GetCalled: func(key []byte) (value interface{}, ok bool) { + args.ValidatorInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { return nil, false }, } @@ -319,8 +319,8 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { expectedErr := errors.New("expected err") marshallerMock := testscommon.MarshalizerMock{} args := 
createMockArgValidatorInfoResolver() - args.ValidatorInfoPool = &testscommon.CacherStub{ - GetCalled: func(key []byte) (value interface{}, ok bool) { + args.ValidatorInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { return []byte("some value"), true }, } @@ -344,8 +344,8 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { expectedErr := errors.New("expected err") marshallerMock := testscommon.MarshalizerMock{} args := createMockArgValidatorInfoResolver() - args.ValidatorInfoPool = &testscommon.CacherStub{ - GetCalled: func(key []byte) (value interface{}, ok bool) { + args.ValidatorInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { return nil, false }, } @@ -374,8 +374,8 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { wasCalled := false providedValue := createMockValidatorInfo([]byte("provided pk")) args := createMockArgValidatorInfoResolver() - args.ValidatorInfoPool = &testscommon.CacherStub{ - GetCalled: func(key []byte) (value interface{}, ok bool) { + args.ValidatorInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { return providedValue, true }, } @@ -407,8 +407,8 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { wasCalled := false providedValue := createMockValidatorInfo([]byte("provided pk")) args := createMockArgValidatorInfoResolver() - args.ValidatorInfoPool = &testscommon.CacherStub{ - GetCalled: func(key []byte) (value interface{}, ok bool) { + args.ValidatorInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { return nil, false }, } @@ -468,8 +468,8 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { t.Parallel() args := createMockArgValidatorInfoResolver() - args.ValidatorInfoPool = &testscommon.CacherStub{ - GetCalled: 
func(key []byte) (value interface{}, ok bool) { + args.ValidatorInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { return nil, false }, } @@ -494,8 +494,8 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { expectedErr := errors.New("expected err") args := createMockArgValidatorInfoResolver() - args.ValidatorInfoPool = &testscommon.CacherStub{ - GetCalled: func(key []byte) (value interface{}, ok bool) { + args.ValidatorInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { return key, true }, } @@ -534,8 +534,8 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { } args := createMockArgValidatorInfoResolver() numOfCalls := 0 - args.ValidatorInfoPool = &testscommon.CacherStub{ - GetCalled: func(key []byte) (value interface{}, ok bool) { + args.ValidatorInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { val := providedData[numOfCalls] numOfCalls++ return val, true @@ -591,8 +591,8 @@ func TestValidatorInfoResolver_ProcessReceivedMessage(t *testing.T) { providedDataMap[string(hash)] = struct{}{} } numOfCalls := 0 - args.ValidatorInfoPool = &testscommon.CacherStub{ - GetCalled: func(key []byte) (value interface{}, ok bool) { + args.ValidatorInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { val := providedData[numOfCalls] numOfCalls++ return val, true diff --git a/epochStart/metachain/baseRewards.go b/epochStart/metachain/baseRewards.go index 271e803cc15..7c35a42fbcf 100644 --- a/epochStart/metachain/baseRewards.go +++ b/epochStart/metachain/baseRewards.go @@ -134,7 +134,7 @@ func (brc *baseRewardsCreator) CreateMarshalledData(body *block.Body) map[string continue } - broadcastTopic := createBroadcastTopic(brc.shardCoordinator, miniBlock.ReceiverShardID) + broadcastTopic := 
createBroadcastTopic(factory.RewardsTransactionTopic, brc.shardCoordinator, miniBlock.ReceiverShardID) if _, ok := marshalledRewardsTxs[broadcastTopic]; !ok { marshalledRewardsTxs[broadcastTopic] = make([][]byte, 0, len(miniBlock.TxHashes)) } @@ -493,8 +493,8 @@ func getMiniBlockWithReceiverShardID(shardId uint32, miniBlocks block.MiniBlockS return nil } -func createBroadcastTopic(shardC sharding.Coordinator, destShId uint32) string { - transactionTopic := factory.RewardsTransactionTopic + shardC.CommunicationIdentifier(destShId) +func createBroadcastTopic(topic string, shardC sharding.Coordinator, destShId uint32) string { + transactionTopic := topic + shardC.CommunicationIdentifier(destShId) return transactionTopic } diff --git a/epochStart/metachain/baseRewards_test.go b/epochStart/metachain/baseRewards_test.go index 6702bb6c524..a5eca3df9c8 100644 --- a/epochStart/metachain/baseRewards_test.go +++ b/epochStart/metachain/baseRewards_test.go @@ -21,10 +21,10 @@ import ( "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/state/factory" dataRetrieverMock "github.com/ElrondNetwork/elrond-go/testscommon/dataRetriever" + "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" "github.com/ElrondNetwork/elrond-go/testscommon/shardingMocks" stateMock "github.com/ElrondNetwork/elrond-go/testscommon/state" trieMock "github.com/ElrondNetwork/elrond-go/testscommon/trie" - "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" "github.com/ElrondNetwork/elrond-go/trie" vmcommon "github.com/ElrondNetwork/elrond-vm-common" "github.com/stretchr/testify/assert" @@ -276,7 +276,7 @@ func TestBaseRewardsCreator_CreateMarshalizedDataNilMiniblocksEmptyMap(t *testin require.Nil(t, err) require.NotNil(t, rwd) - result := rwd.CreateMarshalizedData(nil) + result := rwd.CreateMarshalledData(nil) require.Equal(t, 0, len(result)) } @@ -288,7 +288,7 @@ func TestBaseRewardsCreator_CreateMarshalizedDataEmptyMiniblocksEmptyMap(t *test require.Nil(t, err) 
require.NotNil(t, rwd) - result := rwd.CreateMarshalizedData(&block.Body{}) + result := rwd.CreateMarshalledData(&block.Body{}) require.Equal(t, 0, len(result)) } @@ -313,7 +313,7 @@ func TestBaseRewardsCreator_CreateMarshalizedDataOnlyRewardsMiniblocksGetMarshal for _, mbType := range miniBlockTypes { dummyMiniBlock.Type = mbType - result := rwd.CreateMarshalizedData(&block.Body{ + result := rwd.CreateMarshalledData(&block.Body{ MiniBlocks: block.MiniBlockSlice{ dummyMiniBlock, }, @@ -322,7 +322,7 @@ func TestBaseRewardsCreator_CreateMarshalizedDataOnlyRewardsMiniblocksGetMarshal } dummyMiniBlock.Type = block.RewardsBlock - result := rwd.CreateMarshalizedData(&block.Body{ + result := rwd.CreateMarshalledData(&block.Body{ MiniBlocks: block.MiniBlockSlice{ dummyMiniBlock, }, @@ -353,7 +353,7 @@ func TestBaseRewardsCreator_CreateMarshalizedDataWrongSenderNotIncluded(t *testi dummyMiniBlock := createDummyRewardTxMiniblock(rwd) dummyMiniBlock.Type = block.RewardsBlock dummyMiniBlock.SenderShardID = args.ShardCoordinator.SelfId() + 1 - result := rwd.CreateMarshalizedData(&block.Body{ + result := rwd.CreateMarshalledData(&block.Body{ MiniBlocks: block.MiniBlockSlice{ dummyMiniBlock, }, @@ -372,7 +372,7 @@ func TestBaseRewardsCreator_CreateMarshalizedDataNotFoundTxHashIgnored(t *testin dummyMiniBlock := createDummyRewardTxMiniblock(rwd) dummyMiniBlock.Type = block.RewardsBlock dummyMiniBlock.TxHashes = [][]byte{[]byte("not found txHash")} - result := rwd.CreateMarshalizedData(&block.Body{ + result := rwd.CreateMarshalledData(&block.Body{ MiniBlocks: block.MiniBlockSlice{ dummyMiniBlock, }, @@ -446,7 +446,7 @@ func TestBaseRewardsCreator_SaveTxBlockToStorageNilBodyNoPanic(t *testing.T) { require.Nil(t, err) require.NotNil(t, rwd) - rwd.SaveTxBlockToStorage(nil, nil) + rwd.SaveBlockDataToStorage(nil, nil) } func TestBaseRewardsCreator_SaveTxBlockToStorageNonRewardsMiniBlocksAreIgnored(t *testing.T) { @@ -472,7 +472,7 @@ func 
TestBaseRewardsCreator_SaveTxBlockToStorageNonRewardsMiniBlocksAreIgnored(t for _, mbType := range miniBlockTypes { dummyMiniBlock.Type = mbType - rwd.SaveTxBlockToStorage(nil, &block.Body{ + rwd.SaveBlockDataToStorage(nil, &block.Body{ MiniBlocks: block.MiniBlockSlice{ dummyMiniBlock, }, @@ -487,7 +487,7 @@ func TestBaseRewardsCreator_SaveTxBlockToStorageNonRewardsMiniBlocksAreIgnored(t } dummyMiniBlock.Type = block.RewardsBlock - rwd.SaveTxBlockToStorage(nil, &block.Body{ + rwd.SaveBlockDataToStorage(nil, &block.Body{ MiniBlocks: block.MiniBlockSlice{ dummyMiniBlock, }, @@ -516,7 +516,7 @@ func TestBaseRewardsCreator_SaveTxBlockToStorageNotFoundTxIgnored(t *testing.T) dummyMb := createDummyRewardTxMiniblock(rwd) dummyMb.TxHashes = [][]byte{rwTxHash} - rwd.SaveTxBlockToStorage(nil, &block.Body{MiniBlocks: block.MiniBlockSlice{dummyMb}}) + rwd.SaveBlockDataToStorage(nil, &block.Body{MiniBlocks: block.MiniBlockSlice{dummyMb}}) mmb, err := args.Marshalizer.Marshal(dummyMb) require.Nil(t, err) @@ -544,7 +544,7 @@ func TestBaseRewardsCreator_DeleteTxsFromStorageNilMetablockNoPanic(t *testing.T require.NotNil(t, rwd) dummyMb := createDummyRewardTxMiniblock(rwd) - rwd.DeleteTxsFromStorage(nil, &block.Body{MiniBlocks: block.MiniBlockSlice{dummyMb}}) + rwd.DeleteBlockDataFromStorage(nil, &block.Body{MiniBlocks: block.MiniBlockSlice{dummyMb}}) } func TestBaseRewardsCreator_DeleteTxsFromStorageNilBlockBodyNoPanic(t *testing.T) { @@ -565,7 +565,7 @@ func TestBaseRewardsCreator_DeleteTxsFromStorageNilBlockBodyNoPanic(t *testing.T DevFeesInEpoch: big.NewInt(0), } - rwd.DeleteTxsFromStorage(metaBlk, nil) + rwd.DeleteBlockDataFromStorage(metaBlk, nil) } func TestBaseRewardsCreator_DeleteTxsFromStorageNonRewardsMiniBlocksIgnored(t *testing.T) { @@ -609,7 +609,7 @@ func TestBaseRewardsCreator_DeleteTxsFromStorageNonRewardsMiniBlocksIgnored(t *t dummyMbMarshalled, _ := args.Marshalizer.Marshal(dummyMb) _ = rwd.miniBlockStorage.Put(mbHash, dummyMbMarshalled) - 
rwd.DeleteTxsFromStorage(metaBlk, &block.Body{MiniBlocks: block.MiniBlockSlice{dummyMb}}) + rwd.DeleteBlockDataFromStorage(metaBlk, &block.Body{MiniBlocks: block.MiniBlockSlice{dummyMb}}) tx, err = rwd.rewardsStorage.Get(rwTxHash) require.Nil(t, err) require.NotNil(t, tx) @@ -651,7 +651,7 @@ func TestBaseRewardsCreator_DeleteTxsFromStorage(t *testing.T) { dummyMbMarshalled, _ := args.Marshalizer.Marshal(dummyMb) _ = rwd.miniBlockStorage.Put(mbHash, dummyMbMarshalled) - rwd.DeleteTxsFromStorage(metaBlk, &block.Body{MiniBlocks: block.MiniBlockSlice{dummyMb}}) + rwd.DeleteBlockDataFromStorage(metaBlk, &block.Body{MiniBlocks: block.MiniBlockSlice{dummyMb}}) tx, err := rwd.rewardsStorage.Get(rwTxHash) require.NotNil(t, err) require.Nil(t, tx) @@ -696,7 +696,7 @@ func TestBaseRewardsCreator_RemoveBlockDataFromPoolsNilBlockBodyNoPanic(t *testi DevFeesInEpoch: big.NewInt(0), } - rwd.DeleteTxsFromStorage(metaBlk, nil) + rwd.DeleteBlockDataFromStorage(metaBlk, nil) } func TestBaseRewardsCreator_RemoveBlockDataFromPoolsNonRewardsMiniBlocksIgnored(t *testing.T) { diff --git a/epochStart/metachain/rewardsCreatorProxy_test.go b/epochStart/metachain/rewardsCreatorProxy_test.go index 0be19faba25..ab82188ad23 100644 --- a/epochStart/metachain/rewardsCreatorProxy_test.go +++ b/epochStart/metachain/rewardsCreatorProxy_test.go @@ -214,7 +214,7 @@ func TestRewardsCreatorProxy_CreateMarshalizedData(t *testing.T) { blockBody := createDefaultBlockBody() rewardCreatorV1 := &mock.RewardsCreatorStub{ - CreateMarshalizedDataCalled: func(body *block.Body) map[string][][]byte { + CreateMarshalledDataCalled: func(body *block.Body) map[string][][]byte { if blockBody == body { return expectedValue } @@ -224,7 +224,7 @@ func TestRewardsCreatorProxy_CreateMarshalizedData(t *testing.T) { rewardsCreatorProxy, _, _ := createTestData(rewardCreatorV1, rCreatorV1) - protocolSustainabilityRewards := rewardsCreatorProxy.CreateMarshalizedData(blockBody) + protocolSustainabilityRewards := 
rewardsCreatorProxy.CreateMarshalledData(blockBody) require.Equal(t, expectedValue, protocolSustainabilityRewards) } @@ -259,14 +259,14 @@ func TestRewardsCreatorProxy_SaveTxBlockToStorage(t *testing.T) { functionCalled := false rewardCreatorV1 := &mock.RewardsCreatorStub{ - SaveTxBlockToStorageCalled: func(metaBlock data.MetaHeaderHandler, body *block.Body) { + SaveBlockDataToStorageCalled: func(metaBlock data.MetaHeaderHandler, body *block.Body) { functionCalled = true }, } rewardsCreatorProxy, _, metaBlock := createTestData(rewardCreatorV1, rCreatorV1) - rewardsCreatorProxy.SaveTxBlockToStorage(metaBlock, blockBody) + rewardsCreatorProxy.SaveBlockDataToStorage(metaBlock, blockBody) require.Equal(t, true, functionCalled) } @@ -277,14 +277,14 @@ func TestRewardsCreatorProxy_DeleteTxsFromStorage(t *testing.T) { functionCalled := false rewardCreatorV1 := &mock.RewardsCreatorStub{ - DeleteTxsFromStorageCalled: func(metaBlock data.MetaHeaderHandler, body *block.Body) { + DeleteBlockDataFromStorageCalled: func(metaBlock data.MetaHeaderHandler, body *block.Body) { functionCalled = true }, } rewardsCreatorProxy, _, metaBlock := createTestData(rewardCreatorV1, rCreatorV1) - rewardsCreatorProxy.DeleteTxsFromStorage(metaBlock, blockBody) + rewardsCreatorProxy.DeleteBlockDataFromStorage(metaBlock, blockBody) require.Equal(t, true, functionCalled) } diff --git a/epochStart/metachain/rewards_test.go b/epochStart/metachain/rewards_test.go index ec30f0d96d0..517ccc7eb03 100644 --- a/epochStart/metachain/rewards_test.go +++ b/epochStart/metachain/rewards_test.go @@ -514,7 +514,7 @@ func TestRewardsCreator_CreateMarshalizedData(t *testing.T) { }, }, } - res := rwd.CreateMarshalizedData(&bdy) + res := rwd.CreateMarshalledData(&bdy) assert.NotNil(t, res) } @@ -579,7 +579,7 @@ func TestRewardsCreator_SaveTxBlockToStorage(t *testing.T) { }, }, } - rwd.SaveTxBlockToStorage(&mb2, &bdy) + rwd.SaveBlockDataToStorage(&mb2, &bdy) assert.True(t, putRwdTxWasCalled) assert.True(t, 
putMbWasCalled) diff --git a/epochStart/metachain/validators.go b/epochStart/metachain/validators.go index 43de0d161a7..c6a419eb072 100644 --- a/epochStart/metachain/validators.go +++ b/epochStart/metachain/validators.go @@ -15,6 +15,7 @@ import ( "github.com/ElrondNetwork/elrond-go/dataRetriever/dataPool" "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/process" + "github.com/ElrondNetwork/elrond-go/process/factory" "github.com/ElrondNetwork/elrond-go/sharding" "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/storage" @@ -233,7 +234,7 @@ func (vic *validatorInfoCreator) CreateMarshalledData(body *block.Body) map[stri continue } - broadcastTopic := createBroadcastTopic(vic.shardCoordinator, miniBlock.ReceiverShardID) + broadcastTopic := createBroadcastTopic(factory.UnsignedTransactionTopic, vic.shardCoordinator, miniBlock.ReceiverShardID) if _, ok := marshalledValidatorInfoTxs[broadcastTopic]; !ok { marshalledValidatorInfoTxs[broadcastTopic] = make([][]byte, 0, len(miniBlock.TxHashes)) } @@ -360,7 +361,7 @@ func (vic *validatorInfoCreator) RemoveBlockDataFromPools(metaBlock data.HeaderH } for _, txHash := range miniBlock.TxHashes { - validatorInfoPool.Remove(txHash) + validatorInfoPool.RemoveDataFromAllShards(txHash) } } diff --git a/epochStart/metachain/validators_test.go b/epochStart/metachain/validators_test.go index cc85eb4fb1d..6dfefaa301f 100644 --- a/epochStart/metachain/validators_test.go +++ b/epochStart/metachain/validators_test.go @@ -128,7 +128,7 @@ func createMockEpochValidatorInfoCreatorsArguments() ArgsNewValidatorInfoCreator return argsNewEpochEconomics } -func verifyMiniBlocks(bl *block.MiniBlock, infos []*state.ValidatorInfo, marshalizer marshal.Marshalizer) bool { +func verifyMiniBlocks(bl *block.MiniBlock, infos []*state.ValidatorInfo, marshalledShardValidatorsInfo [][]byte, marshalizer marshal.Marshalizer) bool { if bl.SenderShardID != core.MetachainShardId || bl.ReceiverShardID 
!= core.AllShardId || len(bl.TxHashes) == 0 || @@ -142,10 +142,10 @@ func verifyMiniBlocks(bl *block.MiniBlock, infos []*state.ValidatorInfo, marshal return bytes.Compare(validatorCopy[a].PublicKey, validatorCopy[b].PublicKey) < 0 }) - for i, txHash := range bl.TxHashes { - vi := createShardValidatorInfo(validatorCopy[i]) + for i, marshalledShardValidatorInfo := range marshalledShardValidatorsInfo { + vi := createShardValidatorInfo(infos[i]) unmarshaledVi := &state.ShardValidatorInfo{} - _ = marshalizer.Unmarshal(unmarshaledVi, txHash) + _ = marshalizer.Unmarshal(unmarshaledVi, marshalledShardValidatorInfo) if !reflect.DeepEqual(unmarshaledVi, vi) { return false } @@ -265,9 +265,22 @@ func TestEpochValidatorInfoCreator_CreateValidatorInfoMiniBlocksShouldBeCorrect( vic, _ := NewValidatorInfoCreator(arguments) mbs, _ := vic.CreateValidatorInfoMiniBlocks(validatorInfo) - correctMB0 := verifyMiniBlocks(mbs[0], validatorInfo[0], arguments.Marshalizer) + shardValidatorInfo := make([]*state.ShardValidatorInfo, len(validatorInfo[0])) + marshalledShardValidatorInfo := make([][]byte, len(validatorInfo[0])) + for i := 0; i < len(validatorInfo[0]); i++ { + shardValidatorInfo[i] = createShardValidatorInfo(validatorInfo[0][i]) + marshalledShardValidatorInfo[i], _ = arguments.Marshalizer.Marshal(shardValidatorInfo[i]) + } + correctMB0 := verifyMiniBlocks(mbs[0], validatorInfo[0], marshalledShardValidatorInfo, arguments.Marshalizer) require.True(t, correctMB0) - correctMbMeta := verifyMiniBlocks(mbs[1], validatorInfo[core.MetachainShardId], arguments.Marshalizer) + + shardValidatorInfo = make([]*state.ShardValidatorInfo, len(validatorInfo[core.MetachainShardId])) + marshalledShardValidatorInfo = make([][]byte, len(validatorInfo[core.MetachainShardId])) + for i := 0; i < len(validatorInfo[core.MetachainShardId]); i++ { + shardValidatorInfo[i] = createShardValidatorInfo(validatorInfo[core.MetachainShardId][i]) + marshalledShardValidatorInfo[i], _ = 
arguments.Marshalizer.Marshal(shardValidatorInfo[i]) + } + correctMbMeta := verifyMiniBlocks(mbs[1], validatorInfo[core.MetachainShardId], marshalledShardValidatorInfo, arguments.Marshalizer) require.True(t, correctMbMeta) } @@ -368,9 +381,9 @@ func createValidatorInfoMiniBlocks( }) for index, validator := range validatorCopy { - shardValidator := createShardValidatorInfo(validator) - marshalizedValidator, _ := arguments.Marshalizer.Marshal(shardValidator) - miniBlock.TxHashes[index] = marshalizedValidator + shardValidatorInfo := createShardValidatorInfo(validator) + shardValidatorInfoHash, _ := core.CalculateHash(arguments.Marshalizer, arguments.Hasher, shardValidatorInfo) + miniBlock.TxHashes[index] = shardValidatorInfoHash } miniblocks = append(miniblocks, miniBlock) @@ -428,7 +441,7 @@ func TestEpochValidatorInfoCreator_SaveValidatorInfoBlockDataToStorage(t *testin } body := &block.Body{MiniBlocks: miniblocks} - vic.SaveValidatorInfoBlockDataToStorage(meta, body) + vic.SaveBlockDataToStorage(meta, body) for i, mbHeader := range meta.MiniBlockHeaders { mb, err := miniBlockStorage.Get(mbHeader.Hash) @@ -506,7 +519,7 @@ func testDeleteValidatorInfoBlockData(t *testing.T, blockType block.Type, should } body := &block.Body{} - vic.DeleteValidatorInfoBlockDataFromStorage(meta, body) + vic.DeleteBlockDataFromStorage(meta, body) for _, mbHeader := range meta.MiniBlockHeaders { mb, err := mbStorage.Get(mbHeader.Hash) diff --git a/node/nodeMemoryConfig_test.go b/node/nodeMemoryConfig_test.go index ac353de4917..2ac27f707c9 100644 --- a/node/nodeMemoryConfig_test.go +++ b/node/nodeMemoryConfig_test.go @@ -44,6 +44,7 @@ func TestMemoryConfig(t *testing.T) { plannedMemory += nodeConfig.UnsignedTransactionDataPool.SizeInBytes * uint64(numShardsIncludingMeta*(numShardsIncludingMeta-1)) / 2 // One cache for each pair (meta, shard) plannedMemory += nodeConfig.RewardTransactionDataPool.SizeInBytes * uint64(numShards) + plannedMemory += nodeConfig.ValidatorInfoPool.SizeInBytes 
require.LessOrEqual(t, int(plannedMemory), 3000*core.MegabyteSize) } diff --git a/process/block/metablock.go b/process/block/metablock.go index fda3609f95c..1a6b270d21f 100644 --- a/process/block/metablock.go +++ b/process/block/metablock.go @@ -1310,6 +1310,7 @@ func (mp *metaProcessor) CommitBlock( mp.blockTracker.CleanupInvalidCrossHeaders(header.Epoch, header.Round) } + // TODO: Should pe sent alongside rewardsTxs also validatorInfoTxs -> mp.validatorInfoCreator.GetValidatorInfoTxs(body) ? mp.indexBlock(header, headerHash, body, lastMetaBlock, notarizedHeadersHashes, rewardsTxs) mp.recordBlockInHistory(headerHash, headerHandler, bodyHandler) @@ -2381,9 +2382,7 @@ func (mp *metaProcessor) MarshalizedDataToBroadcast( var mrsTxs map[string][][]byte if hdr.IsStartOfEpochBlock() { - mrsTxs = mp.epochRewardsCreator.CreateMarshalledData(body) - //TODO: Append also validator info txs to be broadcast - //mrsTxs = mp.validatorInfoCreator.CreateMarshalledData(body) + mrsTxs = mp.getAllMarshalledTxs(body) } else { mrsTxs = mp.txCoordinator.CreateMarshalizedData(body) } @@ -2410,6 +2409,23 @@ func (mp *metaProcessor) MarshalizedDataToBroadcast( return mrsData, mrsTxs, nil } +func (mp *metaProcessor) getAllMarshalledTxs(body *block.Body) map[string][][]byte { + allMarshalledTxs := make(map[string][][]byte) + + marshalledRewardsTxs := mp.epochRewardsCreator.CreateMarshalledData(body) + marshalledValidatorInfoTxs := mp.validatorInfoCreator.CreateMarshalledData(body) + + for topic, marshalledTxs := range marshalledRewardsTxs { + allMarshalledTxs[topic] = append(allMarshalledTxs[topic], marshalledTxs...) + } + + for topic, marshalledTxs := range marshalledValidatorInfoTxs { + allMarshalledTxs[topic] = append(allMarshalledTxs[topic], marshalledTxs...) 
+ } + + return allMarshalledTxs +} + func getTxCount(shardInfo []data.ShardDataHandler) uint32 { txs := uint32(0) for i := 0; i < len(shardInfo); i++ { diff --git a/process/block/preprocess/transactions_test.go b/process/block/preprocess/transactions_test.go index ca33eb3f1c7..d465af51002 100644 --- a/process/block/preprocess/transactions_test.go +++ b/process/block/preprocess/transactions_test.go @@ -147,6 +147,12 @@ func initDataPool() *dataRetrieverMock.PoolsHolderStub { }, } }, + ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { + return &testscommon.ShardedDataStub{ + RemoveSetOfDataFromPoolCalled: func(keys [][]byte, destCacheID string) { + }, + } + }, MetaBlocksCalled: func() storage.Cacher { return &testscommon.CacherStub{ GetCalled: func(key []byte) (value interface{}, ok bool) { diff --git a/process/block/preprocess/validatorInfoPreProcessor.go b/process/block/preprocess/validatorInfoPreProcessor.go index ef77c2f34f1..86fa6bbb1d5 100644 --- a/process/block/preprocess/validatorInfoPreProcessor.go +++ b/process/block/preprocess/validatorInfoPreProcessor.go @@ -1,6 +1,7 @@ package preprocess import ( + "github.com/ElrondNetwork/elrond-go/dataRetriever" "time" "github.com/ElrondNetwork/elrond-go-core/core" @@ -17,9 +18,12 @@ var _ process.DataMarshalizer = (*validatorInfoPreprocessor)(nil) var _ process.PreProcessor = (*validatorInfoPreprocessor)(nil) type validatorInfoPreprocessor struct { - hasher hashing.Hasher - marshalizer marshal.Marshalizer - blockSizeComputation BlockSizeComputationHandler + *basePreProcess + chReceivedAllValidatorInfo chan bool + onRequestValidatorsInfo func(txHashes [][]byte) + validatorInfoForBlock txsForBlock + validatorInfoPool dataRetriever.ShardedDataCacherNotifier + storage dataRetriever.StorageService } // NewValidatorInfoPreprocessor creates a new validatorInfo preprocessor object @@ -27,7 +31,11 @@ func NewValidatorInfoPreprocessor( hasher hashing.Hasher, marshalizer marshal.Marshalizer, 
blockSizeComputation BlockSizeComputationHandler, + validatorInfoPool dataRetriever.ShardedDataCacherNotifier, + store dataRetriever.StorageService, + onRequestValidatorsInfo func(txHashes [][]byte), ) (*validatorInfoPreprocessor, error) { + if check.IfNil(hasher) { return nil, process.ErrNilHasher } @@ -37,13 +45,34 @@ func NewValidatorInfoPreprocessor( if check.IfNil(blockSizeComputation) { return nil, process.ErrNilBlockSizeComputationHandler } + if check.IfNil(validatorInfoPool) { + return nil, process.ErrNilValidatorInfoPool + } + if check.IfNil(store) { + return nil, process.ErrNilStorage + } + if onRequestValidatorsInfo == nil { + return nil, process.ErrNilRequestHandler + } - rtp := &validatorInfoPreprocessor{ + bpp := &basePreProcess{ hasher: hasher, marshalizer: marshalizer, blockSizeComputation: blockSizeComputation, } - return rtp, nil + + vip := &validatorInfoPreprocessor{ + basePreProcess: bpp, + storage: store, + validatorInfoPool: validatorInfoPool, + onRequestValidatorsInfo: onRequestValidatorsInfo, + } + + vip.chReceivedAllValidatorInfo = make(chan bool) + vip.validatorInfoPool.RegisterOnAdded(vip.receivedValidatorInfoTransaction) + vip.validatorInfoForBlock.txHashAndInfo = make(map[string]*txInfo) + + return vip, nil } // IsDataPrepared does nothing @@ -53,28 +82,7 @@ func (vip *validatorInfoPreprocessor) IsDataPrepared(_ int, _ func() time.Durati // RemoveBlockDataFromPools removes the peer miniblocks from pool func (vip *validatorInfoPreprocessor) RemoveBlockDataFromPools(body *block.Body, miniBlockPool storage.Cacher) error { - if check.IfNil(body) { - return process.ErrNilBlockBody - } - if check.IfNil(miniBlockPool) { - return process.ErrNilMiniBlockPool - } - - for i := 0; i < len(body.MiniBlocks); i++ { - currentMiniBlock := body.MiniBlocks[i] - if currentMiniBlock.Type != block.PeerBlock { - continue - } - - miniBlockHash, err := core.CalculateHash(vip.marshalizer, vip.hasher, currentMiniBlock) - if err != nil { - return err - } - - 
miniBlockPool.Remove(miniBlockHash) - } - - return nil + return vip.removeBlockDataFromPools(body, miniBlockPool, vip.validatorInfoPool, vip.isMiniBlockCorrect) } // RemoveTxsFromPools does nothing for validatorInfoPreprocessor implementation @@ -128,6 +136,22 @@ func (vip *validatorInfoPreprocessor) SaveTxsToStorage(_ *block.Body) error { return nil } +// receivedValidatorInfoTransaction is a callback function called when a new validator info transaction +// is added in the validator info transactions pool +func (vip *validatorInfoPreprocessor) receivedValidatorInfoTransaction(key []byte, value interface{}) { + tx, ok := value.(data.TransactionHandler) + if !ok { + log.Warn("validatorInfoPreprocessor.receivedValidatorInfoTransaction", "error", process.ErrWrongTypeAssertion) + return + } + + receivedAllMissing := vip.baseReceivedTransaction(key, tx, &vip.validatorInfoForBlock) + + if receivedAllMissing { + vip.chReceivedAllValidatorInfo <- true + } +} + // CreateBlockStarted does nothing func (vip *validatorInfoPreprocessor) CreateBlockStarted() { } @@ -193,3 +217,7 @@ func (vip *validatorInfoPreprocessor) AddTransactions(_ []data.TransactionHandle func (vip *validatorInfoPreprocessor) IsInterfaceNil() bool { return vip == nil } + +func (vip *validatorInfoPreprocessor) isMiniBlockCorrect(mbType block.Type) bool { + return mbType == block.PeerBlock +} diff --git a/process/block/preprocess/validatorInfoPreProcessor_test.go b/process/block/preprocess/validatorInfoPreProcessor_test.go index 43e961a2bba..b3a28b5a41d 100644 --- a/process/block/preprocess/validatorInfoPreProcessor_test.go +++ b/process/block/preprocess/validatorInfoPreProcessor_test.go @@ -6,6 +6,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/core" "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go/process" + "github.com/ElrondNetwork/elrond-go/process/mock" "github.com/ElrondNetwork/elrond-go/testscommon" 
"github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" "github.com/stretchr/testify/assert" @@ -14,10 +15,14 @@ import ( func TestNewValidatorInfoPreprocessor_NilHasherShouldErr(t *testing.T) { t.Parallel() + tdp := initDataPool() rtp, err := NewValidatorInfoPreprocessor( nil, &testscommon.MarshalizerMock{}, &testscommon.BlockSizeComputationStub{}, + tdp.ValidatorsInfo(), + &mock.ChainStorerMock{}, + func(txHashes [][]byte) {}, ) assert.Nil(t, rtp) @@ -27,10 +32,14 @@ func TestNewValidatorInfoPreprocessor_NilHasherShouldErr(t *testing.T) { func TestNewValidatorInfoPreprocessor_NilMarshalizerShouldErr(t *testing.T) { t.Parallel() + tdp := initDataPool() rtp, err := NewValidatorInfoPreprocessor( &hashingMocks.HasherMock{}, nil, &testscommon.BlockSizeComputationStub{}, + tdp.ValidatorsInfo(), + &mock.ChainStorerMock{}, + func(txHashes [][]byte) {}, ) assert.Nil(t, rtp) @@ -40,23 +49,81 @@ func TestNewValidatorInfoPreprocessor_NilMarshalizerShouldErr(t *testing.T) { func TestNewValidatorInfoPreprocessor_NilBlockSizeComputationHandlerShouldErr(t *testing.T) { t.Parallel() + tdp := initDataPool() rtp, err := NewValidatorInfoPreprocessor( &hashingMocks.HasherMock{}, &testscommon.MarshalizerMock{}, nil, + tdp.ValidatorsInfo(), + &mock.ChainStorerMock{}, + func(txHashes [][]byte) {}, ) assert.Nil(t, rtp) assert.Equal(t, process.ErrNilBlockSizeComputationHandler, err) } +func TestNewValidatorInfoPreprocessor_NilValidatorInfoPoolShouldErr(t *testing.T) { + t.Parallel() + + rtp, err := NewValidatorInfoPreprocessor( + &hashingMocks.HasherMock{}, + &testscommon.MarshalizerMock{}, + &testscommon.BlockSizeComputationStub{}, + nil, + &mock.ChainStorerMock{}, + func(txHashes [][]byte) {}, + ) + + assert.Nil(t, rtp) + assert.Equal(t, process.ErrNilValidatorInfoPool, err) +} + +func TestNewValidatorInfoPreprocessor_NilStoreShouldErr(t *testing.T) { + t.Parallel() + + tdp := initDataPool() + rtp, err := NewValidatorInfoPreprocessor( + &hashingMocks.HasherMock{}, + 
&testscommon.MarshalizerMock{}, + &testscommon.BlockSizeComputationStub{}, + tdp.ValidatorsInfo(), + nil, + func(txHashes [][]byte) {}, + ) + + assert.Nil(t, rtp) + assert.Equal(t, process.ErrNilStorage, err) +} + +func TestNewValidatorInfoPreprocessor_NilRequestHandlerShouldErr(t *testing.T) { + t.Parallel() + + tdp := initDataPool() + rtp, err := NewValidatorInfoPreprocessor( + &hashingMocks.HasherMock{}, + &testscommon.MarshalizerMock{}, + &testscommon.BlockSizeComputationStub{}, + tdp.ValidatorsInfo(), + &mock.ChainStorerMock{}, + nil, + ) + + assert.Nil(t, rtp) + assert.Equal(t, process.ErrNilRequestHandler, err) +} + func TestNewValidatorInfoPreprocessor_OkValsShouldWork(t *testing.T) { t.Parallel() + tdp := initDataPool() rtp, err := NewValidatorInfoPreprocessor( &hashingMocks.HasherMock{}, &testscommon.MarshalizerMock{}, &testscommon.BlockSizeComputationStub{}, + tdp.ValidatorsInfo(), + &mock.ChainStorerMock{}, + func(txHashes [][]byte) {}, ) assert.Nil(t, err) assert.NotNil(t, rtp) @@ -65,10 +132,14 @@ func TestNewValidatorInfoPreprocessor_OkValsShouldWork(t *testing.T) { func TestNewValidatorInfoPreprocessor_CreateMarshalizedDataShouldWork(t *testing.T) { t.Parallel() + tdp := initDataPool() rtp, _ := NewValidatorInfoPreprocessor( &hashingMocks.HasherMock{}, &testscommon.MarshalizerMock{}, &testscommon.BlockSizeComputationStub{}, + tdp.ValidatorsInfo(), + &mock.ChainStorerMock{}, + func(txHashes [][]byte) {}, ) hash := make([][]byte, 0) @@ -81,10 +152,14 @@ func TestNewValidatorInfoPreprocessor_CreateMarshalizedDataShouldWork(t *testing func TestNewValidatorInfoPreprocessor_ProcessMiniBlockInvalidMiniBlockTypeShouldErr(t *testing.T) { t.Parallel() + tdp := initDataPool() rtp, _ := NewValidatorInfoPreprocessor( &hashingMocks.HasherMock{}, &testscommon.MarshalizerMock{}, &testscommon.BlockSizeComputationStub{}, + tdp.ValidatorsInfo(), + &mock.ChainStorerMock{}, + func(txHashes [][]byte) {}, ) txHashes := make([][]byte, 0) @@ -102,10 +177,14 @@ func 
TestNewValidatorInfoPreprocessor_ProcessMiniBlockInvalidMiniBlockTypeShould func TestNewValidatorInfoPreprocessor_ProcessMiniBlockShouldWork(t *testing.T) { t.Parallel() + tdp := initDataPool() rtp, _ := NewValidatorInfoPreprocessor( &hashingMocks.HasherMock{}, &testscommon.MarshalizerMock{}, &testscommon.BlockSizeComputationStub{}, + tdp.ValidatorsInfo(), + &mock.ChainStorerMock{}, + func(txHashes [][]byte) {}, ) txHashes := make([][]byte, 0) @@ -123,10 +202,14 @@ func TestNewValidatorInfoPreprocessor_ProcessMiniBlockShouldWork(t *testing.T) { func TestNewValidatorInfoPreprocessor_ProcessMiniBlockNotFromMeta(t *testing.T) { t.Parallel() + tdp := initDataPool() rtp, _ := NewValidatorInfoPreprocessor( &hashingMocks.HasherMock{}, &testscommon.MarshalizerMock{}, &testscommon.BlockSizeComputationStub{}, + tdp.ValidatorsInfo(), + &mock.ChainStorerMock{}, + func(txHashes [][]byte) {}, ) txHashes := make([][]byte, 0) @@ -148,10 +231,14 @@ func TestNewValidatorInfoPreprocessor_RestorePeerBlockIntoPools(t *testing.T) { marshalizer := &testscommon.MarshalizerMock{} blockSizeComputation := &testscommon.BlockSizeComputationStub{} + tdp := initDataPool() rtp, _ := NewValidatorInfoPreprocessor( hasher, marshalizer, blockSizeComputation, + tdp.ValidatorsInfo(), + &mock.ChainStorerMock{}, + func(txHashes [][]byte) {}, ) txHashes := [][]byte{[]byte("tx_hash1")} @@ -189,10 +276,14 @@ func TestNewValidatorInfoPreprocessor_RestoreOtherBlockTypeIntoPoolsShouldNotRes marshalizer := &testscommon.MarshalizerMock{} blockSizeComputation := &testscommon.BlockSizeComputationStub{} + tdp := initDataPool() rtp, _ := NewValidatorInfoPreprocessor( hasher, marshalizer, blockSizeComputation, + tdp.ValidatorsInfo(), + &mock.ChainStorerMock{}, + func(txHashes [][]byte) {}, ) txHashes := [][]byte{[]byte("tx_hash1")} @@ -230,10 +321,14 @@ func TestNewValidatorInfoPreprocessor_RemovePeerBlockFromPool(t *testing.T) { marshalizer := &testscommon.MarshalizerMock{} blockSizeComputation := 
&testscommon.BlockSizeComputationStub{} + tdp := initDataPool() rtp, _ := NewValidatorInfoPreprocessor( hasher, marshalizer, blockSizeComputation, + tdp.ValidatorsInfo(), + &mock.ChainStorerMock{}, + func(txHashes [][]byte) {}, ) txHashes := [][]byte{[]byte("tx_hash1")} @@ -271,10 +366,14 @@ func TestNewValidatorInfoPreprocessor_RemoveOtherBlockTypeFromPoolShouldNotRemov marshalizer := &testscommon.MarshalizerMock{} blockSizeComputation := &testscommon.BlockSizeComputationStub{} + tdp := initDataPool() rtp, _ := NewValidatorInfoPreprocessor( hasher, marshalizer, blockSizeComputation, + tdp.ValidatorsInfo(), + &mock.ChainStorerMock{}, + func(txHashes [][]byte) {}, ) txHashes := [][]byte{[]byte("tx_hash1")} diff --git a/process/factory/shard/preProcessorsContainerFactory.go b/process/factory/shard/preProcessorsContainerFactory.go index aded2affd7d..11e32f9ee9c 100644 --- a/process/factory/shard/preProcessorsContainerFactory.go +++ b/process/factory/shard/preProcessorsContainerFactory.go @@ -280,6 +280,9 @@ func (ppcm *preProcessorsContainerFactory) createValidatorInfoPreProcessor() (pr ppcm.hasher, ppcm.marshalizer, ppcm.blockSizeComputation, + ppcm.dataPool.ValidatorsInfo(), + ppcm.store, + ppcm.requestHandler.RequestValidatorsInfo, ) return validatorInfoPreprocessor, err diff --git a/process/interceptors/processor/validatorInfoInterceptorProcessor.go b/process/interceptors/processor/validatorInfoInterceptorProcessor.go index f30cd0d9625..5d831d820f0 100644 --- a/process/interceptors/processor/validatorInfoInterceptorProcessor.go +++ b/process/interceptors/processor/validatorInfoInterceptorProcessor.go @@ -3,17 +3,17 @@ package processor import ( "github.com/ElrondNetwork/elrond-go-core/core" "github.com/ElrondNetwork/elrond-go-core/core/check" + "github.com/ElrondNetwork/elrond-go/dataRetriever" "github.com/ElrondNetwork/elrond-go/process" - "github.com/ElrondNetwork/elrond-go/storage" ) // ArgValidatorInfoInterceptorProcessor is the argument structure used to 
create a new validator info interceptor processor type ArgValidatorInfoInterceptorProcessor struct { - ValidatorInfoPool storage.Cacher + ValidatorInfoPool dataRetriever.ShardedDataCacherNotifier } type validatorInfoInterceptorProcessor struct { - validatorInfoPool storage.Cacher + validatorInfoPool dataRetriever.ShardedDataCacherNotifier } // NewValidatorInfoInterceptorProcessor creates a new validator info interceptor processor @@ -51,7 +51,8 @@ func (viip *validatorInfoInterceptorProcessor) Save(data process.InterceptedData validatorInfo := ivi.ValidatorInfo() hash := ivi.Hash() - viip.validatorInfoPool.HasOrAdd(hash, validatorInfo, validatorInfo.Size()) + strCache := process.ShardCacherIdentifier(core.MetachainShardId, core.AllShardId) + viip.validatorInfoPool.AddData(hash, validatorInfo, validatorInfo.Size(), strCache) return nil } diff --git a/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go b/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go index a65df5f4768..e630f01781f 100644 --- a/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go +++ b/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go @@ -42,7 +42,7 @@ func createMockInterceptedValidatorInfo() process.InterceptedData { func createMockArgValidatorInfoInterceptorProcessor() processor.ArgValidatorInfoInterceptorProcessor { return processor.ArgValidatorInfoInterceptorProcessor{ - ValidatorInfoPool: testscommon.NewCacherStub(), + ValidatorInfoPool: testscommon.NewShardedDataStub(), } } @@ -84,10 +84,9 @@ func TestValidatorInfoInterceptorProcessor_Save(t *testing.T) { providedData := mock.NewInterceptedMetaBlockMock(nil, []byte("hash")) // unable to cast to intercepted validator info wasCalled := false args := createMockArgValidatorInfoInterceptorProcessor() - args.ValidatorInfoPool = &testscommon.CacherStub{ - HasOrAddCalled: func(key []byte, value interface{}, sizeInBytes int) (has, added bool) { + 
args.ValidatorInfoPool = &testscommon.ShardedDataStub{ + AddDataCalled: func(key []byte, data interface{}, sizeInBytes int, cacheID string) { wasCalled = true - return false, false }, } @@ -109,11 +108,10 @@ func TestValidatorInfoInterceptorProcessor_Save(t *testing.T) { hasher := hashingMocks.HasherMock{} providedHash := hasher.Compute(string(providedBuff)) - args.ValidatorInfoPool = &testscommon.CacherStub{ - HasOrAddCalled: func(key []byte, value interface{}, sizeInBytes int) (has, added bool) { + args.ValidatorInfoPool = &testscommon.ShardedDataStub{ + AddDataCalled: func(key []byte, data interface{}, sizeInBytes int, cacheID string) { assert.Equal(t, providedHash, key) wasHasOrAddCalled = true - return false, false }, } diff --git a/testscommon/dataRetriever/poolFactory.go b/testscommon/dataRetriever/poolFactory.go index ea27150be81..fe05709ac45 100644 --- a/testscommon/dataRetriever/poolFactory.go +++ b/testscommon/dataRetriever/poolFactory.go @@ -112,8 +112,11 @@ func CreatePoolsHolder(numShards uint32, selfShard uint32) dataRetriever.PoolsHo smartContracts, err := storageUnit.NewCache(cacherConfig) panicIfError("CreatePoolsHolder", err) - cacherConfig = storageUnit.CacheConfig{Capacity: 50000, Type: storageUnit.LRUCache} - validatorsInfo, err := storageUnit.NewCache(cacherConfig) + validatorsInfo, err := shardedData.NewShardedData("validatorsInfoPool", storageUnit.CacheConfig{ + Capacity: 300, + SizeInBytes: 300000, + Shards: 1, + }) panicIfError("CreatePoolsHolder", err) currentBlockTransactions := dataPool.NewCurrentBlockTransactionsPool() @@ -181,8 +184,11 @@ func CreatePoolsHolderWithTxPool(txPool dataRetriever.ShardedDataCacherNotifier) smartContracts, err := storageUnit.NewCache(cacherConfig) panicIfError("CreatePoolsHolderWithTxPool", err) - cacherConfig = storageUnit.CacheConfig{Capacity: 50000, Type: storageUnit.LRUCache} - validatorsInfo, err := storageUnit.NewCache(cacherConfig) + validatorsInfo, err := 
shardedData.NewShardedData("validatorsInfoPool", storageUnit.CacheConfig{ + Capacity: 300, + SizeInBytes: 300000, + Shards: 1, + }) panicIfError("CreatePoolsHolderWithTxPool", err) currentBlockTransactions := dataPool.NewCurrentBlockTransactionsPool() diff --git a/testscommon/dataRetriever/poolsHolderMock.go b/testscommon/dataRetriever/poolsHolderMock.go index 5bc0755bf89..a6044a03c26 100644 --- a/testscommon/dataRetriever/poolsHolderMock.go +++ b/testscommon/dataRetriever/poolsHolderMock.go @@ -25,7 +25,7 @@ type PoolsHolderMock struct { smartContracts storage.Cacher currBlockTxs dataRetriever.TransactionCacher currBlockValidatorInfo dataRetriever.ValidatorInfoCacher - validatorsInfo storage.Cacher + validatorsInfo dataRetriever.ShardedDataCacherNotifier } // NewPoolsHolderMock - @@ -87,7 +87,11 @@ func NewPoolsHolderMock() *PoolsHolderMock { holder.smartContracts, err = storageUnit.NewCache(storageUnit.CacheConfig{Type: storageUnit.LRUCache, Capacity: 10000, Shards: 1, SizeInBytes: 0}) panicIfError("NewPoolsHolderMock", err) - holder.validatorsInfo, err = storageUnit.NewCache(storageUnit.CacheConfig{Type: storageUnit.LRUCache, Capacity: 10000, Shards: 1, SizeInBytes: 0}) + holder.validatorsInfo, err = shardedData.NewShardedData("validatorsInfoPool", storageUnit.CacheConfig{ + Capacity: 100, + SizeInBytes: 100000, + Shards: 1, + }) panicIfError("NewPoolsHolderMock", err) return holder @@ -159,7 +163,7 @@ func (holder *PoolsHolderMock) SmartContracts() storage.Cacher { } // ValidatorsInfo - -func (holder *PoolsHolderMock) ValidatorsInfo() storage.Cacher { +func (holder *PoolsHolderMock) ValidatorsInfo() dataRetriever.ShardedDataCacherNotifier { return holder.validatorsInfo } diff --git a/testscommon/dataRetriever/poolsHolderStub.go b/testscommon/dataRetriever/poolsHolderStub.go index bdbd9df067f..42351cee75b 100644 --- a/testscommon/dataRetriever/poolsHolderStub.go +++ b/testscommon/dataRetriever/poolsHolderStub.go @@ -20,7 +20,7 @@ type PoolsHolderStub struct { 
TrieNodesChunksCalled func() storage.Cacher PeerChangesBlocksCalled func() storage.Cacher SmartContractsCalled func() storage.Cacher - ValidatorsInfoCalled func() storage.Cacher + ValidatorsInfoCalled func() dataRetriever.ShardedDataCacherNotifier } // NewPoolsHolderStub - @@ -137,12 +137,12 @@ func (holder *PoolsHolderStub) SmartContracts() storage.Cacher { } // ValidatorsInfo - -func (holder *PoolsHolderStub) ValidatorsInfo() storage.Cacher { +func (holder *PoolsHolderStub) ValidatorsInfo() dataRetriever.ShardedDataCacherNotifier { if holder.ValidatorsInfoCalled != nil { return holder.ValidatorsInfoCalled() } - return testscommon.NewCacherStub() + return testscommon.NewShardedDataStub() } // IsInterfaceNil returns true if there is no value under the interface diff --git a/testscommon/generalConfig.go b/testscommon/generalConfig.go index cbbb0bbaaed..221d83a37a3 100644 --- a/testscommon/generalConfig.go +++ b/testscommon/generalConfig.go @@ -145,7 +145,11 @@ func GetGeneralConfig() config.Config { SizeInBytes: 1000000000, Shards: 1, }, - ValidatorInfoPool: getLRUCacheConfig(), + ValidatorInfoPool: config.CacheConfig{ + Capacity: 10000, + SizeInBytes: 1000000000, + Shards: 1, + }, HeadersPoolConfig: config.HeadersPoolConfig{ MaxHeadersPerShard: 100, NumElementsToRemoveOnEviction: 1, From c26944e513ac559bae6dc5f843ec3c4b6dbed9c4 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Mon, 23 May 2022 17:45:40 +0300 Subject: [PATCH 25/70] * Prepared validator info in epochStart for peers min blocks new approach --- epochStart/bootstrap/process_test.go | 10 + epochStart/bootstrap/syncValidatorStatus.go | 1 + .../bootstrap/syncValidatorStatus_test.go | 5 + epochStart/interface.go | 1 + epochStart/metachain/validators.go | 25 +- epochStart/metachain/validators_test.go | 5 + factory/shardingFactory.go | 2 + integrationTests/consensus/testInitializer.go | 3 + .../consensusComponents_test.go | 1 + .../processComponents_test.go | 1 + 
.../statusComponents/statusComponents_test.go | 1 + .../intermediateTransactionHandlerMock.go | 10 +- integrationTests/nodesCoordinatorFactory.go | 3 + integrationTests/testP2PNode.go | 5 + .../testProcessorNodeWithCoordinator.go | 3 + .../testProcessorNodeWithMultisigner.go | 5 + node/nodeRunner.go | 1 + process/block/postprocess/basePostProcess.go | 6 +- .../postprocess/intermediateResults_test.go | 6 +- process/block/preprocess/basePreProcess.go | 6 +- .../block/preprocess/rewardTxPreProcessor.go | 90 ++++--- .../preprocess/rewardTxPreProcessor_test.go | 2 +- .../block/preprocess/smartContractResults.go | 82 +++--- process/block/preprocess/transactions.go | 86 ++++--- .../preprocess/validatorInfoPreProcessor.go | 233 +++++++++++++++--- .../validatorInfoPreProcessor_test.go | 2 +- process/coordinator/process.go | 10 +- process/interface.go | 2 +- process/mock/intermProcessorStub.go | 10 +- .../intermediateTransactionHandlerMock.go | 10 +- process/mock/preprocessorMock.go | 10 +- sharding/nodesCoordinator/errors.go | 3 + .../indexHashedNodesCoordinator.go | 17 +- .../indexHashedNodesCoordinatorLite_test.go | 4 +- ...dexHashedNodesCoordinatorWithRater_test.go | 11 +- .../indexHashedNodesCoordinator_test.go | 43 +++- sharding/nodesCoordinator/shardingArgs.go | 2 + .../validatorInfoCacherMock.go | 38 +++ 38 files changed, 540 insertions(+), 215 deletions(-) create mode 100644 testscommon/validatorInfoCacherMock/validatorInfoCacherMock.go diff --git a/epochStart/bootstrap/process_test.go b/epochStart/bootstrap/process_test.go index e879ed62e6b..6bb1c7753e2 100644 --- a/epochStart/bootstrap/process_test.go +++ b/epochStart/bootstrap/process_test.go @@ -41,6 +41,7 @@ import ( statusHandlerMock "github.com/ElrondNetwork/elrond-go/testscommon/statusHandler" storageMocks "github.com/ElrondNetwork/elrond-go/testscommon/storage" "github.com/ElrondNetwork/elrond-go/testscommon/syncer" + validatorInfoCacherMock 
"github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacherMock" "github.com/ElrondNetwork/elrond-go/trie/factory" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -1735,6 +1736,9 @@ func TestRequestAndProcessing(t *testing.T) { HeadersCalled: func() dataRetriever.HeadersPool { return &mock.HeadersCacherStub{} }, + CurrBlockValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &validatorInfoCacherMock.ValidatorInfoCacherMock{} + }, } epochStartProvider.requestHandler = &testscommon.RequestHandlerStub{} epochStartProvider.miniBlocksSyncer = &epochStartMocks.PendingMiniBlockSyncHandlerStub{} @@ -1802,6 +1806,9 @@ func TestRequestAndProcessing(t *testing.T) { HeadersCalled: func() dataRetriever.HeadersPool { return &mock.HeadersCacherStub{} }, + CurrBlockValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &validatorInfoCacherMock.ValidatorInfoCacherMock{} + }, } epochStartProvider.requestHandler = &testscommon.RequestHandlerStub{} epochStartProvider.miniBlocksSyncer = &epochStartMocks.PendingMiniBlockSyncHandlerStub{} @@ -1957,6 +1964,9 @@ func TestEpochStartBootstrap_WithDisabledShardIDAsObserver(t *testing.T) { TrieNodesCalled: func() storage.Cacher { return testscommon.NewCacherStub() }, + CurrBlockValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &validatorInfoCacherMock.ValidatorInfoCacherMock{} + }, } epochStartProvider.requestHandler = &testscommon.RequestHandlerStub{} epochStartProvider.epochStartMeta = &block.MetaBlock{Epoch: 0} diff --git a/epochStart/bootstrap/syncValidatorStatus.go b/epochStart/bootstrap/syncValidatorStatus.go index 33f124e94a8..4115f18ae8b 100644 --- a/epochStart/bootstrap/syncValidatorStatus.go +++ b/epochStart/bootstrap/syncValidatorStatus.go @@ -111,6 +111,7 @@ func NewSyncValidatorStatus(args ArgsNewSyncValidatorStatus) (*syncValidatorStat ChanStopNode: args.ChanNodeStop, NodeTypeProvider: args.NodeTypeProvider, IsFullArchive: 
args.IsFullArchive, + ValidatorInfoCacher: args.DataPool.CurrentBlockValidatorInfo(), } baseNodesCoordinator, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argsNodesCoordinator) if err != nil { diff --git a/epochStart/bootstrap/syncValidatorStatus_test.go b/epochStart/bootstrap/syncValidatorStatus_test.go index 7d5a9fbce51..b8e77ce8b91 100644 --- a/epochStart/bootstrap/syncValidatorStatus_test.go +++ b/epochStart/bootstrap/syncValidatorStatus_test.go @@ -9,6 +9,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/data" "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go-core/data/endProcess" + "github.com/ElrondNetwork/elrond-go/dataRetriever" "github.com/ElrondNetwork/elrond-go/epochStart/mock" "github.com/ElrondNetwork/elrond-go/sharding/nodesCoordinator" "github.com/ElrondNetwork/elrond-go/storage" @@ -18,6 +19,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" "github.com/ElrondNetwork/elrond-go/testscommon/shardingMocks" + validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacherMock" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -245,6 +247,9 @@ func getSyncValidatorStatusArgs() ArgsNewSyncValidatorStatus { MiniBlocksCalled: func() storage.Cacher { return testscommon.NewCacherStub() }, + CurrBlockValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &validatorInfoCacherMock.ValidatorInfoCacherMock{} + }, }, Marshalizer: &mock.MarshalizerMock{}, Hasher: &hashingMocks.HasherMock{}, diff --git a/epochStart/interface.go b/epochStart/interface.go index 82c285a2630..441262997a0 100644 --- a/epochStart/interface.go +++ b/epochStart/interface.go @@ -157,6 +157,7 @@ type TransactionCacher interface { // ValidatorInfoCacher defines the methods for the local validator info cacher, needed for the current block type ValidatorInfoCacher interface 
{ GetValidatorInfo(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) + AddValidatorInfo(validatorInfoHash []byte, validatorInfo *state.ShardValidatorInfo) IsInterfaceNil() bool } diff --git a/epochStart/metachain/validators.go b/epochStart/metachain/validators.go index c6a419eb072..270d9c9c42a 100644 --- a/epochStart/metachain/validators.go +++ b/epochStart/metachain/validators.go @@ -12,7 +12,6 @@ import ( "github.com/ElrondNetwork/elrond-go-core/hashing" "github.com/ElrondNetwork/elrond-go-core/marshal" "github.com/ElrondNetwork/elrond-go/dataRetriever" - "github.com/ElrondNetwork/elrond-go/dataRetriever/dataPool" "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/process" "github.com/ElrondNetwork/elrond-go/process/factory" @@ -34,7 +33,6 @@ type ArgsNewValidatorInfoCreator struct { } type validatorInfoCreator struct { - currValidatorInfo dataRetriever.ValidatorInfoCacher shardCoordinator sharding.Coordinator validatorInfoStorage storage.Storer miniBlockStorage storage.Storer @@ -64,10 +62,13 @@ func NewValidatorInfoCreator(args ArgsNewValidatorInfoCreator) (*validatorInfoCr if check.IfNil(args.DataPool) { return nil, epochStart.ErrNilDataPoolsHolder } + if check.IfNil(args.DataPool.CurrentBlockValidatorInfo()) { + return nil, epochStart.ErrNilValidatorInfo + } - currValidatorInfoCache := dataPool.NewCurrentBlockValidatorInfoPool() + //TODO: currValidatorInfoCache := dataPool.NewCurrentBlockValidatorInfoPool() should be replaced by + //args.DataPool.CurrentBlockValidatorInfo(), as this pool is already created vic := &validatorInfoCreator{ - currValidatorInfo: currValidatorInfoCache, shardCoordinator: args.ShardCoordinator, hasher: args.Hasher, marshalizer: args.Marshalizer, @@ -134,6 +135,7 @@ func (vic *validatorInfoCreator) createMiniBlock(validatorsInfo []*state.Validat return bytes.Compare(validatorCopy[a].PublicKey, validatorCopy[b].PublicKey) < 0 }) + currentBlockValidatorInfo := 
vic.dataPool.CurrentBlockValidatorInfo() for index, validator := range validatorCopy { shardValidatorInfo := createShardValidatorInfo(validator) shardValidatorInfoHash, err := core.CalculateHash(vic.marshalizer, vic.hasher, shardValidatorInfo) @@ -141,6 +143,7 @@ func (vic *validatorInfoCreator) createMiniBlock(validatorsInfo []*state.Validat return nil, err } + currentBlockValidatorInfo.AddValidatorInfo(shardValidatorInfoHash, shardValidatorInfo) miniBlock.TxHashes[index] = shardValidatorInfoHash } @@ -214,7 +217,7 @@ func (vic *validatorInfoCreator) VerifyValidatorInfoMiniBlocks( // GetLocalValidatorInfoCache returns the local validator info cache which holds all the validator info for the current block func (vic *validatorInfoCreator) GetLocalValidatorInfoCache() epochStart.ValidatorInfoCacher { - return vic.currValidatorInfo + return vic.dataPool.CurrentBlockValidatorInfo() } // CreateMarshalledData creates the marshalled data to be sent to shards @@ -224,6 +227,7 @@ func (vic *validatorInfoCreator) CreateMarshalledData(body *block.Body) map[stri } marshalledValidatorInfoTxs := make(map[string][][]byte) + currentBlockValidatorInfo := vic.dataPool.CurrentBlockValidatorInfo() for _, miniBlock := range body.MiniBlocks { if miniBlock.Type != block.PeerBlock { @@ -240,7 +244,7 @@ func (vic *validatorInfoCreator) CreateMarshalledData(body *block.Body) map[stri } for _, txHash := range miniBlock.TxHashes { - validatorInfoTx, err := vic.currValidatorInfo.GetValidatorInfo(txHash) + validatorInfoTx, err := currentBlockValidatorInfo.GetValidatorInfo(txHash) if err != nil { log.Error("validatorInfoCreator.CreateMarshalledData.GetValidatorInfo", "hash", txHash, "error", err) continue @@ -266,13 +270,14 @@ func (vic *validatorInfoCreator) CreateMarshalledData(body *block.Body) map[stri // GetValidatorInfoTxs returns validator info txs for the current block func (vic *validatorInfoCreator) GetValidatorInfoTxs(body *block.Body) map[string]*state.ShardValidatorInfo { 
validatorInfoTxs := make(map[string]*state.ShardValidatorInfo) + currentBlockValidatorInfo := vic.dataPool.CurrentBlockValidatorInfo() for _, miniBlock := range body.MiniBlocks { if miniBlock.Type != block.PeerBlock { continue } for _, txHash := range miniBlock.TxHashes { - validatorInfoTx, err := vic.currValidatorInfo.GetValidatorInfo(txHash) + validatorInfoTx, err := currentBlockValidatorInfo.GetValidatorInfo(txHash) if err != nil { continue } @@ -293,6 +298,7 @@ func (vic *validatorInfoCreator) SaveBlockDataToStorage(_ data.HeaderHandler, bo var validatorInfo *state.ShardValidatorInfo var marshalledData []byte var err error + currentBlockValidatorInfo := vic.dataPool.CurrentBlockValidatorInfo() for _, miniBlock := range body.MiniBlocks { if miniBlock.Type != block.PeerBlock { @@ -300,7 +306,7 @@ func (vic *validatorInfoCreator) SaveBlockDataToStorage(_ data.HeaderHandler, bo } for _, validatorInfoHash := range miniBlock.TxHashes { - validatorInfo, err = vic.currValidatorInfo.GetValidatorInfo(validatorInfoHash) + validatorInfo, err = currentBlockValidatorInfo.GetValidatorInfo(validatorInfoHash) if err != nil { continue } @@ -382,7 +388,8 @@ func (vic *validatorInfoCreator) RemoveBlockDataFromPools(metaBlock data.HeaderH } func (vic *validatorInfoCreator) clean() { - vic.currValidatorInfo.Clean() + currentBlockValidatorInfo := vic.dataPool.CurrentBlockValidatorInfo() + currentBlockValidatorInfo.Clean() } // IsInterfaceNil return true if underlying object is nil diff --git a/epochStart/metachain/validators_test.go b/epochStart/metachain/validators_test.go index 6dfefaa301f..99adeefb9d3 100644 --- a/epochStart/metachain/validators_test.go +++ b/epochStart/metachain/validators_test.go @@ -11,6 +11,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/core" "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go-core/marshal" + "github.com/ElrondNetwork/elrond-go/dataRetriever" "github.com/ElrondNetwork/elrond-go/epochStart" 
"github.com/ElrondNetwork/elrond-go/process/mock" "github.com/ElrondNetwork/elrond-go/state" @@ -18,6 +19,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon" dataRetrieverMock "github.com/ElrondNetwork/elrond-go/testscommon/dataRetriever" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" + validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacherMock" "github.com/stretchr/testify/require" ) @@ -123,6 +125,9 @@ func createMockEpochValidatorInfoCreatorsArguments() ArgsNewValidatorInfoCreator RemoveCalled: func(key []byte) {}, } }, + CurrBlockValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &validatorInfoCacherMock.ValidatorInfoCacherMock{} + }, }, } return argsNewEpochEconomics diff --git a/factory/shardingFactory.go b/factory/shardingFactory.go index df141564a07..f72a6b2f978 100644 --- a/factory/shardingFactory.go +++ b/factory/shardingFactory.go @@ -105,6 +105,7 @@ func CreateNodesCoordinator( waitingListFixEnabledEpoch uint32, chanNodeStop chan endProcess.ArgEndProcess, nodeTypeProvider core.NodeTypeProviderHandler, + validatorInfoCacher epochStart.ValidatorInfoCacher, ) (nodesCoordinator.NodesCoordinator, error) { if chanNodeStop == nil { return nil, nodesCoordinator.ErrNilNodeStopChannel @@ -194,6 +195,7 @@ func CreateNodesCoordinator( ChanStopNode: chanNodeStop, NodeTypeProvider: nodeTypeProvider, IsFullArchive: prefsConfig.FullArchive, + ValidatorInfoCacher: validatorInfoCacher, } baseNodesCoordinator, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/integrationTests/consensus/testInitializer.go b/integrationTests/consensus/testInitializer.go index eebdac8c258..2959c2f4b67 100644 --- a/integrationTests/consensus/testInitializer.go +++ b/integrationTests/consensus/testInitializer.go @@ -25,6 +25,7 @@ import ( "github.com/ElrondNetwork/elrond-go/consensus/round" "github.com/ElrondNetwork/elrond-go/dataRetriever" 
"github.com/ElrondNetwork/elrond-go/dataRetriever/blockchain" + "github.com/ElrondNetwork/elrond-go/dataRetriever/dataPool" "github.com/ElrondNetwork/elrond-go/epochStart/metachain" "github.com/ElrondNetwork/elrond-go/epochStart/notifier" mainFactory "github.com/ElrondNetwork/elrond-go/factory" @@ -509,6 +510,7 @@ func createNodes( epochStartRegistrationHandler := notifier.NewEpochStartSubscriptionHandler() bootStorer := integrationTests.CreateMemUnit() consensusCache, _ := lrucache.NewCache(10000) + validatorInfoCacher := dataPool.NewCurrentBlockValidatorInfoPool() argumentsNodesCoordinator := nodesCoordinator.ArgNodesCoordinator{ ShardConsensusGroupSize: consensusSize, @@ -528,6 +530,7 @@ func createNodes( ChanStopNode: endProcess.GetDummyEndProcessChannel(), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, + ValidatorInfoCacher: validatorInfoCacher, } nodesCoord, _ := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/integrationTests/factory/consensusComponents/consensusComponents_test.go b/integrationTests/factory/consensusComponents/consensusComponents_test.go index 5c74cfdec98..fdd2ac44aa7 100644 --- a/integrationTests/factory/consensusComponents/consensusComponents_test.go +++ b/integrationTests/factory/consensusComponents/consensusComponents_test.go @@ -65,6 +65,7 @@ func TestConsensusComponents_Close_ShouldWork(t *testing.T) { configs.EpochConfig.EnableEpochs.WaitingListFixEnableEpoch, managedCoreComponents.ChanStopNodeProcess(), managedCoreComponents.NodeTypeProvider(), + managedDataComponents.Datapool().CurrentBlockValidatorInfo(), ) require.Nil(t, err) managedStatusComponents, err := nr.CreateManagedStatusComponents( diff --git a/integrationTests/factory/processComponents/processComponents_test.go b/integrationTests/factory/processComponents/processComponents_test.go index 3f0371137f7..cabdd5ec8fa 100644 --- a/integrationTests/factory/processComponents/processComponents_test.go 
+++ b/integrationTests/factory/processComponents/processComponents_test.go @@ -66,6 +66,7 @@ func TestProcessComponents_Close_ShouldWork(t *testing.T) { configs.EpochConfig.EnableEpochs.WaitingListFixEnableEpoch, managedCoreComponents.ChanStopNodeProcess(), managedCoreComponents.NodeTypeProvider(), + managedDataComponents.Datapool().CurrentBlockValidatorInfo(), ) require.Nil(t, err) managedStatusComponents, err := nr.CreateManagedStatusComponents( diff --git a/integrationTests/factory/statusComponents/statusComponents_test.go b/integrationTests/factory/statusComponents/statusComponents_test.go index 30da3113aad..2e58c92a1fb 100644 --- a/integrationTests/factory/statusComponents/statusComponents_test.go +++ b/integrationTests/factory/statusComponents/statusComponents_test.go @@ -66,6 +66,7 @@ func TestStatusComponents_Create_Close_ShouldWork(t *testing.T) { configs.EpochConfig.EnableEpochs.WaitingListFixEnableEpoch, managedCoreComponents.ChanStopNodeProcess(), managedCoreComponents.NodeTypeProvider(), + managedDataComponents.Datapool().CurrentBlockValidatorInfo(), ) require.Nil(t, err) managedStatusComponents, err := nr.CreateManagedStatusComponents( diff --git a/integrationTests/mock/intermediateTransactionHandlerMock.go b/integrationTests/mock/intermediateTransactionHandlerMock.go index 1bbbbcbd1cc..7c59deefdac 100644 --- a/integrationTests/mock/intermediateTransactionHandlerMock.go +++ b/integrationTests/mock/intermediateTransactionHandlerMock.go @@ -13,7 +13,7 @@ type IntermediateTransactionHandlerMock struct { VerifyInterMiniBlocksCalled func(body *block.Body) error SaveCurrentIntermediateTxToStorageCalled func() CreateBlockStartedCalled func() - CreateMarshalizedDataCalled func(txHashes [][]byte) ([][]byte, error) + CreateMarshalledDataCalled func(txHashes [][]byte) ([][]byte, error) GetAllCurrentFinishedTxsCalled func() map[string]data.TransactionHandler RemoveProcessedResultsCalled func() [][]byte InitProcessedResultsCalled func() @@ -48,12 +48,12 @@ func 
(ith *IntermediateTransactionHandlerMock) GetAllCurrentFinishedTxs() map[st return nil } -// CreateMarshalizedData - -func (ith *IntermediateTransactionHandlerMock) CreateMarshalizedData(txHashes [][]byte) ([][]byte, error) { - if ith.CreateMarshalizedDataCalled == nil { +// CreateMarshalledData - +func (ith *IntermediateTransactionHandlerMock) CreateMarshalledData(txHashes [][]byte) ([][]byte, error) { + if ith.CreateMarshalledDataCalled == nil { return nil, nil } - return ith.CreateMarshalizedDataCalled(txHashes) + return ith.CreateMarshalledDataCalled(txHashes) } // AddIntermediateTransactions - diff --git a/integrationTests/nodesCoordinatorFactory.go b/integrationTests/nodesCoordinatorFactory.go index 30de1b24a80..15d40d83cec 100644 --- a/integrationTests/nodesCoordinatorFactory.go +++ b/integrationTests/nodesCoordinatorFactory.go @@ -10,6 +10,7 @@ import ( "github.com/ElrondNetwork/elrond-go/sharding/nodesCoordinator" "github.com/ElrondNetwork/elrond-go/storage" "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" + validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacherMock" ) // ArgIndexHashedNodesCoordinatorFactory - @@ -70,6 +71,7 @@ func (tpn *IndexHashedNodesCoordinatorFactory) CreateNodesCoordinator(arg ArgInd ChanStopNode: endProcess.GetDummyEndProcessChannel(), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) if err != nil { @@ -122,6 +124,7 @@ func (ihncrf *IndexHashedNodesCoordinatorWithRaterFactory) CreateNodesCoordinato ChanStopNode: endProcess.GetDummyEndProcessChannel(), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } baseCoordinator, err := 
nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/integrationTests/testP2PNode.go b/integrationTests/testP2PNode.go index db8f6765b95..31443b56f20 100644 --- a/integrationTests/testP2PNode.go +++ b/integrationTests/testP2PNode.go @@ -15,6 +15,7 @@ import ( mclsig "github.com/ElrondNetwork/elrond-go-crypto/signing/mcl/singlesig" "github.com/ElrondNetwork/elrond-go/config" "github.com/ElrondNetwork/elrond-go/dataRetriever" + "github.com/ElrondNetwork/elrond-go/dataRetriever/dataPool" "github.com/ElrondNetwork/elrond-go/epochStart/notifier" "github.com/ElrondNetwork/elrond-go/factory" "github.com/ElrondNetwork/elrond-go/factory/peerSignatureHandler" @@ -329,6 +330,8 @@ func CreateNodesWithTestP2PNodes( nodesMap := make(map[uint32][]*TestP2PNode) cacherCfg := storageUnit.CacheConfig{Capacity: 10000, Type: storageUnit.LRUCache, Shards: 1} cache, _ := storageUnit.NewCache(cacherCfg) + validatorInfoCacher := dataPool.NewCurrentBlockValidatorInfoPool() + for shardId, validatorList := range validatorsMap { argumentsNodesCoordinator := nodesCoordinator.ArgNodesCoordinator{ ShardConsensusGroupSize: shardConsensusGroupSize, @@ -350,6 +353,7 @@ func CreateNodesWithTestP2PNodes( ChanStopNode: endProcess.GetDummyEndProcessChannel(), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, + ValidatorInfoCacher: validatorInfoCacher, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) log.LogIfError(err) @@ -395,6 +399,7 @@ func CreateNodesWithTestP2PNodes( ChanStopNode: endProcess.GetDummyEndProcessChannel(), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, + ValidatorInfoCacher: validatorInfoCacher, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) log.LogIfError(err) diff --git a/integrationTests/testProcessorNodeWithCoordinator.go b/integrationTests/testProcessorNodeWithCoordinator.go 
index 3dc8c5b3e6d..67588748583 100644 --- a/integrationTests/testProcessorNodeWithCoordinator.go +++ b/integrationTests/testProcessorNodeWithCoordinator.go @@ -15,6 +15,7 @@ import ( "github.com/ElrondNetwork/elrond-go-crypto/signing/mcl" multisig2 "github.com/ElrondNetwork/elrond-go-crypto/signing/mcl/multisig" "github.com/ElrondNetwork/elrond-go-crypto/signing/multisig" + "github.com/ElrondNetwork/elrond-go/dataRetriever/dataPool" "github.com/ElrondNetwork/elrond-go/integrationTests/mock" p2pRating "github.com/ElrondNetwork/elrond-go/p2p/rating" "github.com/ElrondNetwork/elrond-go/sharding" @@ -61,6 +62,7 @@ func CreateProcessorNodesWithNodesCoordinator( pubKeys := PubKeysMapFromKeysMap(cp.Keys) validatorsMap := GenValidatorsFromPubKeys(pubKeys, nbShards) validatorsMapForNodesCoordinator, _ := nodesCoordinator.NodesInfoToValidators(validatorsMap) + validatorInfoCacher := dataPool.NewCurrentBlockValidatorInfoPool() cpWaiting := CreateCryptoParams(1, 1, nbShards) pubKeysWaiting := PubKeysMapFromKeysMap(cpWaiting.Keys) @@ -94,6 +96,7 @@ func CreateProcessorNodesWithNodesCoordinator( WaitingListFixEnabledEpoch: 0, ChanStopNode: endProcess.GetDummyEndProcessChannel(), IsFullArchive: false, + ValidatorInfoCacher: validatorInfoCacher, } nodesCoordinator, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/integrationTests/testProcessorNodeWithMultisigner.go b/integrationTests/testProcessorNodeWithMultisigner.go index 469022b6ee7..62f25ae2b8e 100644 --- a/integrationTests/testProcessorNodeWithMultisigner.go +++ b/integrationTests/testProcessorNodeWithMultisigner.go @@ -19,6 +19,7 @@ import ( mclmultisig "github.com/ElrondNetwork/elrond-go-crypto/signing/mcl/multisig" "github.com/ElrondNetwork/elrond-go-crypto/signing/multisig" "github.com/ElrondNetwork/elrond-go/common/forking" + "github.com/ElrondNetwork/elrond-go/dataRetriever/dataPool" "github.com/ElrondNetwork/elrond-go/epochStart/notifier" 
"github.com/ElrondNetwork/elrond-go/factory/peerSignatureHandler" "github.com/ElrondNetwork/elrond-go/integrationTests/mock" @@ -486,6 +487,7 @@ func CreateNodesWithNodesCoordinatorAndHeaderSigVerifier( pubKeys := PubKeysMapFromKeysMap(cp.Keys) validatorsMap := GenValidatorsFromPubKeys(pubKeys, uint32(nbShards)) validatorsMapForNodesCoordinator, _ := nodesCoordinator.NodesInfoToValidators(validatorsMap) + validatorInfoCacher := dataPool.NewCurrentBlockValidatorInfoPool() nodesMap := make(map[uint32][]*TestProcessorNode) shufflerArgs := &nodesCoordinator.NodesShufflerArgs{ @@ -528,6 +530,7 @@ func CreateNodesWithNodesCoordinatorAndHeaderSigVerifier( ChanStopNode: endProcess.GetDummyEndProcessChannel(), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, + ValidatorInfoCacher: validatorInfoCacher, } nodesCoordinator, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) @@ -588,6 +591,7 @@ func CreateNodesWithNodesCoordinatorKeygenAndSingleSigner( pubKeys := PubKeysMapFromKeysMap(cp.Keys) validatorsMap := GenValidatorsFromPubKeys(pubKeys, uint32(nbShards)) validatorsMapForNodesCoordinator, _ := nodesCoordinator.NodesInfoToValidators(validatorsMap) + validatorInfoCacher := dataPool.NewCurrentBlockValidatorInfoPool() cpWaiting := CreateCryptoParams(2, 2, uint32(nbShards)) pubKeysWaiting := PubKeysMapFromKeysMap(cpWaiting.Keys) @@ -627,6 +631,7 @@ func CreateNodesWithNodesCoordinatorKeygenAndSingleSigner( ChanStopNode: endProcess.GetDummyEndProcessChannel(), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, + ValidatorInfoCacher: validatorInfoCacher, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/node/nodeRunner.go b/node/nodeRunner.go index 26afcddc2a1..d2639dad148 100644 --- a/node/nodeRunner.go +++ b/node/nodeRunner.go @@ -329,6 +329,7 @@ func (nr *nodeRunner) executeOneComponentCreationCycle( 
configs.EpochConfig.EnableEpochs.WaitingListFixEnableEpoch, managedCoreComponents.ChanStopNodeProcess(), managedCoreComponents.NodeTypeProvider(), + managedDataComponents.Datapool().CurrentBlockValidatorInfo(), ) if err != nil { return true, err diff --git a/process/block/postprocess/basePostProcess.go b/process/block/postprocess/basePostProcess.go index c2e290b4455..ccbdcacecd3 100644 --- a/process/block/postprocess/basePostProcess.go +++ b/process/block/postprocess/basePostProcess.go @@ -80,8 +80,8 @@ func (bpp *basePostProcessor) CreateBlockStarted() { bpp.mutInterResultsForBlock.Unlock() } -// CreateMarshalizedData creates the marshalized data for broadcasting purposes -func (bpp *basePostProcessor) CreateMarshalizedData(txHashes [][]byte) ([][]byte, error) { +// CreateMarshalledData creates the marshalled data for broadcasting purposes +func (bpp *basePostProcessor) CreateMarshalledData(txHashes [][]byte) ([][]byte, error) { bpp.mutInterResultsForBlock.Lock() defer bpp.mutInterResultsForBlock.Unlock() @@ -89,7 +89,7 @@ func (bpp *basePostProcessor) CreateMarshalizedData(txHashes [][]byte) ([][]byte for _, txHash := range txHashes { txInfoObject := bpp.interResultsForBlock[string(txHash)] if txInfoObject == nil || check.IfNil(txInfoObject.tx) { - log.Warn("basePostProcessor.CreateMarshalizedData: tx not found", "hash", txHash) + log.Warn("basePostProcessor.CreateMarshalledData: tx not found", "hash", txHash) continue } diff --git a/process/block/postprocess/intermediateResults_test.go b/process/block/postprocess/intermediateResults_test.go index 2271f47c30e..76bf4474a86 100644 --- a/process/block/postprocess/intermediateResults_test.go +++ b/process/block/postprocess/intermediateResults_test.go @@ -895,12 +895,12 @@ func TestIntermediateResultsProcessor_CreateMarshalizedDataNothingToMarshal(t *t assert.Nil(t, err) // nothing to marshal - mrsTxs, err := irp.CreateMarshalizedData(nil) + mrsTxs, err := irp.CreateMarshalledData(nil) assert.Nil(t, err) 
assert.Equal(t, 0, len(mrsTxs)) // nothing saved in local cacher to marshal - mrsTxs, err = irp.CreateMarshalizedData(nil) + mrsTxs, err = irp.CreateMarshalledData(nil) assert.Nil(t, err) assert.Equal(t, 0, len(mrsTxs)) } @@ -961,7 +961,7 @@ func TestIntermediateResultsProcessor_CreateMarshalizedData(t *testing.T) { err = irp.AddIntermediateTransactions(txs) assert.Nil(t, err) - mrsTxs, err := irp.CreateMarshalizedData(txHashes) + mrsTxs, err := irp.CreateMarshalledData(txHashes) assert.Nil(t, err) assert.Equal(t, len(txs), len(mrsTxs)) diff --git a/process/block/preprocess/basePreProcess.go b/process/block/preprocess/basePreProcess.go index 4be1674b427..8ba78159988 100644 --- a/process/block/preprocess/basePreProcess.go +++ b/process/block/preprocess/basePreProcess.go @@ -199,7 +199,7 @@ func (bpp *basePreProcess) removeMiniBlocksFromPools( return nil } -func (bpp *basePreProcess) createMarshalizedData(txHashes [][]byte, forBlock *txsForBlock) ([][]byte, error) { +func (bpp *basePreProcess) createMarshalledData(txHashes [][]byte, forBlock *txsForBlock) ([][]byte, error) { mrsTxs := make([][]byte, 0, len(txHashes)) for _, txHash := range txHashes { forBlock.mutTxsForBlock.RLock() @@ -207,7 +207,7 @@ func (bpp *basePreProcess) createMarshalizedData(txHashes [][]byte, forBlock *tx forBlock.mutTxsForBlock.RUnlock() if txInfoFromMap == nil || check.IfNil(txInfoFromMap.tx) { - log.Warn("basePreProcess.createMarshalizedData: tx not found", "hash", txHash) + log.Warn("basePreProcess.createMarshalledData: tx not found", "hash", txHash) continue } @@ -218,7 +218,7 @@ func (bpp *basePreProcess) createMarshalizedData(txHashes [][]byte, forBlock *tx mrsTxs = append(mrsTxs, txMrs) } - log.Trace("basePreProcess.createMarshalizedData", + log.Trace("basePreProcess.createMarshalledData", "num txs", len(mrsTxs), ) diff --git a/process/block/preprocess/rewardTxPreProcessor.go b/process/block/preprocess/rewardTxPreProcessor.go index 571cbe97f07..a3e270c3c30 100644 --- 
a/process/block/preprocess/rewardTxPreProcessor.go +++ b/process/block/preprocess/rewardTxPreProcessor.go @@ -5,7 +5,6 @@ import ( "github.com/ElrondNetwork/elrond-go-core/core" "github.com/ElrondNetwork/elrond-go-core/core/check" - "github.com/ElrondNetwork/elrond-go-core/core/sliceUtil" "github.com/ElrondNetwork/elrond-go-core/data" "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go-core/data/rewardTx" @@ -171,28 +170,11 @@ func (rtp *rewardTxPreprocessor) RestoreBlockDataIntoPools( continue } - strCache := process.ShardCacherIdentifier(miniBlock.SenderShardID, miniBlock.ReceiverShardID) - rewardTxBuff, err := rtp.storage.GetAll(dataRetriever.RewardTransactionUnit, miniBlock.TxHashes) + err := rtp.restoreRewardTxs(miniBlock) if err != nil { - log.Debug("reward tx from mini block was not found in RewardTransactionUnit", - "sender shard ID", miniBlock.SenderShardID, - "receiver shard ID", miniBlock.ReceiverShardID, - "num txs", len(miniBlock.TxHashes), - ) - return rewardTxsRestored, err } - for txHash, txBuff := range rewardTxBuff { - tx := rewardTx.RewardTx{} - err = rtp.marshalizer.Unmarshal(&tx, txBuff) - if err != nil { - return rewardTxsRestored, err - } - - rtp.rewardTxPool.AddData([]byte(txHash), &tx, tx.Size(), strCache) - } - miniBlockHash, err := core.CalculateHash(rtp.marshalizer, rtp.hasher, miniBlock) if err != nil { return rewardTxsRestored, err @@ -206,6 +188,32 @@ func (rtp *rewardTxPreprocessor) RestoreBlockDataIntoPools( return rewardTxsRestored, nil } +func (rtp *rewardTxPreprocessor) restoreRewardTxs(miniBlock *block.MiniBlock) error { + strCache := process.ShardCacherIdentifier(miniBlock.SenderShardID, miniBlock.ReceiverShardID) + rewardTxsBuff, err := rtp.storage.GetAll(dataRetriever.RewardTransactionUnit, miniBlock.TxHashes) + if err != nil { + log.Debug("reward txs from mini block were not found in RewardTransactionUnit", + "sender shard ID", miniBlock.SenderShardID, + "receiver shard ID", 
miniBlock.ReceiverShardID, + "num txs", len(miniBlock.TxHashes), + ) + + return err + } + + for txHash, txBuff := range rewardTxsBuff { + tx := rewardTx.RewardTx{} + err = rtp.marshalizer.Unmarshal(&tx, txBuff) + if err != nil { + return err + } + + rtp.rewardTxPool.AddData([]byte(txHash), &tx, tx.Size(), strCache) + } + + return nil +} + // ProcessBlockTransactions processes all the reward transactions from the block.Body, updates the state func (rtp *rewardTxPreprocessor) ProcessBlockTransactions( _ data.HeaderHandler, @@ -313,7 +321,7 @@ func (rtp *rewardTxPreprocessor) RequestBlockTransactions(body *block.Body) int // computeExistingAndRequestMissingRewardTxsForShards calculates what reward transactions are available and requests // what are missing from block.Body func (rtp *rewardTxPreprocessor) computeExistingAndRequestMissingRewardTxsForShards(body *block.Body) int { - rewardTxs := block.Body{} + rewardTxsBody := block.Body{} for _, mb := range body.MiniBlocks { if mb.Type != block.RewardsBlock { continue @@ -322,11 +330,11 @@ func (rtp *rewardTxPreprocessor) computeExistingAndRequestMissingRewardTxsForSha continue } - rewardTxs.MiniBlocks = append(rewardTxs.MiniBlocks, mb) + rewardTxsBody.MiniBlocks = append(rewardTxsBody.MiniBlocks, mb) } numMissingTxsForShards := rtp.computeExistingAndRequestMissing( - &rewardTxs, + &rewardTxsBody, &rtp.rewardTxsForBlock, rtp.chReceivedAllRewardTxs, rtp.isMiniBlockCorrect, @@ -343,19 +351,20 @@ func (rtp *rewardTxPreprocessor) RequestTransactionsForMiniBlock(miniBlock *bloc return 0 } - missingRewardTxsForMiniBlock := rtp.computeMissingRewardTxsForMiniBlock(miniBlock) - if len(missingRewardTxsForMiniBlock) > 0 { - rtp.onRequestRewardTx(miniBlock.SenderShardID, missingRewardTxsForMiniBlock) + missingRewardTxsHashesForMiniBlock := rtp.computeMissingRewardTxsHashesForMiniBlock(miniBlock) + if len(missingRewardTxsHashesForMiniBlock) > 0 { + rtp.onRequestRewardTx(miniBlock.SenderShardID, missingRewardTxsHashesForMiniBlock) } 
- return len(missingRewardTxsForMiniBlock) + return len(missingRewardTxsHashesForMiniBlock) } -// computeMissingRewardTxsForMiniBlock computes missing reward transactions for a certain miniblock -func (rtp *rewardTxPreprocessor) computeMissingRewardTxsForMiniBlock(miniBlock *block.MiniBlock) [][]byte { - missingRewardTxs := make([][]byte, 0, len(miniBlock.TxHashes)) +// computeMissingRewardTxsHashesForMiniBlock computes missing reward transactions hashes for a certain miniblock +func (rtp *rewardTxPreprocessor) computeMissingRewardTxsHashesForMiniBlock(miniBlock *block.MiniBlock) [][]byte { + missingRewardTxsHashes := make([][]byte, 0) + if miniBlock.Type != block.RewardsBlock { - return missingRewardTxs + return missingRewardTxsHashes } for _, txHash := range miniBlock.TxHashes { @@ -368,11 +377,11 @@ func (rtp *rewardTxPreprocessor) computeMissingRewardTxsForMiniBlock(miniBlock * ) if tx == nil { - missingRewardTxs = append(missingRewardTxs, txHash) + missingRewardTxsHashes = append(missingRewardTxsHashes, txHash) } } - return sliceUtil.TrimSliceSliceByte(missingRewardTxs) + return missingRewardTxsHashes } // getAllRewardTxsFromMiniBlock gets all the reward transactions from a miniblock into a new structure @@ -422,8 +431,7 @@ func (rtp *rewardTxPreprocessor) CreateAndProcessMiniBlocks( return make(block.MiniBlockSlice, 0), nil } -// ProcessMiniBlock processes all the reward transactions from a miniblock and saves the processed reward transactions -// in local cache +// ProcessMiniBlock processes all the reward transactions from the given miniblock and saves the processed ones in a local cache func (rtp *rewardTxPreprocessor) ProcessMiniBlock(miniBlock *block.MiniBlock, haveTime func() bool, _ func() bool, _ func() (int, int), _ bool) ([][]byte, int, error) { if miniBlock.Type != block.RewardsBlock { @@ -472,26 +480,26 @@ func (rtp *rewardTxPreprocessor) ProcessMiniBlock(miniBlock *block.MiniBlock, ha return nil, len(processedTxHashes), nil } -// 
CreateMarshalizedData marshalizes reward transaction hashes and and saves them into a new structure -func (rtp *rewardTxPreprocessor) CreateMarshalizedData(txHashes [][]byte) ([][]byte, error) { - marshaledRewardTxs, err := rtp.createMarshalizedData(txHashes, &rtp.rewardTxsForBlock) +// CreateMarshalledData marshals reward transactions hashes and saves them into a new structure +func (rtp *rewardTxPreprocessor) CreateMarshalledData(txHashes [][]byte) ([][]byte, error) { + marshalledRewardTxs, err := rtp.createMarshalledData(txHashes, &rtp.rewardTxsForBlock) if err != nil { return nil, err } - return marshaledRewardTxs, nil + return marshalledRewardTxs, nil } // GetAllCurrentUsedTxs returns all the reward transactions used at current creation / processing func (rtp *rewardTxPreprocessor) GetAllCurrentUsedTxs() map[string]data.TransactionHandler { rtp.rewardTxsForBlock.mutTxsForBlock.RLock() - rewardTxPool := make(map[string]data.TransactionHandler, len(rtp.rewardTxsForBlock.txHashAndInfo)) + rewardTxsPool := make(map[string]data.TransactionHandler, len(rtp.rewardTxsForBlock.txHashAndInfo)) for txHash, txData := range rtp.rewardTxsForBlock.txHashAndInfo { - rewardTxPool[txHash] = txData.tx + rewardTxsPool[txHash] = txData.tx } rtp.rewardTxsForBlock.mutTxsForBlock.RUnlock() - return rewardTxPool + return rewardTxsPool } // AddTxsFromMiniBlocks does nothing diff --git a/process/block/preprocess/rewardTxPreProcessor_test.go b/process/block/preprocess/rewardTxPreProcessor_test.go index 9f4e79ffbc7..1dda44fe703 100644 --- a/process/block/preprocess/rewardTxPreProcessor_test.go +++ b/process/block/preprocess/rewardTxPreProcessor_test.go @@ -342,7 +342,7 @@ func TestRewardTxPreprocessor_CreateMarshalizedDataShouldWork(t *testing.T) { txs := []data.TransactionHandler{&rewardTx.RewardTx{}} rtp.AddTxs(txHashes, txs) - res, err := rtp.CreateMarshalizedData(txHashes) + res, err := rtp.CreateMarshalledData(txHashes) assert.Nil(t, err) assert.Equal(t, 1, len(res)) diff --git 
a/process/block/preprocess/smartContractResults.go b/process/block/preprocess/smartContractResults.go index 02f83b8646b..7cdf02473d7 100644 --- a/process/block/preprocess/smartContractResults.go +++ b/process/block/preprocess/smartContractResults.go @@ -185,28 +185,11 @@ func (scr *smartContractResults) RestoreBlockDataIntoPools( continue } - strCache := process.ShardCacherIdentifier(miniBlock.SenderShardID, miniBlock.ReceiverShardID) - scrBuff, err := scr.storage.GetAll(dataRetriever.UnsignedTransactionUnit, miniBlock.TxHashes) + err := scr.restoreSmartContractResults(miniBlock) if err != nil { - log.Debug("unsigned tx from mini block was not found in UnsignedTransactionUnit", - "sender shard ID", miniBlock.SenderShardID, - "receiver shard ID", miniBlock.ReceiverShardID, - "num txs", len(miniBlock.TxHashes), - ) - return scrRestored, err } - for txHash, txBuff := range scrBuff { - tx := smartContractResult.SmartContractResult{} - err = scr.marshalizer.Unmarshal(&tx, txBuff) - if err != nil { - return scrRestored, err - } - - scr.scrPool.AddData([]byte(txHash), &tx, tx.Size(), strCache) - } - // TODO: Should be analyzed if restoring into pool only cross-shard miniblocks with destination in self shard, // would create problems or not if miniBlock.SenderShardID != scr.shardCoordinator.SelfId() { @@ -224,6 +207,32 @@ func (scr *smartContractResults) RestoreBlockDataIntoPools( return scrRestored, nil } +func (scr *smartContractResults) restoreSmartContractResults(miniBlock *block.MiniBlock) error { + strCache := process.ShardCacherIdentifier(miniBlock.SenderShardID, miniBlock.ReceiverShardID) + scrsBuff, err := scr.storage.GetAll(dataRetriever.UnsignedTransactionUnit, miniBlock.TxHashes) + if err != nil { + log.Debug("smart contract results from mini block were not found in UnsignedTransactionUnit", + "sender shard ID", miniBlock.SenderShardID, + "receiver shard ID", miniBlock.ReceiverShardID, + "num txs", len(miniBlock.TxHashes), + ) + + return err + } + + for txHash, 
txBuff := range scrsBuff { + tx := smartContractResult.SmartContractResult{} + err = scr.marshalizer.Unmarshal(&tx, txBuff) + if err != nil { + return err + } + + scr.scrPool.AddData([]byte(txHash), &tx, tx.Size(), strCache) + } + + return nil +} + // ProcessBlockTransactions processes all the smartContractResult from the block.Body, updates the state func (scr *smartContractResults) ProcessBlockTransactions( _ data.HeaderHandler, @@ -415,21 +424,22 @@ func (scr *smartContractResults) RequestTransactionsForMiniBlock(miniBlock *bloc return 0 } - missingScrsForMiniBlock := scr.computeMissingScrsForMiniBlock(miniBlock) - if len(missingScrsForMiniBlock) > 0 { - scr.onRequestSmartContractResult(miniBlock.SenderShardID, missingScrsForMiniBlock) + missingScrsHashesForMiniBlock := scr.computeMissingScrsHashesForMiniBlock(miniBlock) + if len(missingScrsHashesForMiniBlock) > 0 { + scr.onRequestSmartContractResult(miniBlock.SenderShardID, missingScrsHashesForMiniBlock) } - return len(missingScrsForMiniBlock) + return len(missingScrsHashesForMiniBlock) } -// computeMissingScrsForMiniBlock computes missing smartContractResults for a certain miniblock -func (scr *smartContractResults) computeMissingScrsForMiniBlock(miniBlock *block.MiniBlock) [][]byte { +// computeMissingScrsHashesForMiniBlock computes missing smart contract results hashes for a certain miniblock +func (scr *smartContractResults) computeMissingScrsHashesForMiniBlock(miniBlock *block.MiniBlock) [][]byte { + missingSmartContractResultsHashes := make([][]byte, 0) + if miniBlock.Type != block.SmartContractResultBlock { - return [][]byte{} + return missingSmartContractResultsHashes } - missingSmartContractResults := make([][]byte, 0, len(miniBlock.TxHashes)) for _, txHash := range miniBlock.TxHashes { tx, _ := process.GetTransactionHandlerFromPool( miniBlock.SenderShardID, @@ -439,11 +449,11 @@ func (scr *smartContractResults) computeMissingScrsForMiniBlock(miniBlock *block false) if check.IfNil(tx) { - 
missingSmartContractResults = append(missingSmartContractResults, txHash) + missingSmartContractResultsHashes = append(missingSmartContractResultsHashes, txHash) } } - return sliceUtil.TrimSliceSliceByte(missingSmartContractResults) + return missingSmartContractResultsHashes } // getAllScrsFromMiniBlock gets all the smartContractResults from a miniblock into a new structure @@ -489,7 +499,7 @@ func (scr *smartContractResults) CreateAndProcessMiniBlocks(_ func() bool, _ []b return make(block.MiniBlockSlice, 0), nil } -// ProcessMiniBlock processes all the smartContractResults from a and saves the processed smartContractResults in local cache complete miniblock +// ProcessMiniBlock processes all the smart contract results from the given miniblock and saves the processed ones in a local cache func (scr *smartContractResults) ProcessMiniBlock( miniBlock *block.MiniBlock, haveTime func() bool, @@ -607,26 +617,26 @@ func (scr *smartContractResults) ProcessMiniBlock( return nil, len(processedTxHashes), nil } -// CreateMarshalizedData marshalizes smartContractResults and creates and saves them into a new structure -func (scr *smartContractResults) CreateMarshalizedData(txHashes [][]byte) ([][]byte, error) { - mrsScrs, err := scr.createMarshalizedData(txHashes, &scr.scrForBlock) +// CreateMarshalledData marshals smart contract results hashes and saves them into a new structure +func (scr *smartContractResults) CreateMarshalledData(txHashes [][]byte) ([][]byte, error) { + marshalledScrs, err := scr.createMarshalledData(txHashes, &scr.scrForBlock) if err != nil { return nil, err } - return mrsScrs, nil + return marshalledScrs, nil } // GetAllCurrentUsedTxs returns all the smartContractResults used at current creation / processing func (scr *smartContractResults) GetAllCurrentUsedTxs() map[string]data.TransactionHandler { scr.scrForBlock.mutTxsForBlock.RLock() - scrPool := make(map[string]data.TransactionHandler, len(scr.scrForBlock.txHashAndInfo)) + scrsPool := 
make(map[string]data.TransactionHandler, len(scr.scrForBlock.txHashAndInfo)) for txHash, txInfoFromMap := range scr.scrForBlock.txHashAndInfo { - scrPool[txHash] = txInfoFromMap.tx + scrsPool[txHash] = txInfoFromMap.tx } scr.scrForBlock.mutTxsForBlock.RUnlock() - return scrPool + return scrsPool } // AddTxsFromMiniBlocks does nothing diff --git a/process/block/preprocess/transactions.go b/process/block/preprocess/transactions.go index 7b2cf070982..f9ccdd12439 100644 --- a/process/block/preprocess/transactions.go +++ b/process/block/preprocess/transactions.go @@ -11,7 +11,6 @@ import ( "github.com/ElrondNetwork/elrond-go-core/core" "github.com/ElrondNetwork/elrond-go-core/core/atomic" "github.com/ElrondNetwork/elrond-go-core/core/check" - "github.com/ElrondNetwork/elrond-go-core/core/sliceUtil" "github.com/ElrondNetwork/elrond-go-core/data" "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go-core/data/transaction" @@ -253,29 +252,11 @@ func (txs *transactions) RestoreBlockDataIntoPools( continue } - miniBlockStrCache := process.ShardCacherIdentifier(miniBlock.SenderShardID, miniBlock.ReceiverShardID) - txsBuff, err := txs.storage.GetAll(dataRetriever.TransactionUnit, miniBlock.TxHashes) + err := txs.restoreTxs(miniBlock) if err != nil { - log.Debug("tx from mini block was not found in TransactionUnit", - "sender shard ID", miniBlock.SenderShardID, - "receiver shard ID", miniBlock.ReceiverShardID, - "num txs", len(miniBlock.TxHashes), - ) - return txsRestored, err } - for txHash, txBuff := range txsBuff { - tx := transaction.Transaction{} - err = txs.marshalizer.Unmarshal(&tx, txBuff) - if err != nil { - return txsRestored, err - } - - strCache := txs.computeCacheIdentifier(miniBlockStrCache, &tx, miniBlock.Type) - txs.txPool.AddData([]byte(txHash), &tx, tx.Size(), strCache) - } - if miniBlock.SenderShardID != txs.shardCoordinator.SelfId() { miniBlockHash, errHash := core.CalculateHash(txs.marshalizer, txs.hasher, miniBlock) if 
errHash != nil { @@ -291,6 +272,33 @@ func (txs *transactions) RestoreBlockDataIntoPools( return txsRestored, nil } +func (txs *transactions) restoreTxs(miniBlock *block.MiniBlock) error { + miniBlockStrCache := process.ShardCacherIdentifier(miniBlock.SenderShardID, miniBlock.ReceiverShardID) + txsBuff, err := txs.storage.GetAll(dataRetriever.TransactionUnit, miniBlock.TxHashes) + if err != nil { + log.Debug("txs from mini block were not found in TransactionUnit", + "sender shard ID", miniBlock.SenderShardID, + "receiver shard ID", miniBlock.ReceiverShardID, + "num txs", len(miniBlock.TxHashes), + ) + + return err + } + + for txHash, txBuff := range txsBuff { + tx := transaction.Transaction{} + err = txs.marshalizer.Unmarshal(&tx, txBuff) + if err != nil { + return err + } + + strCache := txs.computeCacheIdentifier(miniBlockStrCache, &tx, miniBlock.Type) + txs.txPool.AddData([]byte(txHash), &tx, tx.Size(), strCache) + } + + return nil +} + func (txs *transactions) computeCacheIdentifier(miniBlockStrCache string, tx *transaction.Transaction, miniBlockType block.Type) string { if miniBlockType != block.InvalidBlock { return miniBlockStrCache @@ -900,21 +908,22 @@ func (txs *transactions) RequestTransactionsForMiniBlock(miniBlock *block.MiniBl return 0 } - missingTxsForMiniBlock := txs.computeMissingTxsForMiniBlock(miniBlock) - if len(missingTxsForMiniBlock) > 0 { - txs.onRequestTransaction(miniBlock.SenderShardID, missingTxsForMiniBlock) + missingTxsHashesForMiniBlock := txs.computeMissingTxsHashesForMiniBlock(miniBlock) + if len(missingTxsHashesForMiniBlock) > 0 { + txs.onRequestTransaction(miniBlock.SenderShardID, missingTxsHashesForMiniBlock) } - return len(missingTxsForMiniBlock) + return len(missingTxsHashesForMiniBlock) } -// computeMissingTxsForMiniBlock computes missing transactions for a certain miniblock -func (txs *transactions) computeMissingTxsForMiniBlock(miniBlock *block.MiniBlock) [][]byte { +// computeMissingTxsHashesForMiniBlock computes missing 
transactions hashes for a certain miniblock +func (txs *transactions) computeMissingTxsHashesForMiniBlock(miniBlock *block.MiniBlock) [][]byte { + missingTransactionsHashes := make([][]byte, 0) + if miniBlock.Type != txs.blockType { - return nil + return missingTransactionsHashes } - missingTransactions := make([][]byte, 0, len(miniBlock.TxHashes)) searchFirst := txs.blockType == block.InvalidBlock for _, txHash := range miniBlock.TxHashes { @@ -926,11 +935,11 @@ func (txs *transactions) computeMissingTxsForMiniBlock(miniBlock *block.MiniBloc searchFirst) if tx == nil || tx.IsInterfaceNil() { - missingTransactions = append(missingTransactions, txHash) + missingTransactionsHashes = append(missingTransactionsHashes, txHash) } } - return sliceUtil.TrimSliceSliceByte(missingTransactions) + return missingTransactionsHashes } // getAllTxsFromMiniBlock gets all the transactions from a miniblock into a new structure @@ -1405,7 +1414,7 @@ func (txs *transactions) computeSortedTxs( return selectedTxs, remainingTxs, nil } -// ProcessMiniBlock processes all the transactions from a and saves the processed transactions in local cache complete miniblock +// ProcessMiniBlock processes all the transactions from the given miniblock and saves the processed ones in a local cache func (txs *transactions) ProcessMiniBlock( miniBlock *block.MiniBlock, haveTime func() bool, @@ -1565,27 +1574,26 @@ func (txs *transactions) ProcessMiniBlock( return nil, len(processedTxHashes), nil } -// CreateMarshalizedData marshalizes transactions and creates and saves them into a new structure -func (txs *transactions) CreateMarshalizedData(txHashes [][]byte) ([][]byte, error) { - mrsScrs, err := txs.createMarshalizedData(txHashes, &txs.txsForCurrBlock) +// CreateMarshalledData marshals transactions hashes and saves them into a new structure +func (txs *transactions) CreateMarshalledData(txHashes [][]byte) ([][]byte, error) { + marshalledTxs, err := txs.createMarshalledData(txHashes, 
&txs.txsForCurrBlock) if err != nil { return nil, err } - return mrsScrs, nil + return marshalledTxs, nil } // GetAllCurrentUsedTxs returns all the transactions used at current creation / processing func (txs *transactions) GetAllCurrentUsedTxs() map[string]data.TransactionHandler { - txPool := make(map[string]data.TransactionHandler, len(txs.txsForCurrBlock.txHashAndInfo)) - txs.txsForCurrBlock.mutTxsForBlock.RLock() + txsPool := make(map[string]data.TransactionHandler, len(txs.txsForCurrBlock.txHashAndInfo)) for txHash, txInfoFromMap := range txs.txsForCurrBlock.txHashAndInfo { - txPool[txHash] = txInfoFromMap.tx + txsPool[txHash] = txInfoFromMap.tx } txs.txsForCurrBlock.mutTxsForBlock.RUnlock() - return txPool + return txsPool } // EpochConfirmed is called whenever a new epoch is confirmed diff --git a/process/block/preprocess/validatorInfoPreProcessor.go b/process/block/preprocess/validatorInfoPreProcessor.go index 86fa6bbb1d5..8da54753cae 100644 --- a/process/block/preprocess/validatorInfoPreProcessor.go +++ b/process/block/preprocess/validatorInfoPreProcessor.go @@ -1,7 +1,6 @@ package preprocess import ( - "github.com/ElrondNetwork/elrond-go/dataRetriever" "time" "github.com/ElrondNetwork/elrond-go-core/core" @@ -10,7 +9,9 @@ import ( "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go-core/hashing" "github.com/ElrondNetwork/elrond-go-core/marshal" + "github.com/ElrondNetwork/elrond-go/dataRetriever" "github.com/ElrondNetwork/elrond-go/process" + "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/storage" ) @@ -19,11 +20,11 @@ var _ process.PreProcessor = (*validatorInfoPreprocessor)(nil) type validatorInfoPreprocessor struct { *basePreProcess - chReceivedAllValidatorInfo chan bool - onRequestValidatorsInfo func(txHashes [][]byte) - validatorInfoForBlock txsForBlock - validatorInfoPool dataRetriever.ShardedDataCacherNotifier - storage dataRetriever.StorageService + chReceivedAllValidatorsInfo 
chan bool + onRequestValidatorsInfo func(txHashes [][]byte) + validatorsInfoForBlock txsForBlock + validatorsInfoPool dataRetriever.ShardedDataCacherNotifier + storage dataRetriever.StorageService } // NewValidatorInfoPreprocessor creates a new validatorInfo preprocessor object @@ -31,7 +32,7 @@ func NewValidatorInfoPreprocessor( hasher hashing.Hasher, marshalizer marshal.Marshalizer, blockSizeComputation BlockSizeComputationHandler, - validatorInfoPool dataRetriever.ShardedDataCacherNotifier, + validatorsInfoPool dataRetriever.ShardedDataCacherNotifier, store dataRetriever.StorageService, onRequestValidatorsInfo func(txHashes [][]byte), ) (*validatorInfoPreprocessor, error) { @@ -45,7 +46,7 @@ func NewValidatorInfoPreprocessor( if check.IfNil(blockSizeComputation) { return nil, process.ErrNilBlockSizeComputationHandler } - if check.IfNil(validatorInfoPool) { + if check.IfNil(validatorsInfoPool) { return nil, process.ErrNilValidatorInfoPool } if check.IfNil(store) { @@ -64,30 +65,54 @@ func NewValidatorInfoPreprocessor( vip := &validatorInfoPreprocessor{ basePreProcess: bpp, storage: store, - validatorInfoPool: validatorInfoPool, + validatorsInfoPool: validatorsInfoPool, onRequestValidatorsInfo: onRequestValidatorsInfo, } - vip.chReceivedAllValidatorInfo = make(chan bool) - vip.validatorInfoPool.RegisterOnAdded(vip.receivedValidatorInfoTransaction) - vip.validatorInfoForBlock.txHashAndInfo = make(map[string]*txInfo) + vip.chReceivedAllValidatorsInfo = make(chan bool) + vip.validatorsInfoPool.RegisterOnAdded(vip.receivedValidatorInfoTransaction) + vip.validatorsInfoForBlock.txHashAndInfo = make(map[string]*txInfo) return vip, nil } -// IsDataPrepared does nothing -func (vip *validatorInfoPreprocessor) IsDataPrepared(_ int, _ func() time.Duration) error { +// waitForValidatorsInfoHashes waits for a call whether all the requested validators info appeared +func (vip *validatorInfoPreprocessor) waitForValidatorsInfoHashes(waitTime time.Duration) error { + select { + 
case <-vip.chReceivedAllValidatorsInfo: + return nil + case <-time.After(waitTime): + return process.ErrTimeIsOut + } +} + +// IsDataPrepared returns non error if all the requested validators info arrived and were saved into the pool +func (vip *validatorInfoPreprocessor) IsDataPrepared(requestedValidatorsInfo int, haveTime func() time.Duration) error { + if requestedValidatorsInfo > 0 { + log.Debug("requested missing validators info", + "num validators info", requestedValidatorsInfo) + err := vip.waitForValidatorsInfoHashes(haveTime()) + vip.validatorsInfoForBlock.mutTxsForBlock.Lock() + missingValidatorsInfo := vip.validatorsInfoForBlock.missingTxs + vip.validatorsInfoForBlock.missingTxs = 0 + vip.validatorsInfoForBlock.mutTxsForBlock.Unlock() + log.Debug("received validators info", + "num validators info", requestedValidatorsInfo-missingValidatorsInfo) + if err != nil { + return err + } + } return nil } // RemoveBlockDataFromPools removes the peer miniblocks from pool func (vip *validatorInfoPreprocessor) RemoveBlockDataFromPools(body *block.Body, miniBlockPool storage.Cacher) error { - return vip.removeBlockDataFromPools(body, miniBlockPool, vip.validatorInfoPool, vip.isMiniBlockCorrect) + return vip.removeBlockDataFromPools(body, miniBlockPool, vip.validatorsInfoPool, vip.isMiniBlockCorrect) } -// RemoveTxsFromPools does nothing for validatorInfoPreprocessor implementation -func (vip *validatorInfoPreprocessor) RemoveTxsFromPools(_ *block.Body) error { - return nil +// RemoveTxsFromPools removes validators info from associated pools +func (vip *validatorInfoPreprocessor) RemoveTxsFromPools(body *block.Body) error { + return vip.removeTxsFromPools(body, vip.validatorsInfoPool, vip.isMiniBlockCorrect) } // RestoreBlockDataIntoPools restores the peer miniblocks to the pool @@ -109,6 +134,11 @@ func (vip *validatorInfoPreprocessor) RestoreBlockDataIntoPools( continue } + err := vip.restoreValidatorsInfo(miniBlock) + if err != nil { + return validatorsInfoRestored, 
err + } + miniBlockHash, err := core.CalculateHash(vip.marshalizer, vip.hasher, miniBlock) if err != nil { return validatorsInfoRestored, err @@ -122,6 +152,32 @@ func (vip *validatorInfoPreprocessor) RestoreBlockDataIntoPools( return validatorsInfoRestored, nil } +func (vip *validatorInfoPreprocessor) restoreValidatorsInfo(miniBlock *block.MiniBlock) error { + strCache := process.ShardCacherIdentifier(miniBlock.SenderShardID, miniBlock.ReceiverShardID) + validatorsInfoBuff, err := vip.storage.GetAll(dataRetriever.UnsignedTransactionUnit, miniBlock.TxHashes) + if err != nil { + log.Debug("validators info from mini block were not found in UnsignedTransactionUnit", + "sender shard ID", miniBlock.SenderShardID, + "receiver shard ID", miniBlock.ReceiverShardID, + "num txs", len(miniBlock.TxHashes), + ) + + return err + } + + for validatorInfoHash, validatorInfoBuff := range validatorsInfoBuff { + shardValidatorInfo := state.ShardValidatorInfo{} + err = vip.marshalizer.Unmarshal(&shardValidatorInfo, validatorInfoBuff) + if err != nil { + return err + } + + vip.validatorsInfoPool.AddData([]byte(validatorInfoHash), &shardValidatorInfo, shardValidatorInfo.Size(), strCache) + } + + return nil +} + // ProcessBlockTransactions does nothing func (vip *validatorInfoPreprocessor) ProcessBlockTransactions( _ data.HeaderHandler, @@ -131,8 +187,26 @@ func (vip *validatorInfoPreprocessor) ProcessBlockTransactions( return nil } -// SaveTxsToStorage does nothing -func (vip *validatorInfoPreprocessor) SaveTxsToStorage(_ *block.Body) error { +// SaveTxsToStorage saves the validators info from body into storage +func (vip *validatorInfoPreprocessor) SaveTxsToStorage(body *block.Body) error { + if check.IfNil(body) { + return process.ErrNilBlockBody + } + + for i := 0; i < len(body.MiniBlocks); i++ { + miniBlock := body.MiniBlocks[i] + if miniBlock.Type != block.PeerBlock { + continue + } + + vip.saveTxsToStorage( + miniBlock.TxHashes, + &vip.validatorsInfoForBlock, + vip.storage, + 
dataRetriever.UnsignedTransactionUnit, + ) + } + return nil } @@ -145,25 +219,100 @@ func (vip *validatorInfoPreprocessor) receivedValidatorInfoTransaction(key []byt return } - receivedAllMissing := vip.baseReceivedTransaction(key, tx, &vip.validatorInfoForBlock) + receivedAllMissing := vip.baseReceivedTransaction(key, tx, &vip.validatorsInfoForBlock) if receivedAllMissing { - vip.chReceivedAllValidatorInfo <- true + vip.chReceivedAllValidatorsInfo <- true } } -// CreateBlockStarted does nothing +// CreateBlockStarted cleans the local cache map for processed/created validators info at this round func (vip *validatorInfoPreprocessor) CreateBlockStarted() { + _ = core.EmptyChannel(vip.chReceivedAllValidatorsInfo) + + vip.validatorsInfoForBlock.mutTxsForBlock.Lock() + vip.validatorsInfoForBlock.missingTxs = 0 + vip.validatorsInfoForBlock.txHashAndInfo = make(map[string]*txInfo) + vip.validatorsInfoForBlock.mutTxsForBlock.Unlock() } -// RequestBlockTransactions does nothing -func (vip *validatorInfoPreprocessor) RequestBlockTransactions(_ *block.Body) int { - return 0 +// RequestBlockTransactions request for validators info if missing from a block.Body +func (vip *validatorInfoPreprocessor) RequestBlockTransactions(body *block.Body) int { + if check.IfNil(body) { + return 0 + } + + return vip.computeExistingAndRequestMissingValidatorsInfoForShards(body) } -// RequestTransactionsForMiniBlock does nothing -func (vip *validatorInfoPreprocessor) RequestTransactionsForMiniBlock(_ *block.MiniBlock) int { - return 0 +// computeExistingAndRequestMissingValidatorsInfoForShards calculates what validators info are available and requests +// what are missing from block.Body +func (vip *validatorInfoPreprocessor) computeExistingAndRequestMissingValidatorsInfoForShards(body *block.Body) int { + validatorsInfoBody := block.Body{} + for _, mb := range body.MiniBlocks { + if mb.Type != block.PeerBlock { + continue + } + if mb.SenderShardID != core.MetachainShardId { + continue + } + + 
validatorsInfoBody.MiniBlocks = append(validatorsInfoBody.MiniBlocks, mb) + } + + numMissingTxsForShards := vip.computeExistingAndRequestMissing( + &validatorsInfoBody, + &vip.validatorsInfoForBlock, + vip.chReceivedAllValidatorsInfo, + vip.isMiniBlockCorrect, + vip.validatorsInfoPool, + vip.onRequestValidatorsInfoWithShard, + ) + + return numMissingTxsForShards +} + +func (vip *validatorInfoPreprocessor) onRequestValidatorsInfoWithShard(_ uint32, txHashes [][]byte) { + vip.onRequestValidatorsInfo(txHashes) +} + +// RequestTransactionsForMiniBlock requests missing validators info for a certain miniblock +func (vip *validatorInfoPreprocessor) RequestTransactionsForMiniBlock(miniBlock *block.MiniBlock) int { + if miniBlock == nil { + return 0 + } + + missingValidatorsInfoHashesForMiniBlock := vip.computeMissingValidatorsInfoHashesForMiniBlock(miniBlock) + if len(missingValidatorsInfoHashesForMiniBlock) > 0 { + vip.onRequestValidatorsInfo(missingValidatorsInfoHashesForMiniBlock) + } + + return len(missingValidatorsInfoHashesForMiniBlock) +} + +// computeMissingValidatorsInfoHashesForMiniBlock computes missing validators info hashes for a certain miniblock +func (vip *validatorInfoPreprocessor) computeMissingValidatorsInfoHashesForMiniBlock(miniBlock *block.MiniBlock) [][]byte { + missingValidatorsInfoHashes := make([][]byte, 0) + + if miniBlock.Type != block.PeerBlock { + return missingValidatorsInfoHashes + } + + for _, txHash := range miniBlock.TxHashes { + tx, _ := process.GetTransactionHandlerFromPool( + miniBlock.SenderShardID, + miniBlock.ReceiverShardID, + txHash, + vip.validatorsInfoPool, + false, + ) + + if tx == nil { + missingValidatorsInfoHashes = append(missingValidatorsInfoHashes, txHash) + } + } + + return missingValidatorsInfoHashes } // CreateAndProcessMiniBlocks does nothing @@ -181,8 +330,6 @@ func (vip *validatorInfoPreprocessor) ProcessMiniBlock(miniBlock *block.MiniBloc return nil, 0, process.ErrValidatorInfoMiniBlockNotFromMeta } - //TODO: We 
need another function in the BlockSizeComputationHandler implementation that will better handle - //the PeerBlock miniblocks as those are not hashes if vip.blockSizeComputation.IsMaxBlockSizeWithoutThrottleReached(1, len(miniBlock.TxHashes)) { return nil, 0, process.ErrMaxBlockSizeReached } @@ -193,16 +340,26 @@ func (vip *validatorInfoPreprocessor) ProcessMiniBlock(miniBlock *block.MiniBloc return nil, len(miniBlock.TxHashes), nil } -// CreateMarshalizedData does nothing -func (vip *validatorInfoPreprocessor) CreateMarshalizedData(_ [][]byte) ([][]byte, error) { - marshalized := make([][]byte, 0) - return marshalized, nil +// CreateMarshalledData marshals validators info hashes and saves them into a new structure +func (vip *validatorInfoPreprocessor) CreateMarshalledData(txHashes [][]byte) ([][]byte, error) { + marshalledValidatorsInfo, err := vip.createMarshalledData(txHashes, &vip.validatorsInfoForBlock) + if err != nil { + return nil, err + } + + return marshalledValidatorsInfo, nil } -// GetAllCurrentUsedTxs does nothing +// GetAllCurrentUsedTxs returns all the validators info used at current creation / processing func (vip *validatorInfoPreprocessor) GetAllCurrentUsedTxs() map[string]data.TransactionHandler { - validatorInfoTxPool := make(map[string]data.TransactionHandler) - return validatorInfoTxPool + vip.validatorsInfoForBlock.mutTxsForBlock.RLock() + validatorsInfoPool := make(map[string]data.TransactionHandler, len(vip.validatorsInfoForBlock.txHashAndInfo)) + for txHash, txData := range vip.validatorsInfoForBlock.txHashAndInfo { + validatorsInfoPool[txHash] = txData.tx + } + vip.validatorsInfoForBlock.mutTxsForBlock.RUnlock() + + return validatorsInfoPool } // AddTxsFromMiniBlocks does nothing diff --git a/process/block/preprocess/validatorInfoPreProcessor_test.go b/process/block/preprocess/validatorInfoPreProcessor_test.go index b3a28b5a41d..0dd81b5151d 100644 --- a/process/block/preprocess/validatorInfoPreProcessor_test.go +++ 
b/process/block/preprocess/validatorInfoPreProcessor_test.go @@ -143,7 +143,7 @@ func TestNewValidatorInfoPreprocessor_CreateMarshalizedDataShouldWork(t *testing ) hash := make([][]byte, 0) - res, err := rtp.CreateMarshalizedData(hash) + res, err := rtp.CreateMarshalledData(hash) assert.Nil(t, err) assert.Equal(t, 0, len(res)) diff --git a/process/coordinator/process.go b/process/coordinator/process.go index 0e6fca0f741..b9255a7adf8 100644 --- a/process/coordinator/process.go +++ b/process/coordinator/process.go @@ -922,7 +922,7 @@ func (tc *transactionCoordinator) CreateMarshalizedData(body *block.Body) map[st dataMarshalizer, ok := preproc.(process.DataMarshalizer) if ok { // preproc supports marshalizing items - tc.appendMarshalizedItems( + tc.appendMarshalledItems( dataMarshalizer, miniBlock.TxHashes, mrsTxs, @@ -936,7 +936,7 @@ func (tc *transactionCoordinator) CreateMarshalizedData(body *block.Body) map[st dataMarshalizer, ok := interimProc.(process.DataMarshalizer) if ok { // interimProc supports marshalizing items - tc.appendMarshalizedItems( + tc.appendMarshalledItems( dataMarshalizer, miniBlock.TxHashes, mrsTxs, @@ -949,15 +949,15 @@ func (tc *transactionCoordinator) CreateMarshalizedData(body *block.Body) map[st return mrsTxs } -func (tc *transactionCoordinator) appendMarshalizedItems( +func (tc *transactionCoordinator) appendMarshalledItems( dataMarshalizer process.DataMarshalizer, txHashes [][]byte, mrsTxs map[string][][]byte, broadcastTopic string, ) { - currMrsTxs, err := dataMarshalizer.CreateMarshalizedData(txHashes) + currMrsTxs, err := dataMarshalizer.CreateMarshalledData(txHashes) if err != nil { - log.Debug("appendMarshalizedItems.CreateMarshalizedData", "error", err.Error()) + log.Debug("appendMarshalledItems.CreateMarshalledData", "error", err.Error()) return } diff --git a/process/interface.go b/process/interface.go index 7d9d0ea20f0..729d1d74c59 100644 --- a/process/interface.go +++ b/process/interface.go @@ -183,7 +183,7 @@ type 
IntermediateTransactionHandler interface { // DataMarshalizer defines the behavior of a structure that is able to marshalize containing data type DataMarshalizer interface { - CreateMarshalizedData(txHashes [][]byte) ([][]byte, error) + CreateMarshalledData(txHashes [][]byte) ([][]byte, error) } // TransactionVerifier interface validates if the transaction is good and if it should be processed diff --git a/process/mock/intermProcessorStub.go b/process/mock/intermProcessorStub.go index 12fa0e7899d..380b2751a3c 100644 --- a/process/mock/intermProcessorStub.go +++ b/process/mock/intermProcessorStub.go @@ -13,7 +13,7 @@ type IntermediateTransactionHandlerStub struct { VerifyInterMiniBlocksCalled func(body *block.Body) error SaveCurrentIntermediateTxToStorageCalled func() CreateBlockStartedCalled func() - CreateMarshalizedDataCalled func(txHashes [][]byte) ([][]byte, error) + CreateMarshalledDataCalled func(txHashes [][]byte) ([][]byte, error) GetAllCurrentFinishedTxsCalled func() map[string]data.TransactionHandler RemoveProcessedResultsCalled func() [][]byte InitProcessedResultsCalled func() @@ -35,12 +35,12 @@ func (ith *IntermediateTransactionHandlerStub) InitProcessedResults() { } } -// CreateMarshalizedData - -func (ith *IntermediateTransactionHandlerStub) CreateMarshalizedData(txHashes [][]byte) ([][]byte, error) { - if ith.CreateMarshalizedDataCalled == nil { +// CreateMarshalledData - +func (ith *IntermediateTransactionHandlerStub) CreateMarshalledData(txHashes [][]byte) ([][]byte, error) { + if ith.CreateMarshalledDataCalled == nil { return nil, nil } - return ith.CreateMarshalizedDataCalled(txHashes) + return ith.CreateMarshalledDataCalled(txHashes) } // AddIntermediateTransactions - diff --git a/process/mock/intermediateTransactionHandlerMock.go b/process/mock/intermediateTransactionHandlerMock.go index ecce957815d..57983a78d89 100644 --- a/process/mock/intermediateTransactionHandlerMock.go +++ b/process/mock/intermediateTransactionHandlerMock.go @@ -13,7 
+13,7 @@ type IntermediateTransactionHandlerMock struct { VerifyInterMiniBlocksCalled func(body *block.Body) error SaveCurrentIntermediateTxToStorageCalled func() CreateBlockStartedCalled func() - CreateMarshalizedDataCalled func(txHashes [][]byte) ([][]byte, error) + CreateMarshalledDataCalled func(txHashes [][]byte) ([][]byte, error) GetAllCurrentFinishedTxsCalled func() map[string]data.TransactionHandler RemoveProcessedResultsCalled func() [][]byte InitProcessedResultsCalled func() @@ -36,12 +36,12 @@ func (ith *IntermediateTransactionHandlerMock) InitProcessedResults() { } } -// CreateMarshalizedData - -func (ith *IntermediateTransactionHandlerMock) CreateMarshalizedData(txHashes [][]byte) ([][]byte, error) { - if ith.CreateMarshalizedDataCalled == nil { +// CreateMarshalledData - +func (ith *IntermediateTransactionHandlerMock) CreateMarshalledData(txHashes [][]byte) ([][]byte, error) { + if ith.CreateMarshalledDataCalled == nil { return nil, nil } - return ith.CreateMarshalizedDataCalled(txHashes) + return ith.CreateMarshalledDataCalled(txHashes) } // AddIntermediateTransactions - diff --git a/process/mock/preprocessorMock.go b/process/mock/preprocessorMock.go index 633bea57b34..ed576c65bb2 100644 --- a/process/mock/preprocessorMock.go +++ b/process/mock/preprocessorMock.go @@ -18,7 +18,7 @@ type PreProcessorMock struct { SaveTxsToStorageCalled func(body *block.Body) error ProcessBlockTransactionsCalled func(header data.HeaderHandler, body *block.Body, haveTime func() bool) error RequestBlockTransactionsCalled func(body *block.Body) int - CreateMarshalizedDataCalled func(txHashes [][]byte) ([][]byte, error) + CreateMarshalledDataCalled func(txHashes [][]byte) ([][]byte, error) RequestTransactionsForMiniBlockCalled func(miniBlock *block.MiniBlock) int ProcessMiniBlockCalled func(miniBlock *block.MiniBlock, haveTime func() bool, haveAdditionalTime func() bool, getNumOfCrossInterMbsAndTxs func() (int, int), scheduledMode bool) ([][]byte, int, error) 
CreateAndProcessMiniBlocksCalled func(haveTime func() bool) (block.MiniBlockSlice, error) @@ -91,12 +91,12 @@ func (ppm *PreProcessorMock) RequestBlockTransactions(body *block.Body) int { return ppm.RequestBlockTransactionsCalled(body) } -// CreateMarshalizedData - -func (ppm *PreProcessorMock) CreateMarshalizedData(txHashes [][]byte) ([][]byte, error) { - if ppm.CreateMarshalizedDataCalled == nil { +// CreateMarshalledData - +func (ppm *PreProcessorMock) CreateMarshalledData(txHashes [][]byte) ([][]byte, error) { + if ppm.CreateMarshalledDataCalled == nil { return nil, nil } - return ppm.CreateMarshalizedDataCalled(txHashes) + return ppm.CreateMarshalledDataCalled(txHashes) } // RequestTransactionsForMiniBlock - diff --git a/sharding/nodesCoordinator/errors.go b/sharding/nodesCoordinator/errors.go index ab63ba12f8c..369d7aa5b6c 100644 --- a/sharding/nodesCoordinator/errors.go +++ b/sharding/nodesCoordinator/errors.go @@ -111,3 +111,6 @@ var ErrValidatorCannotBeFullArchive = errors.New("validator cannot be a full arc // ErrNilNodeTypeProvider signals that a nil node type provider has been given var ErrNilNodeTypeProvider = errors.New("nil node type provider") + +// ErrNilPoolsHolder signals that a nil validator info cacher has been provided +var ErrNilValidatorInfoCacher = errors.New("nil validator info cacher") diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinator.go b/sharding/nodesCoordinator/indexHashedNodesCoordinator.go index d06f6fa09a1..5f8859dfe97 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinator.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinator.go @@ -17,6 +17,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/marshal" logger "github.com/ElrondNetwork/elrond-go-logger" "github.com/ElrondNetwork/elrond-go/common" + "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/storage" ) @@ -93,6 +94,7 @@ type indexHashedNodesCoordinator struct { 
chanStopNode chan endProcess.ArgEndProcess flagWaitingListFix atomicFlags.Flag nodeTypeProvider NodeTypeProviderHandler + validatorInfoCacher epochStart.ValidatorInfoCacher } // NewIndexHashedNodesCoordinator creates a new index hashed group selector @@ -137,6 +139,7 @@ func NewIndexHashedNodesCoordinator(arguments ArgNodesCoordinator) (*indexHashed chanStopNode: arguments.ChanStopNode, nodeTypeProvider: arguments.NodeTypeProvider, isFullArchive: arguments.IsFullArchive, + validatorInfoCacher: arguments.ValidatorInfoCacher, } log.Debug("indexHashedNodesCoordinator: enable epoch for waiting waiting list", "epoch", ihnc.waitingListFixEnableEpoch) @@ -214,6 +217,9 @@ func checkArguments(arguments ArgNodesCoordinator) error { if nil == arguments.ChanStopNode { return ErrNilNodeStopChannel } + if check.IfNil(arguments.ValidatorInfoCacher) { + return ErrNilValidatorInfoCacher + } return nil } @@ -555,9 +561,9 @@ func (ihnc *indexHashedNodesCoordinator) EpochStartPrepare(metaHdr data.HeaderHa return } - allValidatorInfo, err := createValidatorInfoFromBody(body, ihnc.marshalizer, ihnc.numTotalEligible) + allValidatorInfo, err := createValidatorInfoFromBody(body, ihnc.numTotalEligible, ihnc.validatorInfoCacher) if err != nil { - log.Error("could not create validator info from body - do nothing on nodesCoordinator epochStartPrepare") + log.Error("could not create validator info from body - do nothing on nodesCoordinator epochStartPrepare", "error", err.Error()) return } @@ -1160,8 +1166,8 @@ func selectValidators( // createValidatorInfoFromBody unmarshalls body data to create validator info func createValidatorInfoFromBody( body data.BodyHandler, - marshalizer marshal.Marshalizer, previousTotal uint64, + validatorInfoCacher epochStart.ValidatorInfoCacher, ) ([]*state.ShardValidatorInfo, error) { if check.IfNil(body) { return nil, ErrNilBlockBody @@ -1179,13 +1185,12 @@ func createValidatorInfoFromBody( } for _, txHash := range peerMiniBlock.TxHashes { - vid := 
&state.ShardValidatorInfo{} - err := marshalizer.Unmarshal(vid, txHash) + shardValidatorInfo, err := validatorInfoCacher.GetValidatorInfo(txHash) if err != nil { return nil, err } - allValidatorInfo = append(allValidatorInfo, vid) + allValidatorInfo = append(allValidatorInfo, shardValidatorInfo) } } diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinatorLite_test.go b/sharding/nodesCoordinator/indexHashedNodesCoordinatorLite_test.go index 994f182af98..873b95d42cc 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinatorLite_test.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinatorLite_test.go @@ -6,6 +6,7 @@ import ( "testing" "github.com/ElrondNetwork/elrond-go-core/core" + "github.com/ElrondNetwork/elrond-go/dataRetriever/dataPool" "github.com/ElrondNetwork/elrond-go/state" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -156,6 +157,7 @@ func TestIndexHashedNodesCoordinator_IsEpochInConfig(t *testing.T) { arguments := createArguments() + arguments.ValidatorInfoCacher = dataPool.NewCurrentBlockValidatorInfoPool() ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(t, err) @@ -163,7 +165,7 @@ func TestIndexHashedNodesCoordinator_IsEpochInConfig(t *testing.T) { ihnc.nodesConfig[epoch] = ihnc.nodesConfig[0] body := createBlockBodyFromNodesCoordinator(ihnc, epoch) - validatorsInfo, _ := createValidatorInfoFromBody(body, arguments.Marshalizer, 10) + validatorsInfo, _ := createValidatorInfoFromBody(body, 10, arguments.ValidatorInfoCacher) err = ihnc.SetNodesConfigFromValidatorsInfo(epoch, []byte{}, validatorsInfo) require.Nil(t, err) diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinatorWithRater_test.go b/sharding/nodesCoordinator/indexHashedNodesCoordinatorWithRater_test.go index 49dcb65658a..f98278b3d3a 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinatorWithRater_test.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinatorWithRater_test.go @@ -15,9 +15,10 @@ 
import ( "github.com/ElrondNetwork/elrond-go-core/hashing/sha256" "github.com/ElrondNetwork/elrond-go/common" "github.com/ElrondNetwork/elrond-go/sharding/mock" - "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" "github.com/ElrondNetwork/elrond-go/state" + "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" + "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacherMock" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -93,6 +94,7 @@ func TestIndexHashedGroupSelectorWithRater_OkValShouldWork(t *testing.T) { ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } nc, err := NewIndexHashedNodesCoordinator(arguments) assert.Nil(t, err) @@ -187,6 +189,7 @@ func BenchmarkIndexHashedGroupSelectorWithRater_ComputeValidatorsGroup63of400(b ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(b, err) @@ -260,6 +263,7 @@ func Test_ComputeValidatorsGroup63of400(t *testing.T) { ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) numRounds := uint64(1000000) @@ -333,6 +337,7 @@ func TestIndexHashedGroupSelectorWithRater_GetValidatorWithPublicKeyShouldReturn ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, + ValidatorInfoCacher: 
&validatorInfoCacherMock.ValidatorInfoCacherMock{}, } nc, _ := NewIndexHashedNodesCoordinator(arguments) ihnc, _ := NewIndexHashedNodesCoordinatorWithRater(nc, &mock.RaterMock{}) @@ -385,6 +390,7 @@ func TestIndexHashedGroupSelectorWithRater_GetValidatorWithPublicKeyShouldReturn ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } nc, _ := NewIndexHashedNodesCoordinator(arguments) ihnc, _ := NewIndexHashedNodesCoordinatorWithRater(nc, &mock.RaterMock{}) @@ -451,6 +457,7 @@ func TestIndexHashedGroupSelectorWithRater_GetValidatorWithPublicKeyShouldWork(t ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } nc, _ := NewIndexHashedNodesCoordinator(arguments) ihnc, _ := NewIndexHashedNodesCoordinatorWithRater(nc, &mock.RaterMock{}) @@ -534,6 +541,7 @@ func TestIndexHashedGroupSelectorWithRater_GetAllEligibleValidatorsPublicKeys(t ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } nc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -838,6 +846,7 @@ func BenchmarkIndexHashedWithRaterGroupSelector_ComputeValidatorsGroup21of400(b ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(b, err) diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go b/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go index 4f090fa894b..617c6895f60 100644 --- 
a/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go @@ -16,14 +16,18 @@ import ( "github.com/ElrondNetwork/elrond-go-core/core/check" "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go-core/data/endProcess" + "github.com/ElrondNetwork/elrond-go-core/hashing" "github.com/ElrondNetwork/elrond-go-core/hashing/sha256" "github.com/ElrondNetwork/elrond-go-core/marshal" "github.com/ElrondNetwork/elrond-go/common" + "github.com/ElrondNetwork/elrond-go/dataRetriever/dataPool" + "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/sharding/mock" "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/storage/lrucache" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" + "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacherMock" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -91,6 +95,7 @@ func createArguments() ArgNodesCoordinator { epochStartSubscriber := &mock.EpochStartNotifierStub{} bootStorer := mock.NewStorerMock() + validatorInfoCacher := dataPool.NewCurrentBlockValidatorInfoPool() arguments := ArgNodesCoordinator{ ShardConsensusGroupSize: 1, @@ -110,6 +115,7 @@ func createArguments() ArgNodesCoordinator { IsFullArchive: false, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + ValidatorInfoCacher: validatorInfoCacher, } return arguments } @@ -258,6 +264,7 @@ func TestIndexHashedNodesCoordinator_OkValShouldWork(t *testing.T) { WaitingListFixEnabledEpoch: 0, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -316,6 
+323,7 @@ func TestIndexHashedNodesCoordinator_NewCoordinatorTooFewNodesShouldErr(t *testi WaitingListFixEnabledEpoch: 0, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -388,6 +396,7 @@ func TestIndexHashedNodesCoordinator_ComputeValidatorsGroup1ValidatorShouldRetur WaitingListFixEnabledEpoch: 0, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) list2, err := ihnc.ComputeConsensusGroup([]byte("randomness"), 0, 0, 0) @@ -446,6 +455,7 @@ func TestIndexHashedNodesCoordinator_ComputeValidatorsGroup400of400For10locksNoM WaitingListFixEnabledEpoch: 0, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -532,6 +542,7 @@ func TestIndexHashedNodesCoordinator_ComputeValidatorsGroup400of400For10BlocksMe WaitingListFixEnabledEpoch: 0, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -601,6 +612,7 @@ func TestIndexHashedNodesCoordinator_ComputeValidatorsGroup63of400TestEqualSameP WaitingListFixEnabledEpoch: 0, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -663,6 +675,7 @@ func 
BenchmarkIndexHashedGroupSelector_ComputeValidatorsGroup21of400(b *testing. WaitingListFixEnabledEpoch: 0, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -734,6 +747,7 @@ func runBenchmark(consensusGroupCache Cacher, consensusGroupSize int, nodesMap m WaitingListFixEnabledEpoch: 0, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -782,6 +796,7 @@ func computeMemoryRequirements(consensusGroupCache Cacher, consensusGroupSize in WaitingListFixEnabledEpoch: 0, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(b, err) @@ -920,6 +935,7 @@ func TestIndexHashedNodesCoordinator_GetValidatorWithPublicKeyShouldWork(t *test WaitingListFixEnabledEpoch: 0, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -1002,6 +1018,7 @@ func TestIndexHashedGroupSelector_GetAllEligibleValidatorsPublicKeys(t *testing. 
WaitingListFixEnabledEpoch: 0, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -1079,6 +1096,7 @@ func TestIndexHashedGroupSelector_GetAllWaitingValidatorsPublicKeys(t *testing.T WaitingListFixEnabledEpoch: 0, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -1091,24 +1109,31 @@ func TestIndexHashedGroupSelector_GetAllWaitingValidatorsPublicKeys(t *testing.T func createBlockBodyFromNodesCoordinator(ihnc *indexHashedNodesCoordinator, epoch uint32) *block.Body { body := &block.Body{MiniBlocks: make([]*block.MiniBlock, 0)} - mbs := createMiniBlocksForNodesMap(ihnc.nodesConfig[epoch].eligibleMap, string(common.EligibleList), ihnc.marshalizer) + mbs := createMiniBlocksForNodesMap(ihnc.nodesConfig[epoch].eligibleMap, string(common.EligibleList), ihnc.marshalizer, ihnc.hasher, ihnc.validatorInfoCacher) body.MiniBlocks = append(body.MiniBlocks, mbs...) - mbs = createMiniBlocksForNodesMap(ihnc.nodesConfig[epoch].waitingMap, string(common.WaitingList), ihnc.marshalizer) + mbs = createMiniBlocksForNodesMap(ihnc.nodesConfig[epoch].waitingMap, string(common.WaitingList), ihnc.marshalizer, ihnc.hasher, ihnc.validatorInfoCacher) body.MiniBlocks = append(body.MiniBlocks, mbs...) - mbs = createMiniBlocksForNodesMap(ihnc.nodesConfig[epoch].leavingMap, string(common.LeavingList), ihnc.marshalizer) + mbs = createMiniBlocksForNodesMap(ihnc.nodesConfig[epoch].leavingMap, string(common.LeavingList), ihnc.marshalizer, ihnc.hasher, ihnc.validatorInfoCacher) body.MiniBlocks = append(body.MiniBlocks, mbs...) 
return body } -func createMiniBlocksForNodesMap(nodesMap map[uint32][]Validator, list string, marshalizer marshal.Marshalizer) []*block.MiniBlock { +func createMiniBlocksForNodesMap( + nodesMap map[uint32][]Validator, + list string, + marshaller marshal.Marshalizer, + hasher hashing.Hasher, + validatorInfoCacher epochStart.ValidatorInfoCacher, +) []*block.MiniBlock { + miniBlocks := make([]*block.MiniBlock, 0) for shId, eligibleList := range nodesMap { miniBlock := &block.MiniBlock{Type: block.PeerBlock} for index, eligible := range eligibleList { - shardVInfo := &state.ShardValidatorInfo{ + shardValidatorInfo := &state.ShardValidatorInfo{ PublicKey: eligible.PubKey(), ShardId: shId, List: list, @@ -1116,8 +1141,10 @@ func createMiniBlocksForNodesMap(nodesMap map[uint32][]Validator, list string, m TempRating: 10, } - marshaledData, _ := marshalizer.Marshal(shardVInfo) - miniBlock.TxHashes = append(miniBlock.TxHashes, marshaledData) + shardValidatorInfoHash, _ := core.CalculateHash(marshaller, hasher, shardValidatorInfo) + + miniBlock.TxHashes = append(miniBlock.TxHashes, shardValidatorInfoHash) + validatorInfoCacher.AddValidatorInfo(shardValidatorInfoHash, shardValidatorInfo) } miniBlocks = append(miniBlocks, miniBlock) } @@ -1406,6 +1433,7 @@ func TestIndexHashedNodesCoordinator_EpochStart_EligibleSortedAscendingByIndex(t epochStartSubscriber := &mock.EpochStartNotifierStub{} bootStorer := mock.NewStorerMock() + validatorInfoCacher := dataPool.NewCurrentBlockValidatorInfoPool() arguments := ArgNodesCoordinator{ ShardConsensusGroupSize: 1, @@ -1424,6 +1452,7 @@ func TestIndexHashedNodesCoordinator_EpochStart_EligibleSortedAscendingByIndex(t WaitingListFixEnabledEpoch: 0, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + ValidatorInfoCacher: validatorInfoCacher, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) diff --git a/sharding/nodesCoordinator/shardingArgs.go 
b/sharding/nodesCoordinator/shardingArgs.go index 2b3173f9aa3..f690b208b40 100644 --- a/sharding/nodesCoordinator/shardingArgs.go +++ b/sharding/nodesCoordinator/shardingArgs.go @@ -4,6 +4,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/data/endProcess" "github.com/ElrondNetwork/elrond-go-core/hashing" "github.com/ElrondNetwork/elrond-go-core/marshal" + "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/storage" ) @@ -29,4 +30,5 @@ type ArgNodesCoordinator struct { ChanStopNode chan endProcess.ArgEndProcess NodeTypeProvider NodeTypeProviderHandler IsFullArchive bool + ValidatorInfoCacher epochStart.ValidatorInfoCacher } diff --git a/testscommon/validatorInfoCacherMock/validatorInfoCacherMock.go b/testscommon/validatorInfoCacherMock/validatorInfoCacherMock.go new file mode 100644 index 00000000000..d448d4ee8df --- /dev/null +++ b/testscommon/validatorInfoCacherMock/validatorInfoCacherMock.go @@ -0,0 +1,38 @@ +package validatorInfoCacherMock + +import "github.com/ElrondNetwork/elrond-go/state" + +// ValidatorInfoCacherMock - +type ValidatorInfoCacherMock struct { + CleanCalled func() + AddValidatorInfoCalled func(validatorInfoHash []byte, validatorInfo *state.ShardValidatorInfo) + GetValidatorInfoCalled func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) +} + +// Clean - +func (vicm *ValidatorInfoCacherMock) Clean() { + if vicm.CleanCalled != nil { + vicm.CleanCalled() + } +} + +// GetValidatorInfo - +func (vicm *ValidatorInfoCacherMock) GetValidatorInfo(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + if vicm.GetValidatorInfoCalled != nil { + return vicm.GetValidatorInfoCalled(validatorInfoHash) + } + + return nil, nil +} + +// AddValidatorInfo - +func (vicm *ValidatorInfoCacherMock) AddValidatorInfo(validatorInfoHash []byte, validatorInfo *state.ShardValidatorInfo) { + if vicm.AddValidatorInfoCalled != nil { + vicm.AddValidatorInfoCalled(validatorInfoHash, validatorInfo) + } +} + +// 
IsInterfaceNil returns true if there is no value under the interface +func (vicm *ValidatorInfoCacherMock) IsInterfaceNil() bool { + return vicm == nil +} From 3269cf5da43630d13d0629f4659545a7c91d6fe7 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Tue, 24 May 2022 18:29:19 +0300 Subject: [PATCH 26/70] * Changed initialization for ValidatorInfoPool --- consensus/mock/epochStartNotifierStub.go | 4 +- consensus/spos/bls/subroundStartRound.go | 3 +- ...ol.go => currentEpochValidatorInfoPool.go} | 16 ++-- ... => currentEpochValidatorInfoPool_test.go} | 8 +- dataRetriever/dataPool/dataPool.go | 16 ++-- dataRetriever/dataPool/dataPool_test.go | 12 +-- dataRetriever/errors.go | 8 +- dataRetriever/factory/dataPoolFactory.go | 4 +- dataRetriever/interface.go | 4 +- ...go => validatorInfoForCurrentEpochMock.go} | 12 +-- .../disabled/disabledEpochStartNotifier.go | 2 +- epochStart/bootstrap/interface.go | 3 +- epochStart/bootstrap/process_test.go | 8 +- epochStart/bootstrap/syncValidatorStatus.go | 3 +- .../bootstrap/syncValidatorStatus_test.go | 7 +- epochStart/interface.go | 6 +- epochStart/metachain/trigger.go | 11 ++- epochStart/metachain/triggerRegistry_test.go | 1 + epochStart/metachain/trigger_test.go | 7 ++ epochStart/metachain/validators.go | 32 ++++---- epochStart/metachain/validators_test.go | 4 +- epochStart/mock/epochStartNotifierStub.go | 11 ++- epochStart/notifier/common.go | 2 +- .../notifier/epochStartSubscriptionHandler.go | 6 +- epochStart/shardchain/trigger.go | 10 ++- epochStart/shardchain/triggerRegistry_test.go | 1 + epochStart/shardchain/trigger_test.go | 7 ++ factory/interface.go | 2 +- factory/mock/epochStartNotifierStub.go | 6 +- factory/processComponents.go | 1 + factory/shardingFactory.go | 2 - heartbeat/mock/epochStartNotifier.go | 8 +- integrationTests/consensus/testInitializer.go | 8 +- .../consensusComponents_test.go | 1 - .../processComponents_test.go | 1 - .../statusComponents/statusComponents_test.go | 1 -
integrationTests/mock/epochStartNotifier.go | 8 +- integrationTests/nodesCoordinatorFactory.go | 4 - integrationTests/testP2PNode.go | 4 - integrationTests/testProcessorNode.go | 8 +- .../testProcessorNodeWithCoordinator.go | 3 - .../testProcessorNodeWithMultisigner.go | 5 -- node/mock/epochStartNotifier.go | 8 +- node/nodeRunner.go | 1 - process/mock/epochStartNotifierStub.go | 8 +- sharding/mock/epochStartNotifierStub.go | 6 +- sharding/networksharding/peerShardMapper.go | 3 +- .../networksharding/peerShardMapper_test.go | 3 +- .../indexHashedNodesCoordinator.go | 9 +-- .../indexHashedNodesCoordinatorLite_test.go | 6 +- ...dexHashedNodesCoordinatorWithRater_test.go | 9 --- .../indexHashedNodesCoordinator_test.go | 78 +++++++++---------- sharding/nodesCoordinator/interface.go | 2 +- sharding/nodesCoordinator/shardingArgs.go | 2 - testscommon/dataRetriever/poolFactory.go | 8 +- testscommon/dataRetriever/poolsHolderMock.go | 10 +-- testscommon/dataRetriever/poolsHolderStub.go | 10 +-- testscommon/genericMocks/actionHandlerStub.go | 11 ++- .../shardingMocks/nodesCoordinatorStub.go | 7 +- .../validatorInfoCacherMock.go | 0 update/mock/epochStartNotifierStub.go | 8 +- 61 files changed, 230 insertions(+), 229 deletions(-) rename dataRetriever/dataPool/{currentBlockValidatorInfoPool.go => currentEpochValidatorInfoPool.go} (70%) rename dataRetriever/dataPool/{currentBlockValidatorInfoPool_test.go => currentEpochValidatorInfoPool_test.go} (79%) rename dataRetriever/mock/{validatorInfoForCurrentBlockMock.go => validatorInfoForCurrentEpochMock.go} (70%) rename testscommon/{validatorInfoCacherMock => validatorInfoCacher}/validatorInfoCacherMock.go (100%) diff --git a/consensus/mock/epochStartNotifierStub.go b/consensus/mock/epochStartNotifierStub.go index 157bd99020b..a0f67c63778 100644 --- a/consensus/mock/epochStartNotifierStub.go +++ b/consensus/mock/epochStartNotifierStub.go @@ -38,13 +38,13 @@ func (esnm *EpochStartNotifierStub) UnregisterHandler(handler 
epochStart.ActionH } // NotifyAllPrepare - -func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) { +func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) { if esnm.NotifyAllPrepareCalled != nil { esnm.NotifyAllPrepareCalled(metaHdr, body) } for _, hdl := range esnm.epochStartHdls { - hdl.EpochStartPrepare(metaHdr, body) + hdl.EpochStartPrepare(metaHdr, body, validatorInfoCacher) } } diff --git a/consensus/spos/bls/subroundStartRound.go b/consensus/spos/bls/subroundStartRound.go index a18fe9d893e..9b5302ef2ef 100644 --- a/consensus/spos/bls/subroundStartRound.go +++ b/consensus/spos/bls/subroundStartRound.go @@ -4,6 +4,7 @@ import ( "context" "encoding/hex" "fmt" + "github.com/ElrondNetwork/elrond-go/epochStart" "sync" "time" @@ -302,7 +303,7 @@ func (sr *subroundStartRound) generateNextConsensusGroup(roundIndex int64) error // EpochStartPrepare wis called when an epoch start event is observed, but not yet confirmed/committed. 
// Some components may need to do initialisation on this event -func (sr *subroundStartRound) EpochStartPrepare(metaHdr data.HeaderHandler, _ data.BodyHandler) { +func (sr *subroundStartRound) EpochStartPrepare(metaHdr data.HeaderHandler, _ data.BodyHandler, _ epochStart.ValidatorInfoCacher) { log.Trace(fmt.Sprintf("epoch %d start prepare in consensus", metaHdr.GetEpoch())) } diff --git a/dataRetriever/dataPool/currentBlockValidatorInfoPool.go b/dataRetriever/dataPool/currentEpochValidatorInfoPool.go similarity index 70% rename from dataRetriever/dataPool/currentBlockValidatorInfoPool.go rename to dataRetriever/dataPool/currentEpochValidatorInfoPool.go index bdae2de508c..6e07183c6a0 100644 --- a/dataRetriever/dataPool/currentBlockValidatorInfoPool.go +++ b/dataRetriever/dataPool/currentEpochValidatorInfoPool.go @@ -12,21 +12,21 @@ var _ dataRetriever.ValidatorInfoCacher = (*validatorInfoMapCacher)(nil) type validatorInfoMapCacher struct { mutValidatorInfo sync.RWMutex - validatorInfoForBlock map[string]*state.ShardValidatorInfo + validatorInfoForEpoch map[string]*state.ShardValidatorInfo } -// NewCurrentBlockValidatorInfoPool returns a new validator info pool to be used for the current block -func NewCurrentBlockValidatorInfoPool() *validatorInfoMapCacher { +// NewCurrentEpochValidatorInfoPool returns a new validator info pool to be used for the current epoch +func NewCurrentEpochValidatorInfoPool() *validatorInfoMapCacher { return &validatorInfoMapCacher{ mutValidatorInfo: sync.RWMutex{}, - validatorInfoForBlock: make(map[string]*state.ShardValidatorInfo), + validatorInfoForEpoch: make(map[string]*state.ShardValidatorInfo), } } // Clean creates a new validator info pool func (vimc *validatorInfoMapCacher) Clean() { vimc.mutValidatorInfo.Lock() - vimc.validatorInfoForBlock = make(map[string]*state.ShardValidatorInfo) + vimc.validatorInfoForEpoch = make(map[string]*state.ShardValidatorInfo) vimc.mutValidatorInfo.Unlock() } @@ -35,9 +35,9 @@ func (vimc 
*validatorInfoMapCacher) GetValidatorInfo(validatorInfoHash []byte) ( vimc.mutValidatorInfo.RLock() defer vimc.mutValidatorInfo.RUnlock() - validatorInfo, ok := vimc.validatorInfoForBlock[string(validatorInfoHash)] + validatorInfo, ok := vimc.validatorInfoForEpoch[string(validatorInfoHash)] if !ok { - return nil, dataRetriever.ErrValidatorInfoNotFoundInBlockPool + return nil, dataRetriever.ErrValidatorInfoNotFoundInEpochPool } return validatorInfo, nil @@ -50,7 +50,7 @@ func (vimc *validatorInfoMapCacher) AddValidatorInfo(validatorInfoHash []byte, v } vimc.mutValidatorInfo.Lock() - vimc.validatorInfoForBlock[string(validatorInfoHash)] = validatorInfo + vimc.validatorInfoForEpoch[string(validatorInfoHash)] = validatorInfo vimc.mutValidatorInfo.Unlock() } diff --git a/dataRetriever/dataPool/currentBlockValidatorInfoPool_test.go b/dataRetriever/dataPool/currentEpochValidatorInfoPool_test.go similarity index 79% rename from dataRetriever/dataPool/currentBlockValidatorInfoPool_test.go rename to dataRetriever/dataPool/currentEpochValidatorInfoPool_test.go index 6d4a40694c1..ab39cd14bee 100644 --- a/dataRetriever/dataPool/currentBlockValidatorInfoPool_test.go +++ b/dataRetriever/dataPool/currentEpochValidatorInfoPool_test.go @@ -7,12 +7,12 @@ import ( "testing" ) -func TestCurrentBlockValidatorInfoPool_AddGetCleanTx(t *testing.T) { +func TestCurrentEpochValidatorInfoPool_AddGetCleanTx(t *testing.T) { t.Parallel() validatorInfoHash := []byte("hash") validatorInfo := &state.ShardValidatorInfo{} - currentValidatorInfoPool := NewCurrentBlockValidatorInfoPool() + currentValidatorInfoPool := NewCurrentEpochValidatorInfoPool() require.False(t, currentValidatorInfoPool.IsInterfaceNil()) currentValidatorInfoPool.AddValidatorInfo(validatorInfoHash, validatorInfo) @@ -20,7 +20,7 @@ func TestCurrentBlockValidatorInfoPool_AddGetCleanTx(t *testing.T) { validatorInfoFromPool, err := currentValidatorInfoPool.GetValidatorInfo([]byte("wrong hash")) require.Nil(t, validatorInfoFromPool) - 
require.Equal(t, dataRetriever.ErrValidatorInfoNotFoundInBlockPool, err) + require.Equal(t, dataRetriever.ErrValidatorInfoNotFoundInEpochPool, err) validatorInfoFromPool, err = currentValidatorInfoPool.GetValidatorInfo(validatorInfoHash) require.Nil(t, err) @@ -29,5 +29,5 @@ func TestCurrentBlockValidatorInfoPool_AddGetCleanTx(t *testing.T) { currentValidatorInfoPool.Clean() validatorInfoFromPool, err = currentValidatorInfoPool.GetValidatorInfo(validatorInfoHash) require.Nil(t, validatorInfoFromPool) - require.Equal(t, dataRetriever.ErrValidatorInfoNotFoundInBlockPool, err) + require.Equal(t, dataRetriever.ErrValidatorInfoNotFoundInEpochPool, err) } diff --git a/dataRetriever/dataPool/dataPool.go b/dataRetriever/dataPool/dataPool.go index 38d54dded14..821bcdf5d9e 100644 --- a/dataRetriever/dataPool/dataPool.go +++ b/dataRetriever/dataPool/dataPool.go @@ -18,7 +18,7 @@ type dataPool struct { trieNodes storage.Cacher trieNodesChunks storage.Cacher currBlockTxs dataRetriever.TransactionCacher - currBlockValidatorInfo dataRetriever.ValidatorInfoCacher + currEpochValidatorInfo dataRetriever.ValidatorInfoCacher smartContracts storage.Cacher validatorsInfo dataRetriever.ShardedDataCacherNotifier } @@ -34,7 +34,7 @@ type DataPoolArgs struct { TrieNodes storage.Cacher TrieNodesChunks storage.Cacher CurrentBlockTransactions dataRetriever.TransactionCacher - CurrentBlockValidatorInfo dataRetriever.ValidatorInfoCacher + CurrentEpochValidatorInfo dataRetriever.ValidatorInfoCacher SmartContracts storage.Cacher ValidatorsInfo dataRetriever.ShardedDataCacherNotifier } @@ -62,8 +62,8 @@ func NewDataPool(args DataPoolArgs) (*dataPool, error) { if check.IfNil(args.CurrentBlockTransactions) { return nil, dataRetriever.ErrNilCurrBlockTxs } - if check.IfNil(args.CurrentBlockValidatorInfo) { - return nil, dataRetriever.ErrNilCurrBlockValidatorInfo + if check.IfNil(args.CurrentEpochValidatorInfo) { + return nil, dataRetriever.ErrNilCurrEpochValidatorInfo } if check.IfNil(args.TrieNodes) { 
return nil, dataRetriever.ErrNilTrieNodesPool @@ -88,7 +88,7 @@ func NewDataPool(args DataPoolArgs) (*dataPool, error) { trieNodes: args.TrieNodes, trieNodesChunks: args.TrieNodesChunks, currBlockTxs: args.CurrentBlockTransactions, - currBlockValidatorInfo: args.CurrentBlockValidatorInfo, + currEpochValidatorInfo: args.CurrentEpochValidatorInfo, smartContracts: args.SmartContracts, validatorsInfo: args.ValidatorsInfo, }, nil @@ -99,9 +99,9 @@ func (dp *dataPool) CurrentBlockTxs() dataRetriever.TransactionCacher { return dp.currBlockTxs } -// CurrentBlockValidatorInfo returns the holder for current block validator info -func (dp *dataPool) CurrentBlockValidatorInfo() dataRetriever.ValidatorInfoCacher { - return dp.currBlockValidatorInfo +// CurrentEpochValidatorInfo returns the holder for current epoch validator info +func (dp *dataPool) CurrentEpochValidatorInfo() dataRetriever.ValidatorInfoCacher { + return dp.currEpochValidatorInfo } // Transactions returns the holder for transactions diff --git a/dataRetriever/dataPool/dataPool_test.go b/dataRetriever/dataPool/dataPool_test.go index 45a7ab852f3..031b935b3af 100644 --- a/dataRetriever/dataPool/dataPool_test.go +++ b/dataRetriever/dataPool/dataPool_test.go @@ -24,7 +24,7 @@ func createMockDataPoolArgs() dataPool.DataPoolArgs { TrieNodes: testscommon.NewCacherStub(), TrieNodesChunks: testscommon.NewCacherStub(), CurrentBlockTransactions: &mock.TxForCurrentBlockStub{}, - CurrentBlockValidatorInfo: &mock.ValidatorInfoForCurrentBlockStub{}, + CurrentEpochValidatorInfo: &mock.ValidatorInfoForCurrentEpochStub{}, SmartContracts: testscommon.NewCacherStub(), ValidatorsInfo: testscommon.NewShardedDataStub(), } @@ -151,15 +151,15 @@ func TestNewDataPool_NilCurrBlockTransactionsShouldErr(t *testing.T) { require.Equal(t, dataRetriever.ErrNilCurrBlockTxs, err) } -func TestNewDataPool_NilCurrBlockValidatorInfoShouldErr(t *testing.T) { +func TestNewDataPool_NilCurrEpochValidatorInfoShouldErr(t *testing.T) { t.Parallel() args := 
createMockDataPoolArgs() - args.CurrentBlockValidatorInfo = nil + args.CurrentEpochValidatorInfo = nil tdp, err := dataPool.NewDataPool(args) require.Nil(t, tdp) - require.Equal(t, dataRetriever.ErrNilCurrBlockValidatorInfo, err) + require.Equal(t, dataRetriever.ErrNilCurrEpochValidatorInfo, err) } func TestNewDataPool_OkValsShouldWork(t *testing.T) { @@ -173,7 +173,7 @@ func TestNewDataPool_OkValsShouldWork(t *testing.T) { TrieNodes: testscommon.NewCacherStub(), TrieNodesChunks: testscommon.NewCacherStub(), CurrentBlockTransactions: &mock.TxForCurrentBlockStub{}, - CurrentBlockValidatorInfo: &mock.ValidatorInfoForCurrentBlockStub{}, + CurrentEpochValidatorInfo: &mock.ValidatorInfoForCurrentEpochStub{}, SmartContracts: testscommon.NewCacherStub(), ValidatorsInfo: testscommon.NewShardedDataStub(), } @@ -190,7 +190,7 @@ func TestNewDataPool_OkValsShouldWork(t *testing.T) { assert.True(t, args.MiniBlocks == tdp.MiniBlocks()) assert.True(t, args.PeerChangesBlocks == tdp.PeerChangesBlocks()) assert.True(t, args.CurrentBlockTransactions == tdp.CurrentBlockTxs()) - assert.True(t, args.CurrentBlockValidatorInfo == tdp.CurrentBlockValidatorInfo()) + assert.True(t, args.CurrentEpochValidatorInfo == tdp.CurrentEpochValidatorInfo()) assert.True(t, args.TrieNodes == tdp.TrieNodes()) assert.True(t, args.TrieNodesChunks == tdp.TrieNodesChunks()) assert.True(t, args.SmartContracts == tdp.SmartContracts()) diff --git a/dataRetriever/errors.go b/dataRetriever/errors.go index 70f6fa40160..c1dd5323916 100644 --- a/dataRetriever/errors.go +++ b/dataRetriever/errors.go @@ -16,8 +16,8 @@ var ErrNilValue = errors.New("nil value") // ErrTxNotFoundInBlockPool signals that transaction was not found in the current block pool var ErrTxNotFoundInBlockPool = errors.New("transaction was not found in the current block pool") -// ErrValidatorInfoNotFoundInBlockPool signals that validator info was not found in the current block pool -var ErrValidatorInfoNotFoundInBlockPool = errors.New("validator 
info was not found in the current block pool") +// ErrValidatorInfoNotFoundInEpochPool signals that validator info was not found in the current epoch pool +var ErrValidatorInfoNotFoundInEpochPool = errors.New("validator info was not found in the current epoch pool") // ErrNilMarshalizer signals that an operation has been attempted to or with a nil Marshalizer implementation var ErrNilMarshalizer = errors.New("nil Marshalizer") @@ -152,8 +152,8 @@ var ErrNilTrieDataGetter = errors.New("nil trie data getter provided") // ErrNilCurrBlockTxs signals that nil current block txs holder was provided var ErrNilCurrBlockTxs = errors.New("nil current block txs holder") -// ErrNilCurrBlockValidatorInfo signals that nil current block validator info holder was provided -var ErrNilCurrBlockValidatorInfo = errors.New("nil current block validator info holder") +// ErrNilCurrEpochValidatorInfo signals that nil current epoch validator info holder was provided +var ErrNilCurrEpochValidatorInfo = errors.New("nil current epoch validator info holder") // ErrNilRequestedItemsHandler signals that a nil requested items handler was provided var ErrNilRequestedItemsHandler = errors.New("nil requested items handler") diff --git a/dataRetriever/factory/dataPoolFactory.go b/dataRetriever/factory/dataPoolFactory.go index d14d1bebc0d..3504d34f87b 100644 --- a/dataRetriever/factory/dataPoolFactory.go +++ b/dataRetriever/factory/dataPoolFactory.go @@ -130,7 +130,7 @@ func NewDataPoolFromConfig(args ArgsDataPool) (dataRetriever.PoolsHolder, error) } currBlockTransactions := dataPool.NewCurrentBlockTransactionsPool() - currBlockValidatorInfo := dataPool.NewCurrentBlockValidatorInfoPool() + currEpochValidatorInfo := dataPool.NewCurrentEpochValidatorInfoPool() dataPoolArgs := dataPool.DataPoolArgs{ Transactions: txPool, UnsignedTransactions: uTxPool, @@ -141,7 +141,7 @@ func NewDataPoolFromConfig(args ArgsDataPool) (dataRetriever.PoolsHolder, error) TrieNodes: adaptedTrieNodesStorage, TrieNodesChunks: 
trieNodesChunks, CurrentBlockTransactions: currBlockTransactions, - CurrentBlockValidatorInfo: currBlockValidatorInfo, + CurrentEpochValidatorInfo: currEpochValidatorInfo, SmartContracts: smartContracts, ValidatorsInfo: validatorsInfo, } diff --git a/dataRetriever/interface.go b/dataRetriever/interface.go index ee639fbf4f1..5d6f81d80dc 100644 --- a/dataRetriever/interface.go +++ b/dataRetriever/interface.go @@ -316,7 +316,7 @@ type TransactionCacher interface { IsInterfaceNil() bool } -// ValidatorInfoCacher defines the methods for the local validator info cacher, needed for the current block +// ValidatorInfoCacher defines the methods for the local validator info cacher, needed for the current epoch type ValidatorInfoCacher interface { Clean() GetValidatorInfo(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) @@ -336,7 +336,7 @@ type PoolsHolder interface { TrieNodesChunks() storage.Cacher SmartContracts() storage.Cacher CurrentBlockTxs() TransactionCacher - CurrentBlockValidatorInfo() ValidatorInfoCacher + CurrentEpochValidatorInfo() ValidatorInfoCacher ValidatorsInfo() ShardedDataCacherNotifier IsInterfaceNil() bool } diff --git a/dataRetriever/mock/validatorInfoForCurrentBlockMock.go b/dataRetriever/mock/validatorInfoForCurrentEpochMock.go similarity index 70% rename from dataRetriever/mock/validatorInfoForCurrentBlockMock.go rename to dataRetriever/mock/validatorInfoForCurrentEpochMock.go index 3162cc1d612..cc9adfcf464 100644 --- a/dataRetriever/mock/validatorInfoForCurrentBlockMock.go +++ b/dataRetriever/mock/validatorInfoForCurrentEpochMock.go @@ -4,22 +4,22 @@ import ( "github.com/ElrondNetwork/elrond-go/state" ) -// ValidatorInfoForCurrentBlockStub - -type ValidatorInfoForCurrentBlockStub struct { +// ValidatorInfoForCurrentEpochStub - +type ValidatorInfoForCurrentEpochStub struct { CleanCalled func() GetValidatorInfoCalled func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) AddValidatorInfoCalled func(validatorInfoHash []byte, 
validatorInfo *state.ShardValidatorInfo) } // Clean - -func (t *ValidatorInfoForCurrentBlockStub) Clean() { +func (t *ValidatorInfoForCurrentEpochStub) Clean() { if t.CleanCalled != nil { t.CleanCalled() } } // GetValidatorInfo - -func (v *ValidatorInfoForCurrentBlockStub) GetValidatorInfo(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { +func (v *ValidatorInfoForCurrentEpochStub) GetValidatorInfo(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { if v.GetValidatorInfoCalled != nil { return v.GetValidatorInfoCalled(validatorInfoHash) } @@ -27,13 +27,13 @@ func (v *ValidatorInfoForCurrentBlockStub) GetValidatorInfo(validatorInfoHash [] } // AddValidatorInfo - -func (v *ValidatorInfoForCurrentBlockStub) AddValidatorInfo(validatorInfoHash []byte, validatorInfo *state.ShardValidatorInfo) { +func (v *ValidatorInfoForCurrentEpochStub) AddValidatorInfo(validatorInfoHash []byte, validatorInfo *state.ShardValidatorInfo) { if v.AddValidatorInfoCalled != nil { v.AddValidatorInfoCalled(validatorInfoHash, validatorInfo) } } // IsInterfaceNil - -func (v *ValidatorInfoForCurrentBlockStub) IsInterfaceNil() bool { +func (v *ValidatorInfoForCurrentEpochStub) IsInterfaceNil() bool { return v == nil } diff --git a/epochStart/bootstrap/disabled/disabledEpochStartNotifier.go b/epochStart/bootstrap/disabled/disabledEpochStartNotifier.go index caf898fb359..01f413e9a28 100644 --- a/epochStart/bootstrap/disabled/disabledEpochStartNotifier.go +++ b/epochStart/bootstrap/disabled/disabledEpochStartNotifier.go @@ -23,7 +23,7 @@ func (desn *EpochStartNotifier) UnregisterHandler(_ epochStart.ActionHandler) { } // NotifyAllPrepare - -func (desn *EpochStartNotifier) NotifyAllPrepare(_ data.HeaderHandler, _ data.BodyHandler) { +func (desn *EpochStartNotifier) NotifyAllPrepare(_ data.HeaderHandler, _ data.BodyHandler, _ epochStart.ValidatorInfoCacher) { } // NotifyAll - diff --git a/epochStart/bootstrap/interface.go b/epochStart/bootstrap/interface.go index 
f51db6484d7..21476439d11 100644 --- a/epochStart/bootstrap/interface.go +++ b/epochStart/bootstrap/interface.go @@ -2,6 +2,7 @@ package bootstrap import ( "context" + "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go-core/core" "github.com/ElrondNetwork/elrond-go-core/data" @@ -24,7 +25,7 @@ type EpochStartMetaBlockInterceptorProcessor interface { // StartInEpochNodesCoordinator defines the methods to process and save nodesCoordinator information to storage type StartInEpochNodesCoordinator interface { - EpochStartPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) + EpochStartPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) NodesCoordinatorToRegistry() *nodesCoordinator.NodesCoordinatorRegistry ShardIdForEpoch(epoch uint32) (uint32, error) IsInterfaceNil() bool diff --git a/epochStart/bootstrap/process_test.go b/epochStart/bootstrap/process_test.go index 6bb1c7753e2..cc30ef6b779 100644 --- a/epochStart/bootstrap/process_test.go +++ b/epochStart/bootstrap/process_test.go @@ -41,7 +41,7 @@ import ( statusHandlerMock "github.com/ElrondNetwork/elrond-go/testscommon/statusHandler" storageMocks "github.com/ElrondNetwork/elrond-go/testscommon/storage" "github.com/ElrondNetwork/elrond-go/testscommon/syncer" - validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacherMock" + "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/ElrondNetwork/elrond-go/trie/factory" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -1736,7 +1736,7 @@ func TestRequestAndProcessing(t *testing.T) { HeadersCalled: func() dataRetriever.HeadersPool { return &mock.HeadersCacherStub{} }, - CurrBlockValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { return &validatorInfoCacherMock.ValidatorInfoCacherMock{} }, } @@ -1806,7 
+1806,7 @@ func TestRequestAndProcessing(t *testing.T) { HeadersCalled: func() dataRetriever.HeadersPool { return &mock.HeadersCacherStub{} }, - CurrBlockValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { return &validatorInfoCacherMock.ValidatorInfoCacherMock{} }, } @@ -1964,7 +1964,7 @@ func TestEpochStartBootstrap_WithDisabledShardIDAsObserver(t *testing.T) { TrieNodesCalled: func() storage.Cacher { return testscommon.NewCacherStub() }, - CurrBlockValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { return &validatorInfoCacherMock.ValidatorInfoCacherMock{} }, } diff --git a/epochStart/bootstrap/syncValidatorStatus.go b/epochStart/bootstrap/syncValidatorStatus.go index 4115f18ae8b..19cf6baaf85 100644 --- a/epochStart/bootstrap/syncValidatorStatus.go +++ b/epochStart/bootstrap/syncValidatorStatus.go @@ -111,7 +111,6 @@ func NewSyncValidatorStatus(args ArgsNewSyncValidatorStatus) (*syncValidatorStat ChanStopNode: args.ChanNodeStop, NodeTypeProvider: args.NodeTypeProvider, IsFullArchive: args.IsFullArchive, - ValidatorInfoCacher: args.DataPool.CurrentBlockValidatorInfo(), } baseNodesCoordinator, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argsNodesCoordinator) if err != nil { @@ -170,7 +169,7 @@ func (s *syncValidatorStatus) processValidatorChangesFor(metaBlock data.HeaderHa if err != nil { return err } - s.nodeCoordinator.EpochStartPrepare(metaBlock, blockBody) + s.nodeCoordinator.EpochStartPrepare(metaBlock, blockBody, s.dataPool.CurrentEpochValidatorInfo()) return nil } diff --git a/epochStart/bootstrap/syncValidatorStatus_test.go b/epochStart/bootstrap/syncValidatorStatus_test.go index b8e77ce8b91..f617a51f62e 100644 --- a/epochStart/bootstrap/syncValidatorStatus_test.go +++ b/epochStart/bootstrap/syncValidatorStatus_test.go @@ -2,6 +2,7 @@ package bootstrap import ( "context" + 
"github.com/ElrondNetwork/elrond-go/epochStart" "testing" "github.com/ElrondNetwork/elrond-go-core/core" @@ -19,7 +20,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" "github.com/ElrondNetwork/elrond-go/testscommon/shardingMocks" - validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacherMock" + "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -138,7 +139,7 @@ func TestSyncValidatorStatus_processValidatorChangesFor(t *testing.T) { wasCalled := false svs.nodeCoordinator = &shardingMocks.NodesCoordinatorStub{ - EpochStartPrepareCalled: func(metaHdr data.HeaderHandler, body data.BodyHandler) { + EpochStartPrepareCalled: func(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) { wasCalled = true assert.Equal(t, metaBlock, metaHdr) assert.Equal(t, expectedBody, body) @@ -247,7 +248,7 @@ func getSyncValidatorStatusArgs() ArgsNewSyncValidatorStatus { MiniBlocksCalled: func() storage.Cacher { return testscommon.NewCacherStub() }, - CurrBlockValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { return &validatorInfoCacherMock.ValidatorInfoCacherMock{} }, }, diff --git a/epochStart/interface.go b/epochStart/interface.go index 441262997a0..c80341ccdf2 100644 --- a/epochStart/interface.go +++ b/epochStart/interface.go @@ -65,7 +65,7 @@ type RequestHandler interface { // ActionHandler defines the action taken on epoch start event type ActionHandler interface { EpochStartAction(hdr data.HeaderHandler) - EpochStartPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) + EpochStartPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher ValidatorInfoCacher) NotifyOrder() uint32 } @@ -79,7 +79,7 
@@ type RegistrationHandler interface { // Notifier defines which actions should be done for handling new epoch's events type Notifier interface { NotifyAll(hdr data.HeaderHandler) - NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) + NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher ValidatorInfoCacher) NotifyEpochChangeConfirmed(epoch uint32) IsInterfaceNil() bool } @@ -154,7 +154,7 @@ type TransactionCacher interface { IsInterfaceNil() bool } -// ValidatorInfoCacher defines the methods for the local validator info cacher, needed for the current block +// ValidatorInfoCacher defines the methods for the local validator info cacher, needed for the current epoch type ValidatorInfoCacher interface { GetValidatorInfo(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) AddValidatorInfo(validatorInfoHash []byte, validatorInfo *state.ShardValidatorInfo) diff --git a/epochStart/metachain/trigger.go b/epochStart/metachain/trigger.go index 688c490c2d4..bd021b556ca 100644 --- a/epochStart/metachain/trigger.go +++ b/epochStart/metachain/trigger.go @@ -46,6 +46,7 @@ type ArgsNewMetaEpochStartTrigger struct { Hasher hashing.Hasher Storage dataRetriever.StorageService AppStatusHandler core.AppStatusHandler + DataPool dataRetriever.PoolsHolder } type trigger struct { @@ -69,6 +70,7 @@ type trigger struct { marshaller marshal.Marshalizer hasher hashing.Hasher appStatusHandler core.AppStatusHandler + validatorInfoPool epochStart.ValidatorInfoCacher } // NewEpochStartTrigger creates a trigger for start of epoch @@ -103,6 +105,12 @@ func NewEpochStartTrigger(args *ArgsNewMetaEpochStartTrigger) (*trigger, error) if check.IfNil(args.AppStatusHandler) { return nil, epochStart.ErrNilStatusHandler } + if check.IfNil(args.DataPool) { + return nil, epochStart.ErrNilDataPoolsHolder + } + if check.IfNil(args.DataPool.CurrentEpochValidatorInfo()) { + return nil, epochStart.ErrNilValidatorInfo + } triggerStorage := 
args.Storage.GetStorer(dataRetriever.BootstrapUnit) if check.IfNil(triggerStorage) { @@ -133,6 +141,7 @@ func NewEpochStartTrigger(args *ArgsNewMetaEpochStartTrigger) (*trigger, error) epochStartMeta: &block.MetaBlock{}, appStatusHandler: args.AppStatusHandler, nextEpochStartRound: disabledRoundForForceEpochStart, + validatorInfoPool: args.DataPool.CurrentEpochValidatorInfo(), } err := trig.saveState(trig.triggerStateKey) @@ -247,7 +256,7 @@ func (t *trigger) SetProcessed(header data.HeaderHandler, body data.BodyHandler) t.epochStartMeta = metaBlock t.epochStartMetaHash = metaHash - t.epochStartNotifier.NotifyAllPrepare(metaBlock, body) + t.epochStartNotifier.NotifyAllPrepare(metaBlock, body, t.validatorInfoPool) t.epochStartNotifier.NotifyAll(metaBlock) t.saveCurrentState(metaBlock.Round) diff --git a/epochStart/metachain/triggerRegistry_test.go b/epochStart/metachain/triggerRegistry_test.go index c47fc5f1ad1..9e0435286f0 100644 --- a/epochStart/metachain/triggerRegistry_test.go +++ b/epochStart/metachain/triggerRegistry_test.go @@ -34,6 +34,7 @@ func cloneTrigger(t *trigger) *trigger { rt.hasher = t.hasher rt.appStatusHandler = t.appStatusHandler rt.nextEpochStartRound = t.nextEpochStartRound + rt.validatorInfoPool = t.validatorInfoPool return rt } diff --git a/epochStart/metachain/trigger_test.go b/epochStart/metachain/trigger_test.go index aca61fa3be6..a48161364a7 100644 --- a/epochStart/metachain/trigger_test.go +++ b/epochStart/metachain/trigger_test.go @@ -2,6 +2,8 @@ package metachain import ( "errors" + dataRetrieverMock "github.com/ElrondNetwork/elrond-go/testscommon/dataRetriever" + validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "math" "math/big" "testing" @@ -52,6 +54,11 @@ func createMockEpochStartTriggerArguments() *ArgsNewMetaEpochStartTrigger { } }, }, + DataPool: &dataRetrieverMock.PoolsHolderStub{ + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return 
&validatorInfoCacherMock.ValidatorInfoCacherMock{} + }, + }, } } diff --git a/epochStart/metachain/validators.go b/epochStart/metachain/validators.go index 270d9c9c42a..06e2d3d4dd2 100644 --- a/epochStart/metachain/validators.go +++ b/epochStart/metachain/validators.go @@ -62,12 +62,12 @@ func NewValidatorInfoCreator(args ArgsNewValidatorInfoCreator) (*validatorInfoCr if check.IfNil(args.DataPool) { return nil, epochStart.ErrNilDataPoolsHolder } - if check.IfNil(args.DataPool.CurrentBlockValidatorInfo()) { + if check.IfNil(args.DataPool.CurrentEpochValidatorInfo()) { return nil, epochStart.ErrNilValidatorInfo } - //TODO: currValidatorInfoCache := dataPool.NewCurrentBlockValidatorInfoPool() should be replaced by - //args.DataPool.CurrentBlockValidatorInfo(), as this pool is already created + //TODO: currValidatorInfoCache := dataPool.NewCurrentEpochValidatorInfoPool() should be replaced by + //args.DataPool.CurrentEpochValidatorInfo(), as this pool is already created vic := &validatorInfoCreator{ shardCoordinator: args.ShardCoordinator, hasher: args.Hasher, @@ -135,7 +135,7 @@ func (vic *validatorInfoCreator) createMiniBlock(validatorsInfo []*state.Validat return bytes.Compare(validatorCopy[a].PublicKey, validatorCopy[b].PublicKey) < 0 }) - currentBlockValidatorInfo := vic.dataPool.CurrentBlockValidatorInfo() + currentEpochValidatorInfo := vic.dataPool.CurrentEpochValidatorInfo() for index, validator := range validatorCopy { shardValidatorInfo := createShardValidatorInfo(validator) shardValidatorInfoHash, err := core.CalculateHash(vic.marshalizer, vic.hasher, shardValidatorInfo) @@ -143,7 +143,7 @@ func (vic *validatorInfoCreator) createMiniBlock(validatorsInfo []*state.Validat return nil, err } - currentBlockValidatorInfo.AddValidatorInfo(shardValidatorInfoHash, shardValidatorInfo) + currentEpochValidatorInfo.AddValidatorInfo(shardValidatorInfoHash, shardValidatorInfo) miniBlock.TxHashes[index] = shardValidatorInfoHash } @@ -215,9 +215,9 @@ func (vic 
*validatorInfoCreator) VerifyValidatorInfoMiniBlocks( return nil } -// GetLocalValidatorInfoCache returns the local validator info cache which holds all the validator info for the current block +// GetLocalValidatorInfoCache returns the local validator info cache which holds all the validator info for the current epoch func (vic *validatorInfoCreator) GetLocalValidatorInfoCache() epochStart.ValidatorInfoCacher { - return vic.dataPool.CurrentBlockValidatorInfo() + return vic.dataPool.CurrentEpochValidatorInfo() } // CreateMarshalledData creates the marshalled data to be sent to shards @@ -227,7 +227,7 @@ func (vic *validatorInfoCreator) CreateMarshalledData(body *block.Body) map[stri } marshalledValidatorInfoTxs := make(map[string][][]byte) - currentBlockValidatorInfo := vic.dataPool.CurrentBlockValidatorInfo() + currentEpochValidatorInfo := vic.dataPool.CurrentEpochValidatorInfo() for _, miniBlock := range body.MiniBlocks { if miniBlock.Type != block.PeerBlock { @@ -244,7 +244,7 @@ func (vic *validatorInfoCreator) CreateMarshalledData(body *block.Body) map[stri } for _, txHash := range miniBlock.TxHashes { - validatorInfoTx, err := currentBlockValidatorInfo.GetValidatorInfo(txHash) + validatorInfoTx, err := currentEpochValidatorInfo.GetValidatorInfo(txHash) if err != nil { log.Error("validatorInfoCreator.CreateMarshalledData.GetValidatorInfo", "hash", txHash, "error", err) continue @@ -267,17 +267,17 @@ func (vic *validatorInfoCreator) CreateMarshalledData(body *block.Body) map[stri return marshalledValidatorInfoTxs } -// GetValidatorInfoTxs returns validator info txs for the current block +// GetValidatorInfoTxs returns validator info txs for the current epoch func (vic *validatorInfoCreator) GetValidatorInfoTxs(body *block.Body) map[string]*state.ShardValidatorInfo { validatorInfoTxs := make(map[string]*state.ShardValidatorInfo) - currentBlockValidatorInfo := vic.dataPool.CurrentBlockValidatorInfo() + currentEpochValidatorInfo := 
vic.dataPool.CurrentEpochValidatorInfo() for _, miniBlock := range body.MiniBlocks { if miniBlock.Type != block.PeerBlock { continue } for _, txHash := range miniBlock.TxHashes { - validatorInfoTx, err := currentBlockValidatorInfo.GetValidatorInfo(txHash) + validatorInfoTx, err := currentEpochValidatorInfo.GetValidatorInfo(txHash) if err != nil { continue } @@ -298,7 +298,7 @@ func (vic *validatorInfoCreator) SaveBlockDataToStorage(_ data.HeaderHandler, bo var validatorInfo *state.ShardValidatorInfo var marshalledData []byte var err error - currentBlockValidatorInfo := vic.dataPool.CurrentBlockValidatorInfo() + currentEpochValidatorInfo := vic.dataPool.CurrentEpochValidatorInfo() for _, miniBlock := range body.MiniBlocks { if miniBlock.Type != block.PeerBlock { @@ -306,7 +306,7 @@ func (vic *validatorInfoCreator) SaveBlockDataToStorage(_ data.HeaderHandler, bo } for _, validatorInfoHash := range miniBlock.TxHashes { - validatorInfo, err = currentBlockValidatorInfo.GetValidatorInfo(validatorInfoHash) + validatorInfo, err = currentEpochValidatorInfo.GetValidatorInfo(validatorInfoHash) if err != nil { continue } @@ -388,8 +388,8 @@ func (vic *validatorInfoCreator) RemoveBlockDataFromPools(metaBlock data.HeaderH } func (vic *validatorInfoCreator) clean() { - currentBlockValidatorInfo := vic.dataPool.CurrentBlockValidatorInfo() - currentBlockValidatorInfo.Clean() + currentEpochValidatorInfo := vic.dataPool.CurrentEpochValidatorInfo() + currentEpochValidatorInfo.Clean() } // IsInterfaceNil return true if underlying object is nil diff --git a/epochStart/metachain/validators_test.go b/epochStart/metachain/validators_test.go index 99adeefb9d3..00c956f6d1f 100644 --- a/epochStart/metachain/validators_test.go +++ b/epochStart/metachain/validators_test.go @@ -19,7 +19,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon" dataRetrieverMock "github.com/ElrondNetwork/elrond-go/testscommon/dataRetriever" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" - 
validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacherMock" + validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/stretchr/testify/require" ) @@ -125,7 +125,7 @@ func createMockEpochValidatorInfoCreatorsArguments() ArgsNewValidatorInfoCreator RemoveCalled: func(key []byte) {}, } }, - CurrBlockValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { return &validatorInfoCacherMock.ValidatorInfoCacherMock{} }, }, diff --git a/epochStart/mock/epochStartNotifierStub.go b/epochStart/mock/epochStartNotifierStub.go index c8599c685f2..5f7c2f7b565 100644 --- a/epochStart/mock/epochStartNotifierStub.go +++ b/epochStart/mock/epochStartNotifierStub.go @@ -1,11 +1,14 @@ package mock -import "github.com/ElrondNetwork/elrond-go-core/data" +import ( + "github.com/ElrondNetwork/elrond-go-core/data" + "github.com/ElrondNetwork/elrond-go/epochStart" +) // EpochStartNotifierStub - type EpochStartNotifierStub struct { NotifyAllCalled func(hdr data.HeaderHandler) - NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler) + NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) NotifyEpochChangeConfirmedCalled func(epoch uint32) } @@ -24,9 +27,9 @@ func (esnm *EpochStartNotifierStub) NotifyAll(hdr data.HeaderHandler) { } // NotifyAllPrepare - -func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) { +func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) { if esnm.NotifyAllPrepareCalled != nil { - esnm.NotifyAllPrepareCalled(metaHdr, body) + esnm.NotifyAllPrepareCalled(metaHdr, body, validatorInfoCacher) } } diff --git a/epochStart/notifier/common.go b/epochStart/notifier/common.go index 
cd2b48ccdbd..ec0d2cd8ee3 100644 --- a/epochStart/notifier/common.go +++ b/epochStart/notifier/common.go @@ -31,7 +31,7 @@ func NewHandlerForEpochStart( // EpochStartPrepare will notify the subscriber to prepare for a start of epoch. // The event can be triggered multiple times -func (hs *handlerStruct) EpochStartPrepare(metaHdr data.HeaderHandler, _ data.BodyHandler) { +func (hs *handlerStruct) EpochStartPrepare(metaHdr data.HeaderHandler, _ data.BodyHandler, _ epochStart.ValidatorInfoCacher) { if hs.act != nil { hs.prepare(metaHdr) } diff --git a/epochStart/notifier/epochStartSubscriptionHandler.go b/epochStart/notifier/epochStartSubscriptionHandler.go index bf87a3b8f95..cb6a5ff68a3 100644 --- a/epochStart/notifier/epochStartSubscriptionHandler.go +++ b/epochStart/notifier/epochStartSubscriptionHandler.go @@ -13,7 +13,7 @@ type EpochStartNotifier interface { RegisterHandler(handler epochStart.ActionHandler) UnregisterHandler(handler epochStart.ActionHandler) NotifyAll(hdr data.HeaderHandler) - NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) + NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) NotifyEpochChangeConfirmed(epoch uint32) RegisterForEpochChangeConfirmed(handler func(epoch uint32)) IsInterfaceNil() bool @@ -75,7 +75,7 @@ func (essh *epochStartSubscriptionHandler) NotifyAll(hdr data.HeaderHandler) { // NotifyAllPrepare will call all the subscribed clients to notify them that an epoch change block has been // observed, but not yet confirmed/committed. 
Some components may need to do some initialisation/preparation -func (essh *epochStartSubscriptionHandler) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) { +func (essh *epochStartSubscriptionHandler) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) { essh.mutEpochStartHandler.RLock() sort.Slice(essh.epochStartHandlers, func(i, j int) bool { @@ -83,7 +83,7 @@ func (essh *epochStartSubscriptionHandler) NotifyAllPrepare(metaHdr data.HeaderH }) for i := 0; i < len(essh.epochStartHandlers); i++ { - essh.epochStartHandlers[i].EpochStartPrepare(metaHdr, body) + essh.epochStartHandlers[i].EpochStartPrepare(metaHdr, body, validatorInfoCacher) } essh.mutEpochStartHandler.RUnlock() } diff --git a/epochStart/shardchain/trigger.go b/epochStart/shardchain/trigger.go index acb3b570990..18b9bfe6dfc 100644 --- a/epochStart/shardchain/trigger.go +++ b/epochStart/shardchain/trigger.go @@ -78,6 +78,7 @@ type trigger struct { headersPool dataRetriever.HeadersPool miniBlocksPool storage.Cacher + validatorInfoPool epochStart.ValidatorInfoCacher shardHdrStorage storage.Storer metaHdrStorage storage.Storer triggerStorage storage.Storer @@ -161,6 +162,12 @@ func NewEpochStartTrigger(args *ArgsShardEpochStartTrigger) (*trigger, error) { if check.IfNil(args.DataPool.Headers()) { return nil, epochStart.ErrNilMetaBlocksPool } + if check.IfNil(args.DataPool.MiniBlocks()) { + return nil, epochStart.ErrNilMiniBlockPool + } + if check.IfNil(args.DataPool.CurrentEpochValidatorInfo()) { + return nil, epochStart.ErrNilValidatorInfo + } if check.IfNil(args.PeerMiniBlocksSyncer) { return nil, epochStart.ErrNilValidatorInfoProcessor } @@ -217,6 +224,7 @@ func NewEpochStartTrigger(args *ArgsShardEpochStartTrigger) (*trigger, error) { mapFinalizedEpochs: make(map[uint32]string), headersPool: args.DataPool.Headers(), miniBlocksPool: args.DataPool.MiniBlocks(), + validatorInfoPool: 
args.DataPool.CurrentEpochValidatorInfo(), metaHdrStorage: metaHdrStorage, shardHdrStorage: shardHdrStorage, triggerStorage: triggerStorage, @@ -651,7 +659,7 @@ func (t *trigger) checkIfTriggerCanBeActivated(hash string, metaHdr data.HeaderH return false, 0 } - t.epochStartNotifier.NotifyAllPrepare(metaHdr, blockBody) + t.epochStartNotifier.NotifyAllPrepare(metaHdr, blockBody, t.validatorInfoPool) isMetaHdrFinal, finalityAttestingRound := t.isMetaBlockFinal(hash, metaHdr) return isMetaHdrFinal, finalityAttestingRound diff --git a/epochStart/shardchain/triggerRegistry_test.go b/epochStart/shardchain/triggerRegistry_test.go index 78b3ca88673..90c36dd5d40 100644 --- a/epochStart/shardchain/triggerRegistry_test.go +++ b/epochStart/shardchain/triggerRegistry_test.go @@ -47,6 +47,7 @@ func cloneTrigger(t *trigger) *trigger { rt.peerMiniBlocksSyncer = t.peerMiniBlocksSyncer rt.appStatusHandler = t.appStatusHandler rt.miniBlocksPool = t.miniBlocksPool + rt.validatorInfoPool = t.validatorInfoPool rt.mapMissingMiniblocks = t.mapMissingMiniblocks rt.mapFinalizedEpochs = t.mapFinalizedEpochs rt.roundHandler = t.roundHandler diff --git a/epochStart/shardchain/trigger_test.go b/epochStart/shardchain/trigger_test.go index ac291dacdbc..6645721d5eb 100644 --- a/epochStart/shardchain/trigger_test.go +++ b/epochStart/shardchain/trigger_test.go @@ -3,6 +3,7 @@ package shardchain import ( "bytes" "fmt" + validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "testing" "github.com/ElrondNetwork/elrond-go-core/core" @@ -39,6 +40,9 @@ func createMockShardEpochStartTriggerArguments() *ArgsShardEpochStartTrigger { MiniBlocksCalled: func() storage.Cacher { return testscommon.NewCacherStub() }, + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &validatorInfoCacherMock.ValidatorInfoCacherMock{} + }, }, Storage: &mock.ChainStorerStub{ GetStorerCalled: func(unitType dataRetriever.UnitType) storage.Storer { @@ -397,6 +401,9 
@@ func TestTrigger_ReceivedHeaderIsEpochStartTrueWithPeerMiniblocks(t *testing.T) }, } }, + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &validatorInfoCacherMock.ValidatorInfoCacherMock{} + }, } args.Uint64Converter = &mock.Uint64ByteSliceConverterMock{ ToByteSliceCalled: func(u uint64) []byte { diff --git a/factory/interface.go b/factory/interface.go index 58b59bd4134..463a390e01f 100644 --- a/factory/interface.go +++ b/factory/interface.go @@ -42,7 +42,7 @@ type EpochStartNotifier interface { RegisterHandler(handler epochStart.ActionHandler) UnregisterHandler(handler epochStart.ActionHandler) NotifyAll(hdr data.HeaderHandler) - NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) + NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) NotifyEpochChangeConfirmed(epoch uint32) IsInterfaceNil() bool } diff --git a/factory/mock/epochStartNotifierStub.go b/factory/mock/epochStartNotifierStub.go index 4eb9a938a3d..27b18094eac 100644 --- a/factory/mock/epochStartNotifierStub.go +++ b/factory/mock/epochStartNotifierStub.go @@ -9,7 +9,7 @@ import ( type EpochStartNotifierStub struct { RegisterHandlerCalled func(handler epochStart.ActionHandler) UnregisterHandlerCalled func(handler epochStart.ActionHandler) - NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler) + NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) NotifyAllCalled func(hdr data.HeaderHandler) NotifyEpochChangeConfirmedCalled func(epoch uint32) } @@ -29,9 +29,9 @@ func (esnm *EpochStartNotifierStub) UnregisterHandler(handler epochStart.ActionH } // NotifyAllPrepare - -func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) { +func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher 
epochStart.ValidatorInfoCacher) { if esnm.NotifyAllPrepareCalled != nil { - esnm.NotifyAllPrepareCalled(metaHdr, body) + esnm.NotifyAllPrepareCalled(metaHdr, body, validatorInfoCacher) } } diff --git a/factory/processComponents.go b/factory/processComponents.go index 74026921159..1e469ce82c7 100644 --- a/factory/processComponents.go +++ b/factory/processComponents.go @@ -711,6 +711,7 @@ func (pcf *processComponentsFactory) newEpochStartTrigger(requestHandler epochSt Marshalizer: pcf.coreData.InternalMarshalizer(), Hasher: pcf.coreData.Hasher(), AppStatusHandler: pcf.coreData.StatusHandler(), + DataPool: pcf.data.Datapool(), } epochStartTrigger, err := metachain.NewEpochStartTrigger(argEpochStart) if err != nil { diff --git a/factory/shardingFactory.go b/factory/shardingFactory.go index f72a6b2f978..df141564a07 100644 --- a/factory/shardingFactory.go +++ b/factory/shardingFactory.go @@ -105,7 +105,6 @@ func CreateNodesCoordinator( waitingListFixEnabledEpoch uint32, chanNodeStop chan endProcess.ArgEndProcess, nodeTypeProvider core.NodeTypeProviderHandler, - validatorInfoCacher epochStart.ValidatorInfoCacher, ) (nodesCoordinator.NodesCoordinator, error) { if chanNodeStop == nil { return nil, nodesCoordinator.ErrNilNodeStopChannel @@ -195,7 +194,6 @@ func CreateNodesCoordinator( ChanStopNode: chanNodeStop, NodeTypeProvider: nodeTypeProvider, IsFullArchive: prefsConfig.FullArchive, - ValidatorInfoCacher: validatorInfoCacher, } baseNodesCoordinator, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/heartbeat/mock/epochStartNotifier.go b/heartbeat/mock/epochStartNotifier.go index 157bd99020b..3c3679c9737 100644 --- a/heartbeat/mock/epochStartNotifier.go +++ b/heartbeat/mock/epochStartNotifier.go @@ -10,7 +10,7 @@ type EpochStartNotifierStub struct { RegisterHandlerCalled func(handler epochStart.ActionHandler) UnregisterHandlerCalled func(handler epochStart.ActionHandler) NotifyAllCalled func(hdr data.HeaderHandler) - 
NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler) + NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) epochStartHdls []epochStart.ActionHandler } @@ -38,13 +38,13 @@ func (esnm *EpochStartNotifierStub) UnregisterHandler(handler epochStart.ActionH } // NotifyAllPrepare - -func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) { +func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) { if esnm.NotifyAllPrepareCalled != nil { - esnm.NotifyAllPrepareCalled(metaHdr, body) + esnm.NotifyAllPrepareCalled(metaHdr, body, validatorInfoCacher) } for _, hdl := range esnm.epochStartHdls { - hdl.EpochStartPrepare(metaHdr, body) + hdl.EpochStartPrepare(metaHdr, body, validatorInfoCacher) } } diff --git a/integrationTests/consensus/testInitializer.go b/integrationTests/consensus/testInitializer.go index 2959c2f4b67..7d3b2cdadda 100644 --- a/integrationTests/consensus/testInitializer.go +++ b/integrationTests/consensus/testInitializer.go @@ -25,7 +25,6 @@ import ( "github.com/ElrondNetwork/elrond-go/consensus/round" "github.com/ElrondNetwork/elrond-go/dataRetriever" "github.com/ElrondNetwork/elrond-go/dataRetriever/blockchain" - "github.com/ElrondNetwork/elrond-go/dataRetriever/dataPool" "github.com/ElrondNetwork/elrond-go/epochStart/metachain" "github.com/ElrondNetwork/elrond-go/epochStart/notifier" mainFactory "github.com/ElrondNetwork/elrond-go/factory" @@ -334,6 +333,8 @@ func createConsensusOnlyNode( syncer, 0) + dataPool := dataRetrieverMock.CreatePoolsHolder(1, 0) + argsNewMetaEpochStart := &metachain.ArgsNewMetaEpochStartTrigger{ GenesisTime: time.Unix(startTime, 0), EpochStartNotifier: notifier.NewEpochStartSubscriptionHandler(), @@ -346,6 +347,7 @@ func createConsensusOnlyNode( Marshalizer: testMarshalizer, Hasher: 
testHasher, AppStatusHandler: &statusHandlerMock.AppStatusHandlerStub{}, + DataPool: dataPool, } epochStartTrigger, _ := metachain.NewEpochStartTrigger(argsNewMetaEpochStart) @@ -440,7 +442,7 @@ func createConsensusOnlyNode( dataComponents := integrationTests.GetDefaultDataComponents() dataComponents.BlockChain = blockChain - dataComponents.DataPool = dataRetrieverMock.CreatePoolsHolder(1, 0) + dataComponents.DataPool = dataPool dataComponents.Store = createTestStore() stateComponents := integrationTests.GetDefaultStateComponents() @@ -510,7 +512,6 @@ func createNodes( epochStartRegistrationHandler := notifier.NewEpochStartSubscriptionHandler() bootStorer := integrationTests.CreateMemUnit() consensusCache, _ := lrucache.NewCache(10000) - validatorInfoCacher := dataPool.NewCurrentBlockValidatorInfoPool() argumentsNodesCoordinator := nodesCoordinator.ArgNodesCoordinator{ ShardConsensusGroupSize: consensusSize, @@ -530,7 +531,6 @@ func createNodes( ChanStopNode: endProcess.GetDummyEndProcessChannel(), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, - ValidatorInfoCacher: validatorInfoCacher, } nodesCoord, _ := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/integrationTests/factory/consensusComponents/consensusComponents_test.go b/integrationTests/factory/consensusComponents/consensusComponents_test.go index fdd2ac44aa7..5c74cfdec98 100644 --- a/integrationTests/factory/consensusComponents/consensusComponents_test.go +++ b/integrationTests/factory/consensusComponents/consensusComponents_test.go @@ -65,7 +65,6 @@ func TestConsensusComponents_Close_ShouldWork(t *testing.T) { configs.EpochConfig.EnableEpochs.WaitingListFixEnableEpoch, managedCoreComponents.ChanStopNodeProcess(), managedCoreComponents.NodeTypeProvider(), - managedDataComponents.Datapool().CurrentBlockValidatorInfo(), ) require.Nil(t, err) managedStatusComponents, err := nr.CreateManagedStatusComponents( diff --git 
a/integrationTests/factory/processComponents/processComponents_test.go b/integrationTests/factory/processComponents/processComponents_test.go index cabdd5ec8fa..3f0371137f7 100644 --- a/integrationTests/factory/processComponents/processComponents_test.go +++ b/integrationTests/factory/processComponents/processComponents_test.go @@ -66,7 +66,6 @@ func TestProcessComponents_Close_ShouldWork(t *testing.T) { configs.EpochConfig.EnableEpochs.WaitingListFixEnableEpoch, managedCoreComponents.ChanStopNodeProcess(), managedCoreComponents.NodeTypeProvider(), - managedDataComponents.Datapool().CurrentBlockValidatorInfo(), ) require.Nil(t, err) managedStatusComponents, err := nr.CreateManagedStatusComponents( diff --git a/integrationTests/factory/statusComponents/statusComponents_test.go b/integrationTests/factory/statusComponents/statusComponents_test.go index 2e58c92a1fb..30da3113aad 100644 --- a/integrationTests/factory/statusComponents/statusComponents_test.go +++ b/integrationTests/factory/statusComponents/statusComponents_test.go @@ -66,7 +66,6 @@ func TestStatusComponents_Create_Close_ShouldWork(t *testing.T) { configs.EpochConfig.EnableEpochs.WaitingListFixEnableEpoch, managedCoreComponents.ChanStopNodeProcess(), managedCoreComponents.NodeTypeProvider(), - managedDataComponents.Datapool().CurrentBlockValidatorInfo(), ) require.Nil(t, err) managedStatusComponents, err := nr.CreateManagedStatusComponents( diff --git a/integrationTests/mock/epochStartNotifier.go b/integrationTests/mock/epochStartNotifier.go index 14a7cbc9cc6..8024eefaac4 100644 --- a/integrationTests/mock/epochStartNotifier.go +++ b/integrationTests/mock/epochStartNotifier.go @@ -10,7 +10,7 @@ type EpochStartNotifierStub struct { RegisterHandlerCalled func(handler epochStart.ActionHandler) UnregisterHandlerCalled func(handler epochStart.ActionHandler) NotifyAllCalled func(hdr data.HeaderHandler) - NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler) + NotifyAllPrepareCalled func(hdr 
data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) NotifyEpochChangeConfirmedCalled func(epoch uint32) epochStartHdls []epochStart.ActionHandler } @@ -39,13 +39,13 @@ func (esnm *EpochStartNotifierStub) UnregisterHandler(handler epochStart.ActionH } // NotifyAllPrepare - -func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) { +func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) { if esnm.NotifyAllPrepareCalled != nil { - esnm.NotifyAllPrepareCalled(metaHdr, body) + esnm.NotifyAllPrepareCalled(metaHdr, body, validatorInfoCacher) } for _, hdl := range esnm.epochStartHdls { - hdl.EpochStartPrepare(metaHdr, body) + hdl.EpochStartPrepare(metaHdr, body, validatorInfoCacher) } } diff --git a/integrationTests/nodesCoordinatorFactory.go b/integrationTests/nodesCoordinatorFactory.go index 15d40d83cec..669e294d405 100644 --- a/integrationTests/nodesCoordinatorFactory.go +++ b/integrationTests/nodesCoordinatorFactory.go @@ -2,7 +2,6 @@ package integrationTests import ( "fmt" - "github.com/ElrondNetwork/elrond-go-core/data/endProcess" "github.com/ElrondNetwork/elrond-go-core/hashing" "github.com/ElrondNetwork/elrond-go/integrationTests/mock" @@ -10,7 +9,6 @@ import ( "github.com/ElrondNetwork/elrond-go/sharding/nodesCoordinator" "github.com/ElrondNetwork/elrond-go/storage" "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" - validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacherMock" ) // ArgIndexHashedNodesCoordinatorFactory - @@ -71,7 +69,6 @@ func (tpn *IndexHashedNodesCoordinatorFactory) CreateNodesCoordinator(arg ArgInd ChanStopNode: endProcess.GetDummyEndProcessChannel(), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, - ValidatorInfoCacher: 
&validatorInfoCacherMock.ValidatorInfoCacherMock{}, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) if err != nil { @@ -124,7 +121,6 @@ func (ihncrf *IndexHashedNodesCoordinatorWithRaterFactory) CreateNodesCoordinato ChanStopNode: endProcess.GetDummyEndProcessChannel(), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } baseCoordinator, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/integrationTests/testP2PNode.go b/integrationTests/testP2PNode.go index 31443b56f20..86add1c7a78 100644 --- a/integrationTests/testP2PNode.go +++ b/integrationTests/testP2PNode.go @@ -15,7 +15,6 @@ import ( mclsig "github.com/ElrondNetwork/elrond-go-crypto/signing/mcl/singlesig" "github.com/ElrondNetwork/elrond-go/config" "github.com/ElrondNetwork/elrond-go/dataRetriever" - "github.com/ElrondNetwork/elrond-go/dataRetriever/dataPool" "github.com/ElrondNetwork/elrond-go/epochStart/notifier" "github.com/ElrondNetwork/elrond-go/factory" "github.com/ElrondNetwork/elrond-go/factory/peerSignatureHandler" @@ -330,7 +329,6 @@ func CreateNodesWithTestP2PNodes( nodesMap := make(map[uint32][]*TestP2PNode) cacherCfg := storageUnit.CacheConfig{Capacity: 10000, Type: storageUnit.LRUCache, Shards: 1} cache, _ := storageUnit.NewCache(cacherCfg) - validatorInfoCacher := dataPool.NewCurrentBlockValidatorInfoPool() for shardId, validatorList := range validatorsMap { argumentsNodesCoordinator := nodesCoordinator.ArgNodesCoordinator{ @@ -353,7 +351,6 @@ func CreateNodesWithTestP2PNodes( ChanStopNode: endProcess.GetDummyEndProcessChannel(), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, - ValidatorInfoCacher: validatorInfoCacher, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) log.LogIfError(err) @@ -399,7 +396,6 @@ 
func CreateNodesWithTestP2PNodes( ChanStopNode: endProcess.GetDummyEndProcessChannel(), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, - ValidatorInfoCacher: validatorInfoCacher, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) log.LogIfError(err) diff --git a/integrationTests/testProcessorNode.go b/integrationTests/testProcessorNode.go index f84c2c12ede..2747f11aa6d 100644 --- a/integrationTests/testProcessorNode.go +++ b/integrationTests/testProcessorNode.go @@ -1229,6 +1229,7 @@ func (tpn *TestProcessorNode) initInterceptors() { Marshalizer: TestMarshalizer, Hasher: TestHasher, AppStatusHandler: &statusHandlerMock.AppStatusHandlerStub{}, + DataPool: tpn.DataPool, } epochStartTrigger, _ := metachain.NewEpochStartTrigger(argsEpochStart) tpn.EpochStartTrigger = &metachain.TestTrigger{} @@ -2050,6 +2051,7 @@ func (tpn *TestProcessorNode) initBlockProcessor(stateCheckpointModulus uint) { Marshalizer: TestMarshalizer, Hasher: TestHasher, AppStatusHandler: &statusHandlerMock.AppStatusHandlerStub{}, + DataPool: tpn.DataPool, } epochStartTrigger, _ := metachain.NewEpochStartTrigger(argsEpochStart) tpn.EpochStartTrigger = &metachain.TestTrigger{} @@ -2499,9 +2501,9 @@ func (tpn *TestProcessorNode) ProposeBlock(round uint64, nonce uint64) (data.Bod } for _, mb := range shardBlockBody.MiniBlocks { - if mb.Type == dataBlock.PeerBlock { - continue - } + //if mb.Type == dataBlock.PeerBlock { + // continue + //} for _, hash := range mb.TxHashes { copiedHash := make([]byte, len(hash)) copy(copiedHash, hash) diff --git a/integrationTests/testProcessorNodeWithCoordinator.go b/integrationTests/testProcessorNodeWithCoordinator.go index 67588748583..3dc8c5b3e6d 100644 --- a/integrationTests/testProcessorNodeWithCoordinator.go +++ b/integrationTests/testProcessorNodeWithCoordinator.go @@ -15,7 +15,6 @@ import ( "github.com/ElrondNetwork/elrond-go-crypto/signing/mcl" multisig2 
"github.com/ElrondNetwork/elrond-go-crypto/signing/mcl/multisig" "github.com/ElrondNetwork/elrond-go-crypto/signing/multisig" - "github.com/ElrondNetwork/elrond-go/dataRetriever/dataPool" "github.com/ElrondNetwork/elrond-go/integrationTests/mock" p2pRating "github.com/ElrondNetwork/elrond-go/p2p/rating" "github.com/ElrondNetwork/elrond-go/sharding" @@ -62,7 +61,6 @@ func CreateProcessorNodesWithNodesCoordinator( pubKeys := PubKeysMapFromKeysMap(cp.Keys) validatorsMap := GenValidatorsFromPubKeys(pubKeys, nbShards) validatorsMapForNodesCoordinator, _ := nodesCoordinator.NodesInfoToValidators(validatorsMap) - validatorInfoCacher := dataPool.NewCurrentBlockValidatorInfoPool() cpWaiting := CreateCryptoParams(1, 1, nbShards) pubKeysWaiting := PubKeysMapFromKeysMap(cpWaiting.Keys) @@ -96,7 +94,6 @@ func CreateProcessorNodesWithNodesCoordinator( WaitingListFixEnabledEpoch: 0, ChanStopNode: endProcess.GetDummyEndProcessChannel(), IsFullArchive: false, - ValidatorInfoCacher: validatorInfoCacher, } nodesCoordinator, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/integrationTests/testProcessorNodeWithMultisigner.go b/integrationTests/testProcessorNodeWithMultisigner.go index 62f25ae2b8e..469022b6ee7 100644 --- a/integrationTests/testProcessorNodeWithMultisigner.go +++ b/integrationTests/testProcessorNodeWithMultisigner.go @@ -19,7 +19,6 @@ import ( mclmultisig "github.com/ElrondNetwork/elrond-go-crypto/signing/mcl/multisig" "github.com/ElrondNetwork/elrond-go-crypto/signing/multisig" "github.com/ElrondNetwork/elrond-go/common/forking" - "github.com/ElrondNetwork/elrond-go/dataRetriever/dataPool" "github.com/ElrondNetwork/elrond-go/epochStart/notifier" "github.com/ElrondNetwork/elrond-go/factory/peerSignatureHandler" "github.com/ElrondNetwork/elrond-go/integrationTests/mock" @@ -487,7 +486,6 @@ func CreateNodesWithNodesCoordinatorAndHeaderSigVerifier( pubKeys := PubKeysMapFromKeysMap(cp.Keys) validatorsMap := 
GenValidatorsFromPubKeys(pubKeys, uint32(nbShards)) validatorsMapForNodesCoordinator, _ := nodesCoordinator.NodesInfoToValidators(validatorsMap) - validatorInfoCacher := dataPool.NewCurrentBlockValidatorInfoPool() nodesMap := make(map[uint32][]*TestProcessorNode) shufflerArgs := &nodesCoordinator.NodesShufflerArgs{ @@ -530,7 +528,6 @@ func CreateNodesWithNodesCoordinatorAndHeaderSigVerifier( ChanStopNode: endProcess.GetDummyEndProcessChannel(), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, - ValidatorInfoCacher: validatorInfoCacher, } nodesCoordinator, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) @@ -591,7 +588,6 @@ func CreateNodesWithNodesCoordinatorKeygenAndSingleSigner( pubKeys := PubKeysMapFromKeysMap(cp.Keys) validatorsMap := GenValidatorsFromPubKeys(pubKeys, uint32(nbShards)) validatorsMapForNodesCoordinator, _ := nodesCoordinator.NodesInfoToValidators(validatorsMap) - validatorInfoCacher := dataPool.NewCurrentBlockValidatorInfoPool() cpWaiting := CreateCryptoParams(2, 2, uint32(nbShards)) pubKeysWaiting := PubKeysMapFromKeysMap(cpWaiting.Keys) @@ -631,7 +627,6 @@ func CreateNodesWithNodesCoordinatorKeygenAndSingleSigner( ChanStopNode: endProcess.GetDummyEndProcessChannel(), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, - ValidatorInfoCacher: validatorInfoCacher, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/node/mock/epochStartNotifier.go b/node/mock/epochStartNotifier.go index 14a7cbc9cc6..8024eefaac4 100644 --- a/node/mock/epochStartNotifier.go +++ b/node/mock/epochStartNotifier.go @@ -10,7 +10,7 @@ type EpochStartNotifierStub struct { RegisterHandlerCalled func(handler epochStart.ActionHandler) UnregisterHandlerCalled func(handler epochStart.ActionHandler) NotifyAllCalled func(hdr data.HeaderHandler) - NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler) + 
NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) NotifyEpochChangeConfirmedCalled func(epoch uint32) epochStartHdls []epochStart.ActionHandler } @@ -39,13 +39,13 @@ func (esnm *EpochStartNotifierStub) UnregisterHandler(handler epochStart.ActionH } // NotifyAllPrepare - -func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) { +func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) { if esnm.NotifyAllPrepareCalled != nil { - esnm.NotifyAllPrepareCalled(metaHdr, body) + esnm.NotifyAllPrepareCalled(metaHdr, body, validatorInfoCacher) } for _, hdl := range esnm.epochStartHdls { - hdl.EpochStartPrepare(metaHdr, body) + hdl.EpochStartPrepare(metaHdr, body, validatorInfoCacher) } } diff --git a/node/nodeRunner.go b/node/nodeRunner.go index d2639dad148..26afcddc2a1 100644 --- a/node/nodeRunner.go +++ b/node/nodeRunner.go @@ -329,7 +329,6 @@ func (nr *nodeRunner) executeOneComponentCreationCycle( configs.EpochConfig.EnableEpochs.WaitingListFixEnableEpoch, managedCoreComponents.ChanStopNodeProcess(), managedCoreComponents.NodeTypeProvider(), - managedDataComponents.Datapool().CurrentBlockValidatorInfo(), ) if err != nil { return true, err diff --git a/process/mock/epochStartNotifierStub.go b/process/mock/epochStartNotifierStub.go index 157bd99020b..3c3679c9737 100644 --- a/process/mock/epochStartNotifierStub.go +++ b/process/mock/epochStartNotifierStub.go @@ -10,7 +10,7 @@ type EpochStartNotifierStub struct { RegisterHandlerCalled func(handler epochStart.ActionHandler) UnregisterHandlerCalled func(handler epochStart.ActionHandler) NotifyAllCalled func(hdr data.HeaderHandler) - NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler) + NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher 
epochStart.ValidatorInfoCacher) epochStartHdls []epochStart.ActionHandler } @@ -38,13 +38,13 @@ func (esnm *EpochStartNotifierStub) UnregisterHandler(handler epochStart.ActionH } // NotifyAllPrepare - -func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) { +func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) { if esnm.NotifyAllPrepareCalled != nil { - esnm.NotifyAllPrepareCalled(metaHdr, body) + esnm.NotifyAllPrepareCalled(metaHdr, body, validatorInfoCacher) } for _, hdl := range esnm.epochStartHdls { - hdl.EpochStartPrepare(metaHdr, body) + hdl.EpochStartPrepare(metaHdr, body, validatorInfoCacher) } } diff --git a/sharding/mock/epochStartNotifierStub.go b/sharding/mock/epochStartNotifierStub.go index c8cd1f3278b..2ddd42f2bbb 100644 --- a/sharding/mock/epochStartNotifierStub.go +++ b/sharding/mock/epochStartNotifierStub.go @@ -9,7 +9,7 @@ import ( type EpochStartNotifierStub struct { RegisterHandlerCalled func(handler epochStart.ActionHandler) UnregisterHandlerCalled func(handler epochStart.ActionHandler) - NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler) + NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) NotifyAllCalled func(hdr data.HeaderHandler) } @@ -28,9 +28,9 @@ func (esnm *EpochStartNotifierStub) UnregisterHandler(handler epochStart.ActionH } // NotifyAllPrepare - -func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) { +func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) { if esnm.NotifyAllPrepareCalled != nil { - esnm.NotifyAllPrepareCalled(metaHdr, body) + esnm.NotifyAllPrepareCalled(metaHdr, body, validatorInfoCacher) } } diff --git 
a/sharding/networksharding/peerShardMapper.go b/sharding/networksharding/peerShardMapper.go index a332db6f1ef..8b44afe8953 100644 --- a/sharding/networksharding/peerShardMapper.go +++ b/sharding/networksharding/peerShardMapper.go @@ -4,6 +4,7 @@ import ( "bytes" "encoding/hex" "fmt" + "github.com/ElrondNetwork/elrond-go/epochStart" "sync" "github.com/ElrondNetwork/elrond-go-core/core" @@ -359,7 +360,7 @@ func (psm *PeerShardMapper) EpochStartAction(hdr data.HeaderHandler) { } // EpochStartPrepare is the method called whenever an action needs to be undertaken in respect to the epoch preparation change -func (psm *PeerShardMapper) EpochStartPrepare(metaHdr data.HeaderHandler, _ data.BodyHandler) { +func (psm *PeerShardMapper) EpochStartPrepare(metaHdr data.HeaderHandler, _ data.BodyHandler, _ epochStart.ValidatorInfoCacher) { if check.IfNil(metaHdr) { log.Warn("nil header on PeerShardMapper.EpochStartPrepare") return diff --git a/sharding/networksharding/peerShardMapper_test.go b/sharding/networksharding/peerShardMapper_test.go index 343570172f2..9c497224b87 100644 --- a/sharding/networksharding/peerShardMapper_test.go +++ b/sharding/networksharding/peerShardMapper_test.go @@ -497,12 +497,13 @@ func TestPeerShardMapper_EpochStartPrepareShouldNotPanic(t *testing.T) { }() psm := createPeerShardMapper() - psm.EpochStartPrepare(nil, nil) + psm.EpochStartPrepare(nil, nil, nil) psm.EpochStartPrepare( &testscommon.HeaderHandlerStub{ EpochField: 0, }, nil, + nil, ) } diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinator.go b/sharding/nodesCoordinator/indexHashedNodesCoordinator.go index 5f8859dfe97..c7d1bd97c40 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinator.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinator.go @@ -94,7 +94,6 @@ type indexHashedNodesCoordinator struct { chanStopNode chan endProcess.ArgEndProcess flagWaitingListFix atomicFlags.Flag nodeTypeProvider NodeTypeProviderHandler - validatorInfoCacher 
epochStart.ValidatorInfoCacher } // NewIndexHashedNodesCoordinator creates a new index hashed group selector @@ -139,7 +138,6 @@ func NewIndexHashedNodesCoordinator(arguments ArgNodesCoordinator) (*indexHashed chanStopNode: arguments.ChanStopNode, nodeTypeProvider: arguments.NodeTypeProvider, isFullArchive: arguments.IsFullArchive, - validatorInfoCacher: arguments.ValidatorInfoCacher, } log.Debug("indexHashedNodesCoordinator: enable epoch for waiting waiting list", "epoch", ihnc.waitingListFixEnableEpoch) @@ -217,9 +215,6 @@ func checkArguments(arguments ArgNodesCoordinator) error { if nil == arguments.ChanStopNode { return ErrNilNodeStopChannel } - if check.IfNil(arguments.ValidatorInfoCacher) { - return ErrNilValidatorInfoCacher - } return nil } @@ -542,7 +537,7 @@ func (ihnc *indexHashedNodesCoordinator) GetValidatorsIndexes( // EpochStartPrepare is called when an epoch start event is observed, but not yet confirmed/committed. // Some components may need to do some initialisation on this event -func (ihnc *indexHashedNodesCoordinator) EpochStartPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) { +func (ihnc *indexHashedNodesCoordinator) EpochStartPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) { if !metaHdr.IsStartOfEpochBlock() { log.Error("could not process EpochStartPrepare on nodesCoordinator - not epoch start block") return @@ -561,7 +556,7 @@ func (ihnc *indexHashedNodesCoordinator) EpochStartPrepare(metaHdr data.HeaderHa return } - allValidatorInfo, err := createValidatorInfoFromBody(body, ihnc.numTotalEligible, ihnc.validatorInfoCacher) + allValidatorInfo, err := createValidatorInfoFromBody(body, ihnc.numTotalEligible, validatorInfoCacher) if err != nil { log.Error("could not create validator info from body - do nothing on nodesCoordinator epochStartPrepare", "error", err.Error()) return diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinatorLite_test.go 
b/sharding/nodesCoordinator/indexHashedNodesCoordinatorLite_test.go index 873b95d42cc..5076bfbee13 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinatorLite_test.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinatorLite_test.go @@ -157,15 +157,15 @@ func TestIndexHashedNodesCoordinator_IsEpochInConfig(t *testing.T) { arguments := createArguments() - arguments.ValidatorInfoCacher = dataPool.NewCurrentBlockValidatorInfoPool() ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(t, err) epoch := uint32(1) ihnc.nodesConfig[epoch] = ihnc.nodesConfig[0] - body := createBlockBodyFromNodesCoordinator(ihnc, epoch) - validatorsInfo, _ := createValidatorInfoFromBody(body, 10, arguments.ValidatorInfoCacher) + validatorInfoCacher := dataPool.NewCurrentEpochValidatorInfoPool() + body := createBlockBodyFromNodesCoordinator(ihnc, epoch, validatorInfoCacher) + validatorsInfo, _ := createValidatorInfoFromBody(body, 10, validatorInfoCacher) err = ihnc.SetNodesConfigFromValidatorsInfo(epoch, []byte{}, validatorsInfo) require.Nil(t, err) diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinatorWithRater_test.go b/sharding/nodesCoordinator/indexHashedNodesCoordinatorWithRater_test.go index f98278b3d3a..1f6d356a165 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinatorWithRater_test.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinatorWithRater_test.go @@ -18,7 +18,6 @@ import ( "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" - "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacherMock" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -94,7 +93,6 @@ func TestIndexHashedGroupSelectorWithRater_OkValShouldWork(t *testing.T) { ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: 
false, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } nc, err := NewIndexHashedNodesCoordinator(arguments) assert.Nil(t, err) @@ -189,7 +187,6 @@ func BenchmarkIndexHashedGroupSelectorWithRater_ComputeValidatorsGroup63of400(b ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(b, err) @@ -263,7 +260,6 @@ func Test_ComputeValidatorsGroup63of400(t *testing.T) { ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) numRounds := uint64(1000000) @@ -337,7 +333,6 @@ func TestIndexHashedGroupSelectorWithRater_GetValidatorWithPublicKeyShouldReturn ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } nc, _ := NewIndexHashedNodesCoordinator(arguments) ihnc, _ := NewIndexHashedNodesCoordinatorWithRater(nc, &mock.RaterMock{}) @@ -390,7 +385,6 @@ func TestIndexHashedGroupSelectorWithRater_GetValidatorWithPublicKeyShouldReturn ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } nc, _ := NewIndexHashedNodesCoordinator(arguments) ihnc, _ := NewIndexHashedNodesCoordinatorWithRater(nc, &mock.RaterMock{}) @@ -457,7 +451,6 @@ func TestIndexHashedGroupSelectorWithRater_GetValidatorWithPublicKeyShouldWork(t ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: 
&nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } nc, _ := NewIndexHashedNodesCoordinator(arguments) ihnc, _ := NewIndexHashedNodesCoordinatorWithRater(nc, &mock.RaterMock{}) @@ -541,7 +534,6 @@ func TestIndexHashedGroupSelectorWithRater_GetAllEligibleValidatorsPublicKeys(t ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } nc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -846,7 +838,6 @@ func BenchmarkIndexHashedWithRaterGroupSelector_ComputeValidatorsGroup21of400(b ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(b, err) diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go b/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go index 617c6895f60..aae4efdfa02 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go @@ -27,7 +27,6 @@ import ( "github.com/ElrondNetwork/elrond-go/storage/lrucache" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" - "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacherMock" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -95,7 +94,6 @@ func createArguments() ArgNodesCoordinator { epochStartSubscriber := &mock.EpochStartNotifierStub{} bootStorer := mock.NewStorerMock() - validatorInfoCacher := dataPool.NewCurrentBlockValidatorInfoPool() arguments := ArgNodesCoordinator{ ShardConsensusGroupSize: 1, @@ -115,7 
+113,6 @@ func createArguments() ArgNodesCoordinator { IsFullArchive: false, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - ValidatorInfoCacher: validatorInfoCacher, } return arguments } @@ -264,7 +261,6 @@ func TestIndexHashedNodesCoordinator_OkValShouldWork(t *testing.T) { WaitingListFixEnabledEpoch: 0, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -323,7 +319,6 @@ func TestIndexHashedNodesCoordinator_NewCoordinatorTooFewNodesShouldErr(t *testi WaitingListFixEnabledEpoch: 0, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -396,7 +391,6 @@ func TestIndexHashedNodesCoordinator_ComputeValidatorsGroup1ValidatorShouldRetur WaitingListFixEnabledEpoch: 0, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) list2, err := ihnc.ComputeConsensusGroup([]byte("randomness"), 0, 0, 0) @@ -455,7 +449,6 @@ func TestIndexHashedNodesCoordinator_ComputeValidatorsGroup400of400For10locksNoM WaitingListFixEnabledEpoch: 0, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -542,7 +535,6 @@ func TestIndexHashedNodesCoordinator_ComputeValidatorsGroup400of400For10BlocksMe WaitingListFixEnabledEpoch: 0, ChanStopNode: make(chan 
endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -612,7 +604,6 @@ func TestIndexHashedNodesCoordinator_ComputeValidatorsGroup63of400TestEqualSameP WaitingListFixEnabledEpoch: 0, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -675,7 +666,6 @@ func BenchmarkIndexHashedGroupSelector_ComputeValidatorsGroup21of400(b *testing. WaitingListFixEnabledEpoch: 0, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -747,7 +737,6 @@ func runBenchmark(consensusGroupCache Cacher, consensusGroupSize int, nodesMap m WaitingListFixEnabledEpoch: 0, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -796,7 +785,6 @@ func computeMemoryRequirements(consensusGroupCache Cacher, consensusGroupSize in WaitingListFixEnabledEpoch: 0, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(b, err) @@ -935,7 +923,6 @@ func TestIndexHashedNodesCoordinator_GetValidatorWithPublicKeyShouldWork(t *test WaitingListFixEnabledEpoch: 0, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: 
&nodeTypeProviderMock.NodeTypeProviderStub{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -1018,7 +1005,6 @@ func TestIndexHashedGroupSelector_GetAllEligibleValidatorsPublicKeys(t *testing. WaitingListFixEnabledEpoch: 0, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -1096,7 +1082,6 @@ func TestIndexHashedGroupSelector_GetAllWaitingValidatorsPublicKeys(t *testing.T WaitingListFixEnabledEpoch: 0, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherMock{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -1106,16 +1091,16 @@ func TestIndexHashedGroupSelector_GetAllWaitingValidatorsPublicKeys(t *testing.T require.Nil(t, err) } -func createBlockBodyFromNodesCoordinator(ihnc *indexHashedNodesCoordinator, epoch uint32) *block.Body { +func createBlockBodyFromNodesCoordinator(ihnc *indexHashedNodesCoordinator, epoch uint32, validatorInfoCacher epochStart.ValidatorInfoCacher) *block.Body { body := &block.Body{MiniBlocks: make([]*block.MiniBlock, 0)} - mbs := createMiniBlocksForNodesMap(ihnc.nodesConfig[epoch].eligibleMap, string(common.EligibleList), ihnc.marshalizer, ihnc.hasher, ihnc.validatorInfoCacher) + mbs := createMiniBlocksForNodesMap(ihnc.nodesConfig[epoch].eligibleMap, string(common.EligibleList), ihnc.marshalizer, ihnc.hasher, validatorInfoCacher) body.MiniBlocks = append(body.MiniBlocks, mbs...) 
- mbs = createMiniBlocksForNodesMap(ihnc.nodesConfig[epoch].waitingMap, string(common.WaitingList), ihnc.marshalizer, ihnc.hasher, ihnc.validatorInfoCacher) + mbs = createMiniBlocksForNodesMap(ihnc.nodesConfig[epoch].waitingMap, string(common.WaitingList), ihnc.marshalizer, ihnc.hasher, validatorInfoCacher) body.MiniBlocks = append(body.MiniBlocks, mbs...) - mbs = createMiniBlocksForNodesMap(ihnc.nodesConfig[epoch].leavingMap, string(common.LeavingList), ihnc.marshalizer, ihnc.hasher, ihnc.validatorInfoCacher) + mbs = createMiniBlocksForNodesMap(ihnc.nodesConfig[epoch].leavingMap, string(common.LeavingList), ihnc.marshalizer, ihnc.hasher, validatorInfoCacher) body.MiniBlocks = append(body.MiniBlocks, mbs...) return body @@ -1168,8 +1153,9 @@ func TestIndexHashedNodesCoordinator_EpochStart(t *testing.T) { ihnc.nodesConfig[epoch] = ihnc.nodesConfig[0] - body := createBlockBodyFromNodesCoordinator(ihnc, epoch) - ihnc.EpochStartPrepare(header, body) + validatorInfoCacher := dataPool.NewCurrentEpochValidatorInfoPool() + body := createBlockBodyFromNodesCoordinator(ihnc, epoch, validatorInfoCacher) + ihnc.EpochStartPrepare(header, body, validatorInfoCacher) ihnc.EpochStartAction(header) validators, err := ihnc.GetAllEligibleValidatorsPublicKeys(epoch) @@ -1322,8 +1308,9 @@ func TestIndexHashedNodesCoordinator_EpochStartInEligible(t *testing.T) { }, }, } - body := createBlockBodyFromNodesCoordinator(ihnc, epoch) - ihnc.EpochStartPrepare(header, body) + validatorInfoCacher := dataPool.NewCurrentEpochValidatorInfoPool() + body := createBlockBodyFromNodesCoordinator(ihnc, epoch, validatorInfoCacher) + ihnc.EpochStartPrepare(header, body, validatorInfoCacher) ihnc.EpochStartAction(header) computedShardId, isValidator := ihnc.computeShardForSelfPublicKey(ihnc.nodesConfig[epoch]) @@ -1357,8 +1344,9 @@ func TestIndexHashedNodesCoordinator_EpochStartInWaiting(t *testing.T) { }, }, } - body := createBlockBodyFromNodesCoordinator(ihnc, epoch) - ihnc.EpochStartPrepare(header, body) + 
validatorInfoCacher := dataPool.NewCurrentEpochValidatorInfoPool() + body := createBlockBodyFromNodesCoordinator(ihnc, epoch, validatorInfoCacher) + ihnc.EpochStartPrepare(header, body, validatorInfoCacher) ihnc.EpochStartAction(header) computedShardId, isValidator := ihnc.computeShardForSelfPublicKey(ihnc.nodesConfig[epoch]) @@ -1396,8 +1384,9 @@ func TestIndexHashedNodesCoordinator_EpochStartInLeaving(t *testing.T) { }, }, } - body := createBlockBodyFromNodesCoordinator(ihnc, epoch) - ihnc.EpochStartPrepare(header, body) + validatorInfoCacher := dataPool.NewCurrentEpochValidatorInfoPool() + body := createBlockBodyFromNodesCoordinator(ihnc, epoch, validatorInfoCacher) + ihnc.EpochStartPrepare(header, body, validatorInfoCacher) ihnc.EpochStartAction(header) computedShardId, isValidator := ihnc.computeShardForSelfPublicKey(ihnc.nodesConfig[epoch]) @@ -1433,7 +1422,6 @@ func TestIndexHashedNodesCoordinator_EpochStart_EligibleSortedAscendingByIndex(t epochStartSubscriber := &mock.EpochStartNotifierStub{} bootStorer := mock.NewStorerMock() - validatorInfoCacher := dataPool.NewCurrentBlockValidatorInfoPool() arguments := ArgNodesCoordinator{ ShardConsensusGroupSize: 1, @@ -1452,7 +1440,6 @@ func TestIndexHashedNodesCoordinator_EpochStart_EligibleSortedAscendingByIndex(t WaitingListFixEnabledEpoch: 0, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - ValidatorInfoCacher: validatorInfoCacher, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -1467,8 +1454,9 @@ func TestIndexHashedNodesCoordinator_EpochStart_EligibleSortedAscendingByIndex(t ihnc.nodesConfig[epoch] = ihnc.nodesConfig[0] - body := createBlockBodyFromNodesCoordinator(ihnc, epoch) - ihnc.EpochStartPrepare(header, body) + validatorInfoCacher := dataPool.NewCurrentEpochValidatorInfoPool() + body := createBlockBodyFromNodesCoordinator(ihnc, epoch, validatorInfoCacher) + ihnc.EpochStartPrepare(header, body, validatorInfoCacher) 
newNodesConfig := ihnc.nodesConfig[1] @@ -1558,8 +1546,9 @@ func TestIndexHashedNodesCoordinator_GetSavedStateKey(t *testing.T) { Epoch: 1, } - body := createBlockBodyFromNodesCoordinator(ihnc, 0) - ihnc.EpochStartPrepare(header, body) + validatorInfoCacher := dataPool.NewCurrentEpochValidatorInfoPool() + body := createBlockBodyFromNodesCoordinator(ihnc, 0, validatorInfoCacher) + ihnc.EpochStartPrepare(header, body, validatorInfoCacher) ihnc.EpochStartAction(header) key := ihnc.GetSavedStateKey() @@ -1642,8 +1631,9 @@ func TestIndexHashedNodesCoordinator_GetConsensusWhitelistedNodesEpoch1(t *testi Epoch: 1, } - body := createBlockBodyFromNodesCoordinator(ihnc, 0) - ihnc.EpochStartPrepare(header, body) + validatorInfoCacher := dataPool.NewCurrentEpochValidatorInfoPool() + body := createBlockBodyFromNodesCoordinator(ihnc, 0, validatorInfoCacher) + ihnc.EpochStartPrepare(header, body, validatorInfoCacher) ihnc.EpochStartAction(header) nodesPrevEpoch, err := ihnc.GetAllEligibleValidatorsPublicKeys(0) @@ -1683,35 +1673,37 @@ func TestIndexHashedNodesCoordinator_GetConsensusWhitelistedNodesAfterRevertToEp Epoch: 1, } - body := createBlockBodyFromNodesCoordinator(ihnc, 0) - ihnc.EpochStartPrepare(header, body) + validatorInfoCacher := dataPool.NewCurrentEpochValidatorInfoPool() + + body := createBlockBodyFromNodesCoordinator(ihnc, 0, validatorInfoCacher) + ihnc.EpochStartPrepare(header, body, validatorInfoCacher) ihnc.EpochStartAction(header) - body = createBlockBodyFromNodesCoordinator(ihnc, 1) + body = createBlockBodyFromNodesCoordinator(ihnc, 1, validatorInfoCacher) header = &block.MetaBlock{ PrevRandSeed: []byte("rand seed"), EpochStart: block.EpochStart{LastFinalizedHeaders: []block.EpochStartShardData{{}}}, Epoch: 2, } - ihnc.EpochStartPrepare(header, body) + ihnc.EpochStartPrepare(header, body, validatorInfoCacher) ihnc.EpochStartAction(header) - body = createBlockBodyFromNodesCoordinator(ihnc, 2) + body = createBlockBodyFromNodesCoordinator(ihnc, 2, 
validatorInfoCacher) header = &block.MetaBlock{ PrevRandSeed: []byte("rand seed"), EpochStart: block.EpochStart{LastFinalizedHeaders: []block.EpochStartShardData{{}}}, Epoch: 3, } - ihnc.EpochStartPrepare(header, body) + ihnc.EpochStartPrepare(header, body, validatorInfoCacher) ihnc.EpochStartAction(header) - body = createBlockBodyFromNodesCoordinator(ihnc, 3) + body = createBlockBodyFromNodesCoordinator(ihnc, 3, validatorInfoCacher) header = &block.MetaBlock{ PrevRandSeed: []byte("rand seed"), EpochStart: block.EpochStart{LastFinalizedHeaders: []block.EpochStartShardData{{}}}, Epoch: 4, } - ihnc.EpochStartPrepare(header, body) + ihnc.EpochStartPrepare(header, body, validatorInfoCacher) ihnc.EpochStartAction(header) nodesEpoch1, err := ihnc.GetAllEligibleValidatorsPublicKeys(1) diff --git a/sharding/nodesCoordinator/interface.go b/sharding/nodesCoordinator/interface.go index b53506fc473..2db2aab8d5e 100644 --- a/sharding/nodesCoordinator/interface.go +++ b/sharding/nodesCoordinator/interface.go @@ -98,7 +98,7 @@ type RandomSelector interface { // EpochStartActionHandler defines the action taken on epoch start event type EpochStartActionHandler interface { EpochStartAction(hdr data.HeaderHandler) - EpochStartPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) + EpochStartPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) NotifyOrder() uint32 } diff --git a/sharding/nodesCoordinator/shardingArgs.go b/sharding/nodesCoordinator/shardingArgs.go index f690b208b40..2b3173f9aa3 100644 --- a/sharding/nodesCoordinator/shardingArgs.go +++ b/sharding/nodesCoordinator/shardingArgs.go @@ -4,7 +4,6 @@ import ( "github.com/ElrondNetwork/elrond-go-core/data/endProcess" "github.com/ElrondNetwork/elrond-go-core/hashing" "github.com/ElrondNetwork/elrond-go-core/marshal" - "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/storage" ) @@ -30,5 +29,4 @@ type ArgNodesCoordinator struct { 
ChanStopNode chan endProcess.ArgEndProcess NodeTypeProvider NodeTypeProviderHandler IsFullArchive bool - ValidatorInfoCacher epochStart.ValidatorInfoCacher } diff --git a/testscommon/dataRetriever/poolFactory.go b/testscommon/dataRetriever/poolFactory.go index fe05709ac45..012d25fb764 100644 --- a/testscommon/dataRetriever/poolFactory.go +++ b/testscommon/dataRetriever/poolFactory.go @@ -120,7 +120,7 @@ func CreatePoolsHolder(numShards uint32, selfShard uint32) dataRetriever.PoolsHo panicIfError("CreatePoolsHolder", err) currentBlockTransactions := dataPool.NewCurrentBlockTransactionsPool() - currentBlockValidatorInfo := dataPool.NewCurrentBlockValidatorInfoPool() + currentEpochValidatorInfo := dataPool.NewCurrentEpochValidatorInfoPool() dataPoolArgs := dataPool.DataPoolArgs{ Transactions: txPool, UnsignedTransactions: unsignedTxPool, @@ -131,7 +131,7 @@ func CreatePoolsHolder(numShards uint32, selfShard uint32) dataRetriever.PoolsHo TrieNodes: adaptedTrieNodesStorage, TrieNodesChunks: trieNodesChunks, CurrentBlockTransactions: currentBlockTransactions, - CurrentBlockValidatorInfo: currentBlockValidatorInfo, + CurrentEpochValidatorInfo: currentEpochValidatorInfo, SmartContracts: smartContracts, ValidatorsInfo: validatorsInfo, } @@ -192,7 +192,7 @@ func CreatePoolsHolderWithTxPool(txPool dataRetriever.ShardedDataCacherNotifier) panicIfError("CreatePoolsHolderWithTxPool", err) currentBlockTransactions := dataPool.NewCurrentBlockTransactionsPool() - currentBlockValidatorInfo := dataPool.NewCurrentBlockValidatorInfoPool() + currentEpochValidatorInfo := dataPool.NewCurrentEpochValidatorInfoPool() dataPoolArgs := dataPool.DataPoolArgs{ Transactions: txPool, UnsignedTransactions: unsignedTxPool, @@ -203,7 +203,7 @@ func CreatePoolsHolderWithTxPool(txPool dataRetriever.ShardedDataCacherNotifier) TrieNodes: trieNodes, TrieNodesChunks: trieNodesChunks, CurrentBlockTransactions: currentBlockTransactions, - CurrentBlockValidatorInfo: currentBlockValidatorInfo, + 
CurrentEpochValidatorInfo: currentEpochValidatorInfo, SmartContracts: smartContracts, ValidatorsInfo: validatorsInfo, } diff --git a/testscommon/dataRetriever/poolsHolderMock.go b/testscommon/dataRetriever/poolsHolderMock.go index a6044a03c26..f11421c9b54 100644 --- a/testscommon/dataRetriever/poolsHolderMock.go +++ b/testscommon/dataRetriever/poolsHolderMock.go @@ -24,7 +24,7 @@ type PoolsHolderMock struct { trieNodesChunks storage.Cacher smartContracts storage.Cacher currBlockTxs dataRetriever.TransactionCacher - currBlockValidatorInfo dataRetriever.ValidatorInfoCacher + currEpochValidatorInfo dataRetriever.ValidatorInfoCacher validatorsInfo dataRetriever.ShardedDataCacherNotifier } @@ -76,7 +76,7 @@ func NewPoolsHolderMock() *PoolsHolderMock { panicIfError("NewPoolsHolderMock", err) holder.currBlockTxs = dataPool.NewCurrentBlockTransactionsPool() - holder.currBlockValidatorInfo = dataPool.NewCurrentBlockValidatorInfoPool() + holder.currEpochValidatorInfo = dataPool.NewCurrentEpochValidatorInfoPool() holder.trieNodes, err = storageUnit.NewCache(storageUnit.CacheConfig{Type: storageUnit.SizeLRUCache, Capacity: 900000, Shards: 1, SizeInBytes: 314572800}) panicIfError("NewPoolsHolderMock", err) @@ -102,9 +102,9 @@ func (holder *PoolsHolderMock) CurrentBlockTxs() dataRetriever.TransactionCacher return holder.currBlockTxs } -// CurrentBlockValidatorInfo - -func (holder *PoolsHolderMock) CurrentBlockValidatorInfo() dataRetriever.ValidatorInfoCacher { - return holder.currBlockValidatorInfo +// CurrentEpochValidatorInfo - +func (holder *PoolsHolderMock) CurrentEpochValidatorInfo() dataRetriever.ValidatorInfoCacher { + return holder.currEpochValidatorInfo } // Transactions - diff --git a/testscommon/dataRetriever/poolsHolderStub.go b/testscommon/dataRetriever/poolsHolderStub.go index 42351cee75b..ef4fb1df804 100644 --- a/testscommon/dataRetriever/poolsHolderStub.go +++ b/testscommon/dataRetriever/poolsHolderStub.go @@ -15,7 +15,7 @@ type PoolsHolderStub struct { 
MiniBlocksCalled func() storage.Cacher MetaBlocksCalled func() storage.Cacher CurrBlockTxsCalled func() dataRetriever.TransactionCacher - CurrBlockValidatorInfoCalled func() dataRetriever.ValidatorInfoCacher + CurrEpochValidatorInfoCalled func() dataRetriever.ValidatorInfoCacher TrieNodesCalled func() storage.Cacher TrieNodesChunksCalled func() storage.Cacher PeerChangesBlocksCalled func() storage.Cacher @@ -91,10 +91,10 @@ func (holder *PoolsHolderStub) CurrentBlockTxs() dataRetriever.TransactionCacher return nil } -// CurrentBlockValidatorInfo - -func (holder *PoolsHolderStub) CurrentBlockValidatorInfo() dataRetriever.ValidatorInfoCacher { - if holder.CurrBlockValidatorInfoCalled != nil { - return holder.CurrBlockValidatorInfoCalled() +// CurrentEpochValidatorInfo - +func (holder *PoolsHolderStub) CurrentEpochValidatorInfo() dataRetriever.ValidatorInfoCacher { + if holder.CurrEpochValidatorInfoCalled != nil { + return holder.CurrEpochValidatorInfoCalled() } return nil diff --git a/testscommon/genericMocks/actionHandlerStub.go b/testscommon/genericMocks/actionHandlerStub.go index 09aaa2d6324..fd4b0444cab 100644 --- a/testscommon/genericMocks/actionHandlerStub.go +++ b/testscommon/genericMocks/actionHandlerStub.go @@ -1,11 +1,14 @@ package genericMocks -import "github.com/ElrondNetwork/elrond-go-core/data" +import ( + "github.com/ElrondNetwork/elrond-go-core/data" + "github.com/ElrondNetwork/elrond-go/epochStart" +) // ActionHandlerStub - type ActionHandlerStub struct { EpochStartActionCalled func(hdr data.HeaderHandler) - EpochStartPrepareCalled func(metaHdr data.HeaderHandler, body data.BodyHandler) + EpochStartPrepareCalled func(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) NotifyOrderCalled func() uint32 } @@ -17,9 +20,9 @@ func (ahs *ActionHandlerStub) EpochStartAction(hdr data.HeaderHandler) { } // EpochStartPrepare - -func (ahs *ActionHandlerStub) EpochStartPrepare(metaHdr data.HeaderHandler, body 
data.BodyHandler) { +func (ahs *ActionHandlerStub) EpochStartPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) { if ahs.EpochStartPrepareCalled != nil { - ahs.EpochStartPrepareCalled(metaHdr, body) + ahs.EpochStartPrepareCalled(metaHdr, body, validatorInfoCacher) } } diff --git a/testscommon/shardingMocks/nodesCoordinatorStub.go b/testscommon/shardingMocks/nodesCoordinatorStub.go index 874f319ad66..22ae12118e1 100644 --- a/testscommon/shardingMocks/nodesCoordinatorStub.go +++ b/testscommon/shardingMocks/nodesCoordinatorStub.go @@ -2,6 +2,7 @@ package shardingMocks import ( "github.com/ElrondNetwork/elrond-go-core/data" + "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/sharding/nodesCoordinator" state "github.com/ElrondNetwork/elrond-go/state" ) @@ -15,7 +16,7 @@ type NodesCoordinatorStub struct { GetAllValidatorsPublicKeysCalled func() (map[uint32][][]byte, error) ConsensusGroupSizeCalled func(shardID uint32) int ComputeConsensusGroupCalled func(randomness []byte, round uint64, shardId uint32, epoch uint32) (validatorsGroup []nodesCoordinator.Validator, err error) - EpochStartPrepareCalled func(metaHdr data.HeaderHandler, body data.BodyHandler) + EpochStartPrepareCalled func(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) } // NodesCoordinatorToRegistry - @@ -24,9 +25,9 @@ func (ncm *NodesCoordinatorStub) NodesCoordinatorToRegistry() *nodesCoordinator. 
} // EpochStartPrepare - -func (ncm *NodesCoordinatorStub) EpochStartPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) { +func (ncm *NodesCoordinatorStub) EpochStartPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) { if ncm.EpochStartPrepareCalled != nil { - ncm.EpochStartPrepareCalled(metaHdr, body) + ncm.EpochStartPrepareCalled(metaHdr, body, validatorInfoCacher) } } diff --git a/testscommon/validatorInfoCacherMock/validatorInfoCacherMock.go b/testscommon/validatorInfoCacher/validatorInfoCacherMock.go similarity index 100% rename from testscommon/validatorInfoCacherMock/validatorInfoCacherMock.go rename to testscommon/validatorInfoCacher/validatorInfoCacherMock.go diff --git a/update/mock/epochStartNotifierStub.go b/update/mock/epochStartNotifierStub.go index 50ac82f413b..e0716b6d111 100644 --- a/update/mock/epochStartNotifierStub.go +++ b/update/mock/epochStartNotifierStub.go @@ -10,7 +10,7 @@ type EpochStartNotifierStub struct { RegisterHandlerCalled func(handler epochStart.ActionHandler) UnregisterHandlerCalled func(handler epochStart.ActionHandler) NotifyAllCalled func(hdr data.HeaderHandler) - NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler) + NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) epochStartHdls []epochStart.ActionHandler NotifyEpochChangeConfirmedCalled func(epoch uint32) RegisterForEpochChangeConfirmedCalled func(handler func(epoch uint32)) @@ -54,13 +54,13 @@ func (esnm *EpochStartNotifierStub) UnregisterHandler(handler epochStart.ActionH } // NotifyAllPrepare - -func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) { +func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) { if esnm.NotifyAllPrepareCalled != nil { - 
esnm.NotifyAllPrepareCalled(metaHdr, body) + esnm.NotifyAllPrepareCalled(metaHdr, body, validatorInfoCacher) } for _, hdl := range esnm.epochStartHdls { - hdl.EpochStartPrepare(metaHdr, body) + hdl.EpochStartPrepare(metaHdr, body, validatorInfoCacher) } } From 02e992cf5ead22880dda9817882f0f118717e746 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Fri, 27 May 2022 13:13:07 +0300 Subject: [PATCH 27/70] * Finalized implementation for peers mini blocks refactor in epochStart component --- epochStart/bootstrap/process.go | 2 +- epochStart/bootstrap/syncValidatorStatus.go | 35 ++- epochStart/errors.go | 25 +- epochStart/metachain/trigger.go | 2 +- epochStart/metachain/validators.go | 6 +- epochStart/mock/validatorInfoSyncerStub.go | 6 + epochStart/shardchain/peerMiniBlocksSyncer.go | 259 +++++++++++++---- .../shardchain/peerMiniBlocksSyncer_test.go | 22 +- epochStart/shardchain/trigger.go | 262 ++++++++++++------ epochStart/shardchain/triggerRegistry_test.go | 4 +- factory/processComponents.go | 5 +- integrationTests/testProcessorNode.go | 24 +- .../preprocess/validatorInfoPreProcessor.go | 73 ++--- process/common.go | 42 +++ process/errors.go | 6 + process/interceptors/processor/interface.go | 2 +- process/interface.go | 3 +- process/peer/interceptedValidatorInfo.go | 17 +- process/peer/interceptedValidatorInfo_test.go | 12 +- process/peer/validatorsProvider_test.go | 11 + .../syncer/transactionsSyncHandlerMock.go | 18 ++ update/factory/exportHandlerFactory.go | 7 +- update/genesis/base.go | 8 + update/genesis/export.go | 38 +++ update/interface.go | 3 + update/mock/stateSyncStub.go | 10 + update/process/baseProcess.go | 4 - update/sync/coordinator.go | 6 + update/sync/syncTransactions.go | 220 +++++++++++++-- update/sync/syncTransactions_test.go | 4 +- 30 files changed, 885 insertions(+), 251 deletions(-) diff --git a/epochStart/bootstrap/process.go b/epochStart/bootstrap/process.go index d4da4fed0f6..7a658b2a782 100644 --- a/epochStart/bootstrap/process.go +++ 
b/epochStart/bootstrap/process.go @@ -562,7 +562,7 @@ func (e *epochStartBootstrap) createSyncers() error { syncTxsArgs := updateSync.ArgsNewTransactionsSyncer{ DataPools: e.dataPool, Storages: dataRetriever.NewChainStorer(), - Marshalizer: e.coreComponentsHolder.InternalMarshalizer(), + Marshaller: e.coreComponentsHolder.InternalMarshalizer(), RequestHandler: e.requestHandler, } diff --git a/epochStart/bootstrap/syncValidatorStatus.go b/epochStart/bootstrap/syncValidatorStatus.go index 19cf6baaf85..26d071afd61 100644 --- a/epochStart/bootstrap/syncValidatorStatus.go +++ b/epochStart/bootstrap/syncValidatorStatus.go @@ -17,6 +17,7 @@ import ( "github.com/ElrondNetwork/elrond-go/sharding/nodesCoordinator" "github.com/ElrondNetwork/elrond-go/storage" "github.com/ElrondNetwork/elrond-go/storage/lrucache" + "github.com/ElrondNetwork/elrond-go/update" "github.com/ElrondNetwork/elrond-go/update/sync" ) @@ -24,6 +25,7 @@ const consensusGroupCacheSize = 50 type syncValidatorStatus struct { miniBlocksSyncer epochStart.PendingMiniBlocksSyncHandler + transactionsSyncer update.TransactionsSyncHandler dataPool dataRetriever.PoolsHolder marshalizer marshal.Marshalizer requestHandler process.RequestHandler @@ -61,18 +63,31 @@ func NewSyncValidatorStatus(args ArgsNewSyncValidatorStatus) (*syncValidatorStat requestHandler: args.RequestHandler, genesisNodesConfig: args.GenesisNodesConfig, } + + var err error + syncMiniBlocksArgs := sync.ArgsNewPendingMiniBlocksSyncer{ Storage: disabled.CreateMemUnit(), Cache: s.dataPool.MiniBlocks(), Marshalizer: s.marshalizer, RequestHandler: s.requestHandler, } - var err error s.miniBlocksSyncer, err = sync.NewPendingMiniBlocksSyncer(syncMiniBlocksArgs) if err != nil { return nil, err } + syncTxsArgs := sync.ArgsNewTransactionsSyncer{ + DataPools: s.dataPool, + Storages: dataRetriever.NewChainStorer(), + Marshaller: s.marshalizer, + RequestHandler: s.requestHandler, + } + s.transactionsSyncer, err = sync.NewTransactionsSyncer(syncTxsArgs) + if err 
!= nil { + return nil, err + } + eligibleNodesInfo, waitingNodesInfo := args.GenesisNodesConfig.InitialNodesInfo() eligibleValidators, err := nodesCoordinator.NodesInfoToValidators(eligibleNodesInfo) @@ -205,6 +220,24 @@ func (s *syncValidatorStatus) getPeerBlockBodyForMeta( return nil, err } + s.transactionsSyncer.ClearFields() + ctx, cancel = context.WithTimeout(context.Background(), time.Minute) + err = s.transactionsSyncer.SyncTransactionsFor(peerMiniBlocks, metaBlock.GetEpoch(), ctx) + cancel() + if err != nil { + return nil, err + } + + validatorsInfo, err := s.transactionsSyncer.GetValidatorsInfo() + if err != nil { + return nil, err + } + + currentEpochValidatorInfoPool := s.dataPool.CurrentEpochValidatorInfo() + for validatorInfoHash, validatorInfo := range validatorsInfo { + currentEpochValidatorInfoPool.AddValidatorInfo([]byte(validatorInfoHash), validatorInfo) + } + blockBody := &block.Body{MiniBlocks: make([]*block.MiniBlock, 0, len(peerMiniBlocks))} for _, mbHeader := range shardMBHeaders { blockBody.MiniBlocks = append(blockBody.MiniBlocks, peerMiniBlocks[string(mbHeader.GetHash())]) diff --git a/epochStart/errors.go b/epochStart/errors.go index 122fee8c8cf..1ee94785589 100644 --- a/epochStart/errors.go +++ b/epochStart/errors.go @@ -140,8 +140,17 @@ var ErrValidatorInfoMiniBlocksNumDoesNotMatch = errors.New("number of created an // ErrNilValidatorInfo signals that a nil value for the validatorInfo has been provided var ErrNilValidatorInfo = errors.New("validator info is nil") -// ErrNilMetaBlock signals that a nil metablock has been provided -var ErrNilMetaBlock = errors.New("nil metablock") +// ErrNilValidatorsInfoPool signals that a nil value for the validatorsInfoPool has been provided +var ErrNilValidatorsInfoPool = errors.New("validators info pool is nil") + +// ErrNilCurrentEpochValidatorsInfoPool signals that a nil value for the currentEpochValidatorsInfoPool has been provided +var ErrNilCurrentEpochValidatorsInfoPool = errors.New("current 
epoch validators info pool is nil") + +// ErrNilMetaBlock signals that a nil meta block has been provided +var ErrNilMetaBlock = errors.New("nil meta block") + +// ErrNilBlockBody signals that a nil block body has been provided +var ErrNilBlockBody = errors.New("nil block body") // ErrNilMiniBlockPool signals that a nil mini blocks pool was used var ErrNilMiniBlockPool = errors.New("nil mini block pool") @@ -155,9 +164,6 @@ var ErrEpochStartDataForShardNotFound = errors.New("epoch start data for current // ErrMissingHeader signals that searched header is missing var ErrMissingHeader = errors.New("missing header") -// ErrMissingMiniBlock signals that the searched miniBlock is missing -var ErrMissingMiniBlock = errors.New("missing miniBlock") - // ErrNilPathManager signals that a nil path manager has been provided var ErrNilPathManager = errors.New("nil path manager") @@ -188,9 +194,6 @@ var ErrNilGenesisNodesConfig = errors.New("nil genesis nodes config") // ErrNilRater signals that a nil rater has been provided var ErrNilRater = errors.New("nil rater") -// ErrInvalidWorkingDir signals that an invalid working directory has been provided -var ErrInvalidWorkingDir = errors.New("invalid working directory") - // ErrTimeoutWaitingForMetaBlock signals that a timeout event was raised while waiting for the epoch start meta block var ErrTimeoutWaitingForMetaBlock = errors.New("timeout while waiting for epoch start meta block") @@ -272,12 +275,6 @@ var ErrNilDataTrie = errors.New("nil data trie") // ErrInvalidMinNodePrice signals that the minimum node price is invalid (e.g negative, not a number, etc) var ErrInvalidMinNodePrice = errors.New("minimum node price is invalid") -// ErrInvalidRewardsTopUpGradientPoint signals that the given point controlling the top-up gradient is invalid -var ErrInvalidRewardsTopUpGradientPoint = errors.New("top-up gradient point invalid") - -// ErrInvalidRewardsTopUpFactor signals that the factor for computing the top-up rewards out of the full 
rewards is invalid -var ErrInvalidRewardsTopUpFactor = errors.New("top-up factor invalid") - // ErrNilEconomicsDataProvider signals that the economics data provider is nil var ErrNilEconomicsDataProvider = errors.New("end of epoch economics data provider is nil") diff --git a/epochStart/metachain/trigger.go b/epochStart/metachain/trigger.go index bd021b556ca..9f29613ecf4 100644 --- a/epochStart/metachain/trigger.go +++ b/epochStart/metachain/trigger.go @@ -109,7 +109,7 @@ func NewEpochStartTrigger(args *ArgsNewMetaEpochStartTrigger) (*trigger, error) return nil, epochStart.ErrNilDataPoolsHolder } if check.IfNil(args.DataPool.CurrentEpochValidatorInfo()) { - return nil, epochStart.ErrNilValidatorInfo + return nil, epochStart.ErrNilCurrentEpochValidatorsInfoPool } triggerStorage := args.Storage.GetStorer(dataRetriever.BootstrapUnit) diff --git a/epochStart/metachain/validators.go b/epochStart/metachain/validators.go index 06e2d3d4dd2..35e46f6f1ff 100644 --- a/epochStart/metachain/validators.go +++ b/epochStart/metachain/validators.go @@ -11,10 +11,10 @@ import ( "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go-core/hashing" "github.com/ElrondNetwork/elrond-go-core/marshal" + "github.com/ElrondNetwork/elrond-go/common" "github.com/ElrondNetwork/elrond-go/dataRetriever" "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/process" - "github.com/ElrondNetwork/elrond-go/process/factory" "github.com/ElrondNetwork/elrond-go/sharding" "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/storage" @@ -63,7 +63,7 @@ func NewValidatorInfoCreator(args ArgsNewValidatorInfoCreator) (*validatorInfoCr return nil, epochStart.ErrNilDataPoolsHolder } if check.IfNil(args.DataPool.CurrentEpochValidatorInfo()) { - return nil, epochStart.ErrNilValidatorInfo + return nil, epochStart.ErrNilCurrentEpochValidatorsInfoPool } //TODO: currValidatorInfoCache := 
dataPool.NewCurrentEpochValidatorInfoPool() should be replaced by @@ -238,7 +238,7 @@ func (vic *validatorInfoCreator) CreateMarshalledData(body *block.Body) map[stri continue } - broadcastTopic := createBroadcastTopic(factory.UnsignedTransactionTopic, vic.shardCoordinator, miniBlock.ReceiverShardID) + broadcastTopic := common.ValidatorInfoTopic if _, ok := marshalledValidatorInfoTxs[broadcastTopic]; !ok { marshalledValidatorInfoTxs[broadcastTopic] = make([][]byte, 0, len(miniBlock.TxHashes)) } diff --git a/epochStart/mock/validatorInfoSyncerStub.go b/epochStart/mock/validatorInfoSyncerStub.go index 29a006317c4..3dedd99d58e 100644 --- a/epochStart/mock/validatorInfoSyncerStub.go +++ b/epochStart/mock/validatorInfoSyncerStub.go @@ -2,6 +2,7 @@ package mock import ( "github.com/ElrondNetwork/elrond-go-core/data" + "github.com/ElrondNetwork/elrond-go/state" ) // ValidatorInfoSyncerStub - @@ -13,6 +14,11 @@ func (vip *ValidatorInfoSyncerStub) SyncMiniBlocks(_ data.HeaderHandler) ([][]by return nil, nil, nil } +// SyncValidatorsInfo - +func (vip *ValidatorInfoSyncerStub) SyncValidatorsInfo(_ data.BodyHandler) ([][]byte, map[string]*state.ShardValidatorInfo, error) { + return nil, nil, nil +} + // IsInterfaceNil - func (vip *ValidatorInfoSyncerStub) IsInterfaceNil() bool { return vip == nil diff --git a/epochStart/shardchain/peerMiniBlocksSyncer.go b/epochStart/shardchain/peerMiniBlocksSyncer.go index c3cabc0432e..d247be53966 100644 --- a/epochStart/shardchain/peerMiniBlocksSyncer.go +++ b/epochStart/shardchain/peerMiniBlocksSyncer.go @@ -9,8 +9,10 @@ import ( "github.com/ElrondNetwork/elrond-go-core/core/check" "github.com/ElrondNetwork/elrond-go-core/data" "github.com/ElrondNetwork/elrond-go-core/data/block" + "github.com/ElrondNetwork/elrond-go/dataRetriever" "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/process" + "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/storage" ) @@ -21,19 +23,25 @@ const 
waitTime = 5 * time.Second // ArgPeerMiniBlockSyncer holds all dependencies required to create a peerMiniBlockSyncer type ArgPeerMiniBlockSyncer struct { - MiniBlocksPool storage.Cacher - Requesthandler epochStart.RequestHandler + MiniBlocksPool storage.Cacher + ValidatorsInfoPool dataRetriever.ShardedDataCacherNotifier + RequestHandler epochStart.RequestHandler } -// peerMiniBlockSyncer implements validator info processing for miniblocks of type peerMiniblock +// peerMiniBlockSyncer implements validator info processing for mini blocks of type PeerMiniBlock type peerMiniBlockSyncer struct { - miniBlocksPool storage.Cacher - requestHandler epochStart.RequestHandler - - mapAllPeerMiniblocks map[string]*block.MiniBlock - chRcvAllMiniblocks chan struct{} - mutMiniBlocksForBlock sync.RWMutex - numMissingPeerMiniblocks uint32 + miniBlocksPool storage.Cacher + validatorsInfoPool dataRetriever.ShardedDataCacherNotifier + requestHandler epochStart.RequestHandler + + mapAllPeerMiniBlocks map[string]*block.MiniBlock + mapAllValidatorsInfo map[string]*state.ShardValidatorInfo + chRcvAllMiniBlocks chan struct{} + chRcvAllValidatorsInfo chan struct{} + mutMiniBlocksForBlock sync.RWMutex + mutValidatorsInfoForBlock sync.RWMutex + numMissingPeerMiniBlocks uint32 + numMissingValidatorsInfo uint32 } // NewPeerMiniBlockSyncer creates a new peerMiniBlockSyncer object @@ -41,49 +49,86 @@ func NewPeerMiniBlockSyncer(arguments ArgPeerMiniBlockSyncer) (*peerMiniBlockSyn if check.IfNil(arguments.MiniBlocksPool) { return nil, epochStart.ErrNilMiniBlockPool } - if check.IfNil(arguments.Requesthandler) { + if check.IfNil(arguments.ValidatorsInfoPool) { + return nil, epochStart.ErrNilValidatorsInfoPool + } + if check.IfNil(arguments.RequestHandler) { return nil, epochStart.ErrNilRequestHandler } p := &peerMiniBlockSyncer{ - miniBlocksPool: arguments.MiniBlocksPool, - requestHandler: arguments.Requesthandler, + miniBlocksPool: arguments.MiniBlocksPool, + validatorsInfoPool: 
arguments.ValidatorsInfoPool, + requestHandler: arguments.RequestHandler, } //TODO: change the registerHandler for the miniblockPool to call //directly with hash and value - like func (sp *shardProcessor) receivedMetaBlock p.miniBlocksPool.RegisterHandler(p.receivedMiniBlock, core.UniqueIdentifier()) + p.validatorsInfoPool.RegisterOnAdded(p.receivedValidatorInfo) return p, nil } -func (p *peerMiniBlockSyncer) init() { +func (p *peerMiniBlockSyncer) initMiniBlocks() { p.mutMiniBlocksForBlock.Lock() - p.mapAllPeerMiniblocks = make(map[string]*block.MiniBlock) - p.chRcvAllMiniblocks = make(chan struct{}) + p.mapAllPeerMiniBlocks = make(map[string]*block.MiniBlock) + p.chRcvAllMiniBlocks = make(chan struct{}) p.mutMiniBlocksForBlock.Unlock() } -// SyncMiniBlocks processes an epochstart block asyncrhonous, processing the PeerMiniblocks -func (p *peerMiniBlockSyncer) SyncMiniBlocks(metaBlock data.HeaderHandler) ([][]byte, data.BodyHandler, error) { - if check.IfNil(metaBlock) { +func (p *peerMiniBlockSyncer) initValidatorsInfo() { + p.mutValidatorsInfoForBlock.Lock() + p.mapAllValidatorsInfo = make(map[string]*state.ShardValidatorInfo) + p.chRcvAllValidatorsInfo = make(chan struct{}) + p.mutValidatorsInfoForBlock.Unlock() +} + +// SyncMiniBlocks synchronizes peers mini blocks from an epoch start meta block +func (p *peerMiniBlockSyncer) SyncMiniBlocks(headerHandler data.HeaderHandler) ([][]byte, data.BodyHandler, error) { + if check.IfNil(headerHandler) { return nil, nil, epochStart.ErrNilMetaBlock } - p.init() + p.initMiniBlocks() - p.computeMissingPeerBlocks(metaBlock) + p.computeMissingPeerBlocks(headerHandler) - allMissingPeerMiniblocksHashes, err := p.retrieveMissingBlocks() + allMissingPeerMiniBlocksHashes, err := p.retrieveMissingMiniBlocks() if err != nil { - return allMissingPeerMiniblocksHashes, nil, err + return allMissingPeerMiniBlocksHashes, nil, err } - peerBlockBody := p.getAllPeerMiniBlocks(metaBlock) + peerBlockBody := 
p.getAllPeerMiniBlocks(headerHandler) return nil, peerBlockBody, nil } +// SyncValidatorsInfo synchronizes validators info from a block body of an epoch start meta block +func (p *peerMiniBlockSyncer) SyncValidatorsInfo(bodyHandler data.BodyHandler) ([][]byte, map[string]*state.ShardValidatorInfo, error) { + if check.IfNil(bodyHandler) { + return nil, nil, epochStart.ErrNilBlockBody + } + + body, ok := bodyHandler.(*block.Body) + if !ok { + return nil, nil, epochStart.ErrWrongTypeAssertion + } + + p.initValidatorsInfo() + + p.computeMissingValidatorsInfo(body) + + allMissingValidatorsInfoHashes, err := p.retrieveMissingValidatorsInfo() + if err != nil { + return allMissingValidatorsInfoHashes, nil, err + } + + validatorsInfo := p.getAllValidatorsInfo(body) + + return nil, validatorsInfo, nil +} + func (p *peerMiniBlockSyncer) receivedMiniBlock(key []byte, val interface{}) { peerMb, ok := val.(*block.MiniBlock) if !ok || peerMb.Type != block.PeerBlock { @@ -93,19 +138,45 @@ func (p *peerMiniBlockSyncer) receivedMiniBlock(key []byte, val interface{}) { log.Trace(fmt.Sprintf("received miniblock of type %s", peerMb.Type)) p.mutMiniBlocksForBlock.Lock() - havingPeerMb, ok := p.mapAllPeerMiniblocks[string(key)] + havingPeerMb, ok := p.mapAllPeerMiniBlocks[string(key)] if !ok || havingPeerMb != nil { p.mutMiniBlocksForBlock.Unlock() return } - p.mapAllPeerMiniblocks[string(key)] = peerMb - p.numMissingPeerMiniblocks-- - numMissingPeerMiniblocks := p.numMissingPeerMiniblocks + p.mapAllPeerMiniBlocks[string(key)] = peerMb + p.numMissingPeerMiniBlocks-- + numMissingPeerMiniBlocks := p.numMissingPeerMiniBlocks p.mutMiniBlocksForBlock.Unlock() - if numMissingPeerMiniblocks == 0 { - p.chRcvAllMiniblocks <- struct{}{} + if numMissingPeerMiniBlocks == 0 { + p.chRcvAllMiniBlocks <- struct{}{} + } +} + +func (p *peerMiniBlockSyncer) receivedValidatorInfo(key []byte, val interface{}) { + validatorInfo, ok := val.(*state.ShardValidatorInfo) + if !ok { + 
log.Error("receivedValidatorInfo", "key", key, "error", epochStart.ErrWrongTypeAssertion) + return + } + + log.Trace(fmt.Sprintf("received validator info of pk %s", validatorInfo.PublicKey)) + + p.mutValidatorsInfoForBlock.Lock() + havingValidatorInfo, ok := p.mapAllValidatorsInfo[string(key)] + if !ok || havingValidatorInfo != nil { + p.mutValidatorsInfoForBlock.Unlock() + return + } + + p.mapAllValidatorsInfo[string(key)] = validatorInfo + p.numMissingValidatorsInfo-- + numMissingValidatorsInfo := p.numMissingValidatorsInfo + p.mutValidatorsInfoForBlock.Unlock() + + if numMissingValidatorsInfo == 0 { + p.chRcvAllValidatorsInfo <- struct{}{} } } @@ -121,15 +192,34 @@ func (p *peerMiniBlockSyncer) getAllPeerMiniBlocks(metaBlock data.HeaderHandler) continue } - mb := p.mapAllPeerMiniblocks[string(peerMiniBlock.GetHash())] + mb := p.mapAllPeerMiniBlocks[string(peerMiniBlock.GetHash())] peerBlockBody.MiniBlocks = append(peerBlockBody.MiniBlocks, mb) } return peerBlockBody } +func (p *peerMiniBlockSyncer) getAllValidatorsInfo(body *block.Body) map[string]*state.ShardValidatorInfo { + p.mutValidatorsInfoForBlock.Lock() + defer p.mutValidatorsInfoForBlock.Unlock() + + validatorsInfo := make(map[string]*state.ShardValidatorInfo) + for _, mb := range body.MiniBlocks { + if mb.Type != block.PeerBlock { + continue + } + + for _, txHash := range mb.TxHashes { + validatorInfo := p.mapAllValidatorsInfo[string(txHash)] + validatorsInfo[string(txHash)] = validatorInfo + } + } + + return validatorsInfo +} + func (p *peerMiniBlockSyncer) computeMissingPeerBlocks(metaBlock data.HeaderHandler) { - numMissingPeerMiniblocks := uint32(0) + numMissingPeerMiniBlocks := uint32(0) p.mutMiniBlocksForBlock.Lock() for _, mb := range metaBlock.GetMiniBlockHeaderHandlers() { @@ -137,58 +227,117 @@ func (p *peerMiniBlockSyncer) computeMissingPeerBlocks(metaBlock data.HeaderHand continue } - p.mapAllPeerMiniblocks[string(mb.GetHash())] = nil + p.mapAllPeerMiniBlocks[string(mb.GetHash())] = nil 
mbObjectFound, ok := p.miniBlocksPool.Peek(mb.GetHash()) if !ok { - numMissingPeerMiniblocks++ + numMissingPeerMiniBlocks++ continue } mbFound, ok := mbObjectFound.(*block.MiniBlock) if !ok { - numMissingPeerMiniblocks++ + numMissingPeerMiniBlocks++ continue } - p.mapAllPeerMiniblocks[string(mb.GetHash())] = mbFound + p.mapAllPeerMiniBlocks[string(mb.GetHash())] = mbFound } - p.numMissingPeerMiniblocks = numMissingPeerMiniblocks + p.numMissingPeerMiniBlocks = numMissingPeerMiniBlocks p.mutMiniBlocksForBlock.Unlock() } -func (p *peerMiniBlockSyncer) retrieveMissingBlocks() ([][]byte, error) { +func (p *peerMiniBlockSyncer) computeMissingValidatorsInfo(body *block.Body) { + numMissingValidatorsInfo := uint32(0) + p.mutValidatorsInfoForBlock.Lock() + + for _, mb := range body.MiniBlocks { + if mb.Type != block.PeerBlock { + continue + } + + for _, txHash := range mb.TxHashes { + p.mapAllValidatorsInfo[string(txHash)] = nil + + mbObjectFound, ok := p.validatorsInfoPool.SearchFirstData(txHash) + if !ok { + numMissingValidatorsInfo++ + continue + } + + mbFound, ok := mbObjectFound.(*state.ShardValidatorInfo) + if !ok { + numMissingValidatorsInfo++ + continue + } + + p.mapAllValidatorsInfo[string(txHash)] = mbFound + } + } + + p.numMissingValidatorsInfo = numMissingValidatorsInfo + p.mutValidatorsInfoForBlock.Unlock() +} + +func (p *peerMiniBlockSyncer) retrieveMissingMiniBlocks() ([][]byte, error) { p.mutMiniBlocksForBlock.Lock() - missingMiniblocks := make([][]byte, 0) - for mbHash, mb := range p.mapAllPeerMiniblocks { + missingMiniBlocks := make([][]byte, 0) + for mbHash, mb := range p.mapAllPeerMiniBlocks { if mb == nil { - missingMiniblocks = append(missingMiniblocks, []byte(mbHash)) + missingMiniBlocks = append(missingMiniBlocks, []byte(mbHash)) } } - p.numMissingPeerMiniblocks = uint32(len(missingMiniblocks)) + p.numMissingPeerMiniBlocks = uint32(len(missingMiniBlocks)) p.mutMiniBlocksForBlock.Unlock() - if len(missingMiniblocks) == 0 { + if len(missingMiniBlocks) 
== 0 { + return nil, nil + } + + go p.requestHandler.RequestMiniBlocks(core.MetachainShardId, missingMiniBlocks) + + select { + case <-p.chRcvAllMiniBlocks: return nil, nil + case <-time.After(waitTime): + return p.getAllMissingPeerMiniBlocksHashes(), process.ErrTimeIsOut } +} - go p.requestHandler.RequestMiniBlocks(core.MetachainShardId, missingMiniblocks) +func (p *peerMiniBlockSyncer) retrieveMissingValidatorsInfo() ([][]byte, error) { + p.mutValidatorsInfoForBlock.Lock() + missingValidatorsInfo := make([][]byte, 0) + for validatorInfoHash, validatorInfo := range p.mapAllValidatorsInfo { + if validatorInfo == nil { + missingValidatorsInfo = append(missingValidatorsInfo, []byte(validatorInfoHash)) + } + } + p.numMissingValidatorsInfo = uint32(len(missingValidatorsInfo)) + p.mutValidatorsInfoForBlock.Unlock() + + if len(missingValidatorsInfo) == 0 { + return nil, nil + } + + for index := range missingValidatorsInfo { + go p.requestHandler.RequestValidatorInfo(missingValidatorsInfo[index]) + } select { - case <-p.chRcvAllMiniblocks: + case <-p.chRcvAllValidatorsInfo: return nil, nil case <-time.After(waitTime): - return p.getAllMissingPeerMiniblocksHashes(), process.ErrTimeIsOut + return p.getAllMissingValidatorsInfoHashes(), process.ErrTimeIsOut } } -func (p *peerMiniBlockSyncer) getAllMissingPeerMiniblocksHashes() [][]byte { +func (p *peerMiniBlockSyncer) getAllMissingPeerMiniBlocksHashes() [][]byte { p.mutMiniBlocksForBlock.RLock() defer p.mutMiniBlocksForBlock.RUnlock() missingPeerMiniBlocksHashes := make([][]byte, 0) - for hash, mb := range p.mapAllPeerMiniblocks { + for hash, mb := range p.mapAllPeerMiniBlocks { if mb == nil { missingPeerMiniBlocksHashes = append(missingPeerMiniBlocksHashes, []byte(hash)) } @@ -197,6 +346,20 @@ func (p *peerMiniBlockSyncer) getAllMissingPeerMiniblocksHashes() [][]byte { return missingPeerMiniBlocksHashes } +func (p *peerMiniBlockSyncer) getAllMissingValidatorsInfoHashes() [][]byte { + p.mutValidatorsInfoForBlock.RLock() + 
defer p.mutValidatorsInfoForBlock.RUnlock() + + missingValidatorsInfoHashes := make([][]byte, 0) + for validatorInfoHash, validatorInfo := range p.mapAllValidatorsInfo { + if validatorInfo == nil { + missingValidatorsInfoHashes = append(missingValidatorsInfoHashes, []byte(validatorInfoHash)) + } + } + + return missingValidatorsInfoHashes +} + // IsInterfaceNil returns true if underlying object is nil func (p *peerMiniBlockSyncer) IsInterfaceNil() bool { return p == nil diff --git a/epochStart/shardchain/peerMiniBlocksSyncer_test.go b/epochStart/shardchain/peerMiniBlocksSyncer_test.go index a72b794ab18..8c644a83bf8 100644 --- a/epochStart/shardchain/peerMiniBlocksSyncer_test.go +++ b/epochStart/shardchain/peerMiniBlocksSyncer_test.go @@ -18,8 +18,9 @@ import ( func createDefaultArguments() ArgPeerMiniBlockSyncer { defaultArgs := ArgPeerMiniBlockSyncer{ - MiniBlocksPool: testscommon.NewCacherStub(), - Requesthandler: &testscommon.RequestHandlerStub{}, + MiniBlocksPool: testscommon.NewCacherStub(), + ValidatorsInfoPool: testscommon.NewShardedDataStub(), + RequestHandler: &testscommon.RequestHandlerStub{}, } return defaultArgs @@ -36,11 +37,22 @@ func TestNewValidatorInfoProcessor_NilMiniBlocksPoolErr(t *testing.T) { require.Equal(t, epochStart.ErrNilMiniBlockPool, err) } +func TestNewValidatorInfoProcessor_NilValidatorsInfoPoolShouldErr(t *testing.T) { + t.Parallel() + + args := createDefaultArguments() + args.ValidatorsInfoPool = nil + syncer, err := NewPeerMiniBlockSyncer(args) + + require.Nil(t, syncer) + require.Equal(t, epochStart.ErrNilValidatorsInfoPool, err) +} + func TestNewValidatorInfoProcessor_NilRequestHandlerShouldErr(t *testing.T) { t.Parallel() args := createDefaultArguments() - args.Requesthandler = nil + args.RequestHandler = nil syncer, err := NewPeerMiniBlockSyncer(args) require.Nil(t, syncer) @@ -253,7 +265,7 @@ func TestValidatorInfoProcessor_ProcesStartOfEpochWithMissinPeerMiniblocksShould }, } - args.Requesthandler = 
&testscommon.RequestHandlerStub{ + args.RequestHandler = &testscommon.RequestHandlerStub{ RequestMiniBlocksHandlerCalled: func(destShardID uint32, miniblockHashes [][]byte) { if destShardID == core.MetachainShardId && bytes.Equal(miniblockHashes[0], peerMiniBlockHash) { @@ -317,7 +329,7 @@ func TestValidatorInfoProcessor_ProcesStartOfEpochWithMissinPeerMiniblocksTimeou }, } - args.Requesthandler = &testscommon.RequestHandlerStub{ + args.RequestHandler = &testscommon.RequestHandlerStub{ RequestMiniBlocksHandlerCalled: func(destShardID uint32, miniblockHashes [][]byte) { if destShardID == core.MetachainShardId && bytes.Equal(miniblockHashes[0], peerMiniBlockHash) { diff --git a/epochStart/shardchain/trigger.go b/epochStart/shardchain/trigger.go index 18b9bfe6dfc..bb1e9bee313 100644 --- a/epochStart/shardchain/trigger.go +++ b/epochStart/shardchain/trigger.go @@ -35,7 +35,7 @@ var _ process.EpochStartTriggerHandler = (*trigger)(nil) var _ process.EpochBootstrapper = (*trigger)(nil) var _ closing.Closer = (*trigger)(nil) -// sleepTime defines the time in milliseconds between each iteration made in requestMissingMiniblocks method +// sleepTime defines the time in milliseconds between each iteration made in requestMissingMiniBlocks method const sleepTime = 1 * time.Second // ArgsShardEpochStartTrigger struct { defines the arguments needed for new start of epoch trigger @@ -76,13 +76,14 @@ type trigger struct { mapEpochStartHdrs map[string]data.HeaderHandler mapFinalizedEpochs map[uint32]string - headersPool dataRetriever.HeadersPool - miniBlocksPool storage.Cacher - validatorInfoPool epochStart.ValidatorInfoCacher - shardHdrStorage storage.Storer - metaHdrStorage storage.Storer - triggerStorage storage.Storer - metaNonceHdrStorage storage.Storer + headersPool dataRetriever.HeadersPool + miniBlocksPool storage.Cacher + validatorInfoPool dataRetriever.ShardedDataCacherNotifier + currentEpochValidatorInfoPool epochStart.ValidatorInfoCacher + shardHdrStorage storage.Storer + 
metaHdrStorage storage.Storer + triggerStorage storage.Storer + metaNonceHdrStorage storage.Storer uint64Converter typeConverters.Uint64ByteSliceConverter @@ -104,9 +105,11 @@ type trigger struct { appStatusHandler core.AppStatusHandler - mapMissingMiniblocks map[string]uint32 - mutMissingMiniblocks sync.RWMutex - cancelFunc func() + mapMissingMiniBlocks map[string]uint32 + mapMissingValidatorsInfo map[string]uint32 + mutMissingMiniBlocks sync.RWMutex + mutMissingValidatorsInfo sync.RWMutex + cancelFunc func() } type metaInfo struct { @@ -165,8 +168,11 @@ func NewEpochStartTrigger(args *ArgsShardEpochStartTrigger) (*trigger, error) { if check.IfNil(args.DataPool.MiniBlocks()) { return nil, epochStart.ErrNilMiniBlockPool } + if check.IfNil(args.DataPool.ValidatorsInfo()) { + return nil, epochStart.ErrNilValidatorsInfoPool + } if check.IfNil(args.DataPool.CurrentEpochValidatorInfo()) { - return nil, epochStart.ErrNilValidatorInfo + return nil, epochStart.ErrNilCurrentEpochValidatorsInfoPool } if check.IfNil(args.PeerMiniBlocksSyncer) { return nil, epochStart.ErrNilValidatorInfoProcessor @@ -207,40 +213,41 @@ func NewEpochStartTrigger(args *ArgsShardEpochStartTrigger) (*trigger, error) { trigggerStateKey := common.TriggerRegistryInitialKeyPrefix + fmt.Sprintf("%d", args.Epoch) t := &trigger{ - triggerStateKey: []byte(trigggerStateKey), - epoch: args.Epoch, - metaEpoch: args.Epoch, - currentRoundIndex: 0, - epochStartRound: 0, - epochFinalityAttestingRound: 0, - isEpochStart: false, - validity: args.Validity, - finality: args.Finality, - newEpochHdrReceived: false, - mutTrigger: sync.RWMutex{}, - mapHashHdr: make(map[string]data.HeaderHandler), - mapNonceHashes: make(map[uint64][]string), - mapEpochStartHdrs: make(map[string]data.HeaderHandler), - mapFinalizedEpochs: make(map[uint32]string), - headersPool: args.DataPool.Headers(), - miniBlocksPool: args.DataPool.MiniBlocks(), - validatorInfoPool: args.DataPool.CurrentEpochValidatorInfo(), - metaHdrStorage: 
metaHdrStorage, - shardHdrStorage: shardHdrStorage, - triggerStorage: triggerStorage, - metaNonceHdrStorage: metaHdrNoncesStorage, - uint64Converter: args.Uint64Converter, - marshaller: args.Marshalizer, - hasher: args.Hasher, - headerValidator: args.HeaderValidator, - requestHandler: args.RequestHandler, - epochMetaBlockHash: nil, - epochStartNotifier: args.EpochStartNotifier, - epochStartMeta: &block.MetaBlock{}, - epochStartShardHeader: &block.Header{}, - peerMiniBlocksSyncer: args.PeerMiniBlocksSyncer, - appStatusHandler: args.AppStatusHandler, - roundHandler: args.RoundHandler, + triggerStateKey: []byte(trigggerStateKey), + epoch: args.Epoch, + metaEpoch: args.Epoch, + currentRoundIndex: 0, + epochStartRound: 0, + epochFinalityAttestingRound: 0, + isEpochStart: false, + validity: args.Validity, + finality: args.Finality, + newEpochHdrReceived: false, + mutTrigger: sync.RWMutex{}, + mapHashHdr: make(map[string]data.HeaderHandler), + mapNonceHashes: make(map[uint64][]string), + mapEpochStartHdrs: make(map[string]data.HeaderHandler), + mapFinalizedEpochs: make(map[uint32]string), + headersPool: args.DataPool.Headers(), + miniBlocksPool: args.DataPool.MiniBlocks(), + validatorInfoPool: args.DataPool.ValidatorsInfo(), + currentEpochValidatorInfoPool: args.DataPool.CurrentEpochValidatorInfo(), + metaHdrStorage: metaHdrStorage, + shardHdrStorage: shardHdrStorage, + triggerStorage: triggerStorage, + metaNonceHdrStorage: metaHdrNoncesStorage, + uint64Converter: args.Uint64Converter, + marshaller: args.Marshalizer, + hasher: args.Hasher, + headerValidator: args.HeaderValidator, + requestHandler: args.RequestHandler, + epochMetaBlockHash: nil, + epochStartNotifier: args.EpochStartNotifier, + epochStartMeta: &block.MetaBlock{}, + epochStartShardHeader: &block.Header{}, + peerMiniBlocksSyncer: args.PeerMiniBlocksSyncer, + appStatusHandler: args.AppStatusHandler, + roundHandler: args.RoundHandler, } t.headersPool.RegisterHandler(t.receivedMetaBlock) @@ -250,27 +257,75 @@ 
func NewEpochStartTrigger(args *ArgsShardEpochStartTrigger) (*trigger, error) { return nil, err } - t.mapMissingMiniblocks = make(map[string]uint32) + t.mapMissingMiniBlocks = make(map[string]uint32) + t.mapMissingValidatorsInfo = make(map[string]uint32) var ctx context.Context ctx, t.cancelFunc = context.WithCancel(context.Background()) - go t.requestMissingMiniblocks(ctx) + go t.requestMissingMiniBlocks(ctx) + go t.requestMissingValidatorsInfo(ctx) return t, nil } -func (t *trigger) clearMissingMiniblocksMap(epoch uint32) { - t.mutMissingMiniblocks.Lock() - defer t.mutMissingMiniblocks.Unlock() +func (t *trigger) clearMissingMiniBlocksMap(epoch uint32) { + t.mutMissingMiniBlocks.Lock() + defer t.mutMissingMiniBlocks.Unlock() - for hash, epochOfMissingMb := range t.mapMissingMiniblocks { + for hash, epochOfMissingMb := range t.mapMissingMiniBlocks { if epochOfMissingMb <= epoch { - delete(t.mapMissingMiniblocks, hash) + delete(t.mapMissingMiniBlocks, hash) + } + } +} + +func (t *trigger) clearMissingValidatorsInfoMap(epoch uint32) { + t.mutMissingValidatorsInfo.Lock() + defer t.mutMissingValidatorsInfo.Unlock() + + for hash, epochOfMissingValidatorInfo := range t.mapMissingValidatorsInfo { + if epochOfMissingValidatorInfo <= epoch { + delete(t.mapMissingValidatorsInfo, hash) + } + } +} + +func (t *trigger) requestMissingMiniBlocks(ctx context.Context) { + for { + select { + case <-ctx.Done(): + log.Debug("trigger's go routine is stopping...") + return + case <-time.After(sleepTime): + } + + t.mutMissingMiniBlocks.RLock() + if len(t.mapMissingMiniBlocks) == 0 { + t.mutMissingMiniBlocks.RUnlock() + continue + } + + missingMiniBlocks := make([][]byte, 0, len(t.mapMissingMiniBlocks)) + for hash := range t.mapMissingMiniBlocks { + missingMiniBlocks = append(missingMiniBlocks, []byte(hash)) + log.Debug("trigger.requestMissingMiniBlocks", "hash", []byte(hash)) + } + t.mutMissingMiniBlocks.RUnlock() + + go t.requestHandler.RequestMiniBlocks(core.MetachainShardId, 
missingMiniBlocks) + + select { + case <-ctx.Done(): + log.Debug("trigger's go routine is stopping...") + return + case <-time.After(waitTime): } + + t.updateMissingMiniBlocks() } } -func (t *trigger) requestMissingMiniblocks(ctx context.Context) { +func (t *trigger) requestMissingValidatorsInfo(ctx context.Context) { for { select { case <-ctx.Done(): @@ -279,20 +334,22 @@ func (t *trigger) requestMissingMiniblocks(ctx context.Context) { case <-time.After(sleepTime): } - t.mutMissingMiniblocks.RLock() - if len(t.mapMissingMiniblocks) == 0 { - t.mutMissingMiniblocks.RUnlock() + t.mutMissingValidatorsInfo.RLock() + if len(t.mapMissingValidatorsInfo) == 0 { + t.mutMissingValidatorsInfo.RUnlock() continue } - missingMiniblocks := make([][]byte, 0, len(t.mapMissingMiniblocks)) - for hash := range t.mapMissingMiniblocks { - missingMiniblocks = append(missingMiniblocks, []byte(hash)) - log.Debug("trigger.requestMissingMiniblocks", "hash", []byte(hash)) + missingValidatorsInfo := make([][]byte, 0, len(t.mapMissingValidatorsInfo)) + for hash := range t.mapMissingValidatorsInfo { + missingValidatorsInfo = append(missingValidatorsInfo, []byte(hash)) + log.Debug("trigger.requestMissingValidatorsInfo", "hash", []byte(hash)) } - t.mutMissingMiniblocks.RUnlock() + t.mutMissingValidatorsInfo.RUnlock() - go t.requestHandler.RequestMiniBlocks(core.MetachainShardId, missingMiniblocks) + for index := range missingValidatorsInfo { + go t.requestHandler.RequestValidatorInfo(missingValidatorsInfo[index]) + } select { case <-ctx.Done(): @@ -301,22 +358,40 @@ func (t *trigger) requestMissingMiniblocks(ctx context.Context) { case <-time.After(waitTime): } - t.updateMissingMiniblocks() + t.updateMissingValidatorsInfo() } } -func (t *trigger) updateMissingMiniblocks() { - t.mutMissingMiniblocks.Lock() - for hash := range t.mapMissingMiniblocks { +func (t *trigger) updateMissingMiniBlocks() { + t.mutMissingMiniBlocks.Lock() + for hash := range t.mapMissingMiniBlocks { if 
t.miniBlocksPool.Has([]byte(hash)) { - delete(t.mapMissingMiniblocks, hash) + delete(t.mapMissingMiniBlocks, hash) } } - numMissingMiniblocks := len(t.mapMissingMiniblocks) - t.mutMissingMiniblocks.Unlock() + numMissingMiniBlocks := len(t.mapMissingMiniBlocks) + t.mutMissingMiniBlocks.Unlock() - if numMissingMiniblocks == 0 { - log.Debug("trigger.updateMissingMiniblocks -> updateTriggerFromMeta") + if numMissingMiniBlocks == 0 { + log.Debug("trigger.updateMissingMiniBlocks -> updateTriggerFromMeta") + t.mutTrigger.Lock() + t.updateTriggerFromMeta() + t.mutTrigger.Unlock() + } +} + +func (t *trigger) updateMissingValidatorsInfo() { + t.mutMissingValidatorsInfo.Lock() + for hash := range t.mapMissingValidatorsInfo { + if _, ok := t.validatorInfoPool.SearchFirstData([]byte(hash)); ok { + delete(t.mapMissingValidatorsInfo, hash) + } + } + numMissingValidatorsInfo := len(t.mapMissingValidatorsInfo) + t.mutMissingValidatorsInfo.Unlock() + + if numMissingValidatorsInfo == 0 { + log.Debug("trigger.updateMissingValidatorsInfo -> updateTriggerFromMeta") t.mutTrigger.Lock() t.updateTriggerFromMeta() t.mutTrigger.Unlock() @@ -556,7 +631,7 @@ func (t *trigger) updateTriggerFromMeta() { log.Debug(display.Headline(msg, "", "#")) log.Debug("trigger.updateTriggerFromMeta", "isEpochStart", t.isEpochStart) logger.SetCorrelationEpoch(t.metaEpoch) - t.clearMissingMiniblocksMap(t.metaEpoch) + t.clearMissingMiniBlocksMap(t.metaEpoch) } // save all final-valid epoch start blocks @@ -652,26 +727,47 @@ func (t *trigger) checkIfTriggerCanBeActivated(hash string, metaHdr data.HeaderH return false, 0 } - missingMiniblocksHashes, blockBody, err := t.peerMiniBlocksSyncer.SyncMiniBlocks(metaHdr) + missingMiniBlocksHashes, blockBody, err := t.peerMiniBlocksSyncer.SyncMiniBlocks(metaHdr) + if err != nil { + t.addMissingMiniBlocks(metaHdr.GetEpoch(), missingMiniBlocksHashes) + log.Warn("checkIfTriggerCanBeActivated.SyncMiniBlocks", "num missing mini blocks", len(missingMiniBlocksHashes), "error", 
err) + return false, 0 + } + + missingValidatorsInfoHashes, validatorsInfo, err := t.peerMiniBlocksSyncer.SyncValidatorsInfo(blockBody) if err != nil { - t.addMissingMiniblocks(metaHdr.GetEpoch(), missingMiniblocksHashes) - log.Warn("processMetablock failed", "error", err) + t.addMissingValidatorsInfo(metaHdr.GetEpoch(), missingValidatorsInfoHashes) + log.Warn("checkIfTriggerCanBeActivated.SyncValidatorsInfo", "num missing validators info", len(missingValidatorsInfoHashes), "error", err) return false, 0 } - t.epochStartNotifier.NotifyAllPrepare(metaHdr, blockBody, t.validatorInfoPool) + for validatorInfoHash, validatorInfo := range validatorsInfo { + t.currentEpochValidatorInfoPool.AddValidatorInfo([]byte(validatorInfoHash), validatorInfo) + } + + t.epochStartNotifier.NotifyAllPrepare(metaHdr, blockBody, t.currentEpochValidatorInfoPool) isMetaHdrFinal, finalityAttestingRound := t.isMetaBlockFinal(hash, metaHdr) return isMetaHdrFinal, finalityAttestingRound } -func (t *trigger) addMissingMiniblocks(epoch uint32, missingMiniblocksHashes [][]byte) { - t.mutMissingMiniblocks.Lock() - defer t.mutMissingMiniblocks.Unlock() +func (t *trigger) addMissingMiniBlocks(epoch uint32, missingMiniBlocksHashes [][]byte) { + t.mutMissingMiniBlocks.Lock() + defer t.mutMissingMiniBlocks.Unlock() + + for _, hash := range missingMiniBlocksHashes { + t.mapMissingMiniBlocks[string(hash)] = epoch + log.Debug("trigger.addMissingMiniBlocks", "epoch", epoch, "hash", hash) + } +} + +func (t *trigger) addMissingValidatorsInfo(epoch uint32, missingValidatorsInfoHashes [][]byte) { + t.mutMissingValidatorsInfo.Lock() + defer t.mutMissingValidatorsInfo.Unlock() - for _, hash := range missingMiniblocksHashes { - t.mapMissingMiniblocks[string(hash)] = epoch - log.Debug("trigger.addMissingMiniblocks", "epoch", epoch, "hash", hash) + for _, hash := range missingValidatorsInfoHashes { + t.mapMissingValidatorsInfo[string(hash)] = epoch + log.Debug("trigger.addMissingValidatorsInfo", "epoch", epoch, 
"hash", hash) } } diff --git a/epochStart/shardchain/triggerRegistry_test.go b/epochStart/shardchain/triggerRegistry_test.go index 90c36dd5d40..2563067fe73 100644 --- a/epochStart/shardchain/triggerRegistry_test.go +++ b/epochStart/shardchain/triggerRegistry_test.go @@ -47,8 +47,10 @@ func cloneTrigger(t *trigger) *trigger { rt.peerMiniBlocksSyncer = t.peerMiniBlocksSyncer rt.appStatusHandler = t.appStatusHandler rt.miniBlocksPool = t.miniBlocksPool + rt.currentEpochValidatorInfoPool = t.currentEpochValidatorInfoPool rt.validatorInfoPool = t.validatorInfoPool - rt.mapMissingMiniblocks = t.mapMissingMiniblocks + rt.mapMissingValidatorsInfo = t.mapMissingValidatorsInfo + rt.mapMissingMiniBlocks = t.mapMissingMiniBlocks rt.mapFinalizedEpochs = t.mapFinalizedEpochs rt.roundHandler = t.roundHandler return rt diff --git a/factory/processComponents.go b/factory/processComponents.go index 1e469ce82c7..7425b9f3233 100644 --- a/factory/processComponents.go +++ b/factory/processComponents.go @@ -667,8 +667,9 @@ func (pcf *processComponentsFactory) newEpochStartTrigger(requestHandler epochSt } argsPeerMiniBlockSyncer := shardchain.ArgPeerMiniBlockSyncer{ - MiniBlocksPool: pcf.data.Datapool().MiniBlocks(), - Requesthandler: requestHandler, + MiniBlocksPool: pcf.data.Datapool().MiniBlocks(), + ValidatorsInfoPool: pcf.data.Datapool().ValidatorsInfo(), + RequestHandler: requestHandler, } peerMiniBlockSyncer, err := shardchain.NewPeerMiniBlockSyncer(argsPeerMiniBlockSyncer) diff --git a/integrationTests/testProcessorNode.go b/integrationTests/testProcessorNode.go index 2747f11aa6d..16337abdcbf 100644 --- a/integrationTests/testProcessorNode.go +++ b/integrationTests/testProcessorNode.go @@ -382,7 +382,7 @@ func newBaseTestProcessorNode( return numNodes }, } - nodesCoordinator := &shardingMocks.NodesCoordinatorStub{ + nodesCoordinatorStub := &shardingMocks.NodesCoordinatorStub{ ComputeValidatorsGroupCalled: func(randomness []byte, round uint64, shardId uint32, epoch uint32) 
(validators []nodesCoordinator.Validator, err error) { v, _ := nodesCoordinator.NewValidator(pksBytes[shardId], 1, defaultChancesSelection) return []nodesCoordinator.Validator{v}, nil @@ -416,7 +416,7 @@ func newBaseTestProcessorNode( tpn := &TestProcessorNode{ ShardCoordinator: shardCoordinator, Messenger: messenger, - NodesCoordinator: nodesCoordinator, + NodesCoordinator: nodesCoordinatorStub, HeaderSigVerifier: &mock.HeaderSigVerifierStub{}, HeaderIntegrityVerifier: CreateHeaderIntegrityVerifier(), ChainID: ChainID, @@ -596,7 +596,7 @@ func NewTestProcessorNodeWithCustomDataPool(maxShards uint32, nodeShardId uint32 messenger := CreateMessengerWithNoDiscoveryAndPeersRatingHandler(peersRatingHandler) _ = messenger.SetThresholdMinConnectedPeers(minConnectedPeers) - nodesCoordinator := &shardingMocks.NodesCoordinatorMock{} + nodesCoordinatorMock := &shardingMocks.NodesCoordinatorMock{} kg := &mock.KeyGenMock{} sk, pk := kg.GeneratePair() @@ -604,7 +604,7 @@ func NewTestProcessorNodeWithCustomDataPool(maxShards uint32, nodeShardId uint32 tpn := &TestProcessorNode{ ShardCoordinator: shardCoordinator, Messenger: messenger, - NodesCoordinator: nodesCoordinator, + NodesCoordinator: nodesCoordinatorMock, HeaderSigVerifier: &mock.HeaderSigVerifierStub{}, HeaderIntegrityVerifier: CreateHeaderIntegrityVerifier(), ChainID: ChainID, @@ -1267,8 +1267,9 @@ func (tpn *TestProcessorNode) initInterceptors() { } } else { argsPeerMiniBlocksSyncer := shardchain.ArgPeerMiniBlockSyncer{ - MiniBlocksPool: tpn.DataPool.MiniBlocks(), - Requesthandler: tpn.RequestHandler, + MiniBlocksPool: tpn.DataPool.MiniBlocks(), + ValidatorsInfoPool: tpn.DataPool.ValidatorsInfo(), + RequestHandler: tpn.RequestHandler, } peerMiniBlockSyncer, _ := shardchain.NewPeerMiniBlockSyncer(argsPeerMiniBlocksSyncer) argsShardEpochStart := &shardchain.ArgsShardEpochStartTrigger{ @@ -2182,8 +2183,9 @@ func (tpn *TestProcessorNode) initBlockProcessor(stateCheckpointModulus uint) { } else { if 
check.IfNil(tpn.EpochStartTrigger) { argsPeerMiniBlocksSyncer := shardchain.ArgPeerMiniBlockSyncer{ - MiniBlocksPool: tpn.DataPool.MiniBlocks(), - Requesthandler: tpn.RequestHandler, + MiniBlocksPool: tpn.DataPool.MiniBlocks(), + ValidatorsInfoPool: tpn.DataPool.ValidatorsInfo(), + RequestHandler: tpn.RequestHandler, } peerMiniBlocksSyncer, _ := shardchain.NewPeerMiniBlockSyncer(argsPeerMiniBlocksSyncer) argsShardEpochStart := &shardchain.ArgsShardEpochStartTrigger{ @@ -2501,9 +2503,9 @@ func (tpn *TestProcessorNode) ProposeBlock(round uint64, nonce uint64) (data.Bod } for _, mb := range shardBlockBody.MiniBlocks { - //if mb.Type == dataBlock.PeerBlock { - // continue - //} + if mb.Type == dataBlock.PeerBlock { + continue + } for _, hash := range mb.TxHashes { copiedHash := make([]byte, len(hash)) copy(copiedHash, hash) diff --git a/process/block/preprocess/validatorInfoPreProcessor.go b/process/block/preprocess/validatorInfoPreProcessor.go index 8da54753cae..11e36ec4763 100644 --- a/process/block/preprocess/validatorInfoPreProcessor.go +++ b/process/block/preprocess/validatorInfoPreProcessor.go @@ -166,13 +166,13 @@ func (vip *validatorInfoPreprocessor) restoreValidatorsInfo(miniBlock *block.Min } for validatorInfoHash, validatorInfoBuff := range validatorsInfoBuff { - shardValidatorInfo := state.ShardValidatorInfo{} - err = vip.marshalizer.Unmarshal(&shardValidatorInfo, validatorInfoBuff) + shardValidatorInfo := &state.ShardValidatorInfo{} + err = vip.marshalizer.Unmarshal(shardValidatorInfo, validatorInfoBuff) if err != nil { return err } - vip.validatorsInfoPool.AddData([]byte(validatorInfoHash), &shardValidatorInfo, shardValidatorInfo.Size(), strCache) + vip.validatorsInfoPool.AddData([]byte(validatorInfoHash), shardValidatorInfo, shardValidatorInfo.Size(), strCache) } return nil @@ -212,18 +212,19 @@ func (vip *validatorInfoPreprocessor) SaveTxsToStorage(body *block.Body) error { // receivedValidatorInfoTransaction is a callback function called when a new 
validator info transaction // is added in the validator info transactions pool -func (vip *validatorInfoPreprocessor) receivedValidatorInfoTransaction(key []byte, value interface{}) { - tx, ok := value.(data.TransactionHandler) +func (vip *validatorInfoPreprocessor) receivedValidatorInfoTransaction(_ []byte, value interface{}) { + validatorInfo, ok := value.(*state.ShardValidatorInfo) if !ok { log.Warn("validatorInfoPreprocessor.receivedValidatorInfoTransaction", "error", process.ErrWrongTypeAssertion) return } - receivedAllMissing := vip.baseReceivedTransaction(key, tx, &vip.validatorsInfoForBlock) - - if receivedAllMissing { - vip.chReceivedAllValidatorsInfo <- true - } + log.Debug("validatorInfoPreprocessor.receivedValidatorInfoTransaction", "pk", validatorInfo.PublicKey) + //receivedAllMissing := vip.baseReceivedTransaction(key, tx, &vip.validatorsInfoForBlock) + // + //if receivedAllMissing { + // vip.chReceivedAllValidatorsInfo <- true + //} } // CreateBlockStarted cleans the local cache map for processed/created validators info at this round @@ -242,7 +243,8 @@ func (vip *validatorInfoPreprocessor) RequestBlockTransactions(body *block.Body) return 0 } - return vip.computeExistingAndRequestMissingValidatorsInfoForShards(body) + return 0 + //return vip.computeExistingAndRequestMissingValidatorsInfoForShards(body) } // computeExistingAndRequestMissingValidatorsInfoForShards calculates what validators info are available and requests @@ -282,12 +284,13 @@ func (vip *validatorInfoPreprocessor) RequestTransactionsForMiniBlock(miniBlock return 0 } - missingValidatorsInfoHashesForMiniBlock := vip.computeMissingValidatorsInfoHashesForMiniBlock(miniBlock) - if len(missingValidatorsInfoHashesForMiniBlock) > 0 { - vip.onRequestValidatorsInfo(missingValidatorsInfoHashesForMiniBlock) - } - - return len(missingValidatorsInfoHashesForMiniBlock) + return 0 + //missingValidatorsInfoHashesForMiniBlock := vip.computeMissingValidatorsInfoHashesForMiniBlock(miniBlock) + //if 
len(missingValidatorsInfoHashesForMiniBlock) > 0 { + // vip.onRequestValidatorsInfo(missingValidatorsInfoHashesForMiniBlock) + //} + // + //return len(missingValidatorsInfoHashesForMiniBlock) } // computeMissingValidatorsInfoHashesForMiniBlock computes missing validators info hashes for a certain miniblock @@ -299,7 +302,7 @@ func (vip *validatorInfoPreprocessor) computeMissingValidatorsInfoHashesForMiniB } for _, txHash := range miniBlock.TxHashes { - tx, _ := process.GetTransactionHandlerFromPool( + validatorInfo, _ := process.GetValidatorInfoFromPool( miniBlock.SenderShardID, miniBlock.ReceiverShardID, txHash, @@ -307,7 +310,7 @@ func (vip *validatorInfoPreprocessor) computeMissingValidatorsInfoHashesForMiniB false, ) - if tx == nil { + if validatorInfo == nil { missingValidatorsInfoHashes = append(missingValidatorsInfoHashes, txHash) } } @@ -341,25 +344,27 @@ func (vip *validatorInfoPreprocessor) ProcessMiniBlock(miniBlock *block.MiniBloc } // CreateMarshalledData marshals validators info hashes and saves them into a new structure -func (vip *validatorInfoPreprocessor) CreateMarshalledData(txHashes [][]byte) ([][]byte, error) { - marshalledValidatorsInfo, err := vip.createMarshalledData(txHashes, &vip.validatorsInfoForBlock) - if err != nil { - return nil, err - } - - return marshalledValidatorsInfo, nil +func (vip *validatorInfoPreprocessor) CreateMarshalledData(_ [][]byte) ([][]byte, error) { + return make([][]byte, 0), nil + //marshalledValidatorsInfo, err := vip.createMarshalledData(txHashes, &vip.validatorsInfoForBlock) + //if err != nil { + // return nil, err + //} + // + //return marshalledValidatorsInfo, nil } // GetAllCurrentUsedTxs returns all the validators info used at current creation / processing func (vip *validatorInfoPreprocessor) GetAllCurrentUsedTxs() map[string]data.TransactionHandler { - vip.validatorsInfoForBlock.mutTxsForBlock.RLock() - validatorsInfoPool := make(map[string]data.TransactionHandler, 
len(vip.validatorsInfoForBlock.txHashAndInfo)) - for txHash, txData := range vip.validatorsInfoForBlock.txHashAndInfo { - validatorsInfoPool[txHash] = txData.tx - } - vip.validatorsInfoForBlock.mutTxsForBlock.RUnlock() - - return validatorsInfoPool + return make(map[string]data.TransactionHandler) + //vip.validatorsInfoForBlock.mutTxsForBlock.RLock() + //validatorsInfoPool := make(map[string]data.TransactionHandler, len(vip.validatorsInfoForBlock.txHashAndInfo)) + //for txHash, txData := range vip.validatorsInfoForBlock.txHashAndInfo { + // validatorsInfoPool[txHash] = txData.tx + //} + //vip.validatorsInfoForBlock.mutTxsForBlock.RUnlock() + // + //return validatorsInfoPool } // AddTxsFromMiniBlocks does nothing diff --git a/process/common.go b/process/common.go index 42dd884b38d..d2cb744323c 100644 --- a/process/common.go +++ b/process/common.go @@ -409,6 +409,48 @@ func GetTransactionHandlerFromPool( return tx, nil } +// GetValidatorInfoFromPool gets the validator info from pool with a given sender/receiver shardId and txHash +func GetValidatorInfoFromPool( + senderShardID uint32, + destShardID uint32, + validatorInfoHash []byte, + shardedDataCacherNotifier dataRetriever.ShardedDataCacherNotifier, + searchFirst bool, +) (*state.ShardValidatorInfo, error) { + + if shardedDataCacherNotifier == nil { + return nil, ErrNilShardedDataCacherNotifier + } + + var val interface{} + ok := false + if searchFirst { + val, ok = shardedDataCacherNotifier.SearchFirstData(validatorInfoHash) + if !ok { + return nil, ErrValidatorInfoNotFound + } + } else { + strCache := ShardCacherIdentifier(senderShardID, destShardID) + txStore := shardedDataCacherNotifier.ShardDataStore(strCache) + if txStore == nil { + return nil, ErrNilStorage + } + + val, ok = txStore.Peek(validatorInfoHash) + } + + if !ok { + return nil, ErrValidatorInfoNotFound + } + + validatorInfo, ok := val.(*state.ShardValidatorInfo) + if !ok { + return nil, ErrInvalidValidatorInfoInPool + } + + return validatorInfo, nil 
+} + // GetTransactionHandlerFromStorage gets the transaction from storage with a given sender/receiver shardId and txHash func GetTransactionHandlerFromStorage( txHash []byte, diff --git a/process/errors.go b/process/errors.go index 3030a5ee6ed..651fd433b43 100644 --- a/process/errors.go +++ b/process/errors.go @@ -221,9 +221,15 @@ var ErrNilShardedDataCacherNotifier = errors.New("nil sharded data cacher notifi // ErrInvalidTxInPool signals an invalid transaction in the transactions pool var ErrInvalidTxInPool = errors.New("invalid transaction in the transactions pool") +// ErrInvalidValidatorInfoInPool signals an invalid validator info in the validators info pool +var ErrInvalidValidatorInfoInPool = errors.New("invalid validator info in the validators info pool") + // ErrTxNotFound signals that a transaction has not found var ErrTxNotFound = errors.New("transaction not found") +// ErrValidatorInfoNotFound signals that a validator info has not found +var ErrValidatorInfoNotFound = errors.New("validator info not found") + // ErrNilHeadersStorage signals that a nil header storage has been provided var ErrNilHeadersStorage = errors.New("nil headers storage") diff --git a/process/interceptors/processor/interface.go b/process/interceptors/processor/interface.go index 3419df9da3f..55d6fa88551 100644 --- a/process/interceptors/processor/interface.go +++ b/process/interceptors/processor/interface.go @@ -25,5 +25,5 @@ type ShardedPool interface { type interceptedValidatorInfo interface { Hash() []byte - ValidatorInfo() state.ValidatorInfo + ValidatorInfo() *state.ShardValidatorInfo } diff --git a/process/interface.go b/process/interface.go index 729d1d74c59..9d8982e63ab 100644 --- a/process/interface.go +++ b/process/interface.go @@ -981,7 +981,8 @@ type RatingsStepHandler interface { // ValidatorInfoSyncer defines the method needed for validatorInfoProcessing type ValidatorInfoSyncer interface { - SyncMiniBlocks(metaBlock data.HeaderHandler) ([][]byte, data.BodyHandler, 
error) + SyncMiniBlocks(headerHandler data.HeaderHandler) ([][]byte, data.BodyHandler, error) + SyncValidatorsInfo(bodyHandler data.BodyHandler) ([][]byte, map[string]*state.ShardValidatorInfo, error) IsInterfaceNil() bool } diff --git a/process/peer/interceptedValidatorInfo.go b/process/peer/interceptedValidatorInfo.go index 415fccf22af..4a4904d2f05 100644 --- a/process/peer/interceptedValidatorInfo.go +++ b/process/peer/interceptedValidatorInfo.go @@ -21,7 +21,7 @@ type ArgInterceptedValidatorInfo struct { // interceptedValidatorInfo is a wrapper over validatorInfo type interceptedValidatorInfo struct { - validatorInfo state.ValidatorInfo + validatorInfo *state.ShardValidatorInfo nodesCoordinator process.NodesCoordinator hash []byte } @@ -39,7 +39,7 @@ func NewInterceptedValidatorInfo(args ArgInterceptedValidatorInfo) (*intercepted } return &interceptedValidatorInfo{ - validatorInfo: *validatorInfo, + validatorInfo: validatorInfo, nodesCoordinator: args.NodesCoordinator, hash: args.Hasher.Compute(string(args.DataBuff)), }, nil @@ -62,8 +62,8 @@ func checkArgs(args ArgInterceptedValidatorInfo) error { return nil } -func createValidatorInfo(marshalizer marshal.Marshalizer, buff []byte) (*state.ValidatorInfo, error) { - validatorInfo := &state.ValidatorInfo{} +func createValidatorInfo(marshalizer marshal.Marshalizer, buff []byte) (*state.ShardValidatorInfo, error) { + validatorInfo := &state.ShardValidatorInfo{} err := marshalizer.Unmarshal(validatorInfo, buff) if err != nil { return nil, err @@ -83,10 +83,6 @@ func (ivi *interceptedValidatorInfo) CheckValidity() error { if err != nil { return err } - err = verifyPropertyLen(rewardAddressProperty, ivi.validatorInfo.RewardAddress, 0, minSizeInBytes, rewardAddressPropertyMaxPropertyBytesLen) - if err != nil { - return err - } // Check if the public key is a validator _, _, err = ivi.nodesCoordinator.GetValidatorWithPublicKey(ivi.validatorInfo.PublicKey) @@ -99,7 +95,7 @@ func (ivi *interceptedValidatorInfo) 
IsForCurrentShard() bool { } // ValidatorInfo returns the current validator info structure -func (ivi *interceptedValidatorInfo) ValidatorInfo() state.ValidatorInfo { +func (ivi *interceptedValidatorInfo) ValidatorInfo() *state.ShardValidatorInfo { return ivi.validatorInfo } @@ -120,13 +116,12 @@ func (ivi *interceptedValidatorInfo) Identifiers() [][]byte { // String returns the validator's info most important fields as string func (ivi *interceptedValidatorInfo) String() string { - return fmt.Sprintf("pk=%s, shard=%d, list=%s, index=%d, tempRating=%d, rating=%d", + return fmt.Sprintf("pk=%s, shard=%d, list=%s, index=%d, tempRating=%d", logger.DisplayByteSlice(ivi.validatorInfo.PublicKey), ivi.validatorInfo.ShardId, ivi.validatorInfo.List, ivi.validatorInfo.Index, ivi.validatorInfo.TempRating, - ivi.validatorInfo.Rating, ) } diff --git a/process/peer/interceptedValidatorInfo_test.go b/process/peer/interceptedValidatorInfo_test.go index db6d7f7299f..662ad7c691a 100644 --- a/process/peer/interceptedValidatorInfo_test.go +++ b/process/peer/interceptedValidatorInfo_test.go @@ -23,7 +23,7 @@ func createMockArgInterceptedValidatorInfo() ArgInterceptedValidatorInfo { Hasher: &hashingMocks.HasherMock{}, NodesCoordinator: &shardingMocks.NodesCoordinatorStub{}, } - args.DataBuff, _ = args.Marshalizer.Marshal(createMockValidatorInfo()) + args.DataBuff, _ = args.Marshalizer.Marshal(createMockShardValidatorInfo()) return args } @@ -104,9 +104,6 @@ func TestInterceptedValidatorInfo_CheckValidity(t *testing.T) { t.Run("listProperty too short", testInterceptedValidatorInfoPropertyLen(listProperty, false)) t.Run("listProperty too long", testInterceptedValidatorInfoPropertyLen(listProperty, true)) - t.Run("rewardAddressProperty too short", testInterceptedValidatorInfoPropertyLen(rewardAddressProperty, false)) - t.Run("rewardAddressProperty too long", testInterceptedValidatorInfoPropertyLen(rewardAddressProperty, true)) - t.Run("not validator should error", func(t *testing.T) { 
t.Parallel() @@ -158,8 +155,6 @@ func testInterceptedValidatorInfoPropertyLen(property string, tooLong bool) func ivi.validatorInfo.PublicKey = value case listProperty: ivi.validatorInfo.List = string(value) - case rewardAddressProperty: - ivi.validatorInfo.RewardAddress = value default: assert.True(t, false) } @@ -176,12 +171,12 @@ func TestInterceptedValidatorInfo_Getters(t *testing.T) { ivi, _ := NewInterceptedValidatorInfo(args) require.False(t, check.IfNil(ivi)) - validatorInfo := createMockValidatorInfo() + validatorInfo := createMockShardValidatorInfo() validatorInfoBuff, _ := args.Marshalizer.Marshal(validatorInfo) hash := args.Hasher.Compute(string(validatorInfoBuff)) assert.True(t, ivi.IsForCurrentShard()) - assert.Equal(t, *validatorInfo, ivi.ValidatorInfo()) + assert.Equal(t, validatorInfo, ivi.ValidatorInfo()) assert.Equal(t, hash, ivi.Hash()) assert.Equal(t, interceptedValidatorInfoType, ivi.Type()) @@ -195,5 +190,4 @@ func TestInterceptedValidatorInfo_Getters(t *testing.T) { assert.True(t, strings.Contains(str, fmt.Sprintf("list=%s", validatorInfo.List))) assert.True(t, strings.Contains(str, fmt.Sprintf("index=%d", validatorInfo.Index))) assert.True(t, strings.Contains(str, fmt.Sprintf("tempRating=%d", validatorInfo.TempRating))) - assert.True(t, strings.Contains(str, fmt.Sprintf("rating=%d", validatorInfo.Rating))) } diff --git a/process/peer/validatorsProvider_test.go b/process/peer/validatorsProvider_test.go index af1c814cf25..42a553770c8 100644 --- a/process/peer/validatorsProvider_test.go +++ b/process/peer/validatorsProvider_test.go @@ -645,6 +645,17 @@ func createMockValidatorInfo() *state.ValidatorInfo { return initialInfo } +func createMockShardValidatorInfo() *state.ShardValidatorInfo { + initialInfo := &state.ShardValidatorInfo{ + PublicKey: []byte("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + ShardId: 0, + List: "eligible", + Index: 1, + TempRating: 100, + } + return initialInfo +} 
+ func createDefaultValidatorsProviderArg() ArgValidatorsProvider { return ArgValidatorsProvider{ NodesCoordinator: &shardingMocks.NodesCoordinatorMock{}, diff --git a/testscommon/syncer/transactionsSyncHandlerMock.go b/testscommon/syncer/transactionsSyncHandlerMock.go index c044176703e..7995e7206e8 100644 --- a/testscommon/syncer/transactionsSyncHandlerMock.go +++ b/testscommon/syncer/transactionsSyncHandlerMock.go @@ -3,6 +3,7 @@ package syncer import ( "github.com/ElrondNetwork/elrond-go-core/data" "github.com/ElrondNetwork/elrond-go-core/data/block" + "github.com/ElrondNetwork/elrond-go/state" "golang.org/x/net/context" ) @@ -10,6 +11,8 @@ import ( type TransactionsSyncHandlerMock struct { SyncTransactionsForCalled func(miniBlocks map[string]*block.MiniBlock, epoch uint32, ctx context.Context) error GetTransactionsCalled func() (map[string]data.TransactionHandler, error) + GetValidatorsInfoCalled func() (map[string]*state.ShardValidatorInfo, error) + ClearFieldsCalled func() } // SyncTransactionsFor - @@ -28,6 +31,21 @@ func (et *TransactionsSyncHandlerMock) GetTransactions() (map[string]data.Transa return nil, nil } +// GetValidatorsInfo - +func (et *TransactionsSyncHandlerMock) GetValidatorsInfo() (map[string]*state.ShardValidatorInfo, error) { + if et.GetValidatorsInfoCalled != nil { + return et.GetValidatorsInfoCalled() + } + return nil, nil +} + +// ClearFields - +func (et *TransactionsSyncHandlerMock) ClearFields() { + if et.ClearFieldsCalled != nil { + et.ClearFieldsCalled() + } +} + // IsInterfaceNil - func (et *TransactionsSyncHandlerMock) IsInterfaceNil() bool { return et == nil diff --git a/update/factory/exportHandlerFactory.go b/update/factory/exportHandlerFactory.go index 16ca4bea643..aa9099e5dc6 100644 --- a/update/factory/exportHandlerFactory.go +++ b/update/factory/exportHandlerFactory.go @@ -275,8 +275,9 @@ func (e *exportHandlerFactory) Create() (update.ExportHandler, error) { } argsPeerMiniBlocksSyncer := shardchain.ArgPeerMiniBlockSyncer{ - 
MiniBlocksPool: e.dataPool.MiniBlocks(), - Requesthandler: e.requestHandler, + MiniBlocksPool: e.dataPool.MiniBlocks(), + ValidatorsInfoPool: e.dataPool.ValidatorsInfo(), + RequestHandler: e.requestHandler, } peerMiniBlocksSyncer, err := shardchain.NewPeerMiniBlockSyncer(argsPeerMiniBlocksSyncer) if err != nil { @@ -420,7 +421,7 @@ func (e *exportHandlerFactory) Create() (update.ExportHandler, error) { argsPendingTransactions := sync.ArgsNewTransactionsSyncer{ DataPools: e.dataPool, Storages: e.storageService, - Marshalizer: e.CoreComponents.InternalMarshalizer(), + Marshaller: e.CoreComponents.InternalMarshalizer(), RequestHandler: e.requestHandler, } epochStartTransactionsSyncer, err := sync.NewTransactionsSyncer(argsPendingTransactions) diff --git a/update/genesis/base.go b/update/genesis/base.go index 7fdd41b2c9c..9336554b3e7 100644 --- a/update/genesis/base.go +++ b/update/genesis/base.go @@ -25,6 +25,9 @@ const UnFinishedMetaBlocksIdentifier = "unFinishedMetaBlocks" // TransactionsIdentifier is the constant which defines the export/import identifier for transactions const TransactionsIdentifier = "transactions" +// ValidatorsInfoIdentifier is the constant which defines the export/import identifier for validators info +const ValidatorsInfoIdentifier = "validatorsInfo" + // MiniBlocksIdentifier is the constant which defines the export/import identifier for miniBlocks const MiniBlocksIdentifier = "miniBlocks" @@ -249,3 +252,8 @@ func CreateTransactionKey(key string, tx data.TransactionHandler) string { return "tx" + atSep + "ukw" + atSep + hex.EncodeToString([]byte(key)) } } + +// CreateValidatorInfoKey returns a validator info key +func CreateValidatorInfoKey(key string) string { + return "vi" + atSep + hex.EncodeToString([]byte(key)) +} diff --git a/update/genesis/export.go b/update/genesis/export.go index 900f2cd967f..ec3fde205eb 100644 --- a/update/genesis/export.go +++ b/update/genesis/export.go @@ -134,6 +134,11 @@ func (se *stateExport) ExportAll(epoch 
uint32) error { return err } + err = se.exportAllValidatorsInfo() + if err != nil { + return err + } + return nil } @@ -154,6 +159,23 @@ func (se *stateExport) exportAllTransactions() error { return se.hardforkStorer.FinishedIdentifier(TransactionsIdentifier) } +func (se *stateExport) exportAllValidatorsInfo() error { + toExportValidatorsInfo, err := se.stateSyncer.GetAllValidatorsInfo() + if err != nil { + return err + } + + log.Debug("Starting export for validators info", "len", len(toExportValidatorsInfo)) + for key, validatorInfo := range toExportValidatorsInfo { + errExport := se.exportValidatorInfo(key, validatorInfo) + if errExport != nil { + return errExport + } + } + + return se.hardforkStorer.FinishedIdentifier(ValidatorsInfoIdentifier) +} + func (se *stateExport) exportAllMiniBlocks() error { toExportMBs, err := se.stateSyncer.GetAllMiniBlocks() if err != nil { @@ -393,6 +415,22 @@ func (se *stateExport) exportTx(key string, tx data.TransactionHandler) error { return nil } +func (se *stateExport) exportValidatorInfo(key string, validatorInfo *state.ShardValidatorInfo) error { + marshaledData, err := json.Marshal(validatorInfo) + if err != nil { + return err + } + + keyToSave := CreateValidatorInfoKey(key) + + err = se.hardforkStorer.Write(ValidatorsInfoIdentifier, []byte(keyToSave), marshaledData) + if err != nil { + return err + } + + return nil +} + func (se *stateExport) exportNodesSetupJson(validators map[uint32][]*state.ValidatorInfo) error { acceptedListsForExport := []common.PeerType{common.EligibleList, common.WaitingList, common.JailedList} initialNodes := make([]*sharding.InitialNode, 0) diff --git a/update/interface.go b/update/interface.go index f1b47ece497..ac5feabf38f 100644 --- a/update/interface.go +++ b/update/interface.go @@ -20,6 +20,7 @@ type StateSyncer interface { SyncAllState(epoch uint32) error GetAllTries() (map[string]common.Trie, error) GetAllTransactions() (map[string]data.TransactionHandler, error) + GetAllValidatorsInfo() 
(map[string]*state.ShardValidatorInfo, error) GetAllMiniBlocks() (map[string]*block.MiniBlock, error) IsInterfaceNil() bool } @@ -143,6 +144,8 @@ type EpochStartPendingMiniBlocksSyncHandler interface { type TransactionsSyncHandler interface { SyncTransactionsFor(miniBlocks map[string]*block.MiniBlock, epoch uint32, ctx context.Context) error GetTransactions() (map[string]data.TransactionHandler, error) + GetValidatorsInfo() (map[string]*state.ShardValidatorInfo, error) + ClearFields() IsInterfaceNil() bool } diff --git a/update/mock/stateSyncStub.go b/update/mock/stateSyncStub.go index 329bbff75aa..cff23e6447d 100644 --- a/update/mock/stateSyncStub.go +++ b/update/mock/stateSyncStub.go @@ -4,6 +4,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/data" "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go/common" + "github.com/ElrondNetwork/elrond-go/state" ) // StateSyncStub - @@ -13,6 +14,7 @@ type StateSyncStub struct { SyncAllStateCalled func(epoch uint32) error GetAllTriesCalled func() (map[string]common.Trie, error) GetAllTransactionsCalled func() (map[string]data.TransactionHandler, error) + GetAllValidatorsInfoCalled func() (map[string]*state.ShardValidatorInfo, error) GetAllMiniBlocksCalled func() (map[string]*block.MiniBlock, error) } @@ -56,6 +58,14 @@ func (sss *StateSyncStub) GetAllTransactions() (map[string]data.TransactionHandl return nil, nil } +// GetAllValidatorsInfo - +func (sss *StateSyncStub) GetAllValidatorsInfo() (map[string]*state.ShardValidatorInfo, error) { + if sss.GetAllValidatorsInfoCalled != nil { + return sss.GetAllValidatorsInfoCalled() + } + return nil, nil +} + // GetAllMiniBlocks - func (sss *StateSyncStub) GetAllMiniBlocks() (map[string]*block.MiniBlock, error) { if sss.GetAllMiniBlocksCalled != nil { diff --git a/update/process/baseProcess.go b/update/process/baseProcess.go index 8edd3cda4ba..7b5a26f0525 100644 --- a/update/process/baseProcess.go +++ b/update/process/baseProcess.go @@ 
-227,12 +227,8 @@ func (b *baseProcessor) saveAllBlockDataToStorageForSelfShard( func (b *baseProcessor) saveMiniBlocks(headerHandler data.HeaderHandler, body *block.Body) { miniBlockHeadersHashes := headerHandler.GetMiniBlockHeadersHashes() - mapBlockTypesTxs := make(map[block.Type]map[string]data.TransactionHandler) for i := 0; i < len(body.MiniBlocks); i++ { miniBlock := body.MiniBlocks[i] - if _, ok := mapBlockTypesTxs[miniBlock.Type]; !ok { - mapBlockTypesTxs[miniBlock.Type] = b.txCoordinator.GetAllCurrentUsedTxs(miniBlock.Type) - } marshalizedMiniBlock, errNotCritical := b.marshalizer.Marshal(miniBlock) if errNotCritical != nil { diff --git a/update/sync/coordinator.go b/update/sync/coordinator.go index 5e746affa74..ffcdc1027be 100644 --- a/update/sync/coordinator.go +++ b/update/sync/coordinator.go @@ -11,6 +11,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/data/block" logger "github.com/ElrondNetwork/elrond-go-logger" "github.com/ElrondNetwork/elrond-go/common" + "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/update" ) @@ -212,6 +213,11 @@ func (ss *syncState) GetAllTransactions() (map[string]data.TransactionHandler, e return ss.transactions.GetTransactions() } +// GetAllValidatorsInfo returns the synced validators info +func (ss *syncState) GetAllValidatorsInfo() (map[string]*state.ShardValidatorInfo, error) { + return ss.transactions.GetValidatorsInfo() +} + // GetAllMiniBlocks returns the synced miniblocks func (ss *syncState) GetAllMiniBlocks() (map[string]*block.MiniBlock, error) { return ss.miniBlocks.GetMiniBlocks() diff --git a/update/sync/syncTransactions.go b/update/sync/syncTransactions.go index cc2265c6e05..d7281d254da 100644 --- a/update/sync/syncTransactions.go +++ b/update/sync/syncTransactions.go @@ -15,6 +15,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/marshal" "github.com/ElrondNetwork/elrond-go/dataRetriever" "github.com/ElrondNetwork/elrond-go/process" + 
"github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/update" ) @@ -23,12 +24,13 @@ var _ update.TransactionsSyncHandler = (*transactionsSync)(nil) type transactionsSync struct { mutPendingTx sync.Mutex mapTransactions map[string]data.TransactionHandler - mapHashes map[string]*block.MiniBlock + mapTxsToMiniBlocks map[string]*block.MiniBlock + mapValidatorsInfo map[string]*state.ShardValidatorInfo txPools map[block.Type]dataRetriever.ShardedDataCacherNotifier storage map[block.Type]update.HistoryStorer chReceivedAll chan bool requestHandler process.RequestHandler - marshalizer marshal.Marshalizer + marshaller marshal.Marshalizer epochToSync uint32 stopSync bool syncedAll bool @@ -39,7 +41,7 @@ type transactionsSync struct { type ArgsNewTransactionsSyncer struct { DataPools dataRetriever.PoolsHolder Storages dataRetriever.StorageService - Marshalizer marshal.Marshalizer + Marshaller marshal.Marshalizer RequestHandler process.RequestHandler } @@ -51,7 +53,7 @@ func NewTransactionsSyncer(args ArgsNewTransactionsSyncer) (*transactionsSync, e if check.IfNil(args.DataPools) { return nil, dataRetriever.ErrNilDataPoolHolder } - if check.IfNil(args.Marshalizer) { + if check.IfNil(args.Marshaller) { return nil, dataRetriever.ErrNilMarshalizer } if check.IfNil(args.RequestHandler) { @@ -61,10 +63,11 @@ func NewTransactionsSyncer(args ArgsNewTransactionsSyncer) (*transactionsSync, e ts := &transactionsSync{ mutPendingTx: sync.Mutex{}, mapTransactions: make(map[string]data.TransactionHandler), - mapHashes: make(map[string]*block.MiniBlock), + mapTxsToMiniBlocks: make(map[string]*block.MiniBlock), + mapValidatorsInfo: make(map[string]*state.ShardValidatorInfo), chReceivedAll: make(chan bool), requestHandler: args.RequestHandler, - marshalizer: args.Marshalizer, + marshaller: args.Marshaller, stopSync: true, syncedAll: true, waitTimeBetweenRequests: args.RequestHandler.RequestInterval(), @@ -74,13 +77,20 @@ func NewTransactionsSyncer(args 
ArgsNewTransactionsSyncer) (*transactionsSync, e ts.txPools[block.TxBlock] = args.DataPools.Transactions() ts.txPools[block.SmartContractResultBlock] = args.DataPools.UnsignedTransactions() ts.txPools[block.RewardsBlock] = args.DataPools.RewardTransactions() + ts.txPools[block.PeerBlock] = args.DataPools.ValidatorsInfo() ts.storage = make(map[block.Type]update.HistoryStorer) ts.storage[block.TxBlock] = args.Storages.GetStorer(dataRetriever.TransactionUnit) ts.storage[block.SmartContractResultBlock] = args.Storages.GetStorer(dataRetriever.UnsignedTransactionUnit) ts.storage[block.RewardsBlock] = args.Storages.GetStorer(dataRetriever.RewardTransactionUnit) + ts.storage[block.PeerBlock] = args.Storages.GetStorer(dataRetriever.UnsignedTransactionUnit) + + for poolType, pool := range ts.txPools { + if poolType == block.PeerBlock { + pool.RegisterOnAdded(ts.receivedValidatorInfo) + continue + } - for _, pool := range ts.txPools { pool.RegisterOnAdded(ts.receivedTransaction) } @@ -100,7 +110,7 @@ func (ts *transactionsSync) SyncTransactionsFor(miniBlocks map[string]*block.Min numRequestedTxs := 0 for _, miniBlock := range miniBlocks { for _, txHash := range miniBlock.TxHashes { - ts.mapHashes[string(txHash)] = miniBlock + ts.mapTxsToMiniBlocks[string(txHash)] = miniBlock log.Debug("transactionsSync.SyncTransactionsFor", "mb type", miniBlock.Type, "mb sender", miniBlock.SenderShardID, "mb receiver", miniBlock.ReceiverShardID, "tx hash needed", txHash) } numRequestedTxs += ts.requestTransactionsFor(miniBlock) @@ -124,7 +134,7 @@ func (ts *transactionsSync) SyncTransactionsFor(miniBlocks map[string]*block.Min return nil case <-time.After(ts.waitTimeBetweenRequests): ts.mutPendingTx.Lock() - log.Debug("transactionsSync.SyncTransactionsFor", "num txs needed", len(ts.mapHashes), "num txs got", len(ts.mapTransactions)) + log.Debug("transactionsSync.SyncTransactionsFor", "num txs needed", len(ts.mapTxsToMiniBlocks), "num txs got", len(ts.mapTransactions)) ts.mutPendingTx.Unlock() 
continue case <-ctx.Done(): @@ -137,6 +147,15 @@ func (ts *transactionsSync) SyncTransactionsFor(miniBlocks map[string]*block.Min } func (ts *transactionsSync) requestTransactionsFor(miniBlock *block.MiniBlock) int { + if miniBlock.Type == block.PeerBlock { + return ts.requestTransactionsForPeerMiniBlock(miniBlock) + } + + return ts.requestTransactionsForNonPeerMiniBlock(miniBlock) + +} + +func (ts *transactionsSync) requestTransactionsForNonPeerMiniBlock(miniBlock *block.MiniBlock) int { missingTxs := make([][]byte, 0) for _, txHash := range miniBlock.TxHashes { if _, ok := ts.mapTransactions[string(txHash)]; ok { @@ -153,8 +172,8 @@ func (ts *transactionsSync) requestTransactionsFor(miniBlock *block.MiniBlock) i } for _, txHash := range missingTxs { - ts.mapHashes[string(txHash)] = miniBlock - log.Debug("transactionsSync.requestTransactionsFor", "mb type", miniBlock.Type, "mb sender", miniBlock.SenderShardID, "mb receiver", miniBlock.ReceiverShardID, "tx hash missing", txHash) + ts.mapTxsToMiniBlocks[string(txHash)] = miniBlock + log.Debug("transactionsSync.requestTransactionsForNonPeerMiniBlock", "mb type", miniBlock.Type, "mb sender", miniBlock.SenderShardID, "mb receiver", miniBlock.ReceiverShardID, "tx hash missing", txHash) } mbType := miniBlock.Type @@ -177,6 +196,31 @@ func (ts *transactionsSync) requestTransactionsFor(miniBlock *block.MiniBlock) i return len(missingTxs) } +func (ts *transactionsSync) requestTransactionsForPeerMiniBlock(miniBlock *block.MiniBlock) int { + missingTxs := make([][]byte, 0) + for _, txHash := range miniBlock.TxHashes { + if _, ok := ts.mapValidatorsInfo[string(txHash)]; ok { + continue + } + + validatorInfo, ok := ts.getValidatorInfoFromPoolOrStorage(txHash) + if ok { + ts.mapValidatorsInfo[string(txHash)] = validatorInfo + continue + } + + missingTxs = append(missingTxs, txHash) + } + + for _, txHash := range missingTxs { + ts.mapTxsToMiniBlocks[string(txHash)] = miniBlock + 
log.Debug("transactionsSync.requestTransactionsForPeerMiniBlock", "mb type", miniBlock.Type, "mb sender", miniBlock.SenderShardID, "mb receiver", miniBlock.ReceiverShardID, "tx hash missing", txHash) + ts.requestHandler.RequestValidatorInfo(txHash) + } + + return len(missingTxs) +} + func (ts *transactionsSync) receivedTransaction(txHash []byte, val interface{}) { ts.mutPendingTx.Lock() if ts.stopSync { @@ -184,7 +228,7 @@ func (ts *transactionsSync) receivedTransaction(txHash []byte, val interface{}) return } - miniBlock, ok := ts.mapHashes[string(txHash)] + miniBlock, ok := ts.mapTxsToMiniBlocks[string(txHash)] if !ok { ts.mutPendingTx.Unlock() return @@ -198,13 +242,50 @@ func (ts *transactionsSync) receivedTransaction(txHash []byte, val interface{}) tx, ok := val.(data.TransactionHandler) if !ok { ts.mutPendingTx.Unlock() + log.Error("transactionsSync.receivedTransaction", "tx hash", txHash, "error", update.ErrWrongTypeAssertion) return } log.Debug("transactionsSync.receivedTransaction", "mb type", miniBlock.Type, "mb sender", miniBlock.SenderShardID, "mb receiver", miniBlock.ReceiverShardID, "tx hash got", txHash) ts.mapTransactions[string(txHash)] = tx - receivedAllMissing := len(ts.mapHashes) == len(ts.mapTransactions) + receivedAllMissing := len(ts.mapTxsToMiniBlocks) == len(ts.mapTransactions)+len(ts.mapValidatorsInfo) + ts.mutPendingTx.Unlock() + + if receivedAllMissing { + ts.chReceivedAll <- true + } +} + +func (ts *transactionsSync) receivedValidatorInfo(txHash []byte, val interface{}) { + ts.mutPendingTx.Lock() + if ts.stopSync { + ts.mutPendingTx.Unlock() + return + } + + miniBlock, ok := ts.mapTxsToMiniBlocks[string(txHash)] + if !ok { + ts.mutPendingTx.Unlock() + return + } + _, ok = ts.mapValidatorsInfo[string(txHash)] + if ok { + ts.mutPendingTx.Unlock() + return + } + + validatorInfo, ok := val.(*state.ShardValidatorInfo) + if !ok { + ts.mutPendingTx.Unlock() + log.Error("transactionsSync.receivedValidatorInfo", "tx hash", txHash, "error", 
update.ErrWrongTypeAssertion) + return + } + + log.Debug("transactionsSync.receivedValidatorInfo", "mb type", miniBlock.Type, "mb sender", miniBlock.SenderShardID, "mb receiver", miniBlock.ReceiverShardID, "tx hash got", txHash) + + ts.mapValidatorsInfo[string(txHash)] = validatorInfo + receivedAllMissing := len(ts.mapTxsToMiniBlocks) == len(ts.mapValidatorsInfo)+len(ts.mapTransactions) ts.mutPendingTx.Unlock() if receivedAllMissing { @@ -213,7 +294,7 @@ func (ts *transactionsSync) receivedTransaction(txHash []byte, val interface{}) } func (ts *transactionsSync) getTransactionFromPool(txHash []byte) (data.TransactionHandler, bool) { - mb, ok := ts.mapHashes[string(txHash)] + mb, ok := ts.mapTxsToMiniBlocks[string(txHash)] if !ok { return nil, false } @@ -247,12 +328,50 @@ func (ts *transactionsSync) getTransactionFromPool(txHash []byte) (data.Transact tx, ok := val.(data.TransactionHandler) if !ok { + log.Error("transactionsSync.getTransactionFromPool", "tx hash", txHash, "error", update.ErrWrongTypeAssertion) return nil, false } return tx, true } +func (ts *transactionsSync) getValidatorInfoFromPool(txHash []byte) (*state.ShardValidatorInfo, bool) { + mb, ok := ts.mapTxsToMiniBlocks[string(txHash)] + if !ok { + return nil, false + } + + if _, ok = ts.txPools[block.PeerBlock]; !ok { + log.Debug("transactionsSync.getValidatorInfoFromPool: missing mini block type from sharded data cacher notifier map", + "tx hash", txHash, + "original mb type", mb.Type, + "mb type", block.PeerBlock, + "mb sender shard", mb.SenderShardID, + "mb receiver shard", mb.ReceiverShardID, + "mb num txs", len(mb.TxHashes)) + return nil, false + } + + storeId := process.ShardCacherIdentifier(mb.SenderShardID, mb.ReceiverShardID) + shardTxStore := ts.txPools[block.PeerBlock].ShardDataStore(storeId) + if check.IfNil(shardTxStore) { + return nil, false + } + + val, ok := shardTxStore.Peek(txHash) + if !ok { + return nil, false + } + + validatorInfo, ok := val.(*state.ShardValidatorInfo) + if !ok { 
+ log.Error("transactionsSync.getValidatorInfoFromPool", "tx hash", txHash, "error", update.ErrWrongTypeAssertion) + return nil, false + } + + return validatorInfo, true +} + func (ts *transactionsSync) getTransactionFromPoolWithSearchFirst( txHash []byte, cacher dataRetriever.ShardedDataCacherNotifier, @@ -270,13 +389,30 @@ func (ts *transactionsSync) getTransactionFromPoolWithSearchFirst( return tx, true } +func (ts *transactionsSync) getValidatorInfoFromPoolWithSearchFirst( + txHash []byte, + cacher dataRetriever.ShardedDataCacherNotifier, +) (*state.ShardValidatorInfo, bool) { + val, ok := cacher.SearchFirstData(txHash) + if !ok { + return nil, false + } + + validatorInfo, ok := val.(*state.ShardValidatorInfo) + if !ok { + return nil, false + } + + return validatorInfo, true +} + func (ts *transactionsSync) getTransactionFromPoolOrStorage(hash []byte) (data.TransactionHandler, bool) { txFromPool, ok := ts.getTransactionFromPool(hash) if ok { return txFromPool, true } - miniBlock, ok := ts.mapHashes[string(hash)] + miniBlock, ok := ts.mapTxsToMiniBlocks[string(hash)] if !ok { return nil, false } @@ -307,7 +443,7 @@ func (ts *transactionsSync) getTransactionFromPoolOrStorage(hash []byte) (data.T tx = &rewardTx.RewardTx{} } - err = ts.marshalizer.Unmarshal(tx, txData) + err = ts.marshaller.Unmarshal(tx, txData) if err != nil { return nil, false } @@ -315,6 +451,38 @@ func (ts *transactionsSync) getTransactionFromPoolOrStorage(hash []byte) (data.T return tx, true } +func (ts *transactionsSync) getValidatorInfoFromPoolOrStorage(hash []byte) (*state.ShardValidatorInfo, bool) { + validatorInfoFromPool, ok := ts.getValidatorInfoFromPool(hash) + if ok { + return validatorInfoFromPool, true + } + + miniBlock, ok := ts.mapTxsToMiniBlocks[string(hash)] + if !ok { + return nil, false + } + + validatorInfoFromPoolWithSearchFirst, ok := ts.getValidatorInfoFromPoolWithSearchFirst(hash, ts.txPools[block.PeerBlock]) + if ok { + 
log.Debug("transactionsSync.getValidatorInfoFromPoolOrStorage: found transaction using search first", "mb type", miniBlock.Type, "mb sender", miniBlock.SenderShardID, "mb receiver", miniBlock.ReceiverShardID, "tx hash", hash) + return validatorInfoFromPoolWithSearchFirst, true + } + + validatorInfoData, err := GetDataFromStorage(hash, ts.storage[block.PeerBlock]) + if err != nil { + return nil, false + } + + validatorInfo := &state.ShardValidatorInfo{} + + err = ts.marshaller.Unmarshal(validatorInfo, validatorInfoData) + if err != nil { + return nil, false + } + + return validatorInfo, true +} + // GetTransactions returns the synced transactions func (ts *transactionsSync) GetTransactions() (map[string]data.TransactionHandler, error) { ts.mutPendingTx.Lock() @@ -326,6 +494,26 @@ func (ts *transactionsSync) GetTransactions() (map[string]data.TransactionHandle return ts.mapTransactions, nil } +// GetValidatorsInfo returns the synced validators info +func (ts *transactionsSync) GetValidatorsInfo() (map[string]*state.ShardValidatorInfo, error) { + ts.mutPendingTx.Lock() + defer ts.mutPendingTx.Unlock() + if !ts.syncedAll { + return nil, update.ErrNotSynced + } + + return ts.mapValidatorsInfo, nil +} + +// ClearFields will clear all the maps +func (ts *transactionsSync) ClearFields() { + ts.mutPendingTx.Lock() + ts.mapTransactions = make(map[string]data.TransactionHandler) + ts.mapTxsToMiniBlocks = make(map[string]*block.MiniBlock) + ts.mapValidatorsInfo = make(map[string]*state.ShardValidatorInfo) + ts.mutPendingTx.Unlock() +} + // IsInterfaceNil returns true if underlying object is nil func (ts *transactionsSync) IsInterfaceNil() bool { return ts == nil diff --git a/update/sync/syncTransactions_test.go b/update/sync/syncTransactions_test.go index 36f4ddda592..e65f90e8ad3 100644 --- a/update/sync/syncTransactions_test.go +++ b/update/sync/syncTransactions_test.go @@ -28,7 +28,7 @@ func createMockArgs() ArgsNewTransactionsSyncer { return &storageStubs.StorerStub{} }, }, 
- Marshalizer: &mock.MarshalizerFake{}, + Marshaller: &mock.MarshalizerFake{}, RequestHandler: &testscommon.RequestHandlerStub{}, } } @@ -70,7 +70,7 @@ func TestNewPendingTransactionsSyncer_NilMarshalizer(t *testing.T) { t.Parallel() args := createMockArgs() - args.Marshalizer = nil + args.Marshaller = nil pendingTxsSyncer, err := NewTransactionsSyncer(args) require.Nil(t, pendingTxsSyncer) From 4c0dacf61fc2819f4431e8719ef376a9b50f62bb Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Fri, 27 May 2022 14:31:30 +0300 Subject: [PATCH 28/70] * Finalized implementation for peers mini blocks refactor in epochStart component --- .../resolverscontainer/baseResolversContainerFactory.go | 2 +- process/errors.go | 6 ------ .../factory/interceptedDirectConnectionInfoFactory.go | 4 ++-- .../factory/interceptedValidatorInfoDataFactory.go | 4 ++-- 4 files changed, 5 insertions(+), 11 deletions(-) diff --git a/dataRetriever/factory/resolverscontainer/baseResolversContainerFactory.go b/dataRetriever/factory/resolverscontainer/baseResolversContainerFactory.go index 00c0bcea2b3..567e85876ae 100644 --- a/dataRetriever/factory/resolverscontainer/baseResolversContainerFactory.go +++ b/dataRetriever/factory/resolverscontainer/baseResolversContainerFactory.go @@ -401,7 +401,7 @@ func (brcf *baseResolversContainerFactory) createTrieNodesResolver( func (brcf *baseResolversContainerFactory) generateValidatorInfoResolver() error { identifierValidatorInfo := common.ValidatorInfoTopic shardC := brcf.shardCoordinator - resolverSender, err := brcf.createOneResolverSender(identifierValidatorInfo, EmptyExcludePeersOnTopic, shardC.SelfId()) + resolverSender, err := brcf.createOneResolverSenderWithSpecifiedNumRequests(identifierValidatorInfo, EmptyExcludePeersOnTopic, shardC.SelfId(), brcf.numCrossShardPeers, brcf.numIntraShardPeers) if err != nil { return err } diff --git a/process/errors.go b/process/errors.go index 6e809d36a57..330641d189c 100644 --- a/process/errors.go +++ b/process/errors.go 
@@ -1077,12 +1077,6 @@ var ErrNilDoubleTransactionsDetector = errors.New("nil double transactions detec // ErrNoTxToProcess signals that no transaction were sent for processing var ErrNoTxToProcess = errors.New("no transaction to process") -// ErrPropertyTooLong signals that a heartbeat property was too long -var ErrPropertyTooLong = errors.New("property too long") - -// ErrPropertyTooShort signals that a heartbeat property was too short -var ErrPropertyTooShort = errors.New("property too short") - // ErrInvalidPeerSubType signals that an invalid peer subtype was provided var ErrInvalidPeerSubType = errors.New("invalid peer subtype") diff --git a/process/interceptors/factory/interceptedDirectConnectionInfoFactory.go b/process/interceptors/factory/interceptedDirectConnectionInfoFactory.go index de81b20cb45..9a3b447f016 100644 --- a/process/interceptors/factory/interceptedDirectConnectionInfoFactory.go +++ b/process/interceptors/factory/interceptedDirectConnectionInfoFactory.go @@ -15,7 +15,7 @@ type interceptedDirectConnectionInfoFactory struct { // NewInterceptedDirectConnectionInfoFactory creates an instance of interceptedDirectConnectionInfoFactory func NewInterceptedDirectConnectionInfoFactory(args ArgInterceptedDataFactory) (*interceptedDirectConnectionInfoFactory, error) { - err := checkArgs(args) + err := checkInterceptedDirectConnectionInfoFactoryArgs(args) if err != nil { return nil, err } @@ -26,7 +26,7 @@ func NewInterceptedDirectConnectionInfoFactory(args ArgInterceptedDataFactory) ( }, nil } -func checkArgs(args ArgInterceptedDataFactory) error { +func checkInterceptedDirectConnectionInfoFactoryArgs(args ArgInterceptedDataFactory) error { if check.IfNil(args.CoreComponents) { return process.ErrNilCoreComponentsHolder } diff --git a/process/interceptors/factory/interceptedValidatorInfoDataFactory.go b/process/interceptors/factory/interceptedValidatorInfoDataFactory.go index 345a0337eff..afd6f02ccef 100644 --- 
a/process/interceptors/factory/interceptedValidatorInfoDataFactory.go +++ b/process/interceptors/factory/interceptedValidatorInfoDataFactory.go @@ -16,7 +16,7 @@ type interceptedValidatorInfoDataFactory struct { // NewInterceptedValidatorInfoDataFactory creates an instance of interceptedValidatorInfoDataFactory func NewInterceptedValidatorInfoDataFactory(args ArgInterceptedDataFactory) (*interceptedValidatorInfoDataFactory, error) { - err := checkArgs(args) + err := checkInterceptedValidatorInfoDataFactoryArgs(args) if err != nil { return nil, err } @@ -28,7 +28,7 @@ func NewInterceptedValidatorInfoDataFactory(args ArgInterceptedDataFactory) (*in }, nil } -func checkArgs(args ArgInterceptedDataFactory) error { +func checkInterceptedValidatorInfoDataFactoryArgs(args ArgInterceptedDataFactory) error { if check.IfNil(args.CoreComponents) { return process.ErrNilCoreComponentsHolder } From ff43beb92bdd48c09abecf9261a54aff53ce1aa6 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Fri, 27 May 2022 17:34:40 +0300 Subject: [PATCH 29/70] * Merged feat/refactor-peers-mbs --- process/block/preprocess/rewardTxPreProcessor.go | 1 - 1 file changed, 1 deletion(-) diff --git a/process/block/preprocess/rewardTxPreProcessor.go b/process/block/preprocess/rewardTxPreProcessor.go index a6ab41608a8..086b5fb066a 100644 --- a/process/block/preprocess/rewardTxPreProcessor.go +++ b/process/block/preprocess/rewardTxPreProcessor.go @@ -453,7 +453,6 @@ func (rtp *rewardTxPreprocessor) ProcessMiniBlock( miniBlock *block.MiniBlock, haveTime func() bool, _ func() bool, - _ func() (int, int), _ bool, partialMbExecutionMode bool, indexOfLastTxProcessed int, From 4f6230e000ba852a3bd683473d787f0b7edbd008 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Mon, 30 May 2022 14:46:36 +0300 Subject: [PATCH 30/70] * Fixed early return on error --- dataRetriever/requestHandlers/requestHandler.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/dataRetriever/requestHandlers/requestHandler.go 
b/dataRetriever/requestHandlers/requestHandler.go index 7f1f434be51..fda20a0e149 100644 --- a/dataRetriever/requestHandlers/requestHandler.go +++ b/dataRetriever/requestHandlers/requestHandler.go @@ -586,6 +586,7 @@ func (rrh *resolverRequestHandler) RequestValidatorInfo(hash []byte) { "hash", hash, "epoch", rrh.epoch, ) + return } rrh.addRequestedItems([][]byte{hash}, uniqueValidatorInfoSuffix) @@ -631,6 +632,7 @@ func (rrh *resolverRequestHandler) RequestValidatorsInfo(hashes [][]byte) { "num hashes", len(unrequestedHashes), "epoch", rrh.epoch, ) + return } rrh.addRequestedItems(unrequestedHashes, uniqueValidatorInfoSuffix) From da276ada1355292e75af5c74b333c4c5b4a40592 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Mon, 30 May 2022 18:01:27 +0300 Subject: [PATCH 31/70] * Added requests from hashes array and fixed integration test --- epochStart/interface.go | 1 + epochStart/shardchain/peerMiniBlocksSyncer.go | 4 +- epochStart/shardchain/trigger.go | 8 ++- .../vm/systemVM/stakingSC_test.go | 50 ++++++++++++------- update/sync/syncTransactions.go | 3 +- 5 files changed, 38 insertions(+), 28 deletions(-) diff --git a/epochStart/interface.go b/epochStart/interface.go index c80341ccdf2..bf4c861a189 100644 --- a/epochStart/interface.go +++ b/epochStart/interface.go @@ -59,6 +59,7 @@ type RequestHandler interface { SetNumPeersToQuery(key string, intra int, cross int) error GetNumPeersToQuery(key string) (int, int, error) RequestValidatorInfo(hash []byte) + RequestValidatorsInfo(hashes [][]byte) IsInterfaceNil() bool } diff --git a/epochStart/shardchain/peerMiniBlocksSyncer.go b/epochStart/shardchain/peerMiniBlocksSyncer.go index d247be53966..80bbfa6245e 100644 --- a/epochStart/shardchain/peerMiniBlocksSyncer.go +++ b/epochStart/shardchain/peerMiniBlocksSyncer.go @@ -320,9 +320,7 @@ func (p *peerMiniBlockSyncer) retrieveMissingValidatorsInfo() ([][]byte, error) return nil, nil } - for index := range missingValidatorsInfo { - go 
p.requestHandler.RequestValidatorInfo(missingValidatorsInfo[index]) - } + p.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) select { case <-p.chRcvAllValidatorsInfo: diff --git a/epochStart/shardchain/trigger.go b/epochStart/shardchain/trigger.go index 213cbf12997..ee406e06aa6 100644 --- a/epochStart/shardchain/trigger.go +++ b/epochStart/shardchain/trigger.go @@ -347,9 +347,7 @@ func (t *trigger) requestMissingValidatorsInfo(ctx context.Context) { } t.mutMissingValidatorsInfo.RUnlock() - for index := range missingValidatorsInfo { - go t.requestHandler.RequestValidatorInfo(missingValidatorsInfo[index]) - } + go t.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) select { case <-ctx.Done(): @@ -672,13 +670,13 @@ func (t *trigger) isMetaBlockValid(hash string, metaHdr data.HeaderHandler) bool for i := metaHdr.GetNonce() - 1; i >= metaHdr.GetNonce()-t.validity; i-- { neededHdr, err := t.getHeaderWithNonceAndHash(i, currHdr.GetPrevHash()) if err != nil { - log.Debug("isMetaBlockValid.getHeaderWithNonceAndHash", "hash", hash, "error", err.Error()) + log.Debug("isMetaBlockValid.getHeaderWithNonceAndHash", "hash", hash, "error", err.Error()) return false } err = t.headerValidator.IsHeaderConstructionValid(currHdr, neededHdr) if err != nil { - log.Debug("isMetaBlockValid.IsHeaderConstructionValid", "hash", hash, "error", err.Error()) + log.Debug("isMetaBlockValid.IsHeaderConstructionValid", "hash", hash, "error", err.Error()) return false } diff --git a/integrationTests/vm/systemVM/stakingSC_test.go b/integrationTests/vm/systemVM/stakingSC_test.go index cce8c2bb4c4..5dc3980729e 100644 --- a/integrationTests/vm/systemVM/stakingSC_test.go +++ b/integrationTests/vm/systemVM/stakingSC_test.go @@ -128,30 +128,42 @@ func TestStakingUnstakingAndUnbondingOnMultiShardEnvironmentWithValidatorStatist numOfShards := 2 nodesPerShard := 2 numMetachainNodes := 2 - shardConsensusGroupSize := 1 - metaConsensusGroupSize := 1 + //shardConsensusGroupSize := 1 + 
//metaConsensusGroupSize := 1 - nodesMap := integrationTests.CreateNodesWithNodesCoordinator( + nodes := integrationTests.CreateNodes( + numOfShards, nodesPerShard, numMetachainNodes, - numOfShards, - shardConsensusGroupSize, - metaConsensusGroupSize, ) - nodes := make([]*integrationTests.TestProcessorNode, 0) idxProposers := make([]int, numOfShards+1) - - for _, nds := range nodesMap { - nodes = append(nodes, nds...) + for i := 0; i < numOfShards; i++ { + idxProposers[i] = i * nodesPerShard } + idxProposers[numOfShards] = numOfShards * nodesPerShard - for _, nds := range nodesMap { - idx, err := integrationTestsVm.GetNodeIndex(nodes, nds[0]) - require.Nil(t, err) - - idxProposers = append(idxProposers, idx) - } + //nodesMap := integrationTests.CreateNodesWithNodesCoordinator( + // nodesPerShard, + // numMetachainNodes, + // numOfShards, + // shardConsensusGroupSize, + // metaConsensusGroupSize, + //) + + //nodes := make([]*integrationTests.TestProcessorNode, 0) + //idxProposers := make([]int, numOfShards+1) + // + //for _, nds := range nodesMap { + // nodes = append(nodes, nds...) 
+ //} + // + //for _, nds := range nodesMap { + // idx, err := integrationTestsVm.GetNodeIndex(nodes, nds[0]) + // require.Nil(t, err) + // + // idxProposers = append(idxProposers, idx) + //} integrationTests.DisplayAndStartNodes(nodes) @@ -161,9 +173,9 @@ func TestStakingUnstakingAndUnbondingOnMultiShardEnvironmentWithValidatorStatist } }() - for _, nds := range nodesMap { - fmt.Println(integrationTests.MakeDisplayTable(nds)) - } + //for _, nds := range nodesMap { + // fmt.Println(integrationTests.MakeDisplayTable(nds)) + //} initialVal := big.NewInt(10000000000) integrationTests.MintAllNodes(nodes, initialVal) diff --git a/update/sync/syncTransactions.go b/update/sync/syncTransactions.go index d7281d254da..096ea7e747d 100644 --- a/update/sync/syncTransactions.go +++ b/update/sync/syncTransactions.go @@ -215,9 +215,10 @@ func (ts *transactionsSync) requestTransactionsForPeerMiniBlock(miniBlock *block for _, txHash := range missingTxs { ts.mapTxsToMiniBlocks[string(txHash)] = miniBlock log.Debug("transactionsSync.requestTransactionsForPeerMiniBlock", "mb type", miniBlock.Type, "mb sender", miniBlock.SenderShardID, "mb receiver", miniBlock.ReceiverShardID, "tx hash missing", txHash) - ts.requestHandler.RequestValidatorInfo(txHash) } + ts.requestHandler.RequestValidatorsInfo(missingTxs) + return len(missingTxs) } From b91052029ef2bc8de0e068facedd8f400439b6e0 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Mon, 30 May 2022 22:10:58 +0300 Subject: [PATCH 32/70] * Added missing clean method call in shardChain trigger * Fixed linter errors in validatorInfoProcessor implementation --- epochStart/shardchain/trigger.go | 1 + .../preprocess/validatorInfoPreProcessor.go | 209 +++++++++--------- 2 files changed, 107 insertions(+), 103 deletions(-) diff --git a/epochStart/shardchain/trigger.go b/epochStart/shardchain/trigger.go index ee406e06aa6..31b2618bfb3 100644 --- a/epochStart/shardchain/trigger.go +++ b/epochStart/shardchain/trigger.go @@ -630,6 +630,7 @@ func (t 
*trigger) updateTriggerFromMeta() { log.Debug("trigger.updateTriggerFromMeta", "isEpochStart", t.isEpochStart) logger.SetCorrelationEpoch(t.metaEpoch) t.clearMissingMiniBlocksMap(t.metaEpoch) + t.clearMissingValidatorsInfoMap(t.metaEpoch) } // save all final-valid epoch start blocks diff --git a/process/block/preprocess/validatorInfoPreProcessor.go b/process/block/preprocess/validatorInfoPreProcessor.go index 8bbdb2f60bb..c07e1d7eaa3 100644 --- a/process/block/preprocess/validatorInfoPreProcessor.go +++ b/process/block/preprocess/validatorInfoPreProcessor.go @@ -77,32 +77,33 @@ func NewValidatorInfoPreprocessor( } // waitForValidatorsInfoHashes waits for a call whether all the requested validators info appeared -func (vip *validatorInfoPreprocessor) waitForValidatorsInfoHashes(waitTime time.Duration) error { - select { - case <-vip.chReceivedAllValidatorsInfo: - return nil - case <-time.After(waitTime): - return process.ErrTimeIsOut - } -} +//func (vip *validatorInfoPreprocessor) waitForValidatorsInfoHashes(waitTime time.Duration) error { +// select { +// case <-vip.chReceivedAllValidatorsInfo: +// return nil +// case <-time.After(waitTime): +// return process.ErrTimeIsOut +// } +//} // IsDataPrepared returns non error if all the requested validators info arrived and were saved into the pool -func (vip *validatorInfoPreprocessor) IsDataPrepared(requestedValidatorsInfo int, haveTime func() time.Duration) error { - if requestedValidatorsInfo > 0 { - log.Debug("requested missing validators info", - "num validators info", requestedValidatorsInfo) - err := vip.waitForValidatorsInfoHashes(haveTime()) - vip.validatorsInfoForBlock.mutTxsForBlock.Lock() - missingValidatorsInfo := vip.validatorsInfoForBlock.missingTxs - vip.validatorsInfoForBlock.missingTxs = 0 - vip.validatorsInfoForBlock.mutTxsForBlock.Unlock() - log.Debug("received validators info", - "num validators info", requestedValidatorsInfo-missingValidatorsInfo) - if err != nil { - return err - } - } +func (vip 
*validatorInfoPreprocessor) IsDataPrepared(_ int, _ func() time.Duration) error { return nil + //if requestedValidatorsInfo > 0 { + // log.Debug("requested missing validators info", + // "num validators info", requestedValidatorsInfo) + // err := vip.waitForValidatorsInfoHashes(haveTime()) + // vip.validatorsInfoForBlock.mutTxsForBlock.Lock() + // missingValidatorsInfo := vip.validatorsInfoForBlock.missingTxs + // vip.validatorsInfoForBlock.missingTxs = 0 + // vip.validatorsInfoForBlock.mutTxsForBlock.Unlock() + // log.Debug("received validators info", + // "num validators info", requestedValidatorsInfo-missingValidatorsInfo) + // if err != nil { + // return err + // } + //} + //return nil } // RemoveBlockDataFromPools removes the peer miniblocks from pool @@ -188,26 +189,27 @@ func (vip *validatorInfoPreprocessor) ProcessBlockTransactions( } // SaveTxsToStorage saves the validators info from body into storage -func (vip *validatorInfoPreprocessor) SaveTxsToStorage(body *block.Body) error { - if check.IfNil(body) { - return process.ErrNilBlockBody - } - - for i := 0; i < len(body.MiniBlocks); i++ { - miniBlock := body.MiniBlocks[i] - if miniBlock.Type != block.PeerBlock { - continue - } - - vip.saveTxsToStorage( - miniBlock.TxHashes, - &vip.validatorsInfoForBlock, - vip.storage, - dataRetriever.UnsignedTransactionUnit, - ) - } - +func (vip *validatorInfoPreprocessor) SaveTxsToStorage(_ *block.Body) error { return nil + //if check.IfNil(body) { + // return process.ErrNilBlockBody + //} + // + //for i := 0; i < len(body.MiniBlocks); i++ { + // miniBlock := body.MiniBlocks[i] + // if miniBlock.Type != block.PeerBlock { + // continue + // } + // + // vip.saveTxsToStorage( + // miniBlock.TxHashes, + // &vip.validatorsInfoForBlock, + // vip.storage, + // dataRetriever.UnsignedTransactionUnit, + // ) + //} + // + //return nil } // receivedValidatorInfoTransaction is a callback function called when a new validator info transaction @@ -238,53 +240,53 @@ func (vip 
*validatorInfoPreprocessor) CreateBlockStarted() { } // RequestBlockTransactions request for validators info if missing from a block.Body -func (vip *validatorInfoPreprocessor) RequestBlockTransactions(body *block.Body) int { - if check.IfNil(body) { - return 0 - } - +func (vip *validatorInfoPreprocessor) RequestBlockTransactions(_ *block.Body) int { return 0 + //if check.IfNil(body) { + // return 0 + //} + // //return vip.computeExistingAndRequestMissingValidatorsInfoForShards(body) } // computeExistingAndRequestMissingValidatorsInfoForShards calculates what validators info are available and requests // what are missing from block.Body -func (vip *validatorInfoPreprocessor) computeExistingAndRequestMissingValidatorsInfoForShards(body *block.Body) int { - validatorsInfoBody := block.Body{} - for _, mb := range body.MiniBlocks { - if mb.Type != block.PeerBlock { - continue - } - if mb.SenderShardID != core.MetachainShardId { - continue - } - - validatorsInfoBody.MiniBlocks = append(validatorsInfoBody.MiniBlocks, mb) - } - - numMissingTxsForShards := vip.computeExistingAndRequestMissing( - &validatorsInfoBody, - &vip.validatorsInfoForBlock, - vip.chReceivedAllValidatorsInfo, - vip.isMiniBlockCorrect, - vip.validatorsInfoPool, - vip.onRequestValidatorsInfoWithShard, - ) - - return numMissingTxsForShards -} - -func (vip *validatorInfoPreprocessor) onRequestValidatorsInfoWithShard(_ uint32, txHashes [][]byte) { - vip.onRequestValidatorsInfo(txHashes) -} +//func (vip *validatorInfoPreprocessor) computeExistingAndRequestMissingValidatorsInfoForShards(body *block.Body) int { +// validatorsInfoBody := block.Body{} +// for _, mb := range body.MiniBlocks { +// if mb.Type != block.PeerBlock { +// continue +// } +// if mb.SenderShardID != core.MetachainShardId { +// continue +// } +// +// validatorsInfoBody.MiniBlocks = append(validatorsInfoBody.MiniBlocks, mb) +// } +// +// numMissingTxsForShards := vip.computeExistingAndRequestMissing( +// &validatorsInfoBody, +// 
&vip.validatorsInfoForBlock, +// vip.chReceivedAllValidatorsInfo, +// vip.isMiniBlockCorrect, +// vip.validatorsInfoPool, +// vip.onRequestValidatorsInfoWithShard, +// ) +// +// return numMissingTxsForShards +//} + +//func (vip *validatorInfoPreprocessor) onRequestValidatorsInfoWithShard(_ uint32, txHashes [][]byte) { +// vip.onRequestValidatorsInfo(txHashes) +//} // RequestTransactionsForMiniBlock requests missing validators info for a certain miniblock -func (vip *validatorInfoPreprocessor) RequestTransactionsForMiniBlock(miniBlock *block.MiniBlock) int { - if miniBlock == nil { - return 0 - } - +func (vip *validatorInfoPreprocessor) RequestTransactionsForMiniBlock(_ *block.MiniBlock) int { return 0 + //if miniBlock == nil { + // return 0 + //} + // //missingValidatorsInfoHashesForMiniBlock := vip.computeMissingValidatorsInfoHashesForMiniBlock(miniBlock) //if len(missingValidatorsInfoHashesForMiniBlock) > 0 { // vip.onRequestValidatorsInfo(missingValidatorsInfoHashesForMiniBlock) @@ -294,29 +296,30 @@ func (vip *validatorInfoPreprocessor) RequestTransactionsForMiniBlock(miniBlock } // computeMissingValidatorsInfoHashesForMiniBlock computes missing validators info hashes for a certain miniblock -func (vip *validatorInfoPreprocessor) computeMissingValidatorsInfoHashesForMiniBlock(miniBlock *block.MiniBlock) [][]byte { - missingValidatorsInfoHashes := make([][]byte, 0) - - if miniBlock.Type != block.PeerBlock { - return missingValidatorsInfoHashes - } - - for _, txHash := range miniBlock.TxHashes { - validatorInfo, _ := process.GetValidatorInfoFromPool( - miniBlock.SenderShardID, - miniBlock.ReceiverShardID, - txHash, - vip.validatorsInfoPool, - false, - ) - - if validatorInfo == nil { - missingValidatorsInfoHashes = append(missingValidatorsInfoHashes, txHash) - } - } - - return missingValidatorsInfoHashes -} +//func (vip *validatorInfoPreprocessor) computeMissingValidatorsInfoHashesForMiniBlock(miniBlock *block.MiniBlock) [][]byte { +// missingValidatorsInfoHashes 
:= make([][]byte, 0) +// return missingValidatorsInfoHashes +// +// if miniBlock.Type != block.PeerBlock { +// return missingValidatorsInfoHashes +// } +// +// for _, txHash := range miniBlock.TxHashes { +// validatorInfo, _ := process.GetValidatorInfoFromPool( +// miniBlock.SenderShardID, +// miniBlock.ReceiverShardID, +// txHash, +// vip.validatorsInfoPool, +// false, +// ) +// +// if validatorInfo == nil { +// missingValidatorsInfoHashes = append(missingValidatorsInfoHashes, txHash) +// } +// } +// +// return missingValidatorsInfoHashes +//} // CreateAndProcessMiniBlocks does nothing func (vip *validatorInfoPreprocessor) CreateAndProcessMiniBlocks(_ func() bool, _ []byte) (block.MiniBlockSlice, error) { From 5b115e7df751ebc9bf75b0bd28082aab02392c42 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Tue, 31 May 2022 13:46:25 +0300 Subject: [PATCH 33/70] * Fixed type assertion error --- update/sync/syncTransactions.go | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/update/sync/syncTransactions.go b/update/sync/syncTransactions.go index 096ea7e747d..3b706c7805f 100644 --- a/update/sync/syncTransactions.go +++ b/update/sync/syncTransactions.go @@ -16,6 +16,7 @@ import ( "github.com/ElrondNetwork/elrond-go/dataRetriever" "github.com/ElrondNetwork/elrond-go/process" "github.com/ElrondNetwork/elrond-go/state" + "github.com/ElrondNetwork/elrond-go/storage/txcache" "github.com/ElrondNetwork/elrond-go/update" ) @@ -229,22 +230,31 @@ func (ts *transactionsSync) receivedTransaction(txHash []byte, val interface{}) return } - miniBlock, ok := ts.mapTxsToMiniBlocks[string(txHash)] - if !ok { + miniBlock, foundInMap := ts.mapTxsToMiniBlocks[string(txHash)] + if !foundInMap { ts.mutPendingTx.Unlock() return } - _, ok = ts.mapTransactions[string(txHash)] - if ok { + _, foundInMap = ts.mapTransactions[string(txHash)] + if foundInMap { ts.mutPendingTx.Unlock() return } - tx, ok := val.(data.TransactionHandler) + var tx 
data.TransactionHandler + var wrappedTx *txcache.WrappedTransaction + var ok bool + + tx, ok = val.(data.TransactionHandler) if !ok { - ts.mutPendingTx.Unlock() - log.Error("transactionsSync.receivedTransaction", "tx hash", txHash, "error", update.ErrWrongTypeAssertion) - return + wrappedTx, ok = val.(*txcache.WrappedTransaction) + if !ok { + ts.mutPendingTx.Unlock() + log.Error("transactionsSync.receivedTransaction", "tx hash", txHash, "error", update.ErrWrongTypeAssertion) + return + } + + tx = wrappedTx.Tx } log.Debug("transactionsSync.receivedTransaction", "mb type", miniBlock.Type, "mb sender", miniBlock.SenderShardID, "mb receiver", miniBlock.ReceiverShardID, "tx hash got", txHash) From ddf42ebf643b3700f7940791b45897394a6e94c1 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Thu, 2 Jun 2022 12:36:27 +0300 Subject: [PATCH 34/70] * Changed log level from Warn to Debug in method checkIfTriggerCanBeActivated --- epochStart/shardchain/trigger.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/epochStart/shardchain/trigger.go b/epochStart/shardchain/trigger.go index 31b2618bfb3..a4bd936c2bd 100644 --- a/epochStart/shardchain/trigger.go +++ b/epochStart/shardchain/trigger.go @@ -729,14 +729,14 @@ func (t *trigger) checkIfTriggerCanBeActivated(hash string, metaHdr data.HeaderH missingMiniBlocksHashes, blockBody, err := t.peerMiniBlocksSyncer.SyncMiniBlocks(metaHdr) if err != nil { t.addMissingMiniBlocks(metaHdr.GetEpoch(), missingMiniBlocksHashes) - log.Warn("checkIfTriggerCanBeActivated.SyncMiniBlocks", "num missing mini blocks", len(missingMiniBlocksHashes), "error", err) + log.Debug("checkIfTriggerCanBeActivated.SyncMiniBlocks", "num missing mini blocks", len(missingMiniBlocksHashes), "error", err) return false, 0 } missingValidatorsInfoHashes, validatorsInfo, err := t.peerMiniBlocksSyncer.SyncValidatorsInfo(blockBody) if err != nil { t.addMissingValidatorsInfo(metaHdr.GetEpoch(), missingValidatorsInfoHashes) - 
log.Warn("checkIfTriggerCanBeActivated.SyncValidatorsInfo", "num missing validators info", len(missingValidatorsInfoHashes), "error", err) + log.Debug("checkIfTriggerCanBeActivated.SyncValidatorsInfo", "num missing validators info", len(missingValidatorsInfoHashes), "error", err) return false, 0 } From 0365aeee0e7ab3a7e8e133a2dcfa9d948158599c Mon Sep 17 00:00:00 2001 From: Sorin Stanculeanu Date: Tue, 26 Jul 2022 17:58:46 +0300 Subject: [PATCH 35/70] added disabled validator info resolver on storage resolvers factory --- .../baseResolversContainerFactory.go | 8 +++ .../metaResolversContainerFactory.go | 5 ++ .../metaResolversContainerFactory_test.go | 3 +- .../shardResolversContainerFactory.go | 5 ++ .../shardResolversContainerFactory_test.go | 4 +- .../disabled/validatorInfoResolver.go | 54 +++++++++++++++++++ .../disabled/validatorInfoResolver_test.go | 47 ++++++++++++++++ 7 files changed, 124 insertions(+), 2 deletions(-) create mode 100644 dataRetriever/resolvers/disabled/validatorInfoResolver.go create mode 100644 dataRetriever/resolvers/disabled/validatorInfoResolver_test.go diff --git a/dataRetriever/factory/storageResolversContainer/baseResolversContainerFactory.go b/dataRetriever/factory/storageResolversContainer/baseResolversContainerFactory.go index a851d475847..cc93025d4f2 100644 --- a/dataRetriever/factory/storageResolversContainer/baseResolversContainerFactory.go +++ b/dataRetriever/factory/storageResolversContainer/baseResolversContainerFactory.go @@ -13,6 +13,7 @@ import ( "github.com/ElrondNetwork/elrond-go/common/disabled" "github.com/ElrondNetwork/elrond-go/config" "github.com/ElrondNetwork/elrond-go/dataRetriever" + disabledResolvers "github.com/ElrondNetwork/elrond-go/dataRetriever/resolvers/disabled" "github.com/ElrondNetwork/elrond-go/dataRetriever/storageResolvers" "github.com/ElrondNetwork/elrond-go/process/factory" "github.com/ElrondNetwork/elrond-go/sharding" @@ -225,3 +226,10 @@ func (brcf *baseResolversContainerFactory) 
newImportDBTrieStorage( } return trieFactoryInstance.Create(args) } + +func (brcf *baseResolversContainerFactory) generateValidatorInfoResolver() error { + identifierValidatorInfo := common.ValidatorInfoTopic + validatorInfoResolver := disabledResolvers.NewDisabledValidatorInfoResolver() + + return brcf.container.Add(identifierValidatorInfo, validatorInfoResolver) +} diff --git a/dataRetriever/factory/storageResolversContainer/metaResolversContainerFactory.go b/dataRetriever/factory/storageResolversContainer/metaResolversContainerFactory.go index bce563fb412..0951201a426 100644 --- a/dataRetriever/factory/storageResolversContainer/metaResolversContainerFactory.go +++ b/dataRetriever/factory/storageResolversContainer/metaResolversContainerFactory.go @@ -94,6 +94,11 @@ func (mrcf *metaResolversContainerFactory) Create() (dataRetriever.ResolversCont return nil, err } + err = mrcf.generateValidatorInfoResolver() + if err != nil { + return nil, err + } + return mrcf.container, nil } diff --git a/dataRetriever/factory/storageResolversContainer/metaResolversContainerFactory_test.go b/dataRetriever/factory/storageResolversContainer/metaResolversContainerFactory_test.go index 5dd56f57828..fd0b9ebcb1b 100644 --- a/dataRetriever/factory/storageResolversContainer/metaResolversContainerFactory_test.go +++ b/dataRetriever/factory/storageResolversContainer/metaResolversContainerFactory_test.go @@ -169,8 +169,9 @@ func TestMetaResolversContainerFactory_With4ShardsShouldWork(t *testing.T) { numResolversRewards := noOfShards numResolversTxs := noOfShards + 1 numResolversTrieNodes := 2 + numValidatorInfo := 1 totalResolvers := numResolversShardHeadersForMetachain + numResolverMetablocks + numResolversMiniBlocks + - numResolversUnsigned + numResolversTxs + numResolversTrieNodes + numResolversRewards + numResolversUnsigned + numResolversTxs + numResolversTrieNodes + numResolversRewards + numValidatorInfo assert.Equal(t, totalResolvers, container.Len()) assert.Equal(t, totalResolvers, 
container.Len()) diff --git a/dataRetriever/factory/storageResolversContainer/shardResolversContainerFactory.go b/dataRetriever/factory/storageResolversContainer/shardResolversContainerFactory.go index e4470ad674f..b2ad94eff01 100644 --- a/dataRetriever/factory/storageResolversContainer/shardResolversContainerFactory.go +++ b/dataRetriever/factory/storageResolversContainer/shardResolversContainerFactory.go @@ -94,6 +94,11 @@ func (srcf *shardResolversContainerFactory) Create() (dataRetriever.ResolversCon return nil, err } + err = srcf.generateValidatorInfoResolver() + if err != nil { + return nil, err + } + return srcf.container, nil } diff --git a/dataRetriever/factory/storageResolversContainer/shardResolversContainerFactory_test.go b/dataRetriever/factory/storageResolversContainer/shardResolversContainerFactory_test.go index d1ae4138732..5665c21a58c 100644 --- a/dataRetriever/factory/storageResolversContainer/shardResolversContainerFactory_test.go +++ b/dataRetriever/factory/storageResolversContainer/shardResolversContainerFactory_test.go @@ -173,8 +173,10 @@ func TestShardResolversContainerFactory_With4ShardsShouldWork(t *testing.T) { numResolverMiniBlocks := noOfShards + 2 numResolverMetaBlockHeaders := 1 numResolverTrieNodes := 1 + numValidatorInfo := 1 totalResolvers := numResolverTxs + numResolverHeaders + numResolverMiniBlocks + - numResolverMetaBlockHeaders + numResolverSCRs + numResolverRewardTxs + numResolverTrieNodes + numResolverMetaBlockHeaders + numResolverSCRs + numResolverRewardTxs + + numResolverTrieNodes + numValidatorInfo assert.Equal(t, totalResolvers, container.Len()) } diff --git a/dataRetriever/resolvers/disabled/validatorInfoResolver.go b/dataRetriever/resolvers/disabled/validatorInfoResolver.go new file mode 100644 index 00000000000..c6322fe3051 --- /dev/null +++ b/dataRetriever/resolvers/disabled/validatorInfoResolver.go @@ -0,0 +1,54 @@ +package disabled + +import ( + "github.com/ElrondNetwork/elrond-go-core/core" + 
"github.com/ElrondNetwork/elrond-go/dataRetriever" + "github.com/ElrondNetwork/elrond-go/p2p" +) + +type validatorInfoResolver struct { +} + +// NewDisabledValidatorInfoResolver creates a new disabled validator info resolver instance +func NewDisabledValidatorInfoResolver() *validatorInfoResolver { + return &validatorInfoResolver{} +} + +// RequestDataFromHash does nothing and returns nil +func (res *validatorInfoResolver) RequestDataFromHash(_ []byte, _ uint32) error { + return nil +} + +// RequestDataFromHashArray does nothing and returns nil +func (res *validatorInfoResolver) RequestDataFromHashArray(_ [][]byte, _ uint32) error { + return nil +} + +// ProcessReceivedMessage does nothing and returns nil +func (res *validatorInfoResolver) ProcessReceivedMessage(_ p2p.MessageP2P, _ core.PeerID) error { + return nil +} + +// SetResolverDebugHandler does nothing and returns nil +func (res *validatorInfoResolver) SetResolverDebugHandler(_ dataRetriever.ResolverDebugHandler) error { + return nil +} + +// SetNumPeersToQuery does nothing +func (res *validatorInfoResolver) SetNumPeersToQuery(_ int, _ int) { +} + +// NumPeersToQuery returns 0 and 0 +func (res *validatorInfoResolver) NumPeersToQuery() (int, int) { + return 0, 0 +} + +// Close does nothing and returns nil +func (res *validatorInfoResolver) Close() error { + return nil +} + +// IsInterfaceNil returns true if there is no value under the interface +func (res *validatorInfoResolver) IsInterfaceNil() bool { + return res == nil +} diff --git a/dataRetriever/resolvers/disabled/validatorInfoResolver_test.go b/dataRetriever/resolvers/disabled/validatorInfoResolver_test.go new file mode 100644 index 00000000000..cdd43fda758 --- /dev/null +++ b/dataRetriever/resolvers/disabled/validatorInfoResolver_test.go @@ -0,0 +1,47 @@ +package disabled + +import ( + "fmt" + "testing" + + "github.com/ElrondNetwork/elrond-go-core/core/check" + "github.com/stretchr/testify/assert" +) + +func TestNewDisabledValidatorInfoResolver(t 
*testing.T) { + t.Parallel() + + resolver := NewDisabledValidatorInfoResolver() + assert.False(t, check.IfNil(resolver)) +} + +func Test_validatorInfoResolver_SetResolverDebugHandler(t *testing.T) { + t.Parallel() + + defer func() { + r := recover() + if r != nil { + assert.Fail(t, fmt.Sprintf("should have not failed %v", r)) + } + }() + + resolver := NewDisabledValidatorInfoResolver() + + err := resolver.RequestDataFromHash(nil, 0) + assert.Nil(t, err) + + err = resolver.RequestDataFromHashArray(nil, 0) + assert.Nil(t, err) + + err = resolver.SetResolverDebugHandler(nil) + assert.Nil(t, err) + + value1, value2 := resolver.NumPeersToQuery() + assert.Zero(t, value1) + assert.Zero(t, value2) + + err = resolver.Close() + assert.Nil(t, err) + + resolver.SetNumPeersToQuery(100, 100) +} From 919a80df506d3cf58d041f74e67b9d3ac3a0756f Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Tue, 2 Aug 2022 15:00:51 +0300 Subject: [PATCH 36/70] * Merged feat/refactor-peers-mbs into refactor-peers-mbs-on-meta-proposer-side * Fixed number of return parameters --- epochStart/bootstrap/syncValidatorStatus.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/epochStart/bootstrap/syncValidatorStatus.go b/epochStart/bootstrap/syncValidatorStatus.go index 3e40e4f8783..a67500a784d 100644 --- a/epochStart/bootstrap/syncValidatorStatus.go +++ b/epochStart/bootstrap/syncValidatorStatus.go @@ -228,12 +228,12 @@ func (s *syncValidatorStatus) getPeerBlockBodyForMeta( err = s.transactionsSyncer.SyncTransactionsFor(peerMiniBlocks, metaBlock.GetEpoch(), ctx) cancel() if err != nil { - return nil, err + return nil, nil, err } validatorsInfo, err := s.transactionsSyncer.GetValidatorsInfo() if err != nil { - return nil, err + return nil, nil, err } currentEpochValidatorInfoPool := s.dataPool.CurrentEpochValidatorInfo() From 4d6c3dd737b7ca3182c88c846fdaaa2721e4bf12 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Wed, 3 Aug 2022 18:32:38 +0300 Subject: [PATCH 37/70] * Added
markers where flag is needed --- epochStart/bootstrap/syncValidatorStatus.go | 31 ++++++++++--------- epochStart/metachain/validators.go | 25 +++++++++++++-- epochStart/shardchain/trigger.go | 19 +++++++----- .../preprocess/validatorInfoPreProcessor.go | 9 ++++-- 4 files changed, 57 insertions(+), 27 deletions(-) diff --git a/epochStart/bootstrap/syncValidatorStatus.go b/epochStart/bootstrap/syncValidatorStatus.go index a67500a784d..d30951c9218 100644 --- a/epochStart/bootstrap/syncValidatorStatus.go +++ b/epochStart/bootstrap/syncValidatorStatus.go @@ -223,22 +223,25 @@ func (s *syncValidatorStatus) getPeerBlockBodyForMeta( return nil, nil, err } - s.transactionsSyncer.ClearFields() - ctx, cancel = context.WithTimeout(context.Background(), time.Minute) - err = s.transactionsSyncer.SyncTransactionsFor(peerMiniBlocks, metaBlock.GetEpoch(), ctx) - cancel() - if err != nil { - return nil, nil, err - } + // TODO: Use refactor peers mbs activation flag below + if true { + s.transactionsSyncer.ClearFields() + ctx, cancel = context.WithTimeout(context.Background(), time.Minute) + err = s.transactionsSyncer.SyncTransactionsFor(peerMiniBlocks, metaBlock.GetEpoch(), ctx) + cancel() + if err != nil { + return nil, nil, err + } - validatorsInfo, err := s.transactionsSyncer.GetValidatorsInfo() - if err != nil { - return nil, nil, err - } + validatorsInfo, err := s.transactionsSyncer.GetValidatorsInfo() + if err != nil { + return nil, nil, err + } - currentEpochValidatorInfoPool := s.dataPool.CurrentEpochValidatorInfo() - for validatorInfoHash, validatorInfo := range validatorsInfo { - currentEpochValidatorInfoPool.AddValidatorInfo([]byte(validatorInfoHash), validatorInfo) + currentEpochValidatorInfoPool := s.dataPool.CurrentEpochValidatorInfo() + for validatorInfoHash, validatorInfo := range validatorsInfo { + currentEpochValidatorInfoPool.AddValidatorInfo([]byte(validatorInfoHash), validatorInfo) + } } blockBody := &block.Body{MiniBlocks: make([]*block.MiniBlock, 0, 
len(peerMiniBlocks))} diff --git a/epochStart/metachain/validators.go b/epochStart/metachain/validators.go index 35e46f6f1ff..7e3a1fe8b2b 100644 --- a/epochStart/metachain/validators.go +++ b/epochStart/metachain/validators.go @@ -136,18 +136,39 @@ func (vic *validatorInfoCreator) createMiniBlock(validatorsInfo []*state.Validat }) currentEpochValidatorInfo := vic.dataPool.CurrentEpochValidatorInfo() + for index, validator := range validatorCopy { shardValidatorInfo := createShardValidatorInfo(validator) + + shardValidatorInfoData, err := vic.getShardValidatorInfoData(shardValidatorInfo, currentEpochValidatorInfo) + if err != nil { + return nil, err + } + + miniBlock.TxHashes[index] = shardValidatorInfoData + } + + return miniBlock, nil +} + +func (vic *validatorInfoCreator) getShardValidatorInfoData(shardValidatorInfo *state.ShardValidatorInfo, currentEpochValidatorInfo dataRetriever.ValidatorInfoCacher) ([]byte, error) { + // TODO: Use refactor peers mbs activation flag below + if true { shardValidatorInfoHash, err := core.CalculateHash(vic.marshalizer, vic.hasher, shardValidatorInfo) if err != nil { return nil, err } currentEpochValidatorInfo.AddValidatorInfo(shardValidatorInfoHash, shardValidatorInfo) - miniBlock.TxHashes[index] = shardValidatorInfoHash + return shardValidatorInfoHash, nil } - return miniBlock, nil + marshalledShardValidatorInfo, err := vic.marshalizer.Marshal(shardValidatorInfo) + if err != nil { + return nil, err + } + + return marshalledShardValidatorInfo, nil } func createShardValidatorInfo(validator *state.ValidatorInfo) *state.ShardValidatorInfo { diff --git a/epochStart/shardchain/trigger.go b/epochStart/shardchain/trigger.go index a4bd936c2bd..017a8af28bd 100644 --- a/epochStart/shardchain/trigger.go +++ b/epochStart/shardchain/trigger.go @@ -733,15 +733,18 @@ func (t *trigger) checkIfTriggerCanBeActivated(hash string, metaHdr data.HeaderH return false, 0 } - missingValidatorsInfoHashes, validatorsInfo, err := 
t.peerMiniBlocksSyncer.SyncValidatorsInfo(blockBody) - if err != nil { - t.addMissingValidatorsInfo(metaHdr.GetEpoch(), missingValidatorsInfoHashes) - log.Debug("checkIfTriggerCanBeActivated.SyncValidatorsInfo", "num missing validators info", len(missingValidatorsInfoHashes), "error", err) - return false, 0 - } + // TODO: Use refactor peers mbs activation flag below + if true { + missingValidatorsInfoHashes, validatorsInfo, err := t.peerMiniBlocksSyncer.SyncValidatorsInfo(blockBody) + if err != nil { + t.addMissingValidatorsInfo(metaHdr.GetEpoch(), missingValidatorsInfoHashes) + log.Debug("checkIfTriggerCanBeActivated.SyncValidatorsInfo", "num missing validators info", len(missingValidatorsInfoHashes), "error", err) + return false, 0 + } - for validatorInfoHash, validatorInfo := range validatorsInfo { - t.currentEpochValidatorInfoPool.AddValidatorInfo([]byte(validatorInfoHash), validatorInfo) + for validatorInfoHash, validatorInfo := range validatorsInfo { + t.currentEpochValidatorInfoPool.AddValidatorInfo([]byte(validatorInfoHash), validatorInfo) + } } t.epochStartNotifier.NotifyAllPrepare(metaHdr, blockBody, t.currentEpochValidatorInfoPool) diff --git a/process/block/preprocess/validatorInfoPreProcessor.go b/process/block/preprocess/validatorInfoPreProcessor.go index c07e1d7eaa3..8a69cc88a7b 100644 --- a/process/block/preprocess/validatorInfoPreProcessor.go +++ b/process/block/preprocess/validatorInfoPreProcessor.go @@ -135,9 +135,12 @@ func (vip *validatorInfoPreprocessor) RestoreBlockDataIntoPools( continue } - err := vip.restoreValidatorsInfo(miniBlock) - if err != nil { - return validatorsInfoRestored, err + // TODO: Use refactor peers mbs activation flag below + if true { + err := vip.restoreValidatorsInfo(miniBlock) + if err != nil { + return validatorsInfoRestored, err + } } miniBlockHash, err := core.CalculateHash(vip.marshalizer, vip.hasher, miniBlock) From 594d41090e30f3038470b106d679449ac4304055 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Thu, 
4 Aug 2022 12:20:53 +0300 Subject: [PATCH 38/70] * Refactored validatorInfo into shardValidatorInfo --- process/peer/interceptedValidatorInfo.go | 40 ++++++++++++------------ 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/process/peer/interceptedValidatorInfo.go b/process/peer/interceptedValidatorInfo.go index 4a4904d2f05..4de280911cd 100644 --- a/process/peer/interceptedValidatorInfo.go +++ b/process/peer/interceptedValidatorInfo.go @@ -21,9 +21,9 @@ type ArgInterceptedValidatorInfo struct { // interceptedValidatorInfo is a wrapper over validatorInfo type interceptedValidatorInfo struct { - validatorInfo *state.ShardValidatorInfo - nodesCoordinator process.NodesCoordinator - hash []byte + shardValidatorInfo *state.ShardValidatorInfo + nodesCoordinator process.NodesCoordinator + hash []byte } // NewInterceptedValidatorInfo creates a new intercepted validator info instance @@ -33,15 +33,15 @@ func NewInterceptedValidatorInfo(args ArgInterceptedValidatorInfo) (*intercepted return nil, err } - validatorInfo, err := createValidatorInfo(args.Marshalizer, args.DataBuff) + shardValidatorInfo, err := createShardValidatorInfo(args.Marshalizer, args.DataBuff) if err != nil { return nil, err } return &interceptedValidatorInfo{ - validatorInfo: validatorInfo, - nodesCoordinator: args.NodesCoordinator, - hash: args.Hasher.Compute(string(args.DataBuff)), + shardValidatorInfo: shardValidatorInfo, + nodesCoordinator: args.NodesCoordinator, + hash: args.Hasher.Compute(string(args.DataBuff)), }, nil } @@ -62,30 +62,30 @@ func checkArgs(args ArgInterceptedValidatorInfo) error { return nil } -func createValidatorInfo(marshalizer marshal.Marshalizer, buff []byte) (*state.ShardValidatorInfo, error) { - validatorInfo := &state.ShardValidatorInfo{} - err := marshalizer.Unmarshal(validatorInfo, buff) +func createShardValidatorInfo(marshalizer marshal.Marshalizer, buff []byte) (*state.ShardValidatorInfo, error) { + shardValidatorInfo := &state.ShardValidatorInfo{} + err := 
marshalizer.Unmarshal(shardValidatorInfo, buff) if err != nil { return nil, err } - return validatorInfo, nil + return shardValidatorInfo, nil } // CheckValidity checks the validity of the received validator info func (ivi *interceptedValidatorInfo) CheckValidity() error { // Verify string properties len - err := verifyPropertyLen(publicKeyProperty, ivi.validatorInfo.PublicKey, publicKeyPropertyRequiredBytesLen, minSizeInBytes, maxSizeInBytes) + err := verifyPropertyLen(publicKeyProperty, ivi.shardValidatorInfo.PublicKey, publicKeyPropertyRequiredBytesLen, minSizeInBytes, maxSizeInBytes) if err != nil { return err } - err = verifyPropertyLen(listProperty, []byte(ivi.validatorInfo.List), 0, minSizeInBytes, maxSizeInBytes) + err = verifyPropertyLen(listProperty, []byte(ivi.shardValidatorInfo.List), 0, minSizeInBytes, maxSizeInBytes) if err != nil { return err } // Check if the public key is a validator - _, _, err = ivi.nodesCoordinator.GetValidatorWithPublicKey(ivi.validatorInfo.PublicKey) + _, _, err = ivi.nodesCoordinator.GetValidatorWithPublicKey(ivi.shardValidatorInfo.PublicKey) return err } @@ -96,7 +96,7 @@ func (ivi *interceptedValidatorInfo) IsForCurrentShard() bool { // ValidatorInfo returns the current validator info structure func (ivi *interceptedValidatorInfo) ValidatorInfo() *state.ShardValidatorInfo { - return ivi.validatorInfo + return ivi.shardValidatorInfo } // Hash returns the hash of this validator info @@ -117,11 +117,11 @@ func (ivi *interceptedValidatorInfo) Identifiers() [][]byte { // String returns the validator's info most important fields as string func (ivi *interceptedValidatorInfo) String() string { return fmt.Sprintf("pk=%s, shard=%d, list=%s, index=%d, tempRating=%d", - logger.DisplayByteSlice(ivi.validatorInfo.PublicKey), - ivi.validatorInfo.ShardId, - ivi.validatorInfo.List, - ivi.validatorInfo.Index, - ivi.validatorInfo.TempRating, + logger.DisplayByteSlice(ivi.shardValidatorInfo.PublicKey), + ivi.shardValidatorInfo.ShardId, + 
ivi.shardValidatorInfo.List, + ivi.shardValidatorInfo.Index, + ivi.shardValidatorInfo.TempRating, ) } From f2a6b5359cf9c33e5f58d16dc9325ee211df2d65 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Thu, 4 Aug 2022 16:32:36 +0300 Subject: [PATCH 39/70] * Implemented activation flag for refactor peers mini blocks --- cmd/node/config/enableEpochs.toml | 3 + config/epochConfig.go | 1 + epochStart/bootstrap/process.go | 59 +-- epochStart/bootstrap/storageProcess.go | 25 +- epochStart/bootstrap/syncValidatorStatus.go | 93 ++-- epochStart/metachain/validators.go | 68 ++- epochStart/metachain/validators_test.go | 3 + epochStart/shardchain/trigger.go | 92 ++-- epochStart/shardchain/trigger_test.go | 11 +- factory/blockProcessorCreator.go | 15 +- factory/processComponents.go | 92 ++-- factory/shardingFactory.go | 42 +- genesis/process/shardGenesisBlockCreator.go | 2 + integrationTests/consensus/testInitializer.go | 35 +- .../consensusComponents_test.go | 1 + .../processComponents_test.go | 1 + .../statusComponents/statusComponents_test.go | 1 + .../multiShard/hardFork/hardFork_test.go | 11 +- integrationTests/nodesCoordinatorFactory.go | 74 +-- integrationTests/testHeartbeatNode.go | 78 ++-- integrationTests/testP2PNode.go | 78 ++-- integrationTests/testProcessorNode.go | 73 +-- .../testProcessorNodeWithCoordinator.go | 33 +- .../testProcessorNodeWithMultisigner.go | 74 +-- node/nodeRunner.go | 2 + .../preprocess/validatorInfoPreProcessor.go | 42 +- .../validatorInfoPreProcessor_test.go | 50 ++ process/block/shardblock_test.go | 6 + process/coordinator/process_test.go | 9 + .../shard/preProcessorsContainerFactory.go | 5 + .../preProcessorsContainerFactory_test.go | 25 + process/peer/interceptedValidatorInfo_test.go | 6 +- .../indexHashedNodesCoordinator.go | 127 +++-- .../indexHashedNodesCoordinatorLite_test.go | 4 +- ...dexHashedNodesCoordinatorWithRater_test.go | 276 +++++------ .../indexHashedNodesCoordinator_test.go | 435 +++++++++--------- 
sharding/nodesCoordinator/shardingArgs.go | 41 +- update/factory/exportHandlerFactory.go | 232 +++++----- 38 files changed, 1228 insertions(+), 997 deletions(-) diff --git a/cmd/node/config/enableEpochs.toml b/cmd/node/config/enableEpochs.toml index aaddfdbf513..42f116f382a 100644 --- a/cmd/node/config/enableEpochs.toml +++ b/cmd/node/config/enableEpochs.toml @@ -218,6 +218,9 @@ # MiniBlockPartialExecutionEnableEpoch represents the epoch when mini block partial execution will be enabled MiniBlockPartialExecutionEnableEpoch = 3 + # RefactorPeersMiniBlocksEnableEpoch represents the epoch when refactor of the peers mini blocks will be enabled + RefactorPeersMiniBlocksEnableEpoch = 5 + # MaxNodesChangeEnableEpoch holds configuration for changing the maximum number of nodes and the enabling epoch MaxNodesChangeEnableEpoch = [ { EpochEnable = 0, MaxNumNodes = 36, NodesToShufflePerShard = 4 }, diff --git a/config/epochConfig.go b/config/epochConfig.go index 1df8e5b5931..5a68efc74e0 100644 --- a/config/epochConfig.go +++ b/config/epochConfig.go @@ -85,6 +85,7 @@ type EnableEpochs struct { HeartbeatDisableEpoch uint32 MiniBlockPartialExecutionEnableEpoch uint32 ESDTMetadataContinuousCleanupEnableEpoch uint32 + RefactorPeersMiniBlocksEnableEpoch uint32 } // GasScheduleByEpochs represents a gas schedule toml entry that will be applied from the provided epoch diff --git a/epochStart/bootstrap/process.go b/epochStart/bootstrap/process.go index 9fa329f61a2..ded007827db 100644 --- a/epochStart/bootstrap/process.go +++ b/epochStart/bootstrap/process.go @@ -706,19 +706,20 @@ func (e *epochStartBootstrap) processNodesConfig(pubKey []byte) ([]*block.MiniBl shardId = e.genesisShardCoordinator.SelfId() } argsNewValidatorStatusSyncers := ArgsNewSyncValidatorStatus{ - DataPool: e.dataPool, - Marshalizer: e.coreComponentsHolder.InternalMarshalizer(), - RequestHandler: e.requestHandler, - ChanceComputer: e.rater, - GenesisNodesConfig: e.genesisNodesConfig, - NodeShuffler: e.nodeShuffler, - 
Hasher: e.coreComponentsHolder.Hasher(), - PubKey: pubKey, - ShardIdAsObserver: shardId, - WaitingListFixEnableEpoch: e.enableEpochs.WaitingListFixEnableEpoch, - ChanNodeStop: e.coreComponentsHolder.ChanStopNodeProcess(), - NodeTypeProvider: e.coreComponentsHolder.NodeTypeProvider(), - IsFullArchive: e.prefsConfig.FullArchive, + DataPool: e.dataPool, + Marshalizer: e.coreComponentsHolder.InternalMarshalizer(), + RequestHandler: e.requestHandler, + ChanceComputer: e.rater, + GenesisNodesConfig: e.genesisNodesConfig, + NodeShuffler: e.nodeShuffler, + Hasher: e.coreComponentsHolder.Hasher(), + PubKey: pubKey, + ShardIdAsObserver: shardId, + WaitingListFixEnableEpoch: e.enableEpochs.WaitingListFixEnableEpoch, + ChanNodeStop: e.coreComponentsHolder.ChanStopNodeProcess(), + NodeTypeProvider: e.coreComponentsHolder.NodeTypeProvider(), + IsFullArchive: e.prefsConfig.FullArchive, + RefactorPeersMiniBlocksEnableEpoch: e.enableEpochs.RefactorPeersMiniBlocksEnableEpoch, } e.nodesConfigHandler, err = NewSyncValidatorStatus(argsNewValidatorStatusSyncers) @@ -1143,22 +1144,22 @@ func (e *epochStartBootstrap) createRequestHandler() error { // this one should only be used before determining the correct shard where the node should reside log.Debug("epochStartBootstrap.createRequestHandler", "shard", e.shardCoordinator.SelfId()) resolversContainerArgs := resolverscontainer.FactoryArgs{ - ShardCoordinator: e.shardCoordinator, - Messenger: e.messenger, - Store: storageService, - Marshalizer: e.coreComponentsHolder.InternalMarshalizer(), - DataPools: e.dataPool, - Uint64ByteSliceConverter: uint64ByteSlice.NewBigEndianConverter(), - NumConcurrentResolvingJobs: 10, - DataPacker: dataPacker, - TriesContainer: e.trieContainer, - SizeCheckDelta: 0, - InputAntifloodHandler: disabled.NewAntiFloodHandler(), - OutputAntifloodHandler: disabled.NewAntiFloodHandler(), - CurrentNetworkEpochProvider: disabled.NewCurrentNetworkEpochProviderHandler(), - PreferredPeersHolder: 
disabled.NewPreferredPeersHolder(), - ResolverConfig: e.generalConfig.Resolvers, - PeersRatingHandler: disabled.NewDisabledPeersRatingHandler(), + ShardCoordinator: e.shardCoordinator, + Messenger: e.messenger, + Store: storageService, + Marshalizer: e.coreComponentsHolder.InternalMarshalizer(), + DataPools: e.dataPool, + Uint64ByteSliceConverter: uint64ByteSlice.NewBigEndianConverter(), + NumConcurrentResolvingJobs: 10, + DataPacker: dataPacker, + TriesContainer: e.trieContainer, + SizeCheckDelta: 0, + InputAntifloodHandler: disabled.NewAntiFloodHandler(), + OutputAntifloodHandler: disabled.NewAntiFloodHandler(), + CurrentNetworkEpochProvider: disabled.NewCurrentNetworkEpochProviderHandler(), + PreferredPeersHolder: disabled.NewPreferredPeersHolder(), + ResolverConfig: e.generalConfig.Resolvers, + PeersRatingHandler: disabled.NewDisabledPeersRatingHandler(), NodesCoordinator: disabled.NewNodesCoordinator(), MaxNumOfPeerAuthenticationInResponse: e.generalConfig.HeartbeatV2.MaxNumOfPeerAuthenticationInResponse, PeerShardMapper: disabled.NewPeerShardMapper(), diff --git a/epochStart/bootstrap/storageProcess.go b/epochStart/bootstrap/storageProcess.go index b4049f1ee24..fad7f095631 100644 --- a/epochStart/bootstrap/storageProcess.go +++ b/epochStart/bootstrap/storageProcess.go @@ -399,18 +399,19 @@ func (sesb *storageEpochStartBootstrap) processNodesConfig(pubKey []byte) error shardId = sesb.genesisShardCoordinator.SelfId() } argsNewValidatorStatusSyncers := ArgsNewSyncValidatorStatus{ - DataPool: sesb.dataPool, - Marshalizer: sesb.coreComponentsHolder.InternalMarshalizer(), - RequestHandler: sesb.requestHandler, - ChanceComputer: sesb.rater, - GenesisNodesConfig: sesb.genesisNodesConfig, - NodeShuffler: sesb.nodeShuffler, - Hasher: sesb.coreComponentsHolder.Hasher(), - PubKey: pubKey, - ShardIdAsObserver: shardId, - ChanNodeStop: sesb.coreComponentsHolder.ChanStopNodeProcess(), - NodeTypeProvider: sesb.coreComponentsHolder.NodeTypeProvider(), - IsFullArchive: 
sesb.prefsConfig.FullArchive, + DataPool: sesb.dataPool, + Marshalizer: sesb.coreComponentsHolder.InternalMarshalizer(), + RequestHandler: sesb.requestHandler, + ChanceComputer: sesb.rater, + GenesisNodesConfig: sesb.genesisNodesConfig, + NodeShuffler: sesb.nodeShuffler, + Hasher: sesb.coreComponentsHolder.Hasher(), + PubKey: pubKey, + ShardIdAsObserver: shardId, + ChanNodeStop: sesb.coreComponentsHolder.ChanStopNodeProcess(), + NodeTypeProvider: sesb.coreComponentsHolder.NodeTypeProvider(), + IsFullArchive: sesb.prefsConfig.FullArchive, + RefactorPeersMiniBlocksEnableEpoch: sesb.enableEpochs.RefactorPeersMiniBlocksEnableEpoch, } sesb.nodesConfigHandler, err = NewSyncValidatorStatus(argsNewValidatorStatusSyncers) if err != nil { diff --git a/epochStart/bootstrap/syncValidatorStatus.go b/epochStart/bootstrap/syncValidatorStatus.go index d30951c9218..c40bd34cf5e 100644 --- a/epochStart/bootstrap/syncValidatorStatus.go +++ b/epochStart/bootstrap/syncValidatorStatus.go @@ -24,31 +24,33 @@ import ( const consensusGroupCacheSize = 50 type syncValidatorStatus struct { - miniBlocksSyncer epochStart.PendingMiniBlocksSyncHandler - transactionsSyncer update.TransactionsSyncHandler - dataPool dataRetriever.PoolsHolder - marshalizer marshal.Marshalizer - requestHandler process.RequestHandler - nodeCoordinator StartInEpochNodesCoordinator - genesisNodesConfig sharding.GenesisNodesSetupHandler - memDB storage.Storer + miniBlocksSyncer epochStart.PendingMiniBlocksSyncHandler + transactionsSyncer update.TransactionsSyncHandler + dataPool dataRetriever.PoolsHolder + marshalizer marshal.Marshalizer + requestHandler process.RequestHandler + nodeCoordinator StartInEpochNodesCoordinator + genesisNodesConfig sharding.GenesisNodesSetupHandler + memDB storage.Storer + refactorPeersMiniBlocksEnableEpoch uint32 } // ArgsNewSyncValidatorStatus holds the arguments needed for creating a new validator status process component type ArgsNewSyncValidatorStatus struct { - DataPool 
dataRetriever.PoolsHolder - Marshalizer marshal.Marshalizer - Hasher hashing.Hasher - RequestHandler process.RequestHandler - ChanceComputer nodesCoordinator.ChanceComputer - GenesisNodesConfig sharding.GenesisNodesSetupHandler - NodeShuffler nodesCoordinator.NodesShuffler - PubKey []byte - ShardIdAsObserver uint32 - WaitingListFixEnableEpoch uint32 - ChanNodeStop chan endProcess.ArgEndProcess - NodeTypeProvider NodeTypeProviderHandler - IsFullArchive bool + DataPool dataRetriever.PoolsHolder + Marshalizer marshal.Marshalizer + Hasher hashing.Hasher + RequestHandler process.RequestHandler + ChanceComputer nodesCoordinator.ChanceComputer + GenesisNodesConfig sharding.GenesisNodesSetupHandler + NodeShuffler nodesCoordinator.NodesShuffler + PubKey []byte + ShardIdAsObserver uint32 + WaitingListFixEnableEpoch uint32 + ChanNodeStop chan endProcess.ArgEndProcess + NodeTypeProvider NodeTypeProviderHandler + IsFullArchive bool + RefactorPeersMiniBlocksEnableEpoch uint32 } // NewSyncValidatorStatus creates a new validator status process component @@ -58,10 +60,11 @@ func NewSyncValidatorStatus(args ArgsNewSyncValidatorStatus) (*syncValidatorStat } s := &syncValidatorStatus{ - dataPool: args.DataPool, - marshalizer: args.Marshalizer, - requestHandler: args.RequestHandler, - genesisNodesConfig: args.GenesisNodesConfig, + dataPool: args.DataPool, + marshalizer: args.Marshalizer, + requestHandler: args.RequestHandler, + genesisNodesConfig: args.GenesisNodesConfig, + refactorPeersMiniBlocksEnableEpoch: args.RefactorPeersMiniBlocksEnableEpoch, } var err error @@ -108,24 +111,25 @@ func NewSyncValidatorStatus(args ArgsNewSyncValidatorStatus) (*syncValidatorStat s.memDB = disabled.CreateMemUnit() argsNodesCoordinator := nodesCoordinator.ArgNodesCoordinator{ - ShardConsensusGroupSize: int(args.GenesisNodesConfig.GetShardConsensusGroupSize()), - MetaConsensusGroupSize: int(args.GenesisNodesConfig.GetMetaConsensusGroupSize()), - Marshalizer: args.Marshalizer, - Hasher: args.Hasher, - 
Shuffler: args.NodeShuffler, - EpochStartNotifier: &disabled.EpochStartNotifier{}, - BootStorer: s.memDB, - ShardIDAsObserver: args.ShardIdAsObserver, - NbShards: args.GenesisNodesConfig.NumberOfShards(), - EligibleNodes: eligibleValidators, - WaitingNodes: waitingValidators, - SelfPublicKey: args.PubKey, - ConsensusGroupCache: consensusGroupCache, - ShuffledOutHandler: disabled.NewShuffledOutHandler(), - WaitingListFixEnabledEpoch: args.WaitingListFixEnableEpoch, - ChanStopNode: args.ChanNodeStop, - NodeTypeProvider: args.NodeTypeProvider, - IsFullArchive: args.IsFullArchive, + ShardConsensusGroupSize: int(args.GenesisNodesConfig.GetShardConsensusGroupSize()), + MetaConsensusGroupSize: int(args.GenesisNodesConfig.GetMetaConsensusGroupSize()), + Marshalizer: args.Marshalizer, + Hasher: args.Hasher, + Shuffler: args.NodeShuffler, + EpochStartNotifier: &disabled.EpochStartNotifier{}, + BootStorer: s.memDB, + ShardIDAsObserver: args.ShardIdAsObserver, + NbShards: args.GenesisNodesConfig.NumberOfShards(), + EligibleNodes: eligibleValidators, + WaitingNodes: waitingValidators, + SelfPublicKey: args.PubKey, + ConsensusGroupCache: consensusGroupCache, + ShuffledOutHandler: disabled.NewShuffledOutHandler(), + WaitingListFixEnabledEpoch: args.WaitingListFixEnableEpoch, + ChanStopNode: args.ChanNodeStop, + NodeTypeProvider: args.NodeTypeProvider, + IsFullArchive: args.IsFullArchive, + RefactorPeersMiniBlocksEnableEpoch: args.RefactorPeersMiniBlocksEnableEpoch, } baseNodesCoordinator, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argsNodesCoordinator) if err != nil { @@ -223,8 +227,7 @@ func (s *syncValidatorStatus) getPeerBlockBodyForMeta( return nil, nil, err } - // TODO: Use refactor peers mbs activation flag below - if true { + if metaBlock.GetEpoch() >= s.refactorPeersMiniBlocksEnableEpoch { s.transactionsSyncer.ClearFields() ctx, cancel = context.WithTimeout(context.Background(), time.Minute) err = s.transactionsSyncer.SyncTransactionsFor(peerMiniBlocks, 
metaBlock.GetEpoch(), ctx) diff --git a/epochStart/metachain/validators.go b/epochStart/metachain/validators.go index 7e3a1fe8b2b..5e301eaef1a 100644 --- a/epochStart/metachain/validators.go +++ b/epochStart/metachain/validators.go @@ -2,6 +2,7 @@ package metachain import ( "bytes" + "github.com/ElrondNetwork/elrond-go-core/core/atomic" "sort" "sync" @@ -24,22 +25,26 @@ var _ process.EpochStartValidatorInfoCreator = (*validatorInfoCreator)(nil) // ArgsNewValidatorInfoCreator defines the arguments structure needed to create a new validatorInfo creator type ArgsNewValidatorInfoCreator struct { - ShardCoordinator sharding.Coordinator - ValidatorInfoStorage storage.Storer - MiniBlockStorage storage.Storer - Hasher hashing.Hasher - Marshalizer marshal.Marshalizer - DataPool dataRetriever.PoolsHolder + ShardCoordinator sharding.Coordinator + ValidatorInfoStorage storage.Storer + MiniBlockStorage storage.Storer + Hasher hashing.Hasher + Marshalizer marshal.Marshalizer + DataPool dataRetriever.PoolsHolder + EpochNotifier process.EpochNotifier + RefactorPeersMiniBlocksEnableEpoch uint32 } type validatorInfoCreator struct { - shardCoordinator sharding.Coordinator - validatorInfoStorage storage.Storer - miniBlockStorage storage.Storer - hasher hashing.Hasher - marshalizer marshal.Marshalizer - dataPool dataRetriever.PoolsHolder - mutValidatorInfo sync.Mutex + shardCoordinator sharding.Coordinator + validatorInfoStorage storage.Storer + miniBlockStorage storage.Storer + hasher hashing.Hasher + marshalizer marshal.Marshalizer + dataPool dataRetriever.PoolsHolder + mutValidatorInfo sync.Mutex + refactorPeersMiniBlocksEnableEpoch uint32 + flagRefactorPeersMiniBlocks atomic.Flag } // NewValidatorInfoCreator creates a new validatorInfo creator object @@ -65,18 +70,26 @@ func NewValidatorInfoCreator(args ArgsNewValidatorInfoCreator) (*validatorInfoCr if check.IfNil(args.DataPool.CurrentEpochValidatorInfo()) { return nil, epochStart.ErrNilCurrentEpochValidatorsInfoPool } + if 
check.IfNil(args.EpochNotifier) { + return nil, epochStart.ErrNilEpochNotifier + } //TODO: currValidatorInfoCache := dataPool.NewCurrentEpochValidatorInfoPool() should be replaced by //args.DataPool.CurrentEpochValidatorInfo(), as this pool is already created vic := &validatorInfoCreator{ - shardCoordinator: args.ShardCoordinator, - hasher: args.Hasher, - marshalizer: args.Marshalizer, - validatorInfoStorage: args.ValidatorInfoStorage, - miniBlockStorage: args.MiniBlockStorage, - dataPool: args.DataPool, + shardCoordinator: args.ShardCoordinator, + hasher: args.Hasher, + marshalizer: args.Marshalizer, + validatorInfoStorage: args.ValidatorInfoStorage, + miniBlockStorage: args.MiniBlockStorage, + dataPool: args.DataPool, + refactorPeersMiniBlocksEnableEpoch: args.RefactorPeersMiniBlocksEnableEpoch, } + log.Debug("validatorInfoCreator: enable epoch for refactor peers mini blocks", "epoch", vic.refactorPeersMiniBlocksEnableEpoch) + + args.EpochNotifier.RegisterNotifyHandler(vic) + return vic, nil } @@ -135,12 +148,12 @@ func (vic *validatorInfoCreator) createMiniBlock(validatorsInfo []*state.Validat return bytes.Compare(validatorCopy[a].PublicKey, validatorCopy[b].PublicKey) < 0 }) - currentEpochValidatorInfo := vic.dataPool.CurrentEpochValidatorInfo() + validatorInfoCacher := vic.dataPool.CurrentEpochValidatorInfo() for index, validator := range validatorCopy { shardValidatorInfo := createShardValidatorInfo(validator) - shardValidatorInfoData, err := vic.getShardValidatorInfoData(shardValidatorInfo, currentEpochValidatorInfo) + shardValidatorInfoData, err := vic.getShardValidatorInfoData(shardValidatorInfo, validatorInfoCacher) if err != nil { return nil, err } @@ -151,15 +164,14 @@ func (vic *validatorInfoCreator) createMiniBlock(validatorsInfo []*state.Validat return miniBlock, nil } -func (vic *validatorInfoCreator) getShardValidatorInfoData(shardValidatorInfo *state.ShardValidatorInfo, currentEpochValidatorInfo dataRetriever.ValidatorInfoCacher) ([]byte, error) { 
- // TODO: Use refactor peers mbs activation flag below - if true { +func (vic *validatorInfoCreator) getShardValidatorInfoData(shardValidatorInfo *state.ShardValidatorInfo, validatorInfoCacher dataRetriever.ValidatorInfoCacher) ([]byte, error) { + if vic.flagRefactorPeersMiniBlocks.IsSet() { shardValidatorInfoHash, err := core.CalculateHash(vic.marshalizer, vic.hasher, shardValidatorInfo) if err != nil { return nil, err } - currentEpochValidatorInfo.AddValidatorInfo(shardValidatorInfoHash, shardValidatorInfo) + validatorInfoCacher.AddValidatorInfo(shardValidatorInfoHash, shardValidatorInfo) return shardValidatorInfoHash, nil } @@ -417,3 +429,9 @@ func (vic *validatorInfoCreator) clean() { func (vic *validatorInfoCreator) IsInterfaceNil() bool { return vic == nil } + +// EpochConfirmed is called whenever a new epoch is confirmed +func (vic *validatorInfoCreator) EpochConfirmed(epoch uint32, _ uint64) { + vic.flagRefactorPeersMiniBlocks.SetValue(epoch >= vic.refactorPeersMiniBlocksEnableEpoch) + log.Debug("validatorInfoCreator: refactor peers mini blocks", "enabled", vic.flagRefactorPeersMiniBlocks.IsSet()) +} diff --git a/epochStart/metachain/validators_test.go b/epochStart/metachain/validators_test.go index 00c956f6d1f..e7c52f93205 100644 --- a/epochStart/metachain/validators_test.go +++ b/epochStart/metachain/validators_test.go @@ -3,6 +3,7 @@ package metachain import ( "bytes" "errors" + "github.com/ElrondNetwork/elrond-go/testscommon/epochNotifier" "math/big" "reflect" "sort" @@ -129,6 +130,8 @@ func createMockEpochValidatorInfoCreatorsArguments() ArgsNewValidatorInfoCreator return &validatorInfoCacherMock.ValidatorInfoCacherMock{} }, }, + EpochNotifier: &epochNotifier.EpochNotifierStub{}, + RefactorPeersMiniBlocksEnableEpoch: 0, } return argsNewEpochEconomics } diff --git a/epochStart/shardchain/trigger.go b/epochStart/shardchain/trigger.go index 017a8af28bd..c1c1b6c60a6 100644 --- a/epochStart/shardchain/trigger.go +++ b/epochStart/shardchain/trigger.go @@ 
-54,9 +54,10 @@ type ArgsShardEpochStartTrigger struct { RoundHandler process.RoundHandler AppStatusHandler core.AppStatusHandler - Epoch uint32 - Validity uint64 - Finality uint64 + Epoch uint32 + Validity uint64 + Finality uint64 + RefactorPeersMiniBlocksEnableEpoch uint32 } type trigger struct { @@ -105,11 +106,12 @@ type trigger struct { appStatusHandler core.AppStatusHandler - mapMissingMiniBlocks map[string]uint32 - mapMissingValidatorsInfo map[string]uint32 - mutMissingMiniBlocks sync.RWMutex - mutMissingValidatorsInfo sync.RWMutex - cancelFunc func() + mapMissingMiniBlocks map[string]uint32 + mapMissingValidatorsInfo map[string]uint32 + mutMissingMiniBlocks sync.RWMutex + mutMissingValidatorsInfo sync.RWMutex + cancelFunc func() + refactorPeersMiniBlocksEnableEpoch uint32 } type metaInfo struct { @@ -213,41 +215,42 @@ func NewEpochStartTrigger(args *ArgsShardEpochStartTrigger) (*trigger, error) { trigggerStateKey := common.TriggerRegistryInitialKeyPrefix + fmt.Sprintf("%d", args.Epoch) t := &trigger{ - triggerStateKey: []byte(trigggerStateKey), - epoch: args.Epoch, - metaEpoch: args.Epoch, - currentRoundIndex: 0, - epochStartRound: 0, - epochFinalityAttestingRound: 0, - isEpochStart: false, - validity: args.Validity, - finality: args.Finality, - newEpochHdrReceived: false, - mutTrigger: sync.RWMutex{}, - mapHashHdr: make(map[string]data.HeaderHandler), - mapNonceHashes: make(map[uint64][]string), - mapEpochStartHdrs: make(map[string]data.HeaderHandler), - mapFinalizedEpochs: make(map[uint32]string), - headersPool: args.DataPool.Headers(), - miniBlocksPool: args.DataPool.MiniBlocks(), - validatorInfoPool: args.DataPool.ValidatorsInfo(), - currentEpochValidatorInfoPool: args.DataPool.CurrentEpochValidatorInfo(), - metaHdrStorage: metaHdrStorage, - shardHdrStorage: shardHdrStorage, - triggerStorage: triggerStorage, - metaNonceHdrStorage: metaHdrNoncesStorage, - uint64Converter: args.Uint64Converter, - marshaller: args.Marshalizer, - hasher: args.Hasher, - 
headerValidator: args.HeaderValidator, - requestHandler: args.RequestHandler, - epochMetaBlockHash: nil, - epochStartNotifier: args.EpochStartNotifier, - epochStartMeta: &block.MetaBlock{}, - epochStartShardHeader: &block.Header{}, - peerMiniBlocksSyncer: args.PeerMiniBlocksSyncer, - appStatusHandler: args.AppStatusHandler, - roundHandler: args.RoundHandler, + triggerStateKey: []byte(trigggerStateKey), + epoch: args.Epoch, + metaEpoch: args.Epoch, + currentRoundIndex: 0, + epochStartRound: 0, + epochFinalityAttestingRound: 0, + isEpochStart: false, + validity: args.Validity, + finality: args.Finality, + newEpochHdrReceived: false, + mutTrigger: sync.RWMutex{}, + mapHashHdr: make(map[string]data.HeaderHandler), + mapNonceHashes: make(map[uint64][]string), + mapEpochStartHdrs: make(map[string]data.HeaderHandler), + mapFinalizedEpochs: make(map[uint32]string), + headersPool: args.DataPool.Headers(), + miniBlocksPool: args.DataPool.MiniBlocks(), + validatorInfoPool: args.DataPool.ValidatorsInfo(), + currentEpochValidatorInfoPool: args.DataPool.CurrentEpochValidatorInfo(), + metaHdrStorage: metaHdrStorage, + shardHdrStorage: shardHdrStorage, + triggerStorage: triggerStorage, + metaNonceHdrStorage: metaHdrNoncesStorage, + uint64Converter: args.Uint64Converter, + marshaller: args.Marshalizer, + hasher: args.Hasher, + headerValidator: args.HeaderValidator, + requestHandler: args.RequestHandler, + epochMetaBlockHash: nil, + epochStartNotifier: args.EpochStartNotifier, + epochStartMeta: &block.MetaBlock{}, + epochStartShardHeader: &block.Header{}, + peerMiniBlocksSyncer: args.PeerMiniBlocksSyncer, + appStatusHandler: args.AppStatusHandler, + roundHandler: args.RoundHandler, + refactorPeersMiniBlocksEnableEpoch: args.RefactorPeersMiniBlocksEnableEpoch, } t.headersPool.RegisterHandler(t.receivedMetaBlock) @@ -733,8 +736,7 @@ func (t *trigger) checkIfTriggerCanBeActivated(hash string, metaHdr data.HeaderH return false, 0 } - // TODO: Use refactor peers mbs activation flag below 
- if true { + if metaHdr.GetEpoch() >= t.refactorPeersMiniBlocksEnableEpoch { missingValidatorsInfoHashes, validatorsInfo, err := t.peerMiniBlocksSyncer.SyncValidatorsInfo(blockBody) if err != nil { t.addMissingValidatorsInfo(metaHdr.GetEpoch(), missingValidatorsInfoHashes) diff --git a/epochStart/shardchain/trigger_test.go b/epochStart/shardchain/trigger_test.go index 6645721d5eb..b6e0b0c4fcf 100644 --- a/epochStart/shardchain/trigger_test.go +++ b/epochStart/shardchain/trigger_test.go @@ -56,11 +56,12 @@ func createMockShardEpochStartTriggerArguments() *ArgsShardEpochStartTrigger { } }, }, - RequestHandler: &testscommon.RequestHandlerStub{}, - EpochStartNotifier: &mock.EpochStartNotifierStub{}, - PeerMiniBlocksSyncer: &mock.ValidatorInfoSyncerStub{}, - RoundHandler: &mock.RoundHandlerStub{}, - AppStatusHandler: &statusHandlerMock.AppStatusHandlerStub{}, + RequestHandler: &testscommon.RequestHandlerStub{}, + EpochStartNotifier: &mock.EpochStartNotifierStub{}, + PeerMiniBlocksSyncer: &mock.ValidatorInfoSyncerStub{}, + RoundHandler: &mock.RoundHandlerStub{}, + AppStatusHandler: &statusHandlerMock.AppStatusHandlerStub{}, + RefactorPeersMiniBlocksEnableEpoch: 0, } } diff --git a/factory/blockProcessorCreator.go b/factory/blockProcessorCreator.go index 0a937b3eafd..eefcfc9268e 100644 --- a/factory/blockProcessorCreator.go +++ b/factory/blockProcessorCreator.go @@ -336,6 +336,7 @@ func (pcf *processComponentsFactory) newShardBlockProcessor( txTypeHandler, scheduledTxsExecutionHandler, processedMiniBlocksTracker, + enableEpochs.RefactorPeersMiniBlocksEnableEpoch, ) if err != nil { return nil, err @@ -786,12 +787,14 @@ func (pcf *processComponentsFactory) newMetaBlockProcessor( validatorInfoStorage := pcf.data.StorageService().GetStorer(dataRetriever.UnsignedTransactionUnit) argsEpochValidatorInfo := metachainEpochStart.ArgsNewValidatorInfoCreator{ - ShardCoordinator: pcf.bootstrapComponents.ShardCoordinator(), - ValidatorInfoStorage: validatorInfoStorage, - 
MiniBlockStorage: miniBlockStorage, - Hasher: pcf.coreData.Hasher(), - Marshalizer: pcf.coreData.InternalMarshalizer(), - DataPool: pcf.data.Datapool(), + ShardCoordinator: pcf.bootstrapComponents.ShardCoordinator(), + ValidatorInfoStorage: validatorInfoStorage, + MiniBlockStorage: miniBlockStorage, + Hasher: pcf.coreData.Hasher(), + Marshalizer: pcf.coreData.InternalMarshalizer(), + DataPool: pcf.data.Datapool(), + EpochNotifier: pcf.epochNotifier, + RefactorPeersMiniBlocksEnableEpoch: enableEpochs.RefactorPeersMiniBlocksEnableEpoch, } validatorInfoCreator, err := metachainEpochStart.NewValidatorInfoCreator(argsEpochValidatorInfo) if err != nil { diff --git a/factory/processComponents.go b/factory/processComponents.go index 0b0affab63f..967064c0ad7 100644 --- a/factory/processComponents.go +++ b/factory/processComponents.go @@ -729,20 +729,21 @@ func (pcf *processComponentsFactory) newEpochStartTrigger(requestHandler epochSt } argEpochStart := &shardchain.ArgsShardEpochStartTrigger{ - Marshalizer: pcf.coreData.InternalMarshalizer(), - Hasher: pcf.coreData.Hasher(), - HeaderValidator: headerValidator, - Uint64Converter: pcf.coreData.Uint64ByteSliceConverter(), - DataPool: pcf.data.Datapool(), - Storage: pcf.data.StorageService(), - RequestHandler: requestHandler, - Epoch: pcf.bootstrapComponents.EpochBootstrapParams().Epoch(), - EpochStartNotifier: pcf.coreData.EpochStartNotifierWithConfirm(), - Validity: process.MetaBlockValidity, - Finality: process.BlockFinality, - PeerMiniBlocksSyncer: peerMiniBlockSyncer, - RoundHandler: pcf.coreData.RoundHandler(), - AppStatusHandler: pcf.coreData.StatusHandler(), + Marshalizer: pcf.coreData.InternalMarshalizer(), + Hasher: pcf.coreData.Hasher(), + HeaderValidator: headerValidator, + Uint64Converter: pcf.coreData.Uint64ByteSliceConverter(), + DataPool: pcf.data.Datapool(), + Storage: pcf.data.StorageService(), + RequestHandler: requestHandler, + Epoch: pcf.bootstrapComponents.EpochBootstrapParams().Epoch(), + 
EpochStartNotifier: pcf.coreData.EpochStartNotifierWithConfirm(), + Validity: process.MetaBlockValidity, + Finality: process.BlockFinality, + PeerMiniBlocksSyncer: peerMiniBlockSyncer, + RoundHandler: pcf.coreData.RoundHandler(), + AppStatusHandler: pcf.coreData.StatusHandler(), + RefactorPeersMiniBlocksEnableEpoch: pcf.epochConfig.EnableEpochs.RefactorPeersMiniBlocksEnableEpoch, } epochStartTrigger, err := shardchain.NewEpochStartTrigger(argEpochStart) if err != nil { @@ -1461,37 +1462,38 @@ func (pcf *processComponentsFactory) createExportFactoryHandler( accountsDBs[state.PeerAccountsState] = pcf.state.PeerAccounts() exportFolder := filepath.Join(pcf.workingDir, hardforkConfig.ImportFolder) argsExporter := updateFactory.ArgsExporter{ - CoreComponents: pcf.coreData, - CryptoComponents: pcf.crypto, - HeaderValidator: headerValidator, - DataPool: pcf.data.Datapool(), - StorageService: pcf.data.StorageService(), - RequestHandler: requestHandler, - ShardCoordinator: pcf.bootstrapComponents.ShardCoordinator(), - Messenger: pcf.network.NetworkMessenger(), - ActiveAccountsDBs: accountsDBs, - ExistingResolvers: resolversFinder, - ExportFolder: exportFolder, - ExportTriesStorageConfig: hardforkConfig.ExportTriesStorageConfig, - ExportStateStorageConfig: hardforkConfig.ExportStateStorageConfig, - ExportStateKeysConfig: hardforkConfig.ExportKeysStorageConfig, - MaxTrieLevelInMemory: pcf.config.StateTriesConfig.MaxStateTrieLevelInMemory, - WhiteListHandler: pcf.whiteListHandler, - WhiteListerVerifiedTxs: pcf.whiteListerVerifiedTxs, - InterceptorsContainer: interceptorsContainer, - NodesCoordinator: pcf.nodesCoordinator, - HeaderSigVerifier: headerSigVerifier, - HeaderIntegrityVerifier: pcf.bootstrapComponents.HeaderIntegrityVerifier(), - ValidityAttester: blockTracker, - InputAntifloodHandler: pcf.network.InputAntiFloodHandler(), - OutputAntifloodHandler: pcf.network.OutputAntiFloodHandler(), - RoundHandler: pcf.coreData.RoundHandler(), - InterceptorDebugConfig: 
pcf.config.Debug.InterceptorResolver, - EnableSignTxWithHashEpoch: pcf.epochConfig.EnableEpochs.TransactionSignedWithTxHashEnableEpoch, - MaxHardCapForMissingNodes: pcf.config.TrieSync.MaxHardCapForMissingNodes, - NumConcurrentTrieSyncers: pcf.config.TrieSync.NumConcurrentTrieSyncers, - TrieSyncerVersion: pcf.config.TrieSync.TrieSyncerVersion, - PeersRatingHandler: pcf.network.PeersRatingHandler(), + CoreComponents: pcf.coreData, + CryptoComponents: pcf.crypto, + HeaderValidator: headerValidator, + DataPool: pcf.data.Datapool(), + StorageService: pcf.data.StorageService(), + RequestHandler: requestHandler, + ShardCoordinator: pcf.bootstrapComponents.ShardCoordinator(), + Messenger: pcf.network.NetworkMessenger(), + ActiveAccountsDBs: accountsDBs, + ExistingResolvers: resolversFinder, + ExportFolder: exportFolder, + ExportTriesStorageConfig: hardforkConfig.ExportTriesStorageConfig, + ExportStateStorageConfig: hardforkConfig.ExportStateStorageConfig, + ExportStateKeysConfig: hardforkConfig.ExportKeysStorageConfig, + MaxTrieLevelInMemory: pcf.config.StateTriesConfig.MaxStateTrieLevelInMemory, + WhiteListHandler: pcf.whiteListHandler, + WhiteListerVerifiedTxs: pcf.whiteListerVerifiedTxs, + InterceptorsContainer: interceptorsContainer, + NodesCoordinator: pcf.nodesCoordinator, + HeaderSigVerifier: headerSigVerifier, + HeaderIntegrityVerifier: pcf.bootstrapComponents.HeaderIntegrityVerifier(), + ValidityAttester: blockTracker, + InputAntifloodHandler: pcf.network.InputAntiFloodHandler(), + OutputAntifloodHandler: pcf.network.OutputAntiFloodHandler(), + RoundHandler: pcf.coreData.RoundHandler(), + InterceptorDebugConfig: pcf.config.Debug.InterceptorResolver, + EnableSignTxWithHashEpoch: pcf.epochConfig.EnableEpochs.TransactionSignedWithTxHashEnableEpoch, + MaxHardCapForMissingNodes: pcf.config.TrieSync.MaxHardCapForMissingNodes, + NumConcurrentTrieSyncers: pcf.config.TrieSync.NumConcurrentTrieSyncers, + TrieSyncerVersion: pcf.config.TrieSync.TrieSyncerVersion, + 
PeersRatingHandler: pcf.network.PeersRatingHandler(), + RefactorPeersMiniBlocksEnableEpoch: pcf.epochConfig.EnableEpochs.RefactorPeersMiniBlocksEnableEpoch, } return updateFactory.NewExportHandlerFactory(argsExporter) } diff --git a/factory/shardingFactory.go b/factory/shardingFactory.go index df141564a07..38d394d6e5e 100644 --- a/factory/shardingFactory.go +++ b/factory/shardingFactory.go @@ -105,6 +105,7 @@ func CreateNodesCoordinator( waitingListFixEnabledEpoch uint32, chanNodeStop chan endProcess.ArgEndProcess, nodeTypeProvider core.NodeTypeProviderHandler, + refactorPeersMiniBlocksEnableEpoch uint32, ) (nodesCoordinator.NodesCoordinator, error) { if chanNodeStop == nil { return nil, nodesCoordinator.ErrNilNodeStopChannel @@ -174,26 +175,27 @@ func CreateNodesCoordinator( } argumentsNodesCoordinator := nodesCoordinator.ArgNodesCoordinator{ - ShardConsensusGroupSize: shardConsensusGroupSize, - MetaConsensusGroupSize: metaConsensusGroupSize, - Marshalizer: marshalizer, - Hasher: hasher, - Shuffler: nodeShuffler, - EpochStartNotifier: epochStartNotifier, - BootStorer: bootStorer, - ShardIDAsObserver: shardIDAsObserver, - NbShards: nbShards, - EligibleNodes: eligibleValidators, - WaitingNodes: waitingValidators, - SelfPublicKey: pubKeyBytes, - ConsensusGroupCache: consensusGroupCache, - ShuffledOutHandler: shuffledOutHandler, - Epoch: currentEpoch, - StartEpoch: startEpoch, - WaitingListFixEnabledEpoch: waitingListFixEnabledEpoch, - ChanStopNode: chanNodeStop, - NodeTypeProvider: nodeTypeProvider, - IsFullArchive: prefsConfig.FullArchive, + ShardConsensusGroupSize: shardConsensusGroupSize, + MetaConsensusGroupSize: metaConsensusGroupSize, + Marshalizer: marshalizer, + Hasher: hasher, + Shuffler: nodeShuffler, + EpochStartNotifier: epochStartNotifier, + BootStorer: bootStorer, + ShardIDAsObserver: shardIDAsObserver, + NbShards: nbShards, + EligibleNodes: eligibleValidators, + WaitingNodes: waitingValidators, + SelfPublicKey: pubKeyBytes, + ConsensusGroupCache: 
consensusGroupCache, + ShuffledOutHandler: shuffledOutHandler, + Epoch: currentEpoch, + StartEpoch: startEpoch, + WaitingListFixEnabledEpoch: waitingListFixEnabledEpoch, + ChanStopNode: chanNodeStop, + NodeTypeProvider: nodeTypeProvider, + IsFullArchive: prefsConfig.FullArchive, + RefactorPeersMiniBlocksEnableEpoch: refactorPeersMiniBlocksEnableEpoch, } baseNodesCoordinator, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/genesis/process/shardGenesisBlockCreator.go b/genesis/process/shardGenesisBlockCreator.go index 66d8abb5277..547d7ab3a81 100644 --- a/genesis/process/shardGenesisBlockCreator.go +++ b/genesis/process/shardGenesisBlockCreator.go @@ -122,6 +122,7 @@ func createGenesisConfig() config.EnableEpochs { HeartbeatDisableEpoch: unreachableEpoch, MiniBlockPartialExecutionEnableEpoch: unreachableEpoch, ESDTMetadataContinuousCleanupEnableEpoch: unreachableEpoch, + RefactorPeersMiniBlocksEnableEpoch: unreachableEpoch, } } @@ -598,6 +599,7 @@ func createProcessorsForShardGenesisBlock(arg ArgsGenesisBlockCreator, enableEpo txTypeHandler, disabledScheduledTxsExecutionHandler, disabledProcessedMiniBlocksTracker, + enableEpochs.RefactorPeersMiniBlocksEnableEpoch, ) if err != nil { return nil, err diff --git a/integrationTests/consensus/testInitializer.go b/integrationTests/consensus/testInitializer.go index d065655a7b6..0e77971ffec 100644 --- a/integrationTests/consensus/testInitializer.go +++ b/integrationTests/consensus/testInitializer.go @@ -514,23 +514,24 @@ func createNodes( consensusCache, _ := lrucache.NewCache(10000) argumentsNodesCoordinator := nodesCoordinator.ArgNodesCoordinator{ - ShardConsensusGroupSize: consensusSize, - MetaConsensusGroupSize: 1, - Marshalizer: integrationTests.TestMarshalizer, - Hasher: createHasher(consensusType), - Shuffler: nodeShuffler, - EpochStartNotifier: epochStartRegistrationHandler, - BootStorer: bootStorer, - NbShards: 1, - EligibleNodes: eligibleMap, - WaitingNodes: 
waitingMap, - SelfPublicKey: []byte(strconv.Itoa(i)), - ConsensusGroupCache: consensusCache, - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: endProcess.GetDummyEndProcessChannel(), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - IsFullArchive: false, + ShardConsensusGroupSize: consensusSize, + MetaConsensusGroupSize: 1, + Marshalizer: integrationTests.TestMarshalizer, + Hasher: createHasher(consensusType), + Shuffler: nodeShuffler, + EpochStartNotifier: epochStartRegistrationHandler, + BootStorer: bootStorer, + NbShards: 1, + EligibleNodes: eligibleMap, + WaitingNodes: waitingMap, + SelfPublicKey: []byte(strconv.Itoa(i)), + ConsensusGroupCache: consensusCache, + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: endProcess.GetDummyEndProcessChannel(), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + IsFullArchive: false, + RefactorPeersMiniBlocksEnableEpoch: 0, } nodesCoord, _ := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/integrationTests/factory/consensusComponents/consensusComponents_test.go b/integrationTests/factory/consensusComponents/consensusComponents_test.go index 705e4f5e7e6..f66cd0c6b4e 100644 --- a/integrationTests/factory/consensusComponents/consensusComponents_test.go +++ b/integrationTests/factory/consensusComponents/consensusComponents_test.go @@ -65,6 +65,7 @@ func TestConsensusComponents_Close_ShouldWork(t *testing.T) { configs.EpochConfig.EnableEpochs.WaitingListFixEnableEpoch, managedCoreComponents.ChanStopNodeProcess(), managedCoreComponents.NodeTypeProvider(), + configs.EpochConfig.EnableEpochs.RefactorPeersMiniBlocksEnableEpoch, ) require.Nil(t, err) managedStatusComponents, err := nr.CreateManagedStatusComponents( diff --git a/integrationTests/factory/processComponents/processComponents_test.go 
b/integrationTests/factory/processComponents/processComponents_test.go index 3f0371137f7..d921c802d80 100644 --- a/integrationTests/factory/processComponents/processComponents_test.go +++ b/integrationTests/factory/processComponents/processComponents_test.go @@ -66,6 +66,7 @@ func TestProcessComponents_Close_ShouldWork(t *testing.T) { configs.EpochConfig.EnableEpochs.WaitingListFixEnableEpoch, managedCoreComponents.ChanStopNodeProcess(), managedCoreComponents.NodeTypeProvider(), + configs.EpochConfig.EnableEpochs.RefactorPeersMiniBlocksEnableEpoch, ) require.Nil(t, err) managedStatusComponents, err := nr.CreateManagedStatusComponents( diff --git a/integrationTests/factory/statusComponents/statusComponents_test.go b/integrationTests/factory/statusComponents/statusComponents_test.go index 30da3113aad..4d56064f394 100644 --- a/integrationTests/factory/statusComponents/statusComponents_test.go +++ b/integrationTests/factory/statusComponents/statusComponents_test.go @@ -66,6 +66,7 @@ func TestStatusComponents_Create_Close_ShouldWork(t *testing.T) { configs.EpochConfig.EnableEpochs.WaitingListFixEnableEpoch, managedCoreComponents.ChanStopNodeProcess(), managedCoreComponents.NodeTypeProvider(), + configs.EpochConfig.EnableEpochs.RefactorPeersMiniBlocksEnableEpoch, ) require.Nil(t, err) managedStatusComponents, err := nr.CreateManagedStatusComponents( diff --git a/integrationTests/multiShard/hardFork/hardFork_test.go b/integrationTests/multiShard/hardFork/hardFork_test.go index 315a4d32c1b..73ef16adb13 100644 --- a/integrationTests/multiShard/hardFork/hardFork_test.go +++ b/integrationTests/multiShard/hardFork/hardFork_test.go @@ -616,11 +616,12 @@ func createHardForkExporter( NumResolveFailureThreshold: 3, DebugLineExpiration: 3, }, - MaxHardCapForMissingNodes: 500, - NumConcurrentTrieSyncers: 50, - TrieSyncerVersion: 2, - PeersRatingHandler: node.PeersRatingHandler, - CheckNodesOnDisk: false, + MaxHardCapForMissingNodes: 500, + NumConcurrentTrieSyncers: 50, + 
TrieSyncerVersion: 2, + PeersRatingHandler: node.PeersRatingHandler, + CheckNodesOnDisk: false, + RefactorPeersMiniBlocksEnableEpoch: 0, } exportHandler, err := factory.NewExportHandlerFactory(argsExportHandler) diff --git a/integrationTests/nodesCoordinatorFactory.go b/integrationTests/nodesCoordinatorFactory.go index 669e294d405..832aadabaa4 100644 --- a/integrationTests/nodesCoordinatorFactory.go +++ b/integrationTests/nodesCoordinatorFactory.go @@ -51,24 +51,25 @@ func (tpn *IndexHashedNodesCoordinatorFactory) CreateNodesCoordinator(arg ArgInd } nodeShuffler, _ := nodesCoordinator.NewHashValidatorsShuffler(nodeShufflerArgs) argumentsNodesCoordinator := nodesCoordinator.ArgNodesCoordinator{ - ShardConsensusGroupSize: arg.shardConsensusGroupSize, - MetaConsensusGroupSize: arg.metaConsensusGroupSize, - Marshalizer: TestMarshalizer, - Hasher: arg.hasher, - Shuffler: nodeShuffler, - EpochStartNotifier: arg.epochStartSubscriber, - ShardIDAsObserver: arg.shardId, - NbShards: uint32(arg.nbShards), - EligibleNodes: arg.validatorsMap, - WaitingNodes: arg.waitingMap, - SelfPublicKey: pubKeyBytes, - ConsensusGroupCache: arg.consensusGroupCache, - BootStorer: arg.bootStorer, - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: endProcess.GetDummyEndProcessChannel(), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - IsFullArchive: false, + ShardConsensusGroupSize: arg.shardConsensusGroupSize, + MetaConsensusGroupSize: arg.metaConsensusGroupSize, + Marshalizer: TestMarshalizer, + Hasher: arg.hasher, + Shuffler: nodeShuffler, + EpochStartNotifier: arg.epochStartSubscriber, + ShardIDAsObserver: arg.shardId, + NbShards: uint32(arg.nbShards), + EligibleNodes: arg.validatorsMap, + WaitingNodes: arg.waitingMap, + SelfPublicKey: pubKeyBytes, + ConsensusGroupCache: arg.consensusGroupCache, + BootStorer: arg.bootStorer, + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + 
ChanStopNode: endProcess.GetDummyEndProcessChannel(), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + IsFullArchive: false, + RefactorPeersMiniBlocksEnableEpoch: 0, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) if err != nil { @@ -103,24 +104,25 @@ func (ihncrf *IndexHashedNodesCoordinatorWithRaterFactory) CreateNodesCoordinato } nodeShuffler, _ := nodesCoordinator.NewHashValidatorsShuffler(shufflerArgs) argumentsNodesCoordinator := nodesCoordinator.ArgNodesCoordinator{ - ShardConsensusGroupSize: arg.shardConsensusGroupSize, - MetaConsensusGroupSize: arg.metaConsensusGroupSize, - Marshalizer: TestMarshalizer, - Hasher: arg.hasher, - Shuffler: nodeShuffler, - EpochStartNotifier: arg.epochStartSubscriber, - ShardIDAsObserver: arg.shardId, - NbShards: uint32(arg.nbShards), - EligibleNodes: arg.validatorsMap, - WaitingNodes: arg.waitingMap, - SelfPublicKey: pubKeyBytes, - ConsensusGroupCache: arg.consensusGroupCache, - BootStorer: arg.bootStorer, - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: endProcess.GetDummyEndProcessChannel(), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - IsFullArchive: false, + ShardConsensusGroupSize: arg.shardConsensusGroupSize, + MetaConsensusGroupSize: arg.metaConsensusGroupSize, + Marshalizer: TestMarshalizer, + Hasher: arg.hasher, + Shuffler: nodeShuffler, + EpochStartNotifier: arg.epochStartSubscriber, + ShardIDAsObserver: arg.shardId, + NbShards: uint32(arg.nbShards), + EligibleNodes: arg.validatorsMap, + WaitingNodes: arg.waitingMap, + SelfPublicKey: pubKeyBytes, + ConsensusGroupCache: arg.consensusGroupCache, + BootStorer: arg.bootStorer, + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: endProcess.GetDummyEndProcessChannel(), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + IsFullArchive: false, + 
RefactorPeersMiniBlocksEnableEpoch: 0, } baseCoordinator, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/integrationTests/testHeartbeatNode.go b/integrationTests/testHeartbeatNode.go index d22767e1911..d7a0b223161 100644 --- a/integrationTests/testHeartbeatNode.go +++ b/integrationTests/testHeartbeatNode.go @@ -278,25 +278,26 @@ func CreateNodesWithTestHeartbeatNode( cache, _ := storageUnit.NewCache(cacherCfg) for shardId, validatorList := range validatorsMap { argumentsNodesCoordinator := nodesCoordinator.ArgNodesCoordinator{ - ShardConsensusGroupSize: shardConsensusGroupSize, - MetaConsensusGroupSize: metaConsensusGroupSize, - Marshalizer: TestMarshalizer, - Hasher: TestHasher, - ShardIDAsObserver: shardId, - NbShards: uint32(numShards), - EligibleNodes: validatorsForNodesCoordinator, - SelfPublicKey: []byte(strconv.Itoa(int(shardId))), - ConsensusGroupCache: cache, - Shuffler: &shardingMocks.NodeShufflerMock{}, - BootStorer: CreateMemUnit(), - WaitingNodes: make(map[uint32][]nodesCoordinator.Validator), - Epoch: 0, - EpochStartNotifier: notifier.NewEpochStartSubscriptionHandler(), - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: endProcess.GetDummyEndProcessChannel(), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - IsFullArchive: false, + ShardConsensusGroupSize: shardConsensusGroupSize, + MetaConsensusGroupSize: metaConsensusGroupSize, + Marshalizer: TestMarshalizer, + Hasher: TestHasher, + ShardIDAsObserver: shardId, + NbShards: uint32(numShards), + EligibleNodes: validatorsForNodesCoordinator, + SelfPublicKey: []byte(strconv.Itoa(int(shardId))), + ConsensusGroupCache: cache, + Shuffler: &shardingMocks.NodeShufflerMock{}, + BootStorer: CreateMemUnit(), + WaitingNodes: make(map[uint32][]nodesCoordinator.Validator), + Epoch: 0, + EpochStartNotifier: notifier.NewEpochStartSubscriptionHandler(), + ShuffledOutHandler: 
&mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: endProcess.GetDummyEndProcessChannel(), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + IsFullArchive: false, + RefactorPeersMiniBlocksEnableEpoch: 0, } nodesCoordinatorInstance, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) log.LogIfError(err) @@ -323,25 +324,26 @@ func CreateNodesWithTestHeartbeatNode( } argumentsNodesCoordinator := nodesCoordinator.ArgNodesCoordinator{ - ShardConsensusGroupSize: shardConsensusGroupSize, - MetaConsensusGroupSize: metaConsensusGroupSize, - Marshalizer: TestMarshalizer, - Hasher: TestHasher, - ShardIDAsObserver: shardId, - NbShards: uint32(numShards), - EligibleNodes: validatorsForNodesCoordinator, - SelfPublicKey: []byte(strconv.Itoa(int(shardId))), - ConsensusGroupCache: cache, - Shuffler: &shardingMocks.NodeShufflerMock{}, - BootStorer: CreateMemUnit(), - WaitingNodes: make(map[uint32][]nodesCoordinator.Validator), - Epoch: 0, - EpochStartNotifier: notifier.NewEpochStartSubscriptionHandler(), - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: endProcess.GetDummyEndProcessChannel(), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - IsFullArchive: false, + ShardConsensusGroupSize: shardConsensusGroupSize, + MetaConsensusGroupSize: metaConsensusGroupSize, + Marshalizer: TestMarshalizer, + Hasher: TestHasher, + ShardIDAsObserver: shardId, + NbShards: uint32(numShards), + EligibleNodes: validatorsForNodesCoordinator, + SelfPublicKey: []byte(strconv.Itoa(int(shardId))), + ConsensusGroupCache: cache, + Shuffler: &shardingMocks.NodeShufflerMock{}, + BootStorer: CreateMemUnit(), + WaitingNodes: make(map[uint32][]nodesCoordinator.Validator), + Epoch: 0, + EpochStartNotifier: notifier.NewEpochStartSubscriptionHandler(), + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: 
endProcess.GetDummyEndProcessChannel(), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + IsFullArchive: false, + RefactorPeersMiniBlocksEnableEpoch: 0, } nodesCoordinatorInstance, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) log.LogIfError(err) diff --git a/integrationTests/testP2PNode.go b/integrationTests/testP2PNode.go index 4755c6480b0..f54ee14cfd4 100644 --- a/integrationTests/testP2PNode.go +++ b/integrationTests/testP2PNode.go @@ -332,25 +332,26 @@ func CreateNodesWithTestP2PNodes( for shardId, validatorList := range validatorsMap { argumentsNodesCoordinator := nodesCoordinator.ArgNodesCoordinator{ - ShardConsensusGroupSize: shardConsensusGroupSize, - MetaConsensusGroupSize: metaConsensusGroupSize, - Marshalizer: TestMarshalizer, - Hasher: TestHasher, - ShardIDAsObserver: shardId, - NbShards: uint32(numShards), - EligibleNodes: validatorsForNodesCoordinator, - SelfPublicKey: []byte(strconv.Itoa(int(shardId))), - ConsensusGroupCache: cache, - Shuffler: &shardingMocks.NodeShufflerMock{}, - BootStorer: CreateMemUnit(), - WaitingNodes: make(map[uint32][]nodesCoordinator.Validator), - Epoch: 0, - EpochStartNotifier: notifier.NewEpochStartSubscriptionHandler(), - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: endProcess.GetDummyEndProcessChannel(), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - IsFullArchive: false, + ShardConsensusGroupSize: shardConsensusGroupSize, + MetaConsensusGroupSize: metaConsensusGroupSize, + Marshalizer: TestMarshalizer, + Hasher: TestHasher, + ShardIDAsObserver: shardId, + NbShards: uint32(numShards), + EligibleNodes: validatorsForNodesCoordinator, + SelfPublicKey: []byte(strconv.Itoa(int(shardId))), + ConsensusGroupCache: cache, + Shuffler: &shardingMocks.NodeShufflerMock{}, + BootStorer: CreateMemUnit(), + WaitingNodes: make(map[uint32][]nodesCoordinator.Validator), + Epoch: 0, + EpochStartNotifier: 
notifier.NewEpochStartSubscriptionHandler(), + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: endProcess.GetDummyEndProcessChannel(), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + IsFullArchive: false, + RefactorPeersMiniBlocksEnableEpoch: 0, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) log.LogIfError(err) @@ -377,25 +378,26 @@ func CreateNodesWithTestP2PNodes( } argumentsNodesCoordinator := nodesCoordinator.ArgNodesCoordinator{ - ShardConsensusGroupSize: shardConsensusGroupSize, - MetaConsensusGroupSize: metaConsensusGroupSize, - Marshalizer: TestMarshalizer, - Hasher: TestHasher, - ShardIDAsObserver: shardId, - NbShards: uint32(numShards), - EligibleNodes: validatorsForNodesCoordinator, - SelfPublicKey: []byte(strconv.Itoa(int(shardId))), - ConsensusGroupCache: cache, - Shuffler: &shardingMocks.NodeShufflerMock{}, - BootStorer: CreateMemUnit(), - WaitingNodes: make(map[uint32][]nodesCoordinator.Validator), - Epoch: 0, - EpochStartNotifier: notifier.NewEpochStartSubscriptionHandler(), - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: endProcess.GetDummyEndProcessChannel(), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - IsFullArchive: false, + ShardConsensusGroupSize: shardConsensusGroupSize, + MetaConsensusGroupSize: metaConsensusGroupSize, + Marshalizer: TestMarshalizer, + Hasher: TestHasher, + ShardIDAsObserver: shardId, + NbShards: uint32(numShards), + EligibleNodes: validatorsForNodesCoordinator, + SelfPublicKey: []byte(strconv.Itoa(int(shardId))), + ConsensusGroupCache: cache, + Shuffler: &shardingMocks.NodeShufflerMock{}, + BootStorer: CreateMemUnit(), + WaitingNodes: make(map[uint32][]nodesCoordinator.Validator), + Epoch: 0, + EpochStartNotifier: notifier.NewEpochStartSubscriptionHandler(), + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + 
WaitingListFixEnabledEpoch: 0, + ChanStopNode: endProcess.GetDummyEndProcessChannel(), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + IsFullArchive: false, + RefactorPeersMiniBlocksEnableEpoch: 0, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) log.LogIfError(err) diff --git a/integrationTests/testProcessorNode.go b/integrationTests/testProcessorNode.go index 52eddda4b08..de6783d2126 100644 --- a/integrationTests/testProcessorNode.go +++ b/integrationTests/testProcessorNode.go @@ -1290,20 +1290,21 @@ func (tpn *TestProcessorNode) initInterceptors(heartbeatPk string) { } peerMiniBlockSyncer, _ := shardchain.NewPeerMiniBlockSyncer(argsPeerMiniBlocksSyncer) argsShardEpochStart := &shardchain.ArgsShardEpochStartTrigger{ - Marshalizer: TestMarshalizer, - Hasher: TestHasher, - HeaderValidator: tpn.HeaderValidator, - Uint64Converter: TestUint64Converter, - DataPool: tpn.DataPool, - Storage: tpn.Storage, - RequestHandler: tpn.RequestHandler, - Epoch: 0, - Validity: 1, - Finality: 1, - EpochStartNotifier: tpn.EpochStartNotifier, - PeerMiniBlocksSyncer: peerMiniBlockSyncer, - RoundHandler: tpn.RoundHandler, - AppStatusHandler: &statusHandlerMock.AppStatusHandlerStub{}, + Marshalizer: TestMarshalizer, + Hasher: TestHasher, + HeaderValidator: tpn.HeaderValidator, + Uint64Converter: TestUint64Converter, + DataPool: tpn.DataPool, + Storage: tpn.Storage, + RequestHandler: tpn.RequestHandler, + Epoch: 0, + Validity: 1, + Finality: 1, + EpochStartNotifier: tpn.EpochStartNotifier, + PeerMiniBlocksSyncer: peerMiniBlockSyncer, + RoundHandler: tpn.RoundHandler, + AppStatusHandler: &statusHandlerMock.AppStatusHandlerStub{}, + RefactorPeersMiniBlocksEnableEpoch: 0, } epochStartTrigger, _ := shardchain.NewEpochStartTrigger(argsShardEpochStart) tpn.EpochStartTrigger = &shardchain.TestTrigger{} @@ -1654,6 +1655,7 @@ func (tpn *TestProcessorNode) initInnerProcessors(gasMap map[string]map[string]u txTypeHandler, 
scheduledTxsExecutionHandler, processedMiniBlocksTracker, + tpn.EnableEpochs.RefactorPeersMiniBlocksEnableEpoch, ) tpn.PreProcessorsContainer, _ = fact.Create() @@ -2205,12 +2207,14 @@ func (tpn *TestProcessorNode) initBlockProcessor(stateCheckpointModulus uint) { validatorInfoStorage := tpn.Storage.GetStorer(dataRetriever.UnsignedTransactionUnit) argsEpochValidatorInfo := metachain.ArgsNewValidatorInfoCreator{ - ShardCoordinator: tpn.ShardCoordinator, - ValidatorInfoStorage: validatorInfoStorage, - MiniBlockStorage: miniBlockStorage, - Hasher: TestHasher, - Marshalizer: TestMarshalizer, - DataPool: tpn.DataPool, + ShardCoordinator: tpn.ShardCoordinator, + ValidatorInfoStorage: validatorInfoStorage, + MiniBlockStorage: miniBlockStorage, + Hasher: TestHasher, + Marshalizer: TestMarshalizer, + DataPool: tpn.DataPool, + EpochNotifier: tpn.EpochNotifier, + RefactorPeersMiniBlocksEnableEpoch: tpn.EnableEpochs.RefactorPeersMiniBlocksEnableEpoch, } epochStartValidatorInfo, _ := metachain.NewValidatorInfoCreator(argsEpochValidatorInfo) @@ -2262,20 +2266,21 @@ func (tpn *TestProcessorNode) initBlockProcessor(stateCheckpointModulus uint) { } peerMiniBlocksSyncer, _ := shardchain.NewPeerMiniBlockSyncer(argsPeerMiniBlocksSyncer) argsShardEpochStart := &shardchain.ArgsShardEpochStartTrigger{ - Marshalizer: TestMarshalizer, - Hasher: TestHasher, - HeaderValidator: tpn.HeaderValidator, - Uint64Converter: TestUint64Converter, - DataPool: tpn.DataPool, - Storage: tpn.Storage, - RequestHandler: tpn.RequestHandler, - Epoch: 0, - Validity: 1, - Finality: 1, - EpochStartNotifier: tpn.EpochStartNotifier, - PeerMiniBlocksSyncer: peerMiniBlocksSyncer, - RoundHandler: tpn.RoundHandler, - AppStatusHandler: &statusHandlerMock.AppStatusHandlerStub{}, + Marshalizer: TestMarshalizer, + Hasher: TestHasher, + HeaderValidator: tpn.HeaderValidator, + Uint64Converter: TestUint64Converter, + DataPool: tpn.DataPool, + Storage: tpn.Storage, + RequestHandler: tpn.RequestHandler, + Epoch: 0, + Validity: 
1, + Finality: 1, + EpochStartNotifier: tpn.EpochStartNotifier, + PeerMiniBlocksSyncer: peerMiniBlocksSyncer, + RoundHandler: tpn.RoundHandler, + AppStatusHandler: &statusHandlerMock.AppStatusHandlerStub{}, + RefactorPeersMiniBlocksEnableEpoch: 0, } epochStartTrigger, _ := shardchain.NewEpochStartTrigger(argsShardEpochStart) tpn.EpochStartTrigger = &shardchain.TestTrigger{} diff --git a/integrationTests/testProcessorNodeWithCoordinator.go b/integrationTests/testProcessorNodeWithCoordinator.go index 3dc8c5b3e6d..58b86ec9a6d 100644 --- a/integrationTests/testProcessorNodeWithCoordinator.go +++ b/integrationTests/testProcessorNodeWithCoordinator.go @@ -80,23 +80,24 @@ func CreateProcessorNodesWithNodesCoordinator( for i, v := range validatorList { cache, _ := lrucache.NewCache(10000) argumentsNodesCoordinator := nodesCoordinator.ArgNodesCoordinator{ - ShardConsensusGroupSize: shardConsensusGroupSize, - MetaConsensusGroupSize: metaConsensusGroupSize, - Marshalizer: TestMarshalizer, - Hasher: TestHasher, - ShardIDAsObserver: shardId, - NbShards: numShards, - EligibleNodes: validatorsMapForNodesCoordinator, - WaitingNodes: waitingMapForNodesCoordinator, - SelfPublicKey: v.PubKeyBytes(), - ConsensusGroupCache: cache, - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: endProcess.GetDummyEndProcessChannel(), - IsFullArchive: false, + ShardConsensusGroupSize: shardConsensusGroupSize, + MetaConsensusGroupSize: metaConsensusGroupSize, + Marshalizer: TestMarshalizer, + Hasher: TestHasher, + ShardIDAsObserver: shardId, + NbShards: numShards, + EligibleNodes: validatorsMapForNodesCoordinator, + WaitingNodes: waitingMapForNodesCoordinator, + SelfPublicKey: v.PubKeyBytes(), + ConsensusGroupCache: cache, + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: endProcess.GetDummyEndProcessChannel(), + IsFullArchive: false, + RefactorPeersMiniBlocksEnableEpoch: 0, } - nodesCoordinator, 
err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) + coordinator, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) if err != nil { fmt.Println("error creating node coordinator") } @@ -104,7 +105,7 @@ func CreateProcessorNodesWithNodesCoordinator( tpn := newTestProcessorNodeWithCustomNodesCoordinator( numShards, shardId, - nodesCoordinator, + coordinator, i, ncp, nodesSetup, diff --git a/integrationTests/testProcessorNodeWithMultisigner.go b/integrationTests/testProcessorNodeWithMultisigner.go index cd91776e0c7..48a988ec991 100644 --- a/integrationTests/testProcessorNodeWithMultisigner.go +++ b/integrationTests/testProcessorNodeWithMultisigner.go @@ -515,24 +515,25 @@ func CreateNodesWithNodesCoordinatorAndHeaderSigVerifier( for shardId, validatorList := range validatorsMap { consensusCache, _ := lrucache.NewCache(10000) argumentsNodesCoordinator := nodesCoordinator.ArgNodesCoordinator{ - ShardConsensusGroupSize: shardConsensusGroupSize, - MetaConsensusGroupSize: metaConsensusGroupSize, - Marshalizer: TestMarshalizer, - Hasher: TestHasher, - Shuffler: nodeShuffler, - BootStorer: bootStorer, - EpochStartNotifier: epochStartSubscriber, - ShardIDAsObserver: shardId, - NbShards: uint32(nbShards), - EligibleNodes: validatorsMapForNodesCoordinator, - WaitingNodes: make(map[uint32][]nodesCoordinator.Validator), - SelfPublicKey: []byte(strconv.Itoa(int(shardId))), - ConsensusGroupCache: consensusCache, - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: endProcess.GetDummyEndProcessChannel(), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - IsFullArchive: false, + ShardConsensusGroupSize: shardConsensusGroupSize, + MetaConsensusGroupSize: metaConsensusGroupSize, + Marshalizer: TestMarshalizer, + Hasher: TestHasher, + Shuffler: nodeShuffler, + BootStorer: bootStorer, + EpochStartNotifier: epochStartSubscriber, + ShardIDAsObserver: shardId, + 
NbShards: uint32(nbShards), + EligibleNodes: validatorsMapForNodesCoordinator, + WaitingNodes: make(map[uint32][]nodesCoordinator.Validator), + SelfPublicKey: []byte(strconv.Itoa(int(shardId))), + ConsensusGroupCache: consensusCache, + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: endProcess.GetDummyEndProcessChannel(), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + IsFullArchive: false, + RefactorPeersMiniBlocksEnableEpoch: 0, } nodesCoordinatorInstance, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) @@ -614,24 +615,25 @@ func CreateNodesWithNodesCoordinatorKeygenAndSingleSigner( bootStorer := CreateMemUnit() cache, _ := lrucache.NewCache(10000) argumentsNodesCoordinator := nodesCoordinator.ArgNodesCoordinator{ - ShardConsensusGroupSize: shardConsensusGroupSize, - MetaConsensusGroupSize: metaConsensusGroupSize, - Marshalizer: TestMarshalizer, - Hasher: TestHasher, - Shuffler: nodeShuffler, - EpochStartNotifier: epochStartSubscriber, - BootStorer: bootStorer, - ShardIDAsObserver: shardId, - NbShards: uint32(nbShards), - EligibleNodes: validatorsMapForNodesCoordinator, - WaitingNodes: waitingMapForNodesCoordinator, - SelfPublicKey: []byte(strconv.Itoa(int(shardId))), - ConsensusGroupCache: cache, - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: endProcess.GetDummyEndProcessChannel(), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - IsFullArchive: false, + ShardConsensusGroupSize: shardConsensusGroupSize, + MetaConsensusGroupSize: metaConsensusGroupSize, + Marshalizer: TestMarshalizer, + Hasher: TestHasher, + Shuffler: nodeShuffler, + EpochStartNotifier: epochStartSubscriber, + BootStorer: bootStorer, + ShardIDAsObserver: shardId, + NbShards: uint32(nbShards), + EligibleNodes: validatorsMapForNodesCoordinator, + WaitingNodes: waitingMapForNodesCoordinator, + SelfPublicKey: 
[]byte(strconv.Itoa(int(shardId))), + ConsensusGroupCache: cache, + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: endProcess.GetDummyEndProcessChannel(), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + IsFullArchive: false, + RefactorPeersMiniBlocksEnableEpoch: 0, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/node/nodeRunner.go b/node/nodeRunner.go index 92a9dc8e88c..19c79a8ef9f 100644 --- a/node/nodeRunner.go +++ b/node/nodeRunner.go @@ -182,6 +182,7 @@ func printEnableEpochs(configs *config.Configs) { log.Debug(readEpochFor("refactor contexts"), "epoch", enableEpochs.RefactorContextEnableEpoch) log.Debug(readEpochFor("disable heartbeat v1"), "epoch", enableEpochs.HeartbeatDisableEpoch) log.Debug(readEpochFor("mini block partial execution"), "epoch", enableEpochs.MiniBlockPartialExecutionEnableEpoch) + log.Debug(readEpochFor("refactor peers mini blocks"), "epoch", enableEpochs.RefactorPeersMiniBlocksEnableEpoch) gasSchedule := configs.EpochConfig.GasSchedule log.Debug(readEpochFor("gas schedule directories paths"), "epoch", gasSchedule.GasScheduleByEpochs) @@ -337,6 +338,7 @@ func (nr *nodeRunner) executeOneComponentCreationCycle( configs.EpochConfig.EnableEpochs.WaitingListFixEnableEpoch, managedCoreComponents.ChanStopNodeProcess(), managedCoreComponents.NodeTypeProvider(), + configs.EpochConfig.EnableEpochs.RefactorPeersMiniBlocksEnableEpoch, ) if err != nil { return true, err diff --git a/process/block/preprocess/validatorInfoPreProcessor.go b/process/block/preprocess/validatorInfoPreProcessor.go index 8a69cc88a7b..fedeefaf6d2 100644 --- a/process/block/preprocess/validatorInfoPreProcessor.go +++ b/process/block/preprocess/validatorInfoPreProcessor.go @@ -1,6 +1,7 @@ package preprocess import ( + "github.com/ElrondNetwork/elrond-go-core/core/atomic" "time" "github.com/ElrondNetwork/elrond-go-core/core" @@ -20,11 +21,13 @@ var 
_ process.PreProcessor = (*validatorInfoPreprocessor)(nil) type validatorInfoPreprocessor struct { *basePreProcess - chReceivedAllValidatorsInfo chan bool - onRequestValidatorsInfo func(txHashes [][]byte) - validatorsInfoForBlock txsForBlock - validatorsInfoPool dataRetriever.ShardedDataCacherNotifier - storage dataRetriever.StorageService + chReceivedAllValidatorsInfo chan bool + onRequestValidatorsInfo func(txHashes [][]byte) + validatorsInfoForBlock txsForBlock + validatorsInfoPool dataRetriever.ShardedDataCacherNotifier + storage dataRetriever.StorageService + refactorPeersMiniBlocksEnableEpoch uint32 + flagRefactorPeersMiniBlocks atomic.Flag } // NewValidatorInfoPreprocessor creates a new validatorInfo preprocessor object @@ -35,6 +38,8 @@ func NewValidatorInfoPreprocessor( validatorsInfoPool dataRetriever.ShardedDataCacherNotifier, store dataRetriever.StorageService, onRequestValidatorsInfo func(txHashes [][]byte), + epochNotifier process.EpochNotifier, + refactorPeersMiniBlocksEnableEpoch uint32, ) (*validatorInfoPreprocessor, error) { if check.IfNil(hasher) { @@ -55,6 +60,9 @@ func NewValidatorInfoPreprocessor( if onRequestValidatorsInfo == nil { return nil, process.ErrNilRequestHandler } + if check.IfNil(epochNotifier) { + return nil, process.ErrNilEpochNotifier + } bpp := &basePreProcess{ hasher: hasher, @@ -63,16 +71,21 @@ func NewValidatorInfoPreprocessor( } vip := &validatorInfoPreprocessor{ - basePreProcess: bpp, - storage: store, - validatorsInfoPool: validatorsInfoPool, - onRequestValidatorsInfo: onRequestValidatorsInfo, + basePreProcess: bpp, + storage: store, + validatorsInfoPool: validatorsInfoPool, + onRequestValidatorsInfo: onRequestValidatorsInfo, + refactorPeersMiniBlocksEnableEpoch: refactorPeersMiniBlocksEnableEpoch, } vip.chReceivedAllValidatorsInfo = make(chan bool) vip.validatorsInfoPool.RegisterOnAdded(vip.receivedValidatorInfoTransaction) vip.validatorsInfoForBlock.txHashAndInfo = make(map[string]*txInfo) + 
log.Debug("validatorInfoPreprocessor: enable epoch for refactor peers mini blocks", "epoch", vip.refactorPeersMiniBlocksEnableEpoch) + + epochNotifier.RegisterNotifyHandler(vip) + return vip, nil } @@ -135,8 +148,7 @@ func (vip *validatorInfoPreprocessor) RestoreBlockDataIntoPools( continue } - // TODO: Use refactor peers mbs activation flag below - if true { + if vip.flagRefactorPeersMiniBlocks.IsSet() { err := vip.restoreValidatorsInfo(miniBlock) if err != nil { return validatorsInfoRestored, err @@ -397,3 +409,11 @@ func (vip *validatorInfoPreprocessor) IsInterfaceNil() bool { func (vip *validatorInfoPreprocessor) isMiniBlockCorrect(mbType block.Type) bool { return mbType == block.PeerBlock } + +// EpochConfirmed is called whenever a new epoch is confirmed +func (vip *validatorInfoPreprocessor) EpochConfirmed(epoch uint32, timestamp uint64) { + vip.epochConfirmed(epoch, timestamp) + + vip.flagRefactorPeersMiniBlocks.SetValue(epoch >= vip.refactorPeersMiniBlocksEnableEpoch) + log.Debug("validatorInfoPreprocessor: refactor peers mini blocks", "enabled", vip.flagRefactorPeersMiniBlocks.IsSet()) +} diff --git a/process/block/preprocess/validatorInfoPreProcessor_test.go b/process/block/preprocess/validatorInfoPreProcessor_test.go index 956b7a3d24a..f75503d12a9 100644 --- a/process/block/preprocess/validatorInfoPreProcessor_test.go +++ b/process/block/preprocess/validatorInfoPreProcessor_test.go @@ -1,6 +1,7 @@ package preprocess import ( + "github.com/ElrondNetwork/elrond-go/testscommon/epochNotifier" "testing" "github.com/ElrondNetwork/elrond-go-core/core" @@ -23,6 +24,8 @@ func TestNewValidatorInfoPreprocessor_NilHasherShouldErr(t *testing.T) { tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, func(txHashes [][]byte) {}, + &epochNotifier.EpochNotifierStub{}, + 0, ) assert.Nil(t, rtp) @@ -40,6 +43,8 @@ func TestNewValidatorInfoPreprocessor_NilMarshalizerShouldErr(t *testing.T) { tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, func(txHashes [][]byte) {}, + 
&epochNotifier.EpochNotifierStub{}, + 0, ) assert.Nil(t, rtp) @@ -57,6 +62,8 @@ func TestNewValidatorInfoPreprocessor_NilBlockSizeComputationHandlerShouldErr(t tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, func(txHashes [][]byte) {}, + &epochNotifier.EpochNotifierStub{}, + 0, ) assert.Nil(t, rtp) @@ -73,6 +80,8 @@ func TestNewValidatorInfoPreprocessor_NilValidatorInfoPoolShouldErr(t *testing.T nil, &mock.ChainStorerMock{}, func(txHashes [][]byte) {}, + &epochNotifier.EpochNotifierStub{}, + 0, ) assert.Nil(t, rtp) @@ -90,6 +99,8 @@ func TestNewValidatorInfoPreprocessor_NilStoreShouldErr(t *testing.T) { tdp.ValidatorsInfo(), nil, func(txHashes [][]byte) {}, + &epochNotifier.EpochNotifierStub{}, + 0, ) assert.Nil(t, rtp) @@ -107,12 +118,33 @@ func TestNewValidatorInfoPreprocessor_NilRequestHandlerShouldErr(t *testing.T) { tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, nil, + &epochNotifier.EpochNotifierStub{}, + 0, ) assert.Nil(t, rtp) assert.Equal(t, process.ErrNilRequestHandler, err) } +func TestNewValidatorInfoPreprocessor_NilEpochNotifierShouldErr(t *testing.T) { + t.Parallel() + + tdp := initDataPool() + rtp, err := NewValidatorInfoPreprocessor( + &hashingMocks.HasherMock{}, + &testscommon.MarshalizerMock{}, + &testscommon.BlockSizeComputationStub{}, + tdp.ValidatorsInfo(), + &mock.ChainStorerMock{}, + func(txHashes [][]byte) {}, + nil, + 0, + ) + + assert.Nil(t, rtp) + assert.Equal(t, process.ErrNilEpochNotifier, err) +} + func TestNewValidatorInfoPreprocessor_OkValsShouldWork(t *testing.T) { t.Parallel() @@ -124,6 +156,8 @@ func TestNewValidatorInfoPreprocessor_OkValsShouldWork(t *testing.T) { tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, func(txHashes [][]byte) {}, + &epochNotifier.EpochNotifierStub{}, + 0, ) assert.Nil(t, err) assert.NotNil(t, rtp) @@ -140,6 +174,8 @@ func TestNewValidatorInfoPreprocessor_CreateMarshalizedDataShouldWork(t *testing tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, func(txHashes [][]byte) {}, + 
&epochNotifier.EpochNotifierStub{}, + 0, ) hash := make([][]byte, 0) @@ -160,6 +196,8 @@ func TestNewValidatorInfoPreprocessor_ProcessMiniBlockInvalidMiniBlockTypeShould tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, func(txHashes [][]byte) {}, + &epochNotifier.EpochNotifierStub{}, + 0, ) txHashes := make([][]byte, 0) @@ -189,6 +227,8 @@ func TestNewValidatorInfoPreprocessor_ProcessMiniBlockShouldWork(t *testing.T) { tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, func(txHashes [][]byte) {}, + &epochNotifier.EpochNotifierStub{}, + 0, ) txHashes := make([][]byte, 0) @@ -218,6 +258,8 @@ func TestNewValidatorInfoPreprocessor_ProcessMiniBlockNotFromMeta(t *testing.T) tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, func(txHashes [][]byte) {}, + &epochNotifier.EpochNotifierStub{}, + 0, ) txHashes := make([][]byte, 0) @@ -251,6 +293,8 @@ func TestNewValidatorInfoPreprocessor_RestorePeerBlockIntoPools(t *testing.T) { tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, func(txHashes [][]byte) {}, + &epochNotifier.EpochNotifierStub{}, + 0, ) txHashes := [][]byte{[]byte("tx_hash1")} @@ -296,6 +340,8 @@ func TestNewValidatorInfoPreprocessor_RestoreOtherBlockTypeIntoPoolsShouldNotRes tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, func(txHashes [][]byte) {}, + &epochNotifier.EpochNotifierStub{}, + 0, ) txHashes := [][]byte{[]byte("tx_hash1")} @@ -341,6 +387,8 @@ func TestNewValidatorInfoPreprocessor_RemovePeerBlockFromPool(t *testing.T) { tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, func(txHashes [][]byte) {}, + &epochNotifier.EpochNotifierStub{}, + 0, ) txHashes := [][]byte{[]byte("tx_hash1")} @@ -386,6 +434,8 @@ func TestNewValidatorInfoPreprocessor_RemoveOtherBlockTypeFromPoolShouldNotRemov tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, func(txHashes [][]byte) {}, + &epochNotifier.EpochNotifierStub{}, + 0, ) txHashes := [][]byte{[]byte("tx_hash1")} diff --git a/process/block/shardblock_test.go b/process/block/shardblock_test.go index 171aec9accb..a3f2e6db060 100644 --- 
a/process/block/shardblock_test.go +++ b/process/block/shardblock_test.go @@ -476,6 +476,7 @@ func TestShardProcessor_ProcessBlockWithInvalidTransactionShouldErr(t *testing.T &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) container, _ := factory.Create() @@ -700,6 +701,7 @@ func TestShardProcessor_ProcessBlockWithErrOnProcessBlockTransactionsCallShouldR &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) container, _ := factory.Create() @@ -2595,6 +2597,7 @@ func TestShardProcessor_MarshalizedDataToBroadcastShouldWork(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) container, _ := factory.Create() @@ -2706,6 +2709,7 @@ func TestShardProcessor_MarshalizedDataMarshalWithoutSuccess(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) container, _ := factory.Create() @@ -3101,6 +3105,7 @@ func TestShardProcessor_CreateMiniBlocksShouldWorkWithIntraShardTxs(t *testing.T &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) container, _ := factory.Create() @@ -3285,6 +3290,7 @@ func TestShardProcessor_RestoreBlockIntoPoolsShouldWork(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) container, _ := factory.Create() diff --git a/process/coordinator/process_test.go b/process/coordinator/process_test.go index d1c6945f290..676c9e955b3 100644 --- a/process/coordinator/process_test.go +++ b/process/coordinator/process_test.go @@ -546,6 +546,7 @@ func createPreProcessorContainer() process.PreProcessorsContainer { &testscommon.TxTypeHandlerMock{}, 
&testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) container, _ := preFactory.Create() @@ -644,6 +645,7 @@ func createPreProcessorContainerWithDataPool( &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) container, _ := preFactory.Create() @@ -915,6 +917,7 @@ func TestTransactionCoordinator_CreateMbsAndProcessCrossShardTransactions(t *tes &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) container, _ := preFactory.Create() @@ -1102,6 +1105,7 @@ func TestTransactionCoordinator_CreateMbsAndProcessCrossShardTransactionsNilPreP &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) container, _ := preFactory.Create() @@ -1213,6 +1217,7 @@ func TestTransactionCoordinator_CreateMbsAndProcessTransactionsFromMeNothingToPr &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) container, _ := preFactory.Create() @@ -1753,6 +1758,7 @@ func TestTransactionCoordinator_ProcessBlockTransactionProcessTxError(t *testing &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) container, _ := preFactory.Create() @@ -1882,6 +1888,7 @@ func TestTransactionCoordinator_RequestMiniblocks(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) container, _ := preFactory.Create() @@ -2024,6 +2031,7 @@ func TestShardProcessor_ProcessMiniBlockCompleteWithOkTxsShouldExecuteThemAndNot &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) container, _ := preFactory.Create() @@ -2167,6 +2175,7 @@ func 
TestShardProcessor_ProcessMiniBlockCompleteWithErrorWhileProcessShouldCallR &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) container, _ := preFactory.Create() diff --git a/process/factory/shard/preProcessorsContainerFactory.go b/process/factory/shard/preProcessorsContainerFactory.go index 601eca50f03..da34af5e32c 100644 --- a/process/factory/shard/preProcessorsContainerFactory.go +++ b/process/factory/shard/preProcessorsContainerFactory.go @@ -41,6 +41,7 @@ type preProcessorsContainerFactory struct { txTypeHandler process.TxTypeHandler scheduledTxsExecutionHandler process.ScheduledTxsExecutionHandler processedMiniBlocksTracker process.ProcessedMiniBlocksTracker + refactorPeersMiniBlocksEnableEpoch uint32 } // NewPreProcessorsContainerFactory is responsible for creating a new preProcessors factory object @@ -69,6 +70,7 @@ func NewPreProcessorsContainerFactory( txTypeHandler process.TxTypeHandler, scheduledTxsExecutionHandler process.ScheduledTxsExecutionHandler, processedMiniBlocksTracker process.ProcessedMiniBlocksTracker, + refactorPeersMiniBlocksEnableEpoch uint32, ) (*preProcessorsContainerFactory, error) { if check.IfNil(shardCoordinator) { @@ -160,6 +162,7 @@ func NewPreProcessorsContainerFactory( txTypeHandler: txTypeHandler, scheduledTxsExecutionHandler: scheduledTxsExecutionHandler, processedMiniBlocksTracker: processedMiniBlocksTracker, + refactorPeersMiniBlocksEnableEpoch: refactorPeersMiniBlocksEnableEpoch, }, nil } @@ -292,6 +295,8 @@ func (ppcm *preProcessorsContainerFactory) createValidatorInfoPreProcessor() (pr ppcm.dataPool.ValidatorsInfo(), ppcm.store, ppcm.requestHandler.RequestValidatorsInfo, + ppcm.epochNotifier, + ppcm.refactorPeersMiniBlocksEnableEpoch, ) return validatorInfoPreprocessor, err diff --git a/process/factory/shard/preProcessorsContainerFactory_test.go b/process/factory/shard/preProcessorsContainerFactory_test.go index e785028ba6d..91f1229a09b 100644 
--- a/process/factory/shard/preProcessorsContainerFactory_test.go +++ b/process/factory/shard/preProcessorsContainerFactory_test.go @@ -46,6 +46,7 @@ func TestNewPreProcessorsContainerFactory_NilShardCoordinator(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) assert.Equal(t, process.ErrNilShardCoordinator, err) @@ -80,6 +81,7 @@ func TestNewPreProcessorsContainerFactory_NilStore(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) assert.Equal(t, process.ErrNilStore, err) @@ -114,6 +116,7 @@ func TestNewPreProcessorsContainerFactory_NilMarshalizer(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) assert.Equal(t, process.ErrNilMarshalizer, err) @@ -148,6 +151,7 @@ func TestNewPreProcessorsContainerFactory_NilHasher(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) assert.Equal(t, process.ErrNilHasher, err) @@ -182,6 +186,7 @@ func TestNewPreProcessorsContainerFactory_NilDataPool(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) assert.Equal(t, process.ErrNilDataPoolHolder, err) @@ -216,6 +221,7 @@ func TestNewPreProcessorsContainerFactory_NilAddrConv(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) assert.Equal(t, process.ErrNilPubkeyConverter, err) @@ -250,6 +256,7 @@ func TestNewPreProcessorsContainerFactory_NilAccounts(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) assert.Equal(t, 
process.ErrNilAccountsAdapter, err) @@ -284,6 +291,7 @@ func TestNewPreProcessorsContainerFactory_NilTxProcessor(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) assert.Equal(t, process.ErrNilTxProcessor, err) @@ -318,6 +326,7 @@ func TestNewPreProcessorsContainerFactory_NilSCProcessor(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) assert.Equal(t, process.ErrNilSmartContractProcessor, err) @@ -352,6 +361,7 @@ func TestNewPreProcessorsContainerFactory_NilSCR(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) assert.Equal(t, process.ErrNilSmartContractResultProcessor, err) @@ -386,6 +396,7 @@ func TestNewPreProcessorsContainerFactory_NilRewardTxProcessor(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) assert.Equal(t, process.ErrNilRewardsTxProcessor, err) @@ -420,6 +431,7 @@ func TestNewPreProcessorsContainerFactory_NilRequestHandler(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) assert.Equal(t, process.ErrNilRequestHandler, err) @@ -454,6 +466,7 @@ func TestNewPreProcessorsContainerFactory_NilFeeHandler(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) assert.Equal(t, process.ErrNilEconomicsFeeHandler, err) @@ -488,6 +501,7 @@ func TestNewPreProcessorsContainerFactory_NilGasHandler(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) assert.Equal(t, process.ErrNilGasHandler, err) @@ -522,6 +536,7 @@ 
func TestNewPreProcessorsContainerFactory_NilBlockTracker(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) assert.Equal(t, process.ErrNilBlockTracker, err) @@ -556,6 +571,7 @@ func TestNewPreProcessorsContainerFactory_NilBlockSizeComputationHandler(t *test &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) assert.Equal(t, process.ErrNilBlockSizeComputationHandler, err) @@ -590,6 +606,7 @@ func TestNewPreProcessorsContainerFactory_NilBalanceComputationHandler(t *testin &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) assert.Equal(t, process.ErrNilBalanceComputationHandler, err) @@ -624,6 +641,7 @@ func TestNewPreProcessorsContainerFactory_NilEpochNotifier(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) assert.Equal(t, process.ErrNilEpochNotifier, err) @@ -658,6 +676,7 @@ func TestNewPreProcessorsContainerFactory_NilTxTypeHandler(t *testing.T) { nil, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) assert.Equal(t, process.ErrNilTxTypeHandler, err) @@ -692,6 +711,7 @@ func TestNewPreProcessorsContainerFactory_NilScheduledTxsExecutionHandler(t *tes &testscommon.TxTypeHandlerMock{}, nil, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) assert.Equal(t, process.ErrNilScheduledTxsExecutionHandler, err) @@ -726,6 +746,7 @@ func TestNewPreProcessorsContainerFactory_NilProcessedMiniBlocksTracker(t *testi &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, nil, + 2, ) assert.Equal(t, process.ErrNilProcessedMiniBlocksTracker, err) @@ -760,6 +781,7 @@ func TestNewPreProcessorsContainerFactory(t *testing.T) { &testscommon.TxTypeHandlerMock{}, 
&testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) assert.Nil(t, err) @@ -799,6 +821,7 @@ func TestPreProcessorsContainerFactory_CreateErrTxPreproc(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) assert.Nil(t, err) @@ -844,6 +867,7 @@ func TestPreProcessorsContainerFactory_CreateErrScrPreproc(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) assert.Nil(t, err) @@ -892,6 +916,7 @@ func TestPreProcessorsContainerFactory_Create(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, + 2, ) assert.Nil(t, err) diff --git a/process/peer/interceptedValidatorInfo_test.go b/process/peer/interceptedValidatorInfo_test.go index 662ad7c691a..18c9d160e44 100644 --- a/process/peer/interceptedValidatorInfo_test.go +++ b/process/peer/interceptedValidatorInfo_test.go @@ -152,9 +152,9 @@ func testInterceptedValidatorInfoPropertyLen(property string, tooLong bool) func switch property { case publicKeyProperty: - ivi.validatorInfo.PublicKey = value + ivi.shardValidatorInfo.PublicKey = value case listProperty: - ivi.validatorInfo.List = string(value) + ivi.shardValidatorInfo.List = string(value) default: assert.True(t, false) } @@ -185,7 +185,7 @@ func TestInterceptedValidatorInfo_Getters(t *testing.T) { assert.Equal(t, hash, identifiers[0]) str := ivi.String() - assert.True(t, strings.Contains(str, fmt.Sprintf("pk=%s", logger.DisplayByteSlice(ivi.validatorInfo.PublicKey)))) + assert.True(t, strings.Contains(str, fmt.Sprintf("pk=%s", logger.DisplayByteSlice(ivi.shardValidatorInfo.PublicKey)))) assert.True(t, strings.Contains(str, fmt.Sprintf("shard=%d", validatorInfo.ShardId))) assert.True(t, strings.Contains(str, fmt.Sprintf("list=%s", validatorInfo.List))) 
assert.True(t, strings.Contains(str, fmt.Sprintf("index=%d", validatorInfo.Index))) diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinator.go b/sharding/nodesCoordinator/indexHashedNodesCoordinator.go index c7d1bd97c40..efdc48a91b0 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinator.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinator.go @@ -68,32 +68,34 @@ type epochNodesConfig struct { } type indexHashedNodesCoordinator struct { - shardIDAsObserver uint32 - currentEpoch uint32 - shardConsensusGroupSize int - metaConsensusGroupSize int - numTotalEligible uint64 - selfPubKey []byte - savedStateKey []byte - marshalizer marshal.Marshalizer - hasher hashing.Hasher - shuffler NodesShuffler - epochStartRegistrationHandler EpochStartEventNotifier - bootStorer storage.Storer - nodesConfig map[uint32]*epochNodesConfig - mutNodesConfig sync.RWMutex - mutSavedStateKey sync.RWMutex - nodesCoordinatorHelper NodesCoordinatorHelper - consensusGroupCacher Cacher - loadingFromDisk atomic.Value - shuffledOutHandler ShuffledOutHandler - startEpoch uint32 - publicKeyToValidatorMap map[string]*validatorWithShardID - waitingListFixEnableEpoch uint32 - isFullArchive bool - chanStopNode chan endProcess.ArgEndProcess - flagWaitingListFix atomicFlags.Flag - nodeTypeProvider NodeTypeProviderHandler + shardIDAsObserver uint32 + currentEpoch uint32 + shardConsensusGroupSize int + metaConsensusGroupSize int + numTotalEligible uint64 + selfPubKey []byte + savedStateKey []byte + marshalizer marshal.Marshalizer + hasher hashing.Hasher + shuffler NodesShuffler + epochStartRegistrationHandler EpochStartEventNotifier + bootStorer storage.Storer + nodesConfig map[uint32]*epochNodesConfig + mutNodesConfig sync.RWMutex + mutSavedStateKey sync.RWMutex + nodesCoordinatorHelper NodesCoordinatorHelper + consensusGroupCacher Cacher + loadingFromDisk atomic.Value + shuffledOutHandler ShuffledOutHandler + startEpoch uint32 + publicKeyToValidatorMap 
map[string]*validatorWithShardID + waitingListFixEnableEpoch uint32 + isFullArchive bool + chanStopNode chan endProcess.ArgEndProcess + flagWaitingListFix atomicFlags.Flag + nodeTypeProvider NodeTypeProviderHandler + refactorPeersMiniBlocksEnableEpoch uint32 + flagRefactorPeersMiniBlocks atomicFlags.Flag } // NewIndexHashedNodesCoordinator creates a new index hashed group selector @@ -118,28 +120,30 @@ func NewIndexHashedNodesCoordinator(arguments ArgNodesCoordinator) (*indexHashed savedKey := arguments.Hasher.Compute(string(arguments.SelfPublicKey)) ihnc := &indexHashedNodesCoordinator{ - marshalizer: arguments.Marshalizer, - hasher: arguments.Hasher, - shuffler: arguments.Shuffler, - epochStartRegistrationHandler: arguments.EpochStartNotifier, - bootStorer: arguments.BootStorer, - selfPubKey: arguments.SelfPublicKey, - nodesConfig: nodesConfig, - currentEpoch: arguments.Epoch, - savedStateKey: savedKey, - shardConsensusGroupSize: arguments.ShardConsensusGroupSize, - metaConsensusGroupSize: arguments.MetaConsensusGroupSize, - consensusGroupCacher: arguments.ConsensusGroupCache, - shardIDAsObserver: arguments.ShardIDAsObserver, - shuffledOutHandler: arguments.ShuffledOutHandler, - startEpoch: arguments.StartEpoch, - publicKeyToValidatorMap: make(map[string]*validatorWithShardID), - waitingListFixEnableEpoch: arguments.WaitingListFixEnabledEpoch, - chanStopNode: arguments.ChanStopNode, - nodeTypeProvider: arguments.NodeTypeProvider, - isFullArchive: arguments.IsFullArchive, + marshalizer: arguments.Marshalizer, + hasher: arguments.Hasher, + shuffler: arguments.Shuffler, + epochStartRegistrationHandler: arguments.EpochStartNotifier, + bootStorer: arguments.BootStorer, + selfPubKey: arguments.SelfPublicKey, + nodesConfig: nodesConfig, + currentEpoch: arguments.Epoch, + savedStateKey: savedKey, + shardConsensusGroupSize: arguments.ShardConsensusGroupSize, + metaConsensusGroupSize: arguments.MetaConsensusGroupSize, + consensusGroupCacher: arguments.ConsensusGroupCache, 
+ shardIDAsObserver: arguments.ShardIDAsObserver, + shuffledOutHandler: arguments.ShuffledOutHandler, + startEpoch: arguments.StartEpoch, + publicKeyToValidatorMap: make(map[string]*validatorWithShardID), + waitingListFixEnableEpoch: arguments.WaitingListFixEnabledEpoch, + chanStopNode: arguments.ChanStopNode, + nodeTypeProvider: arguments.NodeTypeProvider, + isFullArchive: arguments.IsFullArchive, + refactorPeersMiniBlocksEnableEpoch: arguments.RefactorPeersMiniBlocksEnableEpoch, } log.Debug("indexHashedNodesCoordinator: enable epoch for waiting waiting list", "epoch", ihnc.waitingListFixEnableEpoch) + log.Debug("indexHashedNodesCoordinator: enable epoch for refactor peers mini blocks", "epoch", ihnc.refactorPeersMiniBlocksEnableEpoch) ihnc.loadingFromDisk.Store(false) @@ -556,14 +560,14 @@ func (ihnc *indexHashedNodesCoordinator) EpochStartPrepare(metaHdr data.HeaderHa return } - allValidatorInfo, err := createValidatorInfoFromBody(body, ihnc.numTotalEligible, validatorInfoCacher) + ihnc.updateEpochFlags(newEpoch) + + allValidatorInfo, err := ihnc.createValidatorInfoFromBody(body, ihnc.numTotalEligible, validatorInfoCacher) if err != nil { log.Error("could not create validator info from body - do nothing on nodesCoordinator epochStartPrepare", "error", err.Error()) return } - ihnc.updateEpochFlags(newEpoch) - ihnc.mutNodesConfig.RLock() previousConfig := ihnc.nodesConfig[ihnc.currentEpoch] if previousConfig == nil { @@ -1159,7 +1163,7 @@ func selectValidators( } // createValidatorInfoFromBody unmarshalls body data to create validator info -func createValidatorInfoFromBody( +func (ihnc *indexHashedNodesCoordinator) createValidatorInfoFromBody( body data.BodyHandler, previousTotal uint64, validatorInfoCacher epochStart.ValidatorInfoCacher, @@ -1180,7 +1184,7 @@ func createValidatorInfoFromBody( } for _, txHash := range peerMiniBlock.TxHashes { - shardValidatorInfo, err := validatorInfoCacher.GetValidatorInfo(txHash) + shardValidatorInfo, err := 
ihnc.getShardValidatorInfoData(txHash, validatorInfoCacher) if err != nil { return nil, err } @@ -1192,7 +1196,28 @@ func createValidatorInfoFromBody( return allValidatorInfo, nil } +func (ihnc *indexHashedNodesCoordinator) getShardValidatorInfoData(txHash []byte, validatorInfoCacher epochStart.ValidatorInfoCacher) (*state.ShardValidatorInfo, error) { + if ihnc.flagRefactorPeersMiniBlocks.IsSet() { + shardValidatorInfo, err := validatorInfoCacher.GetValidatorInfo(txHash) + if err != nil { + return nil, err + } + + return shardValidatorInfo, nil + } + + shardValidatorInfo := &state.ShardValidatorInfo{} + err := ihnc.marshalizer.Unmarshal(shardValidatorInfo, txHash) + if err != nil { + return nil, err + } + + return shardValidatorInfo, nil +} + func (ihnc *indexHashedNodesCoordinator) updateEpochFlags(epoch uint32) { ihnc.flagWaitingListFix.SetValue(epoch >= ihnc.waitingListFixEnableEpoch) log.Debug("indexHashedNodesCoordinator: waiting list fix", "enabled", ihnc.flagWaitingListFix.IsSet()) + ihnc.flagRefactorPeersMiniBlocks.SetValue(epoch >= ihnc.refactorPeersMiniBlocksEnableEpoch) + log.Debug("indexHashedNodesCoordinator: refactor peers mini blocks", "enabled", ihnc.flagRefactorPeersMiniBlocks.IsSet()) } diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinatorLite_test.go b/sharding/nodesCoordinator/indexHashedNodesCoordinatorLite_test.go index 5076bfbee13..17981e262b0 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinatorLite_test.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinatorLite_test.go @@ -163,9 +163,11 @@ func TestIndexHashedNodesCoordinator_IsEpochInConfig(t *testing.T) { epoch := uint32(1) ihnc.nodesConfig[epoch] = ihnc.nodesConfig[0] + ihnc.updateEpochFlags(epoch) + validatorInfoCacher := dataPool.NewCurrentEpochValidatorInfoPool() body := createBlockBodyFromNodesCoordinator(ihnc, epoch, validatorInfoCacher) - validatorsInfo, _ := createValidatorInfoFromBody(body, 10, validatorInfoCacher) + validatorsInfo, _ := 
ihnc.createValidatorInfoFromBody(body, 10, validatorInfoCacher) err = ihnc.SetNodesConfigFromValidatorsInfo(epoch, []byte{}, validatorsInfo) require.Nil(t, err) diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinatorWithRater_test.go b/sharding/nodesCoordinator/indexHashedNodesCoordinatorWithRater_test.go index 53ad9b39774..617f6b0d1a1 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinatorWithRater_test.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinatorWithRater_test.go @@ -77,23 +77,24 @@ func TestIndexHashedGroupSelectorWithRater_OkValShouldWork(t *testing.T) { bootStorer := genericMocks.NewStorerMock() arguments := ArgNodesCoordinator{ - ShardConsensusGroupSize: 2, - MetaConsensusGroupSize: 1, - Marshalizer: &mock.MarshalizerMock{}, - Hasher: &hashingMocks.HasherMock{}, - Shuffler: nodeShuffler, - EpochStartNotifier: epochStartSubscriber, - BootStorer: bootStorer, - NbShards: 1, - EligibleNodes: eligibleMap, - WaitingNodes: waitingMap, - SelfPublicKey: []byte("test"), - ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: make(chan endProcess.ArgEndProcess), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - IsFullArchive: false, + ShardConsensusGroupSize: 2, + MetaConsensusGroupSize: 1, + Marshalizer: &mock.MarshalizerMock{}, + Hasher: &hashingMocks.HasherMock{}, + Shuffler: nodeShuffler, + EpochStartNotifier: epochStartSubscriber, + BootStorer: bootStorer, + NbShards: 1, + EligibleNodes: eligibleMap, + WaitingNodes: waitingMap, + SelfPublicKey: []byte("test"), + ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: make(chan endProcess.ArgEndProcess), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + IsFullArchive: false, + RefactorPeersMiniBlocksEnableEpoch: 0, } nc, err := 
NewIndexHashedNodesCoordinator(arguments) assert.Nil(t, err) @@ -172,22 +173,23 @@ func BenchmarkIndexHashedGroupSelectorWithRater_ComputeValidatorsGroup63of400(b bootStorer := genericMocks.NewStorerMock() arguments := ArgNodesCoordinator{ - ShardConsensusGroupSize: consensusGroupSize, - MetaConsensusGroupSize: 1, - Marshalizer: &mock.MarshalizerMock{}, - Hasher: &hashingMocks.HasherMock{}, - Shuffler: nodeShuffler, - EpochStartNotifier: epochStartSubscriber, - BootStorer: bootStorer, - NbShards: 1, - EligibleNodes: eligibleMap, - WaitingNodes: waitingMap, - SelfPublicKey: []byte("key"), - ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: make(chan endProcess.ArgEndProcess), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - IsFullArchive: false, + ShardConsensusGroupSize: consensusGroupSize, + MetaConsensusGroupSize: 1, + Marshalizer: &mock.MarshalizerMock{}, + Hasher: &hashingMocks.HasherMock{}, + Shuffler: nodeShuffler, + EpochStartNotifier: epochStartSubscriber, + BootStorer: bootStorer, + NbShards: 1, + EligibleNodes: eligibleMap, + WaitingNodes: waitingMap, + SelfPublicKey: []byte("key"), + ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: make(chan endProcess.ArgEndProcess), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + IsFullArchive: false, + RefactorPeersMiniBlocksEnableEpoch: 0, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(b, err) @@ -246,21 +248,22 @@ func Test_ComputeValidatorsGroup63of400(t *testing.T) { bootStorer := genericMocks.NewStorerMock() arguments := ArgNodesCoordinator{ - ShardConsensusGroupSize: consensusGroupSize, - MetaConsensusGroupSize: 1, - Hasher: &hashingMocks.HasherMock{}, - Shuffler: nodeShuffler, - EpochStartNotifier: epochStartSubscriber, - BootStorer: bootStorer, - NbShards: 1, - EligibleNodes: eligibleMap, - WaitingNodes: waitingMap, - SelfPublicKey: 
[]byte("key"), - ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: make(chan endProcess.ArgEndProcess), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - IsFullArchive: false, + ShardConsensusGroupSize: consensusGroupSize, + MetaConsensusGroupSize: 1, + Hasher: &hashingMocks.HasherMock{}, + Shuffler: nodeShuffler, + EpochStartNotifier: epochStartSubscriber, + BootStorer: bootStorer, + NbShards: 1, + EligibleNodes: eligibleMap, + WaitingNodes: waitingMap, + SelfPublicKey: []byte("key"), + ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: make(chan endProcess.ArgEndProcess), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + IsFullArchive: false, + RefactorPeersMiniBlocksEnableEpoch: 0, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) numRounds := uint64(1000000) @@ -317,23 +320,24 @@ func TestIndexHashedGroupSelectorWithRater_GetValidatorWithPublicKeyShouldReturn bootStorer := genericMocks.NewStorerMock() arguments := ArgNodesCoordinator{ - ShardConsensusGroupSize: 1, - MetaConsensusGroupSize: 1, - Marshalizer: &mock.MarshalizerMock{}, - Hasher: &hashingMocks.HasherMock{}, - Shuffler: nodeShuffler, - EpochStartNotifier: epochStartSubscriber, - BootStorer: bootStorer, - NbShards: 1, - EligibleNodes: eligibleMap, - WaitingNodes: waitingMap, - SelfPublicKey: []byte("key"), - ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: make(chan endProcess.ArgEndProcess), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - IsFullArchive: false, + ShardConsensusGroupSize: 1, + MetaConsensusGroupSize: 1, + Marshalizer: &mock.MarshalizerMock{}, + Hasher: &hashingMocks.HasherMock{}, + Shuffler: nodeShuffler, + EpochStartNotifier: epochStartSubscriber, + BootStorer: bootStorer, + NbShards: 1, + EligibleNodes: 
eligibleMap, + WaitingNodes: waitingMap, + SelfPublicKey: []byte("key"), + ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: make(chan endProcess.ArgEndProcess), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + IsFullArchive: false, + RefactorPeersMiniBlocksEnableEpoch: 0, } nc, _ := NewIndexHashedNodesCoordinator(arguments) ihnc, _ := NewIndexHashedNodesCoordinatorWithRater(nc, &mock.RaterMock{}) @@ -369,23 +373,24 @@ func TestIndexHashedGroupSelectorWithRater_GetValidatorWithPublicKeyShouldReturn bootStorer := genericMocks.NewStorerMock() arguments := ArgNodesCoordinator{ - ShardConsensusGroupSize: 1, - MetaConsensusGroupSize: 1, - Marshalizer: &mock.MarshalizerMock{}, - Hasher: &hashingMocks.HasherMock{}, - Shuffler: nodeShuffler, - EpochStartNotifier: epochStartSubscriber, - BootStorer: bootStorer, - NbShards: 1, - EligibleNodes: eligibleMap, - WaitingNodes: waitingMap, - SelfPublicKey: []byte("key"), - ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: make(chan endProcess.ArgEndProcess), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - IsFullArchive: false, + ShardConsensusGroupSize: 1, + MetaConsensusGroupSize: 1, + Marshalizer: &mock.MarshalizerMock{}, + Hasher: &hashingMocks.HasherMock{}, + Shuffler: nodeShuffler, + EpochStartNotifier: epochStartSubscriber, + BootStorer: bootStorer, + NbShards: 1, + EligibleNodes: eligibleMap, + WaitingNodes: waitingMap, + SelfPublicKey: []byte("key"), + ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: make(chan endProcess.ArgEndProcess), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + IsFullArchive: false, + RefactorPeersMiniBlocksEnableEpoch: 
0, } nc, _ := NewIndexHashedNodesCoordinator(arguments) ihnc, _ := NewIndexHashedNodesCoordinatorWithRater(nc, &mock.RaterMock{}) @@ -435,23 +440,24 @@ func TestIndexHashedGroupSelectorWithRater_GetValidatorWithPublicKeyShouldWork(t eligibleMap[1] = listShard1 arguments := ArgNodesCoordinator{ - ShardConsensusGroupSize: 1, - MetaConsensusGroupSize: 1, - Marshalizer: &mock.MarshalizerMock{}, - Hasher: &hashingMocks.HasherMock{}, - Shuffler: nodeShuffler, - EpochStartNotifier: epochStartSubscriber, - BootStorer: bootStorer, - NbShards: 2, - EligibleNodes: eligibleMap, - WaitingNodes: waitingMap, - SelfPublicKey: []byte("key"), - ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: make(chan endProcess.ArgEndProcess), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - IsFullArchive: false, + ShardConsensusGroupSize: 1, + MetaConsensusGroupSize: 1, + Marshalizer: &mock.MarshalizerMock{}, + Hasher: &hashingMocks.HasherMock{}, + Shuffler: nodeShuffler, + EpochStartNotifier: epochStartSubscriber, + BootStorer: bootStorer, + NbShards: 2, + EligibleNodes: eligibleMap, + WaitingNodes: waitingMap, + SelfPublicKey: []byte("key"), + ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: make(chan endProcess.ArgEndProcess), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + IsFullArchive: false, + RefactorPeersMiniBlocksEnableEpoch: 0, } nc, _ := NewIndexHashedNodesCoordinator(arguments) ihnc, _ := NewIndexHashedNodesCoordinatorWithRater(nc, &mock.RaterMock{}) @@ -517,24 +523,25 @@ func TestIndexHashedGroupSelectorWithRater_GetAllEligibleValidatorsPublicKeys(t eligibleMap[shardOneId] = listShard1 arguments := ArgNodesCoordinator{ - ShardConsensusGroupSize: 1, - MetaConsensusGroupSize: 1, - Marshalizer: &mock.MarshalizerMock{}, - Hasher: 
&hashingMocks.HasherMock{}, - Shuffler: nodeShuffler, - EpochStartNotifier: epochStartSubscriber, - BootStorer: bootStorer, - ShardIDAsObserver: shardZeroId, - NbShards: 2, - EligibleNodes: eligibleMap, - WaitingNodes: waitingMap, - SelfPublicKey: []byte("key"), - ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: make(chan endProcess.ArgEndProcess), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - IsFullArchive: false, + ShardConsensusGroupSize: 1, + MetaConsensusGroupSize: 1, + Marshalizer: &mock.MarshalizerMock{}, + Hasher: &hashingMocks.HasherMock{}, + Shuffler: nodeShuffler, + EpochStartNotifier: epochStartSubscriber, + BootStorer: bootStorer, + ShardIDAsObserver: shardZeroId, + NbShards: 2, + EligibleNodes: eligibleMap, + WaitingNodes: waitingMap, + SelfPublicKey: []byte("key"), + ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: make(chan endProcess.ArgEndProcess), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + IsFullArchive: false, + RefactorPeersMiniBlocksEnableEpoch: 0, } nc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -822,23 +829,24 @@ func BenchmarkIndexHashedWithRaterGroupSelector_ComputeValidatorsGroup21of400(b bootStorer := genericMocks.NewStorerMock() arguments := ArgNodesCoordinator{ - ShardConsensusGroupSize: consensusGroupSize, - MetaConsensusGroupSize: 1, - Marshalizer: &mock.MarshalizerMock{}, - Hasher: &hashingMocks.HasherMock{}, - Shuffler: nodeShuffler, - EpochStartNotifier: epochStartSubscriber, - BootStorer: bootStorer, - NbShards: 1, - EligibleNodes: eligibleMap, - WaitingNodes: waitingMap, - SelfPublicKey: []byte("key"), - ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: 
make(chan endProcess.ArgEndProcess), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - IsFullArchive: false, + ShardConsensusGroupSize: consensusGroupSize, + MetaConsensusGroupSize: 1, + Marshalizer: &mock.MarshalizerMock{}, + Hasher: &hashingMocks.HasherMock{}, + Shuffler: nodeShuffler, + EpochStartNotifier: epochStartSubscriber, + BootStorer: bootStorer, + NbShards: 1, + EligibleNodes: eligibleMap, + WaitingNodes: waitingMap, + SelfPublicKey: []byte("key"), + ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: make(chan endProcess.ArgEndProcess), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + IsFullArchive: false, + RefactorPeersMiniBlocksEnableEpoch: 0, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(b, err) diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go b/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go index 4f42fb48898..149cdbda3dc 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go @@ -97,23 +97,24 @@ func createArguments() ArgNodesCoordinator { bootStorer := genericMocks.NewStorerMock() arguments := ArgNodesCoordinator{ - ShardConsensusGroupSize: 1, - MetaConsensusGroupSize: 1, - Marshalizer: &mock.MarshalizerMock{}, - Hasher: &hashingMocks.HasherMock{}, - Shuffler: nodeShuffler, - EpochStartNotifier: epochStartSubscriber, - BootStorer: bootStorer, - NbShards: nbShards, - EligibleNodes: eligibleMap, - WaitingNodes: waitingMap, - SelfPublicKey: []byte("test"), - ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - IsFullArchive: false, - ChanStopNode: make(chan endProcess.ArgEndProcess), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + 
ShardConsensusGroupSize: 1, + MetaConsensusGroupSize: 1, + Marshalizer: &mock.MarshalizerMock{}, + Hasher: &hashingMocks.HasherMock{}, + Shuffler: nodeShuffler, + EpochStartNotifier: epochStartSubscriber, + BootStorer: bootStorer, + NbShards: nbShards, + EligibleNodes: eligibleMap, + WaitingNodes: waitingMap, + SelfPublicKey: []byte("test"), + ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + IsFullArchive: false, + ChanStopNode: make(chan endProcess.ArgEndProcess), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + RefactorPeersMiniBlocksEnableEpoch: 0, } return arguments } @@ -246,22 +247,23 @@ func TestIndexHashedNodesCoordinator_OkValShouldWork(t *testing.T) { bootStorer := genericMocks.NewStorerMock() arguments := ArgNodesCoordinator{ - ShardConsensusGroupSize: 2, - MetaConsensusGroupSize: 1, - Marshalizer: &mock.MarshalizerMock{}, - Hasher: &hashingMocks.HasherMock{}, - Shuffler: nodeShuffler, - EpochStartNotifier: epochStartSubscriber, - BootStorer: bootStorer, - NbShards: 1, - EligibleNodes: eligibleMap, - WaitingNodes: waitingMap, - SelfPublicKey: []byte("key"), - ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: make(chan endProcess.ArgEndProcess), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + ShardConsensusGroupSize: 2, + MetaConsensusGroupSize: 1, + Marshalizer: &mock.MarshalizerMock{}, + Hasher: &hashingMocks.HasherMock{}, + Shuffler: nodeShuffler, + EpochStartNotifier: epochStartSubscriber, + BootStorer: bootStorer, + NbShards: 1, + EligibleNodes: eligibleMap, + WaitingNodes: waitingMap, + SelfPublicKey: []byte("key"), + ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: make(chan endProcess.ArgEndProcess), 
+ NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + RefactorPeersMiniBlocksEnableEpoch: 0, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -304,22 +306,23 @@ func TestIndexHashedNodesCoordinator_NewCoordinatorTooFewNodesShouldErr(t *testi bootStorer := genericMocks.NewStorerMock() arguments := ArgNodesCoordinator{ - ShardConsensusGroupSize: 10, - MetaConsensusGroupSize: 1, - Marshalizer: &mock.MarshalizerMock{}, - Hasher: &hashingMocks.HasherMock{}, - Shuffler: nodeShuffler, - EpochStartNotifier: epochStartSubscriber, - BootStorer: bootStorer, - NbShards: 1, - EligibleNodes: eligibleMap, - WaitingNodes: waitingMap, - SelfPublicKey: []byte("key"), - ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: make(chan endProcess.ArgEndProcess), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + ShardConsensusGroupSize: 10, + MetaConsensusGroupSize: 1, + Marshalizer: &mock.MarshalizerMock{}, + Hasher: &hashingMocks.HasherMock{}, + Shuffler: nodeShuffler, + EpochStartNotifier: epochStartSubscriber, + BootStorer: bootStorer, + NbShards: 1, + EligibleNodes: eligibleMap, + WaitingNodes: waitingMap, + SelfPublicKey: []byte("key"), + ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: make(chan endProcess.ArgEndProcess), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + RefactorPeersMiniBlocksEnableEpoch: 0, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -376,22 +379,23 @@ func TestIndexHashedNodesCoordinator_ComputeValidatorsGroup1ValidatorShouldRetur bootStorer := genericMocks.NewStorerMock() arguments := ArgNodesCoordinator{ - ShardConsensusGroupSize: 1, - MetaConsensusGroupSize: 1, - Marshalizer: &mock.MarshalizerMock{}, - Hasher: &hashingMocks.HasherMock{}, - Shuffler: nodeShuffler, - 
EpochStartNotifier: epochStartSubscriber, - BootStorer: bootStorer, - NbShards: 1, - EligibleNodes: nodesMap, - WaitingNodes: make(map[uint32][]Validator), - SelfPublicKey: []byte("key"), - ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: make(chan endProcess.ArgEndProcess), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + ShardConsensusGroupSize: 1, + MetaConsensusGroupSize: 1, + Marshalizer: &mock.MarshalizerMock{}, + Hasher: &hashingMocks.HasherMock{}, + Shuffler: nodeShuffler, + EpochStartNotifier: epochStartSubscriber, + BootStorer: bootStorer, + NbShards: 1, + EligibleNodes: nodesMap, + WaitingNodes: make(map[uint32][]Validator), + SelfPublicKey: []byte("key"), + ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: make(chan endProcess.ArgEndProcess), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + RefactorPeersMiniBlocksEnableEpoch: 0, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) list2, err := ihnc.ComputeConsensusGroup([]byte("randomness"), 0, 0, 0) @@ -434,22 +438,23 @@ func TestIndexHashedNodesCoordinator_ComputeValidatorsGroup400of400For10locksNoM } arguments := ArgNodesCoordinator{ - ShardConsensusGroupSize: consensusGroupSize, - MetaConsensusGroupSize: 1, - Marshalizer: &mock.MarshalizerMock{}, - Hasher: &hashingMocks.HasherMock{}, - Shuffler: nodeShuffler, - EpochStartNotifier: epochStartSubscriber, - BootStorer: bootStorer, - NbShards: 1, - EligibleNodes: eligibleMap, - WaitingNodes: waitingMap, - SelfPublicKey: []byte("key"), - ConsensusGroupCache: cache, - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: make(chan endProcess.ArgEndProcess), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + ShardConsensusGroupSize: 
consensusGroupSize, + MetaConsensusGroupSize: 1, + Marshalizer: &mock.MarshalizerMock{}, + Hasher: &hashingMocks.HasherMock{}, + Shuffler: nodeShuffler, + EpochStartNotifier: epochStartSubscriber, + BootStorer: bootStorer, + NbShards: 1, + EligibleNodes: eligibleMap, + WaitingNodes: waitingMap, + SelfPublicKey: []byte("key"), + ConsensusGroupCache: cache, + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: make(chan endProcess.ArgEndProcess), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + RefactorPeersMiniBlocksEnableEpoch: 0, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -520,22 +525,23 @@ func TestIndexHashedNodesCoordinator_ComputeValidatorsGroup400of400For10BlocksMe } arguments := ArgNodesCoordinator{ - ShardConsensusGroupSize: consensusGroupSize, - MetaConsensusGroupSize: 1, - Marshalizer: &mock.MarshalizerMock{}, - Hasher: &hashingMocks.HasherMock{}, - Shuffler: nodeShuffler, - EpochStartNotifier: epochStartSubscriber, - BootStorer: bootStorer, - NbShards: 1, - EligibleNodes: eligibleMap, - WaitingNodes: waitingMap, - SelfPublicKey: []byte("key"), - ConsensusGroupCache: cache, - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: make(chan endProcess.ArgEndProcess), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + ShardConsensusGroupSize: consensusGroupSize, + MetaConsensusGroupSize: 1, + Marshalizer: &mock.MarshalizerMock{}, + Hasher: &hashingMocks.HasherMock{}, + Shuffler: nodeShuffler, + EpochStartNotifier: epochStartSubscriber, + BootStorer: bootStorer, + NbShards: 1, + EligibleNodes: eligibleMap, + WaitingNodes: waitingMap, + SelfPublicKey: []byte("key"), + ConsensusGroupCache: cache, + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: make(chan endProcess.ArgEndProcess), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + 
RefactorPeersMiniBlocksEnableEpoch: 0, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -651,22 +657,23 @@ func BenchmarkIndexHashedGroupSelector_ComputeValidatorsGroup21of400(b *testing. bootStorer := genericMocks.NewStorerMock() arguments := ArgNodesCoordinator{ - ShardConsensusGroupSize: consensusGroupSize, - MetaConsensusGroupSize: 1, - Marshalizer: &mock.MarshalizerMock{}, - Hasher: &hashingMocks.HasherMock{}, - Shuffler: nodeShuffler, - EpochStartNotifier: epochStartSubscriber, - BootStorer: bootStorer, - NbShards: 1, - EligibleNodes: eligibleMap, - WaitingNodes: waitingMap, - SelfPublicKey: []byte("key"), - ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: make(chan endProcess.ArgEndProcess), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + ShardConsensusGroupSize: consensusGroupSize, + MetaConsensusGroupSize: 1, + Marshalizer: &mock.MarshalizerMock{}, + Hasher: &hashingMocks.HasherMock{}, + Shuffler: nodeShuffler, + EpochStartNotifier: epochStartSubscriber, + BootStorer: bootStorer, + NbShards: 1, + EligibleNodes: eligibleMap, + WaitingNodes: waitingMap, + SelfPublicKey: []byte("key"), + ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: make(chan endProcess.ArgEndProcess), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + RefactorPeersMiniBlocksEnableEpoch: 0, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -722,22 +729,23 @@ func runBenchmark(consensusGroupCache Cacher, consensusGroupSize int, nodesMap m bootStorer := genericMocks.NewStorerMock() arguments := ArgNodesCoordinator{ - ShardConsensusGroupSize: consensusGroupSize, - MetaConsensusGroupSize: 1, - Marshalizer: &mock.MarshalizerMock{}, - Hasher: &hashingMocks.HasherMock{}, - EpochStartNotifier: epochStartSubscriber, - Shuffler: 
nodeShuffler, - BootStorer: bootStorer, - NbShards: 1, - EligibleNodes: nodesMap, - WaitingNodes: waitingMap, - SelfPublicKey: []byte("key"), - ConsensusGroupCache: consensusGroupCache, - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: make(chan endProcess.ArgEndProcess), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + ShardConsensusGroupSize: consensusGroupSize, + MetaConsensusGroupSize: 1, + Marshalizer: &mock.MarshalizerMock{}, + Hasher: &hashingMocks.HasherMock{}, + EpochStartNotifier: epochStartSubscriber, + Shuffler: nodeShuffler, + BootStorer: bootStorer, + NbShards: 1, + EligibleNodes: nodesMap, + WaitingNodes: waitingMap, + SelfPublicKey: []byte("key"), + ConsensusGroupCache: consensusGroupCache, + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: make(chan endProcess.ArgEndProcess), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + RefactorPeersMiniBlocksEnableEpoch: 0, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -770,22 +778,23 @@ func computeMemoryRequirements(consensusGroupCache Cacher, consensusGroupSize in bootStorer := genericMocks.NewStorerMock() arguments := ArgNodesCoordinator{ - ShardConsensusGroupSize: consensusGroupSize, - MetaConsensusGroupSize: 1, - Marshalizer: &mock.MarshalizerMock{}, - Hasher: &hashingMocks.HasherMock{}, - EpochStartNotifier: epochStartSubscriber, - Shuffler: nodeShuffler, - BootStorer: bootStorer, - NbShards: 1, - EligibleNodes: nodesMap, - WaitingNodes: waitingMap, - SelfPublicKey: []byte("key"), - ConsensusGroupCache: consensusGroupCache, - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: make(chan endProcess.ArgEndProcess), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + ShardConsensusGroupSize: consensusGroupSize, + MetaConsensusGroupSize: 1, + Marshalizer: &mock.MarshalizerMock{}, + Hasher: 
&hashingMocks.HasherMock{}, + EpochStartNotifier: epochStartSubscriber, + Shuffler: nodeShuffler, + BootStorer: bootStorer, + NbShards: 1, + EligibleNodes: nodesMap, + WaitingNodes: waitingMap, + SelfPublicKey: []byte("key"), + ConsensusGroupCache: consensusGroupCache, + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: make(chan endProcess.ArgEndProcess), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + RefactorPeersMiniBlocksEnableEpoch: 0, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(b, err) @@ -908,22 +917,23 @@ func TestIndexHashedNodesCoordinator_GetValidatorWithPublicKeyShouldWork(t *test bootStorer := genericMocks.NewStorerMock() arguments := ArgNodesCoordinator{ - ShardConsensusGroupSize: 1, - MetaConsensusGroupSize: 1, - Marshalizer: &mock.MarshalizerMock{}, - Hasher: &hashingMocks.HasherMock{}, - Shuffler: nodeShuffler, - EpochStartNotifier: epochStartSubscriber, - BootStorer: bootStorer, - NbShards: 2, - EligibleNodes: eligibleMap, - WaitingNodes: make(map[uint32][]Validator), - SelfPublicKey: []byte("key"), - ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: make(chan endProcess.ArgEndProcess), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + ShardConsensusGroupSize: 1, + MetaConsensusGroupSize: 1, + Marshalizer: &mock.MarshalizerMock{}, + Hasher: &hashingMocks.HasherMock{}, + Shuffler: nodeShuffler, + EpochStartNotifier: epochStartSubscriber, + BootStorer: bootStorer, + NbShards: 2, + EligibleNodes: eligibleMap, + WaitingNodes: make(map[uint32][]Validator), + SelfPublicKey: []byte("key"), + ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: make(chan endProcess.ArgEndProcess), + NodeTypeProvider: 
&nodeTypeProviderMock.NodeTypeProviderStub{}, + RefactorPeersMiniBlocksEnableEpoch: 0, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -989,23 +999,24 @@ func TestIndexHashedGroupSelector_GetAllEligibleValidatorsPublicKeys(t *testing. bootStorer := genericMocks.NewStorerMock() arguments := ArgNodesCoordinator{ - ShardConsensusGroupSize: 1, - MetaConsensusGroupSize: 1, - Marshalizer: &mock.MarshalizerMock{}, - Hasher: &hashingMocks.HasherMock{}, - Shuffler: nodeShuffler, - EpochStartNotifier: epochStartSubscriber, - BootStorer: bootStorer, - ShardIDAsObserver: shardZeroId, - NbShards: 2, - EligibleNodes: eligibleMap, - WaitingNodes: make(map[uint32][]Validator), - SelfPublicKey: []byte("key"), - ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: make(chan endProcess.ArgEndProcess), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + ShardConsensusGroupSize: 1, + MetaConsensusGroupSize: 1, + Marshalizer: &mock.MarshalizerMock{}, + Hasher: &hashingMocks.HasherMock{}, + Shuffler: nodeShuffler, + EpochStartNotifier: epochStartSubscriber, + BootStorer: bootStorer, + ShardIDAsObserver: shardZeroId, + NbShards: 2, + EligibleNodes: eligibleMap, + WaitingNodes: make(map[uint32][]Validator), + SelfPublicKey: []byte("key"), + ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: make(chan endProcess.ArgEndProcess), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + RefactorPeersMiniBlocksEnableEpoch: 0, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -1066,23 +1077,24 @@ func TestIndexHashedGroupSelector_GetAllWaitingValidatorsPublicKeys(t *testing.T eligibleMap[shardZeroId] = []Validator{&validator{}} arguments := ArgNodesCoordinator{ - ShardConsensusGroupSize: 1, - MetaConsensusGroupSize: 1, - Marshalizer: 
&mock.MarshalizerMock{}, - Hasher: &hashingMocks.HasherMock{}, - Shuffler: nodeShuffler, - EpochStartNotifier: epochStartSubscriber, - BootStorer: bootStorer, - ShardIDAsObserver: shardZeroId, - NbShards: 2, - EligibleNodes: eligibleMap, - WaitingNodes: waitingMap, - SelfPublicKey: []byte("key"), - ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: make(chan endProcess.ArgEndProcess), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + ShardConsensusGroupSize: 1, + MetaConsensusGroupSize: 1, + Marshalizer: &mock.MarshalizerMock{}, + Hasher: &hashingMocks.HasherMock{}, + Shuffler: nodeShuffler, + EpochStartNotifier: epochStartSubscriber, + BootStorer: bootStorer, + ShardIDAsObserver: shardZeroId, + NbShards: 2, + EligibleNodes: eligibleMap, + WaitingNodes: waitingMap, + SelfPublicKey: []byte("key"), + ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: make(chan endProcess.ArgEndProcess), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + RefactorPeersMiniBlocksEnableEpoch: 0, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -1425,22 +1437,23 @@ func TestIndexHashedNodesCoordinator_EpochStart_EligibleSortedAscendingByIndex(t bootStorer := genericMocks.NewStorerMock() arguments := ArgNodesCoordinator{ - ShardConsensusGroupSize: 1, - MetaConsensusGroupSize: 1, - Marshalizer: &mock.MarshalizerMock{}, - Hasher: &hashingMocks.HasherMock{}, - Shuffler: nodeShuffler, - EpochStartNotifier: epochStartSubscriber, - BootStorer: bootStorer, - NbShards: nbShards, - EligibleNodes: eligibleMap, - WaitingNodes: map[uint32][]Validator{}, - SelfPublicKey: []byte("test"), - ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, - ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, - WaitingListFixEnabledEpoch: 0, - ChanStopNode: 
make(chan endProcess.ArgEndProcess), - NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + ShardConsensusGroupSize: 1, + MetaConsensusGroupSize: 1, + Marshalizer: &mock.MarshalizerMock{}, + Hasher: &hashingMocks.HasherMock{}, + Shuffler: nodeShuffler, + EpochStartNotifier: epochStartSubscriber, + BootStorer: bootStorer, + NbShards: nbShards, + EligibleNodes: eligibleMap, + WaitingNodes: map[uint32][]Validator{}, + SelfPublicKey: []byte("test"), + ConsensusGroupCache: &mock.NodesCoordinatorCacheMock{}, + ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, + WaitingListFixEnabledEpoch: 0, + ChanStopNode: make(chan endProcess.ArgEndProcess), + NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, + RefactorPeersMiniBlocksEnableEpoch: 0, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) diff --git a/sharding/nodesCoordinator/shardingArgs.go b/sharding/nodesCoordinator/shardingArgs.go index 2b3173f9aa3..ea49481ba4c 100644 --- a/sharding/nodesCoordinator/shardingArgs.go +++ b/sharding/nodesCoordinator/shardingArgs.go @@ -9,24 +9,25 @@ import ( // ArgNodesCoordinator holds all dependencies required by the nodes coordinator in order to create new instances type ArgNodesCoordinator struct { - ShardConsensusGroupSize int - MetaConsensusGroupSize int - Marshalizer marshal.Marshalizer - Hasher hashing.Hasher - Shuffler NodesShuffler - EpochStartNotifier EpochStartEventNotifier - BootStorer storage.Storer - ShardIDAsObserver uint32 - NbShards uint32 - EligibleNodes map[uint32][]Validator - WaitingNodes map[uint32][]Validator - SelfPublicKey []byte - Epoch uint32 - StartEpoch uint32 - ConsensusGroupCache Cacher - ShuffledOutHandler ShuffledOutHandler - WaitingListFixEnabledEpoch uint32 - ChanStopNode chan endProcess.ArgEndProcess - NodeTypeProvider NodeTypeProviderHandler - IsFullArchive bool + ShardConsensusGroupSize int + MetaConsensusGroupSize int + Marshalizer marshal.Marshalizer + Hasher hashing.Hasher + Shuffler NodesShuffler + 
EpochStartNotifier EpochStartEventNotifier + BootStorer storage.Storer + ShardIDAsObserver uint32 + NbShards uint32 + EligibleNodes map[uint32][]Validator + WaitingNodes map[uint32][]Validator + SelfPublicKey []byte + Epoch uint32 + StartEpoch uint32 + ConsensusGroupCache Cacher + ShuffledOutHandler ShuffledOutHandler + WaitingListFixEnabledEpoch uint32 + ChanStopNode chan endProcess.ArgEndProcess + NodeTypeProvider NodeTypeProviderHandler + IsFullArchive bool + RefactorPeersMiniBlocksEnableEpoch uint32 } diff --git a/update/factory/exportHandlerFactory.go b/update/factory/exportHandlerFactory.go index c2220deed24..ffa2dc83a07 100644 --- a/update/factory/exportHandlerFactory.go +++ b/update/factory/exportHandlerFactory.go @@ -37,76 +37,78 @@ var log = logger.GetOrCreate("update/factory") // ArgsExporter is the argument structure to create a new exporter type ArgsExporter struct { - CoreComponents process.CoreComponentsHolder - CryptoComponents process.CryptoComponentsHolder - HeaderValidator epochStart.HeaderValidator - DataPool dataRetriever.PoolsHolder - StorageService dataRetriever.StorageService - RequestHandler process.RequestHandler - ShardCoordinator sharding.Coordinator - Messenger p2p.Messenger - ActiveAccountsDBs map[state.AccountsDbIdentifier]state.AccountsAdapter - ExistingResolvers dataRetriever.ResolversContainer - ExportFolder string - ExportTriesStorageConfig config.StorageConfig - ExportStateStorageConfig config.StorageConfig - ExportStateKeysConfig config.StorageConfig - MaxTrieLevelInMemory uint - WhiteListHandler process.WhiteListHandler - WhiteListerVerifiedTxs process.WhiteListHandler - InterceptorsContainer process.InterceptorsContainer - NodesCoordinator nodesCoordinator.NodesCoordinator - HeaderSigVerifier process.InterceptedHeaderSigVerifier - HeaderIntegrityVerifier process.HeaderIntegrityVerifier - ValidityAttester process.ValidityAttester - InputAntifloodHandler process.P2PAntifloodHandler - OutputAntifloodHandler 
process.P2PAntifloodHandler - RoundHandler process.RoundHandler - PeersRatingHandler dataRetriever.PeersRatingHandler - InterceptorDebugConfig config.InterceptorResolverDebugConfig - EnableSignTxWithHashEpoch uint32 - MaxHardCapForMissingNodes int - NumConcurrentTrieSyncers int - TrieSyncerVersion int - CheckNodesOnDisk bool + CoreComponents process.CoreComponentsHolder + CryptoComponents process.CryptoComponentsHolder + HeaderValidator epochStart.HeaderValidator + DataPool dataRetriever.PoolsHolder + StorageService dataRetriever.StorageService + RequestHandler process.RequestHandler + ShardCoordinator sharding.Coordinator + Messenger p2p.Messenger + ActiveAccountsDBs map[state.AccountsDbIdentifier]state.AccountsAdapter + ExistingResolvers dataRetriever.ResolversContainer + ExportFolder string + ExportTriesStorageConfig config.StorageConfig + ExportStateStorageConfig config.StorageConfig + ExportStateKeysConfig config.StorageConfig + MaxTrieLevelInMemory uint + WhiteListHandler process.WhiteListHandler + WhiteListerVerifiedTxs process.WhiteListHandler + InterceptorsContainer process.InterceptorsContainer + NodesCoordinator nodesCoordinator.NodesCoordinator + HeaderSigVerifier process.InterceptedHeaderSigVerifier + HeaderIntegrityVerifier process.HeaderIntegrityVerifier + ValidityAttester process.ValidityAttester + InputAntifloodHandler process.P2PAntifloodHandler + OutputAntifloodHandler process.P2PAntifloodHandler + RoundHandler process.RoundHandler + PeersRatingHandler dataRetriever.PeersRatingHandler + InterceptorDebugConfig config.InterceptorResolverDebugConfig + EnableSignTxWithHashEpoch uint32 + MaxHardCapForMissingNodes int + NumConcurrentTrieSyncers int + TrieSyncerVersion int + CheckNodesOnDisk bool + RefactorPeersMiniBlocksEnableEpoch uint32 } type exportHandlerFactory struct { - CoreComponents process.CoreComponentsHolder - CryptoComponents process.CryptoComponentsHolder - headerValidator epochStart.HeaderValidator - dataPool dataRetriever.PoolsHolder - 
storageService dataRetriever.StorageService - requestHandler process.RequestHandler - shardCoordinator sharding.Coordinator - messenger p2p.Messenger - activeAccountsDBs map[state.AccountsDbIdentifier]state.AccountsAdapter - exportFolder string - exportTriesStorageConfig config.StorageConfig - exportStateStorageConfig config.StorageConfig - exportStateKeysConfig config.StorageConfig - maxTrieLevelInMemory uint - whiteListHandler process.WhiteListHandler - whiteListerVerifiedTxs process.WhiteListHandler - interceptorsContainer process.InterceptorsContainer - existingResolvers dataRetriever.ResolversContainer - epochStartTrigger epochStart.TriggerHandler - accounts state.AccountsAdapter - nodesCoordinator nodesCoordinator.NodesCoordinator - headerSigVerifier process.InterceptedHeaderSigVerifier - headerIntegrityVerifier process.HeaderIntegrityVerifier - validityAttester process.ValidityAttester - resolverContainer dataRetriever.ResolversContainer - inputAntifloodHandler process.P2PAntifloodHandler - outputAntifloodHandler process.P2PAntifloodHandler - roundHandler process.RoundHandler - peersRatingHandler dataRetriever.PeersRatingHandler - interceptorDebugConfig config.InterceptorResolverDebugConfig - enableSignTxWithHashEpoch uint32 - maxHardCapForMissingNodes int - numConcurrentTrieSyncers int - trieSyncerVersion int - checkNodesOnDisk bool + CoreComponents process.CoreComponentsHolder + CryptoComponents process.CryptoComponentsHolder + headerValidator epochStart.HeaderValidator + dataPool dataRetriever.PoolsHolder + storageService dataRetriever.StorageService + requestHandler process.RequestHandler + shardCoordinator sharding.Coordinator + messenger p2p.Messenger + activeAccountsDBs map[state.AccountsDbIdentifier]state.AccountsAdapter + exportFolder string + exportTriesStorageConfig config.StorageConfig + exportStateStorageConfig config.StorageConfig + exportStateKeysConfig config.StorageConfig + maxTrieLevelInMemory uint + whiteListHandler 
process.WhiteListHandler + whiteListerVerifiedTxs process.WhiteListHandler + interceptorsContainer process.InterceptorsContainer + existingResolvers dataRetriever.ResolversContainer + epochStartTrigger epochStart.TriggerHandler + accounts state.AccountsAdapter + nodesCoordinator nodesCoordinator.NodesCoordinator + headerSigVerifier process.InterceptedHeaderSigVerifier + headerIntegrityVerifier process.HeaderIntegrityVerifier + validityAttester process.ValidityAttester + resolverContainer dataRetriever.ResolversContainer + inputAntifloodHandler process.P2PAntifloodHandler + outputAntifloodHandler process.P2PAntifloodHandler + roundHandler process.RoundHandler + peersRatingHandler dataRetriever.PeersRatingHandler + interceptorDebugConfig config.InterceptorResolverDebugConfig + enableSignTxWithHashEpoch uint32 + maxHardCapForMissingNodes int + numConcurrentTrieSyncers int + trieSyncerVersion int + checkNodesOnDisk bool + refactorPeersMiniBlocksEnableEpoch uint32 } // NewExportHandlerFactory creates an exporter factory @@ -225,39 +227,40 @@ func NewExportHandlerFactory(args ArgsExporter) (*exportHandlerFactory, error) { } e := &exportHandlerFactory{ - CoreComponents: args.CoreComponents, - CryptoComponents: args.CryptoComponents, - headerValidator: args.HeaderValidator, - dataPool: args.DataPool, - storageService: args.StorageService, - requestHandler: args.RequestHandler, - shardCoordinator: args.ShardCoordinator, - messenger: args.Messenger, - activeAccountsDBs: args.ActiveAccountsDBs, - exportFolder: args.ExportFolder, - exportTriesStorageConfig: args.ExportTriesStorageConfig, - exportStateStorageConfig: args.ExportStateStorageConfig, - exportStateKeysConfig: args.ExportStateKeysConfig, - interceptorsContainer: args.InterceptorsContainer, - whiteListHandler: args.WhiteListHandler, - whiteListerVerifiedTxs: args.WhiteListerVerifiedTxs, - existingResolvers: args.ExistingResolvers, - accounts: args.ActiveAccountsDBs[state.UserAccountsState], - nodesCoordinator: 
args.NodesCoordinator, - headerSigVerifier: args.HeaderSigVerifier, - headerIntegrityVerifier: args.HeaderIntegrityVerifier, - validityAttester: args.ValidityAttester, - inputAntifloodHandler: args.InputAntifloodHandler, - outputAntifloodHandler: args.OutputAntifloodHandler, - maxTrieLevelInMemory: args.MaxTrieLevelInMemory, - roundHandler: args.RoundHandler, - peersRatingHandler: args.PeersRatingHandler, - interceptorDebugConfig: args.InterceptorDebugConfig, - enableSignTxWithHashEpoch: args.EnableSignTxWithHashEpoch, - maxHardCapForMissingNodes: args.MaxHardCapForMissingNodes, - numConcurrentTrieSyncers: args.NumConcurrentTrieSyncers, - trieSyncerVersion: args.TrieSyncerVersion, - checkNodesOnDisk: args.CheckNodesOnDisk, + CoreComponents: args.CoreComponents, + CryptoComponents: args.CryptoComponents, + headerValidator: args.HeaderValidator, + dataPool: args.DataPool, + storageService: args.StorageService, + requestHandler: args.RequestHandler, + shardCoordinator: args.ShardCoordinator, + messenger: args.Messenger, + activeAccountsDBs: args.ActiveAccountsDBs, + exportFolder: args.ExportFolder, + exportTriesStorageConfig: args.ExportTriesStorageConfig, + exportStateStorageConfig: args.ExportStateStorageConfig, + exportStateKeysConfig: args.ExportStateKeysConfig, + interceptorsContainer: args.InterceptorsContainer, + whiteListHandler: args.WhiteListHandler, + whiteListerVerifiedTxs: args.WhiteListerVerifiedTxs, + existingResolvers: args.ExistingResolvers, + accounts: args.ActiveAccountsDBs[state.UserAccountsState], + nodesCoordinator: args.NodesCoordinator, + headerSigVerifier: args.HeaderSigVerifier, + headerIntegrityVerifier: args.HeaderIntegrityVerifier, + validityAttester: args.ValidityAttester, + inputAntifloodHandler: args.InputAntifloodHandler, + outputAntifloodHandler: args.OutputAntifloodHandler, + maxTrieLevelInMemory: args.MaxTrieLevelInMemory, + roundHandler: args.RoundHandler, + peersRatingHandler: args.PeersRatingHandler, + interceptorDebugConfig: 
args.InterceptorDebugConfig, + enableSignTxWithHashEpoch: args.EnableSignTxWithHashEpoch, + maxHardCapForMissingNodes: args.MaxHardCapForMissingNodes, + numConcurrentTrieSyncers: args.NumConcurrentTrieSyncers, + trieSyncerVersion: args.TrieSyncerVersion, + checkNodesOnDisk: args.CheckNodesOnDisk, + refactorPeersMiniBlocksEnableEpoch: args.RefactorPeersMiniBlocksEnableEpoch, } log.Debug("exportHandlerFactory: enable epoch for transaction signed with tx hash", "epoch", e.enableSignTxWithHashEpoch) @@ -287,20 +290,21 @@ func (e *exportHandlerFactory) Create() (update.ExportHandler, error) { return nil, err } argsEpochTrigger := shardchain.ArgsShardEpochStartTrigger{ - Marshalizer: e.CoreComponents.InternalMarshalizer(), - Hasher: e.CoreComponents.Hasher(), - HeaderValidator: e.headerValidator, - Uint64Converter: e.CoreComponents.Uint64ByteSliceConverter(), - DataPool: e.dataPool, - Storage: e.storageService, - RequestHandler: e.requestHandler, - EpochStartNotifier: notifier.NewEpochStartSubscriptionHandler(), - Epoch: 0, - Validity: process.MetaBlockValidity, - Finality: process.BlockFinality, - PeerMiniBlocksSyncer: peerMiniBlocksSyncer, - RoundHandler: e.roundHandler, - AppStatusHandler: e.CoreComponents.StatusHandler(), + Marshalizer: e.CoreComponents.InternalMarshalizer(), + Hasher: e.CoreComponents.Hasher(), + HeaderValidator: e.headerValidator, + Uint64Converter: e.CoreComponents.Uint64ByteSliceConverter(), + DataPool: e.dataPool, + Storage: e.storageService, + RequestHandler: e.requestHandler, + EpochStartNotifier: notifier.NewEpochStartSubscriptionHandler(), + Epoch: 0, + Validity: process.MetaBlockValidity, + Finality: process.BlockFinality, + PeerMiniBlocksSyncer: peerMiniBlocksSyncer, + RoundHandler: e.roundHandler, + AppStatusHandler: e.CoreComponents.StatusHandler(), + RefactorPeersMiniBlocksEnableEpoch: e.refactorPeersMiniBlocksEnableEpoch, } epochHandler, err := shardchain.NewEpochStartTrigger(&argsEpochTrigger) if err != nil { From 
ee80eb5c62e67087b8256ac5c5ca8ef8f1431472 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Fri, 5 Aug 2022 18:15:54 +0300 Subject: [PATCH 40/70] * Cleanup/refactor code --- consensus/spos/bls/subroundStartRound.go | 2 +- .../dataPool/currentEpochValidatorInfoPool.go | 2 +- .../bootstrap/syncValidatorStatus_test.go | 4 +- epochStart/metachain/trigger_test.go | 4 +- epochStart/metachain/validators.go | 193 +++++++++++------- epochStart/metachain/validators_test.go | 4 +- epochStart/shardchain/peerMiniBlocksSyncer.go | 8 +- epochStart/shardchain/trigger_test.go | 2 +- integrationTests/nodesCoordinatorFactory.go | 1 + .../testProcessorNodeWithCoordinator.go | 4 +- .../vm/systemVM/stakingSC_test.go | 28 --- .../preprocess/validatorInfoPreProcessor.go | 151 +------------- .../validatorInfoPreProcessor_test.go | 2 +- process/common.go | 42 ---- sharding/networksharding/peerShardMapper.go | 2 +- 15 files changed, 147 insertions(+), 302 deletions(-) diff --git a/consensus/spos/bls/subroundStartRound.go b/consensus/spos/bls/subroundStartRound.go index 9b5302ef2ef..7a46f3bf978 100644 --- a/consensus/spos/bls/subroundStartRound.go +++ b/consensus/spos/bls/subroundStartRound.go @@ -4,7 +4,6 @@ import ( "context" "encoding/hex" "fmt" - "github.com/ElrondNetwork/elrond-go/epochStart" "sync" "time" @@ -14,6 +13,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/data/indexer" "github.com/ElrondNetwork/elrond-go/common" "github.com/ElrondNetwork/elrond-go/consensus/spos" + "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/outport" "github.com/ElrondNetwork/elrond-go/outport/disabled" ) diff --git a/dataRetriever/dataPool/currentEpochValidatorInfoPool.go b/dataRetriever/dataPool/currentEpochValidatorInfoPool.go index 6e07183c6a0..34ba5a841ab 100644 --- a/dataRetriever/dataPool/currentEpochValidatorInfoPool.go +++ b/dataRetriever/dataPool/currentEpochValidatorInfoPool.go @@ -1,11 +1,11 @@ package dataPool import ( - 
"github.com/ElrondNetwork/elrond-go/state" "sync" "github.com/ElrondNetwork/elrond-go-core/core/check" "github.com/ElrondNetwork/elrond-go/dataRetriever" + "github.com/ElrondNetwork/elrond-go/state" ) var _ dataRetriever.ValidatorInfoCacher = (*validatorInfoMapCacher)(nil) diff --git a/epochStart/bootstrap/syncValidatorStatus_test.go b/epochStart/bootstrap/syncValidatorStatus_test.go index 88e83bf6077..43cfe9b36bf 100644 --- a/epochStart/bootstrap/syncValidatorStatus_test.go +++ b/epochStart/bootstrap/syncValidatorStatus_test.go @@ -2,7 +2,7 @@ package bootstrap import ( "context" - "github.com/ElrondNetwork/elrond-go/epochStart" + validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "testing" "github.com/ElrondNetwork/elrond-go-core/core" @@ -11,6 +11,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go-core/data/endProcess" "github.com/ElrondNetwork/elrond-go/dataRetriever" + "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/epochStart/mock" "github.com/ElrondNetwork/elrond-go/sharding/nodesCoordinator" "github.com/ElrondNetwork/elrond-go/storage" @@ -20,7 +21,6 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" "github.com/ElrondNetwork/elrond-go/testscommon/shardingMocks" - "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/epochStart/metachain/trigger_test.go b/epochStart/metachain/trigger_test.go index a48161364a7..ed0d496e21c 100644 --- a/epochStart/metachain/trigger_test.go +++ b/epochStart/metachain/trigger_test.go @@ -2,8 +2,6 @@ package metachain import ( "errors" - dataRetrieverMock "github.com/ElrondNetwork/elrond-go/testscommon/dataRetriever" - validatorInfoCacherMock 
"github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "math" "math/big" "testing" @@ -17,9 +15,11 @@ import ( "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/epochStart/mock" "github.com/ElrondNetwork/elrond-go/storage" + dataRetrieverMock "github.com/ElrondNetwork/elrond-go/testscommon/dataRetriever" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" statusHandlerMock "github.com/ElrondNetwork/elrond-go/testscommon/statusHandler" storageStubs "github.com/ElrondNetwork/elrond-go/testscommon/storage" + validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/epochStart/metachain/validators.go b/epochStart/metachain/validators.go index 5e301eaef1a..3b42070b057 100644 --- a/epochStart/metachain/validators.go +++ b/epochStart/metachain/validators.go @@ -2,11 +2,11 @@ package metachain import ( "bytes" - "github.com/ElrondNetwork/elrond-go-core/core/atomic" "sort" "sync" "github.com/ElrondNetwork/elrond-go-core/core" + "github.com/ElrondNetwork/elrond-go-core/core/atomic" "github.com/ElrondNetwork/elrond-go-core/core/check" "github.com/ElrondNetwork/elrond-go-core/data" "github.com/ElrondNetwork/elrond-go-core/data/block" @@ -74,8 +74,6 @@ func NewValidatorInfoCreator(args ArgsNewValidatorInfoCreator) (*validatorInfoCr return nil, epochStart.ErrNilEpochNotifier } - //TODO: currValidatorInfoCache := dataPool.NewCurrentEpochValidatorInfoPool() should be replaced by - //args.DataPool.CurrentEpochValidatorInfo(), as this pool is already created vic := &validatorInfoCreator{ shardCoordinator: args.ShardCoordinator, hasher: args.Hasher, @@ -148,12 +146,10 @@ func (vic *validatorInfoCreator) createMiniBlock(validatorsInfo []*state.Validat return bytes.Compare(validatorCopy[a].PublicKey, validatorCopy[b].PublicKey) < 0 }) - validatorInfoCacher := vic.dataPool.CurrentEpochValidatorInfo() - for 
index, validator := range validatorCopy { shardValidatorInfo := createShardValidatorInfo(validator) - shardValidatorInfoData, err := vic.getShardValidatorInfoData(shardValidatorInfo, validatorInfoCacher) + shardValidatorInfoData, err := vic.getShardValidatorInfoData(shardValidatorInfo) if err != nil { return nil, err } @@ -164,13 +160,14 @@ func (vic *validatorInfoCreator) createMiniBlock(validatorsInfo []*state.Validat return miniBlock, nil } -func (vic *validatorInfoCreator) getShardValidatorInfoData(shardValidatorInfo *state.ShardValidatorInfo, validatorInfoCacher dataRetriever.ValidatorInfoCacher) ([]byte, error) { +func (vic *validatorInfoCreator) getShardValidatorInfoData(shardValidatorInfo *state.ShardValidatorInfo) ([]byte, error) { if vic.flagRefactorPeersMiniBlocks.IsSet() { shardValidatorInfoHash, err := core.CalculateHash(vic.marshalizer, vic.hasher, shardValidatorInfo) if err != nil { return nil, err } + validatorInfoCacher := vic.dataPool.CurrentEpochValidatorInfo() validatorInfoCacher.AddValidatorInfo(shardValidatorInfoHash, shardValidatorInfo) return shardValidatorInfoHash, nil } @@ -194,10 +191,7 @@ func createShardValidatorInfo(validator *state.ValidatorInfo) *state.ShardValida } // VerifyValidatorInfoMiniBlocks verifies if received validator info mini blocks are correct -func (vic *validatorInfoCreator) VerifyValidatorInfoMiniBlocks( - miniBlocks []*block.MiniBlock, - validatorsInfo map[uint32][]*state.ValidatorInfo, -) error { +func (vic *validatorInfoCreator) VerifyValidatorInfoMiniBlocks(miniBlocks []*block.MiniBlock, validatorsInfo map[uint32][]*state.ValidatorInfo) error { if len(miniBlocks) == 0 { return epochStart.ErrNilMiniblocks } @@ -255,12 +249,15 @@ func (vic *validatorInfoCreator) GetLocalValidatorInfoCache() epochStart.Validat // CreateMarshalledData creates the marshalled data to be sent to shards func (vic *validatorInfoCreator) CreateMarshalledData(body *block.Body) map[string][][]byte { + if 
!vic.flagRefactorPeersMiniBlocks.IsSet() { + return nil + } + if check.IfNil(body) { return nil } marshalledValidatorInfoTxs := make(map[string][][]byte) - currentEpochValidatorInfo := vic.dataPool.CurrentEpochValidatorInfo() for _, miniBlock := range body.MiniBlocks { if miniBlock.Type != block.PeerBlock { @@ -276,21 +273,7 @@ func (vic *validatorInfoCreator) CreateMarshalledData(body *block.Body) map[stri marshalledValidatorInfoTxs[broadcastTopic] = make([][]byte, 0, len(miniBlock.TxHashes)) } - for _, txHash := range miniBlock.TxHashes { - validatorInfoTx, err := currentEpochValidatorInfo.GetValidatorInfo(txHash) - if err != nil { - log.Error("validatorInfoCreator.CreateMarshalledData.GetValidatorInfo", "hash", txHash, "error", err) - continue - } - - marshalledData, err := vic.marshalizer.Marshal(validatorInfoTx) - if err != nil { - log.Error("validatorInfoCreator.CreateMarshalledData.Marshal", "hash", txHash, "error", err) - continue - } - - marshalledValidatorInfoTxs[broadcastTopic] = append(marshalledValidatorInfoTxs[broadcastTopic], marshalledData) - } + vic.setMarshalledValidatorInfoTxs(miniBlock, marshalledValidatorInfoTxs, broadcastTopic) if len(marshalledValidatorInfoTxs[broadcastTopic]) == 0 { delete(marshalledValidatorInfoTxs, broadcastTopic) @@ -300,26 +283,71 @@ func (vic *validatorInfoCreator) CreateMarshalledData(body *block.Body) map[stri return marshalledValidatorInfoTxs } +func (vic *validatorInfoCreator) setMarshalledValidatorInfoTxs(miniBlock *block.MiniBlock, marshalledValidatorInfoTxs map[string][][]byte, broadcastTopic string) { + validatorInfoCacher := vic.dataPool.CurrentEpochValidatorInfo() + + for _, txHash := range miniBlock.TxHashes { + validatorInfoTx, err := validatorInfoCacher.GetValidatorInfo(txHash) + if err != nil { + log.Error("validatorInfoCreator.setMarshalledValidatorInfoTxs.GetValidatorInfo", "hash", txHash, "error", err) + continue + } + + marshalledData, err := vic.marshalizer.Marshal(validatorInfoTx) + if err != nil { + 
log.Error("validatorInfoCreator.setMarshalledValidatorInfoTxs.Marshal", "hash", txHash, "error", err) + continue + } + + marshalledValidatorInfoTxs[broadcastTopic] = append(marshalledValidatorInfoTxs[broadcastTopic], marshalledData) + } +} + // GetValidatorInfoTxs returns validator info txs for the current epoch func (vic *validatorInfoCreator) GetValidatorInfoTxs(body *block.Body) map[string]*state.ShardValidatorInfo { - validatorInfoTxs := make(map[string]*state.ShardValidatorInfo) - currentEpochValidatorInfo := vic.dataPool.CurrentEpochValidatorInfo() + mapShardValidatorInfo := make(map[string]*state.ShardValidatorInfo) + for _, miniBlock := range body.MiniBlocks { if miniBlock.Type != block.PeerBlock { continue } - for _, txHash := range miniBlock.TxHashes { - validatorInfoTx, err := currentEpochValidatorInfo.GetValidatorInfo(txHash) - if err != nil { - continue - } + vic.getMapShardValidatorInfo(miniBlock, mapShardValidatorInfo) + } - validatorInfoTxs[string(txHash)] = validatorInfoTx + return mapShardValidatorInfo +} + +func (vic *validatorInfoCreator) getMapShardValidatorInfo(miniBlock *block.MiniBlock, mapShardValidatorInfo map[string]*state.ShardValidatorInfo) { + for _, txHash := range miniBlock.TxHashes { + shardValidatorInfo, err := vic.getShardValidatorInfo(txHash) + if err != nil { + log.Error("validatorInfoCreator.getMapShardValidatorInfo", "hash", txHash, "error", err) + continue } + + mapShardValidatorInfo[string(txHash)] = shardValidatorInfo + } +} + +func (vic *validatorInfoCreator) getShardValidatorInfo(txHash []byte) (*state.ShardValidatorInfo, error) { + if vic.flagRefactorPeersMiniBlocks.IsSet() { + validatorInfoCacher := vic.dataPool.CurrentEpochValidatorInfo() + shardValidatorInfo, err := validatorInfoCacher.GetValidatorInfo(txHash) + if err != nil { + return nil, err + } + + return shardValidatorInfo, nil } - return validatorInfoTxs + shardValidatorInfo := &state.ShardValidatorInfo{} + err := vic.marshalizer.Unmarshal(shardValidatorInfo, 
txHash) + if err != nil { + return nil, err + } + + return shardValidatorInfo, nil } // SaveBlockDataToStorage saves block data to storage @@ -328,37 +356,45 @@ func (vic *validatorInfoCreator) SaveBlockDataToStorage(_ data.HeaderHandler, bo return } - var validatorInfo *state.ShardValidatorInfo - var marshalledData []byte - var err error - currentEpochValidatorInfo := vic.dataPool.CurrentEpochValidatorInfo() - for _, miniBlock := range body.MiniBlocks { if miniBlock.Type != block.PeerBlock { continue } - for _, validatorInfoHash := range miniBlock.TxHashes { - validatorInfo, err = currentEpochValidatorInfo.GetValidatorInfo(validatorInfoHash) - if err != nil { - continue - } + vic.saveValidatorInfo(miniBlock) + + marshalledData, err := vic.marshalizer.Marshal(miniBlock) + if err != nil { + log.Error("validatorInfoCreator.SaveBlockDataToStorage.Marshal", "error", err) + continue + } + + mbHash := vic.hasher.Compute(string(marshalledData)) + _ = vic.miniBlockStorage.Put(mbHash, marshalledData) + } +} + +func (vic *validatorInfoCreator) saveValidatorInfo(miniBlock *block.MiniBlock) { + if !vic.flagRefactorPeersMiniBlocks.IsSet() { + return + } - marshalledData, err = vic.marshalizer.Marshal(validatorInfo) - if err != nil { - continue - } + validatorInfoCacher := vic.dataPool.CurrentEpochValidatorInfo() - _ = vic.validatorInfoStorage.Put(validatorInfoHash, marshalledData) + for _, validatorInfoHash := range miniBlock.TxHashes { + validatorInfo, err := validatorInfoCacher.GetValidatorInfo(validatorInfoHash) + if err != nil { + log.Error("validatorInfoCreator.saveValidatorInfo.GetValidatorInfo", "hash", validatorInfoHash, "error", err) + continue } - marshalledData, err = vic.marshalizer.Marshal(miniBlock) + marshalledData, err := vic.marshalizer.Marshal(validatorInfo) if err != nil { + log.Error("validatorInfoCreator.saveValidatorInfo.Marshal", "hash", validatorInfoHash, "error", err) continue } - mbHash := vic.hasher.Compute(string(marshalledData)) - _ = 
vic.miniBlockStorage.Put(mbHash, marshalledData) + _ = vic.validatorInfoStorage.Put(validatorInfoHash, marshalledData) } } @@ -368,6 +404,18 @@ func (vic *validatorInfoCreator) DeleteBlockDataFromStorage(metaBlock data.Heade return } + if vic.flagRefactorPeersMiniBlocks.IsSet() { + vic.removeValidatorInfoFromStorage(body) + } + + for _, mbHeader := range metaBlock.GetMiniBlockHeaderHandlers() { + if mbHeader.GetTypeInt32() == int32(block.PeerBlock) { + _ = vic.miniBlockStorage.Remove(mbHeader.GetHash()) + } + } +} + +func (vic *validatorInfoCreator) removeValidatorInfoFromStorage(body *block.Body) { for _, miniBlock := range body.MiniBlocks { if miniBlock.Type != block.PeerBlock { continue @@ -377,12 +425,6 @@ func (vic *validatorInfoCreator) DeleteBlockDataFromStorage(metaBlock data.Heade _ = vic.validatorInfoStorage.Remove(txHash) } } - - for _, mbHeader := range metaBlock.GetMiniBlockHeaderHandlers() { - if mbHeader.GetTypeInt32() == int32(block.PeerBlock) { - _ = vic.miniBlockStorage.Remove(mbHeader.GetHash()) - } - } } // RemoveBlockDataFromPools removes block data from pools @@ -391,19 +433,12 @@ func (vic *validatorInfoCreator) RemoveBlockDataFromPools(metaBlock data.HeaderH return } - miniBlocksPool := vic.dataPool.MiniBlocks() - validatorInfoPool := vic.dataPool.ValidatorsInfo() - - for _, miniBlock := range body.MiniBlocks { - if miniBlock.Type != block.PeerBlock { - continue - } - - for _, txHash := range miniBlock.TxHashes { - validatorInfoPool.RemoveDataFromAllShards(txHash) - } + if vic.flagRefactorPeersMiniBlocks.IsSet() { + vic.removeValidatorInfoFromPool(body) } + miniBlocksPool := vic.dataPool.MiniBlocks() + for _, mbHeader := range metaBlock.GetMiniBlockHeaderHandlers() { if mbHeader.GetTypeInt32() != int32(block.PeerBlock) { continue @@ -420,12 +455,26 @@ func (vic *validatorInfoCreator) RemoveBlockDataFromPools(metaBlock data.HeaderH } } +func (vic *validatorInfoCreator) removeValidatorInfoFromPool(body *block.Body) { + validatorInfoPool := 
vic.dataPool.ValidatorsInfo() + + for _, miniBlock := range body.MiniBlocks { + if miniBlock.Type != block.PeerBlock { + continue + } + + for _, txHash := range miniBlock.TxHashes { + validatorInfoPool.RemoveDataFromAllShards(txHash) + } + } +} + func (vic *validatorInfoCreator) clean() { currentEpochValidatorInfo := vic.dataPool.CurrentEpochValidatorInfo() currentEpochValidatorInfo.Clean() } -// IsInterfaceNil return true if underlying object is nil +// IsInterfaceNil returns true if underlying object is nil func (vic *validatorInfoCreator) IsInterfaceNil() bool { return vic == nil } diff --git a/epochStart/metachain/validators_test.go b/epochStart/metachain/validators_test.go index e7c52f93205..6d0891bbe83 100644 --- a/epochStart/metachain/validators_test.go +++ b/epochStart/metachain/validators_test.go @@ -3,7 +3,6 @@ package metachain import ( "bytes" "errors" - "github.com/ElrondNetwork/elrond-go/testscommon/epochNotifier" "math/big" "reflect" "sort" @@ -19,8 +18,9 @@ import ( "github.com/ElrondNetwork/elrond-go/storage" "github.com/ElrondNetwork/elrond-go/testscommon" dataRetrieverMock "github.com/ElrondNetwork/elrond-go/testscommon/dataRetriever" + "github.com/ElrondNetwork/elrond-go/testscommon/epochNotifier" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" - "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" + validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/stretchr/testify/require" ) diff --git a/epochStart/shardchain/peerMiniBlocksSyncer.go b/epochStart/shardchain/peerMiniBlocksSyncer.go index 80bbfa6245e..a5b5e97b476 100644 --- a/epochStart/shardchain/peerMiniBlocksSyncer.go +++ b/epochStart/shardchain/peerMiniBlocksSyncer.go @@ -219,9 +219,10 @@ func (p *peerMiniBlockSyncer) getAllValidatorsInfo(body *block.Body) map[string] } func (p *peerMiniBlockSyncer) computeMissingPeerBlocks(metaBlock data.HeaderHandler) { - numMissingPeerMiniBlocks := uint32(0) 
p.mutMiniBlocksForBlock.Lock() + defer p.mutMiniBlocksForBlock.Unlock() + numMissingPeerMiniBlocks := uint32(0) for _, mb := range metaBlock.GetMiniBlockHeaderHandlers() { if mb.GetTypeInt32() != int32(block.PeerBlock) { continue @@ -245,13 +246,13 @@ func (p *peerMiniBlockSyncer) computeMissingPeerBlocks(metaBlock data.HeaderHand } p.numMissingPeerMiniBlocks = numMissingPeerMiniBlocks - p.mutMiniBlocksForBlock.Unlock() } func (p *peerMiniBlockSyncer) computeMissingValidatorsInfo(body *block.Body) { - numMissingValidatorsInfo := uint32(0) p.mutValidatorsInfoForBlock.Lock() + defer p.mutValidatorsInfoForBlock.Unlock() + numMissingValidatorsInfo := uint32(0) for _, mb := range body.MiniBlocks { if mb.Type != block.PeerBlock { continue @@ -277,7 +278,6 @@ func (p *peerMiniBlockSyncer) computeMissingValidatorsInfo(body *block.Body) { } p.numMissingValidatorsInfo = numMissingValidatorsInfo - p.mutValidatorsInfoForBlock.Unlock() } func (p *peerMiniBlockSyncer) retrieveMissingMiniBlocks() ([][]byte, error) { diff --git a/epochStart/shardchain/trigger_test.go b/epochStart/shardchain/trigger_test.go index b6e0b0c4fcf..706177a89c8 100644 --- a/epochStart/shardchain/trigger_test.go +++ b/epochStart/shardchain/trigger_test.go @@ -3,7 +3,6 @@ package shardchain import ( "bytes" "fmt" - validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "testing" "github.com/ElrondNetwork/elrond-go-core/core" @@ -19,6 +18,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" statusHandlerMock "github.com/ElrondNetwork/elrond-go/testscommon/statusHandler" storageStubs "github.com/ElrondNetwork/elrond-go/testscommon/storage" + validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/integrationTests/nodesCoordinatorFactory.go b/integrationTests/nodesCoordinatorFactory.go index 832aadabaa4..b6c18b4ba9d 100644 
--- a/integrationTests/nodesCoordinatorFactory.go +++ b/integrationTests/nodesCoordinatorFactory.go @@ -2,6 +2,7 @@ package integrationTests import ( "fmt" + "github.com/ElrondNetwork/elrond-go-core/data/endProcess" "github.com/ElrondNetwork/elrond-go-core/hashing" "github.com/ElrondNetwork/elrond-go/integrationTests/mock" diff --git a/integrationTests/testProcessorNodeWithCoordinator.go b/integrationTests/testProcessorNodeWithCoordinator.go index 58b86ec9a6d..bc8d867f471 100644 --- a/integrationTests/testProcessorNodeWithCoordinator.go +++ b/integrationTests/testProcessorNodeWithCoordinator.go @@ -97,7 +97,7 @@ func CreateProcessorNodesWithNodesCoordinator( RefactorPeersMiniBlocksEnableEpoch: 0, } - coordinator, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) + indexHashedNodesCoordinator, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) if err != nil { fmt.Println("error creating node coordinator") } @@ -105,7 +105,7 @@ func CreateProcessorNodesWithNodesCoordinator( tpn := newTestProcessorNodeWithCustomNodesCoordinator( numShards, shardId, - coordinator, + indexHashedNodesCoordinator, i, ncp, nodesSetup, diff --git a/integrationTests/vm/systemVM/stakingSC_test.go b/integrationTests/vm/systemVM/stakingSC_test.go index 5dc3980729e..d2969bd000a 100644 --- a/integrationTests/vm/systemVM/stakingSC_test.go +++ b/integrationTests/vm/systemVM/stakingSC_test.go @@ -128,8 +128,6 @@ func TestStakingUnstakingAndUnbondingOnMultiShardEnvironmentWithValidatorStatist numOfShards := 2 nodesPerShard := 2 numMetachainNodes := 2 - //shardConsensusGroupSize := 1 - //metaConsensusGroupSize := 1 nodes := integrationTests.CreateNodes( numOfShards, @@ -143,28 +141,6 @@ func TestStakingUnstakingAndUnbondingOnMultiShardEnvironmentWithValidatorStatist } idxProposers[numOfShards] = numOfShards * nodesPerShard - //nodesMap := integrationTests.CreateNodesWithNodesCoordinator( - // nodesPerShard, - // numMetachainNodes, - // 
numOfShards, - // shardConsensusGroupSize, - // metaConsensusGroupSize, - //) - - //nodes := make([]*integrationTests.TestProcessorNode, 0) - //idxProposers := make([]int, numOfShards+1) - // - //for _, nds := range nodesMap { - // nodes = append(nodes, nds...) - //} - // - //for _, nds := range nodesMap { - // idx, err := integrationTestsVm.GetNodeIndex(nodes, nds[0]) - // require.Nil(t, err) - // - // idxProposers = append(idxProposers, idx) - //} - integrationTests.DisplayAndStartNodes(nodes) defer func() { @@ -173,10 +149,6 @@ func TestStakingUnstakingAndUnbondingOnMultiShardEnvironmentWithValidatorStatist } }() - //for _, nds := range nodesMap { - // fmt.Println(integrationTests.MakeDisplayTable(nds)) - //} - initialVal := big.NewInt(10000000000) integrationTests.MintAllNodes(nodes, initialVal) verifyInitialBalance(t, nodes, initialVal) diff --git a/process/block/preprocess/validatorInfoPreProcessor.go b/process/block/preprocess/validatorInfoPreProcessor.go index fedeefaf6d2..b7e8abc587b 100644 --- a/process/block/preprocess/validatorInfoPreProcessor.go +++ b/process/block/preprocess/validatorInfoPreProcessor.go @@ -1,10 +1,10 @@ package preprocess import ( - "github.com/ElrondNetwork/elrond-go-core/core/atomic" "time" "github.com/ElrondNetwork/elrond-go-core/core" + "github.com/ElrondNetwork/elrond-go-core/core/atomic" "github.com/ElrondNetwork/elrond-go-core/core/check" "github.com/ElrondNetwork/elrond-go-core/data" "github.com/ElrondNetwork/elrond-go-core/data/block" @@ -89,34 +89,9 @@ func NewValidatorInfoPreprocessor( return vip, nil } -// waitForValidatorsInfoHashes waits for a call whether all the requested validators info appeared -//func (vip *validatorInfoPreprocessor) waitForValidatorsInfoHashes(waitTime time.Duration) error { -// select { -// case <-vip.chReceivedAllValidatorsInfo: -// return nil -// case <-time.After(waitTime): -// return process.ErrTimeIsOut -// } -//} - // IsDataPrepared returns non error if all the requested validators info 
arrived and were saved into the pool func (vip *validatorInfoPreprocessor) IsDataPrepared(_ int, _ func() time.Duration) error { return nil - //if requestedValidatorsInfo > 0 { - // log.Debug("requested missing validators info", - // "num validators info", requestedValidatorsInfo) - // err := vip.waitForValidatorsInfoHashes(haveTime()) - // vip.validatorsInfoForBlock.mutTxsForBlock.Lock() - // missingValidatorsInfo := vip.validatorsInfoForBlock.missingTxs - // vip.validatorsInfoForBlock.missingTxs = 0 - // vip.validatorsInfoForBlock.mutTxsForBlock.Unlock() - // log.Debug("received validators info", - // "num validators info", requestedValidatorsInfo-missingValidatorsInfo) - // if err != nil { - // return err - // } - //} - //return nil } // RemoveBlockDataFromPools removes the peer miniblocks from pool @@ -203,28 +178,9 @@ func (vip *validatorInfoPreprocessor) ProcessBlockTransactions( return nil } -// SaveTxsToStorage saves the validators info from body into storage +// SaveTxsToStorage does nothing func (vip *validatorInfoPreprocessor) SaveTxsToStorage(_ *block.Body) error { return nil - //if check.IfNil(body) { - // return process.ErrNilBlockBody - //} - // - //for i := 0; i < len(body.MiniBlocks); i++ { - // miniBlock := body.MiniBlocks[i] - // if miniBlock.Type != block.PeerBlock { - // continue - // } - // - // vip.saveTxsToStorage( - // miniBlock.TxHashes, - // &vip.validatorsInfoForBlock, - // vip.storage, - // dataRetriever.UnsignedTransactionUnit, - // ) - //} - // - //return nil } // receivedValidatorInfoTransaction is a callback function called when a new validator info transaction @@ -236,12 +192,7 @@ func (vip *validatorInfoPreprocessor) receivedValidatorInfoTransaction(_ []byte, return } - log.Debug("validatorInfoPreprocessor.receivedValidatorInfoTransaction", "pk", validatorInfo.PublicKey) - //receivedAllMissing := vip.baseReceivedTransaction(key, tx, &vip.validatorsInfoForBlock) - // - //if receivedAllMissing { - // vip.chReceivedAllValidatorsInfo 
<- true - //} + log.Trace("validatorInfoPreprocessor.receivedValidatorInfoTransaction", "pk", validatorInfo.PublicKey) } // CreateBlockStarted cleans the local cache map for processed/created validators info at this round @@ -254,88 +205,16 @@ func (vip *validatorInfoPreprocessor) CreateBlockStarted() { vip.validatorsInfoForBlock.mutTxsForBlock.Unlock() } -// RequestBlockTransactions request for validators info if missing from a block.Body +// RequestBlockTransactions does nothing func (vip *validatorInfoPreprocessor) RequestBlockTransactions(_ *block.Body) int { return 0 - //if check.IfNil(body) { - // return 0 - //} - // - //return vip.computeExistingAndRequestMissingValidatorsInfoForShards(body) } -// computeExistingAndRequestMissingValidatorsInfoForShards calculates what validators info are available and requests -// what are missing from block.Body -//func (vip *validatorInfoPreprocessor) computeExistingAndRequestMissingValidatorsInfoForShards(body *block.Body) int { -// validatorsInfoBody := block.Body{} -// for _, mb := range body.MiniBlocks { -// if mb.Type != block.PeerBlock { -// continue -// } -// if mb.SenderShardID != core.MetachainShardId { -// continue -// } -// -// validatorsInfoBody.MiniBlocks = append(validatorsInfoBody.MiniBlocks, mb) -// } -// -// numMissingTxsForShards := vip.computeExistingAndRequestMissing( -// &validatorsInfoBody, -// &vip.validatorsInfoForBlock, -// vip.chReceivedAllValidatorsInfo, -// vip.isMiniBlockCorrect, -// vip.validatorsInfoPool, -// vip.onRequestValidatorsInfoWithShard, -// ) -// -// return numMissingTxsForShards -//} - -//func (vip *validatorInfoPreprocessor) onRequestValidatorsInfoWithShard(_ uint32, txHashes [][]byte) { -// vip.onRequestValidatorsInfo(txHashes) -//} - -// RequestTransactionsForMiniBlock requests missing validators info for a certain miniblock +// RequestTransactionsForMiniBlock does nothing func (vip *validatorInfoPreprocessor) RequestTransactionsForMiniBlock(_ *block.MiniBlock) int { return 0 - 
//if miniBlock == nil { - // return 0 - //} - // - //missingValidatorsInfoHashesForMiniBlock := vip.computeMissingValidatorsInfoHashesForMiniBlock(miniBlock) - //if len(missingValidatorsInfoHashesForMiniBlock) > 0 { - // vip.onRequestValidatorsInfo(missingValidatorsInfoHashesForMiniBlock) - //} - // - //return len(missingValidatorsInfoHashesForMiniBlock) } -// computeMissingValidatorsInfoHashesForMiniBlock computes missing validators info hashes for a certain miniblock -//func (vip *validatorInfoPreprocessor) computeMissingValidatorsInfoHashesForMiniBlock(miniBlock *block.MiniBlock) [][]byte { -// missingValidatorsInfoHashes := make([][]byte, 0) -// return missingValidatorsInfoHashes -// -// if miniBlock.Type != block.PeerBlock { -// return missingValidatorsInfoHashes -// } -// -// for _, txHash := range miniBlock.TxHashes { -// validatorInfo, _ := process.GetValidatorInfoFromPool( -// miniBlock.SenderShardID, -// miniBlock.ReceiverShardID, -// txHash, -// vip.validatorsInfoPool, -// false, -// ) -// -// if validatorInfo == nil { -// missingValidatorsInfoHashes = append(missingValidatorsInfoHashes, txHash) -// } -// } -// -// return missingValidatorsInfoHashes -//} - // CreateAndProcessMiniBlocks does nothing func (vip *validatorInfoPreprocessor) CreateAndProcessMiniBlocks(_ func() bool, _ []byte) (block.MiniBlockSlice, error) { // validatorsInfo are created only by meta @@ -369,28 +248,14 @@ func (vip *validatorInfoPreprocessor) ProcessMiniBlock( return nil, len(miniBlock.TxHashes) - 1, false, nil } -// CreateMarshalledData marshals validators info hashes and saves them into a new structure +// CreateMarshalledData does nothing func (vip *validatorInfoPreprocessor) CreateMarshalledData(_ [][]byte) ([][]byte, error) { return make([][]byte, 0), nil - //marshalledValidatorsInfo, err := vip.createMarshalledData(txHashes, &vip.validatorsInfoForBlock) - //if err != nil { - // return nil, err - //} - // - //return marshalledValidatorsInfo, nil } -// GetAllCurrentUsedTxs 
returns all the validators info used at current creation / processing +// GetAllCurrentUsedTxs does nothing func (vip *validatorInfoPreprocessor) GetAllCurrentUsedTxs() map[string]data.TransactionHandler { return make(map[string]data.TransactionHandler) - //vip.validatorsInfoForBlock.mutTxsForBlock.RLock() - //validatorsInfoPool := make(map[string]data.TransactionHandler, len(vip.validatorsInfoForBlock.txHashAndInfo)) - //for txHash, txData := range vip.validatorsInfoForBlock.txHashAndInfo { - // validatorsInfoPool[txHash] = txData.tx - //} - //vip.validatorsInfoForBlock.mutTxsForBlock.RUnlock() - // - //return validatorsInfoPool } // AddTxsFromMiniBlocks does nothing @@ -401,7 +266,7 @@ func (vip *validatorInfoPreprocessor) AddTxsFromMiniBlocks(_ block.MiniBlockSlic func (vip *validatorInfoPreprocessor) AddTransactions(_ []data.TransactionHandler) { } -// IsInterfaceNil does nothing +// IsInterfaceNil returns true if there is no value under the interface func (vip *validatorInfoPreprocessor) IsInterfaceNil() bool { return vip == nil } diff --git a/process/block/preprocess/validatorInfoPreProcessor_test.go b/process/block/preprocess/validatorInfoPreProcessor_test.go index f75503d12a9..0eec9851f1b 100644 --- a/process/block/preprocess/validatorInfoPreProcessor_test.go +++ b/process/block/preprocess/validatorInfoPreProcessor_test.go @@ -1,7 +1,6 @@ package preprocess import ( - "github.com/ElrondNetwork/elrond-go/testscommon/epochNotifier" "testing" "github.com/ElrondNetwork/elrond-go-core/core" @@ -9,6 +8,7 @@ import ( "github.com/ElrondNetwork/elrond-go/process" "github.com/ElrondNetwork/elrond-go/process/mock" "github.com/ElrondNetwork/elrond-go/testscommon" + "github.com/ElrondNetwork/elrond-go/testscommon/epochNotifier" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" "github.com/stretchr/testify/assert" ) diff --git a/process/common.go b/process/common.go index db0e87b050a..9b10cba7774 100644 --- a/process/common.go +++ b/process/common.go @@ 
-409,48 +409,6 @@ func GetTransactionHandlerFromPool( return tx, nil } -// GetValidatorInfoFromPool gets the validator info from pool with a given sender/receiver shardId and txHash -func GetValidatorInfoFromPool( - senderShardID uint32, - destShardID uint32, - validatorInfoHash []byte, - shardedDataCacherNotifier dataRetriever.ShardedDataCacherNotifier, - searchFirst bool, -) (*state.ShardValidatorInfo, error) { - - if shardedDataCacherNotifier == nil { - return nil, ErrNilShardedDataCacherNotifier - } - - var val interface{} - ok := false - if searchFirst { - val, ok = shardedDataCacherNotifier.SearchFirstData(validatorInfoHash) - if !ok { - return nil, ErrValidatorInfoNotFound - } - } else { - strCache := ShardCacherIdentifier(senderShardID, destShardID) - txStore := shardedDataCacherNotifier.ShardDataStore(strCache) - if txStore == nil { - return nil, ErrNilStorage - } - - val, ok = txStore.Peek(validatorInfoHash) - } - - if !ok { - return nil, ErrValidatorInfoNotFound - } - - validatorInfo, ok := val.(*state.ShardValidatorInfo) - if !ok { - return nil, ErrInvalidValidatorInfoInPool - } - - return validatorInfo, nil -} - // GetTransactionHandlerFromStorage gets the transaction from storage with a given sender/receiver shardId and txHash func GetTransactionHandlerFromStorage( txHash []byte, diff --git a/sharding/networksharding/peerShardMapper.go b/sharding/networksharding/peerShardMapper.go index b9fb314576d..47548098e85 100644 --- a/sharding/networksharding/peerShardMapper.go +++ b/sharding/networksharding/peerShardMapper.go @@ -4,7 +4,6 @@ import ( "bytes" "encoding/hex" "fmt" - "github.com/ElrondNetwork/elrond-go/epochStart" "sync" "github.com/ElrondNetwork/elrond-go-core/core" @@ -12,6 +11,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/data" logger "github.com/ElrondNetwork/elrond-go-logger" "github.com/ElrondNetwork/elrond-go/common" + "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/p2p" 
"github.com/ElrondNetwork/elrond-go/sharding/nodesCoordinator" "github.com/ElrondNetwork/elrond-go/storage" From 5a9b821c97384b99a28342ddf3acb70d4f1809b0 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Mon, 8 Aug 2022 18:34:29 +0300 Subject: [PATCH 41/70] * Additional cleaning mechanism introduced in peers mini block data syncer (requestMissingValidatorsInfo and requestMissingMiniBlocks) --- epochStart/shardchain/peerMiniBlocksSyncer.go | 6 +++--- epochStart/shardchain/trigger.go | 18 ++++++++++++++++-- 2 files changed, 19 insertions(+), 5 deletions(-) diff --git a/epochStart/shardchain/peerMiniBlocksSyncer.go b/epochStart/shardchain/peerMiniBlocksSyncer.go index a5b5e97b476..2888a21eeec 100644 --- a/epochStart/shardchain/peerMiniBlocksSyncer.go +++ b/epochStart/shardchain/peerMiniBlocksSyncer.go @@ -261,19 +261,19 @@ func (p *peerMiniBlockSyncer) computeMissingValidatorsInfo(body *block.Body) { for _, txHash := range mb.TxHashes { p.mapAllValidatorsInfo[string(txHash)] = nil - mbObjectFound, ok := p.validatorsInfoPool.SearchFirstData(txHash) + validatorInfoObjectFound, ok := p.validatorsInfoPool.SearchFirstData(txHash) if !ok { numMissingValidatorsInfo++ continue } - mbFound, ok := mbObjectFound.(*state.ShardValidatorInfo) + validatorInfo, ok := validatorInfoObjectFound.(*state.ShardValidatorInfo) if !ok { numMissingValidatorsInfo++ continue } - p.mapAllValidatorsInfo[string(txHash)] = mbFound + p.mapAllValidatorsInfo[string(txHash)] = validatorInfo } } diff --git a/epochStart/shardchain/trigger.go b/epochStart/shardchain/trigger.go index c1c1b6c60a6..559929b59b1 100644 --- a/epochStart/shardchain/trigger.go +++ b/epochStart/shardchain/trigger.go @@ -297,12 +297,19 @@ func (t *trigger) requestMissingMiniBlocks(ctx context.Context) { for { select { case <-ctx.Done(): - log.Debug("trigger's go routine is stopping...") + log.Debug("requestMissingMiniBlocks: trigger's go routine is stopping...") return case <-time.After(sleepTime): } 
t.mutMissingMiniBlocks.RLock() + + for hash, epochOfMissingMb := range t.mapMissingMiniBlocks { + if epochOfMissingMb <= t.metaEpoch { + delete(t.mapMissingMiniBlocks, hash) + } + } + if len(t.mapMissingMiniBlocks) == 0 { t.mutMissingMiniBlocks.RUnlock() continue @@ -332,12 +339,19 @@ func (t *trigger) requestMissingValidatorsInfo(ctx context.Context) { for { select { case <-ctx.Done(): - log.Debug("trigger's go routine is stopping...") + log.Debug("requestMissingValidatorsInfo: trigger's go routine is stopping...") return case <-time.After(sleepTime): } t.mutMissingValidatorsInfo.RLock() + + for hash, epochOfMissingValidatorInfo := range t.mapMissingValidatorsInfo { + if epochOfMissingValidatorInfo <= t.metaEpoch { + delete(t.mapMissingValidatorsInfo, hash) + } + } + if len(t.mapMissingValidatorsInfo) == 0 { t.mutMissingValidatorsInfo.RUnlock() continue From f92b32f81925572607229f53463ce5206826e6c5 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Tue, 9 Aug 2022 14:03:28 +0300 Subject: [PATCH 42/70] * Added support for requesting validators info with given epoch * Added some extra logs --- .../requestHandlers/requestHandler.go | 12 +++--- .../requestHandlers/requestHandler_test.go | 6 +-- epochStart/interface.go | 2 +- epochStart/metachain/validators.go | 14 +++---- epochStart/mock/validatorInfoSyncerStub.go | 2 +- epochStart/shardchain/peerMiniBlocksSyncer.go | 20 +++++++--- epochStart/shardchain/trigger.go | 38 ++++++++++--------- genesis/process/disabled/requestHandler.go | 2 +- .../preprocess/validatorInfoPreProcessor.go | 9 +++-- .../validatorInfoPreProcessor_test.go | 30 +++++++-------- process/interface.go | 4 +- testscommon/requestHandlerStub.go | 6 +-- update/sync/syncTransactions.go | 10 ++--- update/sync/syncTransactions_test.go | 16 ++++---- 14 files changed, 90 insertions(+), 81 deletions(-) diff --git a/dataRetriever/requestHandlers/requestHandler.go b/dataRetriever/requestHandlers/requestHandler.go index fda20a0e149..9278e958bce 100644 --- 
a/dataRetriever/requestHandlers/requestHandler.go +++ b/dataRetriever/requestHandlers/requestHandler.go @@ -592,8 +592,8 @@ func (rrh *resolverRequestHandler) RequestValidatorInfo(hash []byte) { rrh.addRequestedItems([][]byte{hash}, uniqueValidatorInfoSuffix) } -// RequestValidatorsInfo asks for the validators` info associated with the specified hashes from connected peers -func (rrh *resolverRequestHandler) RequestValidatorsInfo(hashes [][]byte) { +// RequestValidatorsInfo asks for the validators` info associated with the specified hashes and epoch from connected peers +func (rrh *resolverRequestHandler) RequestValidatorsInfo(hashes [][]byte, epoch uint32) { unrequestedHashes := rrh.getUnrequestedHashes(hashes, uniqueValidatorInfoSuffix) if len(unrequestedHashes) == 0 { return @@ -602,7 +602,7 @@ func (rrh *resolverRequestHandler) RequestValidatorsInfo(hashes [][]byte) { log.Debug("requesting validator info messages from network", "topic", common.ValidatorInfoTopic, "num hashes", len(unrequestedHashes), - "epoch", rrh.epoch, + "epoch", epoch, ) resolver, err := rrh.resolversFinder.MetaChainResolver(common.ValidatorInfoTopic) @@ -611,7 +611,7 @@ func (rrh *resolverRequestHandler) RequestValidatorsInfo(hashes [][]byte) { "error", err.Error(), "topic", common.ValidatorInfoTopic, "num hashes", len(unrequestedHashes), - "epoch", rrh.epoch, + "epoch", epoch, ) return } @@ -624,13 +624,13 @@ func (rrh *resolverRequestHandler) RequestValidatorsInfo(hashes [][]byte) { rrh.whiteList.Add(unrequestedHashes) - err = validatorInfoResolver.RequestDataFromHashArray(unrequestedHashes, rrh.epoch) + err = validatorInfoResolver.RequestDataFromHashArray(unrequestedHashes, epoch) if err != nil { log.Debug("RequestValidatorInfo.RequestDataFromHash", "error", err.Error(), "topic", common.ValidatorInfoTopic, "num hashes", len(unrequestedHashes), - "epoch", rrh.epoch, + "epoch", epoch, ) return } diff --git a/dataRetriever/requestHandlers/requestHandler_test.go 
b/dataRetriever/requestHandlers/requestHandler_test.go index df0235ff220..cf85e051fb4 100644 --- a/dataRetriever/requestHandlers/requestHandler_test.go +++ b/dataRetriever/requestHandlers/requestHandler_test.go @@ -1534,7 +1534,7 @@ func TestResolverRequestHandler_RequestValidatorsInfo(t *testing.T) { time.Second, ) - rrh.RequestValidatorsInfo([][]byte{providedHash}) + rrh.RequestValidatorsInfo([][]byte{providedHash}, 0) assert.False(t, wasCalled) }) t.Run("cast fails", func(t *testing.T) { @@ -1558,7 +1558,7 @@ func TestResolverRequestHandler_RequestValidatorsInfo(t *testing.T) { time.Second, ) - rrh.RequestValidatorsInfo([][]byte{providedHash}) + rrh.RequestValidatorsInfo([][]byte{providedHash}, 0) assert.False(t, wasCalled) }) t.Run("should work", func(t *testing.T) { @@ -1586,7 +1586,7 @@ func TestResolverRequestHandler_RequestValidatorsInfo(t *testing.T) { time.Second, ) - rrh.RequestValidatorsInfo(providedHashes) + rrh.RequestValidatorsInfo(providedHashes, 0) assert.True(t, wasCalled) }) } diff --git a/epochStart/interface.go b/epochStart/interface.go index bf4c861a189..42ba01cd358 100644 --- a/epochStart/interface.go +++ b/epochStart/interface.go @@ -59,7 +59,7 @@ type RequestHandler interface { SetNumPeersToQuery(key string, intra int, cross int) error GetNumPeersToQuery(key string) (int, int, error) RequestValidatorInfo(hash []byte) - RequestValidatorsInfo(hashes [][]byte) + RequestValidatorsInfo(hashes [][]byte, epoch uint32) IsInterfaceNil() bool } diff --git a/epochStart/metachain/validators.go b/epochStart/metachain/validators.go index 3b42070b057..5e05548521d 100644 --- a/epochStart/metachain/validators.go +++ b/epochStart/metachain/validators.go @@ -312,17 +312,17 @@ func (vic *validatorInfoCreator) GetValidatorInfoTxs(body *block.Body) map[strin continue } - vic.getMapShardValidatorInfo(miniBlock, mapShardValidatorInfo) + vic.setMapShardValidatorInfo(miniBlock, mapShardValidatorInfo) } return mapShardValidatorInfo } -func (vic *validatorInfoCreator) 
getMapShardValidatorInfo(miniBlock *block.MiniBlock, mapShardValidatorInfo map[string]*state.ShardValidatorInfo) { +func (vic *validatorInfoCreator) setMapShardValidatorInfo(miniBlock *block.MiniBlock, mapShardValidatorInfo map[string]*state.ShardValidatorInfo) { for _, txHash := range miniBlock.TxHashes { shardValidatorInfo, err := vic.getShardValidatorInfo(txHash) if err != nil { - log.Error("validatorInfoCreator.getMapShardValidatorInfo", "hash", txHash, "error", err) + log.Error("validatorInfoCreator.setMapShardValidatorInfo", "hash", txHash, "error", err) continue } @@ -361,7 +361,9 @@ func (vic *validatorInfoCreator) SaveBlockDataToStorage(_ data.HeaderHandler, bo continue } - vic.saveValidatorInfo(miniBlock) + if vic.flagRefactorPeersMiniBlocks.IsSet() { + vic.saveValidatorInfo(miniBlock) + } marshalledData, err := vic.marshalizer.Marshal(miniBlock) if err != nil { @@ -375,10 +377,6 @@ func (vic *validatorInfoCreator) SaveBlockDataToStorage(_ data.HeaderHandler, bo } func (vic *validatorInfoCreator) saveValidatorInfo(miniBlock *block.MiniBlock) { - if !vic.flagRefactorPeersMiniBlocks.IsSet() { - return - } - validatorInfoCacher := vic.dataPool.CurrentEpochValidatorInfo() for _, validatorInfoHash := range miniBlock.TxHashes { diff --git a/epochStart/mock/validatorInfoSyncerStub.go b/epochStart/mock/validatorInfoSyncerStub.go index 3dedd99d58e..e0bee4249ec 100644 --- a/epochStart/mock/validatorInfoSyncerStub.go +++ b/epochStart/mock/validatorInfoSyncerStub.go @@ -15,7 +15,7 @@ func (vip *ValidatorInfoSyncerStub) SyncMiniBlocks(_ data.HeaderHandler) ([][]by } // SyncValidatorsInfo - -func (vip *ValidatorInfoSyncerStub) SyncValidatorsInfo(_ data.BodyHandler) ([][]byte, map[string]*state.ShardValidatorInfo, error) { +func (vip *ValidatorInfoSyncerStub) SyncValidatorsInfo(_ data.BodyHandler, _ uint32) ([][]byte, map[string]*state.ShardValidatorInfo, error) { return nil, nil, nil } diff --git a/epochStart/shardchain/peerMiniBlocksSyncer.go 
b/epochStart/shardchain/peerMiniBlocksSyncer.go index 2888a21eeec..3c06ce77ab3 100644 --- a/epochStart/shardchain/peerMiniBlocksSyncer.go +++ b/epochStart/shardchain/peerMiniBlocksSyncer.go @@ -105,7 +105,7 @@ func (p *peerMiniBlockSyncer) SyncMiniBlocks(headerHandler data.HeaderHandler) ( } // SyncValidatorsInfo synchronizes validators info from a block body of an epoch start meta block -func (p *peerMiniBlockSyncer) SyncValidatorsInfo(bodyHandler data.BodyHandler) ([][]byte, map[string]*state.ShardValidatorInfo, error) { +func (p *peerMiniBlockSyncer) SyncValidatorsInfo(bodyHandler data.BodyHandler, epoch uint32) ([][]byte, map[string]*state.ShardValidatorInfo, error) { if check.IfNil(bodyHandler) { return nil, nil, epochStart.ErrNilBlockBody } @@ -119,7 +119,7 @@ func (p *peerMiniBlockSyncer) SyncValidatorsInfo(bodyHandler data.BodyHandler) ( p.computeMissingValidatorsInfo(body) - allMissingValidatorsInfoHashes, err := p.retrieveMissingValidatorsInfo() + allMissingValidatorsInfoHashes, err := p.retrieveMissingValidatorsInfo(epoch) if err != nil { return allMissingValidatorsInfoHashes, nil, err } @@ -135,7 +135,8 @@ func (p *peerMiniBlockSyncer) receivedMiniBlock(key []byte, val interface{}) { return } - log.Trace(fmt.Sprintf("received miniblock of type %s", peerMb.Type)) + //TODO: Set the log level on Trace + log.Debug(fmt.Sprintf("received miniblock of type %s", peerMb.Type)) p.mutMiniBlocksForBlock.Lock() havingPeerMb, ok := p.mapAllPeerMiniBlocks[string(key)] @@ -149,6 +150,9 @@ func (p *peerMiniBlockSyncer) receivedMiniBlock(key []byte, val interface{}) { numMissingPeerMiniBlocks := p.numMissingPeerMiniBlocks p.mutMiniBlocksForBlock.Unlock() + //TODO: Set the log level on Trace + log.Debug("peerMiniBlockSyncer.receivedMiniBlock", "mb hash", key, "num missing peer mini blocks", numMissingPeerMiniBlocks) + if numMissingPeerMiniBlocks == 0 { p.chRcvAllMiniBlocks <- struct{}{} } @@ -161,7 +165,8 @@ func (p *peerMiniBlockSyncer) receivedValidatorInfo(key []byte, 
val interface{}) return } - log.Trace(fmt.Sprintf("received validator info of pk %s", validatorInfo.PublicKey)) + //TODO: Set the log level on Trace + log.Debug(fmt.Sprintf("received validator info of pk %s", validatorInfo.PublicKey)) p.mutValidatorsInfoForBlock.Lock() havingValidatorInfo, ok := p.mapAllValidatorsInfo[string(key)] @@ -175,6 +180,9 @@ func (p *peerMiniBlockSyncer) receivedValidatorInfo(key []byte, val interface{}) numMissingValidatorsInfo := p.numMissingValidatorsInfo p.mutValidatorsInfoForBlock.Unlock() + //TODO: Set the log level on Trace + log.Debug("peerMiniBlockSyncer.receivedValidatorInfo", "tx hash", key, "num missing validators info", numMissingValidatorsInfo) + if numMissingValidatorsInfo == 0 { p.chRcvAllValidatorsInfo <- struct{}{} } @@ -305,7 +313,7 @@ func (p *peerMiniBlockSyncer) retrieveMissingMiniBlocks() ([][]byte, error) { } } -func (p *peerMiniBlockSyncer) retrieveMissingValidatorsInfo() ([][]byte, error) { +func (p *peerMiniBlockSyncer) retrieveMissingValidatorsInfo(epoch uint32) ([][]byte, error) { p.mutValidatorsInfoForBlock.Lock() missingValidatorsInfo := make([][]byte, 0) for validatorInfoHash, validatorInfo := range p.mapAllValidatorsInfo { @@ -320,7 +328,7 @@ func (p *peerMiniBlockSyncer) retrieveMissingValidatorsInfo() ([][]byte, error) return nil, nil } - p.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) + p.requestHandler.RequestValidatorsInfo(missingValidatorsInfo, epoch) select { case <-p.chRcvAllValidatorsInfo: diff --git a/epochStart/shardchain/trigger.go b/epochStart/shardchain/trigger.go index 559929b59b1..a7f8331f3db 100644 --- a/epochStart/shardchain/trigger.go +++ b/epochStart/shardchain/trigger.go @@ -304,11 +304,11 @@ func (t *trigger) requestMissingMiniBlocks(ctx context.Context) { t.mutMissingMiniBlocks.RLock() - for hash, epochOfMissingMb := range t.mapMissingMiniBlocks { - if epochOfMissingMb <= t.metaEpoch { - delete(t.mapMissingMiniBlocks, hash) - } - } + //for hash, epochOfMissingMb := range 
t.mapMissingMiniBlocks { + // if epochOfMissingMb <= t.metaEpoch { + // delete(t.mapMissingMiniBlocks, hash) + // } + //} if len(t.mapMissingMiniBlocks) == 0 { t.mutMissingMiniBlocks.RUnlock() @@ -316,9 +316,9 @@ func (t *trigger) requestMissingMiniBlocks(ctx context.Context) { } missingMiniBlocks := make([][]byte, 0, len(t.mapMissingMiniBlocks)) - for hash := range t.mapMissingMiniBlocks { + for hash, epoch := range t.mapMissingMiniBlocks { missingMiniBlocks = append(missingMiniBlocks, []byte(hash)) - log.Debug("trigger.requestMissingMiniBlocks", "hash", []byte(hash)) + log.Debug("trigger.requestMissingMiniBlocks", "epoch", epoch, "hash", []byte(hash)) } t.mutMissingMiniBlocks.RUnlock() @@ -326,7 +326,7 @@ func (t *trigger) requestMissingMiniBlocks(ctx context.Context) { select { case <-ctx.Done(): - log.Debug("trigger's go routine is stopping...") + log.Debug("requestMissingMiniBlocks: trigger's go routine is stopping...") return case <-time.After(waitTime): } @@ -346,29 +346,31 @@ func (t *trigger) requestMissingValidatorsInfo(ctx context.Context) { t.mutMissingValidatorsInfo.RLock() - for hash, epochOfMissingValidatorInfo := range t.mapMissingValidatorsInfo { - if epochOfMissingValidatorInfo <= t.metaEpoch { - delete(t.mapMissingValidatorsInfo, hash) - } - } + //for hash, epochOfMissingValidatorInfo := range t.mapMissingValidatorsInfo { + // if epochOfMissingValidatorInfo <= t.metaEpoch { + // delete(t.mapMissingValidatorsInfo, hash) + // } + //} if len(t.mapMissingValidatorsInfo) == 0 { t.mutMissingValidatorsInfo.RUnlock() continue } + var requestWithEpoch uint32 missingValidatorsInfo := make([][]byte, 0, len(t.mapMissingValidatorsInfo)) - for hash := range t.mapMissingValidatorsInfo { + for hash, epoch := range t.mapMissingValidatorsInfo { + requestWithEpoch = epoch missingValidatorsInfo = append(missingValidatorsInfo, []byte(hash)) - log.Debug("trigger.requestMissingValidatorsInfo", "hash", []byte(hash)) + log.Debug("trigger.requestMissingValidatorsInfo", 
"epoch", epoch, "hash", []byte(hash)) } t.mutMissingValidatorsInfo.RUnlock() - go t.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) + go t.requestHandler.RequestValidatorsInfo(missingValidatorsInfo, requestWithEpoch) select { case <-ctx.Done(): - log.Debug("trigger's go routine is stopping...") + log.Debug("requestMissingValidatorsInfo: trigger's go routine is stopping...") return case <-time.After(waitTime): } @@ -751,7 +753,7 @@ func (t *trigger) checkIfTriggerCanBeActivated(hash string, metaHdr data.HeaderH } if metaHdr.GetEpoch() >= t.refactorPeersMiniBlocksEnableEpoch { - missingValidatorsInfoHashes, validatorsInfo, err := t.peerMiniBlocksSyncer.SyncValidatorsInfo(blockBody) + missingValidatorsInfoHashes, validatorsInfo, err := t.peerMiniBlocksSyncer.SyncValidatorsInfo(blockBody, metaHdr.GetEpoch()) if err != nil { t.addMissingValidatorsInfo(metaHdr.GetEpoch(), missingValidatorsInfoHashes) log.Debug("checkIfTriggerCanBeActivated.SyncValidatorsInfo", "num missing validators info", len(missingValidatorsInfoHashes), "error", err) diff --git a/genesis/process/disabled/requestHandler.go b/genesis/process/disabled/requestHandler.go index 64f110205ac..91bd54d5347 100644 --- a/genesis/process/disabled/requestHandler.go +++ b/genesis/process/disabled/requestHandler.go @@ -91,7 +91,7 @@ func (r *RequestHandler) RequestValidatorInfo(_ []byte) { } // RequestValidatorsInfo does nothing -func (r *RequestHandler) RequestValidatorsInfo(_ [][]byte) { +func (r *RequestHandler) RequestValidatorsInfo(_ [][]byte, _ uint32) { } // IsInterfaceNil returns true if there is no value under the interface diff --git a/process/block/preprocess/validatorInfoPreProcessor.go b/process/block/preprocess/validatorInfoPreProcessor.go index b7e8abc587b..e83f5812cab 100644 --- a/process/block/preprocess/validatorInfoPreProcessor.go +++ b/process/block/preprocess/validatorInfoPreProcessor.go @@ -22,7 +22,7 @@ var _ process.PreProcessor = (*validatorInfoPreprocessor)(nil) type 
validatorInfoPreprocessor struct { *basePreProcess chReceivedAllValidatorsInfo chan bool - onRequestValidatorsInfo func(txHashes [][]byte) + onRequestValidatorsInfo func(txHashes [][]byte, epoch uint32) validatorsInfoForBlock txsForBlock validatorsInfoPool dataRetriever.ShardedDataCacherNotifier storage dataRetriever.StorageService @@ -37,7 +37,7 @@ func NewValidatorInfoPreprocessor( blockSizeComputation BlockSizeComputationHandler, validatorsInfoPool dataRetriever.ShardedDataCacherNotifier, store dataRetriever.StorageService, - onRequestValidatorsInfo func(txHashes [][]byte), + onRequestValidatorsInfo func(txHashes [][]byte, epoch uint32), epochNotifier process.EpochNotifier, refactorPeersMiniBlocksEnableEpoch uint32, ) (*validatorInfoPreprocessor, error) { @@ -185,14 +185,15 @@ func (vip *validatorInfoPreprocessor) SaveTxsToStorage(_ *block.Body) error { // receivedValidatorInfoTransaction is a callback function called when a new validator info transaction // is added in the validator info transactions pool -func (vip *validatorInfoPreprocessor) receivedValidatorInfoTransaction(_ []byte, value interface{}) { +func (vip *validatorInfoPreprocessor) receivedValidatorInfoTransaction(txHash []byte, value interface{}) { validatorInfo, ok := value.(*state.ShardValidatorInfo) if !ok { log.Warn("validatorInfoPreprocessor.receivedValidatorInfoTransaction", "error", process.ErrWrongTypeAssertion) return } - log.Trace("validatorInfoPreprocessor.receivedValidatorInfoTransaction", "pk", validatorInfo.PublicKey) + //TODO: Set the log level on Trace + log.Debug("validatorInfoPreprocessor.receivedValidatorInfoTransaction", "tx hash", txHash, "pk", validatorInfo.PublicKey) } // CreateBlockStarted cleans the local cache map for processed/created validators info at this round diff --git a/process/block/preprocess/validatorInfoPreProcessor_test.go b/process/block/preprocess/validatorInfoPreProcessor_test.go index 0eec9851f1b..49ad21ab4a6 100644 --- 
a/process/block/preprocess/validatorInfoPreProcessor_test.go +++ b/process/block/preprocess/validatorInfoPreProcessor_test.go @@ -23,7 +23,7 @@ func TestNewValidatorInfoPreprocessor_NilHasherShouldErr(t *testing.T) { &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, - func(txHashes [][]byte) {}, + func(txHashes [][]byte, epoch uint32) {}, &epochNotifier.EpochNotifierStub{}, 0, ) @@ -42,7 +42,7 @@ func TestNewValidatorInfoPreprocessor_NilMarshalizerShouldErr(t *testing.T) { &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, - func(txHashes [][]byte) {}, + func(txHashes [][]byte, epoch uint32) {}, &epochNotifier.EpochNotifierStub{}, 0, ) @@ -61,7 +61,7 @@ func TestNewValidatorInfoPreprocessor_NilBlockSizeComputationHandlerShouldErr(t nil, tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, - func(txHashes [][]byte) {}, + func(txHashes [][]byte, epoch uint32) {}, &epochNotifier.EpochNotifierStub{}, 0, ) @@ -79,7 +79,7 @@ func TestNewValidatorInfoPreprocessor_NilValidatorInfoPoolShouldErr(t *testing.T &testscommon.BlockSizeComputationStub{}, nil, &mock.ChainStorerMock{}, - func(txHashes [][]byte) {}, + func(txHashes [][]byte, epoch uint32) {}, &epochNotifier.EpochNotifierStub{}, 0, ) @@ -98,7 +98,7 @@ func TestNewValidatorInfoPreprocessor_NilStoreShouldErr(t *testing.T) { &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), nil, - func(txHashes [][]byte) {}, + func(txHashes [][]byte, epoch uint32) {}, &epochNotifier.EpochNotifierStub{}, 0, ) @@ -136,7 +136,7 @@ func TestNewValidatorInfoPreprocessor_NilEpochNotifierShouldErr(t *testing.T) { &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, - func(txHashes [][]byte) {}, + func(txHashes [][]byte, epoch uint32) {}, nil, 0, ) @@ -155,7 +155,7 @@ func TestNewValidatorInfoPreprocessor_OkValsShouldWork(t *testing.T) { &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, - 
func(txHashes [][]byte) {}, + func(txHashes [][]byte, epoch uint32) {}, &epochNotifier.EpochNotifierStub{}, 0, ) @@ -173,7 +173,7 @@ func TestNewValidatorInfoPreprocessor_CreateMarshalizedDataShouldWork(t *testing &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, - func(txHashes [][]byte) {}, + func(txHashes [][]byte, epoch uint32) {}, &epochNotifier.EpochNotifierStub{}, 0, ) @@ -195,7 +195,7 @@ func TestNewValidatorInfoPreprocessor_ProcessMiniBlockInvalidMiniBlockTypeShould &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, - func(txHashes [][]byte) {}, + func(txHashes [][]byte, epoch uint32) {}, &epochNotifier.EpochNotifierStub{}, 0, ) @@ -226,7 +226,7 @@ func TestNewValidatorInfoPreprocessor_ProcessMiniBlockShouldWork(t *testing.T) { &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, - func(txHashes [][]byte) {}, + func(txHashes [][]byte, epoch uint32) {}, &epochNotifier.EpochNotifierStub{}, 0, ) @@ -257,7 +257,7 @@ func TestNewValidatorInfoPreprocessor_ProcessMiniBlockNotFromMeta(t *testing.T) &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, - func(txHashes [][]byte) {}, + func(txHashes [][]byte, epoch uint32) {}, &epochNotifier.EpochNotifierStub{}, 0, ) @@ -292,7 +292,7 @@ func TestNewValidatorInfoPreprocessor_RestorePeerBlockIntoPools(t *testing.T) { blockSizeComputation, tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, - func(txHashes [][]byte) {}, + func(txHashes [][]byte, epoch uint32) {}, &epochNotifier.EpochNotifierStub{}, 0, ) @@ -339,7 +339,7 @@ func TestNewValidatorInfoPreprocessor_RestoreOtherBlockTypeIntoPoolsShouldNotRes blockSizeComputation, tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, - func(txHashes [][]byte) {}, + func(txHashes [][]byte, epoch uint32) {}, &epochNotifier.EpochNotifierStub{}, 0, ) @@ -386,7 +386,7 @@ func TestNewValidatorInfoPreprocessor_RemovePeerBlockFromPool(t *testing.T) { 
blockSizeComputation, tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, - func(txHashes [][]byte) {}, + func(txHashes [][]byte, epoch uint32) {}, &epochNotifier.EpochNotifierStub{}, 0, ) @@ -433,7 +433,7 @@ func TestNewValidatorInfoPreprocessor_RemoveOtherBlockTypeFromPoolShouldNotRemov blockSizeComputation, tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, - func(txHashes [][]byte) {}, + func(txHashes [][]byte, epoch uint32) {}, &epochNotifier.EpochNotifierStub{}, 0, ) diff --git a/process/interface.go b/process/interface.go index 63ba800f1f2..f6f21b62f41 100644 --- a/process/interface.go +++ b/process/interface.go @@ -559,7 +559,7 @@ type RequestHandler interface { RequestPeerAuthenticationsChunk(destShardID uint32, chunkIndex uint32) RequestPeerAuthenticationsByHashes(destShardID uint32, hashes [][]byte) RequestValidatorInfo(hash []byte) - RequestValidatorsInfo(hashes [][]byte) + RequestValidatorsInfo(hashes [][]byte, epoch uint32) IsInterfaceNil() bool } @@ -1000,7 +1000,7 @@ type RatingsStepHandler interface { // ValidatorInfoSyncer defines the method needed for validatorInfoProcessing type ValidatorInfoSyncer interface { SyncMiniBlocks(headerHandler data.HeaderHandler) ([][]byte, data.BodyHandler, error) - SyncValidatorsInfo(bodyHandler data.BodyHandler) ([][]byte, map[string]*state.ShardValidatorInfo, error) + SyncValidatorsInfo(bodyHandler data.BodyHandler, epoch uint32) ([][]byte, map[string]*state.ShardValidatorInfo, error) IsInterfaceNil() bool } diff --git a/testscommon/requestHandlerStub.go b/testscommon/requestHandlerStub.go index 3ff19202845..251a43ffdab 100644 --- a/testscommon/requestHandlerStub.go +++ b/testscommon/requestHandlerStub.go @@ -22,7 +22,7 @@ type RequestHandlerStub struct { RequestPeerAuthenticationsChunkCalled func(destShardID uint32, chunkIndex uint32) RequestPeerAuthenticationsByHashesCalled func(destShardID uint32, hashes [][]byte) RequestValidatorInfoCalled func(hash []byte) - RequestValidatorsInfoCalled func(hashes [][]byte) + 
RequestValidatorsInfoCalled func(hashes [][]byte, epoch uint32) } // SetNumPeersToQuery - @@ -178,9 +178,9 @@ func (rhs *RequestHandlerStub) RequestValidatorInfo(hash []byte) { } // RequestValidatorsInfo - -func (rhs *RequestHandlerStub) RequestValidatorsInfo(hashes [][]byte) { +func (rhs *RequestHandlerStub) RequestValidatorsInfo(hashes [][]byte, epoch uint32) { if rhs.RequestValidatorsInfoCalled != nil { - rhs.RequestValidatorsInfoCalled(hashes) + rhs.RequestValidatorsInfoCalled(hashes, epoch) } } diff --git a/update/sync/syncTransactions.go b/update/sync/syncTransactions.go index 3b706c7805f..4053b9a1c29 100644 --- a/update/sync/syncTransactions.go +++ b/update/sync/syncTransactions.go @@ -114,7 +114,7 @@ func (ts *transactionsSync) SyncTransactionsFor(miniBlocks map[string]*block.Min ts.mapTxsToMiniBlocks[string(txHash)] = miniBlock log.Debug("transactionsSync.SyncTransactionsFor", "mb type", miniBlock.Type, "mb sender", miniBlock.SenderShardID, "mb receiver", miniBlock.ReceiverShardID, "tx hash needed", txHash) } - numRequestedTxs += ts.requestTransactionsFor(miniBlock) + numRequestedTxs += ts.requestTransactionsFor(miniBlock, epoch) } ts.mutPendingTx.Unlock() @@ -147,9 +147,9 @@ func (ts *transactionsSync) SyncTransactionsFor(miniBlocks map[string]*block.Min } } -func (ts *transactionsSync) requestTransactionsFor(miniBlock *block.MiniBlock) int { +func (ts *transactionsSync) requestTransactionsFor(miniBlock *block.MiniBlock, epoch uint32) int { if miniBlock.Type == block.PeerBlock { - return ts.requestTransactionsForPeerMiniBlock(miniBlock) + return ts.requestTransactionsForPeerMiniBlock(miniBlock, epoch) } return ts.requestTransactionsForNonPeerMiniBlock(miniBlock) @@ -197,7 +197,7 @@ func (ts *transactionsSync) requestTransactionsForNonPeerMiniBlock(miniBlock *bl return len(missingTxs) } -func (ts *transactionsSync) requestTransactionsForPeerMiniBlock(miniBlock *block.MiniBlock) int { +func (ts *transactionsSync) 
requestTransactionsForPeerMiniBlock(miniBlock *block.MiniBlock, epoch uint32) int { missingTxs := make([][]byte, 0) for _, txHash := range miniBlock.TxHashes { if _, ok := ts.mapValidatorsInfo[string(txHash)]; ok { @@ -218,7 +218,7 @@ func (ts *transactionsSync) requestTransactionsForPeerMiniBlock(miniBlock *block log.Debug("transactionsSync.requestTransactionsForPeerMiniBlock", "mb type", miniBlock.Type, "mb sender", miniBlock.SenderShardID, "mb receiver", miniBlock.ReceiverShardID, "tx hash missing", txHash) } - ts.requestHandler.RequestValidatorsInfo(missingTxs) + ts.requestHandler.RequestValidatorsInfo(missingTxs, epoch) return len(missingTxs) } diff --git a/update/sync/syncTransactions_test.go b/update/sync/syncTransactions_test.go index e65f90e8ad3..8d816b229bd 100644 --- a/update/sync/syncTransactions_test.go +++ b/update/sync/syncTransactions_test.go @@ -168,14 +168,14 @@ func TestSyncPendingTransactionsFor_requestTransactionsFor(t *testing.T) { SenderShardID: 0, ReceiverShardID: 0, } - numMissing := pendingTxsSyncer.requestTransactionsFor(mb) + numMissing := pendingTxsSyncer.requestTransactionsFor(mb, 0) require.Equal(t, 2, numMissing) require.Equal(t, 1, len(requests)) require.Equal(t, 2, requests[0]) requests = make(map[uint32]int) mb.SenderShardID = 1 - numMissing = pendingTxsSyncer.requestTransactionsFor(mb) + numMissing = pendingTxsSyncer.requestTransactionsFor(mb, 0) require.Equal(t, 2, numMissing) require.Equal(t, 2, len(requests)) require.Equal(t, 1, requests[0]) @@ -197,14 +197,14 @@ func TestSyncPendingTransactionsFor_requestTransactionsFor(t *testing.T) { SenderShardID: 0, ReceiverShardID: 0, } - numMissing := pendingTxsSyncer.requestTransactionsFor(mb) + numMissing := pendingTxsSyncer.requestTransactionsFor(mb, 0) require.Equal(t, 2, numMissing) require.Equal(t, 1, len(requests)) require.Equal(t, 2, requests[0]) requests = make(map[uint32]int) mb.SenderShardID = 1 - numMissing = pendingTxsSyncer.requestTransactionsFor(mb) + numMissing = 
pendingTxsSyncer.requestTransactionsFor(mb, 0) require.Equal(t, 2, numMissing) require.Equal(t, 2, len(requests)) require.Equal(t, 1, requests[0]) @@ -226,14 +226,14 @@ func TestSyncPendingTransactionsFor_requestTransactionsFor(t *testing.T) { SenderShardID: 0, ReceiverShardID: 0, } - numMissing := pendingTxsSyncer.requestTransactionsFor(mb) + numMissing := pendingTxsSyncer.requestTransactionsFor(mb, 0) require.Equal(t, 2, numMissing) require.Equal(t, 1, len(requests)) require.Equal(t, 2, requests[0]) requests = make(map[uint32]int) mb.SenderShardID = 1 - numMissing = pendingTxsSyncer.requestTransactionsFor(mb) + numMissing = pendingTxsSyncer.requestTransactionsFor(mb, 0) require.Equal(t, 2, numMissing) require.Equal(t, 2, len(requests)) require.Equal(t, 1, requests[0]) @@ -255,14 +255,14 @@ func TestSyncPendingTransactionsFor_requestTransactionsFor(t *testing.T) { SenderShardID: 0, ReceiverShardID: 0, } - numMissing := pendingTxsSyncer.requestTransactionsFor(mb) + numMissing := pendingTxsSyncer.requestTransactionsFor(mb, 0) require.Equal(t, 2, numMissing) require.Equal(t, 1, len(requests)) require.Equal(t, 2, requests[0]) requests = make(map[uint32]int) mb.SenderShardID = 1 - numMissing = pendingTxsSyncer.requestTransactionsFor(mb) + numMissing = pendingTxsSyncer.requestTransactionsFor(mb, 0) require.Equal(t, 2, numMissing) require.Equal(t, 2, len(requests)) require.Equal(t, 1, requests[0]) From d045976aeee7977dd8b490460a00b60c9993d518 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Wed, 10 Aug 2022 12:53:23 +0300 Subject: [PATCH 43/70] * Added more logs * Changed request for validator info from hashes array to one by one hash in a loop --- epochStart/shardchain/peerMiniBlocksSyncer.go | 23 +++++++++++++------ epochStart/shardchain/trigger.go | 11 ++++++--- p2p/libp2p/netMessenger.go | 6 +++-- process/block/metablock.go | 10 ++++++++ process/interceptors/baseDataInterceptor.go | 3 ++- .../validatorInfoInterceptorProcessor.go | 3 +++ 6 files changed, 43 
insertions(+), 13 deletions(-) diff --git a/epochStart/shardchain/peerMiniBlocksSyncer.go b/epochStart/shardchain/peerMiniBlocksSyncer.go index 3c06ce77ab3..662bc1178e7 100644 --- a/epochStart/shardchain/peerMiniBlocksSyncer.go +++ b/epochStart/shardchain/peerMiniBlocksSyncer.go @@ -1,7 +1,6 @@ package shardchain import ( - "fmt" "sync" "time" @@ -130,13 +129,18 @@ func (p *peerMiniBlockSyncer) SyncValidatorsInfo(bodyHandler data.BodyHandler, e } func (p *peerMiniBlockSyncer) receivedMiniBlock(key []byte, val interface{}) { - peerMb, ok := val.(*block.MiniBlock) - if !ok || peerMb.Type != block.PeerBlock { + peerMiniBlock, ok := val.(*block.MiniBlock) + if !ok { + log.Error("receivedMiniBlock", "key", key, "error", epochStart.ErrWrongTypeAssertion) + return + } + + if peerMiniBlock.Type != block.PeerBlock { return } //TODO: Set the log level on Trace - log.Debug(fmt.Sprintf("received miniblock of type %s", peerMb.Type)) + log.Debug("peerMiniBlockSyncer.receivedMiniBlock", "type", peerMiniBlock.Type) p.mutMiniBlocksForBlock.Lock() havingPeerMb, ok := p.mapAllPeerMiniBlocks[string(key)] @@ -145,7 +149,7 @@ func (p *peerMiniBlockSyncer) receivedMiniBlock(key []byte, val interface{}) { return } - p.mapAllPeerMiniBlocks[string(key)] = peerMb + p.mapAllPeerMiniBlocks[string(key)] = peerMiniBlock p.numMissingPeerMiniBlocks-- numMissingPeerMiniBlocks := p.numMissingPeerMiniBlocks p.mutMiniBlocksForBlock.Unlock() @@ -166,7 +170,7 @@ func (p *peerMiniBlockSyncer) receivedValidatorInfo(key []byte, val interface{}) } //TODO: Set the log level on Trace - log.Debug(fmt.Sprintf("received validator info of pk %s", validatorInfo.PublicKey)) + log.Debug("peerMiniBlockSyncer.receivedValidatorInfo", "pk", validatorInfo.PublicKey) p.mutValidatorsInfoForBlock.Lock() havingValidatorInfo, ok := p.mapAllValidatorsInfo[string(key)] @@ -328,7 +332,12 @@ func (p *peerMiniBlockSyncer) retrieveMissingValidatorsInfo(epoch uint32) ([][]b return nil, nil } - 
p.requestHandler.RequestValidatorsInfo(missingValidatorsInfo, epoch) + //p.requestHandler.RequestValidatorsInfo(missingValidatorsInfo, epoch) + for _, missingValidatorInfo := range missingValidatorsInfo { + go func(validatorInfo []byte) { + p.requestHandler.RequestValidatorInfo(validatorInfo) + }(missingValidatorInfo) + } select { case <-p.chRcvAllValidatorsInfo: diff --git a/epochStart/shardchain/trigger.go b/epochStart/shardchain/trigger.go index a7f8331f3db..0485e80df22 100644 --- a/epochStart/shardchain/trigger.go +++ b/epochStart/shardchain/trigger.go @@ -357,16 +357,21 @@ func (t *trigger) requestMissingValidatorsInfo(ctx context.Context) { continue } - var requestWithEpoch uint32 + //var requestWithEpoch uint32 missingValidatorsInfo := make([][]byte, 0, len(t.mapMissingValidatorsInfo)) for hash, epoch := range t.mapMissingValidatorsInfo { - requestWithEpoch = epoch + //requestWithEpoch = epoch missingValidatorsInfo = append(missingValidatorsInfo, []byte(hash)) log.Debug("trigger.requestMissingValidatorsInfo", "epoch", epoch, "hash", []byte(hash)) } t.mutMissingValidatorsInfo.RUnlock() - go t.requestHandler.RequestValidatorsInfo(missingValidatorsInfo, requestWithEpoch) + //go t.requestHandler.RequestValidatorsInfo(missingValidatorsInfo, requestWithEpoch) + for _, missingValidatorInfo := range missingValidatorsInfo { + go func(validatorInfo []byte) { + t.requestHandler.RequestValidatorInfo(validatorInfo) + }(missingValidatorInfo) + } select { case <-ctx.Done(): diff --git a/p2p/libp2p/netMessenger.go b/p2p/libp2p/netMessenger.go index 5128a2ccd58..51a97258cbb 100644 --- a/p2p/libp2p/netMessenger.go +++ b/p2p/libp2p/netMessenger.go @@ -979,7 +979,8 @@ func (netMes *networkMessenger) pubsubCallback(topicProcs *topicProcessors, topi for index, handler := range handlers { err = handler.ProcessReceivedMessage(msg, fromConnectedPeer) if err != nil { - log.Trace("p2p validator", + //TODO: Set the log level on Trace + log.Debug("p2p validator", "error", err.Error(), 
"topic", topic, "originator", p2p.MessageOriginatorPid(msg), @@ -1217,7 +1218,8 @@ func (netMes *networkMessenger) directMessageHandler(message *pubsub.Message, fr for index, handler := range handlers { errProcess := handler.ProcessReceivedMessage(msg, fromConnectedPeer) if errProcess != nil { - log.Trace("p2p validator", + //TODO: Set the log level on Trace + log.Debug("p2p validator", "error", errProcess.Error(), "topic", msg.Topic(), "originator", p2p.MessageOriginatorPid(msg), diff --git a/process/block/metablock.go b/process/block/metablock.go index 268bd0dd09c..4cac7afacf6 100644 --- a/process/block/metablock.go +++ b/process/block/metablock.go @@ -2431,12 +2431,22 @@ func (mp *metaProcessor) getAllMarshalledTxs(body *block.Body) map[string][][]by for topic, marshalledTxs := range marshalledRewardsTxs { allMarshalledTxs[topic] = append(allMarshalledTxs[topic], marshalledTxs...) + //TODO: Set the log level on Trace + log.Debug("metaProcessor.getAllMarshalledTxs", "topic", topic, "num rewards txs", len(marshalledTxs)) } + //TODO: Set the log level on Trace + log.Debug("metaProcessor.getAllMarshalledTxs", "num rewards txs", len(marshalledRewardsTxs)) + for topic, marshalledTxs := range marshalledValidatorInfoTxs { allMarshalledTxs[topic] = append(allMarshalledTxs[topic], marshalledTxs...) 
+ //TODO: Set the log level on Trace + log.Debug("metaProcessor.getAllMarshalledTxs", "topic", topic, "num validator info txs", len(marshalledTxs)) } + //TODO: Set the log level on Trace + log.Debug("metaProcessor.getAllMarshalledTxs", "num validator info txs", len(marshalledValidatorInfoTxs)) + return allMarshalledTxs } diff --git a/process/interceptors/baseDataInterceptor.go b/process/interceptors/baseDataInterceptor.go index 93c67ef1a4b..dfea6d70dbb 100644 --- a/process/interceptors/baseDataInterceptor.go +++ b/process/interceptors/baseDataInterceptor.go @@ -80,7 +80,8 @@ func (bdi *baseDataInterceptor) processInterceptedData(data process.InterceptedD err = bdi.processor.Save(data, msg.Peer(), bdi.topic) if err != nil { - log.Trace("intercepted data can not be processed", + //TODO: Set the log level on Trace + log.Debug("intercepted data can not be processed", "hash", data.Hash(), "type", data.Type(), "pid", p2p.MessageOriginatorPid(msg), diff --git a/process/interceptors/processor/validatorInfoInterceptorProcessor.go b/process/interceptors/processor/validatorInfoInterceptorProcessor.go index 5d831d820f0..df4026a7b67 100644 --- a/process/interceptors/processor/validatorInfoInterceptorProcessor.go +++ b/process/interceptors/processor/validatorInfoInterceptorProcessor.go @@ -51,6 +51,9 @@ func (viip *validatorInfoInterceptorProcessor) Save(data process.InterceptedData validatorInfo := ivi.ValidatorInfo() hash := ivi.Hash() + //TODO: Set the log level on Trace + log.Debug("validatorInfoInterceptorProcessor.Save", "tx hash", hash, "publick key", validatorInfo.PublicKey) + strCache := process.ShardCacherIdentifier(core.MetachainShardId, core.AllShardId) viip.validatorInfoPool.AddData(hash, validatorInfo, validatorInfo.Size(), strCache) From 7c3246513e521fd6435a45640dd47918b1ca3c87 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Wed, 10 Aug 2022 13:00:19 +0300 Subject: [PATCH 44/70] * Changed ValidatorInfoPool from 4 Shards to 1 Shard --- 
cmd/node/config/config.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/node/config/config.toml b/cmd/node/config/config.toml index fba1d159bd6..ca5bd14c9e8 100644 --- a/cmd/node/config/config.toml +++ b/cmd/node/config/config.toml @@ -447,7 +447,7 @@ Name = "ValidatorInfoPool" Capacity = 10000 SizeInBytes = 31457280 #30MB - Shards = 4 + Shards = 1 #PublicKeyPeerId represents the main cache used to map Elrond block signing public keys to their associated peer id's. [PublicKeyPeerId] From bfb11de6220c05b673d837586b9cf6be00803ccb Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Wed, 10 Aug 2022 17:27:53 +0300 Subject: [PATCH 45/70] * Rollback to older version * Set 4 shards instead 1 for ValidatorInfoPool --- cmd/node/config/config.toml | 2 +- .../requestHandlers/requestHandler.go | 12 ++++---- .../requestHandlers/requestHandler_test.go | 6 ++-- epochStart/interface.go | 2 +- epochStart/mock/validatorInfoSyncerStub.go | 2 +- epochStart/shardchain/peerMiniBlocksSyncer.go | 14 ++++----- epochStart/shardchain/trigger.go | 20 ++----------- genesis/process/disabled/requestHandler.go | 2 +- p2p/libp2p/netMessenger.go | 6 ++-- process/block/metablock.go | 12 ++------ .../preprocess/validatorInfoPreProcessor.go | 7 ++--- .../validatorInfoPreProcessor_test.go | 30 +++++++++---------- process/interceptors/baseDataInterceptor.go | 3 +- .../validatorInfoInterceptorProcessor.go | 3 +- process/interface.go | 4 +-- testscommon/requestHandlerStub.go | 6 ++-- update/sync/syncTransactions.go | 23 ++++++++------ update/sync/syncTransactions_test.go | 16 +++++----- 18 files changed, 71 insertions(+), 99 deletions(-) diff --git a/cmd/node/config/config.toml b/cmd/node/config/config.toml index ca5bd14c9e8..fba1d159bd6 100644 --- a/cmd/node/config/config.toml +++ b/cmd/node/config/config.toml @@ -447,7 +447,7 @@ Name = "ValidatorInfoPool" Capacity = 10000 SizeInBytes = 31457280 #30MB - Shards = 1 + Shards = 4 #PublicKeyPeerId represents the main cache used to 
map Elrond block signing public keys to their associated peer id's. [PublicKeyPeerId] diff --git a/dataRetriever/requestHandlers/requestHandler.go b/dataRetriever/requestHandlers/requestHandler.go index 9278e958bce..fda20a0e149 100644 --- a/dataRetriever/requestHandlers/requestHandler.go +++ b/dataRetriever/requestHandlers/requestHandler.go @@ -592,8 +592,8 @@ func (rrh *resolverRequestHandler) RequestValidatorInfo(hash []byte) { rrh.addRequestedItems([][]byte{hash}, uniqueValidatorInfoSuffix) } -// RequestValidatorsInfo asks for the validators` info associated with the specified hashes and epoch from connected peers -func (rrh *resolverRequestHandler) RequestValidatorsInfo(hashes [][]byte, epoch uint32) { +// RequestValidatorsInfo asks for the validators` info associated with the specified hashes from connected peers +func (rrh *resolverRequestHandler) RequestValidatorsInfo(hashes [][]byte) { unrequestedHashes := rrh.getUnrequestedHashes(hashes, uniqueValidatorInfoSuffix) if len(unrequestedHashes) == 0 { return @@ -602,7 +602,7 @@ func (rrh *resolverRequestHandler) RequestValidatorsInfo(hashes [][]byte, epoch log.Debug("requesting validator info messages from network", "topic", common.ValidatorInfoTopic, "num hashes", len(unrequestedHashes), - "epoch", epoch, + "epoch", rrh.epoch, ) resolver, err := rrh.resolversFinder.MetaChainResolver(common.ValidatorInfoTopic) @@ -611,7 +611,7 @@ func (rrh *resolverRequestHandler) RequestValidatorsInfo(hashes [][]byte, epoch "error", err.Error(), "topic", common.ValidatorInfoTopic, "num hashes", len(unrequestedHashes), - "epoch", epoch, + "epoch", rrh.epoch, ) return } @@ -624,13 +624,13 @@ func (rrh *resolverRequestHandler) RequestValidatorsInfo(hashes [][]byte, epoch rrh.whiteList.Add(unrequestedHashes) - err = validatorInfoResolver.RequestDataFromHashArray(unrequestedHashes, epoch) + err = validatorInfoResolver.RequestDataFromHashArray(unrequestedHashes, rrh.epoch) if err != nil { 
log.Debug("RequestValidatorInfo.RequestDataFromHash", "error", err.Error(), "topic", common.ValidatorInfoTopic, "num hashes", len(unrequestedHashes), - "epoch", epoch, + "epoch", rrh.epoch, ) return } diff --git a/dataRetriever/requestHandlers/requestHandler_test.go b/dataRetriever/requestHandlers/requestHandler_test.go index cf85e051fb4..df0235ff220 100644 --- a/dataRetriever/requestHandlers/requestHandler_test.go +++ b/dataRetriever/requestHandlers/requestHandler_test.go @@ -1534,7 +1534,7 @@ func TestResolverRequestHandler_RequestValidatorsInfo(t *testing.T) { time.Second, ) - rrh.RequestValidatorsInfo([][]byte{providedHash}, 0) + rrh.RequestValidatorsInfo([][]byte{providedHash}) assert.False(t, wasCalled) }) t.Run("cast fails", func(t *testing.T) { @@ -1558,7 +1558,7 @@ func TestResolverRequestHandler_RequestValidatorsInfo(t *testing.T) { time.Second, ) - rrh.RequestValidatorsInfo([][]byte{providedHash}, 0) + rrh.RequestValidatorsInfo([][]byte{providedHash}) assert.False(t, wasCalled) }) t.Run("should work", func(t *testing.T) { @@ -1586,7 +1586,7 @@ func TestResolverRequestHandler_RequestValidatorsInfo(t *testing.T) { time.Second, ) - rrh.RequestValidatorsInfo(providedHashes, 0) + rrh.RequestValidatorsInfo(providedHashes) assert.True(t, wasCalled) }) } diff --git a/epochStart/interface.go b/epochStart/interface.go index 42ba01cd358..bf4c861a189 100644 --- a/epochStart/interface.go +++ b/epochStart/interface.go @@ -59,7 +59,7 @@ type RequestHandler interface { SetNumPeersToQuery(key string, intra int, cross int) error GetNumPeersToQuery(key string) (int, int, error) RequestValidatorInfo(hash []byte) - RequestValidatorsInfo(hashes [][]byte, epoch uint32) + RequestValidatorsInfo(hashes [][]byte) IsInterfaceNil() bool } diff --git a/epochStart/mock/validatorInfoSyncerStub.go b/epochStart/mock/validatorInfoSyncerStub.go index e0bee4249ec..3dedd99d58e 100644 --- a/epochStart/mock/validatorInfoSyncerStub.go +++ b/epochStart/mock/validatorInfoSyncerStub.go @@ -15,7 
+15,7 @@ func (vip *ValidatorInfoSyncerStub) SyncMiniBlocks(_ data.HeaderHandler) ([][]by } // SyncValidatorsInfo - -func (vip *ValidatorInfoSyncerStub) SyncValidatorsInfo(_ data.BodyHandler, _ uint32) ([][]byte, map[string]*state.ShardValidatorInfo, error) { +func (vip *ValidatorInfoSyncerStub) SyncValidatorsInfo(_ data.BodyHandler) ([][]byte, map[string]*state.ShardValidatorInfo, error) { return nil, nil, nil } diff --git a/epochStart/shardchain/peerMiniBlocksSyncer.go b/epochStart/shardchain/peerMiniBlocksSyncer.go index 662bc1178e7..237e37e92dd 100644 --- a/epochStart/shardchain/peerMiniBlocksSyncer.go +++ b/epochStart/shardchain/peerMiniBlocksSyncer.go @@ -104,7 +104,7 @@ func (p *peerMiniBlockSyncer) SyncMiniBlocks(headerHandler data.HeaderHandler) ( } // SyncValidatorsInfo synchronizes validators info from a block body of an epoch start meta block -func (p *peerMiniBlockSyncer) SyncValidatorsInfo(bodyHandler data.BodyHandler, epoch uint32) ([][]byte, map[string]*state.ShardValidatorInfo, error) { +func (p *peerMiniBlockSyncer) SyncValidatorsInfo(bodyHandler data.BodyHandler) ([][]byte, map[string]*state.ShardValidatorInfo, error) { if check.IfNil(bodyHandler) { return nil, nil, epochStart.ErrNilBlockBody } @@ -118,7 +118,7 @@ func (p *peerMiniBlockSyncer) SyncValidatorsInfo(bodyHandler data.BodyHandler, e p.computeMissingValidatorsInfo(body) - allMissingValidatorsInfoHashes, err := p.retrieveMissingValidatorsInfo(epoch) + allMissingValidatorsInfoHashes, err := p.retrieveMissingValidatorsInfo() if err != nil { return allMissingValidatorsInfoHashes, nil, err } @@ -139,8 +139,7 @@ func (p *peerMiniBlockSyncer) receivedMiniBlock(key []byte, val interface{}) { return } - //TODO: Set the log level on Trace - log.Debug("peerMiniBlockSyncer.receivedMiniBlock", "type", peerMiniBlock.Type) + log.Debug("peerMiniBlockSyncer.receivedMiniBlock", "mb type", peerMiniBlock.Type) p.mutMiniBlocksForBlock.Lock() havingPeerMb, ok := p.mapAllPeerMiniBlocks[string(key)] @@ -154,7 
+153,6 @@ func (p *peerMiniBlockSyncer) receivedMiniBlock(key []byte, val interface{}) { numMissingPeerMiniBlocks := p.numMissingPeerMiniBlocks p.mutMiniBlocksForBlock.Unlock() - //TODO: Set the log level on Trace log.Debug("peerMiniBlockSyncer.receivedMiniBlock", "mb hash", key, "num missing peer mini blocks", numMissingPeerMiniBlocks) if numMissingPeerMiniBlocks == 0 { @@ -169,7 +167,6 @@ func (p *peerMiniBlockSyncer) receivedValidatorInfo(key []byte, val interface{}) return } - //TODO: Set the log level on Trace log.Debug("peerMiniBlockSyncer.receivedValidatorInfo", "pk", validatorInfo.PublicKey) p.mutValidatorsInfoForBlock.Lock() @@ -184,7 +181,6 @@ func (p *peerMiniBlockSyncer) receivedValidatorInfo(key []byte, val interface{}) numMissingValidatorsInfo := p.numMissingValidatorsInfo p.mutValidatorsInfoForBlock.Unlock() - //TODO: Set the log level on Trace log.Debug("peerMiniBlockSyncer.receivedValidatorInfo", "tx hash", key, "num missing validators info", numMissingValidatorsInfo) if numMissingValidatorsInfo == 0 { @@ -317,7 +313,7 @@ func (p *peerMiniBlockSyncer) retrieveMissingMiniBlocks() ([][]byte, error) { } } -func (p *peerMiniBlockSyncer) retrieveMissingValidatorsInfo(epoch uint32) ([][]byte, error) { +func (p *peerMiniBlockSyncer) retrieveMissingValidatorsInfo() ([][]byte, error) { p.mutValidatorsInfoForBlock.Lock() missingValidatorsInfo := make([][]byte, 0) for validatorInfoHash, validatorInfo := range p.mapAllValidatorsInfo { @@ -332,7 +328,7 @@ func (p *peerMiniBlockSyncer) retrieveMissingValidatorsInfo(epoch uint32) ([][]b return nil, nil } - //p.requestHandler.RequestValidatorsInfo(missingValidatorsInfo, epoch) + //TODO: Analyze if it works also with: go p.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) for _, missingValidatorInfo := range missingValidatorsInfo { go func(validatorInfo []byte) { p.requestHandler.RequestValidatorInfo(validatorInfo) diff --git a/epochStart/shardchain/trigger.go b/epochStart/shardchain/trigger.go index 
0485e80df22..38766deceed 100644 --- a/epochStart/shardchain/trigger.go +++ b/epochStart/shardchain/trigger.go @@ -303,13 +303,6 @@ func (t *trigger) requestMissingMiniBlocks(ctx context.Context) { } t.mutMissingMiniBlocks.RLock() - - //for hash, epochOfMissingMb := range t.mapMissingMiniBlocks { - // if epochOfMissingMb <= t.metaEpoch { - // delete(t.mapMissingMiniBlocks, hash) - // } - //} - if len(t.mapMissingMiniBlocks) == 0 { t.mutMissingMiniBlocks.RUnlock() continue @@ -345,28 +338,19 @@ func (t *trigger) requestMissingValidatorsInfo(ctx context.Context) { } t.mutMissingValidatorsInfo.RLock() - - //for hash, epochOfMissingValidatorInfo := range t.mapMissingValidatorsInfo { - // if epochOfMissingValidatorInfo <= t.metaEpoch { - // delete(t.mapMissingValidatorsInfo, hash) - // } - //} - if len(t.mapMissingValidatorsInfo) == 0 { t.mutMissingValidatorsInfo.RUnlock() continue } - //var requestWithEpoch uint32 missingValidatorsInfo := make([][]byte, 0, len(t.mapMissingValidatorsInfo)) for hash, epoch := range t.mapMissingValidatorsInfo { - //requestWithEpoch = epoch missingValidatorsInfo = append(missingValidatorsInfo, []byte(hash)) log.Debug("trigger.requestMissingValidatorsInfo", "epoch", epoch, "hash", []byte(hash)) } t.mutMissingValidatorsInfo.RUnlock() - //go t.requestHandler.RequestValidatorsInfo(missingValidatorsInfo, requestWithEpoch) + //TODO: Analyze if it works also with: go t.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) for _, missingValidatorInfo := range missingValidatorsInfo { go func(validatorInfo []byte) { t.requestHandler.RequestValidatorInfo(validatorInfo) @@ -758,7 +742,7 @@ func (t *trigger) checkIfTriggerCanBeActivated(hash string, metaHdr data.HeaderH } if metaHdr.GetEpoch() >= t.refactorPeersMiniBlocksEnableEpoch { - missingValidatorsInfoHashes, validatorsInfo, err := t.peerMiniBlocksSyncer.SyncValidatorsInfo(blockBody, metaHdr.GetEpoch()) + missingValidatorsInfoHashes, validatorsInfo, err := 
t.peerMiniBlocksSyncer.SyncValidatorsInfo(blockBody) if err != nil { t.addMissingValidatorsInfo(metaHdr.GetEpoch(), missingValidatorsInfoHashes) log.Debug("checkIfTriggerCanBeActivated.SyncValidatorsInfo", "num missing validators info", len(missingValidatorsInfoHashes), "error", err) diff --git a/genesis/process/disabled/requestHandler.go b/genesis/process/disabled/requestHandler.go index 91bd54d5347..64f110205ac 100644 --- a/genesis/process/disabled/requestHandler.go +++ b/genesis/process/disabled/requestHandler.go @@ -91,7 +91,7 @@ func (r *RequestHandler) RequestValidatorInfo(_ []byte) { } // RequestValidatorsInfo does nothing -func (r *RequestHandler) RequestValidatorsInfo(_ [][]byte, _ uint32) { +func (r *RequestHandler) RequestValidatorsInfo(_ [][]byte) { } // IsInterfaceNil returns true if there is no value under the interface diff --git a/p2p/libp2p/netMessenger.go b/p2p/libp2p/netMessenger.go index 51a97258cbb..5128a2ccd58 100644 --- a/p2p/libp2p/netMessenger.go +++ b/p2p/libp2p/netMessenger.go @@ -979,8 +979,7 @@ func (netMes *networkMessenger) pubsubCallback(topicProcs *topicProcessors, topi for index, handler := range handlers { err = handler.ProcessReceivedMessage(msg, fromConnectedPeer) if err != nil { - //TODO: Set the log level on Trace - log.Debug("p2p validator", + log.Trace("p2p validator", "error", err.Error(), "topic", topic, "originator", p2p.MessageOriginatorPid(msg), @@ -1218,8 +1217,7 @@ func (netMes *networkMessenger) directMessageHandler(message *pubsub.Message, fr for index, handler := range handlers { errProcess := handler.ProcessReceivedMessage(msg, fromConnectedPeer) if errProcess != nil { - //TODO: Set the log level on Trace - log.Debug("p2p validator", + log.Trace("p2p validator", "error", errProcess.Error(), "topic", msg.Topic(), "originator", p2p.MessageOriginatorPid(msg), diff --git a/process/block/metablock.go b/process/block/metablock.go index 4cac7afacf6..650d70e6a5d 100644 --- a/process/block/metablock.go +++ 
b/process/block/metablock.go @@ -2431,22 +2431,14 @@ func (mp *metaProcessor) getAllMarshalledTxs(body *block.Body) map[string][][]by for topic, marshalledTxs := range marshalledRewardsTxs { allMarshalledTxs[topic] = append(allMarshalledTxs[topic], marshalledTxs...) - //TODO: Set the log level on Trace - log.Debug("metaProcessor.getAllMarshalledTxs", "topic", topic, "num rewards txs", len(marshalledTxs)) + log.Trace("metaProcessor.getAllMarshalledTxs", "topic", topic, "num rewards txs", len(marshalledTxs)) } - //TODO: Set the log level on Trace - log.Debug("metaProcessor.getAllMarshalledTxs", "num rewards txs", len(marshalledRewardsTxs)) - for topic, marshalledTxs := range marshalledValidatorInfoTxs { allMarshalledTxs[topic] = append(allMarshalledTxs[topic], marshalledTxs...) - //TODO: Set the log level on Trace - log.Debug("metaProcessor.getAllMarshalledTxs", "topic", topic, "num validator info txs", len(marshalledTxs)) + log.Trace("metaProcessor.getAllMarshalledTxs", "topic", topic, "num validator info txs", len(marshalledTxs)) } - //TODO: Set the log level on Trace - log.Debug("metaProcessor.getAllMarshalledTxs", "num validator info txs", len(marshalledValidatorInfoTxs)) - return allMarshalledTxs } diff --git a/process/block/preprocess/validatorInfoPreProcessor.go b/process/block/preprocess/validatorInfoPreProcessor.go index e83f5812cab..2d5189e90d9 100644 --- a/process/block/preprocess/validatorInfoPreProcessor.go +++ b/process/block/preprocess/validatorInfoPreProcessor.go @@ -22,7 +22,7 @@ var _ process.PreProcessor = (*validatorInfoPreprocessor)(nil) type validatorInfoPreprocessor struct { *basePreProcess chReceivedAllValidatorsInfo chan bool - onRequestValidatorsInfo func(txHashes [][]byte, epoch uint32) + onRequestValidatorsInfo func(txHashes [][]byte) validatorsInfoForBlock txsForBlock validatorsInfoPool dataRetriever.ShardedDataCacherNotifier storage dataRetriever.StorageService @@ -37,7 +37,7 @@ func NewValidatorInfoPreprocessor( blockSizeComputation 
BlockSizeComputationHandler, validatorsInfoPool dataRetriever.ShardedDataCacherNotifier, store dataRetriever.StorageService, - onRequestValidatorsInfo func(txHashes [][]byte, epoch uint32), + onRequestValidatorsInfo func(txHashes [][]byte), epochNotifier process.EpochNotifier, refactorPeersMiniBlocksEnableEpoch uint32, ) (*validatorInfoPreprocessor, error) { @@ -192,8 +192,7 @@ func (vip *validatorInfoPreprocessor) receivedValidatorInfoTransaction(txHash [] return } - //TODO: Set the log level on Trace - log.Debug("validatorInfoPreprocessor.receivedValidatorInfoTransaction", "tx hash", txHash, "pk", validatorInfo.PublicKey) + log.Trace("validatorInfoPreprocessor.receivedValidatorInfoTransaction", "tx hash", txHash, "pk", validatorInfo.PublicKey) } // CreateBlockStarted cleans the local cache map for processed/created validators info at this round diff --git a/process/block/preprocess/validatorInfoPreProcessor_test.go b/process/block/preprocess/validatorInfoPreProcessor_test.go index 49ad21ab4a6..0eec9851f1b 100644 --- a/process/block/preprocess/validatorInfoPreProcessor_test.go +++ b/process/block/preprocess/validatorInfoPreProcessor_test.go @@ -23,7 +23,7 @@ func TestNewValidatorInfoPreprocessor_NilHasherShouldErr(t *testing.T) { &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, - func(txHashes [][]byte, epoch uint32) {}, + func(txHashes [][]byte) {}, &epochNotifier.EpochNotifierStub{}, 0, ) @@ -42,7 +42,7 @@ func TestNewValidatorInfoPreprocessor_NilMarshalizerShouldErr(t *testing.T) { &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, - func(txHashes [][]byte, epoch uint32) {}, + func(txHashes [][]byte) {}, &epochNotifier.EpochNotifierStub{}, 0, ) @@ -61,7 +61,7 @@ func TestNewValidatorInfoPreprocessor_NilBlockSizeComputationHandlerShouldErr(t nil, tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, - func(txHashes [][]byte, epoch uint32) {}, + func(txHashes [][]byte) {}, 
&epochNotifier.EpochNotifierStub{}, 0, ) @@ -79,7 +79,7 @@ func TestNewValidatorInfoPreprocessor_NilValidatorInfoPoolShouldErr(t *testing.T &testscommon.BlockSizeComputationStub{}, nil, &mock.ChainStorerMock{}, - func(txHashes [][]byte, epoch uint32) {}, + func(txHashes [][]byte) {}, &epochNotifier.EpochNotifierStub{}, 0, ) @@ -98,7 +98,7 @@ func TestNewValidatorInfoPreprocessor_NilStoreShouldErr(t *testing.T) { &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), nil, - func(txHashes [][]byte, epoch uint32) {}, + func(txHashes [][]byte) {}, &epochNotifier.EpochNotifierStub{}, 0, ) @@ -136,7 +136,7 @@ func TestNewValidatorInfoPreprocessor_NilEpochNotifierShouldErr(t *testing.T) { &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, - func(txHashes [][]byte, epoch uint32) {}, + func(txHashes [][]byte) {}, nil, 0, ) @@ -155,7 +155,7 @@ func TestNewValidatorInfoPreprocessor_OkValsShouldWork(t *testing.T) { &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, - func(txHashes [][]byte, epoch uint32) {}, + func(txHashes [][]byte) {}, &epochNotifier.EpochNotifierStub{}, 0, ) @@ -173,7 +173,7 @@ func TestNewValidatorInfoPreprocessor_CreateMarshalizedDataShouldWork(t *testing &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, - func(txHashes [][]byte, epoch uint32) {}, + func(txHashes [][]byte) {}, &epochNotifier.EpochNotifierStub{}, 0, ) @@ -195,7 +195,7 @@ func TestNewValidatorInfoPreprocessor_ProcessMiniBlockInvalidMiniBlockTypeShould &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, - func(txHashes [][]byte, epoch uint32) {}, + func(txHashes [][]byte) {}, &epochNotifier.EpochNotifierStub{}, 0, ) @@ -226,7 +226,7 @@ func TestNewValidatorInfoPreprocessor_ProcessMiniBlockShouldWork(t *testing.T) { &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, - func(txHashes [][]byte, epoch 
uint32) {}, + func(txHashes [][]byte) {}, &epochNotifier.EpochNotifierStub{}, 0, ) @@ -257,7 +257,7 @@ func TestNewValidatorInfoPreprocessor_ProcessMiniBlockNotFromMeta(t *testing.T) &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, - func(txHashes [][]byte, epoch uint32) {}, + func(txHashes [][]byte) {}, &epochNotifier.EpochNotifierStub{}, 0, ) @@ -292,7 +292,7 @@ func TestNewValidatorInfoPreprocessor_RestorePeerBlockIntoPools(t *testing.T) { blockSizeComputation, tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, - func(txHashes [][]byte, epoch uint32) {}, + func(txHashes [][]byte) {}, &epochNotifier.EpochNotifierStub{}, 0, ) @@ -339,7 +339,7 @@ func TestNewValidatorInfoPreprocessor_RestoreOtherBlockTypeIntoPoolsShouldNotRes blockSizeComputation, tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, - func(txHashes [][]byte, epoch uint32) {}, + func(txHashes [][]byte) {}, &epochNotifier.EpochNotifierStub{}, 0, ) @@ -386,7 +386,7 @@ func TestNewValidatorInfoPreprocessor_RemovePeerBlockFromPool(t *testing.T) { blockSizeComputation, tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, - func(txHashes [][]byte, epoch uint32) {}, + func(txHashes [][]byte) {}, &epochNotifier.EpochNotifierStub{}, 0, ) @@ -433,7 +433,7 @@ func TestNewValidatorInfoPreprocessor_RemoveOtherBlockTypeFromPoolShouldNotRemov blockSizeComputation, tdp.ValidatorsInfo(), &mock.ChainStorerMock{}, - func(txHashes [][]byte, epoch uint32) {}, + func(txHashes [][]byte) {}, &epochNotifier.EpochNotifierStub{}, 0, ) diff --git a/process/interceptors/baseDataInterceptor.go b/process/interceptors/baseDataInterceptor.go index dfea6d70dbb..93c67ef1a4b 100644 --- a/process/interceptors/baseDataInterceptor.go +++ b/process/interceptors/baseDataInterceptor.go @@ -80,8 +80,7 @@ func (bdi *baseDataInterceptor) processInterceptedData(data process.InterceptedD err = bdi.processor.Save(data, msg.Peer(), bdi.topic) if err != nil { - //TODO: Set the log level on Trace - log.Debug("intercepted data 
can not be processed", + log.Trace("intercepted data can not be processed", "hash", data.Hash(), "type", data.Type(), "pid", p2p.MessageOriginatorPid(msg), diff --git a/process/interceptors/processor/validatorInfoInterceptorProcessor.go b/process/interceptors/processor/validatorInfoInterceptorProcessor.go index df4026a7b67..d3d70b6a70d 100644 --- a/process/interceptors/processor/validatorInfoInterceptorProcessor.go +++ b/process/interceptors/processor/validatorInfoInterceptorProcessor.go @@ -51,8 +51,7 @@ func (viip *validatorInfoInterceptorProcessor) Save(data process.InterceptedData validatorInfo := ivi.ValidatorInfo() hash := ivi.Hash() - //TODO: Set the log level on Trace - log.Debug("validatorInfoInterceptorProcessor.Save", "tx hash", hash, "publick key", validatorInfo.PublicKey) + log.Trace("validatorInfoInterceptorProcessor.Save", "tx hash", hash, "pk", validatorInfo.PublicKey) strCache := process.ShardCacherIdentifier(core.MetachainShardId, core.AllShardId) viip.validatorInfoPool.AddData(hash, validatorInfo, validatorInfo.Size(), strCache) diff --git a/process/interface.go b/process/interface.go index f6f21b62f41..63ba800f1f2 100644 --- a/process/interface.go +++ b/process/interface.go @@ -559,7 +559,7 @@ type RequestHandler interface { RequestPeerAuthenticationsChunk(destShardID uint32, chunkIndex uint32) RequestPeerAuthenticationsByHashes(destShardID uint32, hashes [][]byte) RequestValidatorInfo(hash []byte) - RequestValidatorsInfo(hashes [][]byte, epoch uint32) + RequestValidatorsInfo(hashes [][]byte) IsInterfaceNil() bool } @@ -1000,7 +1000,7 @@ type RatingsStepHandler interface { // ValidatorInfoSyncer defines the method needed for validatorInfoProcessing type ValidatorInfoSyncer interface { SyncMiniBlocks(headerHandler data.HeaderHandler) ([][]byte, data.BodyHandler, error) - SyncValidatorsInfo(bodyHandler data.BodyHandler, epoch uint32) ([][]byte, map[string]*state.ShardValidatorInfo, error) + SyncValidatorsInfo(bodyHandler data.BodyHandler) 
([][]byte, map[string]*state.ShardValidatorInfo, error) IsInterfaceNil() bool } diff --git a/testscommon/requestHandlerStub.go b/testscommon/requestHandlerStub.go index 251a43ffdab..3ff19202845 100644 --- a/testscommon/requestHandlerStub.go +++ b/testscommon/requestHandlerStub.go @@ -22,7 +22,7 @@ type RequestHandlerStub struct { RequestPeerAuthenticationsChunkCalled func(destShardID uint32, chunkIndex uint32) RequestPeerAuthenticationsByHashesCalled func(destShardID uint32, hashes [][]byte) RequestValidatorInfoCalled func(hash []byte) - RequestValidatorsInfoCalled func(hashes [][]byte, epoch uint32) + RequestValidatorsInfoCalled func(hashes [][]byte) } // SetNumPeersToQuery - @@ -178,9 +178,9 @@ func (rhs *RequestHandlerStub) RequestValidatorInfo(hash []byte) { } // RequestValidatorsInfo - -func (rhs *RequestHandlerStub) RequestValidatorsInfo(hashes [][]byte, epoch uint32) { +func (rhs *RequestHandlerStub) RequestValidatorsInfo(hashes [][]byte) { if rhs.RequestValidatorsInfoCalled != nil { - rhs.RequestValidatorsInfoCalled(hashes, epoch) + rhs.RequestValidatorsInfoCalled(hashes) } } diff --git a/update/sync/syncTransactions.go b/update/sync/syncTransactions.go index 4053b9a1c29..3026e6c10ca 100644 --- a/update/sync/syncTransactions.go +++ b/update/sync/syncTransactions.go @@ -114,7 +114,7 @@ func (ts *transactionsSync) SyncTransactionsFor(miniBlocks map[string]*block.Min ts.mapTxsToMiniBlocks[string(txHash)] = miniBlock log.Debug("transactionsSync.SyncTransactionsFor", "mb type", miniBlock.Type, "mb sender", miniBlock.SenderShardID, "mb receiver", miniBlock.ReceiverShardID, "tx hash needed", txHash) } - numRequestedTxs += ts.requestTransactionsFor(miniBlock, epoch) + numRequestedTxs += ts.requestTransactionsFor(miniBlock) } ts.mutPendingTx.Unlock() @@ -147,9 +147,9 @@ func (ts *transactionsSync) SyncTransactionsFor(miniBlocks map[string]*block.Min } } -func (ts *transactionsSync) requestTransactionsFor(miniBlock *block.MiniBlock, epoch uint32) int { +func (ts 
*transactionsSync) requestTransactionsFor(miniBlock *block.MiniBlock) int { if miniBlock.Type == block.PeerBlock { - return ts.requestTransactionsForPeerMiniBlock(miniBlock, epoch) + return ts.requestTransactionsForPeerMiniBlock(miniBlock) } return ts.requestTransactionsForNonPeerMiniBlock(miniBlock) @@ -197,8 +197,8 @@ func (ts *transactionsSync) requestTransactionsForNonPeerMiniBlock(miniBlock *bl return len(missingTxs) } -func (ts *transactionsSync) requestTransactionsForPeerMiniBlock(miniBlock *block.MiniBlock, epoch uint32) int { - missingTxs := make([][]byte, 0) +func (ts *transactionsSync) requestTransactionsForPeerMiniBlock(miniBlock *block.MiniBlock) int { + missingValidatorsInfo := make([][]byte, 0) for _, txHash := range miniBlock.TxHashes { if _, ok := ts.mapValidatorsInfo[string(txHash)]; ok { continue @@ -210,17 +210,22 @@ func (ts *transactionsSync) requestTransactionsForPeerMiniBlock(miniBlock *block continue } - missingTxs = append(missingTxs, txHash) + missingValidatorsInfo = append(missingValidatorsInfo, txHash) } - for _, txHash := range missingTxs { + for _, txHash := range missingValidatorsInfo { ts.mapTxsToMiniBlocks[string(txHash)] = miniBlock log.Debug("transactionsSync.requestTransactionsForPeerMiniBlock", "mb type", miniBlock.Type, "mb sender", miniBlock.SenderShardID, "mb receiver", miniBlock.ReceiverShardID, "tx hash missing", txHash) } - ts.requestHandler.RequestValidatorsInfo(missingTxs, epoch) + //TODO: Analyze if it works also with: go ts.requestHandler.RequestValidatorsInfo(missingTxs) + for _, missingValidatorInfo := range missingValidatorsInfo { + go func(validatorInfo []byte) { + ts.requestHandler.RequestValidatorInfo(validatorInfo) + }(missingValidatorInfo) + } - return len(missingTxs) + return len(missingValidatorsInfo) } func (ts *transactionsSync) receivedTransaction(txHash []byte, val interface{}) { diff --git a/update/sync/syncTransactions_test.go b/update/sync/syncTransactions_test.go index 8d816b229bd..e65f90e8ad3 100644 
--- a/update/sync/syncTransactions_test.go +++ b/update/sync/syncTransactions_test.go @@ -168,14 +168,14 @@ func TestSyncPendingTransactionsFor_requestTransactionsFor(t *testing.T) { SenderShardID: 0, ReceiverShardID: 0, } - numMissing := pendingTxsSyncer.requestTransactionsFor(mb, 0) + numMissing := pendingTxsSyncer.requestTransactionsFor(mb) require.Equal(t, 2, numMissing) require.Equal(t, 1, len(requests)) require.Equal(t, 2, requests[0]) requests = make(map[uint32]int) mb.SenderShardID = 1 - numMissing = pendingTxsSyncer.requestTransactionsFor(mb, 0) + numMissing = pendingTxsSyncer.requestTransactionsFor(mb) require.Equal(t, 2, numMissing) require.Equal(t, 2, len(requests)) require.Equal(t, 1, requests[0]) @@ -197,14 +197,14 @@ func TestSyncPendingTransactionsFor_requestTransactionsFor(t *testing.T) { SenderShardID: 0, ReceiverShardID: 0, } - numMissing := pendingTxsSyncer.requestTransactionsFor(mb, 0) + numMissing := pendingTxsSyncer.requestTransactionsFor(mb) require.Equal(t, 2, numMissing) require.Equal(t, 1, len(requests)) require.Equal(t, 2, requests[0]) requests = make(map[uint32]int) mb.SenderShardID = 1 - numMissing = pendingTxsSyncer.requestTransactionsFor(mb, 0) + numMissing = pendingTxsSyncer.requestTransactionsFor(mb) require.Equal(t, 2, numMissing) require.Equal(t, 2, len(requests)) require.Equal(t, 1, requests[0]) @@ -226,14 +226,14 @@ func TestSyncPendingTransactionsFor_requestTransactionsFor(t *testing.T) { SenderShardID: 0, ReceiverShardID: 0, } - numMissing := pendingTxsSyncer.requestTransactionsFor(mb, 0) + numMissing := pendingTxsSyncer.requestTransactionsFor(mb) require.Equal(t, 2, numMissing) require.Equal(t, 1, len(requests)) require.Equal(t, 2, requests[0]) requests = make(map[uint32]int) mb.SenderShardID = 1 - numMissing = pendingTxsSyncer.requestTransactionsFor(mb, 0) + numMissing = pendingTxsSyncer.requestTransactionsFor(mb) require.Equal(t, 2, numMissing) require.Equal(t, 2, len(requests)) require.Equal(t, 1, requests[0]) @@ -255,14 
+255,14 @@ func TestSyncPendingTransactionsFor_requestTransactionsFor(t *testing.T) { SenderShardID: 0, ReceiverShardID: 0, } - numMissing := pendingTxsSyncer.requestTransactionsFor(mb, 0) + numMissing := pendingTxsSyncer.requestTransactionsFor(mb) require.Equal(t, 2, numMissing) require.Equal(t, 1, len(requests)) require.Equal(t, 2, requests[0]) requests = make(map[uint32]int) mb.SenderShardID = 1 - numMissing = pendingTxsSyncer.requestTransactionsFor(mb, 0) + numMissing = pendingTxsSyncer.requestTransactionsFor(mb) require.Equal(t, 2, numMissing) require.Equal(t, 2, len(requests)) require.Equal(t, 1, requests[0]) From 611d338280ca4e739c2cf31d9c509e7b0b76fd1c Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Wed, 10 Aug 2022 21:53:40 +0300 Subject: [PATCH 46/70] * Rollback to older version * Request validators info by hashes array instead by hash one by one --- cmd/node/config/config.toml | 2 +- epochStart/shardchain/peerMiniBlocksSyncer.go | 13 +++++++------ epochStart/shardchain/trigger.go | 13 +++++++------ update/sync/syncTransactions.go | 13 +++++++------ 4 files changed, 22 insertions(+), 19 deletions(-) diff --git a/cmd/node/config/config.toml b/cmd/node/config/config.toml index fba1d159bd6..ca5bd14c9e8 100644 --- a/cmd/node/config/config.toml +++ b/cmd/node/config/config.toml @@ -447,7 +447,7 @@ Name = "ValidatorInfoPool" Capacity = 10000 SizeInBytes = 31457280 #30MB - Shards = 4 + Shards = 1 #PublicKeyPeerId represents the main cache used to map Elrond block signing public keys to their associated peer id's. 
[PublicKeyPeerId] diff --git a/epochStart/shardchain/peerMiniBlocksSyncer.go b/epochStart/shardchain/peerMiniBlocksSyncer.go index 237e37e92dd..8b82263c452 100644 --- a/epochStart/shardchain/peerMiniBlocksSyncer.go +++ b/epochStart/shardchain/peerMiniBlocksSyncer.go @@ -328,12 +328,13 @@ func (p *peerMiniBlockSyncer) retrieveMissingValidatorsInfo() ([][]byte, error) return nil, nil } - //TODO: Analyze if it works also with: go p.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) - for _, missingValidatorInfo := range missingValidatorsInfo { - go func(validatorInfo []byte) { - p.requestHandler.RequestValidatorInfo(validatorInfo) - }(missingValidatorInfo) - } + ////TODO: Analyze if it works also with: go p.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) + //for _, missingValidatorInfo := range missingValidatorsInfo { + // go func(validatorInfo []byte) { + // p.requestHandler.RequestValidatorInfo(validatorInfo) + // }(missingValidatorInfo) + //} + go p.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) select { case <-p.chRcvAllValidatorsInfo: diff --git a/epochStart/shardchain/trigger.go b/epochStart/shardchain/trigger.go index 38766deceed..1fce588c4db 100644 --- a/epochStart/shardchain/trigger.go +++ b/epochStart/shardchain/trigger.go @@ -350,12 +350,13 @@ func (t *trigger) requestMissingValidatorsInfo(ctx context.Context) { } t.mutMissingValidatorsInfo.RUnlock() - //TODO: Analyze if it works also with: go t.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) - for _, missingValidatorInfo := range missingValidatorsInfo { - go func(validatorInfo []byte) { - t.requestHandler.RequestValidatorInfo(validatorInfo) - }(missingValidatorInfo) - } + ////TODO: Analyze if it works also with: go t.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) + //for _, missingValidatorInfo := range missingValidatorsInfo { + // go func(validatorInfo []byte) { + // t.requestHandler.RequestValidatorInfo(validatorInfo) + // }(missingValidatorInfo) 
+ //} + go t.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) select { case <-ctx.Done(): diff --git a/update/sync/syncTransactions.go b/update/sync/syncTransactions.go index 3026e6c10ca..468a09ac724 100644 --- a/update/sync/syncTransactions.go +++ b/update/sync/syncTransactions.go @@ -218,12 +218,13 @@ func (ts *transactionsSync) requestTransactionsForPeerMiniBlock(miniBlock *block log.Debug("transactionsSync.requestTransactionsForPeerMiniBlock", "mb type", miniBlock.Type, "mb sender", miniBlock.SenderShardID, "mb receiver", miniBlock.ReceiverShardID, "tx hash missing", txHash) } - //TODO: Analyze if it works also with: go ts.requestHandler.RequestValidatorsInfo(missingTxs) - for _, missingValidatorInfo := range missingValidatorsInfo { - go func(validatorInfo []byte) { - ts.requestHandler.RequestValidatorInfo(validatorInfo) - }(missingValidatorInfo) - } + ////TODO: Analyze if it works also with: go ts.requestHandler.RequestValidatorsInfo(missingTxs) + //for _, missingValidatorInfo := range missingValidatorsInfo { + // go func(validatorInfo []byte) { + // ts.requestHandler.RequestValidatorInfo(validatorInfo) + // }(missingValidatorInfo) + //} + go ts.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) return len(missingValidatorsInfo) } From 2cad58c6e1768173bc16c331fe366137bb1f9b10 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Thu, 11 Aug 2022 00:37:18 +0300 Subject: [PATCH 47/70] * Changed request for validator info from hashes array to one by one hash in a loop --- epochStart/shardchain/peerMiniBlocksSyncer.go | 14 +++++++------- epochStart/shardchain/trigger.go | 14 +++++++------- update/sync/syncTransactions.go | 14 +++++++------- 3 files changed, 21 insertions(+), 21 deletions(-) diff --git a/epochStart/shardchain/peerMiniBlocksSyncer.go b/epochStart/shardchain/peerMiniBlocksSyncer.go index 8b82263c452..fd90ee5a140 100644 --- a/epochStart/shardchain/peerMiniBlocksSyncer.go +++ b/epochStart/shardchain/peerMiniBlocksSyncer.go @@ -328,13 
+328,13 @@ func (p *peerMiniBlockSyncer) retrieveMissingValidatorsInfo() ([][]byte, error) return nil, nil } - ////TODO: Analyze if it works also with: go p.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) - //for _, missingValidatorInfo := range missingValidatorsInfo { - // go func(validatorInfo []byte) { - // p.requestHandler.RequestValidatorInfo(validatorInfo) - // }(missingValidatorInfo) - //} - go p.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) + //TODO: Analyze if it works also with: go p.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) + for _, missingValidatorInfo := range missingValidatorsInfo { + go func(validatorInfo []byte) { + p.requestHandler.RequestValidatorInfo(validatorInfo) + }(missingValidatorInfo) + } + //go p.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) select { case <-p.chRcvAllValidatorsInfo: diff --git a/epochStart/shardchain/trigger.go b/epochStart/shardchain/trigger.go index 1fce588c4db..463f1a79cc1 100644 --- a/epochStart/shardchain/trigger.go +++ b/epochStart/shardchain/trigger.go @@ -350,13 +350,13 @@ func (t *trigger) requestMissingValidatorsInfo(ctx context.Context) { } t.mutMissingValidatorsInfo.RUnlock() - ////TODO: Analyze if it works also with: go t.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) - //for _, missingValidatorInfo := range missingValidatorsInfo { - // go func(validatorInfo []byte) { - // t.requestHandler.RequestValidatorInfo(validatorInfo) - // }(missingValidatorInfo) - //} - go t.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) + //TODO: Analyze if it works also with: go t.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) + for _, missingValidatorInfo := range missingValidatorsInfo { + go func(validatorInfo []byte) { + t.requestHandler.RequestValidatorInfo(validatorInfo) + }(missingValidatorInfo) + } + //go t.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) select { case <-ctx.Done(): diff --git 
a/update/sync/syncTransactions.go b/update/sync/syncTransactions.go index 468a09ac724..5bc54a21a0e 100644 --- a/update/sync/syncTransactions.go +++ b/update/sync/syncTransactions.go @@ -218,13 +218,13 @@ func (ts *transactionsSync) requestTransactionsForPeerMiniBlock(miniBlock *block log.Debug("transactionsSync.requestTransactionsForPeerMiniBlock", "mb type", miniBlock.Type, "mb sender", miniBlock.SenderShardID, "mb receiver", miniBlock.ReceiverShardID, "tx hash missing", txHash) } - ////TODO: Analyze if it works also with: go ts.requestHandler.RequestValidatorsInfo(missingTxs) - //for _, missingValidatorInfo := range missingValidatorsInfo { - // go func(validatorInfo []byte) { - // ts.requestHandler.RequestValidatorInfo(validatorInfo) - // }(missingValidatorInfo) - //} - go ts.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) + //TODO: Analyze if it works also with: go ts.requestHandler.RequestValidatorsInfo(missingTxs) + for _, missingValidatorInfo := range missingValidatorsInfo { + go func(validatorInfo []byte) { + ts.requestHandler.RequestValidatorInfo(validatorInfo) + }(missingValidatorInfo) + } + //go ts.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) return len(missingValidatorsInfo) } From b957be2740a9f9f548b3cf4e0d6bc7d4dfbe35a5 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Thu, 11 Aug 2022 11:05:27 +0300 Subject: [PATCH 48/70] * Merged development into feat/refactor-peers-mbs --- .../resolverscontainer/baseResolversContainerFactory.go | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/dataRetriever/factory/resolverscontainer/baseResolversContainerFactory.go b/dataRetriever/factory/resolverscontainer/baseResolversContainerFactory.go index c5678856ba1..859bd85f024 100644 --- a/dataRetriever/factory/resolverscontainer/baseResolversContainerFactory.go +++ b/dataRetriever/factory/resolverscontainer/baseResolversContainerFactory.go @@ -412,13 +412,18 @@ func (brcf *baseResolversContainerFactory) 
generateValidatorInfoResolver() error return err } + validatorInfoStorage, err := brcf.store.GetStorer(dataRetriever.UnsignedTransactionUnit) + if err != nil { + return err + } + arg := resolvers.ArgValidatorInfoResolver{ SenderResolver: resolverSender, Marshaller: brcf.marshalizer, AntifloodHandler: brcf.inputAntifloodHandler, Throttler: brcf.throttler, ValidatorInfoPool: brcf.dataPools.ValidatorsInfo(), - ValidatorInfoStorage: brcf.store.GetStorer(dataRetriever.UnsignedTransactionUnit), + ValidatorInfoStorage: validatorInfoStorage, DataPacker: brcf.dataPacker, IsFullHistoryNode: brcf.isFullHistoryNode, } From f6a9328015c5b34f34db79ce3f42a258170cc755 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Thu, 11 Aug 2022 15:03:50 +0300 Subject: [PATCH 49/70] * Reverted to 4 shards and requests with hash array --- cmd/node/config/config.toml | 2 +- epochStart/shardchain/peerMiniBlocksSyncer.go | 8 +------- epochStart/shardchain/trigger.go | 8 +------- update/sync/syncTransactions.go | 8 +------- 4 files changed, 4 insertions(+), 22 deletions(-) diff --git a/cmd/node/config/config.toml b/cmd/node/config/config.toml index ca5bd14c9e8..fba1d159bd6 100644 --- a/cmd/node/config/config.toml +++ b/cmd/node/config/config.toml @@ -447,7 +447,7 @@ Name = "ValidatorInfoPool" Capacity = 10000 SizeInBytes = 31457280 #30MB - Shards = 1 + Shards = 4 #PublicKeyPeerId represents the main cache used to map Elrond block signing public keys to their associated peer id's. 
[PublicKeyPeerId] diff --git a/epochStart/shardchain/peerMiniBlocksSyncer.go b/epochStart/shardchain/peerMiniBlocksSyncer.go index fd90ee5a140..31ef9ed20fa 100644 --- a/epochStart/shardchain/peerMiniBlocksSyncer.go +++ b/epochStart/shardchain/peerMiniBlocksSyncer.go @@ -328,13 +328,7 @@ func (p *peerMiniBlockSyncer) retrieveMissingValidatorsInfo() ([][]byte, error) return nil, nil } - //TODO: Analyze if it works also with: go p.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) - for _, missingValidatorInfo := range missingValidatorsInfo { - go func(validatorInfo []byte) { - p.requestHandler.RequestValidatorInfo(validatorInfo) - }(missingValidatorInfo) - } - //go p.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) + go p.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) select { case <-p.chRcvAllValidatorsInfo: diff --git a/epochStart/shardchain/trigger.go b/epochStart/shardchain/trigger.go index 463f1a79cc1..ea5d87dfa9a 100644 --- a/epochStart/shardchain/trigger.go +++ b/epochStart/shardchain/trigger.go @@ -350,13 +350,7 @@ func (t *trigger) requestMissingValidatorsInfo(ctx context.Context) { } t.mutMissingValidatorsInfo.RUnlock() - //TODO: Analyze if it works also with: go t.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) - for _, missingValidatorInfo := range missingValidatorsInfo { - go func(validatorInfo []byte) { - t.requestHandler.RequestValidatorInfo(validatorInfo) - }(missingValidatorInfo) - } - //go t.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) + go t.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) select { case <-ctx.Done(): diff --git a/update/sync/syncTransactions.go b/update/sync/syncTransactions.go index 5bc54a21a0e..f887e29ed9a 100644 --- a/update/sync/syncTransactions.go +++ b/update/sync/syncTransactions.go @@ -218,13 +218,7 @@ func (ts *transactionsSync) requestTransactionsForPeerMiniBlock(miniBlock *block log.Debug("transactionsSync.requestTransactionsForPeerMiniBlock", "mb 
type", miniBlock.Type, "mb sender", miniBlock.SenderShardID, "mb receiver", miniBlock.ReceiverShardID, "tx hash missing", txHash) } - //TODO: Analyze if it works also with: go ts.requestHandler.RequestValidatorsInfo(missingTxs) - for _, missingValidatorInfo := range missingValidatorsInfo { - go func(validatorInfo []byte) { - ts.requestHandler.RequestValidatorInfo(validatorInfo) - }(missingValidatorInfo) - } - //go ts.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) + go ts.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) return len(missingValidatorsInfo) } From 66bef2ac19d95dee57b51fd9e63b61455c91797b Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Tue, 16 Aug 2022 20:33:35 +0300 Subject: [PATCH 50/70] * Integrated EnableEpochHandler into feat/refactor-peers-mbs --- common/enablers/enableEpochsHandler.go | 6 ++ common/enablers/epochFlags.go | 7 ++ common/interface.go | 2 + epochStart/bootstrap/syncValidatorStatus.go | 32 +++--- epochStart/metachain/validators.go | 73 ++++++-------- epochStart/metachain/validators_test.go | 6 +- epochStart/shardchain/trigger.go | 99 ++++++++++--------- epochStart/shardchain/triggerRegistry_test.go | 1 + epochStart/shardchain/trigger_test.go | 12 +-- factory/blockProcessorCreator.go | 21 ++-- factory/processComponents.go | 30 +++--- genesis/process/shardGenesisBlockCreator.go | 1 - integrationTests/consensus/testInitializer.go | 4 +- .../multiShard/hardFork/hardFork_test.go | 11 +-- integrationTests/nodesCoordinatorFactory.go | 17 ++-- integrationTests/testProcessorNode.go | 79 ++++++++------- .../testProcessorNodeWithCoordinator.go | 4 +- .../testProcessorNodeWithMultisigner.go | 8 +- .../preprocess/validatorInfoPreProcessor.go | 46 +++------ .../validatorInfoPreProcessor_test.go | 83 +++++++--------- process/block/shardblock_test.go | 6 -- process/coordinator/process_test.go | 9 -- .../shard/preProcessorsContainerFactory.go | 4 +- .../preProcessorsContainerFactory_test.go | 25 ----- 
sharding/mock/enableEpochsHandlerMock.go | 14 ++- sharding/nodesCoordinator/errors.go | 3 - .../indexHashedNodesCoordinator.go | 4 +- .../indexHashedNodesCoordinator_test.go | 12 ++- testscommon/enableEpochsHandlerStub.go | 12 +++ update/factory/exportHandlerFactory.go | 30 +++--- 30 files changed, 314 insertions(+), 347 deletions(-) diff --git a/common/enablers/enableEpochsHandler.go b/common/enablers/enableEpochsHandler.go index 21e832e22d1..fe341299e31 100644 --- a/common/enablers/enableEpochsHandler.go +++ b/common/enablers/enableEpochsHandler.go @@ -109,6 +109,7 @@ func (handler *enableEpochsHandler) EpochConfirmed(epoch uint32, _ uint64) { handler.setFlagValue(epoch >= handler.enableEpochsConfig.RefactorContextEnableEpoch, handler.refactorContextFlag, "refactorContextFlag") handler.setFlagValue(epoch >= handler.enableEpochsConfig.CheckFunctionArgumentEnableEpoch, handler.checkFunctionArgumentFlag, "checkFunctionArgumentFlag") handler.setFlagValue(epoch >= handler.enableEpochsConfig.CheckExecuteOnReadOnlyEnableEpoch, handler.checkExecuteOnReadOnlyFlag, "checkExecuteOnReadOnlyFlag") + handler.setFlagValue(epoch >= handler.enableEpochsConfig.RefactorPeersMiniBlocksEnableEpoch, handler.refactorPeersMiniBlocksFlag, "refactorPeersMiniBlocksFlag") } func (handler *enableEpochsHandler) setFlagValue(value bool, flag *atomic.Flag, flagName string) { @@ -201,6 +202,11 @@ func (handler *enableEpochsHandler) MiniBlockPartialExecutionEnableEpoch() uint3 return handler.enableEpochsConfig.MiniBlockPartialExecutionEnableEpoch } +// RefactorPeersMiniBlocksEnableEpoch returns the epoch when refactor of peers mini blocks becomes active +func (handler *enableEpochsHandler) RefactorPeersMiniBlocksEnableEpoch() uint32 { + return handler.enableEpochsConfig.RefactorPeersMiniBlocksEnableEpoch +} + // IsInterfaceNil returns true if there is no value under the interface func (handler *enableEpochsHandler) IsInterfaceNil() bool { return handler == nil diff --git 
a/common/enablers/epochFlags.go b/common/enablers/epochFlags.go index 31fbfc09ccd..cfd9b7e0617 100644 --- a/common/enablers/epochFlags.go +++ b/common/enablers/epochFlags.go @@ -79,6 +79,7 @@ type epochFlagsHolder struct { refactorContextFlag *atomic.Flag checkFunctionArgumentFlag *atomic.Flag checkExecuteOnReadOnlyFlag *atomic.Flag + refactorPeersMiniBlocksFlag *atomic.Flag } func newEpochFlagsHolder() *epochFlagsHolder { @@ -159,6 +160,7 @@ func newEpochFlagsHolder() *epochFlagsHolder { refactorContextFlag: &atomic.Flag{}, checkFunctionArgumentFlag: &atomic.Flag{}, checkExecuteOnReadOnlyFlag: &atomic.Flag{}, + refactorPeersMiniBlocksFlag: &atomic.Flag{}, } } @@ -594,3 +596,8 @@ func (holder *epochFlagsHolder) IsTransferToMetaFlagEnabled() bool { func (holder *epochFlagsHolder) IsESDTNFTImprovementV1FlagEnabled() bool { return holder.esdtMultiTransferFlag.IsSet() } + +// IsRefactorPeersMiniBlocksFlagEnabled returns true if refactorPeersMiniBlocksFlag is enabled +func (holder *epochFlagsHolder) IsRefactorPeersMiniBlocksFlagEnabled() bool { + return holder.refactorPeersMiniBlocksFlag.IsSet() +} diff --git a/common/interface.go b/common/interface.go index 3c7053fc3ae..c7d2b109d5b 100644 --- a/common/interface.go +++ b/common/interface.go @@ -179,6 +179,7 @@ type EnableEpochsHandler interface { CheckExecuteReadOnlyEnableEpoch() uint32 StorageAPICostOptimizationEnableEpoch() uint32 MiniBlockPartialExecutionEnableEpoch() uint32 + RefactorPeersMiniBlocksEnableEpoch() uint32 IsSCDeployFlagEnabled() bool IsBuiltInFunctionsFlagEnabled() bool IsRelayedTransactionsFlagEnabled() bool @@ -264,6 +265,7 @@ type EnableEpochsHandler interface { IsCheckTransferFlagEnabled() bool IsTransferToMetaFlagEnabled() bool IsESDTNFTImprovementV1FlagEnabled() bool + IsRefactorPeersMiniBlocksFlagEnabled() bool IsInterfaceNil() bool } diff --git a/epochStart/bootstrap/syncValidatorStatus.go b/epochStart/bootstrap/syncValidatorStatus.go index e270ed24bba..973b5460998 100644 --- 
a/epochStart/bootstrap/syncValidatorStatus.go +++ b/epochStart/bootstrap/syncValidatorStatus.go @@ -25,15 +25,15 @@ import ( const consensusGroupCacheSize = 50 type syncValidatorStatus struct { - miniBlocksSyncer epochStart.PendingMiniBlocksSyncHandler - transactionsSyncer update.TransactionsSyncHandler - dataPool dataRetriever.PoolsHolder - marshalizer marshal.Marshalizer - requestHandler process.RequestHandler - nodeCoordinator StartInEpochNodesCoordinator - genesisNodesConfig sharding.GenesisNodesSetupHandler - memDB storage.Storer - refactorPeersMiniBlocksEnableEpoch uint32 + miniBlocksSyncer epochStart.PendingMiniBlocksSyncHandler + transactionsSyncer update.TransactionsSyncHandler + dataPool dataRetriever.PoolsHolder + marshalizer marshal.Marshalizer + requestHandler process.RequestHandler + nodeCoordinator StartInEpochNodesCoordinator + genesisNodesConfig sharding.GenesisNodesSetupHandler + memDB storage.Storer + enableEpochsHandler common.EnableEpochsHandler } // ArgsNewSyncValidatorStatus holds the arguments needed for creating a new validator status process component @@ -60,11 +60,11 @@ func NewSyncValidatorStatus(args ArgsNewSyncValidatorStatus) (*syncValidatorStat } s := &syncValidatorStatus{ - dataPool: args.DataPool, - marshalizer: args.Marshalizer, - requestHandler: args.RequestHandler, - genesisNodesConfig: args.GenesisNodesConfig, - refactorPeersMiniBlocksEnableEpoch: args.RefactorPeersMiniBlocksEnableEpoch, + dataPool: args.DataPool, + marshalizer: args.Marshalizer, + requestHandler: args.RequestHandler, + genesisNodesConfig: args.GenesisNodesConfig, + enableEpochsHandler: args.EnableEpochsHandler, } var err error @@ -82,7 +82,7 @@ func NewSyncValidatorStatus(args ArgsNewSyncValidatorStatus) (*syncValidatorStat syncTxsArgs := sync.ArgsNewTransactionsSyncer{ DataPools: s.dataPool, - Storages: dataRetriever.NewChainStorer(), + Storages: disabled.NewChainStorer(), Marshaller: s.marshalizer, RequestHandler: s.requestHandler, } @@ -226,7 +226,7 @@ func 
(s *syncValidatorStatus) getPeerBlockBodyForMeta( return nil, nil, err } - if metaBlock.GetEpoch() >= s.refactorPeersMiniBlocksEnableEpoch { + if metaBlock.GetEpoch() >= s.enableEpochsHandler.RefactorPeersMiniBlocksEnableEpoch() { s.transactionsSyncer.ClearFields() ctx, cancel = context.WithTimeout(context.Background(), time.Minute) err = s.transactionsSyncer.SyncTransactionsFor(peerMiniBlocks, metaBlock.GetEpoch(), ctx) diff --git a/epochStart/metachain/validators.go b/epochStart/metachain/validators.go index 5e05548521d..66dad3c330a 100644 --- a/epochStart/metachain/validators.go +++ b/epochStart/metachain/validators.go @@ -6,7 +6,6 @@ import ( "sync" "github.com/ElrondNetwork/elrond-go-core/core" - "github.com/ElrondNetwork/elrond-go-core/core/atomic" "github.com/ElrondNetwork/elrond-go-core/core/check" "github.com/ElrondNetwork/elrond-go-core/data" "github.com/ElrondNetwork/elrond-go-core/data/block" @@ -25,26 +24,24 @@ var _ process.EpochStartValidatorInfoCreator = (*validatorInfoCreator)(nil) // ArgsNewValidatorInfoCreator defines the arguments structure needed to create a new validatorInfo creator type ArgsNewValidatorInfoCreator struct { - ShardCoordinator sharding.Coordinator - ValidatorInfoStorage storage.Storer - MiniBlockStorage storage.Storer - Hasher hashing.Hasher - Marshalizer marshal.Marshalizer - DataPool dataRetriever.PoolsHolder - EpochNotifier process.EpochNotifier - RefactorPeersMiniBlocksEnableEpoch uint32 + ShardCoordinator sharding.Coordinator + ValidatorInfoStorage storage.Storer + MiniBlockStorage storage.Storer + Hasher hashing.Hasher + Marshalizer marshal.Marshalizer + DataPool dataRetriever.PoolsHolder + EnableEpochsHandler common.EnableEpochsHandler } type validatorInfoCreator struct { - shardCoordinator sharding.Coordinator - validatorInfoStorage storage.Storer - miniBlockStorage storage.Storer - hasher hashing.Hasher - marshalizer marshal.Marshalizer - dataPool dataRetriever.PoolsHolder - mutValidatorInfo sync.Mutex - 
refactorPeersMiniBlocksEnableEpoch uint32 - flagRefactorPeersMiniBlocks atomic.Flag + shardCoordinator sharding.Coordinator + validatorInfoStorage storage.Storer + miniBlockStorage storage.Storer + hasher hashing.Hasher + marshalizer marshal.Marshalizer + dataPool dataRetriever.PoolsHolder + mutValidatorInfo sync.Mutex + enableEpochsHandler common.EnableEpochsHandler } // NewValidatorInfoCreator creates a new validatorInfo creator object @@ -70,24 +67,20 @@ func NewValidatorInfoCreator(args ArgsNewValidatorInfoCreator) (*validatorInfoCr if check.IfNil(args.DataPool.CurrentEpochValidatorInfo()) { return nil, epochStart.ErrNilCurrentEpochValidatorsInfoPool } - if check.IfNil(args.EpochNotifier) { - return nil, epochStart.ErrNilEpochNotifier + if check.IfNil(args.EnableEpochsHandler) { + return nil, epochStart.ErrNilEnableEpochsHandler } vic := &validatorInfoCreator{ - shardCoordinator: args.ShardCoordinator, - hasher: args.Hasher, - marshalizer: args.Marshalizer, - validatorInfoStorage: args.ValidatorInfoStorage, - miniBlockStorage: args.MiniBlockStorage, - dataPool: args.DataPool, - refactorPeersMiniBlocksEnableEpoch: args.RefactorPeersMiniBlocksEnableEpoch, + shardCoordinator: args.ShardCoordinator, + hasher: args.Hasher, + marshalizer: args.Marshalizer, + validatorInfoStorage: args.ValidatorInfoStorage, + miniBlockStorage: args.MiniBlockStorage, + dataPool: args.DataPool, + enableEpochsHandler: args.EnableEpochsHandler, } - log.Debug("validatorInfoCreator: enable epoch for refactor peers mini blocks", "epoch", vic.refactorPeersMiniBlocksEnableEpoch) - - args.EpochNotifier.RegisterNotifyHandler(vic) - return vic, nil } @@ -161,7 +154,7 @@ func (vic *validatorInfoCreator) createMiniBlock(validatorsInfo []*state.Validat } func (vic *validatorInfoCreator) getShardValidatorInfoData(shardValidatorInfo *state.ShardValidatorInfo) ([]byte, error) { - if vic.flagRefactorPeersMiniBlocks.IsSet() { + if vic.enableEpochsHandler.IsRefactorPeersMiniBlocksFlagEnabled() { 
shardValidatorInfoHash, err := core.CalculateHash(vic.marshalizer, vic.hasher, shardValidatorInfo) if err != nil { return nil, err @@ -249,7 +242,7 @@ func (vic *validatorInfoCreator) GetLocalValidatorInfoCache() epochStart.Validat // CreateMarshalledData creates the marshalled data to be sent to shards func (vic *validatorInfoCreator) CreateMarshalledData(body *block.Body) map[string][][]byte { - if !vic.flagRefactorPeersMiniBlocks.IsSet() { + if !vic.enableEpochsHandler.IsRefactorPeersMiniBlocksFlagEnabled() { return nil } @@ -331,7 +324,7 @@ func (vic *validatorInfoCreator) setMapShardValidatorInfo(miniBlock *block.MiniB } func (vic *validatorInfoCreator) getShardValidatorInfo(txHash []byte) (*state.ShardValidatorInfo, error) { - if vic.flagRefactorPeersMiniBlocks.IsSet() { + if vic.enableEpochsHandler.IsRefactorPeersMiniBlocksFlagEnabled() { validatorInfoCacher := vic.dataPool.CurrentEpochValidatorInfo() shardValidatorInfo, err := validatorInfoCacher.GetValidatorInfo(txHash) if err != nil { @@ -361,7 +354,7 @@ func (vic *validatorInfoCreator) SaveBlockDataToStorage(_ data.HeaderHandler, bo continue } - if vic.flagRefactorPeersMiniBlocks.IsSet() { + if vic.enableEpochsHandler.IsRefactorPeersMiniBlocksFlagEnabled() { vic.saveValidatorInfo(miniBlock) } @@ -402,7 +395,7 @@ func (vic *validatorInfoCreator) DeleteBlockDataFromStorage(metaBlock data.Heade return } - if vic.flagRefactorPeersMiniBlocks.IsSet() { + if vic.enableEpochsHandler.IsRefactorPeersMiniBlocksFlagEnabled() { vic.removeValidatorInfoFromStorage(body) } @@ -431,7 +424,7 @@ func (vic *validatorInfoCreator) RemoveBlockDataFromPools(metaBlock data.HeaderH return } - if vic.flagRefactorPeersMiniBlocks.IsSet() { + if vic.enableEpochsHandler.IsRefactorPeersMiniBlocksFlagEnabled() { vic.removeValidatorInfoFromPool(body) } @@ -476,9 +469,3 @@ func (vic *validatorInfoCreator) clean() { func (vic *validatorInfoCreator) IsInterfaceNil() bool { return vic == nil } - -// EpochConfirmed is called whenever a new 
epoch is confirmed -func (vic *validatorInfoCreator) EpochConfirmed(epoch uint32, _ uint64) { - vic.flagRefactorPeersMiniBlocks.SetValue(epoch >= vic.refactorPeersMiniBlocksEnableEpoch) - log.Debug("validatorInfoCreator: refactor peers mini blocks", "enabled", vic.flagRefactorPeersMiniBlocks.IsSet()) -} diff --git a/epochStart/metachain/validators_test.go b/epochStart/metachain/validators_test.go index 6d0891bbe83..cd6f747d62d 100644 --- a/epochStart/metachain/validators_test.go +++ b/epochStart/metachain/validators_test.go @@ -18,7 +18,6 @@ import ( "github.com/ElrondNetwork/elrond-go/storage" "github.com/ElrondNetwork/elrond-go/testscommon" dataRetrieverMock "github.com/ElrondNetwork/elrond-go/testscommon/dataRetriever" - "github.com/ElrondNetwork/elrond-go/testscommon/epochNotifier" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/stretchr/testify/require" @@ -130,8 +129,9 @@ func createMockEpochValidatorInfoCreatorsArguments() ArgsNewValidatorInfoCreator return &validatorInfoCacherMock.ValidatorInfoCacherMock{} }, }, - EpochNotifier: &epochNotifier.EpochNotifierStub{}, - RefactorPeersMiniBlocksEnableEpoch: 0, + EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + }, } return argsNewEpochEconomics } diff --git a/epochStart/shardchain/trigger.go b/epochStart/shardchain/trigger.go index f844b2aaa8d..1522c153368 100644 --- a/epochStart/shardchain/trigger.go +++ b/epochStart/shardchain/trigger.go @@ -53,11 +53,11 @@ type ArgsShardEpochStartTrigger struct { PeerMiniBlocksSyncer process.ValidatorInfoSyncer RoundHandler process.RoundHandler AppStatusHandler core.AppStatusHandler + EnableEpochsHandler common.EnableEpochsHandler - Epoch uint32 - Validity uint64 - Finality uint64 - RefactorPeersMiniBlocksEnableEpoch uint32 + Epoch uint32 + Validity uint64 + Finality uint64 } type trigger struct { 
@@ -104,14 +104,14 @@ type trigger struct { peerMiniBlocksSyncer process.ValidatorInfoSyncer - appStatusHandler core.AppStatusHandler + appStatusHandler core.AppStatusHandler + enableEpochsHandler common.EnableEpochsHandler - mapMissingMiniBlocks map[string]uint32 - mapMissingValidatorsInfo map[string]uint32 - mutMissingMiniBlocks sync.RWMutex - mutMissingValidatorsInfo sync.RWMutex - cancelFunc func() - refactorPeersMiniBlocksEnableEpoch uint32 + mapMissingMiniBlocks map[string]uint32 + mapMissingValidatorsInfo map[string]uint32 + mutMissingMiniBlocks sync.RWMutex + mutMissingValidatorsInfo sync.RWMutex + cancelFunc func() } type metaInfo struct { @@ -191,6 +191,9 @@ func NewEpochStartTrigger(args *ArgsShardEpochStartTrigger) (*trigger, error) { if check.IfNil(args.AppStatusHandler) { return nil, epochStart.ErrNilStatusHandler } + if check.IfNil(args.EnableEpochsHandler) { + return nil, epochStart.ErrNilEnableEpochsHandler + } metaHdrStorage, err := args.Storage.GetStorer(dataRetriever.MetaBlockUnit) if err != nil { @@ -215,42 +218,42 @@ func NewEpochStartTrigger(args *ArgsShardEpochStartTrigger) (*trigger, error) { trigggerStateKey := common.TriggerRegistryInitialKeyPrefix + fmt.Sprintf("%d", args.Epoch) t := &trigger{ - triggerStateKey: []byte(trigggerStateKey), - epoch: args.Epoch, - metaEpoch: args.Epoch, - currentRoundIndex: 0, - epochStartRound: 0, - epochFinalityAttestingRound: 0, - isEpochStart: false, - validity: args.Validity, - finality: args.Finality, - newEpochHdrReceived: false, - mutTrigger: sync.RWMutex{}, - mapHashHdr: make(map[string]data.HeaderHandler), - mapNonceHashes: make(map[uint64][]string), - mapEpochStartHdrs: make(map[string]data.HeaderHandler), - mapFinalizedEpochs: make(map[uint32]string), - headersPool: args.DataPool.Headers(), - miniBlocksPool: args.DataPool.MiniBlocks(), - validatorInfoPool: args.DataPool.ValidatorsInfo(), - currentEpochValidatorInfoPool: args.DataPool.CurrentEpochValidatorInfo(), - metaHdrStorage: metaHdrStorage, 
- shardHdrStorage: shardHdrStorage, - triggerStorage: triggerStorage, - metaNonceHdrStorage: metaHdrNoncesStorage, - uint64Converter: args.Uint64Converter, - marshaller: args.Marshalizer, - hasher: args.Hasher, - headerValidator: args.HeaderValidator, - requestHandler: args.RequestHandler, - epochMetaBlockHash: nil, - epochStartNotifier: args.EpochStartNotifier, - epochStartMeta: &block.MetaBlock{}, - epochStartShardHeader: &block.Header{}, - peerMiniBlocksSyncer: args.PeerMiniBlocksSyncer, - appStatusHandler: args.AppStatusHandler, - roundHandler: args.RoundHandler, - refactorPeersMiniBlocksEnableEpoch: args.RefactorPeersMiniBlocksEnableEpoch, + triggerStateKey: []byte(trigggerStateKey), + epoch: args.Epoch, + metaEpoch: args.Epoch, + currentRoundIndex: 0, + epochStartRound: 0, + epochFinalityAttestingRound: 0, + isEpochStart: false, + validity: args.Validity, + finality: args.Finality, + newEpochHdrReceived: false, + mutTrigger: sync.RWMutex{}, + mapHashHdr: make(map[string]data.HeaderHandler), + mapNonceHashes: make(map[uint64][]string), + mapEpochStartHdrs: make(map[string]data.HeaderHandler), + mapFinalizedEpochs: make(map[uint32]string), + headersPool: args.DataPool.Headers(), + miniBlocksPool: args.DataPool.MiniBlocks(), + validatorInfoPool: args.DataPool.ValidatorsInfo(), + currentEpochValidatorInfoPool: args.DataPool.CurrentEpochValidatorInfo(), + metaHdrStorage: metaHdrStorage, + shardHdrStorage: shardHdrStorage, + triggerStorage: triggerStorage, + metaNonceHdrStorage: metaHdrNoncesStorage, + uint64Converter: args.Uint64Converter, + marshaller: args.Marshalizer, + hasher: args.Hasher, + headerValidator: args.HeaderValidator, + requestHandler: args.RequestHandler, + epochMetaBlockHash: nil, + epochStartNotifier: args.EpochStartNotifier, + epochStartMeta: &block.MetaBlock{}, + epochStartShardHeader: &block.Header{}, + peerMiniBlocksSyncer: args.PeerMiniBlocksSyncer, + appStatusHandler: args.AppStatusHandler, + roundHandler: args.RoundHandler, + 
enableEpochsHandler: args.EnableEpochsHandler, } t.headersPool.RegisterHandler(t.receivedMetaBlock) @@ -736,7 +739,7 @@ func (t *trigger) checkIfTriggerCanBeActivated(hash string, metaHdr data.HeaderH return false, 0 } - if metaHdr.GetEpoch() >= t.refactorPeersMiniBlocksEnableEpoch { + if metaHdr.GetEpoch() >= t.enableEpochsHandler.RefactorPeersMiniBlocksEnableEpoch() { missingValidatorsInfoHashes, validatorsInfo, err := t.peerMiniBlocksSyncer.SyncValidatorsInfo(blockBody) if err != nil { t.addMissingValidatorsInfo(metaHdr.GetEpoch(), missingValidatorsInfoHashes) diff --git a/epochStart/shardchain/triggerRegistry_test.go b/epochStart/shardchain/triggerRegistry_test.go index 74911fcb6db..ed1cef599a4 100644 --- a/epochStart/shardchain/triggerRegistry_test.go +++ b/epochStart/shardchain/triggerRegistry_test.go @@ -54,6 +54,7 @@ func cloneTrigger(t *trigger) *trigger { rt.mapMissingMiniBlocks = t.mapMissingMiniBlocks rt.mapFinalizedEpochs = t.mapFinalizedEpochs rt.roundHandler = t.roundHandler + rt.enableEpochsHandler = t.enableEpochsHandler return rt } diff --git a/epochStart/shardchain/trigger_test.go b/epochStart/shardchain/trigger_test.go index f6905b55b93..90f89e489e2 100644 --- a/epochStart/shardchain/trigger_test.go +++ b/epochStart/shardchain/trigger_test.go @@ -58,12 +58,12 @@ func createMockShardEpochStartTriggerArguments() *ArgsShardEpochStartTrigger { }, nil }, }, - RequestHandler: &testscommon.RequestHandlerStub{}, - EpochStartNotifier: &mock.EpochStartNotifierStub{}, - PeerMiniBlocksSyncer: &mock.ValidatorInfoSyncerStub{}, - RoundHandler: &mock.RoundHandlerStub{}, - AppStatusHandler: &statusHandlerMock.AppStatusHandlerStub{}, - RefactorPeersMiniBlocksEnableEpoch: 0, + RequestHandler: &testscommon.RequestHandlerStub{}, + EpochStartNotifier: &mock.EpochStartNotifierStub{}, + PeerMiniBlocksSyncer: &mock.ValidatorInfoSyncerStub{}, + RoundHandler: &mock.RoundHandlerStub{}, + AppStatusHandler: &statusHandlerMock.AppStatusHandlerStub{}, + EnableEpochsHandler: 
&testscommon.EnableEpochsHandlerStub{}, } } diff --git a/factory/blockProcessorCreator.go b/factory/blockProcessorCreator.go index b5dcc07596c..fad4b66b797 100644 --- a/factory/blockProcessorCreator.go +++ b/factory/blockProcessorCreator.go @@ -323,7 +323,6 @@ func (pcf *processComponentsFactory) newShardBlockProcessor( txTypeHandler, scheduledTxsExecutionHandler, processedMiniBlocksTracker, - enableEpochs.RefactorPeersMiniBlocksEnableEpoch, ) if err != nil { return nil, err @@ -757,16 +756,18 @@ func (pcf *processComponentsFactory) newMetaBlockProcessor( return nil, err } - validatorInfoStorage := pcf.data.StorageService().GetStorer(dataRetriever.UnsignedTransactionUnit) + validatorInfoStorage, err := pcf.data.StorageService().GetStorer(dataRetriever.UnsignedTransactionUnit) + if err != nil { + return nil, err + } argsEpochValidatorInfo := metachainEpochStart.ArgsNewValidatorInfoCreator{ - ShardCoordinator: pcf.bootstrapComponents.ShardCoordinator(), - ValidatorInfoStorage: validatorInfoStorage, - MiniBlockStorage: miniBlockStorage, - Hasher: pcf.coreData.Hasher(), - Marshalizer: pcf.coreData.InternalMarshalizer(), - DataPool: pcf.data.Datapool(), - EpochNotifier: pcf.epochNotifier, - RefactorPeersMiniBlocksEnableEpoch: enableEpochs.RefactorPeersMiniBlocksEnableEpoch, + ShardCoordinator: pcf.bootstrapComponents.ShardCoordinator(), + ValidatorInfoStorage: validatorInfoStorage, + MiniBlockStorage: miniBlockStorage, + Hasher: pcf.coreData.Hasher(), + Marshalizer: pcf.coreData.InternalMarshalizer(), + DataPool: pcf.data.Datapool(), + EnableEpochsHandler: pcf.coreData.EnableEpochsHandler(), } validatorInfoCreator, err := metachainEpochStart.NewValidatorInfoCreator(argsEpochValidatorInfo) if err != nil { diff --git a/factory/processComponents.go b/factory/processComponents.go index 6076e2fc60d..3fb1b80847a 100644 --- a/factory/processComponents.go +++ b/factory/processComponents.go @@ -745,21 +745,21 @@ func (pcf *processComponentsFactory) 
newEpochStartTrigger(requestHandler epochSt } argEpochStart := &shardchain.ArgsShardEpochStartTrigger{ - Marshalizer: pcf.coreData.InternalMarshalizer(), - Hasher: pcf.coreData.Hasher(), - HeaderValidator: headerValidator, - Uint64Converter: pcf.coreData.Uint64ByteSliceConverter(), - DataPool: pcf.data.Datapool(), - Storage: pcf.data.StorageService(), - RequestHandler: requestHandler, - Epoch: pcf.bootstrapComponents.EpochBootstrapParams().Epoch(), - EpochStartNotifier: pcf.coreData.EpochStartNotifierWithConfirm(), - Validity: process.MetaBlockValidity, - Finality: process.BlockFinality, - PeerMiniBlocksSyncer: peerMiniBlockSyncer, - RoundHandler: pcf.coreData.RoundHandler(), - AppStatusHandler: pcf.coreData.StatusHandler(), - RefactorPeersMiniBlocksEnableEpoch: pcf.epochConfig.EnableEpochs.RefactorPeersMiniBlocksEnableEpoch, + Marshalizer: pcf.coreData.InternalMarshalizer(), + Hasher: pcf.coreData.Hasher(), + HeaderValidator: headerValidator, + Uint64Converter: pcf.coreData.Uint64ByteSliceConverter(), + DataPool: pcf.data.Datapool(), + Storage: pcf.data.StorageService(), + RequestHandler: requestHandler, + Epoch: pcf.bootstrapComponents.EpochBootstrapParams().Epoch(), + EpochStartNotifier: pcf.coreData.EpochStartNotifierWithConfirm(), + Validity: process.MetaBlockValidity, + Finality: process.BlockFinality, + PeerMiniBlocksSyncer: peerMiniBlockSyncer, + RoundHandler: pcf.coreData.RoundHandler(), + AppStatusHandler: pcf.coreData.StatusHandler(), + EnableEpochsHandler: pcf.coreData.EnableEpochsHandler(), } epochStartTrigger, err := shardchain.NewEpochStartTrigger(argEpochStart) if err != nil { diff --git a/genesis/process/shardGenesisBlockCreator.go b/genesis/process/shardGenesisBlockCreator.go index 95f6ce89ff3..ae48c9dd007 100644 --- a/genesis/process/shardGenesisBlockCreator.go +++ b/genesis/process/shardGenesisBlockCreator.go @@ -588,7 +588,6 @@ func createProcessorsForShardGenesisBlock(arg ArgsGenesisBlockCreator, enableEpo txTypeHandler, 
disabledScheduledTxsExecutionHandler, disabledProcessedMiniBlocksTracker, - enableEpochs.RefactorPeersMiniBlocksEnableEpoch, ) if err != nil { return nil, err diff --git a/integrationTests/consensus/testInitializer.go b/integrationTests/consensus/testInitializer.go index 0f87f2320b6..aa0fbdb2272 100644 --- a/integrationTests/consensus/testInitializer.go +++ b/integrationTests/consensus/testInitializer.go @@ -533,8 +533,8 @@ func createNodes( NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ - IsWaitingListFixFlagEnabledField: true, - IsRefactorPeersMiniBlocksEnabledField: true, + IsWaitingListFixFlagEnabledField: true, + IsRefactorPeersMiniBlocksFlagEnabledField: true, }, } nodesCoord, _ := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/integrationTests/multiShard/hardFork/hardFork_test.go b/integrationTests/multiShard/hardFork/hardFork_test.go index 2dc929f550f..68237e1f7df 100644 --- a/integrationTests/multiShard/hardFork/hardFork_test.go +++ b/integrationTests/multiShard/hardFork/hardFork_test.go @@ -616,12 +616,11 @@ func createHardForkExporter( NumResolveFailureThreshold: 3, DebugLineExpiration: 3, }, - MaxHardCapForMissingNodes: 500, - NumConcurrentTrieSyncers: 50, - TrieSyncerVersion: 2, - PeersRatingHandler: node.PeersRatingHandler, - CheckNodesOnDisk: false, - RefactorPeersMiniBlocksEnableEpoch: 0, + MaxHardCapForMissingNodes: 500, + NumConcurrentTrieSyncers: 50, + TrieSyncerVersion: 2, + PeersRatingHandler: node.PeersRatingHandler, + CheckNodesOnDisk: false, } exportHandler, err := factory.NewExportHandlerFactory(argsExportHandler) diff --git a/integrationTests/nodesCoordinatorFactory.go b/integrationTests/nodesCoordinatorFactory.go index 4cf45b4ebf2..fe78c2d9aba 100644 --- a/integrationTests/nodesCoordinatorFactory.go +++ b/integrationTests/nodesCoordinatorFactory.go @@ -48,7 +48,9 @@ func (tpn 
*IndexHashedNodesCoordinatorFactory) CreateNodesCoordinator(arg ArgInd Adaptivity: adaptivity, ShuffleBetweenShards: shuffleBetweenShards, MaxNodesEnableConfig: nil, - EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, + EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + }, } nodeShuffler, _ := nodesCoordinator.NewHashValidatorsShuffler(nodeShufflerArgs) argumentsNodesCoordinator := nodesCoordinator.ArgNodesCoordinator{ @@ -69,7 +71,9 @@ func (tpn *IndexHashedNodesCoordinatorFactory) CreateNodesCoordinator(arg ArgInd ChanStopNode: endProcess.GetDummyEndProcessChannel(), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, - EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, + EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + }, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) if err != nil { @@ -100,8 +104,9 @@ func (ihncrf *IndexHashedNodesCoordinatorWithRaterFactory) CreateNodesCoordinato ShuffleBetweenShards: shuffleBetweenShards, MaxNodesEnableConfig: nil, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ - IsWaitingListFixFlagEnabledField: true, - IsBalanceWaitingListsFlagEnabledField: true, + IsWaitingListFixFlagEnabledField: true, + IsBalanceWaitingListsFlagEnabledField: true, + IsRefactorPeersMiniBlocksFlagEnabledField: true, }, } nodeShuffler, _ := nodesCoordinator.NewHashValidatorsShuffler(shufflerArgs) @@ -124,8 +129,8 @@ func (ihncrf *IndexHashedNodesCoordinatorWithRaterFactory) CreateNodesCoordinato NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ - IsWaitingListFixFlagEnabledField: true, - IsRefactorPeersMiniBlocksEnabledField: true, + IsWaitingListFixFlagEnabledField: true, + IsRefactorPeersMiniBlocksFlagEnabledField: true, }, } 
diff --git a/integrationTests/testProcessorNode.go b/integrationTests/testProcessorNode.go index c1cf55ec1da..e84b8dc3366 100644 --- a/integrationTests/testProcessorNode.go +++ b/integrationTests/testProcessorNode.go @@ -1156,21 +1156,21 @@ func (tpn *TestProcessorNode) initInterceptors(heartbeatPk string) { } peerMiniBlockSyncer, _ := shardchain.NewPeerMiniBlockSyncer(argsPeerMiniBlocksSyncer) argsShardEpochStart := &shardchain.ArgsShardEpochStartTrigger{ - Marshalizer: TestMarshalizer, - Hasher: TestHasher, - HeaderValidator: tpn.HeaderValidator, - Uint64Converter: TestUint64Converter, - DataPool: tpn.DataPool, - Storage: tpn.Storage, - RequestHandler: tpn.RequestHandler, - Epoch: 0, - Validity: 1, - Finality: 1, - EpochStartNotifier: tpn.EpochStartNotifier, - PeerMiniBlocksSyncer: peerMiniBlockSyncer, - RoundHandler: tpn.RoundHandler, - AppStatusHandler: &statusHandlerMock.AppStatusHandlerStub{}, - RefactorPeersMiniBlocksEnableEpoch: 0, + Marshalizer: TestMarshalizer, + Hasher: TestHasher, + HeaderValidator: tpn.HeaderValidator, + Uint64Converter: TestUint64Converter, + DataPool: tpn.DataPool, + Storage: tpn.Storage, + RequestHandler: tpn.RequestHandler, + Epoch: 0, + Validity: 1, + Finality: 1, + EpochStartNotifier: tpn.EpochStartNotifier, + PeerMiniBlocksSyncer: peerMiniBlockSyncer, + RoundHandler: tpn.RoundHandler, + AppStatusHandler: &statusHandlerMock.AppStatusHandlerStub{}, + EnableEpochsHandler: tpn.EnableEpochsHandler, } epochStartTrigger, _ := shardchain.NewEpochStartTrigger(argsShardEpochStart) tpn.EpochStartTrigger = &shardchain.TestTrigger{} @@ -1513,7 +1513,6 @@ func (tpn *TestProcessorNode) initInnerProcessors(gasMap map[string]map[string]u txTypeHandler, scheduledTxsExecutionHandler, processedMiniBlocksTracker, - tpn.EnableEpochs.RefactorPeersMiniBlocksEnableEpoch, ) tpn.PreProcessorsContainer, _ = fact.Create() @@ -2052,16 +2051,15 @@ func (tpn *TestProcessorNode) initBlockProcessor(stateCheckpointModulus uint) { } epochStartRewards, _ := 
metachain.NewRewardsCreatorProxy(argsEpochRewards) - validatorInfoStorage := tpn.Storage.GetStorer(dataRetriever.UnsignedTransactionUnit) + validatorInfoStorage, _ := tpn.Storage.GetStorer(dataRetriever.UnsignedTransactionUnit) argsEpochValidatorInfo := metachain.ArgsNewValidatorInfoCreator{ - ShardCoordinator: tpn.ShardCoordinator, - ValidatorInfoStorage: validatorInfoStorage, - MiniBlockStorage: miniBlockStorage, - Hasher: TestHasher, - Marshalizer: TestMarshalizer, - DataPool: tpn.DataPool, - EpochNotifier: tpn.EpochNotifier, - RefactorPeersMiniBlocksEnableEpoch: tpn.EnableEpochs.RefactorPeersMiniBlocksEnableEpoch, + ShardCoordinator: tpn.ShardCoordinator, + ValidatorInfoStorage: validatorInfoStorage, + MiniBlockStorage: miniBlockStorage, + Hasher: TestHasher, + Marshalizer: TestMarshalizer, + DataPool: tpn.DataPool, + EnableEpochsHandler: tpn.EnableEpochsHandler, } epochStartValidatorInfo, _ := metachain.NewValidatorInfoCreator(argsEpochValidatorInfo) argsEpochSystemSC := metachain.ArgsNewEpochStartSystemSCProcessing{ @@ -2107,21 +2105,21 @@ func (tpn *TestProcessorNode) initBlockProcessor(stateCheckpointModulus uint) { } peerMiniBlocksSyncer, _ := shardchain.NewPeerMiniBlockSyncer(argsPeerMiniBlocksSyncer) argsShardEpochStart := &shardchain.ArgsShardEpochStartTrigger{ - Marshalizer: TestMarshalizer, - Hasher: TestHasher, - HeaderValidator: tpn.HeaderValidator, - Uint64Converter: TestUint64Converter, - DataPool: tpn.DataPool, - Storage: tpn.Storage, - RequestHandler: tpn.RequestHandler, - Epoch: 0, - Validity: 1, - Finality: 1, - EpochStartNotifier: tpn.EpochStartNotifier, - PeerMiniBlocksSyncer: peerMiniBlocksSyncer, - RoundHandler: tpn.RoundHandler, - AppStatusHandler: &statusHandlerMock.AppStatusHandlerStub{}, - RefactorPeersMiniBlocksEnableEpoch: 0, + Marshalizer: TestMarshalizer, + Hasher: TestHasher, + HeaderValidator: tpn.HeaderValidator, + Uint64Converter: TestUint64Converter, + DataPool: tpn.DataPool, + Storage: tpn.Storage, + RequestHandler: 
tpn.RequestHandler, + Epoch: 0, + Validity: 1, + Finality: 1, + EpochStartNotifier: tpn.EpochStartNotifier, + PeerMiniBlocksSyncer: peerMiniBlocksSyncer, + RoundHandler: tpn.RoundHandler, + AppStatusHandler: &statusHandlerMock.AppStatusHandlerStub{}, + EnableEpochsHandler: tpn.EnableEpochsHandler, } epochStartTrigger, _ := shardchain.NewEpochStartTrigger(argsShardEpochStart) tpn.EpochStartTrigger = &shardchain.TestTrigger{} @@ -2957,6 +2955,7 @@ func CreateEnableEpochsConfig() config.EnableEpochs { CheckCorrectTokenIDForTransferRoleEnableEpoch: UnreachableEpoch, HeartbeatDisableEpoch: UnreachableEpoch, MiniBlockPartialExecutionEnableEpoch: UnreachableEpoch, + RefactorPeersMiniBlocksEnableEpoch: UnreachableEpoch, } } diff --git a/integrationTests/testProcessorNodeWithCoordinator.go b/integrationTests/testProcessorNodeWithCoordinator.go index 0c08dc19cdd..bb834d9b4ba 100644 --- a/integrationTests/testProcessorNodeWithCoordinator.go +++ b/integrationTests/testProcessorNodeWithCoordinator.go @@ -73,7 +73,9 @@ func CreateProcessorNodesWithNodesCoordinator( ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, ChanStopNode: endProcess.GetDummyEndProcessChannel(), IsFullArchive: false, - EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, + EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + }, } nodesCoordinatorInstance, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/integrationTests/testProcessorNodeWithMultisigner.go b/integrationTests/testProcessorNodeWithMultisigner.go index 68f89112870..25527bc730d 100644 --- a/integrationTests/testProcessorNodeWithMultisigner.go +++ b/integrationTests/testProcessorNodeWithMultisigner.go @@ -425,7 +425,9 @@ func CreateNodesWithNodesCoordinatorAndHeaderSigVerifier( ChanStopNode: endProcess.GetDummyEndProcessChannel(), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, - 
EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, + EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + }, } nodesCoordinatorInstance, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) @@ -539,7 +541,9 @@ func CreateNodesWithNodesCoordinatorKeygenAndSingleSigner( ChanStopNode: endProcess.GetDummyEndProcessChannel(), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, - EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, + EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + }, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/process/block/preprocess/validatorInfoPreProcessor.go b/process/block/preprocess/validatorInfoPreProcessor.go index 2d5189e90d9..cff4abdffb0 100644 --- a/process/block/preprocess/validatorInfoPreProcessor.go +++ b/process/block/preprocess/validatorInfoPreProcessor.go @@ -1,10 +1,10 @@ package preprocess import ( + "github.com/ElrondNetwork/elrond-go/common" "time" "github.com/ElrondNetwork/elrond-go-core/core" - "github.com/ElrondNetwork/elrond-go-core/core/atomic" "github.com/ElrondNetwork/elrond-go-core/core/check" "github.com/ElrondNetwork/elrond-go-core/data" "github.com/ElrondNetwork/elrond-go-core/data/block" @@ -21,13 +21,12 @@ var _ process.PreProcessor = (*validatorInfoPreprocessor)(nil) type validatorInfoPreprocessor struct { *basePreProcess - chReceivedAllValidatorsInfo chan bool - onRequestValidatorsInfo func(txHashes [][]byte) - validatorsInfoForBlock txsForBlock - validatorsInfoPool dataRetriever.ShardedDataCacherNotifier - storage dataRetriever.StorageService - refactorPeersMiniBlocksEnableEpoch uint32 - flagRefactorPeersMiniBlocks atomic.Flag + chReceivedAllValidatorsInfo chan bool + onRequestValidatorsInfo func(txHashes [][]byte) + validatorsInfoForBlock txsForBlock + 
validatorsInfoPool dataRetriever.ShardedDataCacherNotifier + storage dataRetriever.StorageService + enableEpochsHandler common.EnableEpochsHandler } // NewValidatorInfoPreprocessor creates a new validatorInfo preprocessor object @@ -38,8 +37,7 @@ func NewValidatorInfoPreprocessor( validatorsInfoPool dataRetriever.ShardedDataCacherNotifier, store dataRetriever.StorageService, onRequestValidatorsInfo func(txHashes [][]byte), - epochNotifier process.EpochNotifier, - refactorPeersMiniBlocksEnableEpoch uint32, + enableEpochsHandler common.EnableEpochsHandler, ) (*validatorInfoPreprocessor, error) { if check.IfNil(hasher) { @@ -60,8 +58,8 @@ func NewValidatorInfoPreprocessor( if onRequestValidatorsInfo == nil { return nil, process.ErrNilRequestHandler } - if check.IfNil(epochNotifier) { - return nil, process.ErrNilEpochNotifier + if check.IfNil(enableEpochsHandler) { + return nil, process.ErrNilEnableEpochsHandler } bpp := &basePreProcess{ @@ -71,21 +69,17 @@ func NewValidatorInfoPreprocessor( } vip := &validatorInfoPreprocessor{ - basePreProcess: bpp, - storage: store, - validatorsInfoPool: validatorsInfoPool, - onRequestValidatorsInfo: onRequestValidatorsInfo, - refactorPeersMiniBlocksEnableEpoch: refactorPeersMiniBlocksEnableEpoch, + basePreProcess: bpp, + storage: store, + validatorsInfoPool: validatorsInfoPool, + onRequestValidatorsInfo: onRequestValidatorsInfo, + enableEpochsHandler: enableEpochsHandler, } vip.chReceivedAllValidatorsInfo = make(chan bool) vip.validatorsInfoPool.RegisterOnAdded(vip.receivedValidatorInfoTransaction) vip.validatorsInfoForBlock.txHashAndInfo = make(map[string]*txInfo) - log.Debug("validatorInfoPreprocessor: enable epoch for refactor peers mini blocks", "epoch", vip.refactorPeersMiniBlocksEnableEpoch) - - epochNotifier.RegisterNotifyHandler(vip) - return vip, nil } @@ -123,7 +117,7 @@ func (vip *validatorInfoPreprocessor) RestoreBlockDataIntoPools( continue } - if vip.flagRefactorPeersMiniBlocks.IsSet() { + if 
vip.enableEpochsHandler.IsRefactorPeersMiniBlocksFlagEnabled() { err := vip.restoreValidatorsInfo(miniBlock) if err != nil { return validatorsInfoRestored, err @@ -274,11 +268,3 @@ func (vip *validatorInfoPreprocessor) IsInterfaceNil() bool { func (vip *validatorInfoPreprocessor) isMiniBlockCorrect(mbType block.Type) bool { return mbType == block.PeerBlock } - -// EpochConfirmed is called whenever a new epoch is confirmed -func (vip *validatorInfoPreprocessor) EpochConfirmed(epoch uint32, timestamp uint64) { - vip.epochConfirmed(epoch, timestamp) - - vip.flagRefactorPeersMiniBlocks.SetValue(epoch >= vip.refactorPeersMiniBlocksEnableEpoch) - log.Debug("validatorInfoPreprocessor: refactor peers mini blocks", "enabled", vip.flagRefactorPeersMiniBlocks.IsSet()) -} diff --git a/process/block/preprocess/validatorInfoPreProcessor_test.go b/process/block/preprocess/validatorInfoPreProcessor_test.go index 0eec9851f1b..1068a2f1264 100644 --- a/process/block/preprocess/validatorInfoPreProcessor_test.go +++ b/process/block/preprocess/validatorInfoPreProcessor_test.go @@ -1,14 +1,13 @@ package preprocess import ( + "github.com/ElrondNetwork/elrond-go/testscommon/genericMocks" "testing" "github.com/ElrondNetwork/elrond-go-core/core" "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go/process" - "github.com/ElrondNetwork/elrond-go/process/mock" "github.com/ElrondNetwork/elrond-go/testscommon" - "github.com/ElrondNetwork/elrond-go/testscommon/epochNotifier" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" "github.com/stretchr/testify/assert" ) @@ -22,10 +21,9 @@ func TestNewValidatorInfoPreprocessor_NilHasherShouldErr(t *testing.T) { &testscommon.MarshalizerMock{}, &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), - &mock.ChainStorerMock{}, + genericMocks.NewChainStorerMock(0), func(txHashes [][]byte) {}, - &epochNotifier.EpochNotifierStub{}, - 0, + &testscommon.EnableEpochsHandlerStub{}, ) assert.Nil(t, rtp) @@ -41,10 
+39,9 @@ func TestNewValidatorInfoPreprocessor_NilMarshalizerShouldErr(t *testing.T) { nil, &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), - &mock.ChainStorerMock{}, + genericMocks.NewChainStorerMock(0), func(txHashes [][]byte) {}, - &epochNotifier.EpochNotifierStub{}, - 0, + &testscommon.EnableEpochsHandlerStub{}, ) assert.Nil(t, rtp) @@ -60,10 +57,9 @@ func TestNewValidatorInfoPreprocessor_NilBlockSizeComputationHandlerShouldErr(t &testscommon.MarshalizerMock{}, nil, tdp.ValidatorsInfo(), - &mock.ChainStorerMock{}, + genericMocks.NewChainStorerMock(0), func(txHashes [][]byte) {}, - &epochNotifier.EpochNotifierStub{}, - 0, + &testscommon.EnableEpochsHandlerStub{}, ) assert.Nil(t, rtp) @@ -78,10 +74,9 @@ func TestNewValidatorInfoPreprocessor_NilValidatorInfoPoolShouldErr(t *testing.T &testscommon.MarshalizerMock{}, &testscommon.BlockSizeComputationStub{}, nil, - &mock.ChainStorerMock{}, + genericMocks.NewChainStorerMock(0), func(txHashes [][]byte) {}, - &epochNotifier.EpochNotifierStub{}, - 0, + &testscommon.EnableEpochsHandlerStub{}, ) assert.Nil(t, rtp) @@ -99,8 +94,7 @@ func TestNewValidatorInfoPreprocessor_NilStoreShouldErr(t *testing.T) { tdp.ValidatorsInfo(), nil, func(txHashes [][]byte) {}, - &epochNotifier.EpochNotifierStub{}, - 0, + &testscommon.EnableEpochsHandlerStub{}, ) assert.Nil(t, rtp) @@ -116,17 +110,16 @@ func TestNewValidatorInfoPreprocessor_NilRequestHandlerShouldErr(t *testing.T) { &testscommon.MarshalizerMock{}, &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), - &mock.ChainStorerMock{}, + genericMocks.NewChainStorerMock(0), nil, - &epochNotifier.EpochNotifierStub{}, - 0, + &testscommon.EnableEpochsHandlerStub{}, ) assert.Nil(t, rtp) assert.Equal(t, process.ErrNilRequestHandler, err) } -func TestNewValidatorInfoPreprocessor_NilEpochNotifierShouldErr(t *testing.T) { +func TestNewValidatorInfoPreprocessor_NilEnableEpochHandlerShouldErr(t *testing.T) { t.Parallel() tdp := initDataPool() @@ -135,14 +128,13 @@ func 
TestNewValidatorInfoPreprocessor_NilEpochNotifierShouldErr(t *testing.T) { &testscommon.MarshalizerMock{}, &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), - &mock.ChainStorerMock{}, + genericMocks.NewChainStorerMock(0), func(txHashes [][]byte) {}, nil, - 0, ) assert.Nil(t, rtp) - assert.Equal(t, process.ErrNilEpochNotifier, err) + assert.Equal(t, process.ErrNilEnableEpochsHandler, err) } func TestNewValidatorInfoPreprocessor_OkValsShouldWork(t *testing.T) { @@ -154,10 +146,9 @@ func TestNewValidatorInfoPreprocessor_OkValsShouldWork(t *testing.T) { &testscommon.MarshalizerMock{}, &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), - &mock.ChainStorerMock{}, + genericMocks.NewChainStorerMock(0), func(txHashes [][]byte) {}, - &epochNotifier.EpochNotifierStub{}, - 0, + &testscommon.EnableEpochsHandlerStub{}, ) assert.Nil(t, err) assert.NotNil(t, rtp) @@ -172,10 +163,9 @@ func TestNewValidatorInfoPreprocessor_CreateMarshalizedDataShouldWork(t *testing &testscommon.MarshalizerMock{}, &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), - &mock.ChainStorerMock{}, + genericMocks.NewChainStorerMock(0), func(txHashes [][]byte) {}, - &epochNotifier.EpochNotifierStub{}, - 0, + &testscommon.EnableEpochsHandlerStub{}, ) hash := make([][]byte, 0) @@ -194,10 +184,9 @@ func TestNewValidatorInfoPreprocessor_ProcessMiniBlockInvalidMiniBlockTypeShould &testscommon.MarshalizerMock{}, &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), - &mock.ChainStorerMock{}, + genericMocks.NewChainStorerMock(0), func(txHashes [][]byte) {}, - &epochNotifier.EpochNotifierStub{}, - 0, + &testscommon.EnableEpochsHandlerStub{}, ) txHashes := make([][]byte, 0) @@ -225,10 +214,9 @@ func TestNewValidatorInfoPreprocessor_ProcessMiniBlockShouldWork(t *testing.T) { &testscommon.MarshalizerMock{}, &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), - &mock.ChainStorerMock{}, + genericMocks.NewChainStorerMock(0), func(txHashes [][]byte) {}, - 
&epochNotifier.EpochNotifierStub{}, - 0, + &testscommon.EnableEpochsHandlerStub{}, ) txHashes := make([][]byte, 0) @@ -256,10 +244,9 @@ func TestNewValidatorInfoPreprocessor_ProcessMiniBlockNotFromMeta(t *testing.T) &testscommon.MarshalizerMock{}, &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), - &mock.ChainStorerMock{}, + genericMocks.NewChainStorerMock(0), func(txHashes [][]byte) {}, - &epochNotifier.EpochNotifierStub{}, - 0, + &testscommon.EnableEpochsHandlerStub{}, ) txHashes := make([][]byte, 0) @@ -291,10 +278,9 @@ func TestNewValidatorInfoPreprocessor_RestorePeerBlockIntoPools(t *testing.T) { marshalizer, blockSizeComputation, tdp.ValidatorsInfo(), - &mock.ChainStorerMock{}, + genericMocks.NewChainStorerMock(0), func(txHashes [][]byte) {}, - &epochNotifier.EpochNotifierStub{}, - 0, + &testscommon.EnableEpochsHandlerStub{}, ) txHashes := [][]byte{[]byte("tx_hash1")} @@ -338,10 +324,9 @@ func TestNewValidatorInfoPreprocessor_RestoreOtherBlockTypeIntoPoolsShouldNotRes marshalizer, blockSizeComputation, tdp.ValidatorsInfo(), - &mock.ChainStorerMock{}, + genericMocks.NewChainStorerMock(0), func(txHashes [][]byte) {}, - &epochNotifier.EpochNotifierStub{}, - 0, + &testscommon.EnableEpochsHandlerStub{}, ) txHashes := [][]byte{[]byte("tx_hash1")} @@ -385,10 +370,9 @@ func TestNewValidatorInfoPreprocessor_RemovePeerBlockFromPool(t *testing.T) { marshalizer, blockSizeComputation, tdp.ValidatorsInfo(), - &mock.ChainStorerMock{}, + genericMocks.NewChainStorerMock(0), func(txHashes [][]byte) {}, - &epochNotifier.EpochNotifierStub{}, - 0, + &testscommon.EnableEpochsHandlerStub{}, ) txHashes := [][]byte{[]byte("tx_hash1")} @@ -432,10 +416,9 @@ func TestNewValidatorInfoPreprocessor_RemoveOtherBlockTypeFromPoolShouldNotRemov marshalizer, blockSizeComputation, tdp.ValidatorsInfo(), - &mock.ChainStorerMock{}, + genericMocks.NewChainStorerMock(0), func(txHashes [][]byte) {}, - &epochNotifier.EpochNotifierStub{}, - 0, + &testscommon.EnableEpochsHandlerStub{}, ) 
txHashes := [][]byte{[]byte("tx_hash1")} diff --git a/process/block/shardblock_test.go b/process/block/shardblock_test.go index 45d6b4a9574..fbf32f16e71 100644 --- a/process/block/shardblock_test.go +++ b/process/block/shardblock_test.go @@ -472,7 +472,6 @@ func TestShardProcessor_ProcessBlockWithInvalidTransactionShouldErr(t *testing.T &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) container, _ := factory.Create() @@ -694,7 +693,6 @@ func TestShardProcessor_ProcessBlockWithErrOnProcessBlockTransactionsCallShouldR &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) container, _ := factory.Create() @@ -2587,7 +2585,6 @@ func TestShardProcessor_MarshalizedDataToBroadcastShouldWork(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) container, _ := factory.Create() @@ -2696,7 +2693,6 @@ func TestShardProcessor_MarshalizedDataMarshalWithoutSuccess(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) container, _ := factory.Create() @@ -3089,7 +3085,6 @@ func TestShardProcessor_CreateMiniBlocksShouldWorkWithIntraShardTxs(t *testing.T &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) container, _ := factory.Create() @@ -3271,7 +3266,6 @@ func TestShardProcessor_RestoreBlockIntoPoolsShouldWork(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) container, _ := factory.Create() diff --git a/process/coordinator/process_test.go b/process/coordinator/process_test.go index e7d11a9c2f5..3381eb61f84 100644 --- a/process/coordinator/process_test.go +++ 
b/process/coordinator/process_test.go @@ -540,7 +540,6 @@ func createPreProcessorContainer() process.PreProcessorsContainer { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) container, _ := preFactory.Create() @@ -636,7 +635,6 @@ func createPreProcessorContainerWithDataPool( &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) container, _ := preFactory.Create() @@ -905,7 +903,6 @@ func TestTransactionCoordinator_CreateMbsAndProcessCrossShardTransactions(t *tes &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) container, _ := preFactory.Create() @@ -1090,7 +1087,6 @@ func TestTransactionCoordinator_CreateMbsAndProcessCrossShardTransactionsNilPreP &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) container, _ := preFactory.Create() @@ -1199,7 +1195,6 @@ func TestTransactionCoordinator_CreateMbsAndProcessTransactionsFromMeNothingToPr &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) container, _ := preFactory.Create() @@ -1737,7 +1732,6 @@ func TestTransactionCoordinator_ProcessBlockTransactionProcessTxError(t *testing &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) container, _ := preFactory.Create() @@ -1864,7 +1858,6 @@ func TestTransactionCoordinator_RequestMiniblocks(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) container, _ := preFactory.Create() @@ -2004,7 +1997,6 @@ func TestShardProcessor_ProcessMiniBlockCompleteWithOkTxsShouldExecuteThemAndNot &testscommon.TxTypeHandlerMock{}, 
&testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) container, _ := preFactory.Create() @@ -2145,7 +2137,6 @@ func TestShardProcessor_ProcessMiniBlockCompleteWithErrorWhileProcessShouldCallR &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) container, _ := preFactory.Create() diff --git a/process/factory/shard/preProcessorsContainerFactory.go b/process/factory/shard/preProcessorsContainerFactory.go index 5507070cbd2..d9b0a3af53b 100644 --- a/process/factory/shard/preProcessorsContainerFactory.go +++ b/process/factory/shard/preProcessorsContainerFactory.go @@ -64,7 +64,6 @@ func NewPreProcessorsContainerFactory( txTypeHandler process.TxTypeHandler, scheduledTxsExecutionHandler process.ScheduledTxsExecutionHandler, processedMiniBlocksTracker process.ProcessedMiniBlocksTracker, - refactorPeersMiniBlocksEnableEpoch uint32, ) (*preProcessorsContainerFactory, error) { if check.IfNil(shardCoordinator) { @@ -281,8 +280,7 @@ func (ppcm *preProcessorsContainerFactory) createValidatorInfoPreProcessor() (pr ppcm.dataPool.ValidatorsInfo(), ppcm.store, ppcm.requestHandler.RequestValidatorsInfo, - ppcm.epochNotifier, - ppcm.refactorPeersMiniBlocksEnableEpoch, + ppcm.enableEpochsHandler, ) return validatorInfoPreprocessor, err diff --git a/process/factory/shard/preProcessorsContainerFactory_test.go b/process/factory/shard/preProcessorsContainerFactory_test.go index 250ff0d811c..b57fc9e8409 100644 --- a/process/factory/shard/preProcessorsContainerFactory_test.go +++ b/process/factory/shard/preProcessorsContainerFactory_test.go @@ -43,7 +43,6 @@ func TestNewPreProcessorsContainerFactory_NilShardCoordinator(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) assert.Equal(t, process.ErrNilShardCoordinator, err) @@ -75,7 +74,6 @@ func 
TestNewPreProcessorsContainerFactory_NilStore(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) assert.Equal(t, process.ErrNilStore, err) @@ -107,7 +105,6 @@ func TestNewPreProcessorsContainerFactory_NilMarshalizer(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) assert.Equal(t, process.ErrNilMarshalizer, err) @@ -139,7 +136,6 @@ func TestNewPreProcessorsContainerFactory_NilHasher(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) assert.Equal(t, process.ErrNilHasher, err) @@ -171,7 +167,6 @@ func TestNewPreProcessorsContainerFactory_NilDataPool(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) assert.Equal(t, process.ErrNilDataPoolHolder, err) @@ -203,7 +198,6 @@ func TestNewPreProcessorsContainerFactory_NilAddrConv(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) assert.Equal(t, process.ErrNilPubkeyConverter, err) @@ -235,7 +229,6 @@ func TestNewPreProcessorsContainerFactory_NilAccounts(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) assert.Equal(t, process.ErrNilAccountsAdapter, err) @@ -267,7 +260,6 @@ func TestNewPreProcessorsContainerFactory_NilTxProcessor(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) assert.Equal(t, process.ErrNilTxProcessor, err) @@ -299,7 +291,6 @@ func TestNewPreProcessorsContainerFactory_NilSCProcessor(t *testing.T) { &testscommon.TxTypeHandlerMock{}, 
&testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) assert.Equal(t, process.ErrNilSmartContractProcessor, err) @@ -331,7 +322,6 @@ func TestNewPreProcessorsContainerFactory_NilSCR(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) assert.Equal(t, process.ErrNilSmartContractResultProcessor, err) @@ -363,7 +353,6 @@ func TestNewPreProcessorsContainerFactory_NilRewardTxProcessor(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) assert.Equal(t, process.ErrNilRewardsTxProcessor, err) @@ -395,7 +384,6 @@ func TestNewPreProcessorsContainerFactory_NilRequestHandler(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) assert.Equal(t, process.ErrNilRequestHandler, err) @@ -427,7 +415,6 @@ func TestNewPreProcessorsContainerFactory_NilFeeHandler(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) assert.Equal(t, process.ErrNilEconomicsFeeHandler, err) @@ -459,7 +446,6 @@ func TestNewPreProcessorsContainerFactory_NilGasHandler(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) assert.Equal(t, process.ErrNilGasHandler, err) @@ -491,7 +477,6 @@ func TestNewPreProcessorsContainerFactory_NilBlockTracker(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) assert.Equal(t, process.ErrNilBlockTracker, err) @@ -523,7 +508,6 @@ func TestNewPreProcessorsContainerFactory_NilBlockSizeComputationHandler(t *test &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, 
&testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) assert.Equal(t, process.ErrNilBlockSizeComputationHandler, err) @@ -555,7 +539,6 @@ func TestNewPreProcessorsContainerFactory_NilBalanceComputationHandler(t *testin &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) assert.Equal(t, process.ErrNilBalanceComputationHandler, err) @@ -587,7 +570,6 @@ func TestNewPreProcessorsContainerFactory_NilEnableEpochsHandler(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) assert.Equal(t, process.ErrNilEnableEpochsHandler, err) @@ -619,7 +601,6 @@ func TestNewPreProcessorsContainerFactory_NilTxTypeHandler(t *testing.T) { nil, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) assert.Equal(t, process.ErrNilTxTypeHandler, err) @@ -651,7 +632,6 @@ func TestNewPreProcessorsContainerFactory_NilScheduledTxsExecutionHandler(t *tes &testscommon.TxTypeHandlerMock{}, nil, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) assert.Equal(t, process.ErrNilScheduledTxsExecutionHandler, err) @@ -683,7 +663,6 @@ func TestNewPreProcessorsContainerFactory_NilProcessedMiniBlocksTracker(t *testi &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, nil, - 2, ) assert.Equal(t, process.ErrNilProcessedMiniBlocksTracker, err) @@ -715,7 +694,6 @@ func TestNewPreProcessorsContainerFactory(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) assert.Nil(t, err) @@ -752,7 +730,6 @@ func TestPreProcessorsContainerFactory_CreateErrTxPreproc(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) assert.Nil(t, err) @@ -795,7 +772,6 @@ func 
TestPreProcessorsContainerFactory_CreateErrScrPreproc(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) assert.Nil(t, err) @@ -841,7 +817,6 @@ func TestPreProcessorsContainerFactory_Create(t *testing.T) { &testscommon.TxTypeHandlerMock{}, &testscommon.ScheduledTxsExecutionStub{}, &testscommon.ProcessedMiniBlocksTrackerStub{}, - 2, ) assert.Nil(t, err) diff --git a/sharding/mock/enableEpochsHandlerMock.go b/sharding/mock/enableEpochsHandlerMock.go index 2a1c4f6f188..9dcf9108dc2 100644 --- a/sharding/mock/enableEpochsHandlerMock.go +++ b/sharding/mock/enableEpochsHandlerMock.go @@ -2,7 +2,9 @@ package mock // EnableEpochsHandlerMock - type EnableEpochsHandlerMock struct { - WaitingListFixEnableEpochField uint32 + WaitingListFixEnableEpochField uint32 + RefactorPeersMiniBlocksEnableEpochField uint32 + IsRefactorPeersMiniBlocksFlagEnabledField bool } // BlockGasAndFeesReCheckEnableEpoch returns 0 @@ -90,6 +92,11 @@ func (mock *EnableEpochsHandlerMock) MiniBlockPartialExecutionEnableEpoch() uint return 0 } +// RefactorPeersMiniBlocksEnableEpoch returns 0 +func (mock *EnableEpochsHandlerMock) RefactorPeersMiniBlocksEnableEpoch() uint32 { + return mock.RefactorPeersMiniBlocksEnableEpochField +} + // IsSCDeployFlagEnabled returns false func (mock *EnableEpochsHandlerMock) IsSCDeployFlagEnabled() bool { return false @@ -514,6 +521,11 @@ func (mock *EnableEpochsHandlerMock) IsESDTNFTImprovementV1FlagEnabled() bool { return false } +// IsRefactorPeersMiniBlocksFlagEnabled returns false +func (mock *EnableEpochsHandlerMock) IsRefactorPeersMiniBlocksFlagEnabled() bool { + return mock.IsRefactorPeersMiniBlocksFlagEnabledField +} + // IsInterfaceNil returns true if there is no value under the interface func (mock *EnableEpochsHandlerMock) IsInterfaceNil() bool { return mock == nil diff --git a/sharding/nodesCoordinator/errors.go b/sharding/nodesCoordinator/errors.go index 
359dec6abc8..e9f210ecdd9 100644 --- a/sharding/nodesCoordinator/errors.go +++ b/sharding/nodesCoordinator/errors.go @@ -108,6 +108,3 @@ var ErrNilNodeTypeProvider = errors.New("nil node type provider") // ErrNilEnableEpochsHandler signals that a nil enable epochs handler has been provided var ErrNilEnableEpochsHandler = errors.New("nil enable epochs handler") - -// ErrNilPoolsHolder signals that a nil validator info cacher has been provided -var ErrNilValidatorInfoCacher = errors.New("nil validator info cacher") diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinator.go b/sharding/nodesCoordinator/indexHashedNodesCoordinator.go index bdd41678eac..7aa1b66142d 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinator.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinator.go @@ -1195,7 +1195,7 @@ func (ihnc *indexHashedNodesCoordinator) createValidatorInfoFromBody( } func (ihnc *indexHashedNodesCoordinator) getShardValidatorInfoData(txHash []byte, validatorInfoCacher epochStart.ValidatorInfoCacher) (*state.ShardValidatorInfo, error) { - if ihnc.flagRefactorPeersMiniBlocks.IsSet() { + if ihnc.enableEpochsHandler.IsRefactorPeersMiniBlocksFlagEnabled() { shardValidatorInfo, err := validatorInfoCacher.GetValidatorInfo(txHash) if err != nil { return nil, err @@ -1216,6 +1216,4 @@ func (ihnc *indexHashedNodesCoordinator) getShardValidatorInfoData(txHash []byte func (ihnc *indexHashedNodesCoordinator) updateEpochFlags(epoch uint32) { ihnc.flagWaitingListFix.SetValue(epoch >= ihnc.enableEpochsHandler.WaitingListFixEnableEpoch()) log.Debug("indexHashedNodesCoordinator: waiting list fix", "enabled", ihnc.flagWaitingListFix.IsSet()) - ihnc.flagRefactorPeersMiniBlocks.SetValue(epoch >= ihnc.refactorPeersMiniBlocksEnableEpoch) - log.Debug("indexHashedNodesCoordinator: refactor peers mini blocks", "enabled", ihnc.flagRefactorPeersMiniBlocks.IsSet()) } diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go 
b/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go index 593020f0648..fdbe49a5166 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go @@ -114,7 +114,9 @@ func createArguments() ArgNodesCoordinator { IsFullArchive: false, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + EnableEpochsHandler: &mock.EnableEpochsHandlerMock{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + }, } return arguments } @@ -1430,7 +1432,9 @@ func TestIndexHashedNodesCoordinator_EpochStart_EligibleSortedAscendingByIndex(t Adaptivity: adaptivity, ShuffleBetweenShards: shuffleBetweenShards, MaxNodesEnableConfig: nil, - EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + EnableEpochsHandler: &mock.EnableEpochsHandlerMock{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + }, } nodeShuffler, err := NewHashValidatorsShuffler(shufflerArgs) require.Nil(t, err) @@ -1454,7 +1458,9 @@ func TestIndexHashedNodesCoordinator_EpochStart_EligibleSortedAscendingByIndex(t ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, - EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + EnableEpochsHandler: &mock.EnableEpochsHandlerMock{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + }, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) diff --git a/testscommon/enableEpochsHandlerStub.go b/testscommon/enableEpochsHandlerStub.go index f9fbc7c523c..ea3e6b8bfb5 100644 --- a/testscommon/enableEpochsHandlerStub.go +++ b/testscommon/enableEpochsHandlerStub.go @@ -20,6 +20,7 @@ type EnableEpochsHandlerStub struct { CheckExecuteReadOnlyEnableEpochField uint32 StorageAPICostOptimizationEnableEpochField uint32 MiniBlockPartialExecutionEnableEpochField uint32 + 
RefactorPeersMiniBlocksEnableEpochField uint32 IsSCDeployFlagEnabledField bool IsBuiltInFunctionsFlagEnabledField bool IsRelayedTransactionsFlagEnabledField bool @@ -104,6 +105,7 @@ type EnableEpochsHandlerStub struct { IsCheckTransferFlagEnabledField bool IsTransferToMetaFlagEnabledField bool IsESDTNFTImprovementV1FlagEnabledField bool + IsRefactorPeersMiniBlocksFlagEnabledField bool } // ResetPenalizedTooMuchGasFlag - @@ -198,6 +200,11 @@ func (stub *EnableEpochsHandlerStub) MiniBlockPartialExecutionEnableEpoch() uint return stub.MiniBlockPartialExecutionEnableEpochField } +// RefactorPeersMiniBlocksEnableEpoch - +func (stub *EnableEpochsHandlerStub) RefactorPeersMiniBlocksEnableEpoch() uint32 { + return stub.RefactorPeersMiniBlocksEnableEpochField +} + // IsSCDeployFlagEnabled - func (stub *EnableEpochsHandlerStub) IsSCDeployFlagEnabled() bool { return stub.IsSCDeployFlagEnabledField @@ -618,6 +625,11 @@ func (stub *EnableEpochsHandlerStub) IsESDTNFTImprovementV1FlagEnabled() bool { return stub.IsESDTNFTImprovementV1FlagEnabledField } +// IsRefactorPeersMiniBlocksFlagEnabled - +func (stub *EnableEpochsHandlerStub) IsRefactorPeersMiniBlocksFlagEnabled() bool { + return stub.IsRefactorPeersMiniBlocksFlagEnabledField +} + // IsInterfaceNil - func (stub *EnableEpochsHandlerStub) IsInterfaceNil() bool { return stub == nil diff --git a/update/factory/exportHandlerFactory.go b/update/factory/exportHandlerFactory.go index b838f5cd6be..cec8db108a2 100644 --- a/update/factory/exportHandlerFactory.go +++ b/update/factory/exportHandlerFactory.go @@ -280,21 +280,21 @@ func (e *exportHandlerFactory) Create() (update.ExportHandler, error) { return nil, err } argsEpochTrigger := shardchain.ArgsShardEpochStartTrigger{ - Marshalizer: e.CoreComponents.InternalMarshalizer(), - Hasher: e.CoreComponents.Hasher(), - HeaderValidator: e.headerValidator, - Uint64Converter: e.CoreComponents.Uint64ByteSliceConverter(), - DataPool: e.dataPool, - Storage: e.storageService, - RequestHandler: 
e.requestHandler, - EpochStartNotifier: notifier.NewEpochStartSubscriptionHandler(), - Epoch: 0, - Validity: process.MetaBlockValidity, - Finality: process.BlockFinality, - PeerMiniBlocksSyncer: peerMiniBlocksSyncer, - RoundHandler: e.roundHandler, - AppStatusHandler: e.CoreComponents.StatusHandler(), - RefactorPeersMiniBlocksEnableEpoch: e.refactorPeersMiniBlocksEnableEpoch, + Marshalizer: e.CoreComponents.InternalMarshalizer(), + Hasher: e.CoreComponents.Hasher(), + HeaderValidator: e.headerValidator, + Uint64Converter: e.CoreComponents.Uint64ByteSliceConverter(), + DataPool: e.dataPool, + Storage: e.storageService, + RequestHandler: e.requestHandler, + EpochStartNotifier: notifier.NewEpochStartSubscriptionHandler(), + Epoch: 0, + Validity: process.MetaBlockValidity, + Finality: process.BlockFinality, + PeerMiniBlocksSyncer: peerMiniBlocksSyncer, + RoundHandler: e.roundHandler, + AppStatusHandler: e.CoreComponents.StatusHandler(), + EnableEpochsHandler: e.CoreComponents.EnableEpochsHandler(), } epochHandler, err := shardchain.NewEpochStartTrigger(&argsEpochTrigger) if err != nil { From d6fc99c54ccc698635c099842339f175b1a7ebaf Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Wed, 17 Aug 2022 00:32:01 +0300 Subject: [PATCH 51/70] * Fixed unit/integration tests --- integrationTests/consensus/testInitializer.go | 3 +-- .../startInEpoch/startInEpoch_test.go | 1 + integrationTests/nodesCoordinatorFactory.go | 16 ++++-------- .../testProcessorNodeWithCoordinator.go | 4 +-- .../testProcessorNodeWithMultisigner.go | 10 +++---- integrationTests/testSyncNode.go | 6 +++-- .../vm/systemVM/stakingSC_test.go | 26 +++++++++++++++---- .../indexHashedNodesCoordinator_test.go | 4 +-- 8 files changed, 38 insertions(+), 32 deletions(-) diff --git a/integrationTests/consensus/testInitializer.go b/integrationTests/consensus/testInitializer.go index aa0fbdb2272..d1851ec56bf 100644 --- a/integrationTests/consensus/testInitializer.go +++ b/integrationTests/consensus/testInitializer.go 
@@ -533,8 +533,7 @@ func createNodes( NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ - IsWaitingListFixFlagEnabledField: true, - IsRefactorPeersMiniBlocksFlagEnabledField: true, + IsWaitingListFixFlagEnabledField: true, }, } nodesCoord, _ := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/integrationTests/multiShard/endOfEpoch/startInEpoch/startInEpoch_test.go b/integrationTests/multiShard/endOfEpoch/startInEpoch/startInEpoch_test.go index 7c20d389ded..296bcc634b6 100644 --- a/integrationTests/multiShard/endOfEpoch/startInEpoch/startInEpoch_test.go +++ b/integrationTests/multiShard/endOfEpoch/startInEpoch/startInEpoch_test.go @@ -63,6 +63,7 @@ func testNodeStartsInEpoch(t *testing.T, shardID uint32, expectedHighestRound ui StakingV2EnableEpoch: integrationTests.UnreachableEpoch, ScheduledMiniBlocksEnableEpoch: integrationTests.UnreachableEpoch, MiniBlockPartialExecutionEnableEpoch: integrationTests.UnreachableEpoch, + RefactorPeersMiniBlocksEnableEpoch: integrationTests.UnreachableEpoch, } nodes := integrationTests.CreateNodesWithEnableEpochs( diff --git a/integrationTests/nodesCoordinatorFactory.go b/integrationTests/nodesCoordinatorFactory.go index fe78c2d9aba..518bb535e14 100644 --- a/integrationTests/nodesCoordinatorFactory.go +++ b/integrationTests/nodesCoordinatorFactory.go @@ -48,9 +48,7 @@ func (tpn *IndexHashedNodesCoordinatorFactory) CreateNodesCoordinator(arg ArgInd Adaptivity: adaptivity, ShuffleBetweenShards: shuffleBetweenShards, MaxNodesEnableConfig: nil, - EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ - IsRefactorPeersMiniBlocksFlagEnabledField: true, - }, + EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, } nodeShuffler, _ := nodesCoordinator.NewHashValidatorsShuffler(nodeShufflerArgs) argumentsNodesCoordinator := nodesCoordinator.ArgNodesCoordinator{ @@ -71,9 +69,7 @@ func (tpn 
*IndexHashedNodesCoordinatorFactory) CreateNodesCoordinator(arg ArgInd ChanStopNode: endProcess.GetDummyEndProcessChannel(), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, - EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ - IsRefactorPeersMiniBlocksFlagEnabledField: true, - }, + EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) if err != nil { @@ -104,9 +100,8 @@ func (ihncrf *IndexHashedNodesCoordinatorWithRaterFactory) CreateNodesCoordinato ShuffleBetweenShards: shuffleBetweenShards, MaxNodesEnableConfig: nil, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ - IsWaitingListFixFlagEnabledField: true, - IsBalanceWaitingListsFlagEnabledField: true, - IsRefactorPeersMiniBlocksFlagEnabledField: true, + IsWaitingListFixFlagEnabledField: true, + IsBalanceWaitingListsFlagEnabledField: true, }, } nodeShuffler, _ := nodesCoordinator.NewHashValidatorsShuffler(shufflerArgs) @@ -129,8 +124,7 @@ func (ihncrf *IndexHashedNodesCoordinatorWithRaterFactory) CreateNodesCoordinato NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ - IsWaitingListFixFlagEnabledField: true, - IsRefactorPeersMiniBlocksFlagEnabledField: true, + IsWaitingListFixFlagEnabledField: true, }, } diff --git a/integrationTests/testProcessorNodeWithCoordinator.go b/integrationTests/testProcessorNodeWithCoordinator.go index bb834d9b4ba..0c08dc19cdd 100644 --- a/integrationTests/testProcessorNodeWithCoordinator.go +++ b/integrationTests/testProcessorNodeWithCoordinator.go @@ -73,9 +73,7 @@ func CreateProcessorNodesWithNodesCoordinator( ShuffledOutHandler: &mock.ShuffledOutHandlerStub{}, ChanStopNode: endProcess.GetDummyEndProcessChannel(), IsFullArchive: false, - EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ - 
IsRefactorPeersMiniBlocksFlagEnabledField: true, - }, + EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, } nodesCoordinatorInstance, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/integrationTests/testProcessorNodeWithMultisigner.go b/integrationTests/testProcessorNodeWithMultisigner.go index 25527bc730d..b3be4a0e203 100644 --- a/integrationTests/testProcessorNodeWithMultisigner.go +++ b/integrationTests/testProcessorNodeWithMultisigner.go @@ -174,6 +174,7 @@ func CreateNodeWithBLSAndTxKeys( DelegationSmartContractEnableEpoch: 1, ScheduledMiniBlocksEnableEpoch: UnreachableEpoch, MiniBlockPartialExecutionEnableEpoch: UnreachableEpoch, + RefactorPeersMiniBlocksEnableEpoch: UnreachableEpoch, } return CreateNode( @@ -230,6 +231,7 @@ func CreateNodesWithNodesCoordinatorFactory( StakingV2EnableEpoch: UnreachableEpoch, ScheduledMiniBlocksEnableEpoch: UnreachableEpoch, MiniBlockPartialExecutionEnableEpoch: UnreachableEpoch, + RefactorPeersMiniBlocksEnableEpoch: UnreachableEpoch, } nodesMap := make(map[uint32][]*TestProcessorNode) @@ -425,9 +427,7 @@ func CreateNodesWithNodesCoordinatorAndHeaderSigVerifier( ChanStopNode: endProcess.GetDummyEndProcessChannel(), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, - EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ - IsRefactorPeersMiniBlocksFlagEnabledField: true, - }, + EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, } nodesCoordinatorInstance, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) @@ -541,9 +541,7 @@ func CreateNodesWithNodesCoordinatorKeygenAndSingleSigner( ChanStopNode: endProcess.GetDummyEndProcessChannel(), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, - EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ - IsRefactorPeersMiniBlocksFlagEnabledField: true, - }, + EnableEpochsHandler: 
&testscommon.EnableEpochsHandlerStub{}, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/integrationTests/testSyncNode.go b/integrationTests/testSyncNode.go index 68480aea03f..054d2e4e9a8 100644 --- a/integrationTests/testSyncNode.go +++ b/integrationTests/testSyncNode.go @@ -43,7 +43,9 @@ func (tpn *TestProcessorNode) initBlockProcessorWithSync() { coreComponents.HasherField = TestHasher coreComponents.Uint64ByteSliceConverterField = TestUint64Converter coreComponents.EpochNotifierField = tpn.EpochNotifier - coreComponents.EnableEpochsHandlerField = &testscommon.EnableEpochsHandlerStub{} + coreComponents.EnableEpochsHandlerField = &testscommon.EnableEpochsHandlerStub{ + RefactorPeersMiniBlocksEnableEpochField: UnreachableEpoch, + } dataComponents := GetDefaultDataComponents() dataComponents.Store = tpn.Storage @@ -88,7 +90,7 @@ func (tpn *TestProcessorNode) initBlockProcessorWithSync() { GasHandler: tpn.GasHandler, ScheduledTxsExecutionHandler: &testscommon.ScheduledTxsExecutionStub{}, ProcessedMiniBlocksTracker: &testscommon.ProcessedMiniBlocksTrackerStub{}, - ReceiptsRepository: &testscommon.ReceiptsRepositoryStub{}, + ReceiptsRepository: &testscommon.ReceiptsRepositoryStub{}, } if tpn.ShardCoordinator.SelfId() == core.MetachainShardId { diff --git a/integrationTests/vm/systemVM/stakingSC_test.go b/integrationTests/vm/systemVM/stakingSC_test.go index 0d33571c856..63e78859af6 100644 --- a/integrationTests/vm/systemVM/stakingSC_test.go +++ b/integrationTests/vm/systemVM/stakingSC_test.go @@ -136,18 +136,30 @@ func TestStakingUnstakingAndUnbondingOnMultiShardEnvironmentWithValidatorStatist numOfShards := 2 nodesPerShard := 2 numMetachainNodes := 2 + shardConsensusGroupSize := 1 + metaConsensusGroupSize := 1 - nodes := integrationTests.CreateNodes( - numOfShards, + nodesMap := integrationTests.CreateNodesWithNodesCoordinator( nodesPerShard, numMetachainNodes, + numOfShards, + shardConsensusGroupSize, + 
metaConsensusGroupSize, ) + nodes := make([]*integrationTests.TestProcessorNode, 0) idxProposers := make([]int, numOfShards+1) - for i := 0; i < numOfShards; i++ { - idxProposers[i] = i * nodesPerShard + + for _, nds := range nodesMap { + nodes = append(nodes, nds...) + } + + for _, nds := range nodesMap { + idx, err := integrationTestsVm.GetNodeIndex(nodes, nds[0]) + require.Nil(t, err) + + idxProposers = append(idxProposers, idx) } - idxProposers[numOfShards] = numOfShards * nodesPerShard integrationTests.DisplayAndStartNodes(nodes) @@ -157,6 +169,10 @@ func TestStakingUnstakingAndUnbondingOnMultiShardEnvironmentWithValidatorStatist } }() + for _, nds := range nodesMap { + fmt.Println(integrationTests.MakeDisplayTable(nds)) + } + initialVal := big.NewInt(10000000000) integrationTests.MintAllNodes(nodes, initialVal) verifyInitialBalance(t, nodes, initialVal) diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go b/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go index fdbe49a5166..2dc749b5e56 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go @@ -1432,9 +1432,7 @@ func TestIndexHashedNodesCoordinator_EpochStart_EligibleSortedAscendingByIndex(t Adaptivity: adaptivity, ShuffleBetweenShards: shuffleBetweenShards, MaxNodesEnableConfig: nil, - EnableEpochsHandler: &mock.EnableEpochsHandlerMock{ - IsRefactorPeersMiniBlocksFlagEnabledField: true, - }, + EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, } nodeShuffler, err := NewHashValidatorsShuffler(shufflerArgs) require.Nil(t, err) From 979a046b12aec5bce0f09d14eef441178c2ba298 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Wed, 17 Aug 2022 00:56:50 +0300 Subject: [PATCH 52/70] * Added unit tests for nil checks --- epochStart/metachain/validators_test.go | 11 +++++++++++ epochStart/shardchain/trigger_test.go | 11 +++++++++++ .../indexHashedNodesCoordinator_test.go | 9 +++++++++ 3 files 
changed, 31 insertions(+) diff --git a/epochStart/metachain/validators_test.go b/epochStart/metachain/validators_test.go index cd6f747d62d..215748ff634 100644 --- a/epochStart/metachain/validators_test.go +++ b/epochStart/metachain/validators_test.go @@ -217,6 +217,17 @@ func TestEpochValidatorInfoCreator_NewValidatorInfoCreatorNilDataPool(t *testing require.Equal(t, epochStart.ErrNilDataPoolsHolder, err) } +func TestEpochValidatorInfoCreator_NewValidatorInfoCreatorNilEnableEpochsHandler(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + arguments.EnableEpochsHandler = nil + vic, err := NewValidatorInfoCreator(arguments) + + require.Nil(t, vic) + require.Equal(t, epochStart.ErrNilEnableEpochsHandler, err) +} + func TestEpochValidatorInfoCreator_NewValidatorInfoCreatorShouldWork(t *testing.T) { t.Parallel() diff --git a/epochStart/shardchain/trigger_test.go b/epochStart/shardchain/trigger_test.go index 90f89e489e2..9198fb9bb14 100644 --- a/epochStart/shardchain/trigger_test.go +++ b/epochStart/shardchain/trigger_test.go @@ -234,6 +234,17 @@ func TestNewEpochStartTrigger_NilRoundHandlerShouldErr(t *testing.T) { assert.Equal(t, epochStart.ErrNilRoundHandler, err) } +func TestNewEpochStartTrigger_NilEnableEpochsHandlerShouldErr(t *testing.T) { + t.Parallel() + + args := createMockShardEpochStartTriggerArguments() + args.EnableEpochsHandler = nil + epochStartTrigger, err := NewEpochStartTrigger(args) + + assert.Nil(t, epochStartTrigger) + assert.Equal(t, epochStart.ErrNilEnableEpochsHandler, err) +} + func TestNewEpochStartTrigger_ShouldOk(t *testing.T) { t.Parallel() diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go b/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go index 2dc749b5e56..ab489b7d49d 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go @@ -196,6 +196,15 @@ func 
TestNewIndexHashedNodesCoordinator_NilCacherShouldErr(t *testing.T) { require.Nil(t, ihnc) } +func TestNewIndexHashedNodesCoordinator_NilEnableEpochsHandlerShouldErr(t *testing.T) { + arguments := createArguments() + arguments.EnableEpochsHandler = nil + ihnc, err := NewIndexHashedNodesCoordinator(arguments) + + require.Equal(t, ErrNilEnableEpochsHandler, err) + require.Nil(t, ihnc) +} + func TestNewIndexHashedGroupSelector_OkValsShouldWork(t *testing.T) { t.Parallel() From 07dff5e8df51a02c9dc97e6a04bd05d0752044dc Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Wed, 17 Aug 2022 15:49:16 +0300 Subject: [PATCH 53/70] * Fixed RefactorPeersMiniBlocks activation flag --- sharding/nodesCoordinator/indexHashedNodesCoordinator.go | 9 +++++---- .../indexHashedNodesCoordinatorLite_test.go | 2 +- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinator.go b/sharding/nodesCoordinator/indexHashedNodesCoordinator.go index 7aa1b66142d..532f05d12c2 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinator.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinator.go @@ -560,7 +560,7 @@ func (ihnc *indexHashedNodesCoordinator) EpochStartPrepare(metaHdr data.HeaderHa ihnc.updateEpochFlags(newEpoch) - allValidatorInfo, err := ihnc.createValidatorInfoFromBody(body, ihnc.numTotalEligible, validatorInfoCacher) + allValidatorInfo, err := ihnc.createValidatorInfoFromBody(body, ihnc.numTotalEligible, validatorInfoCacher, newEpoch) if err != nil { log.Error("could not create validator info from body - do nothing on nodesCoordinator epochStartPrepare", "error", err.Error()) return @@ -1165,6 +1165,7 @@ func (ihnc *indexHashedNodesCoordinator) createValidatorInfoFromBody( body data.BodyHandler, previousTotal uint64, validatorInfoCacher epochStart.ValidatorInfoCacher, + currentEpoch uint32, ) ([]*state.ShardValidatorInfo, error) { if check.IfNil(body) { return nil, ErrNilBlockBody @@ -1182,7 +1183,7 @@ func (ihnc 
*indexHashedNodesCoordinator) createValidatorInfoFromBody( } for _, txHash := range peerMiniBlock.TxHashes { - shardValidatorInfo, err := ihnc.getShardValidatorInfoData(txHash, validatorInfoCacher) + shardValidatorInfo, err := ihnc.getShardValidatorInfoData(txHash, validatorInfoCacher, currentEpoch) if err != nil { return nil, err } @@ -1194,8 +1195,8 @@ func (ihnc *indexHashedNodesCoordinator) createValidatorInfoFromBody( return allValidatorInfo, nil } -func (ihnc *indexHashedNodesCoordinator) getShardValidatorInfoData(txHash []byte, validatorInfoCacher epochStart.ValidatorInfoCacher) (*state.ShardValidatorInfo, error) { - if ihnc.enableEpochsHandler.IsRefactorPeersMiniBlocksFlagEnabled() { +func (ihnc *indexHashedNodesCoordinator) getShardValidatorInfoData(txHash []byte, validatorInfoCacher epochStart.ValidatorInfoCacher, currentEpoch uint32) (*state.ShardValidatorInfo, error) { + if currentEpoch >= ihnc.enableEpochsHandler.RefactorPeersMiniBlocksEnableEpoch() { shardValidatorInfo, err := validatorInfoCacher.GetValidatorInfo(txHash) if err != nil { return nil, err diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinatorLite_test.go b/sharding/nodesCoordinator/indexHashedNodesCoordinatorLite_test.go index b6a70e1f525..b564082f488 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinatorLite_test.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinatorLite_test.go @@ -170,7 +170,7 @@ func TestIndexHashedNodesCoordinator_IsEpochInConfig(t *testing.T) { validatorInfoCacher := dataPool.NewCurrentEpochValidatorInfoPool() body := createBlockBodyFromNodesCoordinator(ihnc, epoch, validatorInfoCacher) - validatorsInfo, _ := ihnc.createValidatorInfoFromBody(body, 10, validatorInfoCacher) + validatorsInfo, _ := ihnc.createValidatorInfoFromBody(body, 10, validatorInfoCacher, epoch) err = ihnc.SetNodesConfigFromValidatorsInfo(epoch, []byte{}, validatorsInfo) require.Nil(t, err) From fe1cd4b54682d64c770fcbfe4071ff68a7d93277 Mon Sep 17 00:00:00 2001 
From: SebastianMarian Date: Wed, 17 Aug 2022 17:34:14 +0300 Subject: [PATCH 54/70] * Fixed integration tests --- integrationTests/nodesCoordinatorFactory.go | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/integrationTests/nodesCoordinatorFactory.go b/integrationTests/nodesCoordinatorFactory.go index 518bb535e14..7b91051954f 100644 --- a/integrationTests/nodesCoordinatorFactory.go +++ b/integrationTests/nodesCoordinatorFactory.go @@ -69,7 +69,9 @@ func (tpn *IndexHashedNodesCoordinatorFactory) CreateNodesCoordinator(arg ArgInd ChanStopNode: endProcess.GetDummyEndProcessChannel(), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, - EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, + EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ + RefactorPeersMiniBlocksEnableEpochField: UnreachableEpoch, + }, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) if err != nil { @@ -124,7 +126,8 @@ func (ihncrf *IndexHashedNodesCoordinatorWithRaterFactory) CreateNodesCoordinato NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ - IsWaitingListFixFlagEnabledField: true, + IsWaitingListFixFlagEnabledField: true, + RefactorPeersMiniBlocksEnableEpochField: UnreachableEpoch, }, } From 98f3b51385e530b14b921cbece41a3ec862c506b Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Thu, 18 Aug 2022 13:10:09 +0300 Subject: [PATCH 55/70] * Fixed restriction given by GetValidatorWithPublicKey in interceptedValidatorInfo --- process/peer/interceptedValidatorInfo.go | 9 ++++-- process/peer/interceptedValidatorInfo_test.go | 31 ++++++++++--------- 2 files changed, 22 insertions(+), 18 deletions(-) diff --git a/process/peer/interceptedValidatorInfo.go b/process/peer/interceptedValidatorInfo.go index 4de280911cd..f536071444a 100644 --- a/process/peer/interceptedValidatorInfo.go +++ 
b/process/peer/interceptedValidatorInfo.go @@ -84,9 +84,12 @@ func (ivi *interceptedValidatorInfo) CheckValidity() error { return err } - // Check if the public key is a validator - _, _, err = ivi.nodesCoordinator.GetValidatorWithPublicKey(ivi.shardValidatorInfo.PublicKey) - return err + //TODO: Analyse if GetValidatorWithPublicKey is still needed to be called also in all other places + //// Check if the public key is a validator + //_, _, err = ivi.nodesCoordinator.GetValidatorWithPublicKey(ivi.shardValidatorInfo.PublicKey) + //return err + + return nil } // IsForCurrentShard always returns true diff --git a/process/peer/interceptedValidatorInfo_test.go b/process/peer/interceptedValidatorInfo_test.go index 18c9d160e44..871f8b08bfe 100644 --- a/process/peer/interceptedValidatorInfo_test.go +++ b/process/peer/interceptedValidatorInfo_test.go @@ -104,21 +104,22 @@ func TestInterceptedValidatorInfo_CheckValidity(t *testing.T) { t.Run("listProperty too short", testInterceptedValidatorInfoPropertyLen(listProperty, false)) t.Run("listProperty too long", testInterceptedValidatorInfoPropertyLen(listProperty, true)) - t.Run("not validator should error", func(t *testing.T) { - t.Parallel() - - expectedErr := errors.New("expected err") - args := createMockArgInterceptedValidatorInfo() - args.NodesCoordinator = &shardingMocks.NodesCoordinatorStub{ - GetValidatorWithPublicKeyCalled: func(publicKey []byte) (validator nodesCoordinator.Validator, shardId uint32, err error) { - return nil, 0, expectedErr - }, - } - - ivi, _ := NewInterceptedValidatorInfo(args) - require.False(t, check.IfNil(ivi)) - assert.Equal(t, expectedErr, ivi.CheckValidity()) - }) + //TODO: Remove commented code + //t.Run("not validator should error", func(t *testing.T) { + // t.Parallel() + // + // expectedErr := errors.New("expected err") + // args := createMockArgInterceptedValidatorInfo() + // args.NodesCoordinator = &shardingMocks.NodesCoordinatorStub{ + // GetValidatorWithPublicKeyCalled: func(publicKey 
[]byte) (validator nodesCoordinator.Validator, shardId uint32, err error) { + // return nil, 0, expectedErr + // }, + // } + // + // ivi, _ := NewInterceptedValidatorInfo(args) + // require.False(t, check.IfNil(ivi)) + // assert.Equal(t, expectedErr, ivi.CheckValidity()) + //}) t.Run("should work", func(t *testing.T) { t.Parallel() From 8e24b901fbd8c62c5ccdccde2c691d4728559aa0 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Thu, 18 Aug 2022 13:22:28 +0300 Subject: [PATCH 56/70] * Fixed restriction given by GetValidatorWithPublicKey in interceptedValidatorInfo --- process/peer/interceptedValidatorInfo.go | 9 ++++-- process/peer/interceptedValidatorInfo_test.go | 31 ++++++++++--------- 2 files changed, 22 insertions(+), 18 deletions(-) diff --git a/process/peer/interceptedValidatorInfo.go b/process/peer/interceptedValidatorInfo.go index 4de280911cd..f536071444a 100644 --- a/process/peer/interceptedValidatorInfo.go +++ b/process/peer/interceptedValidatorInfo.go @@ -84,9 +84,12 @@ func (ivi *interceptedValidatorInfo) CheckValidity() error { return err } - // Check if the public key is a validator - _, _, err = ivi.nodesCoordinator.GetValidatorWithPublicKey(ivi.shardValidatorInfo.PublicKey) - return err + //TODO: Analyse if GetValidatorWithPublicKey is still needed to be called also in all other places + //// Check if the public key is a validator + //_, _, err = ivi.nodesCoordinator.GetValidatorWithPublicKey(ivi.shardValidatorInfo.PublicKey) + //return err + + return nil } // IsForCurrentShard always returns true diff --git a/process/peer/interceptedValidatorInfo_test.go b/process/peer/interceptedValidatorInfo_test.go index 18c9d160e44..871f8b08bfe 100644 --- a/process/peer/interceptedValidatorInfo_test.go +++ b/process/peer/interceptedValidatorInfo_test.go @@ -104,21 +104,22 @@ func TestInterceptedValidatorInfo_CheckValidity(t *testing.T) { t.Run("listProperty too short", testInterceptedValidatorInfoPropertyLen(listProperty, false)) t.Run("listProperty too long", 
testInterceptedValidatorInfoPropertyLen(listProperty, true)) - t.Run("not validator should error", func(t *testing.T) { - t.Parallel() - - expectedErr := errors.New("expected err") - args := createMockArgInterceptedValidatorInfo() - args.NodesCoordinator = &shardingMocks.NodesCoordinatorStub{ - GetValidatorWithPublicKeyCalled: func(publicKey []byte) (validator nodesCoordinator.Validator, shardId uint32, err error) { - return nil, 0, expectedErr - }, - } - - ivi, _ := NewInterceptedValidatorInfo(args) - require.False(t, check.IfNil(ivi)) - assert.Equal(t, expectedErr, ivi.CheckValidity()) - }) + //TODO: Remove commented code + //t.Run("not validator should error", func(t *testing.T) { + // t.Parallel() + // + // expectedErr := errors.New("expected err") + // args := createMockArgInterceptedValidatorInfo() + // args.NodesCoordinator = &shardingMocks.NodesCoordinatorStub{ + // GetValidatorWithPublicKeyCalled: func(publicKey []byte) (validator nodesCoordinator.Validator, shardId uint32, err error) { + // return nil, 0, expectedErr + // }, + // } + // + // ivi, _ := NewInterceptedValidatorInfo(args) + // require.False(t, check.IfNil(ivi)) + // assert.Equal(t, expectedErr, ivi.CheckValidity()) + //}) t.Run("should work", func(t *testing.T) { t.Parallel() From 810a9dad61a5c07181d7fa395cdf5053930acdc9 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Thu, 18 Aug 2022 23:13:15 +0300 Subject: [PATCH 57/70] * Removed nodesCoordinator from interceptedValidatorInfo and interceptedValidatorInfoDataFactory --- .../interceptedValidatorInfoDataFactory.go | 20 ++++------ ...nterceptedValidatorInfoDataFactory_test.go | 11 ------ .../validatorInfoInterceptorProcessor_test.go | 6 +-- process/peer/interceptedValidatorInfo.go | 16 ++------ process/peer/interceptedValidatorInfo_test.go | 39 +------------------ 5 files changed, 14 insertions(+), 78 deletions(-) diff --git a/process/interceptors/factory/interceptedValidatorInfoDataFactory.go 
b/process/interceptors/factory/interceptedValidatorInfoDataFactory.go index afd6f02ccef..62247d91e17 100644 --- a/process/interceptors/factory/interceptedValidatorInfoDataFactory.go +++ b/process/interceptors/factory/interceptedValidatorInfoDataFactory.go @@ -9,9 +9,8 @@ import ( ) type interceptedValidatorInfoDataFactory struct { - marshaller marshal.Marshalizer - hasher hashing.Hasher - nodesCoordinator process.NodesCoordinator + marshaller marshal.Marshalizer + hasher hashing.Hasher } // NewInterceptedValidatorInfoDataFactory creates an instance of interceptedValidatorInfoDataFactory @@ -22,9 +21,8 @@ func NewInterceptedValidatorInfoDataFactory(args ArgInterceptedDataFactory) (*in } return &interceptedValidatorInfoDataFactory{ - marshaller: args.CoreComponents.InternalMarshalizer(), - hasher: args.CoreComponents.Hasher(), - nodesCoordinator: args.NodesCoordinator, + marshaller: args.CoreComponents.InternalMarshalizer(), + hasher: args.CoreComponents.Hasher(), }, nil } @@ -38,9 +36,6 @@ func checkInterceptedValidatorInfoDataFactoryArgs(args ArgInterceptedDataFactory if check.IfNil(args.CoreComponents.Hasher()) { return process.ErrNilHasher } - if check.IfNil(args.NodesCoordinator) { - return process.ErrNilNodesCoordinator - } return nil } @@ -48,10 +43,9 @@ func checkInterceptedValidatorInfoDataFactoryArgs(args ArgInterceptedDataFactory // Create creates instances of InterceptedData by unmarshalling provided buffer func (ividf *interceptedValidatorInfoDataFactory) Create(buff []byte) (process.InterceptedData, error) { args := peer.ArgInterceptedValidatorInfo{ - DataBuff: buff, - Marshalizer: ividf.marshaller, - Hasher: ividf.hasher, - NodesCoordinator: ividf.nodesCoordinator, + DataBuff: buff, + Marshalizer: ividf.marshaller, + Hasher: ividf.hasher, } return peer.NewInterceptedValidatorInfo(args) diff --git a/process/interceptors/factory/interceptedValidatorInfoDataFactory_test.go b/process/interceptors/factory/interceptedValidatorInfoDataFactory_test.go index 
2f6ed90e8ad..3fcf3021e7c 100644 --- a/process/interceptors/factory/interceptedValidatorInfoDataFactory_test.go +++ b/process/interceptors/factory/interceptedValidatorInfoDataFactory_test.go @@ -62,16 +62,6 @@ func TestNewInterceptedValidatorInfoDataFactory(t *testing.T) { assert.Equal(t, process.ErrNilHasher, err) assert.True(t, check.IfNil(ividf)) }) - t.Run("nil nodes coordinator should error", func(t *testing.T) { - t.Parallel() - - args := createMockArgument(createMockComponentHolders()) - args.NodesCoordinator = nil - - ividf, err := NewInterceptedValidatorInfoDataFactory(*args) - assert.Equal(t, process.ErrNilNodesCoordinator, err) - assert.True(t, check.IfNil(ividf)) - }) t.Run("should work", func(t *testing.T) { t.Parallel() @@ -79,7 +69,6 @@ func TestNewInterceptedValidatorInfoDataFactory(t *testing.T) { assert.Nil(t, err) assert.False(t, check.IfNil(ividf)) }) - } func TestInterceptedValidatorInfoDataFactory_Create(t *testing.T) { diff --git a/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go b/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go index e630f01781f..bad90439cdf 100644 --- a/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go +++ b/process/interceptors/processor/validatorInfoInterceptorProcessor_test.go @@ -13,7 +13,6 @@ import ( "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/testscommon" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" - "github.com/ElrondNetwork/elrond-go/testscommon/shardingMocks" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -30,9 +29,8 @@ func createMockValidatorInfo() state.ValidatorInfo { func createMockInterceptedValidatorInfo() process.InterceptedData { args := peer.ArgInterceptedValidatorInfo{ - Marshalizer: testscommon.MarshalizerMock{}, - Hasher: &hashingMocks.HasherMock{}, - NodesCoordinator: &shardingMocks.NodesCoordinatorStub{}, + Marshalizer: 
testscommon.MarshalizerMock{}, + Hasher: &hashingMocks.HasherMock{}, } args.DataBuff, _ = args.Marshalizer.Marshal(createMockValidatorInfo()) ivi, _ := peer.NewInterceptedValidatorInfo(args) diff --git a/process/peer/interceptedValidatorInfo.go b/process/peer/interceptedValidatorInfo.go index f536071444a..2fbf57780ca 100644 --- a/process/peer/interceptedValidatorInfo.go +++ b/process/peer/interceptedValidatorInfo.go @@ -13,10 +13,9 @@ import ( // ArgInterceptedValidatorInfo is the argument used to create a new intercepted validator info type ArgInterceptedValidatorInfo struct { - DataBuff []byte - Marshalizer marshal.Marshalizer - Hasher hashing.Hasher - NodesCoordinator process.NodesCoordinator + DataBuff []byte + Marshalizer marshal.Marshalizer + Hasher hashing.Hasher } // interceptedValidatorInfo is a wrapper over validatorInfo @@ -40,7 +39,6 @@ func NewInterceptedValidatorInfo(args ArgInterceptedValidatorInfo) (*intercepted return &interceptedValidatorInfo{ shardValidatorInfo: shardValidatorInfo, - nodesCoordinator: args.NodesCoordinator, hash: args.Hasher.Compute(string(args.DataBuff)), }, nil } @@ -55,9 +53,6 @@ func checkArgs(args ArgInterceptedValidatorInfo) error { if check.IfNil(args.Hasher) { return process.ErrNilHasher } - if check.IfNil(args.NodesCoordinator) { - return process.ErrNilNodesCoordinator - } return nil } @@ -84,11 +79,6 @@ func (ivi *interceptedValidatorInfo) CheckValidity() error { return err } - //TODO: Analyse if GetValidatorWithPublicKey is still needed to be called also in all other places - //// Check if the public key is a validator - //_, _, err = ivi.nodesCoordinator.GetValidatorWithPublicKey(ivi.shardValidatorInfo.PublicKey) - //return err - return nil } diff --git a/process/peer/interceptedValidatorInfo_test.go b/process/peer/interceptedValidatorInfo_test.go index 871f8b08bfe..4993fc774c9 100644 --- a/process/peer/interceptedValidatorInfo_test.go +++ b/process/peer/interceptedValidatorInfo_test.go @@ -9,19 +9,16 @@ import ( 
"github.com/ElrondNetwork/elrond-go-core/core/check" logger "github.com/ElrondNetwork/elrond-go-logger" "github.com/ElrondNetwork/elrond-go/process" - "github.com/ElrondNetwork/elrond-go/sharding/nodesCoordinator" "github.com/ElrondNetwork/elrond-go/testscommon" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" - "github.com/ElrondNetwork/elrond-go/testscommon/shardingMocks" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func createMockArgInterceptedValidatorInfo() ArgInterceptedValidatorInfo { args := ArgInterceptedValidatorInfo{ - Marshalizer: testscommon.MarshalizerMock{}, - Hasher: &hashingMocks.HasherMock{}, - NodesCoordinator: &shardingMocks.NodesCoordinatorStub{}, + Marshalizer: testscommon.MarshalizerMock{}, + Hasher: &hashingMocks.HasherMock{}, } args.DataBuff, _ = args.Marshalizer.Marshal(createMockShardValidatorInfo()) @@ -61,16 +58,6 @@ func TestNewInterceptedValidatorInfo(t *testing.T) { assert.Equal(t, process.ErrNilHasher, err) assert.True(t, check.IfNil(ivi)) }) - t.Run("nil nodes coordinator should error", func(t *testing.T) { - t.Parallel() - - args := createMockArgInterceptedValidatorInfo() - args.NodesCoordinator = nil - - ivi, err := NewInterceptedValidatorInfo(args) - assert.Equal(t, process.ErrNilNodesCoordinator, err) - assert.True(t, check.IfNil(ivi)) - }) t.Run("unmarshal returns error", func(t *testing.T) { t.Parallel() @@ -104,32 +91,10 @@ func TestInterceptedValidatorInfo_CheckValidity(t *testing.T) { t.Run("listProperty too short", testInterceptedValidatorInfoPropertyLen(listProperty, false)) t.Run("listProperty too long", testInterceptedValidatorInfoPropertyLen(listProperty, true)) - //TODO: Remove commented code - //t.Run("not validator should error", func(t *testing.T) { - // t.Parallel() - // - // expectedErr := errors.New("expected err") - // args := createMockArgInterceptedValidatorInfo() - // args.NodesCoordinator = &shardingMocks.NodesCoordinatorStub{ - // 
GetValidatorWithPublicKeyCalled: func(publicKey []byte) (validator nodesCoordinator.Validator, shardId uint32, err error) { - // return nil, 0, expectedErr - // }, - // } - // - // ivi, _ := NewInterceptedValidatorInfo(args) - // require.False(t, check.IfNil(ivi)) - // assert.Equal(t, expectedErr, ivi.CheckValidity()) - //}) t.Run("should work", func(t *testing.T) { t.Parallel() args := createMockArgInterceptedValidatorInfo() - args.NodesCoordinator = &shardingMocks.NodesCoordinatorStub{ - GetValidatorWithPublicKeyCalled: func(publicKey []byte) (validator nodesCoordinator.Validator, shardId uint32, err error) { - return nil, 0, nil - }, - } - ivi, _ := NewInterceptedValidatorInfo(args) require.False(t, check.IfNil(ivi)) assert.Nil(t, ivi.CheckValidity()) From f59dbc3b45c006c493620ed99e079e16ab0ada21 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Thu, 18 Aug 2022 23:15:18 +0300 Subject: [PATCH 58/70] * Removed nodesCoordinator from interceptedValidatorInfo and interceptedValidatorInfoDataFactory --- process/peer/interceptedValidatorInfo.go | 1 - 1 file changed, 1 deletion(-) diff --git a/process/peer/interceptedValidatorInfo.go b/process/peer/interceptedValidatorInfo.go index 2fbf57780ca..1924cb536eb 100644 --- a/process/peer/interceptedValidatorInfo.go +++ b/process/peer/interceptedValidatorInfo.go @@ -21,7 +21,6 @@ type ArgInterceptedValidatorInfo struct { // interceptedValidatorInfo is a wrapper over validatorInfo type interceptedValidatorInfo struct { shardValidatorInfo *state.ShardValidatorInfo - nodesCoordinator process.NodesCoordinator hash []byte } From 24db64dae9c11353d4ddb5f1408b248799d0ae74 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Fri, 19 Aug 2022 11:01:29 +0300 Subject: [PATCH 59/70] * Refactored/cleanup code --- process/block/preprocess/validatorInfoPreProcessor.go | 2 +- .../block/preprocess/validatorInfoPreProcessor_test.go | 2 +- sharding/nodesCoordinator/indexHashedNodesCoordinator.go | 8 ++++---- 3 files changed, 6 insertions(+), 
6 deletions(-) diff --git a/process/block/preprocess/validatorInfoPreProcessor.go b/process/block/preprocess/validatorInfoPreProcessor.go index cff4abdffb0..5828ec50833 100644 --- a/process/block/preprocess/validatorInfoPreProcessor.go +++ b/process/block/preprocess/validatorInfoPreProcessor.go @@ -1,7 +1,6 @@ package preprocess import ( - "github.com/ElrondNetwork/elrond-go/common" "time" "github.com/ElrondNetwork/elrond-go-core/core" @@ -10,6 +9,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go-core/hashing" "github.com/ElrondNetwork/elrond-go-core/marshal" + "github.com/ElrondNetwork/elrond-go/common" "github.com/ElrondNetwork/elrond-go/dataRetriever" "github.com/ElrondNetwork/elrond-go/process" "github.com/ElrondNetwork/elrond-go/state" diff --git a/process/block/preprocess/validatorInfoPreProcessor_test.go b/process/block/preprocess/validatorInfoPreProcessor_test.go index 1068a2f1264..f145ed586c4 100644 --- a/process/block/preprocess/validatorInfoPreProcessor_test.go +++ b/process/block/preprocess/validatorInfoPreProcessor_test.go @@ -1,13 +1,13 @@ package preprocess import ( - "github.com/ElrondNetwork/elrond-go/testscommon/genericMocks" "testing" "github.com/ElrondNetwork/elrond-go-core/core" "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go/process" "github.com/ElrondNetwork/elrond-go/testscommon" + "github.com/ElrondNetwork/elrond-go/testscommon/genericMocks" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" "github.com/stretchr/testify/assert" ) diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinator.go b/sharding/nodesCoordinator/indexHashedNodesCoordinator.go index 532f05d12c2..0f33f231d35 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinator.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinator.go @@ -1165,7 +1165,7 @@ func (ihnc *indexHashedNodesCoordinator) createValidatorInfoFromBody( body data.BodyHandler, 
previousTotal uint64, validatorInfoCacher epochStart.ValidatorInfoCacher, - currentEpoch uint32, + epoch uint32, ) ([]*state.ShardValidatorInfo, error) { if check.IfNil(body) { return nil, ErrNilBlockBody @@ -1183,7 +1183,7 @@ func (ihnc *indexHashedNodesCoordinator) createValidatorInfoFromBody( } for _, txHash := range peerMiniBlock.TxHashes { - shardValidatorInfo, err := ihnc.getShardValidatorInfoData(txHash, validatorInfoCacher, currentEpoch) + shardValidatorInfo, err := ihnc.getShardValidatorInfoData(txHash, validatorInfoCacher, epoch) if err != nil { return nil, err } @@ -1195,8 +1195,8 @@ func (ihnc *indexHashedNodesCoordinator) createValidatorInfoFromBody( return allValidatorInfo, nil } -func (ihnc *indexHashedNodesCoordinator) getShardValidatorInfoData(txHash []byte, validatorInfoCacher epochStart.ValidatorInfoCacher, currentEpoch uint32) (*state.ShardValidatorInfo, error) { - if currentEpoch >= ihnc.enableEpochsHandler.RefactorPeersMiniBlocksEnableEpoch() { +func (ihnc *indexHashedNodesCoordinator) getShardValidatorInfoData(txHash []byte, validatorInfoCacher epochStart.ValidatorInfoCacher, epoch uint32) (*state.ShardValidatorInfo, error) { + if epoch >= ihnc.enableEpochsHandler.RefactorPeersMiniBlocksEnableEpoch() { shardValidatorInfo, err := validatorInfoCacher.GetValidatorInfo(txHash) if err != nil { return nil, err From 7a6f74efd3e37dc1d1fe03a74fbcdf1e694cb307 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Fri, 19 Aug 2022 12:41:57 +0300 Subject: [PATCH 60/70] * Fixed sorting of imports --- epochStart/bootstrap/interface.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/epochStart/bootstrap/interface.go b/epochStart/bootstrap/interface.go index 87040153955..293da1fbcf8 100644 --- a/epochStart/bootstrap/interface.go +++ b/epochStart/bootstrap/interface.go @@ -2,12 +2,12 @@ package bootstrap import ( "context" - "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go-core/core" 
"github.com/ElrondNetwork/elrond-go-core/data" "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go/dataRetriever" + "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/process" "github.com/ElrondNetwork/elrond-go/sharding/nodesCoordinator" ) From 9a5b73a0db055f0ad75edf8cc8cd9cee233e8759 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Fri, 19 Aug 2022 18:50:07 +0300 Subject: [PATCH 61/70] * Added unit tests for validatorInfoCreator in validator.go --- epochStart/metachain/validators.go | 5 +- epochStart/metachain/validators_test.go | 646 ++++++++++++++++++++++++ 2 files changed, 650 insertions(+), 1 deletion(-) diff --git a/epochStart/metachain/validators.go b/epochStart/metachain/validators.go index 66dad3c330a..80414438d82 100644 --- a/epochStart/metachain/validators.go +++ b/epochStart/metachain/validators.go @@ -385,7 +385,10 @@ func (vic *validatorInfoCreator) saveValidatorInfo(miniBlock *block.MiniBlock) { continue } - _ = vic.validatorInfoStorage.Put(validatorInfoHash, marshalledData) + err = vic.validatorInfoStorage.Put(validatorInfoHash, marshalledData) + if err != nil { + log.Error("validatorInfoCreator.saveValidatorInfo.Put", "hash", validatorInfoHash, "error", err) + } } } diff --git a/epochStart/metachain/validators_test.go b/epochStart/metachain/validators_test.go index 215748ff634..24cbab85a22 100644 --- a/epochStart/metachain/validators_test.go +++ b/epochStart/metachain/validators_test.go @@ -11,6 +11,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/core" "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go-core/marshal" + "github.com/ElrondNetwork/elrond-go/common" "github.com/ElrondNetwork/elrond-go/dataRetriever" "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/process/mock" @@ -20,6 +21,7 @@ import ( dataRetrieverMock "github.com/ElrondNetwork/elrond-go/testscommon/dataRetriever" 
"github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -559,3 +561,647 @@ func TestEpochValidatorInfoCreator_IsInterfaceNil(t *testing.T) { vic, _ := NewValidatorInfoCreator(arguments) require.False(t, vic.IsInterfaceNil()) } + +func TestEpochValidatorInfoCreator_GetShardValidatorInfoData(t *testing.T) { + t.Parallel() + + t.Run("get shard validator info data before refactor peers mini block activation flag is set", func(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + arguments.EnableEpochsHandler = &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: false, + } + vic, _ := NewValidatorInfoCreator(arguments) + + shardValidatorInfo := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + marshalledShardValidatorInfo, _ := arguments.Marshalizer.Marshal(shardValidatorInfo) + shardValidatorInfoData, _ := vic.getShardValidatorInfoData(shardValidatorInfo) + assert.Equal(t, marshalledShardValidatorInfo, shardValidatorInfoData) + }) + + t.Run("get shard validator info data after refactor peers mini block activation flag is set", func(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + arguments.EnableEpochsHandler = &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + } + vic, _ := NewValidatorInfoCreator(arguments) + + shardValidatorInfo := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + shardValidatorInfoHash, _ := core.CalculateHash(arguments.Marshalizer, arguments.Hasher, shardValidatorInfo) + shardValidatorInfoData, _ := vic.getShardValidatorInfoData(shardValidatorInfo) + assert.Equal(t, shardValidatorInfoHash, shardValidatorInfoData) + }) +} + +func TestEpochValidatorInfoCreator_CreateMarshalledData(t 
*testing.T) { + t.Parallel() + + t.Run("CreateMarshalledData should return nil before refactor peers mini block activation flag is set", func(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + arguments.EnableEpochsHandler = &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: false, + } + vic, _ := NewValidatorInfoCreator(arguments) + + body := &block.Body{ + MiniBlocks: []*block.MiniBlock{ + { + SenderShardID: 0, + ReceiverShardID: 1, + Type: block.TxBlock, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + }, + }, + } + + marshalledData := vic.CreateMarshalledData(body) + assert.Nil(t, marshalledData) + }) + + t.Run("CreateMarshalledData should return nil body is nil", func(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + arguments.EnableEpochsHandler = &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + } + vic, _ := NewValidatorInfoCreator(arguments) + + marshalledData := vic.CreateMarshalledData(nil) + assert.Nil(t, marshalledData) + }) + + t.Run("CreateMarshalledData should return empty slice when there is no peer mini block in body", func(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + arguments.EnableEpochsHandler = &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + } + vic, _ := NewValidatorInfoCreator(arguments) + + body := &block.Body{ + MiniBlocks: []*block.MiniBlock{ + { + SenderShardID: 0, + ReceiverShardID: 1, + Type: block.TxBlock, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + }, + }, + } + + marshalledData := vic.CreateMarshalledData(body) + assert.Equal(t, make(map[string][][]byte), marshalledData) + }) + + t.Run("CreateMarshalledData should return empty slice when sender or receiver do not match", func(t *testing.T) { + t.Parallel() + + 
arguments := createMockEpochValidatorInfoCreatorsArguments() + arguments.EnableEpochsHandler = &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + } + vic, _ := NewValidatorInfoCreator(arguments) + + body := &block.Body{ + MiniBlocks: []*block.MiniBlock{ + { + SenderShardID: 0, + ReceiverShardID: 1, + Type: block.PeerBlock, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + }, + }, + } + + marshalledData := vic.CreateMarshalledData(body) + assert.Equal(t, make(map[string][][]byte), marshalledData) + }) + + t.Run("CreateMarshalledData should return empty slice when tx hash does not exist in validator info cacher", func(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + arguments.EnableEpochsHandler = &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + } + arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &validatorInfoCacherMock.ValidatorInfoCacherMock{ + GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + return nil, errors.New("error") + }, + } + }, + } + vic, _ := NewValidatorInfoCreator(arguments) + + body := &block.Body{ + MiniBlocks: []*block.MiniBlock{ + { + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + Type: block.PeerBlock, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + }, + }, + } + + marshalledData := vic.CreateMarshalledData(body) + assert.Equal(t, make(map[string][][]byte), marshalledData) + }) + + t.Run("CreateMarshalledData should work", func(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + + svi1 := &state.ShardValidatorInfo{PublicKey: []byte("x")} + marshalledSVI1, _ := arguments.Marshalizer.Marshal(svi1) + + svi2 := &state.ShardValidatorInfo{PublicKey: []byte("y")} + marshalledSVI2, 
_ := arguments.Marshalizer.Marshal(svi2) + + svi3 := &state.ShardValidatorInfo{PublicKey: []byte("z")} + marshalledSVI3, _ := arguments.Marshalizer.Marshal(svi3) + + arguments.EnableEpochsHandler = &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + } + arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &validatorInfoCacherMock.ValidatorInfoCacherMock{ + GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + if bytes.Equal(validatorInfoHash, []byte("a")) { + return svi1, nil + } + if bytes.Equal(validatorInfoHash, []byte("b")) { + return svi2, nil + } + if bytes.Equal(validatorInfoHash, []byte("c")) { + return svi3, nil + } + return nil, errors.New("error") + }, + } + }, + } + vic, _ := NewValidatorInfoCreator(arguments) + + body := &block.Body{ + MiniBlocks: []*block.MiniBlock{ + { + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + Type: block.PeerBlock, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + }, + }, + } + + marshalledData := vic.CreateMarshalledData(body) + require.Equal(t, 1, len(marshalledData)) + require.Equal(t, 3, len(marshalledData[common.ValidatorInfoTopic])) + assert.Equal(t, marshalledSVI1, marshalledData[common.ValidatorInfoTopic][0]) + assert.Equal(t, marshalledSVI2, marshalledData[common.ValidatorInfoTopic][1]) + assert.Equal(t, marshalledSVI3, marshalledData[common.ValidatorInfoTopic][2]) + }) +} + +func TestEpochValidatorInfoCreator_SetMarshalledValidatorInfoTxsShouldWork(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + + svi1 := &state.ShardValidatorInfo{PublicKey: []byte("x")} + marshalledSVI1, _ := arguments.Marshalizer.Marshal(svi1) + + svi2 := &state.ShardValidatorInfo{PublicKey: []byte("y")} + marshalledSVI2, _ := arguments.Marshalizer.Marshal(svi2) + + arguments.EnableEpochsHandler 
= &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + } + arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &validatorInfoCacherMock.ValidatorInfoCacherMock{ + GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + if bytes.Equal(validatorInfoHash, []byte("a")) { + return svi1, nil + } + if bytes.Equal(validatorInfoHash, []byte("c")) { + return svi2, nil + } + return nil, errors.New("error") + }, + } + }, + } + vic, _ := NewValidatorInfoCreator(arguments) + + miniBlock := &block.MiniBlock{ + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + Type: block.PeerBlock, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + } + + marshalledValidatorInfoTxs := make(map[string][][]byte) + vic.setMarshalledValidatorInfoTxs(miniBlock, marshalledValidatorInfoTxs, common.ValidatorInfoTopic) + + require.Equal(t, 1, len(marshalledValidatorInfoTxs)) + require.Equal(t, 2, len(marshalledValidatorInfoTxs[common.ValidatorInfoTopic])) + assert.Equal(t, marshalledSVI1, marshalledValidatorInfoTxs[common.ValidatorInfoTopic][0]) + assert.Equal(t, marshalledSVI2, marshalledValidatorInfoTxs[common.ValidatorInfoTopic][1]) +} + +func TestEpochValidatorInfoCreator_GetValidatorInfoTxsShouldWork(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + + svi1 := &state.ShardValidatorInfo{PublicKey: []byte("x")} + svi2 := &state.ShardValidatorInfo{PublicKey: []byte("y")} + svi3 := &state.ShardValidatorInfo{PublicKey: []byte("z")} + + arguments.EnableEpochsHandler = &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + } + arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &validatorInfoCacherMock.ValidatorInfoCacherMock{ + 
GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + if bytes.Equal(validatorInfoHash, []byte("a")) { + return svi1, nil + } + if bytes.Equal(validatorInfoHash, []byte("b")) { + return svi2, nil + } + if bytes.Equal(validatorInfoHash, []byte("c")) { + return svi3, nil + } + return nil, errors.New("error") + }, + } + }, + } + vic, _ := NewValidatorInfoCreator(arguments) + + body := &block.Body{ + MiniBlocks: []*block.MiniBlock{ + { + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + Type: block.TxBlock, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + }, + { + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + Type: block.PeerBlock, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + }, + }, + } + + mapValidatorInfoTxs := vic.GetValidatorInfoTxs(body) + + require.Equal(t, 3, len(mapValidatorInfoTxs)) + require.Equal(t, svi1, mapValidatorInfoTxs["a"]) + require.Equal(t, svi2, mapValidatorInfoTxs["b"]) + require.Equal(t, svi3, mapValidatorInfoTxs["c"]) +} + +func TestEpochValidatorInfoCreator_SetMapShardValidatorInfoShouldWork(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + + svi1 := &state.ShardValidatorInfo{PublicKey: []byte("x")} + svi2 := &state.ShardValidatorInfo{PublicKey: []byte("y")} + + arguments.EnableEpochsHandler = &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: true, + } + arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &validatorInfoCacherMock.ValidatorInfoCacherMock{ + GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + if bytes.Equal(validatorInfoHash, []byte("a")) { + return svi1, nil + } + if bytes.Equal(validatorInfoHash, []byte("b")) { + return svi2, nil + } + return nil, errors.New("error") + }, + } + }, + } + vic, _ := 
NewValidatorInfoCreator(arguments) + + miniBlock := &block.MiniBlock{ + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + Type: block.TxBlock, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + } + + mapShardValidatorInfo := make(map[string]*state.ShardValidatorInfo) + vic.setMapShardValidatorInfo(miniBlock, mapShardValidatorInfo) + + require.Equal(t, 2, len(mapShardValidatorInfo)) + require.Equal(t, svi1, mapShardValidatorInfo["a"]) + require.Equal(t, svi2, mapShardValidatorInfo["b"]) +} + +func TestEpochValidatorInfoCreator_GetShardValidatorInfoShouldWork(t *testing.T) { + t.Parallel() + + t.Run("get shard validator info before refactor peers mini block activation flag is set", func(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + + svi := &state.ShardValidatorInfo{PublicKey: []byte("x")} + marshalledSVI, _ := arguments.Marshalizer.Marshal(svi) + + arguments.EnableEpochsHandler = &testscommon.EnableEpochsHandlerStub{ + IsRefactorPeersMiniBlocksFlagEnabledField: false, + } + arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &validatorInfoCacherMock.ValidatorInfoCacherMock{ + GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + if bytes.Equal(validatorInfoHash, []byte("a")) { + return svi, nil + } + return nil, errors.New("error") + }, + } + }, + } + vic, _ := NewValidatorInfoCreator(arguments) + + shardValidatorInfo, _ := vic.getShardValidatorInfo(marshalledSVI) + require.Equal(t, svi, shardValidatorInfo) + }) + + t.Run("get shard validator info after refactor peers mini block activation flag is set", func(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + + svi := &state.ShardValidatorInfo{PublicKey: []byte("x")} + + arguments.EnableEpochsHandler = &testscommon.EnableEpochsHandlerStub{ + 
IsRefactorPeersMiniBlocksFlagEnabledField: true, + } + arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &validatorInfoCacherMock.ValidatorInfoCacherMock{ + GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + if bytes.Equal(validatorInfoHash, []byte("a")) { + return svi, nil + } + return nil, errors.New("error") + }, + } + }, + } + vic, _ := NewValidatorInfoCreator(arguments) + + shardValidatorInfo, _ := vic.getShardValidatorInfo([]byte("a")) + require.Equal(t, svi, shardValidatorInfo) + }) +} + +func TestEpochValidatorInfoCreator_SaveValidatorInfoShouldWork(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + + svi1 := &state.ShardValidatorInfo{PublicKey: []byte("x")} + marshalledSVI1, _ := arguments.Marshalizer.Marshal(svi1) + + svi2 := &state.ShardValidatorInfo{PublicKey: []byte("y")} + marshalledSVI2, _ := arguments.Marshalizer.Marshal(svi2) + + storer := createMemUnit() + arguments.ValidatorInfoStorage = storer + arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &validatorInfoCacherMock.ValidatorInfoCacherMock{ + GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + if bytes.Equal(validatorInfoHash, []byte("a")) { + return svi1, nil + } + if bytes.Equal(validatorInfoHash, []byte("b")) { + return svi2, nil + } + return nil, errors.New("error") + }, + } + }, + } + vic, _ := NewValidatorInfoCreator(arguments) + + miniBlock := &block.MiniBlock{ + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + Type: block.TxBlock, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + } + + vic.saveValidatorInfo(miniBlock) + + msvi1, err := storer.Get([]byte("a")) + assert.Nil(t, err) + assert.Equal(t, marshalledSVI1, msvi1) + + msvi2, err := 
storer.Get([]byte("b")) + assert.Nil(t, err) + assert.Equal(t, marshalledSVI2, msvi2) + + msvi3, err := storer.Get([]byte("c")) + assert.NotNil(t, err) + assert.Nil(t, msvi3) +} + +func TestEpochValidatorInfoCreator_RemoveValidatorInfoFromStorageShouldWork(t *testing.T) { + t.Parallel() + + arguments := createMockEpochValidatorInfoCreatorsArguments() + + storer := createMemUnit() + arguments.ValidatorInfoStorage = storer + vic, _ := NewValidatorInfoCreator(arguments) + + body := &block.Body{ + MiniBlocks: []*block.MiniBlock{ + { + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + Type: block.TxBlock, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + }, + { + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + Type: block.PeerBlock, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + }, + }, + } + + _ = storer.Put([]byte("a"), []byte("aa")) + _ = storer.Put([]byte("b"), []byte("bb")) + _ = storer.Put([]byte("c"), []byte("cc")) + _ = storer.Put([]byte("d"), []byte("dd")) + + vic.removeValidatorInfoFromStorage(body) + + msvi, err := storer.Get([]byte("a")) + assert.NotNil(t, err) + assert.Nil(t, msvi) + + msvi, err = storer.Get([]byte("b")) + assert.NotNil(t, err) + assert.Nil(t, msvi) + + msvi, err = storer.Get([]byte("c")) + assert.NotNil(t, err) + assert.Nil(t, msvi) + + msvi, err = storer.Get([]byte("d")) + assert.Nil(t, err) + assert.Equal(t, []byte("dd"), msvi) +} + +func TestEpochValidatorInfoCreator_RemoveValidatorInfoFromPoolShouldWork(t *testing.T) { + t.Parallel() + + shardedDataCacheNotifierMock := testscommon.NewShardedDataCacheNotifierMock() + arguments := createMockEpochValidatorInfoCreatorsArguments() + arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &validatorInfoCacherMock.ValidatorInfoCacherMock{} + }, + ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { + return 
shardedDataCacheNotifierMock + }, + } + + vic, _ := NewValidatorInfoCreator(arguments) + + body := &block.Body{ + MiniBlocks: []*block.MiniBlock{ + { + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + Type: block.TxBlock, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + }, + { + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + Type: block.PeerBlock, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + }, + }, + } + + svi1 := &state.ShardValidatorInfo{PublicKey: []byte("aa")} + svi2 := &state.ShardValidatorInfo{PublicKey: []byte("bb")} + svi3 := &state.ShardValidatorInfo{PublicKey: []byte("cc")} + svi4 := &state.ShardValidatorInfo{PublicKey: []byte("dd")} + + shardedDataCacheNotifierMock.AddData([]byte("a"), svi1, svi1.Size(), "x") + shardedDataCacheNotifierMock.AddData([]byte("b"), svi2, svi2.Size(), "x") + shardedDataCacheNotifierMock.AddData([]byte("c"), svi3, svi3.Size(), "x") + shardedDataCacheNotifierMock.AddData([]byte("d"), svi4, svi4.Size(), "x") + + vic.removeValidatorInfoFromPool(body) + + svi, found := shardedDataCacheNotifierMock.SearchFirstData([]byte("a")) + assert.False(t, found) + assert.Nil(t, svi) + + svi, found = shardedDataCacheNotifierMock.SearchFirstData([]byte("b")) + assert.False(t, found) + assert.Nil(t, svi) + + svi, found = shardedDataCacheNotifierMock.SearchFirstData([]byte("c")) + assert.False(t, found) + assert.Nil(t, svi) + + svi, found = shardedDataCacheNotifierMock.SearchFirstData([]byte("d")) + assert.True(t, found) + assert.Equal(t, svi4, svi) +} From 1ec62e749f2d5e1e279460e722318a5961f7f2ee Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Mon, 22 Aug 2022 18:55:07 +0300 Subject: [PATCH 62/70] * Added unit tests for peerMiniBlocksSyncer in shardChain package * Added unit tests for trigger in shardChain package --- .../shardchain/peerMiniBlocksSyncer_test.go | 456 ++++++++++++++++++ epochStart/shardchain/trigger_test.go | 119 +++++ 2 files changed, 575 
insertions(+) diff --git a/epochStart/shardchain/peerMiniBlocksSyncer_test.go b/epochStart/shardchain/peerMiniBlocksSyncer_test.go index 8c644a83bf8..c2197a48ff3 100644 --- a/epochStart/shardchain/peerMiniBlocksSyncer_test.go +++ b/epochStart/shardchain/peerMiniBlocksSyncer_test.go @@ -13,6 +13,7 @@ import ( "github.com/ElrondNetwork/elrond-go/process" "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/testscommon" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -344,3 +345,458 @@ func TestValidatorInfoProcessor_ProcesStartOfEpochWithMissinPeerMiniblocksTimeou require.Equal(t, process.ErrTimeIsOut, processError) } + +func TestValidatorInfoProcessor_SyncValidatorsInfo(t *testing.T) { + t.Parallel() + + t.Run("sync validators info with nil block body", func(t *testing.T) { + t.Parallel() + + args := createDefaultArguments() + syncer, _ := NewPeerMiniBlockSyncer(args) + + missingValidatorsInfoHashes, validatorsInfo, err := syncer.SyncValidatorsInfo(nil) + assert.Nil(t, missingValidatorsInfoHashes) + assert.Nil(t, validatorsInfo) + assert.Equal(t, epochStart.ErrNilBlockBody, err) + }) + + t.Run("sync validators info with missing validators info", func(t *testing.T) { + t.Parallel() + + args := createDefaultArguments() + args.ValidatorsInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + return nil, false + }, + } + syncer, _ := NewPeerMiniBlockSyncer(args) + + body := &block.Body{ + MiniBlocks: []*block.MiniBlock{ + { + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + Type: block.TxBlock, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + }, + { + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + Type: block.PeerBlock, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + }, + }, + } + + missingValidatorsInfoHashes, validatorsInfo, err := syncer.SyncValidatorsInfo(body) + + 
assert.Equal(t, 3, len(missingValidatorsInfoHashes)) + assert.Nil(t, validatorsInfo) + assert.Equal(t, process.ErrTimeIsOut, err) + }) + + t.Run("sync validators info without missing validators info", func(t *testing.T) { + t.Parallel() + + svi1 := &state.ShardValidatorInfo{PublicKey: []byte("x")} + svi2 := &state.ShardValidatorInfo{PublicKey: []byte("y")} + svi3 := &state.ShardValidatorInfo{PublicKey: []byte("z")} + + args := createDefaultArguments() + args.ValidatorsInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + if bytes.Equal(key, []byte("a")) { + return svi1, true + } + if bytes.Equal(key, []byte("b")) { + return svi2, true + } + if bytes.Equal(key, []byte("c")) { + return svi3, true + } + return nil, false + }, + } + syncer, _ := NewPeerMiniBlockSyncer(args) + + body := &block.Body{ + MiniBlocks: []*block.MiniBlock{ + { + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + Type: block.TxBlock, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + }, + { + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + Type: block.PeerBlock, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + }, + }, + } + + missingValidatorsInfoHashes, validatorsInfo, err := syncer.SyncValidatorsInfo(body) + + assert.Nil(t, err) + assert.Nil(t, missingValidatorsInfoHashes) + assert.Equal(t, 3, len(validatorsInfo)) + assert.Equal(t, svi1, validatorsInfo["a"]) + assert.Equal(t, svi2, validatorsInfo["b"]) + assert.Equal(t, svi3, validatorsInfo["c"]) + }) +} + +func TestValidatorInfoProcessor_ReceivedValidatorInfo(t *testing.T) { + t.Parallel() + + t.Run("received validators info with wrong type assertion", func(t *testing.T) { + t.Parallel() + + args := createDefaultArguments() + syncer, _ := NewPeerMiniBlockSyncer(args) + syncer.initValidatorsInfo() + + syncer.mutValidatorsInfoForBlock.Lock() + syncer.mapAllValidatorsInfo["a"] = nil + 
syncer.numMissingValidatorsInfo = 1 + syncer.mutValidatorsInfoForBlock.Unlock() + + syncer.receivedValidatorInfo([]byte("a"), nil) + + syncer.mutValidatorsInfoForBlock.RLock() + numMissingValidatorsInfo := syncer.numMissingValidatorsInfo + syncer.mutValidatorsInfoForBlock.RUnlock() + + assert.Equal(t, uint32(1), numMissingValidatorsInfo) + }) + + t.Run("received validators info with not requested validator info", func(t *testing.T) { + t.Parallel() + + svi := &state.ShardValidatorInfo{PublicKey: []byte("x")} + + args := createDefaultArguments() + syncer, _ := NewPeerMiniBlockSyncer(args) + syncer.initValidatorsInfo() + + syncer.mutValidatorsInfoForBlock.Lock() + syncer.mapAllValidatorsInfo["a"] = nil + syncer.numMissingValidatorsInfo = 1 + syncer.mutValidatorsInfoForBlock.Unlock() + + syncer.receivedValidatorInfo([]byte("b"), svi) + + syncer.mutValidatorsInfoForBlock.RLock() + numMissingValidatorsInfo := syncer.numMissingValidatorsInfo + syncer.mutValidatorsInfoForBlock.RUnlock() + + assert.Equal(t, uint32(1), numMissingValidatorsInfo) + }) + + t.Run("received validators info with already received validator info", func(t *testing.T) { + t.Parallel() + + svi := &state.ShardValidatorInfo{PublicKey: []byte("x")} + + args := createDefaultArguments() + syncer, _ := NewPeerMiniBlockSyncer(args) + syncer.initValidatorsInfo() + + syncer.mutValidatorsInfoForBlock.Lock() + syncer.mapAllValidatorsInfo["a"] = svi + syncer.numMissingValidatorsInfo = 1 + syncer.mutValidatorsInfoForBlock.Unlock() + + syncer.receivedValidatorInfo([]byte("a"), svi) + + syncer.mutValidatorsInfoForBlock.RLock() + numMissingValidatorsInfo := syncer.numMissingValidatorsInfo + syncer.mutValidatorsInfoForBlock.RUnlock() + + assert.Equal(t, uint32(1), numMissingValidatorsInfo) + }) + + t.Run("received validators info with missing validator info", func(t *testing.T) { + t.Parallel() + + svi := &state.ShardValidatorInfo{PublicKey: []byte("x")} + + args := createDefaultArguments() + syncer, _ := 
NewPeerMiniBlockSyncer(args) + syncer.initValidatorsInfo() + + syncer.mutValidatorsInfoForBlock.Lock() + syncer.mapAllValidatorsInfo["a"] = nil + syncer.numMissingValidatorsInfo = 1 + syncer.mutValidatorsInfoForBlock.Unlock() + + var err error + go func() { + select { + case <-syncer.chRcvAllValidatorsInfo: + return + case <-time.After(time.Second): + err = process.ErrTimeIsOut + return + } + }() + + syncer.receivedValidatorInfo([]byte("a"), svi) + + syncer.mutValidatorsInfoForBlock.RLock() + numMissingValidatorsInfo := syncer.numMissingValidatorsInfo + syncer.mutValidatorsInfoForBlock.RUnlock() + + assert.Nil(t, err) + assert.Equal(t, uint32(0), numMissingValidatorsInfo) + }) +} + +func TestValidatorInfoProcessor_GetAllValidatorsInfoShouldWork(t *testing.T) { + t.Parallel() + + svi1 := &state.ShardValidatorInfo{PublicKey: []byte("x")} + svi2 := &state.ShardValidatorInfo{PublicKey: []byte("y")} + svi3 := &state.ShardValidatorInfo{PublicKey: []byte("z")} + + args := createDefaultArguments() + syncer, _ := NewPeerMiniBlockSyncer(args) + syncer.initValidatorsInfo() + + syncer.mutValidatorsInfoForBlock.Lock() + syncer.mapAllValidatorsInfo["a"] = svi1 + syncer.mapAllValidatorsInfo["b"] = svi2 + syncer.mapAllValidatorsInfo["c"] = svi3 + syncer.mutValidatorsInfoForBlock.Unlock() + + body := &block.Body{ + MiniBlocks: []*block.MiniBlock{ + { + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + Type: block.TxBlock, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + }, + { + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + Type: block.PeerBlock, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + }, + }, + } + + validatorsInfo := syncer.getAllValidatorsInfo(body) + + assert.Equal(t, 3, len(validatorsInfo)) + assert.Equal(t, svi1, validatorsInfo["a"]) + assert.Equal(t, svi2, validatorsInfo["b"]) + assert.Equal(t, svi3, validatorsInfo["c"]) +} + +func 
TestValidatorInfoProcessor_ComputeMissingValidatorsInfoShouldWork(t *testing.T) { + t.Parallel() + + svi1 := &state.ShardValidatorInfo{PublicKey: []byte("x")} + svi2 := &state.ShardValidatorInfo{PublicKey: []byte("y")} + + args := createDefaultArguments() + args.ValidatorsInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + if bytes.Equal(key, []byte("a")) { + return svi1, true + } + if bytes.Equal(key, []byte("b")) { + return svi2, true + } + return nil, false + }, + } + syncer, _ := NewPeerMiniBlockSyncer(args) + syncer.initValidatorsInfo() + + body := &block.Body{ + MiniBlocks: []*block.MiniBlock{ + { + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + Type: block.TxBlock, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + }, + { + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + Type: block.PeerBlock, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + }, + }, + } + + syncer.computeMissingValidatorsInfo(body) + + syncer.mutValidatorsInfoForBlock.RLock() + assert.Equal(t, uint32(1), syncer.numMissingValidatorsInfo) + assert.Equal(t, 3, len(syncer.mapAllValidatorsInfo)) + assert.Equal(t, svi1, syncer.mapAllValidatorsInfo["a"]) + assert.Equal(t, svi2, syncer.mapAllValidatorsInfo["b"]) + assert.Nil(t, syncer.mapAllValidatorsInfo["c"]) + syncer.mutValidatorsInfoForBlock.RUnlock() +} + +func TestValidatorInfoProcessor_RetrieveMissingValidatorsInfo(t *testing.T) { + t.Parallel() + + t.Run("retrieve missing validators info without missing validators info", func(t *testing.T) { + t.Parallel() + + args := createDefaultArguments() + syncer, _ := NewPeerMiniBlockSyncer(args) + + missingValidatorsInfoHashes, err := syncer.retrieveMissingValidatorsInfo() + assert.Nil(t, missingValidatorsInfoHashes) + assert.Nil(t, err) + }) + + t.Run("retrieve missing validators info with not all validators info received", func(t *testing.T) { + t.Parallel() + 
+ svi1 := &state.ShardValidatorInfo{PublicKey: []byte("x")} + svi2 := &state.ShardValidatorInfo{PublicKey: []byte("y")} + svi3 := &state.ShardValidatorInfo{PublicKey: []byte("z")} + + args := createDefaultArguments() + syncer, _ := NewPeerMiniBlockSyncer(args) + syncer.initValidatorsInfo() + syncer.requestHandler = &testscommon.RequestHandlerStub{ + RequestValidatorsInfoCalled: func(hashes [][]byte) { + syncer.mutValidatorsInfoForBlock.Lock() + for _, hash := range hashes { + if bytes.Equal(hash, []byte("a")) { + syncer.mapAllValidatorsInfo["a"] = svi1 + } + if bytes.Equal(hash, []byte("b")) { + syncer.mapAllValidatorsInfo["b"] = svi2 + } + if bytes.Equal(hash, []byte("c")) { + syncer.mapAllValidatorsInfo["c"] = svi3 + } + } + syncer.mutValidatorsInfoForBlock.Unlock() + }, + } + + syncer.mutValidatorsInfoForBlock.Lock() + syncer.mapAllValidatorsInfo["a"] = nil + syncer.mapAllValidatorsInfo["b"] = nil + syncer.mapAllValidatorsInfo["c"] = nil + syncer.mapAllValidatorsInfo["d"] = nil + syncer.mutValidatorsInfoForBlock.Unlock() + + missingValidatorsInfoHashes, err := syncer.retrieveMissingValidatorsInfo() + assert.Equal(t, process.ErrTimeIsOut, err) + require.Equal(t, 1, len(missingValidatorsInfoHashes)) + assert.Equal(t, []byte("d"), missingValidatorsInfoHashes[0]) + }) + + t.Run("retrieve missing validators info with all validators info received", func(t *testing.T) { + t.Parallel() + + svi1 := &state.ShardValidatorInfo{PublicKey: []byte("x")} + svi2 := &state.ShardValidatorInfo{PublicKey: []byte("y")} + svi3 := &state.ShardValidatorInfo{PublicKey: []byte("z")} + + args := createDefaultArguments() + syncer, _ := NewPeerMiniBlockSyncer(args) + syncer.initValidatorsInfo() + syncer.requestHandler = &testscommon.RequestHandlerStub{ + RequestValidatorsInfoCalled: func(hashes [][]byte) { + syncer.mutValidatorsInfoForBlock.Lock() + for _, hash := range hashes { + if bytes.Equal(hash, []byte("a")) { + syncer.mapAllValidatorsInfo["a"] = svi1 + } + if bytes.Equal(hash, 
[]byte("b")) { + syncer.mapAllValidatorsInfo["b"] = svi2 + } + if bytes.Equal(hash, []byte("c")) { + syncer.mapAllValidatorsInfo["c"] = svi3 + } + } + syncer.mutValidatorsInfoForBlock.Unlock() + }, + } + + syncer.mutValidatorsInfoForBlock.Lock() + syncer.mapAllValidatorsInfo["a"] = nil + syncer.mapAllValidatorsInfo["b"] = nil + syncer.mapAllValidatorsInfo["c"] = nil + syncer.mutValidatorsInfoForBlock.Unlock() + + go func() { + time.Sleep(waitTime / 2) + syncer.chRcvAllValidatorsInfo <- struct{}{} + }() + + missingValidatorsInfoHashes, err := syncer.retrieveMissingValidatorsInfo() + + assert.Nil(t, err) + assert.Nil(t, missingValidatorsInfoHashes) + }) +} + +func TestValidatorInfoProcessor_GetAllMissingValidatorsInfoHashesShouldWork(t *testing.T) { + t.Parallel() + + svi1 := &state.ShardValidatorInfo{PublicKey: []byte("x")} + svi2 := &state.ShardValidatorInfo{PublicKey: []byte("y")} + svi3 := &state.ShardValidatorInfo{PublicKey: []byte("z")} + + args := createDefaultArguments() + syncer, _ := NewPeerMiniBlockSyncer(args) + syncer.initValidatorsInfo() + + syncer.mutValidatorsInfoForBlock.Lock() + syncer.mapAllValidatorsInfo["a"] = svi1 + syncer.mapAllValidatorsInfo["b"] = svi2 + syncer.mapAllValidatorsInfo["c"] = svi3 + syncer.mapAllValidatorsInfo["d"] = nil + syncer.mutValidatorsInfoForBlock.Unlock() + + missingValidatorsInfoHashes := syncer.getAllMissingValidatorsInfoHashes() + require.Equal(t, 1, len(missingValidatorsInfoHashes)) + assert.Equal(t, []byte("d"), missingValidatorsInfoHashes[0]) +} diff --git a/epochStart/shardchain/trigger_test.go b/epochStart/shardchain/trigger_test.go index 9198fb9bb14..1634db72588 100644 --- a/epochStart/shardchain/trigger_test.go +++ b/epochStart/shardchain/trigger_test.go @@ -14,6 +14,7 @@ import ( "github.com/ElrondNetwork/elrond-go/dataRetriever" "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/epochStart/mock" + "github.com/ElrondNetwork/elrond-go/state" 
"github.com/ElrondNetwork/elrond-go/storage" "github.com/ElrondNetwork/elrond-go/testscommon" dataRetrieverMock "github.com/ElrondNetwork/elrond-go/testscommon/dataRetriever" @@ -605,3 +606,121 @@ func TestTrigger_ReceivedHeaderChangeEpochFinalityAttestingRound(t *testing.T) { epochStartTrigger.receivedMetaBlock(header103, hash103) require.Equal(t, uint64(102), epochStartTrigger.EpochFinalityAttestingRound()) } + +func TestTrigger_ClearMissingValidatorsInfoMapShouldWork(t *testing.T) { + t.Parallel() + + args := createMockShardEpochStartTriggerArguments() + epochStartTrigger, _ := NewEpochStartTrigger(args) + + epochStartTrigger.mutMissingValidatorsInfo.Lock() + epochStartTrigger.mapMissingValidatorsInfo["a"] = 0 + epochStartTrigger.mapMissingValidatorsInfo["b"] = 0 + epochStartTrigger.mapMissingValidatorsInfo["c"] = 1 + epochStartTrigger.mapMissingValidatorsInfo["d"] = 1 + epochStartTrigger.mutMissingValidatorsInfo.Unlock() + + epochStartTrigger.mutMissingValidatorsInfo.RLock() + numMissingValidatorsInfo := len(epochStartTrigger.mapMissingValidatorsInfo) + epochStartTrigger.mutMissingValidatorsInfo.RUnlock() + assert.Equal(t, 4, numMissingValidatorsInfo) + + epochStartTrigger.clearMissingValidatorsInfoMap(0) + + epochStartTrigger.mutMissingValidatorsInfo.RLock() + numMissingValidatorsInfo = len(epochStartTrigger.mapMissingValidatorsInfo) + epochStartTrigger.mutMissingValidatorsInfo.RUnlock() + assert.Equal(t, 2, len(epochStartTrigger.mapMissingValidatorsInfo)) + + assert.Equal(t, uint32(1), epochStartTrigger.mapMissingValidatorsInfo["c"]) + assert.Equal(t, uint32(1), epochStartTrigger.mapMissingValidatorsInfo["d"]) +} + +func TestTrigger_UpdateMissingValidatorsInfo(t *testing.T) { + t.Parallel() + + t.Run("update missing validators when there are no missing validators", func(t *testing.T) { + t.Parallel() + + args := createMockShardEpochStartTriggerArguments() + epochStartTrigger, _ := NewEpochStartTrigger(args) + + epochStartTrigger.updateMissingValidatorsInfo() 
+ + epochStartTrigger.mutMissingValidatorsInfo.RLock() + assert.Equal(t, 0, len(epochStartTrigger.mapMissingValidatorsInfo)) + epochStartTrigger.mutMissingValidatorsInfo.RUnlock() + }) + + t.Run("update missing validators when there are missing validators", func(t *testing.T) { + t.Parallel() + + svi1 := &state.ShardValidatorInfo{PublicKey: []byte("x")} + svi2 := &state.ShardValidatorInfo{PublicKey: []byte("y")} + + args := createMockShardEpochStartTriggerArguments() + + args.DataPool = &dataRetrieverMock.PoolsHolderStub{ + HeadersCalled: func() dataRetriever.HeadersPool { + return &mock.HeadersCacherStub{} + }, + MiniBlocksCalled: func() storage.Cacher { + return testscommon.NewCacherStub() + }, + CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { + return &validatorInfoCacherMock.ValidatorInfoCacherMock{} + }, + ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { + return &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + if bytes.Equal(key, []byte("a")) { + return svi1, true + } + if bytes.Equal(key, []byte("b")) { + return svi2, true + } + + return nil, false + }, + } + }, + } + + epochStartTrigger, _ := NewEpochStartTrigger(args) + + epochStartTrigger.mutMissingValidatorsInfo.Lock() + epochStartTrigger.mapMissingValidatorsInfo["a"] = 1 + epochStartTrigger.mapMissingValidatorsInfo["b"] = 1 + epochStartTrigger.mapMissingValidatorsInfo["c"] = 1 + epochStartTrigger.mutMissingValidatorsInfo.Unlock() + + epochStartTrigger.updateMissingValidatorsInfo() + + epochStartTrigger.mutMissingValidatorsInfo.RLock() + assert.Equal(t, 1, len(epochStartTrigger.mapMissingValidatorsInfo)) + assert.Equal(t, uint32(1), epochStartTrigger.mapMissingValidatorsInfo["c"]) + epochStartTrigger.mutMissingValidatorsInfo.RUnlock() + }) +} + +func TestTrigger_AddMissingValidatorsInfo(t *testing.T) { + t.Parallel() + + args := createMockShardEpochStartTriggerArguments() + epochStartTrigger, _ := 
NewEpochStartTrigger(args) + + missingValidatorsInfoHashes := [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + } + + epochStartTrigger.addMissingValidatorsInfo(1, missingValidatorsInfoHashes) + + epochStartTrigger.mutMissingValidatorsInfo.RLock() + assert.Equal(t, 3, len(epochStartTrigger.mapMissingValidatorsInfo)) + assert.Equal(t, uint32(1), epochStartTrigger.mapMissingValidatorsInfo["a"]) + assert.Equal(t, uint32(1), epochStartTrigger.mapMissingValidatorsInfo["b"]) + assert.Equal(t, uint32(1), epochStartTrigger.mapMissingValidatorsInfo["c"]) + epochStartTrigger.mutMissingValidatorsInfo.RUnlock() +} From 73a3726fb5bd567063bb4068d39e430ab41aa8fa Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Wed, 24 Aug 2022 17:13:12 +0300 Subject: [PATCH 63/70] * Added unit tests for metaProcessor in metaBlock.go * Added unit tests for validatorInfoPreprocessor in validatorInfoPreprocessor.go * Added unit tests for indexHashedNodesCoordinator in indexHashedNodesCoordinator.go * Added unit tests for stateExport in export.go * Added unit tests for transactionsSync in syncTransactions.go --- process/block/export_test.go | 20 +- process/block/metablock_test.go | 100 ++- .../validatorInfoPreProcessor_test.go | 82 +++ .../indexHashedNodesCoordinator_test.go | 55 ++ update/genesis/export_test.go | 149 +++++ update/sync/syncTransactions_test.go | 586 ++++++++++++++++++ 6 files changed, 978 insertions(+), 14 deletions(-) diff --git a/process/block/export_test.go b/process/block/export_test.go index 8c58c102f79..f1616c4182c 100644 --- a/process/block/export_test.go +++ b/process/block/export_test.go @@ -147,15 +147,15 @@ func NewShardProcessorEmptyWith3shards( return nil }, }, - BlockTracker: mock.NewBlockTrackerMock(shardCoordinator, genesisBlocks), - BlockSizeThrottler: &mock.BlockSizeThrottlerStub{}, - Version: "softwareVersion", - HistoryRepository: &dblookupext.HistoryRepositoryStub{}, + BlockTracker: mock.NewBlockTrackerMock(shardCoordinator, genesisBlocks), + 
BlockSizeThrottler: &mock.BlockSizeThrottlerStub{}, + Version: "softwareVersion", + HistoryRepository: &dblookupext.HistoryRepositoryStub{}, EnableRoundsHandler: &testscommon.EnableRoundsHandlerStub{}, - GasHandler: &mock.GasHandlerMock{}, - ScheduledTxsExecutionHandler: &testscommon.ScheduledTxsExecutionStub{}, - ProcessedMiniBlocksTracker: &testscommon.ProcessedMiniBlocksTrackerStub{}, - ReceiptsRepository: &testscommon.ReceiptsRepositoryStub{}, + GasHandler: &mock.GasHandlerMock{}, + ScheduledTxsExecutionHandler: &testscommon.ScheduledTxsExecutionStub{}, + ProcessedMiniBlocksTracker: &testscommon.ProcessedMiniBlocksTrackerStub{}, + ReceiptsRepository: &testscommon.ReceiptsRepositoryStub{}, }, } shardProc, err := NewShardProcessor(arguments) @@ -537,3 +537,7 @@ func (sp *shardProcessor) RollBackProcessedMiniBlocksInfo(headerHandler data.Hea func (bp *baseProcessor) CheckConstructionStateAndIndexesCorrectness(mbh data.MiniBlockHeaderHandler) error { return checkConstructionStateAndIndexesCorrectness(mbh) } + +func (mp *metaProcessor) GetAllMarshalledTxs(body *block.Body) map[string][][]byte { + return mp.getAllMarshalledTxs(body) +} diff --git a/process/block/metablock_test.go b/process/block/metablock_test.go index d8441337bd1..6df809e155a 100644 --- a/process/block/metablock_test.go +++ b/process/block/metablock_test.go @@ -130,13 +130,13 @@ func createMockMetaArguments( return nil }, }, - BlockTracker: mock.NewBlockTrackerMock(bootstrapComponents.ShardCoordinator(), startHeaders), - BlockSizeThrottler: &mock.BlockSizeThrottlerStub{}, - HistoryRepository: &dblookupext.HistoryRepositoryStub{}, + BlockTracker: mock.NewBlockTrackerMock(bootstrapComponents.ShardCoordinator(), startHeaders), + BlockSizeThrottler: &mock.BlockSizeThrottlerStub{}, + HistoryRepository: &dblookupext.HistoryRepositoryStub{}, EnableRoundsHandler: &testscommon.EnableRoundsHandlerStub{}, - ScheduledTxsExecutionHandler: &testscommon.ScheduledTxsExecutionStub{}, - ProcessedMiniBlocksTracker: 
&testscommon.ProcessedMiniBlocksTrackerStub{}, - ReceiptsRepository: &testscommon.ReceiptsRepositoryStub{}, + ScheduledTxsExecutionHandler: &testscommon.ScheduledTxsExecutionStub{}, + ProcessedMiniBlocksTracker: &testscommon.ProcessedMiniBlocksTrackerStub{}, + ReceiptsRepository: &testscommon.ReceiptsRepositoryStub{}, }, SCToProtocol: &mock.SCToProtocolStub{}, PendingMiniBlocksHandler: &mock.PendingMiniBlocksHandlerStub{}, @@ -3547,3 +3547,91 @@ func TestMetaProcessor_getFinalMiniBlockHashes(t *testing.T) { assert.Equal(t, expectedMbHeaders, retMbHeaders) }) } + +func TestMetaProcessor_getAllMarshalledTxs(t *testing.T) { + t.Parallel() + + arguments := createMockMetaArguments(createMockComponentHolders()) + + arguments.EpochRewardsCreator = &mock.EpochRewardsCreatorStub{ + CreateMarshalledDataCalled: func(body *block.Body) map[string][][]byte { + marshalledData := make(map[string][][]byte) + for _, miniBlock := range body.MiniBlocks { + if miniBlock.Type != block.RewardsBlock { + continue + } + for _, txHash := range miniBlock.TxHashes { + marshalledData["rewards"] = append(marshalledData["rewards"], txHash) + } + } + return marshalledData + }, + } + + arguments.EpochValidatorInfoCreator = &mock.EpochValidatorInfoCreatorStub{ + CreateMarshalledDataCalled: func(body *block.Body) map[string][][]byte { + marshalledData := make(map[string][][]byte) + for _, miniBlock := range body.MiniBlocks { + if miniBlock.Type != block.PeerBlock { + continue + } + for _, txHash := range miniBlock.TxHashes { + marshalledData["validatorInfo"] = append(marshalledData["validatorInfo"], txHash) + } + } + return marshalledData + }, + } + + mp, _ := blproc.NewMetaProcessor(arguments) + + body := &block.Body{ + MiniBlocks: []*block.MiniBlock{ + { + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + Type: block.TxBlock, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + }, + { + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + Type: 
block.RewardsBlock, + TxHashes: [][]byte{ + []byte("d"), + []byte("e"), + []byte("f"), + }, + }, + { + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + Type: block.PeerBlock, + TxHashes: [][]byte{ + []byte("g"), + []byte("h"), + []byte("i"), + }, + }, + }, + } + + allMarshalledTxs := mp.GetAllMarshalledTxs(body) + + require.Equal(t, 2, len(allMarshalledTxs)) + + require.Equal(t, 3, len(allMarshalledTxs["rewards"])) + require.Equal(t, 3, len(allMarshalledTxs["validatorInfo"])) + + assert.Equal(t, []byte("d"), allMarshalledTxs["rewards"][0]) + assert.Equal(t, []byte("e"), allMarshalledTxs["rewards"][1]) + assert.Equal(t, []byte("f"), allMarshalledTxs["rewards"][2]) + + assert.Equal(t, []byte("g"), allMarshalledTxs["validatorInfo"][0]) + assert.Equal(t, []byte("h"), allMarshalledTxs["validatorInfo"][1]) + assert.Equal(t, []byte("i"), allMarshalledTxs["validatorInfo"][2]) +} diff --git a/process/block/preprocess/validatorInfoPreProcessor_test.go b/process/block/preprocess/validatorInfoPreProcessor_test.go index f145ed586c4..28117061c7c 100644 --- a/process/block/preprocess/validatorInfoPreProcessor_test.go +++ b/process/block/preprocess/validatorInfoPreProcessor_test.go @@ -1,14 +1,19 @@ package preprocess import ( + "bytes" + "errors" "testing" "github.com/ElrondNetwork/elrond-go-core/core" "github.com/ElrondNetwork/elrond-go-core/data/block" + "github.com/ElrondNetwork/elrond-go/dataRetriever" "github.com/ElrondNetwork/elrond-go/process" + "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/testscommon" "github.com/ElrondNetwork/elrond-go/testscommon/genericMocks" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" + "github.com/ElrondNetwork/elrond-go/testscommon/storage" "github.com/stretchr/testify/assert" ) @@ -448,3 +453,80 @@ func TestNewValidatorInfoPreprocessor_RemoveOtherBlockTypeFromPoolShouldNotRemov assert.NotNil(t, foundMb) assert.True(t, ok) } + +func 
TestNewValidatorInfoPreprocessor_RestoreValidatorsInfo(t *testing.T) { + t.Parallel() + + t.Run("restore validators info with not all txs found in storage", func(t *testing.T) { + t.Parallel() + + expectedErr := errors.New("error") + hasher := &hashingMocks.HasherMock{} + marshalizer := &testscommon.MarshalizerMock{} + blockSizeComputation := &testscommon.BlockSizeComputationStub{} + storer := &storage.ChainStorerStub{ + GetAllCalled: func(unitType dataRetriever.UnitType, keys [][]byte) (map[string][]byte, error) { + return nil, expectedErr + }, + } + tdp := initDataPool() + rtp, _ := NewValidatorInfoPreprocessor( + hasher, + marshalizer, + blockSizeComputation, + tdp.ValidatorsInfo(), + storer, + func(txHashes [][]byte) {}, + &testscommon.EnableEpochsHandlerStub{}, + ) + + miniBlock := &block.MiniBlock{} + err := rtp.restoreValidatorsInfo(miniBlock) + assert.Equal(t, expectedErr, err) + }) + + t.Run("restore validators info with all txs found in storage", func(t *testing.T) { + t.Parallel() + + hasher := &hashingMocks.HasherMock{} + marshalizer := &testscommon.MarshalizerMock{} + blockSizeComputation := &testscommon.BlockSizeComputationStub{} + shardValidatorInfoHash := []byte("hash") + shardValidatorInfo := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + marshalledShardValidatorInfo, _ := marshalizer.Marshal(shardValidatorInfo) + storer := &storage.ChainStorerStub{ + GetAllCalled: func(unitType dataRetriever.UnitType, keys [][]byte) (map[string][]byte, error) { + allShardValidatorsInfo := make(map[string][]byte) + allShardValidatorsInfo[string(shardValidatorInfoHash)] = marshalledShardValidatorInfo + return allShardValidatorsInfo, nil + }, + } + tdp := initDataPool() + wasCalledWithExpectedKey := false + tdp.ValidatorsInfoCalled = func() dataRetriever.ShardedDataCacherNotifier { + return &testscommon.ShardedDataStub{ + AddDataCalled: func(key []byte, data interface{}, sizeInBytes int, cacheID string) { + if bytes.Equal(key, shardValidatorInfoHash) { + 
wasCalledWithExpectedKey = true + } + }, + } + } + rtp, _ := NewValidatorInfoPreprocessor( + hasher, + marshalizer, + blockSizeComputation, + tdp.ValidatorsInfo(), + storer, + func(txHashes [][]byte) {}, + &testscommon.EnableEpochsHandlerStub{}, + ) + + miniBlock := &block.MiniBlock{} + err := rtp.restoreValidatorsInfo(miniBlock) + assert.Nil(t, err) + assert.True(t, wasCalledWithExpectedKey) + }) +} diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go b/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go index ab489b7d49d..6ac8861d80e 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go @@ -1,6 +1,7 @@ package nodesCoordinator import ( + "bytes" "encoding/hex" "errors" "fmt" @@ -28,6 +29,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon/genericMocks" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" + validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -2366,3 +2368,56 @@ func TestIndexHashedNodesCoordinator_IsInterfaceNil(t *testing.T) { require.Nil(t, err) require.False(t, check.IfNil(ihnc3)) } + +func TestIndexHashedNodesCoordinator_GetShardValidatorInfoData(t *testing.T) { + t.Parallel() + + t.Run("get shard validator info data before refactor peers mini block activation flag is set", func(t *testing.T) { + t.Parallel() + + txHash := []byte("txHash") + svi := &state.ShardValidatorInfo{PublicKey: []byte("x")} + + arguments := createArguments() + arguments.EnableEpochsHandler = &mock.EnableEpochsHandlerMock{ + RefactorPeersMiniBlocksEnableEpochField: 1, + } + validatorInfoCacher := &validatorInfoCacherMock.ValidatorInfoCacherMock{ + GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + if 
bytes.Equal(validatorInfoHash, txHash) { + return svi, nil + } + return nil, errors.New("error") + }, + } + ihnc, _ := NewIndexHashedNodesCoordinator(arguments) + + marshalledSVI, _ := arguments.Marshalizer.Marshal(svi) + shardValidatorInfo, _ := ihnc.getShardValidatorInfoData(marshalledSVI, validatorInfoCacher, 0) + require.Equal(t, svi, shardValidatorInfo) + }) + + t.Run("get shard validator info data after refactor peers mini block activation flag is set", func(t *testing.T) { + t.Parallel() + + txHash := []byte("txHash") + svi := &state.ShardValidatorInfo{PublicKey: []byte("x")} + + arguments := createArguments() + arguments.EnableEpochsHandler = &mock.EnableEpochsHandlerMock{ + RefactorPeersMiniBlocksEnableEpochField: 0, + } + validatorInfoCacher := &validatorInfoCacherMock.ValidatorInfoCacherMock{ + GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + if bytes.Equal(validatorInfoHash, txHash) { + return svi, nil + } + return nil, errors.New("error") + }, + } + ihnc, _ := NewIndexHashedNodesCoordinator(arguments) + + shardValidatorInfo, _ := ihnc.getShardValidatorInfoData(txHash, validatorInfoCacher, 0) + require.Equal(t, svi, shardValidatorInfo) + }) +} diff --git a/update/genesis/export_test.go b/update/genesis/export_test.go index 5da226caa7b..65a7b1d68f7 100644 --- a/update/genesis/export_test.go +++ b/update/genesis/export_test.go @@ -448,3 +448,152 @@ func TestStateExport_ExportUnfinishedMetaBlocksShouldWork(t *testing.T) { assert.True(t, unFinishedMetablocksWereWrote) } + +func TestStateExport_ExportAllValidatorsInfo(t *testing.T) { + t.Parallel() + + t.Run("export all validators info with state syncer error", func(t *testing.T) { + t.Parallel() + + expectedStateSyncerErr := errors.New("state syncer error") + args := ArgsNewStateExporter{ + Marshalizer: &mock.MarshalizerMock{}, + ShardCoordinator: mock.NewOneShardCoordinatorMock(), + Hasher: &mock.HasherStub{}, + StateSyncer: &mock.StateSyncStub{ + 
GetAllValidatorsInfoCalled: func() (map[string]*state.ShardValidatorInfo, error) { + return nil, expectedStateSyncerErr + }, + }, + HardforkStorer: &mock.HardforkStorerStub{}, + AddressPubKeyConverter: &mock.PubkeyConverterStub{}, + ValidatorPubKeyConverter: &mock.PubkeyConverterStub{}, + GenesisNodesSetupHandler: &mock.GenesisNodesSetupHandlerStub{}, + ExportFolder: "test", + } + + stateExporter, _ := NewStateExporter(args) + err := stateExporter.exportAllValidatorsInfo() + assert.Equal(t, expectedStateSyncerErr, err) + }) + + t.Run("export all validators info with hardfork storer error", func(t *testing.T) { + t.Parallel() + + expectedHardforkStorerErr := errors.New("hardfork storer error") + args := ArgsNewStateExporter{ + Marshalizer: &mock.MarshalizerMock{}, + ShardCoordinator: mock.NewOneShardCoordinatorMock(), + Hasher: &mock.HasherStub{}, + StateSyncer: &mock.StateSyncStub{ + GetAllValidatorsInfoCalled: func() (map[string]*state.ShardValidatorInfo, error) { + mapShardValidatorInfo := make(map[string]*state.ShardValidatorInfo) + shardValidatorInfo := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + mapShardValidatorInfo["key"] = shardValidatorInfo + return mapShardValidatorInfo, nil + }, + }, + HardforkStorer: &mock.HardforkStorerStub{ + WriteCalled: func(identifier string, key []byte, value []byte) error { + return expectedHardforkStorerErr + }, + }, + AddressPubKeyConverter: &mock.PubkeyConverterStub{}, + ValidatorPubKeyConverter: &mock.PubkeyConverterStub{}, + GenesisNodesSetupHandler: &mock.GenesisNodesSetupHandlerStub{}, + ExportFolder: "test", + } + + stateExporter, _ := NewStateExporter(args) + err := stateExporter.exportAllValidatorsInfo() + assert.Equal(t, expectedHardforkStorerErr, err) + }) + + t.Run("export all validators info without error", func(t *testing.T) { + t.Parallel() + + finishedIdentifierWasCalled := false + args := ArgsNewStateExporter{ + Marshalizer: &mock.MarshalizerMock{}, + ShardCoordinator: 
mock.NewOneShardCoordinatorMock(), + Hasher: &mock.HasherStub{}, + StateSyncer: &mock.StateSyncStub{}, + HardforkStorer: &mock.HardforkStorerStub{ + FinishedIdentifierCalled: func(identifier string) error { + finishedIdentifierWasCalled = true + return nil + }, + }, + AddressPubKeyConverter: &mock.PubkeyConverterStub{}, + ValidatorPubKeyConverter: &mock.PubkeyConverterStub{}, + GenesisNodesSetupHandler: &mock.GenesisNodesSetupHandlerStub{}, + ExportFolder: "test", + } + + stateExporter, _ := NewStateExporter(args) + err := stateExporter.exportAllValidatorsInfo() + assert.Nil(t, err) + assert.True(t, finishedIdentifierWasCalled) + }) +} + +func TestStateExport_ExportValidatorInfo(t *testing.T) { + t.Parallel() + + t.Run("export validator info with error", func(t *testing.T) { + t.Parallel() + + expectedErr := errors.New("error") + args := ArgsNewStateExporter{ + Marshalizer: &mock.MarshalizerMock{}, + ShardCoordinator: mock.NewOneShardCoordinatorMock(), + Hasher: &mock.HasherStub{}, + StateSyncer: &mock.StateSyncStub{}, + HardforkStorer: &mock.HardforkStorerStub{ + WriteCalled: func(identifier string, key []byte, value []byte) error { + return expectedErr + }, + }, + AddressPubKeyConverter: &mock.PubkeyConverterStub{}, + ValidatorPubKeyConverter: &mock.PubkeyConverterStub{}, + GenesisNodesSetupHandler: &mock.GenesisNodesSetupHandlerStub{}, + ExportFolder: "test", + } + + stateExporter, _ := NewStateExporter(args) + key := "key" + shardValidatorInfo := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + + err := stateExporter.exportValidatorInfo(key, shardValidatorInfo) + assert.Equal(t, expectedErr, err) + }) + + t.Run("export validator info without error", func(t *testing.T) { + t.Parallel() + + args := ArgsNewStateExporter{ + Marshalizer: &mock.MarshalizerMock{}, + ShardCoordinator: mock.NewOneShardCoordinatorMock(), + Hasher: &mock.HasherStub{}, + StateSyncer: &mock.StateSyncStub{}, + HardforkStorer: &mock.HardforkStorerStub{}, + AddressPubKeyConverter: 
&mock.PubkeyConverterStub{}, + ValidatorPubKeyConverter: &mock.PubkeyConverterStub{}, + GenesisNodesSetupHandler: &mock.GenesisNodesSetupHandlerStub{}, + ExportFolder: "test", + } + + stateExporter, _ := NewStateExporter(args) + key := "key" + shardValidatorInfo := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + + err := stateExporter.exportValidatorInfo(key, shardValidatorInfo) + assert.Nil(t, err) + }) +} diff --git a/update/sync/syncTransactions_test.go b/update/sync/syncTransactions_test.go index a54b3333c36..36a08acb188 100644 --- a/update/sync/syncTransactions_test.go +++ b/update/sync/syncTransactions_test.go @@ -1,8 +1,10 @@ package sync import ( + "bytes" "context" "encoding/json" + "errors" "fmt" "math/big" "strings" @@ -14,10 +16,12 @@ import ( dataTransaction "github.com/ElrondNetwork/elrond-go-core/data/transaction" "github.com/ElrondNetwork/elrond-go/dataRetriever" "github.com/ElrondNetwork/elrond-go/process" + "github.com/ElrondNetwork/elrond-go/state" "github.com/ElrondNetwork/elrond-go/storage" "github.com/ElrondNetwork/elrond-go/testscommon" dataRetrieverMock "github.com/ElrondNetwork/elrond-go/testscommon/dataRetriever" storageStubs "github.com/ElrondNetwork/elrond-go/testscommon/storage" + "github.com/ElrondNetwork/elrond-go/update" "github.com/ElrondNetwork/elrond-go/update/mock" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -341,3 +345,585 @@ func TestSyncPendingTransactionsFor_ReceiveMissingTx(t *testing.T) { cancel() require.Nil(t, err) } + +func TestTransactionsSync_RequestTransactionsForPeerMiniBlockShouldWork(t *testing.T) { + t.Parallel() + + svi1 := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + svi2 := &state.ShardValidatorInfo{ + PublicKey: []byte("y"), + } + + args := createMockArgs() + args.DataPools = &dataRetrieverMock.PoolsHolderStub{ + ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { + return &testscommon.ShardedDataStub{ + ShardDataStoreCalled: 
func(cacheID string) storage.Cacher { + return &testscommon.CacherStub{ + PeekCalled: func(key []byte) (value interface{}, ok bool) { + if bytes.Equal(key, []byte("b")) { + return svi2, true + } + return nil, false + }, + } + }, + } + }, + } + transactionsSyncer, _ := NewTransactionsSyncer(args) + + miniBlock := &block.MiniBlock{ + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + } + transactionsSyncer.mutPendingTx.Lock() + transactionsSyncer.mapValidatorsInfo["a"] = svi1 + transactionsSyncer.mapTxsToMiniBlocks["b"] = miniBlock + numMissingValidatorsInfo := transactionsSyncer.requestTransactionsForPeerMiniBlock(miniBlock) + + assert.Equal(t, 1, numMissingValidatorsInfo) + assert.Equal(t, 2, len(transactionsSyncer.mapValidatorsInfo)) + assert.Equal(t, 2, len(transactionsSyncer.mapTxsToMiniBlocks)) + assert.Equal(t, svi2, transactionsSyncer.mapValidatorsInfo["b"]) + assert.Equal(t, miniBlock, transactionsSyncer.mapTxsToMiniBlocks["c"]) + transactionsSyncer.mutPendingTx.Unlock() +} + +func TestTransactionsSync_ReceivedValidatorInfo(t *testing.T) { + t.Parallel() + + txHash := []byte("hash") + svi := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + + args := createMockArgs() + transactionsSyncer, _ := NewTransactionsSyncer(args) + + // stop sync is true + transactionsSyncer.receivedValidatorInfo(txHash, svi) + transactionsSyncer.mutPendingTx.Lock() + assert.Equal(t, 0, len(transactionsSyncer.mapValidatorsInfo)) + transactionsSyncer.mutPendingTx.Unlock() + + // txHash does not exist in mapTxsToMiniBlocks + transactionsSyncer.stopSync = false + transactionsSyncer.receivedValidatorInfo(txHash, svi) + transactionsSyncer.mutPendingTx.Lock() + assert.Equal(t, 0, len(transactionsSyncer.mapValidatorsInfo)) + transactionsSyncer.mutPendingTx.Unlock() + + miniBlock := &block.MiniBlock{ + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + } + transactionsSyncer.mutPendingTx.Lock() + 
transactionsSyncer.mapTxsToMiniBlocks[string(txHash)] = miniBlock + transactionsSyncer.mutPendingTx.Unlock() + + // value received is not of type *state.ShardValidatorInfo + transactionsSyncer.receivedValidatorInfo(txHash, nil) + transactionsSyncer.mutPendingTx.Lock() + assert.Equal(t, 0, len(transactionsSyncer.mapValidatorsInfo)) + transactionsSyncer.mutPendingTx.Unlock() + + wasReceivedAll := false + go func() { + select { + case <-transactionsSyncer.chReceivedAll: + wasReceivedAll = true + return + case <-time.After(time.Second): + } + }() + + // received all missing validators info with success + transactionsSyncer.receivedValidatorInfo(txHash, svi) + transactionsSyncer.mutPendingTx.Lock() + assert.Equal(t, 1, len(transactionsSyncer.mapValidatorsInfo)) + transactionsSyncer.mutPendingTx.Unlock() + assert.True(t, wasReceivedAll) +} + +func TestTransactionsSync_GetValidatorInfoFromPoolShouldWork(t *testing.T) { + t.Parallel() + + t.Run("get validator info from pool when tx hash does not exist in mapTxsToMiniBlocks", func(t *testing.T) { + t.Parallel() + + txHash := []byte("hash") + + args := createMockArgs() + transactionsSyncer, _ := NewTransactionsSyncer(args) + + shardValidatorInfo, bFound := transactionsSyncer.getValidatorInfoFromPool(txHash) + assert.Nil(t, shardValidatorInfo) + assert.False(t, bFound) + }) + + t.Run("get validator info from pool when shard data store is missing", func(t *testing.T) { + t.Parallel() + + txHash := []byte("hash") + + args := createMockArgs() + args.DataPools = &dataRetrieverMock.PoolsHolderStub{ + ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { + return &testscommon.ShardedDataStub{ + ShardDataStoreCalled: func(cacheID string) storage.Cacher { + return nil + }, + } + }, + } + transactionsSyncer, _ := NewTransactionsSyncer(args) + + miniBlock := &block.MiniBlock{ + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + } + transactionsSyncer.mutPendingTx.Lock() + 
transactionsSyncer.mapTxsToMiniBlocks[string(txHash)] = miniBlock + transactionsSyncer.mutPendingTx.Unlock() + + shardValidatorInfo, bFound := transactionsSyncer.getValidatorInfoFromPool(txHash) + assert.Nil(t, shardValidatorInfo) + assert.False(t, bFound) + }) + + t.Run("get validator info from pool when tx hash is not found in shard data store", func(t *testing.T) { + t.Parallel() + + txHash := []byte("hash") + + args := createMockArgs() + args.DataPools = &dataRetrieverMock.PoolsHolderStub{ + ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { + return &testscommon.ShardedDataStub{ + ShardDataStoreCalled: func(cacheID string) storage.Cacher { + return &testscommon.CacherStub{ + PeekCalled: func(key []byte) (value interface{}, ok bool) { + return nil, false + }, + } + }, + } + }, + } + transactionsSyncer, _ := NewTransactionsSyncer(args) + + miniBlock := &block.MiniBlock{ + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + } + transactionsSyncer.mutPendingTx.Lock() + transactionsSyncer.mapTxsToMiniBlocks[string(txHash)] = miniBlock + transactionsSyncer.mutPendingTx.Unlock() + + shardValidatorInfo, bFound := transactionsSyncer.getValidatorInfoFromPool(txHash) + assert.Nil(t, shardValidatorInfo) + assert.False(t, bFound) + }) + + t.Run("get validator info from pool when value received from pool is not of type *state.ShardValidatorInfo", func(t *testing.T) { + t.Parallel() + + txHash := []byte("hash") + + args := createMockArgs() + args.DataPools = &dataRetrieverMock.PoolsHolderStub{ + ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { + return &testscommon.ShardedDataStub{ + ShardDataStoreCalled: func(cacheID string) storage.Cacher { + return &testscommon.CacherStub{ + PeekCalled: func(key []byte) (value interface{}, ok bool) { + if bytes.Equal(key, txHash) { + return nil, true + } + return nil, false + }, + } + }, + } + }, + } + transactionsSyncer, _ := NewTransactionsSyncer(args) + + miniBlock := 
&block.MiniBlock{ + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + } + transactionsSyncer.mutPendingTx.Lock() + transactionsSyncer.mapTxsToMiniBlocks[string(txHash)] = miniBlock + transactionsSyncer.mutPendingTx.Unlock() + + shardValidatorInfo, bFound := transactionsSyncer.getValidatorInfoFromPool(txHash) + assert.Nil(t, shardValidatorInfo) + assert.False(t, bFound) + }) + + t.Run("get validator info from pool should work", func(t *testing.T) { + t.Parallel() + + txHash := []byte("hash") + svi := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + + args := createMockArgs() + args.DataPools = &dataRetrieverMock.PoolsHolderStub{ + ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { + return &testscommon.ShardedDataStub{ + ShardDataStoreCalled: func(cacheID string) storage.Cacher { + return &testscommon.CacherStub{ + PeekCalled: func(key []byte) (value interface{}, ok bool) { + if bytes.Equal(key, txHash) { + return svi, true + } + return nil, false + }, + } + }, + } + }, + } + transactionsSyncer, _ := NewTransactionsSyncer(args) + + miniBlock := &block.MiniBlock{ + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + } + transactionsSyncer.mutPendingTx.Lock() + transactionsSyncer.mapTxsToMiniBlocks[string(txHash)] = miniBlock + transactionsSyncer.mutPendingTx.Unlock() + + shardValidatorInfo, bFound := transactionsSyncer.getValidatorInfoFromPool(txHash) + assert.Equal(t, svi, shardValidatorInfo) + assert.True(t, bFound) + }) +} + +func TestTransactionsSync_GetValidatorInfoFromPoolWithSearchFirstShouldWork(t *testing.T) { + t.Parallel() + + txHash := []byte("hash") + svi := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + + args := createMockArgs() + transactionsSyncer, _ := NewTransactionsSyncer(args) + + // txHash is not found in validatorInfoPool + validatorsInfoPool := &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + return nil, 
false + }, + } + shardValidatorInfo, bFound := transactionsSyncer.getValidatorInfoFromPoolWithSearchFirst(txHash, validatorsInfoPool) + assert.Nil(t, shardValidatorInfo) + assert.False(t, bFound) + + // value received from validatorInfoPool is not of type *state.ShardValidatorInfo + validatorsInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + return nil, true + }, + } + shardValidatorInfo, bFound = transactionsSyncer.getValidatorInfoFromPoolWithSearchFirst(txHash, validatorsInfoPool) + assert.Nil(t, shardValidatorInfo) + assert.False(t, bFound) + + // get validator info from pool with search first should work + validatorsInfoPool = &testscommon.ShardedDataStub{ + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + if bytes.Equal(key, txHash) { + return svi, true + } + return nil, false + }, + } + shardValidatorInfo, bFound = transactionsSyncer.getValidatorInfoFromPoolWithSearchFirst(txHash, validatorsInfoPool) + assert.Equal(t, svi, shardValidatorInfo) + assert.True(t, bFound) +} + +func TestTransactionsSync_GetValidatorInfoFromPoolOrStorage(t *testing.T) { + t.Parallel() + + t.Run("get validator info from pool or storage should work from pool", func(t *testing.T) { + t.Parallel() + + txHash := []byte("hash") + svi := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + + args := createMockArgs() + args.DataPools = &dataRetrieverMock.PoolsHolderStub{ + ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { + return &testscommon.ShardedDataStub{ + ShardDataStoreCalled: func(cacheID string) storage.Cacher { + return &testscommon.CacherStub{ + PeekCalled: func(key []byte) (value interface{}, ok bool) { + if bytes.Equal(key, txHash) { + return svi, true + } + return nil, false + }, + } + }, + } + }, + } + transactionsSyncer, _ := NewTransactionsSyncer(args) + + miniBlock := &block.MiniBlock{ + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, 
+ } + transactionsSyncer.mutPendingTx.Lock() + transactionsSyncer.mapTxsToMiniBlocks[string(txHash)] = miniBlock + transactionsSyncer.mutPendingTx.Unlock() + + shardValidatorInfo, bFound := transactionsSyncer.getValidatorInfoFromPoolOrStorage(txHash) + assert.Equal(t, svi, shardValidatorInfo) + assert.True(t, bFound) + }) + + t.Run("get validator info from pool or storage when txHash does not exist in mapTxsToMiniBlocks", func(t *testing.T) { + t.Parallel() + + txHash := []byte("hash") + + args := createMockArgs() + transactionsSyncer, _ := NewTransactionsSyncer(args) + + shardValidatorInfo, bFound := transactionsSyncer.getValidatorInfoFromPoolOrStorage(txHash) + assert.Nil(t, shardValidatorInfo) + assert.False(t, bFound) + }) + + t.Run("get validator info from pool or storage should work using search first", func(t *testing.T) { + t.Parallel() + + txHash := []byte("hash") + svi := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + + args := createMockArgs() + args.DataPools = &dataRetrieverMock.PoolsHolderStub{ + ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { + return &testscommon.ShardedDataStub{ + ShardDataStoreCalled: func(cacheID string) storage.Cacher { + return &testscommon.CacherStub{ + PeekCalled: func(key []byte) (value interface{}, ok bool) { + return nil, false + }, + } + }, + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + return svi, true + }, + } + }, + } + transactionsSyncer, _ := NewTransactionsSyncer(args) + + miniBlock := &block.MiniBlock{ + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + } + transactionsSyncer.mutPendingTx.Lock() + transactionsSyncer.mapTxsToMiniBlocks[string(txHash)] = miniBlock + transactionsSyncer.mutPendingTx.Unlock() + + shardValidatorInfo, bFound := transactionsSyncer.getValidatorInfoFromPoolOrStorage(txHash) + assert.Equal(t, svi, shardValidatorInfo) + assert.True(t, bFound) + }) + + t.Run("get validator info from pool or storage when txHash 
does not exist in storage", func(t *testing.T) { + t.Parallel() + + txHash := []byte("hash") + + args := createMockArgs() + args.Storages = &storageStubs.ChainStorerStub{ + GetStorerCalled: func(unitType dataRetriever.UnitType) (storage.Storer, error) { + return &storageStubs.StorerStub{ + GetCalled: func(key []byte) ([]byte, error) { + return nil, errors.New("error") + }, + }, nil + }, + } + args.DataPools = &dataRetrieverMock.PoolsHolderStub{ + ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { + return &testscommon.ShardedDataStub{ + ShardDataStoreCalled: func(cacheID string) storage.Cacher { + return &testscommon.CacherStub{ + PeekCalled: func(key []byte) (value interface{}, ok bool) { + return nil, false + }, + } + }, + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + return nil, false + }, + } + }, + } + transactionsSyncer, _ := NewTransactionsSyncer(args) + + miniBlock := &block.MiniBlock{ + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + } + transactionsSyncer.mutPendingTx.Lock() + transactionsSyncer.mapTxsToMiniBlocks[string(txHash)] = miniBlock + transactionsSyncer.mutPendingTx.Unlock() + + shardValidatorInfo, bFound := transactionsSyncer.getValidatorInfoFromPoolOrStorage(txHash) + assert.Nil(t, shardValidatorInfo) + assert.False(t, bFound) + }) + + t.Run("get validator info from pool or storage should work from storage", func(t *testing.T) { + t.Parallel() + + txHash := []byte("hash") + svi := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + + args := createMockArgs() + marshalledSVI, _ := args.Marshaller.Marshal(svi) + args.Storages = &storageStubs.ChainStorerStub{ + GetStorerCalled: func(unitType dataRetriever.UnitType) (storage.Storer, error) { + return &storageStubs.StorerStub{ + GetCalled: func(key []byte) ([]byte, error) { + if bytes.Equal(key, txHash) { + return marshalledSVI, nil + } + return nil, errors.New("error") + }, + }, nil + }, + } + args.DataPools = 
&dataRetrieverMock.PoolsHolderStub{ + ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { + return &testscommon.ShardedDataStub{ + ShardDataStoreCalled: func(cacheID string) storage.Cacher { + return &testscommon.CacherStub{ + PeekCalled: func(key []byte) (value interface{}, ok bool) { + return nil, false + }, + } + }, + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + return nil, false + }, + } + }, + } + transactionsSyncer, _ := NewTransactionsSyncer(args) + + miniBlock := &block.MiniBlock{ + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + } + transactionsSyncer.mutPendingTx.Lock() + transactionsSyncer.mapTxsToMiniBlocks[string(txHash)] = miniBlock + transactionsSyncer.mutPendingTx.Unlock() + + shardValidatorInfo, bFound := transactionsSyncer.getValidatorInfoFromPoolOrStorage(txHash) + assert.Equal(t, svi, shardValidatorInfo) + assert.True(t, bFound) + }) +} + +func TestTransactionsSync_GetValidatorsInfoShouldWork(t *testing.T) { + t.Parallel() + + args := createMockArgs() + transactionsSyncer, _ := NewTransactionsSyncer(args) + + transactionsSyncer.syncedAll = false + mapShardValidatorInfo, err := transactionsSyncer.GetValidatorsInfo() + assert.Nil(t, mapShardValidatorInfo) + assert.Equal(t, update.ErrNotSynced, err) + + txHash1 := []byte("hash1") + svi1 := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + txHash2 := []byte("hash2") + svi2 := &state.ShardValidatorInfo{ + PublicKey: []byte("y"), + } + transactionsSyncer.mapValidatorsInfo[string(txHash1)] = svi1 + transactionsSyncer.mapValidatorsInfo[string(txHash2)] = svi2 + + transactionsSyncer.syncedAll = true + mapShardValidatorInfo, err = transactionsSyncer.GetValidatorsInfo() + assert.Equal(t, 2, len(mapShardValidatorInfo)) + assert.Equal(t, svi1, mapShardValidatorInfo[string(txHash1)]) + assert.Equal(t, svi2, mapShardValidatorInfo[string(txHash2)]) + assert.Nil(t, err) +} + +func TestTransactionsSync_ClearFieldsShouldWork(t 
*testing.T) { + t.Parallel() + + args := createMockArgs() + transactionsSyncer, _ := NewTransactionsSyncer(args) + + transactionsSyncer.mapTransactions["a"] = &dataTransaction.Transaction{} + transactionsSyncer.mapTxsToMiniBlocks["b"] = &block.MiniBlock{} + transactionsSyncer.mapValidatorsInfo["c"] = &state.ShardValidatorInfo{} + + assert.Equal(t, 1, len(transactionsSyncer.mapTransactions)) + assert.Equal(t, 1, len(transactionsSyncer.mapTxsToMiniBlocks)) + assert.Equal(t, 1, len(transactionsSyncer.mapValidatorsInfo)) + + transactionsSyncer.ClearFields() + + assert.Equal(t, 0, len(transactionsSyncer.mapTransactions)) + assert.Equal(t, 0, len(transactionsSyncer.mapTxsToMiniBlocks)) + assert.Equal(t, 0, len(transactionsSyncer.mapValidatorsInfo)) +} From 7c6d3800dd0f15fad448f068f0af602d64f89912 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Fri, 26 Aug 2022 19:10:38 +0300 Subject: [PATCH 64/70] * Fixed after reviews --- consensus/mock/epochStartNotifierStub.go | 4 +- consensus/spos/bls/subroundStartRound.go | 3 +- .../currentEpochValidatorInfoPool_test.go | 3 +- dataRetriever/dataPool/dataPool.go | 2 +- dataRetriever/dataPool/dataPool_test.go | 2 +- dataRetriever/errors.go | 4 +- ...go => validatorInfoForCurrentEpochStub.go} | 6 +- epochStart/bootstrap/interface.go | 3 +- epochStart/bootstrap/process_test.go | 6 +- epochStart/bootstrap/syncValidatorStatus.go | 3 +- .../bootstrap/syncValidatorStatus_test.go | 7 +- epochStart/interface.go | 4 +- epochStart/metachain/baseRewards.go | 6 +- epochStart/metachain/trigger.go | 2 +- epochStart/metachain/trigger_test.go | 2 +- epochStart/metachain/validators.go | 88 +++++++++++-------- epochStart/metachain/validators_test.go | 2 +- epochStart/mock/epochStartNotifierStub.go | 7 +- epochStart/notifier/common.go | 2 +- .../notifier/epochStartSubscriptionHandler.go | 6 +- epochStart/shardchain/peerMiniBlocksSyncer.go | 39 ++++---- epochStart/shardchain/trigger.go | 27 ++++-- epochStart/shardchain/trigger_test.go | 4 +- 
factory/interface.go | 2 +- factory/mock/epochStartNotifierStub.go | 6 +- factory/shardingFactory.go | 2 + heartbeat/mock/epochStartNotifier.go | 8 +- integrationTests/consensus/testInitializer.go | 2 + .../consensusComponents_test.go | 1 + .../processComponents_test.go | 1 + .../statusComponents/statusComponents_test.go | 1 + integrationTests/mock/epochStartNotifier.go | 8 +- integrationTests/nodesCoordinatorFactory.go | 3 + integrationTests/testHeartbeatNode.go | 3 + integrationTests/testP2PNode.go | 3 + .../testProcessorNodeWithCoordinator.go | 2 + .../testProcessorNodeWithMultisigner.go | 3 + node/mock/epochStartNotifier.go | 8 +- node/nodeRunner.go | 1 + process/block/metablock.go | 76 ++++++++-------- .../block/preprocess/rewardTxPreProcessor.go | 6 +- .../block/preprocess/smartContractResults.go | 4 +- process/block/preprocess/transactions.go | 6 +- .../preprocess/validatorInfoPreProcessor.go | 27 +----- .../validatorInfoPreProcessor_test.go | 33 ------- .../shard/preProcessorsContainerFactory.go | 1 - process/mock/epochStartNotifierStub.go | 8 +- process/peer/validatorsProvider_test.go | 2 +- sharding/networksharding/peerShardMapper.go | 3 +- .../networksharding/peerShardMapper_test.go | 3 +- sharding/nodesCoordinator/errors.go | 3 + .../indexHashedNodesCoordinator.go | 16 ++-- .../indexHashedNodesCoordinatorLite_test.go | 7 +- ...dexHashedNodesCoordinatorWithRater_test.go | 9 ++ .../indexHashedNodesCoordinator_test.go | 77 +++++++++------- sharding/nodesCoordinator/interface.go | 2 +- sharding/nodesCoordinator/shardingArgs.go | 2 + testscommon/genericMocks/actionHandlerStub.go | 7 +- .../shardingMocks/nodesCoordinatorStub.go | 7 +- .../validatorInfoCacherMock.go | 38 -------- .../validatorInfoCacherStub.go | 38 ++++++++ update/mock/epochStartNotifierStub.go | 8 +- update/sync/syncTransactions.go | 3 +- 63 files changed, 351 insertions(+), 321 deletions(-) rename dataRetriever/mock/{validatorInfoForCurrentEpochMock.go => validatorInfoForCurrentEpochStub.go} 
(91%) delete mode 100644 testscommon/validatorInfoCacher/validatorInfoCacherMock.go create mode 100644 testscommon/validatorInfoCacher/validatorInfoCacherStub.go diff --git a/consensus/mock/epochStartNotifierStub.go b/consensus/mock/epochStartNotifierStub.go index a0f67c63778..157bd99020b 100644 --- a/consensus/mock/epochStartNotifierStub.go +++ b/consensus/mock/epochStartNotifierStub.go @@ -38,13 +38,13 @@ func (esnm *EpochStartNotifierStub) UnregisterHandler(handler epochStart.ActionH } // NotifyAllPrepare - -func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) { +func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) { if esnm.NotifyAllPrepareCalled != nil { esnm.NotifyAllPrepareCalled(metaHdr, body) } for _, hdl := range esnm.epochStartHdls { - hdl.EpochStartPrepare(metaHdr, body, validatorInfoCacher) + hdl.EpochStartPrepare(metaHdr, body) } } diff --git a/consensus/spos/bls/subroundStartRound.go b/consensus/spos/bls/subroundStartRound.go index 7a46f3bf978..a18fe9d893e 100644 --- a/consensus/spos/bls/subroundStartRound.go +++ b/consensus/spos/bls/subroundStartRound.go @@ -13,7 +13,6 @@ import ( "github.com/ElrondNetwork/elrond-go-core/data/indexer" "github.com/ElrondNetwork/elrond-go/common" "github.com/ElrondNetwork/elrond-go/consensus/spos" - "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/outport" "github.com/ElrondNetwork/elrond-go/outport/disabled" ) @@ -303,7 +302,7 @@ func (sr *subroundStartRound) generateNextConsensusGroup(roundIndex int64) error // EpochStartPrepare wis called when an epoch start event is observed, but not yet confirmed/committed. 
// Some components may need to do initialisation on this event -func (sr *subroundStartRound) EpochStartPrepare(metaHdr data.HeaderHandler, _ data.BodyHandler, _ epochStart.ValidatorInfoCacher) { +func (sr *subroundStartRound) EpochStartPrepare(metaHdr data.HeaderHandler, _ data.BodyHandler) { log.Trace(fmt.Sprintf("epoch %d start prepare in consensus", metaHdr.GetEpoch())) } diff --git a/dataRetriever/dataPool/currentEpochValidatorInfoPool_test.go b/dataRetriever/dataPool/currentEpochValidatorInfoPool_test.go index ab39cd14bee..0337e815ddf 100644 --- a/dataRetriever/dataPool/currentEpochValidatorInfoPool_test.go +++ b/dataRetriever/dataPool/currentEpochValidatorInfoPool_test.go @@ -1,10 +1,11 @@ package dataPool import ( + "testing" + "github.com/ElrondNetwork/elrond-go/dataRetriever" "github.com/ElrondNetwork/elrond-go/state" "github.com/stretchr/testify/require" - "testing" ) func TestCurrentEpochValidatorInfoPool_AddGetCleanTx(t *testing.T) { diff --git a/dataRetriever/dataPool/dataPool.go b/dataRetriever/dataPool/dataPool.go index 444539e240e..4bb4c20720e 100644 --- a/dataRetriever/dataPool/dataPool.go +++ b/dataRetriever/dataPool/dataPool.go @@ -70,7 +70,7 @@ func NewDataPool(args DataPoolArgs) (*dataPool, error) { return nil, dataRetriever.ErrNilCurrBlockTxs } if check.IfNil(args.CurrentEpochValidatorInfo) { - return nil, dataRetriever.ErrNilCurrEpochValidatorInfo + return nil, dataRetriever.ErrNilCurrentEpochValidatorInfo } if check.IfNil(args.TrieNodes) { return nil, dataRetriever.ErrNilTrieNodesPool diff --git a/dataRetriever/dataPool/dataPool_test.go b/dataRetriever/dataPool/dataPool_test.go index 3d4c0486517..05fc6e8e280 100644 --- a/dataRetriever/dataPool/dataPool_test.go +++ b/dataRetriever/dataPool/dataPool_test.go @@ -184,7 +184,7 @@ func TestNewDataPool_NilCurrEpochValidatorInfoShouldErr(t *testing.T) { tdp, err := dataPool.NewDataPool(args) require.Nil(t, tdp) - require.Equal(t, dataRetriever.ErrNilCurrEpochValidatorInfo, err) + require.Equal(t, 
dataRetriever.ErrNilCurrentEpochValidatorInfo, err) } func TestNewDataPool_OkValsShouldWork(t *testing.T) { diff --git a/dataRetriever/errors.go b/dataRetriever/errors.go index 86db71fe8a1..662ab1e2735 100644 --- a/dataRetriever/errors.go +++ b/dataRetriever/errors.go @@ -152,8 +152,8 @@ var ErrNilTrieDataGetter = errors.New("nil trie data getter provided") // ErrNilCurrBlockTxs signals that nil current block txs holder was provided var ErrNilCurrBlockTxs = errors.New("nil current block txs holder") -// ErrNilCurrEpochValidatorInfo signals that nil current epoch validator info holder was provided -var ErrNilCurrEpochValidatorInfo = errors.New("nil current epoch validator info holder") +// ErrNilCurrentEpochValidatorInfo signals that nil current epoch validator info holder was provided +var ErrNilCurrentEpochValidatorInfo = errors.New("nil current epoch validator info holder") // ErrNilRequestedItemsHandler signals that a nil requested items handler was provided var ErrNilRequestedItemsHandler = errors.New("nil requested items handler") diff --git a/dataRetriever/mock/validatorInfoForCurrentEpochMock.go b/dataRetriever/mock/validatorInfoForCurrentEpochStub.go similarity index 91% rename from dataRetriever/mock/validatorInfoForCurrentEpochMock.go rename to dataRetriever/mock/validatorInfoForCurrentEpochStub.go index cc9adfcf464..84905d69262 100644 --- a/dataRetriever/mock/validatorInfoForCurrentEpochMock.go +++ b/dataRetriever/mock/validatorInfoForCurrentEpochStub.go @@ -12,9 +12,9 @@ type ValidatorInfoForCurrentEpochStub struct { } // Clean - -func (t *ValidatorInfoForCurrentEpochStub) Clean() { - if t.CleanCalled != nil { - t.CleanCalled() +func (v *ValidatorInfoForCurrentEpochStub) Clean() { + if v.CleanCalled != nil { + v.CleanCalled() } } diff --git a/epochStart/bootstrap/interface.go b/epochStart/bootstrap/interface.go index 293da1fbcf8..84b9d2f56d5 100644 --- a/epochStart/bootstrap/interface.go +++ b/epochStart/bootstrap/interface.go @@ -7,7 +7,6 @@ import ( 
"github.com/ElrondNetwork/elrond-go-core/data" "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go/dataRetriever" - "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/process" "github.com/ElrondNetwork/elrond-go/sharding/nodesCoordinator" ) @@ -26,7 +25,7 @@ type EpochStartMetaBlockInterceptorProcessor interface { // StartInEpochNodesCoordinator defines the methods to process and save nodesCoordinator information to storage type StartInEpochNodesCoordinator interface { - EpochStartPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) + EpochStartPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) NodesCoordinatorToRegistry() *nodesCoordinator.NodesCoordinatorRegistry ShardIdForEpoch(epoch uint32) (uint32, error) IsInterfaceNil() bool diff --git a/epochStart/bootstrap/process_test.go b/epochStart/bootstrap/process_test.go index 48daf9225ee..ed20437f2c2 100644 --- a/epochStart/bootstrap/process_test.go +++ b/epochStart/bootstrap/process_test.go @@ -1756,7 +1756,7 @@ func TestRequestAndProcessing(t *testing.T) { return &mock.HeadersCacherStub{} }, CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { - return &validatorInfoCacherMock.ValidatorInfoCacherMock{} + return &validatorInfoCacherStub.ValidatorInfoCacherStub{} }, } epochStartProvider.requestHandler = &testscommon.RequestHandlerStub{} @@ -1826,7 +1826,7 @@ func TestRequestAndProcessing(t *testing.T) { return &mock.HeadersCacherStub{} }, CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { - return &validatorInfoCacherMock.ValidatorInfoCacherMock{} + return &validatorInfoCacherStub.ValidatorInfoCacherStub{} }, } epochStartProvider.requestHandler = &testscommon.RequestHandlerStub{} @@ -1984,7 +1984,7 @@ func TestEpochStartBootstrap_WithDisabledShardIDAsObserver(t *testing.T) { return testscommon.NewCacherStub() }, CurrEpochValidatorInfoCalled: 
func() dataRetriever.ValidatorInfoCacher { - return &validatorInfoCacherMock.ValidatorInfoCacherMock{} + return &validatorInfoCacherStub.ValidatorInfoCacherStub{} }, } epochStartProvider.requestHandler = &testscommon.RequestHandlerStub{} diff --git a/epochStart/bootstrap/syncValidatorStatus.go b/epochStart/bootstrap/syncValidatorStatus.go index 973b5460998..23ac78a3841 100644 --- a/epochStart/bootstrap/syncValidatorStatus.go +++ b/epochStart/bootstrap/syncValidatorStatus.go @@ -129,6 +129,7 @@ func NewSyncValidatorStatus(args ArgsNewSyncValidatorStatus) (*syncValidatorStat NodeTypeProvider: args.NodeTypeProvider, IsFullArchive: args.IsFullArchive, EnableEpochsHandler: args.EnableEpochsHandler, + ValidatorInfoCacher: s.dataPool.CurrentEpochValidatorInfo(), } baseNodesCoordinator, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argsNodesCoordinator) if err != nil { @@ -190,7 +191,7 @@ func (s *syncValidatorStatus) processValidatorChangesFor(metaBlock data.HeaderHa if err != nil { return nil, err } - s.nodeCoordinator.EpochStartPrepare(metaBlock, blockBody, s.dataPool.CurrentEpochValidatorInfo()) + s.nodeCoordinator.EpochStartPrepare(metaBlock, blockBody) return miniBlocks, nil } diff --git a/epochStart/bootstrap/syncValidatorStatus_test.go b/epochStart/bootstrap/syncValidatorStatus_test.go index 772d038baa7..272ef039960 100644 --- a/epochStart/bootstrap/syncValidatorStatus_test.go +++ b/epochStart/bootstrap/syncValidatorStatus_test.go @@ -2,7 +2,6 @@ package bootstrap import ( "context" - validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "testing" "github.com/ElrondNetwork/elrond-go-core/core" @@ -11,7 +10,6 @@ import ( "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go-core/data/endProcess" "github.com/ElrondNetwork/elrond-go/dataRetriever" - "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/epochStart/mock" 
"github.com/ElrondNetwork/elrond-go/sharding/nodesCoordinator" "github.com/ElrondNetwork/elrond-go/storage" @@ -21,6 +19,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" "github.com/ElrondNetwork/elrond-go/testscommon/shardingMocks" + validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -140,7 +139,7 @@ func TestSyncValidatorStatus_processValidatorChangesFor(t *testing.T) { wasCalled := false svs.nodeCoordinator = &shardingMocks.NodesCoordinatorStub{ - EpochStartPrepareCalled: func(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) { + EpochStartPrepareCalled: func(metaHdr data.HeaderHandler, body data.BodyHandler) { wasCalled = true assert.Equal(t, metaBlock, metaHdr) assert.Equal(t, expectedBody, body) @@ -252,7 +251,7 @@ func getSyncValidatorStatusArgs() ArgsNewSyncValidatorStatus { return testscommon.NewCacherStub() }, CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { - return &validatorInfoCacherMock.ValidatorInfoCacherMock{} + return &validatorInfoCacherMock.ValidatorInfoCacherStub{} }, }, Marshalizer: &mock.MarshalizerMock{}, diff --git a/epochStart/interface.go b/epochStart/interface.go index bf4c861a189..f02f0b39bca 100644 --- a/epochStart/interface.go +++ b/epochStart/interface.go @@ -66,7 +66,7 @@ type RequestHandler interface { // ActionHandler defines the action taken on epoch start event type ActionHandler interface { EpochStartAction(hdr data.HeaderHandler) - EpochStartPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher ValidatorInfoCacher) + EpochStartPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) NotifyOrder() uint32 } @@ -80,7 +80,7 @@ type RegistrationHandler interface { // Notifier defines which actions should be done 
for handling new epoch's events type Notifier interface { NotifyAll(hdr data.HeaderHandler) - NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher ValidatorInfoCacher) + NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) NotifyEpochChangeConfirmed(epoch uint32) IsInterfaceNil() bool } diff --git a/epochStart/metachain/baseRewards.go b/epochStart/metachain/baseRewards.go index cad255c1f3a..8b8a96ddcf2 100644 --- a/epochStart/metachain/baseRewards.go +++ b/epochStart/metachain/baseRewards.go @@ -132,7 +132,7 @@ func (brc *baseRewardsCreator) CreateMarshalledData(body *block.Body) map[string continue } - broadcastTopic := createBroadcastTopic(factory.RewardsTransactionTopic, brc.shardCoordinator, miniBlock.ReceiverShardID) + broadcastTopic := createBroadcastTopic(brc.shardCoordinator, miniBlock.ReceiverShardID) if _, ok := marshalledRewardsTxs[broadcastTopic]; !ok { marshalledRewardsTxs[broadcastTopic] = make([][]byte, 0, len(miniBlock.TxHashes)) } @@ -494,8 +494,8 @@ func getMiniBlockWithReceiverShardID(shardId uint32, miniBlocks block.MiniBlockS return nil } -func createBroadcastTopic(topic string, shardC sharding.Coordinator, destShId uint32) string { - transactionTopic := topic + shardC.CommunicationIdentifier(destShId) +func createBroadcastTopic(shardC sharding.Coordinator, destShId uint32) string { + transactionTopic := factory.RewardsTransactionTopic + shardC.CommunicationIdentifier(destShId) return transactionTopic } diff --git a/epochStart/metachain/trigger.go b/epochStart/metachain/trigger.go index 569d007f334..7bb0f66c6c3 100644 --- a/epochStart/metachain/trigger.go +++ b/epochStart/metachain/trigger.go @@ -256,7 +256,7 @@ func (t *trigger) SetProcessed(header data.HeaderHandler, body data.BodyHandler) t.epochStartMeta = metaBlock t.epochStartMetaHash = metaHash - t.epochStartNotifier.NotifyAllPrepare(metaBlock, body, t.validatorInfoPool) + t.epochStartNotifier.NotifyAllPrepare(metaBlock, body) 
t.epochStartNotifier.NotifyAll(metaBlock) t.saveCurrentState(metaBlock.Round) diff --git a/epochStart/metachain/trigger_test.go b/epochStart/metachain/trigger_test.go index 4ac2005720b..ee8e25bb70b 100644 --- a/epochStart/metachain/trigger_test.go +++ b/epochStart/metachain/trigger_test.go @@ -57,7 +57,7 @@ func createMockEpochStartTriggerArguments() *ArgsNewMetaEpochStartTrigger { }, DataPool: &dataRetrieverMock.PoolsHolderStub{ CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { - return &validatorInfoCacherMock.ValidatorInfoCacherMock{} + return &validatorInfoCacherMock.ValidatorInfoCacherStub{} }, }, } diff --git a/epochStart/metachain/validators.go b/epochStart/metachain/validators.go index 66dad3c330a..18e4051d2af 100644 --- a/epochStart/metachain/validators.go +++ b/epochStart/metachain/validators.go @@ -90,9 +90,6 @@ func (vic *validatorInfoCreator) CreateValidatorInfoMiniBlocks(validatorsInfo ma return nil, epochStart.ErrNilValidatorInfo } - vic.mutValidatorInfo.Lock() - defer vic.mutValidatorInfo.Unlock() - vic.clean() miniBlocks := make([]*block.MiniBlock, 0) @@ -155,14 +152,7 @@ func (vic *validatorInfoCreator) createMiniBlock(validatorsInfo []*state.Validat func (vic *validatorInfoCreator) getShardValidatorInfoData(shardValidatorInfo *state.ShardValidatorInfo) ([]byte, error) { if vic.enableEpochsHandler.IsRefactorPeersMiniBlocksFlagEnabled() { - shardValidatorInfoHash, err := core.CalculateHash(vic.marshalizer, vic.hasher, shardValidatorInfo) - if err != nil { - return nil, err - } - - validatorInfoCacher := vic.dataPool.CurrentEpochValidatorInfo() - validatorInfoCacher.AddValidatorInfo(shardValidatorInfoHash, shardValidatorInfo) - return shardValidatorInfoHash, nil + return vic.getShardValidatorInfoHash(shardValidatorInfo) } marshalledShardValidatorInfo, err := vic.marshalizer.Marshal(shardValidatorInfo) @@ -173,6 +163,17 @@ func (vic *validatorInfoCreator) getShardValidatorInfoData(shardValidatorInfo *s return 
marshalledShardValidatorInfo, nil } +func (vic *validatorInfoCreator) getShardValidatorInfoHash(shardValidatorInfo *state.ShardValidatorInfo) ([]byte, error) { + shardValidatorInfoHash, err := core.CalculateHash(vic.marshalizer, vic.hasher, shardValidatorInfo) + if err != nil { + return nil, err + } + + validatorInfoCacher := vic.dataPool.CurrentEpochValidatorInfo() + validatorInfoCacher.AddValidatorInfo(shardValidatorInfoHash, shardValidatorInfo) + return shardValidatorInfoHash, nil +} + func createShardValidatorInfo(validator *state.ValidatorInfo) *state.ShardValidatorInfo { return &state.ShardValidatorInfo{ PublicKey: validator.PublicKey, @@ -250,50 +251,49 @@ func (vic *validatorInfoCreator) CreateMarshalledData(body *block.Body) map[stri return nil } - marshalledValidatorInfoTxs := make(map[string][][]byte) - + marshalledValidatorInfoTxs := make([][]byte, 0) for _, miniBlock := range body.MiniBlocks { if miniBlock.Type != block.PeerBlock { continue } - if miniBlock.SenderShardID != vic.shardCoordinator.SelfId() || - miniBlock.ReceiverShardID == vic.shardCoordinator.SelfId() { + isCrossMiniBlockFromMe := miniBlock.SenderShardID == vic.shardCoordinator.SelfId() && + miniBlock.ReceiverShardID != vic.shardCoordinator.SelfId() + if !isCrossMiniBlockFromMe { continue } - broadcastTopic := common.ValidatorInfoTopic - if _, ok := marshalledValidatorInfoTxs[broadcastTopic]; !ok { - marshalledValidatorInfoTxs[broadcastTopic] = make([][]byte, 0, len(miniBlock.TxHashes)) - } - - vic.setMarshalledValidatorInfoTxs(miniBlock, marshalledValidatorInfoTxs, broadcastTopic) + marshalledValidatorInfoTxs = append(marshalledValidatorInfoTxs, vic.getMarshalledValidatorInfoTxs(miniBlock)...) 
+ } - if len(marshalledValidatorInfoTxs[broadcastTopic]) == 0 { - delete(marshalledValidatorInfoTxs, broadcastTopic) - } + mapMarshalledValidatorInfoTxs := make(map[string][][]byte) + if len(marshalledValidatorInfoTxs) > 0 { + mapMarshalledValidatorInfoTxs[common.ValidatorInfoTopic] = marshalledValidatorInfoTxs } - return marshalledValidatorInfoTxs + return mapMarshalledValidatorInfoTxs } -func (vic *validatorInfoCreator) setMarshalledValidatorInfoTxs(miniBlock *block.MiniBlock, marshalledValidatorInfoTxs map[string][][]byte, broadcastTopic string) { +func (vic *validatorInfoCreator) getMarshalledValidatorInfoTxs(miniBlock *block.MiniBlock) [][]byte { validatorInfoCacher := vic.dataPool.CurrentEpochValidatorInfo() + marshalledValidatorInfoTxs := make([][]byte, 0) for _, txHash := range miniBlock.TxHashes { validatorInfoTx, err := validatorInfoCacher.GetValidatorInfo(txHash) if err != nil { - log.Error("validatorInfoCreator.setMarshalledValidatorInfoTxs.GetValidatorInfo", "hash", txHash, "error", err) + log.Error("validatorInfoCreator.getMarshalledValidatorInfoTxs.GetValidatorInfo", "hash", txHash, "error", err) continue } marshalledData, err := vic.marshalizer.Marshal(validatorInfoTx) if err != nil { - log.Error("validatorInfoCreator.setMarshalledValidatorInfoTxs.Marshal", "hash", txHash, "error", err) + log.Error("validatorInfoCreator.getMarshalledValidatorInfoTxs.Marshal", "hash", txHash, "error", err) continue } - marshalledValidatorInfoTxs[broadcastTopic] = append(marshalledValidatorInfoTxs[broadcastTopic], marshalledData) + marshalledValidatorInfoTxs = append(marshalledValidatorInfoTxs, marshalledData) } + + return marshalledValidatorInfoTxs } // GetValidatorInfoTxs returns validator info txs for the current epoch @@ -365,7 +365,10 @@ func (vic *validatorInfoCreator) SaveBlockDataToStorage(_ data.HeaderHandler, bo } mbHash := vic.hasher.Compute(string(marshalledData)) - _ = vic.miniBlockStorage.Put(mbHash, marshalledData) + err = 
vic.miniBlockStorage.Put(mbHash, marshalledData) + if err != nil { + log.Debug("validatorInfoCreator.SaveBlockDataToStorage.Put", "error", err) + } } } @@ -385,7 +388,10 @@ func (vic *validatorInfoCreator) saveValidatorInfo(miniBlock *block.MiniBlock) { continue } - _ = vic.validatorInfoStorage.Put(validatorInfoHash, marshalledData) + err = vic.validatorInfoStorage.Put(validatorInfoHash, marshalledData) + if err != nil { + log.Debug("validatorInfoCreator.saveValidatorInfo.Put", "hash", validatorInfoHash, "error", err) + } } } @@ -396,24 +402,34 @@ func (vic *validatorInfoCreator) DeleteBlockDataFromStorage(metaBlock data.Heade } if vic.enableEpochsHandler.IsRefactorPeersMiniBlocksFlagEnabled() { - vic.removeValidatorInfoFromStorage(body) + vic.removeValidatorInfo(body) } for _, mbHeader := range metaBlock.GetMiniBlockHeaderHandlers() { if mbHeader.GetTypeInt32() == int32(block.PeerBlock) { - _ = vic.miniBlockStorage.Remove(mbHeader.GetHash()) + err := vic.miniBlockStorage.Remove(mbHeader.GetHash()) + if err != nil { + log.Debug("validatorInfoCreator.DeleteBlockDataFromStorage.Remove", "hash", mbHeader.GetHash(), "error", err) + } } } } -func (vic *validatorInfoCreator) removeValidatorInfoFromStorage(body *block.Body) { +func (vic *validatorInfoCreator) removeValidatorInfo(body *block.Body) { for _, miniBlock := range body.MiniBlocks { if miniBlock.Type != block.PeerBlock { continue } - for _, txHash := range miniBlock.TxHashes { - _ = vic.validatorInfoStorage.Remove(txHash) + vic.removeValidatorInfoFromStorage(miniBlock) + } +} + +func (vic *validatorInfoCreator) removeValidatorInfoFromStorage(miniBlock *block.MiniBlock) { + for _, txHash := range miniBlock.TxHashes { + err := vic.validatorInfoStorage.Remove(txHash) + if err != nil { + log.Debug("validatorInfoCreator.removeValidatorInfoFromStorage.Remove", "hash", txHash, "error", err) } } } diff --git a/epochStart/metachain/validators_test.go b/epochStart/metachain/validators_test.go index cd6f747d62d..f8a66c2afdd 
100644 --- a/epochStart/metachain/validators_test.go +++ b/epochStart/metachain/validators_test.go @@ -126,7 +126,7 @@ func createMockEpochValidatorInfoCreatorsArguments() ArgsNewValidatorInfoCreator } }, CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { - return &validatorInfoCacherMock.ValidatorInfoCacherMock{} + return &validatorInfoCacherMock.ValidatorInfoCacherStub{} }, }, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ diff --git a/epochStart/mock/epochStartNotifierStub.go b/epochStart/mock/epochStartNotifierStub.go index 5f7c2f7b565..0fe06e739bc 100644 --- a/epochStart/mock/epochStartNotifierStub.go +++ b/epochStart/mock/epochStartNotifierStub.go @@ -2,13 +2,12 @@ package mock import ( "github.com/ElrondNetwork/elrond-go-core/data" - "github.com/ElrondNetwork/elrond-go/epochStart" ) // EpochStartNotifierStub - type EpochStartNotifierStub struct { NotifyAllCalled func(hdr data.HeaderHandler) - NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) + NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler) NotifyEpochChangeConfirmedCalled func(epoch uint32) } @@ -27,9 +26,9 @@ func (esnm *EpochStartNotifierStub) NotifyAll(hdr data.HeaderHandler) { } // NotifyAllPrepare - -func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) { +func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) { if esnm.NotifyAllPrepareCalled != nil { - esnm.NotifyAllPrepareCalled(metaHdr, body, validatorInfoCacher) + esnm.NotifyAllPrepareCalled(metaHdr, body) } } diff --git a/epochStart/notifier/common.go b/epochStart/notifier/common.go index ec0d2cd8ee3..cd2b48ccdbd 100644 --- a/epochStart/notifier/common.go +++ b/epochStart/notifier/common.go @@ -31,7 +31,7 @@ func NewHandlerForEpochStart( // EpochStartPrepare will 
notify the subscriber to prepare for a start of epoch. // The event can be triggered multiple times -func (hs *handlerStruct) EpochStartPrepare(metaHdr data.HeaderHandler, _ data.BodyHandler, _ epochStart.ValidatorInfoCacher) { +func (hs *handlerStruct) EpochStartPrepare(metaHdr data.HeaderHandler, _ data.BodyHandler) { if hs.act != nil { hs.prepare(metaHdr) } diff --git a/epochStart/notifier/epochStartSubscriptionHandler.go b/epochStart/notifier/epochStartSubscriptionHandler.go index cb6a5ff68a3..bf87a3b8f95 100644 --- a/epochStart/notifier/epochStartSubscriptionHandler.go +++ b/epochStart/notifier/epochStartSubscriptionHandler.go @@ -13,7 +13,7 @@ type EpochStartNotifier interface { RegisterHandler(handler epochStart.ActionHandler) UnregisterHandler(handler epochStart.ActionHandler) NotifyAll(hdr data.HeaderHandler) - NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) + NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) NotifyEpochChangeConfirmed(epoch uint32) RegisterForEpochChangeConfirmed(handler func(epoch uint32)) IsInterfaceNil() bool @@ -75,7 +75,7 @@ func (essh *epochStartSubscriptionHandler) NotifyAll(hdr data.HeaderHandler) { // NotifyAllPrepare will call all the subscribed clients to notify them that an epoch change block has been // observed, but not yet confirmed/committed. 
Some components may need to do some initialisation/preparation -func (essh *epochStartSubscriptionHandler) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) { +func (essh *epochStartSubscriptionHandler) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) { essh.mutEpochStartHandler.RLock() sort.Slice(essh.epochStartHandlers, func(i, j int) bool { @@ -83,7 +83,7 @@ func (essh *epochStartSubscriptionHandler) NotifyAllPrepare(metaHdr data.HeaderH }) for i := 0; i < len(essh.epochStartHandlers); i++ { - essh.epochStartHandlers[i].EpochStartPrepare(metaHdr, body, validatorInfoCacher) + essh.epochStartHandlers[i].EpochStartPrepare(metaHdr, body) } essh.mutEpochStartHandler.RUnlock() } diff --git a/epochStart/shardchain/peerMiniBlocksSyncer.go b/epochStart/shardchain/peerMiniBlocksSyncer.go index 31ef9ed20fa..d1f320227f9 100644 --- a/epochStart/shardchain/peerMiniBlocksSyncer.go +++ b/epochStart/shardchain/peerMiniBlocksSyncer.go @@ -261,31 +261,38 @@ func (p *peerMiniBlockSyncer) computeMissingValidatorsInfo(body *block.Body) { defer p.mutValidatorsInfoForBlock.Unlock() numMissingValidatorsInfo := uint32(0) - for _, mb := range body.MiniBlocks { - if mb.Type != block.PeerBlock { + for _, miniBlock := range body.MiniBlocks { + if miniBlock.Type != block.PeerBlock { continue } - for _, txHash := range mb.TxHashes { - p.mapAllValidatorsInfo[string(txHash)] = nil + numMissingValidatorsInfo += p.setMissingValidatorsInfo(miniBlock) + } - validatorInfoObjectFound, ok := p.validatorsInfoPool.SearchFirstData(txHash) - if !ok { - numMissingValidatorsInfo++ - continue - } + p.numMissingValidatorsInfo = numMissingValidatorsInfo +} - validatorInfo, ok := validatorInfoObjectFound.(*state.ShardValidatorInfo) - if !ok { - numMissingValidatorsInfo++ - continue - } +func (p *peerMiniBlockSyncer) setMissingValidatorsInfo(miniBlock *block.MiniBlock) uint32 { + numMissingValidatorsInfo := uint32(0) + for _, 
txHash := range miniBlock.TxHashes { + p.mapAllValidatorsInfo[string(txHash)] = nil - p.mapAllValidatorsInfo[string(txHash)] = validatorInfo + validatorInfoObjectFound, ok := p.validatorsInfoPool.SearchFirstData(txHash) + if !ok { + numMissingValidatorsInfo++ + continue } + + validatorInfo, ok := validatorInfoObjectFound.(*state.ShardValidatorInfo) + if !ok { + numMissingValidatorsInfo++ + continue + } + + p.mapAllValidatorsInfo[string(txHash)] = validatorInfo } - p.numMissingValidatorsInfo = numMissingValidatorsInfo + return numMissingValidatorsInfo } func (p *peerMiniBlockSyncer) retrieveMissingMiniBlocks() ([][]byte, error) { diff --git a/epochStart/shardchain/trigger.go b/epochStart/shardchain/trigger.go index 1522c153368..2da852cc701 100644 --- a/epochStart/shardchain/trigger.go +++ b/epochStart/shardchain/trigger.go @@ -297,12 +297,17 @@ func (t *trigger) clearMissingValidatorsInfoMap(epoch uint32) { } func (t *trigger) requestMissingMiniBlocks(ctx context.Context) { + timer := time.NewTimer(sleepTime) + defer timer.Stop() + for { + timer.Reset(sleepTime) + select { case <-ctx.Done(): log.Debug("requestMissingMiniBlocks: trigger's go routine is stopping...") return - case <-time.After(sleepTime): + case <-timer.C: } t.mutMissingMiniBlocks.RLock() @@ -320,11 +325,13 @@ func (t *trigger) requestMissingMiniBlocks(ctx context.Context) { go t.requestHandler.RequestMiniBlocks(core.MetachainShardId, missingMiniBlocks) + timer.Reset(waitTime) + select { case <-ctx.Done(): log.Debug("requestMissingMiniBlocks: trigger's go routine is stopping...") return - case <-time.After(waitTime): + case <-timer.C: } t.updateMissingMiniBlocks() @@ -332,12 +339,17 @@ func (t *trigger) requestMissingMiniBlocks(ctx context.Context) { } func (t *trigger) requestMissingValidatorsInfo(ctx context.Context) { + timer := time.NewTimer(sleepTime) + defer timer.Stop() + for { + timer.Reset(sleepTime) + select { case <-ctx.Done(): log.Debug("requestMissingValidatorsInfo: trigger's go routine 
is stopping...") return - case <-time.After(sleepTime): + case <-timer.C: } t.mutMissingValidatorsInfo.RLock() @@ -355,11 +367,13 @@ func (t *trigger) requestMissingValidatorsInfo(ctx context.Context) { go t.requestHandler.RequestValidatorsInfo(missingValidatorsInfo) + timer.Reset(waitTime) + select { case <-ctx.Done(): log.Debug("requestMissingValidatorsInfo: trigger's go routine is stopping...") return - case <-time.After(waitTime): + case <-timer.C: } t.updateMissingValidatorsInfo() @@ -387,7 +401,8 @@ func (t *trigger) updateMissingMiniBlocks() { func (t *trigger) updateMissingValidatorsInfo() { t.mutMissingValidatorsInfo.Lock() for hash := range t.mapMissingValidatorsInfo { - if _, ok := t.validatorInfoPool.SearchFirstData([]byte(hash)); ok { + _, isValidatorInfoFound := t.validatorInfoPool.SearchFirstData([]byte(hash)) + if isValidatorInfoFound { delete(t.mapMissingValidatorsInfo, hash) } } @@ -752,7 +767,7 @@ func (t *trigger) checkIfTriggerCanBeActivated(hash string, metaHdr data.HeaderH } } - t.epochStartNotifier.NotifyAllPrepare(metaHdr, blockBody, t.currentEpochValidatorInfoPool) + t.epochStartNotifier.NotifyAllPrepare(metaHdr, blockBody) isMetaHdrFinal, finalityAttestingRound := t.isMetaBlockFinal(hash, metaHdr) return isMetaHdrFinal, finalityAttestingRound diff --git a/epochStart/shardchain/trigger_test.go b/epochStart/shardchain/trigger_test.go index 90f89e489e2..2a727965027 100644 --- a/epochStart/shardchain/trigger_test.go +++ b/epochStart/shardchain/trigger_test.go @@ -43,7 +43,7 @@ func createMockShardEpochStartTriggerArguments() *ArgsShardEpochStartTrigger { return testscommon.NewCacherStub() }, CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { - return &validatorInfoCacherMock.ValidatorInfoCacherMock{} + return &validatorInfoCacherMock.ValidatorInfoCacherStub{} }, }, Storage: &storageStubs.ChainStorerStub{ @@ -360,7 +360,7 @@ func TestTrigger_ReceivedHeaderIsEpochStartTrueWithPeerMiniblocks(t *testing.T) } }, 
CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { - return &validatorInfoCacherMock.ValidatorInfoCacherMock{} + return &validatorInfoCacherMock.ValidatorInfoCacherStub{} }, } args.Uint64Converter = &mock.Uint64ByteSliceConverterMock{ diff --git a/factory/interface.go b/factory/interface.go index bc17c554ce7..33a217a81c3 100644 --- a/factory/interface.go +++ b/factory/interface.go @@ -43,7 +43,7 @@ type EpochStartNotifier interface { RegisterHandler(handler epochStart.ActionHandler) UnregisterHandler(handler epochStart.ActionHandler) NotifyAll(hdr data.HeaderHandler) - NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) + NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) NotifyEpochChangeConfirmed(epoch uint32) IsInterfaceNil() bool } diff --git a/factory/mock/epochStartNotifierStub.go b/factory/mock/epochStartNotifierStub.go index 27b18094eac..4eb9a938a3d 100644 --- a/factory/mock/epochStartNotifierStub.go +++ b/factory/mock/epochStartNotifierStub.go @@ -9,7 +9,7 @@ import ( type EpochStartNotifierStub struct { RegisterHandlerCalled func(handler epochStart.ActionHandler) UnregisterHandlerCalled func(handler epochStart.ActionHandler) - NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) + NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler) NotifyAllCalled func(hdr data.HeaderHandler) NotifyEpochChangeConfirmedCalled func(epoch uint32) } @@ -29,9 +29,9 @@ func (esnm *EpochStartNotifierStub) UnregisterHandler(handler epochStart.ActionH } // NotifyAllPrepare - -func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) { +func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) { if esnm.NotifyAllPrepareCalled != nil { - 
esnm.NotifyAllPrepareCalled(metaHdr, body, validatorInfoCacher) + esnm.NotifyAllPrepareCalled(metaHdr, body) } } diff --git a/factory/shardingFactory.go b/factory/shardingFactory.go index 60dbb89ad39..9dc44483fd6 100644 --- a/factory/shardingFactory.go +++ b/factory/shardingFactory.go @@ -105,6 +105,7 @@ func CreateNodesCoordinator( chanNodeStop chan endProcess.ArgEndProcess, nodeTypeProvider core.NodeTypeProviderHandler, enableEpochsHandler common.EnableEpochsHandler, + validatorInfoCacher epochStart.ValidatorInfoCacher, ) (nodesCoordinator.NodesCoordinator, error) { if chanNodeStop == nil { return nil, nodesCoordinator.ErrNilNodeStopChannel @@ -194,6 +195,7 @@ func CreateNodesCoordinator( NodeTypeProvider: nodeTypeProvider, IsFullArchive: prefsConfig.FullArchive, EnableEpochsHandler: enableEpochsHandler, + ValidatorInfoCacher: validatorInfoCacher, } baseNodesCoordinator, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/heartbeat/mock/epochStartNotifier.go b/heartbeat/mock/epochStartNotifier.go index 3c3679c9737..157bd99020b 100644 --- a/heartbeat/mock/epochStartNotifier.go +++ b/heartbeat/mock/epochStartNotifier.go @@ -10,7 +10,7 @@ type EpochStartNotifierStub struct { RegisterHandlerCalled func(handler epochStart.ActionHandler) UnregisterHandlerCalled func(handler epochStart.ActionHandler) NotifyAllCalled func(hdr data.HeaderHandler) - NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) + NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler) epochStartHdls []epochStart.ActionHandler } @@ -38,13 +38,13 @@ func (esnm *EpochStartNotifierStub) UnregisterHandler(handler epochStart.ActionH } // NotifyAllPrepare - -func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) { +func (esnm *EpochStartNotifierStub) 
NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) { if esnm.NotifyAllPrepareCalled != nil { - esnm.NotifyAllPrepareCalled(metaHdr, body, validatorInfoCacher) + esnm.NotifyAllPrepareCalled(metaHdr, body) } for _, hdl := range esnm.epochStartHdls { - hdl.EpochStartPrepare(metaHdr, body, validatorInfoCacher) + hdl.EpochStartPrepare(metaHdr, body) } } diff --git a/integrationTests/consensus/testInitializer.go b/integrationTests/consensus/testInitializer.go index d1851ec56bf..b1accec315d 100644 --- a/integrationTests/consensus/testInitializer.go +++ b/integrationTests/consensus/testInitializer.go @@ -52,6 +52,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" "github.com/ElrondNetwork/elrond-go/testscommon/shardingMocks" statusHandlerMock "github.com/ElrondNetwork/elrond-go/testscommon/statusHandler" + validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/ElrondNetwork/elrond-go/trie" "github.com/ElrondNetwork/elrond-go/trie/hashesHolder" vmcommon "github.com/ElrondNetwork/elrond-vm-common" @@ -535,6 +536,7 @@ func createNodes( EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ IsWaitingListFixFlagEnabledField: true, }, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } nodesCoord, _ := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/integrationTests/factory/consensusComponents/consensusComponents_test.go b/integrationTests/factory/consensusComponents/consensusComponents_test.go index d532e695691..826947140f6 100644 --- a/integrationTests/factory/consensusComponents/consensusComponents_test.go +++ b/integrationTests/factory/consensusComponents/consensusComponents_test.go @@ -67,6 +67,7 @@ func TestConsensusComponents_Close_ShouldWork(t *testing.T) { managedCoreComponents.ChanStopNodeProcess(), managedCoreComponents.NodeTypeProvider(), managedCoreComponents.EnableEpochsHandler(), + 
managedDataComponents.Datapool().CurrentEpochValidatorInfo(), ) require.Nil(t, err) managedStatusComponents, err := nr.CreateManagedStatusComponents( diff --git a/integrationTests/factory/processComponents/processComponents_test.go b/integrationTests/factory/processComponents/processComponents_test.go index c800775403a..6581c618da2 100644 --- a/integrationTests/factory/processComponents/processComponents_test.go +++ b/integrationTests/factory/processComponents/processComponents_test.go @@ -68,6 +68,7 @@ func TestProcessComponents_Close_ShouldWork(t *testing.T) { managedCoreComponents.ChanStopNodeProcess(), managedCoreComponents.NodeTypeProvider(), managedCoreComponents.EnableEpochsHandler(), + managedDataComponents.Datapool().CurrentEpochValidatorInfo(), ) require.Nil(t, err) managedStatusComponents, err := nr.CreateManagedStatusComponents( diff --git a/integrationTests/factory/statusComponents/statusComponents_test.go b/integrationTests/factory/statusComponents/statusComponents_test.go index 39bde603b0c..5f167c8291d 100644 --- a/integrationTests/factory/statusComponents/statusComponents_test.go +++ b/integrationTests/factory/statusComponents/statusComponents_test.go @@ -68,6 +68,7 @@ func TestStatusComponents_Create_Close_ShouldWork(t *testing.T) { managedCoreComponents.ChanStopNodeProcess(), managedCoreComponents.NodeTypeProvider(), managedCoreComponents.EnableEpochsHandler(), + managedDataComponents.Datapool().CurrentEpochValidatorInfo(), ) require.Nil(t, err) managedStatusComponents, err := nr.CreateManagedStatusComponents( diff --git a/integrationTests/mock/epochStartNotifier.go b/integrationTests/mock/epochStartNotifier.go index 8024eefaac4..14a7cbc9cc6 100644 --- a/integrationTests/mock/epochStartNotifier.go +++ b/integrationTests/mock/epochStartNotifier.go @@ -10,7 +10,7 @@ type EpochStartNotifierStub struct { RegisterHandlerCalled func(handler epochStart.ActionHandler) UnregisterHandlerCalled func(handler epochStart.ActionHandler) NotifyAllCalled func(hdr 
data.HeaderHandler) - NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) + NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler) NotifyEpochChangeConfirmedCalled func(epoch uint32) epochStartHdls []epochStart.ActionHandler } @@ -39,13 +39,13 @@ func (esnm *EpochStartNotifierStub) UnregisterHandler(handler epochStart.ActionH } // NotifyAllPrepare - -func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) { +func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) { if esnm.NotifyAllPrepareCalled != nil { - esnm.NotifyAllPrepareCalled(metaHdr, body, validatorInfoCacher) + esnm.NotifyAllPrepareCalled(metaHdr, body) } for _, hdl := range esnm.epochStartHdls { - hdl.EpochStartPrepare(metaHdr, body, validatorInfoCacher) + hdl.EpochStartPrepare(metaHdr, body) } } diff --git a/integrationTests/nodesCoordinatorFactory.go b/integrationTests/nodesCoordinatorFactory.go index 7b91051954f..5610a79ef95 100644 --- a/integrationTests/nodesCoordinatorFactory.go +++ b/integrationTests/nodesCoordinatorFactory.go @@ -11,6 +11,7 @@ import ( "github.com/ElrondNetwork/elrond-go/storage" "github.com/ElrondNetwork/elrond-go/testscommon" "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" + validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" ) // ArgIndexHashedNodesCoordinatorFactory - @@ -72,6 +73,7 @@ func (tpn *IndexHashedNodesCoordinatorFactory) CreateNodesCoordinator(arg ArgInd EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ RefactorPeersMiniBlocksEnableEpochField: UnreachableEpoch, }, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) if err != nil { @@ -129,6 
+131,7 @@ func (ihncrf *IndexHashedNodesCoordinatorWithRaterFactory) CreateNodesCoordinato IsWaitingListFixFlagEnabledField: true, RefactorPeersMiniBlocksEnableEpochField: UnreachableEpoch, }, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } baseCoordinator, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/integrationTests/testHeartbeatNode.go b/integrationTests/testHeartbeatNode.go index dc67ecdc899..eb51313085b 100644 --- a/integrationTests/testHeartbeatNode.go +++ b/integrationTests/testHeartbeatNode.go @@ -45,6 +45,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon/p2pmocks" "github.com/ElrondNetwork/elrond-go/testscommon/shardingMocks" trieMock "github.com/ElrondNetwork/elrond-go/testscommon/trie" + validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/ElrondNetwork/elrond-go/update" ) @@ -297,6 +298,7 @@ func CreateNodesWithTestHeartbeatNode( NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } nodesCoordinatorInstance, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) log.LogIfError(err) @@ -342,6 +344,7 @@ func CreateNodesWithTestHeartbeatNode( NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } nodesCoordinatorInstance, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) log.LogIfError(err) diff --git a/integrationTests/testP2PNode.go b/integrationTests/testP2PNode.go index bcaf842a441..a8feca91eb7 100644 --- a/integrationTests/testP2PNode.go +++ b/integrationTests/testP2PNode.go @@ -32,6 +32,7 @@ import ( 
"github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" "github.com/ElrondNetwork/elrond-go/testscommon/p2pmocks" "github.com/ElrondNetwork/elrond-go/testscommon/shardingMocks" + validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/ElrondNetwork/elrond-go/update/trigger" ) @@ -355,6 +356,7 @@ func CreateNodesWithTestP2PNodes( NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) log.LogIfError(err) @@ -400,6 +402,7 @@ func CreateNodesWithTestP2PNodes( NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) log.LogIfError(err) diff --git a/integrationTests/testProcessorNodeWithCoordinator.go b/integrationTests/testProcessorNodeWithCoordinator.go index 0c08dc19cdd..a22d592ea38 100644 --- a/integrationTests/testProcessorNodeWithCoordinator.go +++ b/integrationTests/testProcessorNodeWithCoordinator.go @@ -15,6 +15,7 @@ import ( "github.com/ElrondNetwork/elrond-go/sharding/nodesCoordinator" "github.com/ElrondNetwork/elrond-go/storage/lrucache" "github.com/ElrondNetwork/elrond-go/testscommon" + validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" ) type nodeKeys struct { @@ -74,6 +75,7 @@ func CreateProcessorNodesWithNodesCoordinator( ChanStopNode: endProcess.GetDummyEndProcessChannel(), IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } 
nodesCoordinatorInstance, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/integrationTests/testProcessorNodeWithMultisigner.go b/integrationTests/testProcessorNodeWithMultisigner.go index b3be4a0e203..fe1d8718a6c 100644 --- a/integrationTests/testProcessorNodeWithMultisigner.go +++ b/integrationTests/testProcessorNodeWithMultisigner.go @@ -31,6 +31,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon" "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" "github.com/ElrondNetwork/elrond-go/testscommon/shardingMocks" + validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" ) // CreateNodesWithNodesCoordinator returns a map with nodes per shard each using a real nodes coordinator @@ -428,6 +429,7 @@ func CreateNodesWithNodesCoordinatorAndHeaderSigVerifier( NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } nodesCoordinatorInstance, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) @@ -542,6 +544,7 @@ func CreateNodesWithNodesCoordinatorKeygenAndSingleSigner( NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/node/mock/epochStartNotifier.go b/node/mock/epochStartNotifier.go index 8024eefaac4..14a7cbc9cc6 100644 --- a/node/mock/epochStartNotifier.go +++ b/node/mock/epochStartNotifier.go @@ -10,7 +10,7 @@ type EpochStartNotifierStub struct { RegisterHandlerCalled func(handler epochStart.ActionHandler) UnregisterHandlerCalled func(handler epochStart.ActionHandler) NotifyAllCalled 
func(hdr data.HeaderHandler) - NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) + NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler) NotifyEpochChangeConfirmedCalled func(epoch uint32) epochStartHdls []epochStart.ActionHandler } @@ -39,13 +39,13 @@ func (esnm *EpochStartNotifierStub) UnregisterHandler(handler epochStart.ActionH } // NotifyAllPrepare - -func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) { +func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) { if esnm.NotifyAllPrepareCalled != nil { - esnm.NotifyAllPrepareCalled(metaHdr, body, validatorInfoCacher) + esnm.NotifyAllPrepareCalled(metaHdr, body) } for _, hdl := range esnm.epochStartHdls { - hdl.EpochStartPrepare(metaHdr, body, validatorInfoCacher) + hdl.EpochStartPrepare(metaHdr, body) } } diff --git a/node/nodeRunner.go b/node/nodeRunner.go index 5dbe7a8fe08..d66a5acc8da 100644 --- a/node/nodeRunner.go +++ b/node/nodeRunner.go @@ -344,6 +344,7 @@ func (nr *nodeRunner) executeOneComponentCreationCycle( managedCoreComponents.ChanStopNodeProcess(), managedCoreComponents.NodeTypeProvider(), managedCoreComponents.EnableEpochsHandler(), + managedDataComponents.Datapool().CurrentEpochValidatorInfo(), ) if err != nil { return true, err diff --git a/process/block/metablock.go b/process/block/metablock.go index f0b1f735a6c..73141649284 100644 --- a/process/block/metablock.go +++ b/process/block/metablock.go @@ -97,45 +97,45 @@ func NewMetaProcessor(arguments ArgMetaProcessor) (*metaProcessor, error) { genesisHdr := arguments.DataComponents.Blockchain().GetGenesisHeader() base := &baseProcessor{ - accountsDB: arguments.AccountsDB, - blockSizeThrottler: arguments.BlockSizeThrottler, - forkDetector: arguments.ForkDetector, - hasher: 
arguments.CoreComponents.Hasher(), - marshalizer: arguments.CoreComponents.InternalMarshalizer(), - store: arguments.DataComponents.StorageService(), - shardCoordinator: arguments.BootstrapComponents.ShardCoordinator(), - feeHandler: arguments.FeeHandler, - nodesCoordinator: arguments.NodesCoordinator, - uint64Converter: arguments.CoreComponents.Uint64ByteSliceConverter(), - requestHandler: arguments.RequestHandler, - appStatusHandler: arguments.CoreComponents.StatusHandler(), - blockChainHook: arguments.BlockChainHook, - txCoordinator: arguments.TxCoordinator, - epochStartTrigger: arguments.EpochStartTrigger, - headerValidator: arguments.HeaderValidator, - roundHandler: arguments.CoreComponents.RoundHandler(), - bootStorer: arguments.BootStorer, - blockTracker: arguments.BlockTracker, - dataPool: arguments.DataComponents.Datapool(), - blockChain: arguments.DataComponents.Blockchain(), - stateCheckpointModulus: arguments.Config.StateTriesConfig.CheckpointRoundsModulus, - outportHandler: arguments.StatusComponents.OutportHandler(), - genesisNonce: genesisHdr.GetNonce(), - versionedHeaderFactory: arguments.BootstrapComponents.VersionedHeaderFactory(), - headerIntegrityVerifier: arguments.BootstrapComponents.HeaderIntegrityVerifier(), - historyRepo: arguments.HistoryRepository, + accountsDB: arguments.AccountsDB, + blockSizeThrottler: arguments.BlockSizeThrottler, + forkDetector: arguments.ForkDetector, + hasher: arguments.CoreComponents.Hasher(), + marshalizer: arguments.CoreComponents.InternalMarshalizer(), + store: arguments.DataComponents.StorageService(), + shardCoordinator: arguments.BootstrapComponents.ShardCoordinator(), + feeHandler: arguments.FeeHandler, + nodesCoordinator: arguments.NodesCoordinator, + uint64Converter: arguments.CoreComponents.Uint64ByteSliceConverter(), + requestHandler: arguments.RequestHandler, + appStatusHandler: arguments.CoreComponents.StatusHandler(), + blockChainHook: arguments.BlockChainHook, + txCoordinator: 
arguments.TxCoordinator, + epochStartTrigger: arguments.EpochStartTrigger, + headerValidator: arguments.HeaderValidator, + roundHandler: arguments.CoreComponents.RoundHandler(), + bootStorer: arguments.BootStorer, + blockTracker: arguments.BlockTracker, + dataPool: arguments.DataComponents.Datapool(), + blockChain: arguments.DataComponents.Blockchain(), + stateCheckpointModulus: arguments.Config.StateTriesConfig.CheckpointRoundsModulus, + outportHandler: arguments.StatusComponents.OutportHandler(), + genesisNonce: genesisHdr.GetNonce(), + versionedHeaderFactory: arguments.BootstrapComponents.VersionedHeaderFactory(), + headerIntegrityVerifier: arguments.BootstrapComponents.HeaderIntegrityVerifier(), + historyRepo: arguments.HistoryRepository, epochNotifier: arguments.CoreComponents.EpochNotifier(), enableEpochsHandler: arguments.CoreComponents.EnableEpochsHandler(), - enableRoundsHandler: arguments.EnableRoundsHandler, - vmContainerFactory: arguments.VMContainersFactory, - vmContainer: arguments.VmContainer, - processDataTriesOnCommitEpoch: arguments.Config.Debug.EpochStart.ProcessDataTrieOnCommitEpoch, - gasConsumedProvider: arguments.GasHandler, - economicsData: arguments.CoreComponents.EconomicsData(), - scheduledTxsExecutionHandler: arguments.ScheduledTxsExecutionHandler, - pruningDelay: pruningDelay, - processedMiniBlocksTracker: arguments.ProcessedMiniBlocksTracker, - receiptsRepository: arguments.ReceiptsRepository, + enableRoundsHandler: arguments.EnableRoundsHandler, + vmContainerFactory: arguments.VMContainersFactory, + vmContainer: arguments.VmContainer, + processDataTriesOnCommitEpoch: arguments.Config.Debug.EpochStart.ProcessDataTrieOnCommitEpoch, + gasConsumedProvider: arguments.GasHandler, + economicsData: arguments.CoreComponents.EconomicsData(), + scheduledTxsExecutionHandler: arguments.ScheduledTxsExecutionHandler, + pruningDelay: pruningDelay, + processedMiniBlocksTracker: arguments.ProcessedMiniBlocksTracker, + receiptsRepository: 
arguments.ReceiptsRepository, } mp := metaProcessor{ @@ -1326,7 +1326,7 @@ func (mp *metaProcessor) CommitBlock( mp.blockTracker.CleanupInvalidCrossHeaders(header.Epoch, header.Round) } - // TODO: Should pe sent alongside rewardsTxs also validatorInfoTxs -> mp.validatorInfoCreator.GetValidatorInfoTxs(body) ? + // TODO: Should be sent also validatorInfoTxs alongside rewardsTxs -> mp.validatorInfoCreator.GetValidatorInfoTxs(body) ? mp.indexBlock(header, headerHash, body, lastMetaBlock, notarizedHeadersHashes, rewardsTxs) mp.recordBlockInHistory(headerHash, headerHandler, bodyHandler) diff --git a/process/block/preprocess/rewardTxPreProcessor.go b/process/block/preprocess/rewardTxPreProcessor.go index 086b5fb066a..aafb42dab67 100644 --- a/process/block/preprocess/rewardTxPreProcessor.go +++ b/process/block/preprocess/rewardTxPreProcessor.go @@ -175,7 +175,7 @@ func (rtp *rewardTxPreprocessor) RestoreBlockDataIntoPools( continue } - err := rtp.restoreRewardTxs(miniBlock) + err := rtp.restoreRewardTxsIntoPool(miniBlock) if err != nil { return rewardTxsRestored, err } @@ -193,7 +193,7 @@ func (rtp *rewardTxPreprocessor) RestoreBlockDataIntoPools( return rewardTxsRestored, nil } -func (rtp *rewardTxPreprocessor) restoreRewardTxs(miniBlock *block.MiniBlock) error { +func (rtp *rewardTxPreprocessor) restoreRewardTxsIntoPool(miniBlock *block.MiniBlock) error { strCache := process.ShardCacherIdentifier(miniBlock.SenderShardID, miniBlock.ReceiverShardID) rewardTxsBuff, err := rtp.storage.GetAll(dataRetriever.RewardTransactionUnit, miniBlock.TxHashes) if err != nil { @@ -393,7 +393,7 @@ func (rtp *rewardTxPreprocessor) computeMissingRewardTxsHashesForMiniBlock(miniB false, ) - if tx == nil { + if check.IfNil(tx) { missingRewardTxsHashes = append(missingRewardTxsHashes, txHash) } } diff --git a/process/block/preprocess/smartContractResults.go b/process/block/preprocess/smartContractResults.go index 3508629860e..36c2d52d447 100644 --- 
a/process/block/preprocess/smartContractResults.go +++ b/process/block/preprocess/smartContractResults.go @@ -186,7 +186,7 @@ func (scr *smartContractResults) RestoreBlockDataIntoPools( continue } - err := scr.restoreSmartContractResults(miniBlock) + err := scr.restoreSmartContractResultsIntoPool(miniBlock) if err != nil { return scrRestored, err } @@ -208,7 +208,7 @@ func (scr *smartContractResults) RestoreBlockDataIntoPools( return scrRestored, nil } -func (scr *smartContractResults) restoreSmartContractResults(miniBlock *block.MiniBlock) error { +func (scr *smartContractResults) restoreSmartContractResultsIntoPool(miniBlock *block.MiniBlock) error { strCache := process.ShardCacherIdentifier(miniBlock.SenderShardID, miniBlock.ReceiverShardID) scrsBuff, err := scr.storage.GetAll(dataRetriever.UnsignedTransactionUnit, miniBlock.TxHashes) if err != nil { diff --git a/process/block/preprocess/transactions.go b/process/block/preprocess/transactions.go index 2d21fefb21e..22ec9ee3374 100644 --- a/process/block/preprocess/transactions.go +++ b/process/block/preprocess/transactions.go @@ -243,7 +243,7 @@ func (txs *transactions) RestoreBlockDataIntoPools( continue } - err := txs.restoreTxs(miniBlock) + err := txs.restoreTxsIntoPool(miniBlock) if err != nil { return txsRestored, err } @@ -263,7 +263,7 @@ func (txs *transactions) RestoreBlockDataIntoPools( return txsRestored, nil } -func (txs *transactions) restoreTxs(miniBlock *block.MiniBlock) error { +func (txs *transactions) restoreTxsIntoPool(miniBlock *block.MiniBlock) error { miniBlockStrCache := process.ShardCacherIdentifier(miniBlock.SenderShardID, miniBlock.ReceiverShardID) txsBuff, err := txs.storage.GetAll(dataRetriever.TransactionUnit, miniBlock.TxHashes) if err != nil { @@ -953,7 +953,7 @@ func (txs *transactions) computeMissingTxsHashesForMiniBlock(miniBlock *block.Mi txs.txPool, searchFirst) - if tx == nil || tx.IsInterfaceNil() { + if check.IfNil(tx) { missingTransactionsHashes = 
append(missingTransactionsHashes, txHash) } } diff --git a/process/block/preprocess/validatorInfoPreProcessor.go b/process/block/preprocess/validatorInfoPreProcessor.go index 5828ec50833..8fb47ed9615 100644 --- a/process/block/preprocess/validatorInfoPreProcessor.go +++ b/process/block/preprocess/validatorInfoPreProcessor.go @@ -22,7 +22,6 @@ var _ process.PreProcessor = (*validatorInfoPreprocessor)(nil) type validatorInfoPreprocessor struct { *basePreProcess chReceivedAllValidatorsInfo chan bool - onRequestValidatorsInfo func(txHashes [][]byte) validatorsInfoForBlock txsForBlock validatorsInfoPool dataRetriever.ShardedDataCacherNotifier storage dataRetriever.StorageService @@ -36,7 +35,6 @@ func NewValidatorInfoPreprocessor( blockSizeComputation BlockSizeComputationHandler, validatorsInfoPool dataRetriever.ShardedDataCacherNotifier, store dataRetriever.StorageService, - onRequestValidatorsInfo func(txHashes [][]byte), enableEpochsHandler common.EnableEpochsHandler, ) (*validatorInfoPreprocessor, error) { @@ -55,9 +53,6 @@ func NewValidatorInfoPreprocessor( if check.IfNil(store) { return nil, process.ErrNilStorage } - if onRequestValidatorsInfo == nil { - return nil, process.ErrNilRequestHandler - } if check.IfNil(enableEpochsHandler) { return nil, process.ErrNilEnableEpochsHandler } @@ -69,15 +64,13 @@ func NewValidatorInfoPreprocessor( } vip := &validatorInfoPreprocessor{ - basePreProcess: bpp, - storage: store, - validatorsInfoPool: validatorsInfoPool, - onRequestValidatorsInfo: onRequestValidatorsInfo, - enableEpochsHandler: enableEpochsHandler, + basePreProcess: bpp, + storage: store, + validatorsInfoPool: validatorsInfoPool, + enableEpochsHandler: enableEpochsHandler, } vip.chReceivedAllValidatorsInfo = make(chan bool) - vip.validatorsInfoPool.RegisterOnAdded(vip.receivedValidatorInfoTransaction) vip.validatorsInfoForBlock.txHashAndInfo = make(map[string]*txInfo) return vip, nil @@ -177,18 +170,6 @@ func (vip *validatorInfoPreprocessor) SaveTxsToStorage(_ 
*block.Body) error { return nil } -// receivedValidatorInfoTransaction is a callback function called when a new validator info transaction -// is added in the validator info transactions pool -func (vip *validatorInfoPreprocessor) receivedValidatorInfoTransaction(txHash []byte, value interface{}) { - validatorInfo, ok := value.(*state.ShardValidatorInfo) - if !ok { - log.Warn("validatorInfoPreprocessor.receivedValidatorInfoTransaction", "error", process.ErrWrongTypeAssertion) - return - } - - log.Trace("validatorInfoPreprocessor.receivedValidatorInfoTransaction", "tx hash", txHash, "pk", validatorInfo.PublicKey) -} - // CreateBlockStarted cleans the local cache map for processed/created validators info at this round func (vip *validatorInfoPreprocessor) CreateBlockStarted() { _ = core.EmptyChannel(vip.chReceivedAllValidatorsInfo) diff --git a/process/block/preprocess/validatorInfoPreProcessor_test.go b/process/block/preprocess/validatorInfoPreProcessor_test.go index f145ed586c4..c52d45dcd13 100644 --- a/process/block/preprocess/validatorInfoPreProcessor_test.go +++ b/process/block/preprocess/validatorInfoPreProcessor_test.go @@ -22,7 +22,6 @@ func TestNewValidatorInfoPreprocessor_NilHasherShouldErr(t *testing.T) { &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), genericMocks.NewChainStorerMock(0), - func(txHashes [][]byte) {}, &testscommon.EnableEpochsHandlerStub{}, ) @@ -40,7 +39,6 @@ func TestNewValidatorInfoPreprocessor_NilMarshalizerShouldErr(t *testing.T) { &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), genericMocks.NewChainStorerMock(0), - func(txHashes [][]byte) {}, &testscommon.EnableEpochsHandlerStub{}, ) @@ -58,7 +56,6 @@ func TestNewValidatorInfoPreprocessor_NilBlockSizeComputationHandlerShouldErr(t nil, tdp.ValidatorsInfo(), genericMocks.NewChainStorerMock(0), - func(txHashes [][]byte) {}, &testscommon.EnableEpochsHandlerStub{}, ) @@ -75,7 +72,6 @@ func TestNewValidatorInfoPreprocessor_NilValidatorInfoPoolShouldErr(t 
*testing.T &testscommon.BlockSizeComputationStub{}, nil, genericMocks.NewChainStorerMock(0), - func(txHashes [][]byte) {}, &testscommon.EnableEpochsHandlerStub{}, ) @@ -93,7 +89,6 @@ func TestNewValidatorInfoPreprocessor_NilStoreShouldErr(t *testing.T) { &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), nil, - func(txHashes [][]byte) {}, &testscommon.EnableEpochsHandlerStub{}, ) @@ -101,24 +96,6 @@ func TestNewValidatorInfoPreprocessor_NilStoreShouldErr(t *testing.T) { assert.Equal(t, process.ErrNilStorage, err) } -func TestNewValidatorInfoPreprocessor_NilRequestHandlerShouldErr(t *testing.T) { - t.Parallel() - - tdp := initDataPool() - rtp, err := NewValidatorInfoPreprocessor( - &hashingMocks.HasherMock{}, - &testscommon.MarshalizerMock{}, - &testscommon.BlockSizeComputationStub{}, - tdp.ValidatorsInfo(), - genericMocks.NewChainStorerMock(0), - nil, - &testscommon.EnableEpochsHandlerStub{}, - ) - - assert.Nil(t, rtp) - assert.Equal(t, process.ErrNilRequestHandler, err) -} - func TestNewValidatorInfoPreprocessor_NilEnableEpochHandlerShouldErr(t *testing.T) { t.Parallel() @@ -129,7 +106,6 @@ func TestNewValidatorInfoPreprocessor_NilEnableEpochHandlerShouldErr(t *testing. 
&testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), genericMocks.NewChainStorerMock(0), - func(txHashes [][]byte) {}, nil, ) @@ -147,7 +123,6 @@ func TestNewValidatorInfoPreprocessor_OkValsShouldWork(t *testing.T) { &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), genericMocks.NewChainStorerMock(0), - func(txHashes [][]byte) {}, &testscommon.EnableEpochsHandlerStub{}, ) assert.Nil(t, err) @@ -164,7 +139,6 @@ func TestNewValidatorInfoPreprocessor_CreateMarshalizedDataShouldWork(t *testing &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), genericMocks.NewChainStorerMock(0), - func(txHashes [][]byte) {}, &testscommon.EnableEpochsHandlerStub{}, ) @@ -185,7 +159,6 @@ func TestNewValidatorInfoPreprocessor_ProcessMiniBlockInvalidMiniBlockTypeShould &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), genericMocks.NewChainStorerMock(0), - func(txHashes [][]byte) {}, &testscommon.EnableEpochsHandlerStub{}, ) @@ -215,7 +188,6 @@ func TestNewValidatorInfoPreprocessor_ProcessMiniBlockShouldWork(t *testing.T) { &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), genericMocks.NewChainStorerMock(0), - func(txHashes [][]byte) {}, &testscommon.EnableEpochsHandlerStub{}, ) @@ -245,7 +217,6 @@ func TestNewValidatorInfoPreprocessor_ProcessMiniBlockNotFromMeta(t *testing.T) &testscommon.BlockSizeComputationStub{}, tdp.ValidatorsInfo(), genericMocks.NewChainStorerMock(0), - func(txHashes [][]byte) {}, &testscommon.EnableEpochsHandlerStub{}, ) @@ -279,7 +250,6 @@ func TestNewValidatorInfoPreprocessor_RestorePeerBlockIntoPools(t *testing.T) { blockSizeComputation, tdp.ValidatorsInfo(), genericMocks.NewChainStorerMock(0), - func(txHashes [][]byte) {}, &testscommon.EnableEpochsHandlerStub{}, ) @@ -325,7 +295,6 @@ func TestNewValidatorInfoPreprocessor_RestoreOtherBlockTypeIntoPoolsShouldNotRes blockSizeComputation, tdp.ValidatorsInfo(), genericMocks.NewChainStorerMock(0), - func(txHashes [][]byte) {}, 
&testscommon.EnableEpochsHandlerStub{}, ) @@ -371,7 +340,6 @@ func TestNewValidatorInfoPreprocessor_RemovePeerBlockFromPool(t *testing.T) { blockSizeComputation, tdp.ValidatorsInfo(), genericMocks.NewChainStorerMock(0), - func(txHashes [][]byte) {}, &testscommon.EnableEpochsHandlerStub{}, ) @@ -417,7 +385,6 @@ func TestNewValidatorInfoPreprocessor_RemoveOtherBlockTypeFromPoolShouldNotRemov blockSizeComputation, tdp.ValidatorsInfo(), genericMocks.NewChainStorerMock(0), - func(txHashes [][]byte) {}, &testscommon.EnableEpochsHandlerStub{}, ) diff --git a/process/factory/shard/preProcessorsContainerFactory.go b/process/factory/shard/preProcessorsContainerFactory.go index d9b0a3af53b..049471dbbd1 100644 --- a/process/factory/shard/preProcessorsContainerFactory.go +++ b/process/factory/shard/preProcessorsContainerFactory.go @@ -279,7 +279,6 @@ func (ppcm *preProcessorsContainerFactory) createValidatorInfoPreProcessor() (pr ppcm.blockSizeComputation, ppcm.dataPool.ValidatorsInfo(), ppcm.store, - ppcm.requestHandler.RequestValidatorsInfo, ppcm.enableEpochsHandler, ) diff --git a/process/mock/epochStartNotifierStub.go b/process/mock/epochStartNotifierStub.go index 3c3679c9737..157bd99020b 100644 --- a/process/mock/epochStartNotifierStub.go +++ b/process/mock/epochStartNotifierStub.go @@ -10,7 +10,7 @@ type EpochStartNotifierStub struct { RegisterHandlerCalled func(handler epochStart.ActionHandler) UnregisterHandlerCalled func(handler epochStart.ActionHandler) NotifyAllCalled func(hdr data.HeaderHandler) - NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) + NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler) epochStartHdls []epochStart.ActionHandler } @@ -38,13 +38,13 @@ func (esnm *EpochStartNotifierStub) UnregisterHandler(handler epochStart.ActionH } // NotifyAllPrepare - -func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, 
validatorInfoCacher epochStart.ValidatorInfoCacher) { +func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) { if esnm.NotifyAllPrepareCalled != nil { - esnm.NotifyAllPrepareCalled(metaHdr, body, validatorInfoCacher) + esnm.NotifyAllPrepareCalled(metaHdr, body) } for _, hdl := range esnm.epochStartHdls { - hdl.EpochStartPrepare(metaHdr, body, validatorInfoCacher) + hdl.EpochStartPrepare(metaHdr, body) } } diff --git a/process/peer/validatorsProvider_test.go b/process/peer/validatorsProvider_test.go index 42a553770c8..ba307e79b8e 100644 --- a/process/peer/validatorsProvider_test.go +++ b/process/peer/validatorsProvider_test.go @@ -647,7 +647,7 @@ func createMockValidatorInfo() *state.ValidatorInfo { func createMockShardValidatorInfo() *state.ShardValidatorInfo { initialInfo := &state.ShardValidatorInfo{ - PublicKey: []byte("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"), + PublicKey: bytes.Repeat([]byte("a"), 96), ShardId: 0, List: "eligible", Index: 1, diff --git a/sharding/networksharding/peerShardMapper.go b/sharding/networksharding/peerShardMapper.go index 47548098e85..625596c874a 100644 --- a/sharding/networksharding/peerShardMapper.go +++ b/sharding/networksharding/peerShardMapper.go @@ -11,7 +11,6 @@ import ( "github.com/ElrondNetwork/elrond-go-core/data" logger "github.com/ElrondNetwork/elrond-go-logger" "github.com/ElrondNetwork/elrond-go/common" - "github.com/ElrondNetwork/elrond-go/epochStart" "github.com/ElrondNetwork/elrond-go/p2p" "github.com/ElrondNetwork/elrond-go/sharding/nodesCoordinator" "github.com/ElrondNetwork/elrond-go/storage" @@ -396,7 +395,7 @@ func (psm *PeerShardMapper) EpochStartAction(hdr data.HeaderHandler) { } // EpochStartPrepare is the method called whenever an action needs to be undertaken in respect to the epoch preparation change -func (psm *PeerShardMapper) EpochStartPrepare(metaHdr data.HeaderHandler, _ data.BodyHandler, _ 
epochStart.ValidatorInfoCacher) { +func (psm *PeerShardMapper) EpochStartPrepare(metaHdr data.HeaderHandler, _ data.BodyHandler) { if check.IfNil(metaHdr) { log.Warn("nil header on PeerShardMapper.EpochStartPrepare") return diff --git a/sharding/networksharding/peerShardMapper_test.go b/sharding/networksharding/peerShardMapper_test.go index 3735b19ed3d..b6bd8e8c572 100644 --- a/sharding/networksharding/peerShardMapper_test.go +++ b/sharding/networksharding/peerShardMapper_test.go @@ -492,13 +492,12 @@ func TestPeerShardMapper_EpochStartPrepareShouldNotPanic(t *testing.T) { }() psm := createPeerShardMapper() - psm.EpochStartPrepare(nil, nil, nil) + psm.EpochStartPrepare(nil, nil) psm.EpochStartPrepare( &testscommon.HeaderHandlerStub{ EpochField: 0, }, nil, - nil, ) } diff --git a/sharding/nodesCoordinator/errors.go b/sharding/nodesCoordinator/errors.go index e9f210ecdd9..daedcf07f86 100644 --- a/sharding/nodesCoordinator/errors.go +++ b/sharding/nodesCoordinator/errors.go @@ -108,3 +108,6 @@ var ErrNilNodeTypeProvider = errors.New("nil node type provider") // ErrNilEnableEpochsHandler signals that a nil enable epochs handler has been provided var ErrNilEnableEpochsHandler = errors.New("nil enable epochs handler") + +// ErrNilValidatorInfoCacher signals that a nil value for the validator info cacher has been provided +var ErrNilValidatorInfoCacher = errors.New("validator info cacher is nil") diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinator.go b/sharding/nodesCoordinator/indexHashedNodesCoordinator.go index 0f33f231d35..7292320775b 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinator.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinator.go @@ -94,6 +94,7 @@ type indexHashedNodesCoordinator struct { flagWaitingListFix atomicFlags.Flag nodeTypeProvider NodeTypeProviderHandler enableEpochsHandler common.EnableEpochsHandler + validatorInfoCacher epochStart.ValidatorInfoCacher } // NewIndexHashedNodesCoordinator creates a new index 
hashed group selector @@ -138,6 +139,7 @@ func NewIndexHashedNodesCoordinator(arguments ArgNodesCoordinator) (*indexHashed nodeTypeProvider: arguments.NodeTypeProvider, isFullArchive: arguments.IsFullArchive, enableEpochsHandler: arguments.EnableEpochsHandler, + validatorInfoCacher: arguments.ValidatorInfoCacher, } ihnc.loadingFromDisk.Store(false) @@ -217,6 +219,9 @@ func checkArguments(arguments ArgNodesCoordinator) error { if check.IfNil(arguments.EnableEpochsHandler) { return ErrNilEnableEpochsHandler } + if check.IfNil(arguments.ValidatorInfoCacher) { + return ErrNilValidatorInfoCacher + } return nil } @@ -539,7 +544,7 @@ func (ihnc *indexHashedNodesCoordinator) GetValidatorsIndexes( // EpochStartPrepare is called when an epoch start event is observed, but not yet confirmed/committed. // Some components may need to do some initialisation on this event -func (ihnc *indexHashedNodesCoordinator) EpochStartPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) { +func (ihnc *indexHashedNodesCoordinator) EpochStartPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) { if !metaHdr.IsStartOfEpochBlock() { log.Error("could not process EpochStartPrepare on nodesCoordinator - not epoch start block") return @@ -560,7 +565,7 @@ func (ihnc *indexHashedNodesCoordinator) EpochStartPrepare(metaHdr data.HeaderHa ihnc.updateEpochFlags(newEpoch) - allValidatorInfo, err := ihnc.createValidatorInfoFromBody(body, ihnc.numTotalEligible, validatorInfoCacher, newEpoch) + allValidatorInfo, err := ihnc.createValidatorInfoFromBody(body, ihnc.numTotalEligible, newEpoch) if err != nil { log.Error("could not create validator info from body - do nothing on nodesCoordinator epochStartPrepare", "error", err.Error()) return @@ -1164,7 +1169,6 @@ func selectValidators( func (ihnc *indexHashedNodesCoordinator) createValidatorInfoFromBody( body data.BodyHandler, previousTotal uint64, - validatorInfoCacher epochStart.ValidatorInfoCacher, 
epoch uint32, ) ([]*state.ShardValidatorInfo, error) { if check.IfNil(body) { @@ -1183,7 +1187,7 @@ func (ihnc *indexHashedNodesCoordinator) createValidatorInfoFromBody( } for _, txHash := range peerMiniBlock.TxHashes { - shardValidatorInfo, err := ihnc.getShardValidatorInfoData(txHash, validatorInfoCacher, epoch) + shardValidatorInfo, err := ihnc.getShardValidatorInfoData(txHash, epoch) if err != nil { return nil, err } @@ -1195,9 +1199,9 @@ func (ihnc *indexHashedNodesCoordinator) createValidatorInfoFromBody( return allValidatorInfo, nil } -func (ihnc *indexHashedNodesCoordinator) getShardValidatorInfoData(txHash []byte, validatorInfoCacher epochStart.ValidatorInfoCacher, epoch uint32) (*state.ShardValidatorInfo, error) { +func (ihnc *indexHashedNodesCoordinator) getShardValidatorInfoData(txHash []byte, epoch uint32) (*state.ShardValidatorInfo, error) { if epoch >= ihnc.enableEpochsHandler.RefactorPeersMiniBlocksEnableEpoch() { - shardValidatorInfo, err := validatorInfoCacher.GetValidatorInfo(txHash) + shardValidatorInfo, err := ihnc.validatorInfoCacher.GetValidatorInfo(txHash) if err != nil { return nil, err } diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinatorLite_test.go b/sharding/nodesCoordinator/indexHashedNodesCoordinatorLite_test.go index b564082f488..39d92d908da 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinatorLite_test.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinatorLite_test.go @@ -159,7 +159,7 @@ func TestIndexHashedNodesCoordinator_IsEpochInConfig(t *testing.T) { t.Parallel() arguments := createArguments() - + arguments.ValidatorInfoCacher = dataPool.NewCurrentEpochValidatorInfoPool() ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(t, err) @@ -168,9 +168,8 @@ func TestIndexHashedNodesCoordinator_IsEpochInConfig(t *testing.T) { ihnc.updateEpochFlags(epoch) - validatorInfoCacher := dataPool.NewCurrentEpochValidatorInfoPool() - body := createBlockBodyFromNodesCoordinator(ihnc, epoch, 
validatorInfoCacher) - validatorsInfo, _ := ihnc.createValidatorInfoFromBody(body, 10, validatorInfoCacher, epoch) + body := createBlockBodyFromNodesCoordinator(ihnc, epoch, ihnc.validatorInfoCacher) + validatorsInfo, _ := ihnc.createValidatorInfoFromBody(body, 10, epoch) err = ihnc.SetNodesConfigFromValidatorsInfo(epoch, []byte{}, validatorsInfo) require.Nil(t, err) diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinatorWithRater_test.go b/sharding/nodesCoordinator/indexHashedNodesCoordinatorWithRater_test.go index 03162efa65e..ba2c883f207 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinatorWithRater_test.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinatorWithRater_test.go @@ -19,6 +19,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon/genericMocks" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" + validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -95,6 +96,7 @@ func TestIndexHashedGroupSelectorWithRater_OkValShouldWork(t *testing.T) { NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } nc, err := NewIndexHashedNodesCoordinator(arguments) assert.Nil(t, err) @@ -190,6 +192,7 @@ func BenchmarkIndexHashedGroupSelectorWithRater_ComputeValidatorsGroup63of400(b NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(b, err) @@ -264,6 +267,7 @@ func Test_ComputeValidatorsGroup63of400(t *testing.T) { NodeTypeProvider: 
&nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) numRounds := uint64(1000000) @@ -338,6 +342,7 @@ func TestIndexHashedGroupSelectorWithRater_GetValidatorWithPublicKeyShouldReturn NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } nc, _ := NewIndexHashedNodesCoordinator(arguments) ihnc, _ := NewIndexHashedNodesCoordinatorWithRater(nc, &mock.RaterMock{}) @@ -391,6 +396,7 @@ func TestIndexHashedGroupSelectorWithRater_GetValidatorWithPublicKeyShouldReturn NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } nc, _ := NewIndexHashedNodesCoordinator(arguments) ihnc, _ := NewIndexHashedNodesCoordinatorWithRater(nc, &mock.RaterMock{}) @@ -458,6 +464,7 @@ func TestIndexHashedGroupSelectorWithRater_GetValidatorWithPublicKeyShouldWork(t NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } nc, _ := NewIndexHashedNodesCoordinator(arguments) ihnc, _ := NewIndexHashedNodesCoordinatorWithRater(nc, &mock.RaterMock{}) @@ -542,6 +549,7 @@ func TestIndexHashedGroupSelectorWithRater_GetAllEligibleValidatorsPublicKeys(t NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } nc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -847,6 +855,7 @@ 
func BenchmarkIndexHashedWithRaterGroupSelector_ComputeValidatorsGroup21of400(b NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(b, err) diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go b/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go index 2dc749b5e56..d74356f3b65 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go @@ -28,6 +28,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon/genericMocks" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" + validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -117,6 +118,7 @@ func createArguments() ArgNodesCoordinator { EnableEpochsHandler: &mock.EnableEpochsHandlerMock{ IsRefactorPeersMiniBlocksFlagEnabledField: true, }, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } return arguments } @@ -266,6 +268,7 @@ func TestIndexHashedNodesCoordinator_OkValShouldWork(t *testing.T) { ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -325,6 +328,7 @@ func TestIndexHashedNodesCoordinator_NewCoordinatorTooFewNodesShouldErr(t *testi ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: 
&mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -398,6 +402,7 @@ func TestIndexHashedNodesCoordinator_ComputeValidatorsGroup1ValidatorShouldRetur ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) list2, err := ihnc.ComputeConsensusGroup([]byte("randomness"), 0, 0, 0) @@ -457,6 +462,7 @@ func TestIndexHashedNodesCoordinator_ComputeValidatorsGroup400of400For10locksNoM ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -544,6 +550,7 @@ func TestIndexHashedNodesCoordinator_ComputeValidatorsGroup400of400For10BlocksMe ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -614,6 +621,7 @@ func TestIndexHashedNodesCoordinator_ComputeValidatorsGroup63of400TestEqualSameP ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -677,6 +685,7 @@ func BenchmarkIndexHashedGroupSelector_ComputeValidatorsGroup21of400(b *testing. 
ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -749,6 +758,7 @@ func runBenchmark(consensusGroupCache Cacher, consensusGroupSize int, nodesMap m ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -798,6 +808,7 @@ func computeMemoryRequirements(consensusGroupCache Cacher, consensusGroupSize in ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(b, err) @@ -937,6 +948,7 @@ func TestIndexHashedNodesCoordinator_GetValidatorWithPublicKeyShouldWork(t *test ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -1020,6 +1032,7 @@ func TestIndexHashedGroupSelector_GetAllEligibleValidatorsPublicKeys(t *testing. 
ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -1098,6 +1111,7 @@ func TestIndexHashedGroupSelector_GetAllWaitingValidatorsPublicKeys(t *testing.T ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, + ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -1156,7 +1170,7 @@ func TestIndexHashedNodesCoordinator_EpochStart(t *testing.T) { t.Parallel() arguments := createArguments() - + arguments.ValidatorInfoCacher = dataPool.NewCurrentEpochValidatorInfoPool() ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(t, err) epoch := uint32(1) @@ -1169,9 +1183,8 @@ func TestIndexHashedNodesCoordinator_EpochStart(t *testing.T) { ihnc.nodesConfig[epoch] = ihnc.nodesConfig[0] - validatorInfoCacher := dataPool.NewCurrentEpochValidatorInfoPool() - body := createBlockBodyFromNodesCoordinator(ihnc, epoch, validatorInfoCacher) - ihnc.EpochStartPrepare(header, body, validatorInfoCacher) + body := createBlockBodyFromNodesCoordinator(ihnc, epoch, ihnc.validatorInfoCacher) + ihnc.EpochStartPrepare(header, body) ihnc.EpochStartAction(header) validators, err := ihnc.GetAllEligibleValidatorsPublicKeys(epoch) @@ -1303,6 +1316,7 @@ func TestIndexHashedNodesCoordinator_EpochStartInEligible(t *testing.T) { t.Parallel() arguments := createArguments() + arguments.ValidatorInfoCacher = dataPool.NewCurrentEpochValidatorInfoPool() pk := []byte("pk") arguments.SelfPublicKey = pk ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -1324,9 +1338,8 @@ func TestIndexHashedNodesCoordinator_EpochStartInEligible(t *testing.T) { }, }, } - 
validatorInfoCacher := dataPool.NewCurrentEpochValidatorInfoPool() - body := createBlockBodyFromNodesCoordinator(ihnc, epoch, validatorInfoCacher) - ihnc.EpochStartPrepare(header, body, validatorInfoCacher) + body := createBlockBodyFromNodesCoordinator(ihnc, epoch, ihnc.validatorInfoCacher) + ihnc.EpochStartPrepare(header, body) ihnc.EpochStartAction(header) computedShardId, isValidator := ihnc.computeShardForSelfPublicKey(ihnc.nodesConfig[epoch]) @@ -1339,6 +1352,7 @@ func TestIndexHashedNodesCoordinator_EpochStartInWaiting(t *testing.T) { t.Parallel() arguments := createArguments() + arguments.ValidatorInfoCacher = dataPool.NewCurrentEpochValidatorInfoPool() pk := []byte("pk") arguments.SelfPublicKey = pk ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -1360,9 +1374,8 @@ func TestIndexHashedNodesCoordinator_EpochStartInWaiting(t *testing.T) { }, }, } - validatorInfoCacher := dataPool.NewCurrentEpochValidatorInfoPool() - body := createBlockBodyFromNodesCoordinator(ihnc, epoch, validatorInfoCacher) - ihnc.EpochStartPrepare(header, body, validatorInfoCacher) + body := createBlockBodyFromNodesCoordinator(ihnc, epoch, ihnc.validatorInfoCacher) + ihnc.EpochStartPrepare(header, body) ihnc.EpochStartAction(header) computedShardId, isValidator := ihnc.computeShardForSelfPublicKey(ihnc.nodesConfig[epoch]) @@ -1374,6 +1387,7 @@ func TestIndexHashedNodesCoordinator_EpochStartInLeaving(t *testing.T) { t.Parallel() arguments := createArguments() + arguments.ValidatorInfoCacher = dataPool.NewCurrentEpochValidatorInfoPool() pk := []byte("pk") arguments.SelfPublicKey = pk ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -1400,9 +1414,8 @@ func TestIndexHashedNodesCoordinator_EpochStartInLeaving(t *testing.T) { }, }, } - validatorInfoCacher := dataPool.NewCurrentEpochValidatorInfoPool() - body := createBlockBodyFromNodesCoordinator(ihnc, epoch, validatorInfoCacher) - ihnc.EpochStartPrepare(header, body, validatorInfoCacher) + body := 
createBlockBodyFromNodesCoordinator(ihnc, epoch, ihnc.validatorInfoCacher) + ihnc.EpochStartPrepare(header, body) ihnc.EpochStartAction(header) computedShardId, isValidator := ihnc.computeShardForSelfPublicKey(ihnc.nodesConfig[epoch]) @@ -1459,6 +1472,7 @@ func TestIndexHashedNodesCoordinator_EpochStart_EligibleSortedAscendingByIndex(t EnableEpochsHandler: &mock.EnableEpochsHandlerMock{ IsRefactorPeersMiniBlocksFlagEnabledField: true, }, + ValidatorInfoCacher: dataPool.NewCurrentEpochValidatorInfoPool(), } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -1473,9 +1487,8 @@ func TestIndexHashedNodesCoordinator_EpochStart_EligibleSortedAscendingByIndex(t ihnc.nodesConfig[epoch] = ihnc.nodesConfig[0] - validatorInfoCacher := dataPool.NewCurrentEpochValidatorInfoPool() - body := createBlockBodyFromNodesCoordinator(ihnc, epoch, validatorInfoCacher) - ihnc.EpochStartPrepare(header, body, validatorInfoCacher) + body := createBlockBodyFromNodesCoordinator(ihnc, epoch, ihnc.validatorInfoCacher) + ihnc.EpochStartPrepare(header, body) newNodesConfig := ihnc.nodesConfig[1] @@ -1556,6 +1569,7 @@ func TestIndexHashedNodesCoordinator_GetSavedStateKey(t *testing.T) { t.Parallel() args := createArguments() + args.ValidatorInfoCacher = dataPool.NewCurrentEpochValidatorInfoPool() ihnc, err := NewIndexHashedNodesCoordinator(args) require.Nil(t, err) @@ -1565,9 +1579,8 @@ func TestIndexHashedNodesCoordinator_GetSavedStateKey(t *testing.T) { Epoch: 1, } - validatorInfoCacher := dataPool.NewCurrentEpochValidatorInfoPool() - body := createBlockBodyFromNodesCoordinator(ihnc, 0, validatorInfoCacher) - ihnc.EpochStartPrepare(header, body, validatorInfoCacher) + body := createBlockBodyFromNodesCoordinator(ihnc, 0, ihnc.validatorInfoCacher) + ihnc.EpochStartPrepare(header, body) ihnc.EpochStartAction(header) key := ihnc.GetSavedStateKey() @@ -1640,7 +1653,7 @@ func TestIndexHashedNodesCoordinator_GetConsensusWhitelistedNodesEpoch1(t *testi t.Parallel() arguments := createArguments() - 
+ arguments.ValidatorInfoCacher = dataPool.NewCurrentEpochValidatorInfoPool() ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(t, err) @@ -1650,9 +1663,8 @@ func TestIndexHashedNodesCoordinator_GetConsensusWhitelistedNodesEpoch1(t *testi Epoch: 1, } - validatorInfoCacher := dataPool.NewCurrentEpochValidatorInfoPool() - body := createBlockBodyFromNodesCoordinator(ihnc, 0, validatorInfoCacher) - ihnc.EpochStartPrepare(header, body, validatorInfoCacher) + body := createBlockBodyFromNodesCoordinator(ihnc, 0, ihnc.validatorInfoCacher) + ihnc.EpochStartPrepare(header, body) ihnc.EpochStartAction(header) nodesPrevEpoch, err := ihnc.GetAllEligibleValidatorsPublicKeys(0) @@ -1683,6 +1695,7 @@ func TestIndexHashedNodesCoordinator_GetConsensusWhitelistedNodesAfterRevertToEp t.Parallel() arguments := createArguments() + arguments.ValidatorInfoCacher = dataPool.NewCurrentEpochValidatorInfoPool() ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(t, err) @@ -1692,37 +1705,35 @@ func TestIndexHashedNodesCoordinator_GetConsensusWhitelistedNodesAfterRevertToEp Epoch: 1, } - validatorInfoCacher := dataPool.NewCurrentEpochValidatorInfoPool() - - body := createBlockBodyFromNodesCoordinator(ihnc, 0, validatorInfoCacher) - ihnc.EpochStartPrepare(header, body, validatorInfoCacher) + body := createBlockBodyFromNodesCoordinator(ihnc, 0, ihnc.validatorInfoCacher) + ihnc.EpochStartPrepare(header, body) ihnc.EpochStartAction(header) - body = createBlockBodyFromNodesCoordinator(ihnc, 1, validatorInfoCacher) + body = createBlockBodyFromNodesCoordinator(ihnc, 1, ihnc.validatorInfoCacher) header = &block.MetaBlock{ PrevRandSeed: []byte("rand seed"), EpochStart: block.EpochStart{LastFinalizedHeaders: []block.EpochStartShardData{{}}}, Epoch: 2, } - ihnc.EpochStartPrepare(header, body, validatorInfoCacher) + ihnc.EpochStartPrepare(header, body) ihnc.EpochStartAction(header) - body = createBlockBodyFromNodesCoordinator(ihnc, 2, validatorInfoCacher) + body = 
createBlockBodyFromNodesCoordinator(ihnc, 2, ihnc.validatorInfoCacher) header = &block.MetaBlock{ PrevRandSeed: []byte("rand seed"), EpochStart: block.EpochStart{LastFinalizedHeaders: []block.EpochStartShardData{{}}}, Epoch: 3, } - ihnc.EpochStartPrepare(header, body, validatorInfoCacher) + ihnc.EpochStartPrepare(header, body) ihnc.EpochStartAction(header) - body = createBlockBodyFromNodesCoordinator(ihnc, 3, validatorInfoCacher) + body = createBlockBodyFromNodesCoordinator(ihnc, 3, ihnc.validatorInfoCacher) header = &block.MetaBlock{ PrevRandSeed: []byte("rand seed"), EpochStart: block.EpochStart{LastFinalizedHeaders: []block.EpochStartShardData{{}}}, Epoch: 4, } - ihnc.EpochStartPrepare(header, body, validatorInfoCacher) + ihnc.EpochStartPrepare(header, body) ihnc.EpochStartAction(header) nodesEpoch1, err := ihnc.GetAllEligibleValidatorsPublicKeys(1) diff --git a/sharding/nodesCoordinator/interface.go b/sharding/nodesCoordinator/interface.go index 2db2aab8d5e..b53506fc473 100644 --- a/sharding/nodesCoordinator/interface.go +++ b/sharding/nodesCoordinator/interface.go @@ -98,7 +98,7 @@ type RandomSelector interface { // EpochStartActionHandler defines the action taken on epoch start event type EpochStartActionHandler interface { EpochStartAction(hdr data.HeaderHandler) - EpochStartPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) + EpochStartPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) NotifyOrder() uint32 } diff --git a/sharding/nodesCoordinator/shardingArgs.go b/sharding/nodesCoordinator/shardingArgs.go index e9c5076027a..5071c434976 100644 --- a/sharding/nodesCoordinator/shardingArgs.go +++ b/sharding/nodesCoordinator/shardingArgs.go @@ -5,6 +5,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/hashing" "github.com/ElrondNetwork/elrond-go-core/marshal" "github.com/ElrondNetwork/elrond-go/common" + "github.com/ElrondNetwork/elrond-go/epochStart" 
"github.com/ElrondNetwork/elrond-go/storage" ) @@ -30,4 +31,5 @@ type ArgNodesCoordinator struct { NodeTypeProvider NodeTypeProviderHandler IsFullArchive bool EnableEpochsHandler common.EnableEpochsHandler + ValidatorInfoCacher epochStart.ValidatorInfoCacher } diff --git a/testscommon/genericMocks/actionHandlerStub.go b/testscommon/genericMocks/actionHandlerStub.go index fd4b0444cab..8595b4cfa8d 100644 --- a/testscommon/genericMocks/actionHandlerStub.go +++ b/testscommon/genericMocks/actionHandlerStub.go @@ -2,13 +2,12 @@ package genericMocks import ( "github.com/ElrondNetwork/elrond-go-core/data" - "github.com/ElrondNetwork/elrond-go/epochStart" ) // ActionHandlerStub - type ActionHandlerStub struct { EpochStartActionCalled func(hdr data.HeaderHandler) - EpochStartPrepareCalled func(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) + EpochStartPrepareCalled func(metaHdr data.HeaderHandler, body data.BodyHandler) NotifyOrderCalled func() uint32 } @@ -20,9 +19,9 @@ func (ahs *ActionHandlerStub) EpochStartAction(hdr data.HeaderHandler) { } // EpochStartPrepare - -func (ahs *ActionHandlerStub) EpochStartPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) { +func (ahs *ActionHandlerStub) EpochStartPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) { if ahs.EpochStartPrepareCalled != nil { - ahs.EpochStartPrepareCalled(metaHdr, body, validatorInfoCacher) + ahs.EpochStartPrepareCalled(metaHdr, body) } } diff --git a/testscommon/shardingMocks/nodesCoordinatorStub.go b/testscommon/shardingMocks/nodesCoordinatorStub.go index 3dadb02fda8..a6347e14c15 100644 --- a/testscommon/shardingMocks/nodesCoordinatorStub.go +++ b/testscommon/shardingMocks/nodesCoordinatorStub.go @@ -2,7 +2,6 @@ package shardingMocks import ( "github.com/ElrondNetwork/elrond-go-core/data" - "github.com/ElrondNetwork/elrond-go/epochStart" 
"github.com/ElrondNetwork/elrond-go/sharding/nodesCoordinator" state "github.com/ElrondNetwork/elrond-go/state" ) @@ -17,7 +16,7 @@ type NodesCoordinatorStub struct { GetAllEligibleValidatorsPublicKeysCalled func(epoch uint32) (map[uint32][][]byte, error) ConsensusGroupSizeCalled func(shardID uint32) int ComputeConsensusGroupCalled func(randomness []byte, round uint64, shardId uint32, epoch uint32) (validatorsGroup []nodesCoordinator.Validator, err error) - EpochStartPrepareCalled func(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) + EpochStartPrepareCalled func(metaHdr data.HeaderHandler, body data.BodyHandler) } // NodesCoordinatorToRegistry - @@ -26,9 +25,9 @@ func (ncm *NodesCoordinatorStub) NodesCoordinatorToRegistry() *nodesCoordinator. } // EpochStartPrepare - -func (ncm *NodesCoordinatorStub) EpochStartPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) { +func (ncm *NodesCoordinatorStub) EpochStartPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) { if ncm.EpochStartPrepareCalled != nil { - ncm.EpochStartPrepareCalled(metaHdr, body, validatorInfoCacher) + ncm.EpochStartPrepareCalled(metaHdr, body) } } diff --git a/testscommon/validatorInfoCacher/validatorInfoCacherMock.go b/testscommon/validatorInfoCacher/validatorInfoCacherMock.go deleted file mode 100644 index d448d4ee8df..00000000000 --- a/testscommon/validatorInfoCacher/validatorInfoCacherMock.go +++ /dev/null @@ -1,38 +0,0 @@ -package validatorInfoCacherMock - -import "github.com/ElrondNetwork/elrond-go/state" - -// ValidatorInfoCacherMock - -type ValidatorInfoCacherMock struct { - CleanCalled func() - AddValidatorInfoCalled func(validatorInfoHash []byte, validatorInfo *state.ShardValidatorInfo) - GetValidatorInfoCalled func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) -} - -// Clean - -func (vicm *ValidatorInfoCacherMock) Clean() { - if vicm.CleanCalled != nil { - 
vicm.CleanCalled() - } -} - -// GetValidatorInfo - -func (vicm *ValidatorInfoCacherMock) GetValidatorInfo(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { - if vicm.GetValidatorInfoCalled != nil { - return vicm.GetValidatorInfoCalled(validatorInfoHash) - } - - return nil, nil -} - -// AddValidatorInfo - -func (vicm *ValidatorInfoCacherMock) AddValidatorInfo(validatorInfoHash []byte, validatorInfo *state.ShardValidatorInfo) { - if vicm.AddValidatorInfoCalled != nil { - vicm.AddValidatorInfoCalled(validatorInfoHash, validatorInfo) - } -} - -// IsInterfaceNil returns true if there is no value under the interface -func (vicm *ValidatorInfoCacherMock) IsInterfaceNil() bool { - return vicm == nil -} diff --git a/testscommon/validatorInfoCacher/validatorInfoCacherStub.go b/testscommon/validatorInfoCacher/validatorInfoCacherStub.go new file mode 100644 index 00000000000..c8d9ad5b443 --- /dev/null +++ b/testscommon/validatorInfoCacher/validatorInfoCacherStub.go @@ -0,0 +1,38 @@ +package validatorInfoCacherStub + +import "github.com/ElrondNetwork/elrond-go/state" + +// ValidatorInfoCacherStub - +type ValidatorInfoCacherStub struct { + CleanCalled func() + AddValidatorInfoCalled func(validatorInfoHash []byte, validatorInfo *state.ShardValidatorInfo) + GetValidatorInfoCalled func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) +} + +// Clean - +func (vics *ValidatorInfoCacherStub) Clean() { + if vics.CleanCalled != nil { + vics.CleanCalled() + } +} + +// GetValidatorInfo - +func (vics *ValidatorInfoCacherStub) GetValidatorInfo(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { + if vics.GetValidatorInfoCalled != nil { + return vics.GetValidatorInfoCalled(validatorInfoHash) + } + + return nil, nil +} + +// AddValidatorInfo - +func (vics *ValidatorInfoCacherStub) AddValidatorInfo(validatorInfoHash []byte, validatorInfo *state.ShardValidatorInfo) { + if vics.AddValidatorInfoCalled != nil { + vics.AddValidatorInfoCalled(validatorInfoHash, 
validatorInfo) + } +} + +// IsInterfaceNil returns true if there is no value under the interface +func (vics *ValidatorInfoCacherStub) IsInterfaceNil() bool { + return vics == nil +} diff --git a/update/mock/epochStartNotifierStub.go b/update/mock/epochStartNotifierStub.go index e0716b6d111..50ac82f413b 100644 --- a/update/mock/epochStartNotifierStub.go +++ b/update/mock/epochStartNotifierStub.go @@ -10,7 +10,7 @@ type EpochStartNotifierStub struct { RegisterHandlerCalled func(handler epochStart.ActionHandler) UnregisterHandlerCalled func(handler epochStart.ActionHandler) NotifyAllCalled func(hdr data.HeaderHandler) - NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) + NotifyAllPrepareCalled func(hdr data.HeaderHandler, body data.BodyHandler) epochStartHdls []epochStart.ActionHandler NotifyEpochChangeConfirmedCalled func(epoch uint32) RegisterForEpochChangeConfirmedCalled func(handler func(epoch uint32)) @@ -54,13 +54,13 @@ func (esnm *EpochStartNotifierStub) UnregisterHandler(handler epochStart.ActionH } // NotifyAllPrepare - -func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler, validatorInfoCacher epochStart.ValidatorInfoCacher) { +func (esnm *EpochStartNotifierStub) NotifyAllPrepare(metaHdr data.HeaderHandler, body data.BodyHandler) { if esnm.NotifyAllPrepareCalled != nil { - esnm.NotifyAllPrepareCalled(metaHdr, body, validatorInfoCacher) + esnm.NotifyAllPrepareCalled(metaHdr, body) } for _, hdl := range esnm.epochStartHdls { - hdl.EpochStartPrepare(metaHdr, body, validatorInfoCacher) + hdl.EpochStartPrepare(metaHdr, body) } } diff --git a/update/sync/syncTransactions.go b/update/sync/syncTransactions.go index 8d131c6c253..73210580a32 100644 --- a/update/sync/syncTransactions.go +++ b/update/sync/syncTransactions.go @@ -216,7 +216,8 @@ func (ts *transactionsSync) requestTransactionsForNonPeerMiniBlock(miniBlock *bl func (ts 
*transactionsSync) requestTransactionsForPeerMiniBlock(miniBlock *block.MiniBlock) int { missingValidatorsInfo := make([][]byte, 0) for _, txHash := range miniBlock.TxHashes { - if _, ok := ts.mapValidatorsInfo[string(txHash)]; ok { + _, bFoundValidatorInfo := ts.mapValidatorsInfo[string(txHash)] + if bFoundValidatorInfo { continue } From a57334e309020714d24e9342002a69399718edcf Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Mon, 29 Aug 2022 14:55:27 +0300 Subject: [PATCH 65/70] * Fixed after self review --- .../bootstrap/syncValidatorStatus_test.go | 4 +-- epochStart/metachain/trigger_test.go | 4 +-- epochStart/metachain/validators.go | 4 +-- epochStart/metachain/validators_test.go | 4 +-- epochStart/shardchain/trigger_test.go | 6 ++-- integrationTests/consensus/testInitializer.go | 4 +-- integrationTests/nodesCoordinatorFactory.go | 6 ++-- integrationTests/testHeartbeatNode.go | 6 ++-- integrationTests/testP2PNode.go | 6 ++-- .../testProcessorNodeWithCoordinator.go | 4 +-- .../testProcessorNodeWithMultisigner.go | 6 ++-- ...dexHashedNodesCoordinatorWithRater_test.go | 18 ++++++------ .../indexHashedNodesCoordinator_test.go | 28 +++++++++---------- update/sync/syncTransactions.go | 4 +-- 14 files changed, 51 insertions(+), 53 deletions(-) diff --git a/epochStart/bootstrap/syncValidatorStatus_test.go b/epochStart/bootstrap/syncValidatorStatus_test.go index 272ef039960..1244022e01b 100644 --- a/epochStart/bootstrap/syncValidatorStatus_test.go +++ b/epochStart/bootstrap/syncValidatorStatus_test.go @@ -19,7 +19,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" "github.com/ElrondNetwork/elrond-go/testscommon/shardingMocks" - validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" + vic "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ 
-251,7 +251,7 @@ func getSyncValidatorStatusArgs() ArgsNewSyncValidatorStatus { return testscommon.NewCacherStub() }, CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { - return &validatorInfoCacherMock.ValidatorInfoCacherStub{} + return &vic.ValidatorInfoCacherStub{} }, }, Marshalizer: &mock.MarshalizerMock{}, diff --git a/epochStart/metachain/trigger_test.go b/epochStart/metachain/trigger_test.go index ee8e25bb70b..d06eca1f8ad 100644 --- a/epochStart/metachain/trigger_test.go +++ b/epochStart/metachain/trigger_test.go @@ -20,7 +20,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" statusHandlerMock "github.com/ElrondNetwork/elrond-go/testscommon/statusHandler" storageStubs "github.com/ElrondNetwork/elrond-go/testscommon/storage" - validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" + vic "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -57,7 +57,7 @@ func createMockEpochStartTriggerArguments() *ArgsNewMetaEpochStartTrigger { }, DataPool: &dataRetrieverMock.PoolsHolderStub{ CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { - return &validatorInfoCacherMock.ValidatorInfoCacherStub{} + return &vic.ValidatorInfoCacherStub{} }, }, } diff --git a/epochStart/metachain/validators.go b/epochStart/metachain/validators.go index 18e4051d2af..4963adb2426 100644 --- a/epochStart/metachain/validators.go +++ b/epochStart/metachain/validators.go @@ -3,7 +3,6 @@ package metachain import ( "bytes" "sort" - "sync" "github.com/ElrondNetwork/elrond-go-core/core" "github.com/ElrondNetwork/elrond-go-core/core/check" @@ -40,7 +39,6 @@ type validatorInfoCreator struct { hasher hashing.Hasher marshalizer marshal.Marshalizer dataPool dataRetriever.PoolsHolder - mutValidatorInfo sync.Mutex enableEpochsHandler common.EnableEpochsHandler } @@ -367,7 +365,7 @@ func (vic 
*validatorInfoCreator) SaveBlockDataToStorage(_ data.HeaderHandler, bo mbHash := vic.hasher.Compute(string(marshalledData)) err = vic.miniBlockStorage.Put(mbHash, marshalledData) if err != nil { - log.Debug("validatorInfoCreator.SaveBlockDataToStorage.Put", "error", err) + log.Debug("validatorInfoCreator.SaveBlockDataToStorage.Put", "hash", mbHash, "error", err) } } } diff --git a/epochStart/metachain/validators_test.go b/epochStart/metachain/validators_test.go index f8a66c2afdd..4c5eb7a8d1b 100644 --- a/epochStart/metachain/validators_test.go +++ b/epochStart/metachain/validators_test.go @@ -19,7 +19,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon" dataRetrieverMock "github.com/ElrondNetwork/elrond-go/testscommon/dataRetriever" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" - validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" + vics "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/stretchr/testify/require" ) @@ -126,7 +126,7 @@ func createMockEpochValidatorInfoCreatorsArguments() ArgsNewValidatorInfoCreator } }, CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { - return &validatorInfoCacherMock.ValidatorInfoCacherStub{} + return &vics.ValidatorInfoCacherStub{} }, }, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ diff --git a/epochStart/shardchain/trigger_test.go b/epochStart/shardchain/trigger_test.go index 2a727965027..f6dac6f672a 100644 --- a/epochStart/shardchain/trigger_test.go +++ b/epochStart/shardchain/trigger_test.go @@ -20,7 +20,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" statusHandlerMock "github.com/ElrondNetwork/elrond-go/testscommon/statusHandler" storageStubs "github.com/ElrondNetwork/elrond-go/testscommon/storage" - validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" + vic "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" 
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -43,7 +43,7 @@ func createMockShardEpochStartTriggerArguments() *ArgsShardEpochStartTrigger { return testscommon.NewCacherStub() }, CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { - return &validatorInfoCacherMock.ValidatorInfoCacherStub{} + return &vic.ValidatorInfoCacherStub{} }, }, Storage: &storageStubs.ChainStorerStub{ @@ -360,7 +360,7 @@ func TestTrigger_ReceivedHeaderIsEpochStartTrueWithPeerMiniblocks(t *testing.T) } }, CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { - return &validatorInfoCacherMock.ValidatorInfoCacherStub{} + return &vic.ValidatorInfoCacherStub{} }, } args.Uint64Converter = &mock.Uint64ByteSliceConverterMock{ diff --git a/integrationTests/consensus/testInitializer.go b/integrationTests/consensus/testInitializer.go index b1accec315d..8ef844b81e2 100644 --- a/integrationTests/consensus/testInitializer.go +++ b/integrationTests/consensus/testInitializer.go @@ -52,7 +52,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" "github.com/ElrondNetwork/elrond-go/testscommon/shardingMocks" statusHandlerMock "github.com/ElrondNetwork/elrond-go/testscommon/statusHandler" - validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" + vic "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/ElrondNetwork/elrond-go/trie" "github.com/ElrondNetwork/elrond-go/trie/hashesHolder" vmcommon "github.com/ElrondNetwork/elrond-vm-common" @@ -536,7 +536,7 @@ func createNodes( EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ IsWaitingListFixFlagEnabledField: true, }, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nodesCoord, _ := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git 
a/integrationTests/nodesCoordinatorFactory.go b/integrationTests/nodesCoordinatorFactory.go index 5610a79ef95..0a3cc6193e3 100644 --- a/integrationTests/nodesCoordinatorFactory.go +++ b/integrationTests/nodesCoordinatorFactory.go @@ -11,7 +11,7 @@ import ( "github.com/ElrondNetwork/elrond-go/storage" "github.com/ElrondNetwork/elrond-go/testscommon" "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" - validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" + vic "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" ) // ArgIndexHashedNodesCoordinatorFactory - @@ -73,7 +73,7 @@ func (tpn *IndexHashedNodesCoordinatorFactory) CreateNodesCoordinator(arg ArgInd EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{ RefactorPeersMiniBlocksEnableEpochField: UnreachableEpoch, }, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) if err != nil { @@ -131,7 +131,7 @@ func (ihncrf *IndexHashedNodesCoordinatorWithRaterFactory) CreateNodesCoordinato IsWaitingListFixFlagEnabledField: true, RefactorPeersMiniBlocksEnableEpochField: UnreachableEpoch, }, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } baseCoordinator, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/integrationTests/testHeartbeatNode.go b/integrationTests/testHeartbeatNode.go index eb51313085b..81f3b8c6dd5 100644 --- a/integrationTests/testHeartbeatNode.go +++ b/integrationTests/testHeartbeatNode.go @@ -45,7 +45,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon/p2pmocks" "github.com/ElrondNetwork/elrond-go/testscommon/shardingMocks" trieMock "github.com/ElrondNetwork/elrond-go/testscommon/trie" - validatorInfoCacherMock 
"github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" + vic "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/ElrondNetwork/elrond-go/update" ) @@ -298,7 +298,7 @@ func CreateNodesWithTestHeartbeatNode( NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nodesCoordinatorInstance, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) log.LogIfError(err) @@ -344,7 +344,7 @@ func CreateNodesWithTestHeartbeatNode( NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nodesCoordinatorInstance, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) log.LogIfError(err) diff --git a/integrationTests/testP2PNode.go b/integrationTests/testP2PNode.go index a8feca91eb7..783ebb1e04f 100644 --- a/integrationTests/testP2PNode.go +++ b/integrationTests/testP2PNode.go @@ -32,7 +32,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" "github.com/ElrondNetwork/elrond-go/testscommon/p2pmocks" "github.com/ElrondNetwork/elrond-go/testscommon/shardingMocks" - validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" + vic "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/ElrondNetwork/elrond-go/update/trigger" ) @@ -356,7 +356,7 @@ func CreateNodesWithTestP2PNodes( NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, - ValidatorInfoCacher: 
&validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) log.LogIfError(err) @@ -402,7 +402,7 @@ func CreateNodesWithTestP2PNodes( NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) log.LogIfError(err) diff --git a/integrationTests/testProcessorNodeWithCoordinator.go b/integrationTests/testProcessorNodeWithCoordinator.go index a22d592ea38..e573900ef86 100644 --- a/integrationTests/testProcessorNodeWithCoordinator.go +++ b/integrationTests/testProcessorNodeWithCoordinator.go @@ -15,7 +15,7 @@ import ( "github.com/ElrondNetwork/elrond-go/sharding/nodesCoordinator" "github.com/ElrondNetwork/elrond-go/storage/lrucache" "github.com/ElrondNetwork/elrond-go/testscommon" - validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" + vic "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" ) type nodeKeys struct { @@ -75,7 +75,7 @@ func CreateProcessorNodesWithNodesCoordinator( ChanStopNode: endProcess.GetDummyEndProcessChannel(), IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nodesCoordinatorInstance, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/integrationTests/testProcessorNodeWithMultisigner.go b/integrationTests/testProcessorNodeWithMultisigner.go index fe1d8718a6c..f8964412285 100644 --- a/integrationTests/testProcessorNodeWithMultisigner.go +++ 
b/integrationTests/testProcessorNodeWithMultisigner.go @@ -31,7 +31,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon" "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" "github.com/ElrondNetwork/elrond-go/testscommon/shardingMocks" - validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" + vic "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" ) // CreateNodesWithNodesCoordinator returns a map with nodes per shard each using a real nodes coordinator @@ -429,7 +429,7 @@ func CreateNodesWithNodesCoordinatorAndHeaderSigVerifier( NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nodesCoordinatorInstance, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) @@ -544,7 +544,7 @@ func CreateNodesWithNodesCoordinatorKeygenAndSingleSigner( NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &testscommon.EnableEpochsHandlerStub{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nodesCoord, err := nodesCoordinator.NewIndexHashedNodesCoordinator(argumentsNodesCoordinator) diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinatorWithRater_test.go b/sharding/nodesCoordinator/indexHashedNodesCoordinatorWithRater_test.go index ba2c883f207..b86400d2369 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinatorWithRater_test.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinatorWithRater_test.go @@ -19,7 +19,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon/genericMocks" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" 
"github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" - validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" + vic "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -96,7 +96,7 @@ func TestIndexHashedGroupSelectorWithRater_OkValShouldWork(t *testing.T) { NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nc, err := NewIndexHashedNodesCoordinator(arguments) assert.Nil(t, err) @@ -192,7 +192,7 @@ func BenchmarkIndexHashedGroupSelectorWithRater_ComputeValidatorsGroup63of400(b NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(b, err) @@ -267,7 +267,7 @@ func Test_ComputeValidatorsGroup63of400(t *testing.T) { NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) numRounds := uint64(1000000) @@ -342,7 +342,7 @@ func TestIndexHashedGroupSelectorWithRater_GetValidatorWithPublicKeyShouldReturn NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: 
&vic.ValidatorInfoCacherStub{}, } nc, _ := NewIndexHashedNodesCoordinator(arguments) ihnc, _ := NewIndexHashedNodesCoordinatorWithRater(nc, &mock.RaterMock{}) @@ -396,7 +396,7 @@ func TestIndexHashedGroupSelectorWithRater_GetValidatorWithPublicKeyShouldReturn NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nc, _ := NewIndexHashedNodesCoordinator(arguments) ihnc, _ := NewIndexHashedNodesCoordinatorWithRater(nc, &mock.RaterMock{}) @@ -464,7 +464,7 @@ func TestIndexHashedGroupSelectorWithRater_GetValidatorWithPublicKeyShouldWork(t NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nc, _ := NewIndexHashedNodesCoordinator(arguments) ihnc, _ := NewIndexHashedNodesCoordinatorWithRater(nc, &mock.RaterMock{}) @@ -549,7 +549,7 @@ func TestIndexHashedGroupSelectorWithRater_GetAllEligibleValidatorsPublicKeys(t NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } nc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -855,7 +855,7 @@ func BenchmarkIndexHashedWithRaterGroupSelector_ComputeValidatorsGroup21of400(b NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, IsFullArchive: false, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, err := 
NewIndexHashedNodesCoordinator(arguments) require.Nil(b, err) diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go b/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go index d74356f3b65..76f9fbf0255 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go @@ -28,7 +28,7 @@ import ( "github.com/ElrondNetwork/elrond-go/testscommon/genericMocks" "github.com/ElrondNetwork/elrond-go/testscommon/hashingMocks" "github.com/ElrondNetwork/elrond-go/testscommon/nodeTypeProviderMock" - validatorInfoCacherMock "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" + vic "github.com/ElrondNetwork/elrond-go/testscommon/validatorInfoCacher" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -118,7 +118,7 @@ func createArguments() ArgNodesCoordinator { EnableEpochsHandler: &mock.EnableEpochsHandlerMock{ IsRefactorPeersMiniBlocksFlagEnabledField: true, }, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } return arguments } @@ -268,7 +268,7 @@ func TestIndexHashedNodesCoordinator_OkValShouldWork(t *testing.T) { ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -328,7 +328,7 @@ func TestIndexHashedNodesCoordinator_NewCoordinatorTooFewNodesShouldErr(t *testi ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: 
&vic.ValidatorInfoCacherStub{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -402,7 +402,7 @@ func TestIndexHashedNodesCoordinator_ComputeValidatorsGroup1ValidatorShouldRetur ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) list2, err := ihnc.ComputeConsensusGroup([]byte("randomness"), 0, 0, 0) @@ -462,7 +462,7 @@ func TestIndexHashedNodesCoordinator_ComputeValidatorsGroup400of400For10locksNoM ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -550,7 +550,7 @@ func TestIndexHashedNodesCoordinator_ComputeValidatorsGroup400of400For10BlocksMe ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -621,7 +621,7 @@ func TestIndexHashedNodesCoordinator_ComputeValidatorsGroup63of400TestEqualSameP ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) @@ -685,7 
+685,7 @@ func BenchmarkIndexHashedGroupSelector_ComputeValidatorsGroup21of400(b *testing. ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -758,7 +758,7 @@ func runBenchmark(consensusGroupCache Cacher, consensusGroupSize int, nodesMap m ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -808,7 +808,7 @@ func computeMemoryRequirements(consensusGroupCache Cacher, consensusGroupSize in ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, err := NewIndexHashedNodesCoordinator(arguments) require.Nil(b, err) @@ -948,7 +948,7 @@ func TestIndexHashedNodesCoordinator_GetValidatorWithPublicKeyShouldWork(t *test ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -1032,7 +1032,7 @@ func TestIndexHashedGroupSelector_GetAllEligibleValidatorsPublicKeys(t *testing. 
ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) @@ -1111,7 +1111,7 @@ func TestIndexHashedGroupSelector_GetAllWaitingValidatorsPublicKeys(t *testing.T ChanStopNode: make(chan endProcess.ArgEndProcess), NodeTypeProvider: &nodeTypeProviderMock.NodeTypeProviderStub{}, EnableEpochsHandler: &mock.EnableEpochsHandlerMock{}, - ValidatorInfoCacher: &validatorInfoCacherMock.ValidatorInfoCacherStub{}, + ValidatorInfoCacher: &vic.ValidatorInfoCacherStub{}, } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) diff --git a/update/sync/syncTransactions.go b/update/sync/syncTransactions.go index 73210580a32..00a2584a2a7 100644 --- a/update/sync/syncTransactions.go +++ b/update/sync/syncTransactions.go @@ -216,8 +216,8 @@ func (ts *transactionsSync) requestTransactionsForNonPeerMiniBlock(miniBlock *bl func (ts *transactionsSync) requestTransactionsForPeerMiniBlock(miniBlock *block.MiniBlock) int { missingValidatorsInfo := make([][]byte, 0) for _, txHash := range miniBlock.TxHashes { - _, bFoundValidatorInfo := ts.mapValidatorsInfo[string(txHash)] - if bFoundValidatorInfo { + _, isValidatorInfoFound := ts.mapValidatorsInfo[string(txHash)] + if isValidatorInfoFound { continue } From 23d28bf1061e7834ed715cdad7e7f4f53f18cb6a Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Mon, 29 Aug 2022 17:01:01 +0300 Subject: [PATCH 66/70] * Fixed after merge --- epochStart/metachain/validators_test.go | 32 +++++++++---------- epochStart/shardchain/trigger_test.go | 2 +- .../validatorInfoPreProcessor_test.go | 2 -- .../indexHashedNodesCoordinator_test.go | 8 ++--- 4 files changed, 20 insertions(+), 24 deletions(-) diff --git a/epochStart/metachain/validators_test.go 
b/epochStart/metachain/validators_test.go index 55d82a17fb6..2362d5ef50f 100644 --- a/epochStart/metachain/validators_test.go +++ b/epochStart/metachain/validators_test.go @@ -709,7 +709,7 @@ func TestEpochValidatorInfoCreator_CreateMarshalledData(t *testing.T) { } arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { - return &validatorInfoCacherMock.ValidatorInfoCacherMock{ + return &vics.ValidatorInfoCacherStub{ GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { return nil, errors.New("error") }, @@ -756,7 +756,7 @@ func TestEpochValidatorInfoCreator_CreateMarshalledData(t *testing.T) { } arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { - return &validatorInfoCacherMock.ValidatorInfoCacherMock{ + return &vics.ValidatorInfoCacherStub{ GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { if bytes.Equal(validatorInfoHash, []byte("a")) { return svi1, nil @@ -814,7 +814,7 @@ func TestEpochValidatorInfoCreator_SetMarshalledValidatorInfoTxsShouldWork(t *te } arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { - return &validatorInfoCacherMock.ValidatorInfoCacherMock{ + return &vics.ValidatorInfoCacherStub{ GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { if bytes.Equal(validatorInfoHash, []byte("a")) { return svi1, nil @@ -840,13 +840,11 @@ func TestEpochValidatorInfoCreator_SetMarshalledValidatorInfoTxsShouldWork(t *te }, } - marshalledValidatorInfoTxs := make(map[string][][]byte) - vic.setMarshalledValidatorInfoTxs(miniBlock, marshalledValidatorInfoTxs, common.ValidatorInfoTopic) + marshalledValidatorInfoTxs := vic.getMarshalledValidatorInfoTxs(miniBlock) - require.Equal(t, 1, len(marshalledValidatorInfoTxs)) - 
require.Equal(t, 2, len(marshalledValidatorInfoTxs[common.ValidatorInfoTopic])) - assert.Equal(t, marshalledSVI1, marshalledValidatorInfoTxs[common.ValidatorInfoTopic][0]) - assert.Equal(t, marshalledSVI2, marshalledValidatorInfoTxs[common.ValidatorInfoTopic][1]) + require.Equal(t, 2, len(marshalledValidatorInfoTxs)) + assert.Equal(t, marshalledSVI1, marshalledValidatorInfoTxs[0]) + assert.Equal(t, marshalledSVI2, marshalledValidatorInfoTxs[1]) } func TestEpochValidatorInfoCreator_GetValidatorInfoTxsShouldWork(t *testing.T) { @@ -863,7 +861,7 @@ func TestEpochValidatorInfoCreator_GetValidatorInfoTxsShouldWork(t *testing.T) { } arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { - return &validatorInfoCacherMock.ValidatorInfoCacherMock{ + return &vics.ValidatorInfoCacherStub{ GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { if bytes.Equal(validatorInfoHash, []byte("a")) { return svi1, nil @@ -927,7 +925,7 @@ func TestEpochValidatorInfoCreator_SetMapShardValidatorInfoShouldWork(t *testing } arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { - return &validatorInfoCacherMock.ValidatorInfoCacherMock{ + return &vics.ValidatorInfoCacherStub{ GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { if bytes.Equal(validatorInfoHash, []byte("a")) { return svi1, nil @@ -977,7 +975,7 @@ func TestEpochValidatorInfoCreator_GetShardValidatorInfoShouldWork(t *testing.T) } arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { - return &validatorInfoCacherMock.ValidatorInfoCacherMock{ + return &vics.ValidatorInfoCacherStub{ GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { if bytes.Equal(validatorInfoHash, []byte("a")) { return svi, 
nil @@ -1005,7 +1003,7 @@ func TestEpochValidatorInfoCreator_GetShardValidatorInfoShouldWork(t *testing.T) } arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { - return &validatorInfoCacherMock.ValidatorInfoCacherMock{ + return &vics.ValidatorInfoCacherStub{ GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { if bytes.Equal(validatorInfoHash, []byte("a")) { return svi, nil @@ -1037,7 +1035,7 @@ func TestEpochValidatorInfoCreator_SaveValidatorInfoShouldWork(t *testing.T) { arguments.ValidatorInfoStorage = storer arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { - return &validatorInfoCacherMock.ValidatorInfoCacherMock{ + return &vics.ValidatorInfoCacherStub{ GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { if bytes.Equal(validatorInfoHash, []byte("a")) { return svi1, nil @@ -1078,7 +1076,7 @@ func TestEpochValidatorInfoCreator_SaveValidatorInfoShouldWork(t *testing.T) { assert.Nil(t, msvi3) } -func TestEpochValidatorInfoCreator_RemoveValidatorInfoFromStorageShouldWork(t *testing.T) { +func TestEpochValidatorInfoCreator_RemoveValidatorInfoShouldWork(t *testing.T) { t.Parallel() arguments := createMockEpochValidatorInfoCreatorsArguments() @@ -1117,7 +1115,7 @@ func TestEpochValidatorInfoCreator_RemoveValidatorInfoFromStorageShouldWork(t *t _ = storer.Put([]byte("c"), []byte("cc")) _ = storer.Put([]byte("d"), []byte("dd")) - vic.removeValidatorInfoFromStorage(body) + vic.removeValidatorInfo(body) msvi, err := storer.Get([]byte("a")) assert.NotNil(t, err) @@ -1143,7 +1141,7 @@ func TestEpochValidatorInfoCreator_RemoveValidatorInfoFromPoolShouldWork(t *test arguments := createMockEpochValidatorInfoCreatorsArguments() arguments.DataPool = &dataRetrieverMock.PoolsHolderStub{ CurrEpochValidatorInfoCalled: func() 
dataRetriever.ValidatorInfoCacher { - return &validatorInfoCacherMock.ValidatorInfoCacherMock{} + return &vics.ValidatorInfoCacherStub{} }, ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { return shardedDataCacheNotifierMock diff --git a/epochStart/shardchain/trigger_test.go b/epochStart/shardchain/trigger_test.go index 6e735be9169..86dd9b7a0ff 100644 --- a/epochStart/shardchain/trigger_test.go +++ b/epochStart/shardchain/trigger_test.go @@ -668,7 +668,7 @@ func TestTrigger_UpdateMissingValidatorsInfo(t *testing.T) { return testscommon.NewCacherStub() }, CurrEpochValidatorInfoCalled: func() dataRetriever.ValidatorInfoCacher { - return &validatorInfoCacherMock.ValidatorInfoCacherMock{} + return &vic.ValidatorInfoCacherStub{} }, ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { return &testscommon.ShardedDataStub{ diff --git a/process/block/preprocess/validatorInfoPreProcessor_test.go b/process/block/preprocess/validatorInfoPreProcessor_test.go index ddc4bbf745a..a52ed8bb04a 100644 --- a/process/block/preprocess/validatorInfoPreProcessor_test.go +++ b/process/block/preprocess/validatorInfoPreProcessor_test.go @@ -443,7 +443,6 @@ func TestNewValidatorInfoPreprocessor_RestoreValidatorsInfo(t *testing.T) { blockSizeComputation, tdp.ValidatorsInfo(), storer, - func(txHashes [][]byte) {}, &testscommon.EnableEpochsHandlerStub{}, ) @@ -487,7 +486,6 @@ func TestNewValidatorInfoPreprocessor_RestoreValidatorsInfo(t *testing.T) { blockSizeComputation, tdp.ValidatorsInfo(), storer, - func(txHashes [][]byte) {}, &testscommon.EnableEpochsHandlerStub{}, ) diff --git a/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go b/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go index 1be3caf595e..709e170ba8f 100644 --- a/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go +++ b/sharding/nodesCoordinator/indexHashedNodesCoordinator_test.go @@ -2392,7 +2392,7 @@ func 
TestIndexHashedNodesCoordinator_GetShardValidatorInfoData(t *testing.T) { arguments.EnableEpochsHandler = &mock.EnableEpochsHandlerMock{ RefactorPeersMiniBlocksEnableEpochField: 1, } - validatorInfoCacher := &validatorInfoCacherMock.ValidatorInfoCacherMock{ + arguments.ValidatorInfoCacher = &vic.ValidatorInfoCacherStub{ GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { if bytes.Equal(validatorInfoHash, txHash) { return svi, nil @@ -2403,7 +2403,7 @@ func TestIndexHashedNodesCoordinator_GetShardValidatorInfoData(t *testing.T) { ihnc, _ := NewIndexHashedNodesCoordinator(arguments) marshalledSVI, _ := arguments.Marshalizer.Marshal(svi) - shardValidatorInfo, _ := ihnc.getShardValidatorInfoData(marshalledSVI, validatorInfoCacher, 0) + shardValidatorInfo, _ := ihnc.getShardValidatorInfoData(marshalledSVI, 0) require.Equal(t, svi, shardValidatorInfo) }) @@ -2417,7 +2417,7 @@ func TestIndexHashedNodesCoordinator_GetShardValidatorInfoData(t *testing.T) { arguments.EnableEpochsHandler = &mock.EnableEpochsHandlerMock{ RefactorPeersMiniBlocksEnableEpochField: 0, } - validatorInfoCacher := &validatorInfoCacherMock.ValidatorInfoCacherMock{ + arguments.ValidatorInfoCacher = &vic.ValidatorInfoCacherStub{ GetValidatorInfoCalled: func(validatorInfoHash []byte) (*state.ShardValidatorInfo, error) { if bytes.Equal(validatorInfoHash, txHash) { return svi, nil @@ -2427,7 +2427,7 @@ func TestIndexHashedNodesCoordinator_GetShardValidatorInfoData(t *testing.T) { } ihnc, _ := NewIndexHashedNodesCoordinator(arguments) - shardValidatorInfo, _ := ihnc.getShardValidatorInfoData(txHash, validatorInfoCacher, 0) + shardValidatorInfo, _ := ihnc.getShardValidatorInfoData(txHash, 0) require.Equal(t, svi, shardValidatorInfo) }) } From 616e2c21781eba6158748843d93dec3d337a18d1 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Tue, 30 Aug 2022 11:53:53 +0300 Subject: [PATCH 67/70] * Fixed after review --- epochStart/metachain/validators_test.go | 228 
++++-------------- .../shardchain/peerMiniBlocksSyncer_test.go | 132 +++------- epochStart/shardchain/trigger_test.go | 2 +- process/block/metablock_test.go | 8 +- update/genesis/export_test.go | 174 +++++-------- update/sync/syncTransactions_test.go | 128 +++------- 6 files changed, 168 insertions(+), 504 deletions(-) diff --git a/epochStart/metachain/validators_test.go b/epochStart/metachain/validators_test.go index 2362d5ef50f..d705871caf5 100644 --- a/epochStart/metachain/validators_test.go +++ b/epochStart/metachain/validators_test.go @@ -612,21 +612,7 @@ func TestEpochValidatorInfoCreator_CreateMarshalledData(t *testing.T) { } vic, _ := NewValidatorInfoCreator(arguments) - body := &block.Body{ - MiniBlocks: []*block.MiniBlock{ - { - SenderShardID: 0, - ReceiverShardID: 1, - Type: block.TxBlock, - TxHashes: [][]byte{ - []byte("a"), - []byte("b"), - []byte("c"), - }, - }, - }, - } - + body := getBlockBody(0, 1, block.TxBlock) marshalledData := vic.CreateMarshalledData(body) assert.Nil(t, marshalledData) }) @@ -653,21 +639,7 @@ func TestEpochValidatorInfoCreator_CreateMarshalledData(t *testing.T) { } vic, _ := NewValidatorInfoCreator(arguments) - body := &block.Body{ - MiniBlocks: []*block.MiniBlock{ - { - SenderShardID: 0, - ReceiverShardID: 1, - Type: block.TxBlock, - TxHashes: [][]byte{ - []byte("a"), - []byte("b"), - []byte("c"), - }, - }, - }, - } - + body := getBlockBody(0, 1, block.TxBlock) marshalledData := vic.CreateMarshalledData(body) assert.Equal(t, make(map[string][][]byte), marshalledData) }) @@ -681,21 +653,7 @@ func TestEpochValidatorInfoCreator_CreateMarshalledData(t *testing.T) { } vic, _ := NewValidatorInfoCreator(arguments) - body := &block.Body{ - MiniBlocks: []*block.MiniBlock{ - { - SenderShardID: 0, - ReceiverShardID: 1, - Type: block.PeerBlock, - TxHashes: [][]byte{ - []byte("a"), - []byte("b"), - []byte("c"), - }, - }, - }, - } - + body := getBlockBody(0, 1, block.PeerBlock) marshalledData := vic.CreateMarshalledData(body) 
assert.Equal(t, make(map[string][][]byte), marshalledData) }) @@ -718,21 +676,7 @@ func TestEpochValidatorInfoCreator_CreateMarshalledData(t *testing.T) { } vic, _ := NewValidatorInfoCreator(arguments) - body := &block.Body{ - MiniBlocks: []*block.MiniBlock{ - { - SenderShardID: core.MetachainShardId, - ReceiverShardID: 0, - Type: block.PeerBlock, - TxHashes: [][]byte{ - []byte("a"), - []byte("b"), - []byte("c"), - }, - }, - }, - } - + body := getBlockBody(core.MetachainShardId, 0, block.PeerBlock) marshalledData := vic.CreateMarshalledData(body) assert.Equal(t, make(map[string][][]byte), marshalledData) }) @@ -774,21 +718,7 @@ func TestEpochValidatorInfoCreator_CreateMarshalledData(t *testing.T) { } vic, _ := NewValidatorInfoCreator(arguments) - body := &block.Body{ - MiniBlocks: []*block.MiniBlock{ - { - SenderShardID: core.MetachainShardId, - ReceiverShardID: 0, - Type: block.PeerBlock, - TxHashes: [][]byte{ - []byte("a"), - []byte("b"), - []byte("c"), - }, - }, - }, - } - + body := getBlockBody(core.MetachainShardId, 0, block.PeerBlock) marshalledData := vic.CreateMarshalledData(body) require.Equal(t, 1, len(marshalledData)) require.Equal(t, 3, len(marshalledData[common.ValidatorInfoTopic])) @@ -829,17 +759,7 @@ func TestEpochValidatorInfoCreator_SetMarshalledValidatorInfoTxsShouldWork(t *te } vic, _ := NewValidatorInfoCreator(arguments) - miniBlock := &block.MiniBlock{ - SenderShardID: core.MetachainShardId, - ReceiverShardID: 0, - Type: block.PeerBlock, - TxHashes: [][]byte{ - []byte("a"), - []byte("b"), - []byte("c"), - }, - } - + miniBlock := getMiniBlock(core.MetachainShardId, 0, block.PeerBlock) marshalledValidatorInfoTxs := vic.getMarshalledValidatorInfoTxs(miniBlock) require.Equal(t, 2, len(marshalledValidatorInfoTxs)) @@ -879,31 +799,9 @@ func TestEpochValidatorInfoCreator_GetValidatorInfoTxsShouldWork(t *testing.T) { } vic, _ := NewValidatorInfoCreator(arguments) - body := &block.Body{ - MiniBlocks: []*block.MiniBlock{ - { - SenderShardID: 
core.MetachainShardId, - ReceiverShardID: 0, - Type: block.TxBlock, - TxHashes: [][]byte{ - []byte("a"), - []byte("b"), - []byte("c"), - }, - }, - { - SenderShardID: core.MetachainShardId, - ReceiverShardID: 0, - Type: block.PeerBlock, - TxHashes: [][]byte{ - []byte("a"), - []byte("b"), - []byte("c"), - }, - }, - }, - } - + body := &block.Body{} + body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.TxBlock)) + body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) mapValidatorInfoTxs := vic.GetValidatorInfoTxs(body) require.Equal(t, 3, len(mapValidatorInfoTxs)) @@ -940,17 +838,7 @@ func TestEpochValidatorInfoCreator_SetMapShardValidatorInfoShouldWork(t *testing } vic, _ := NewValidatorInfoCreator(arguments) - miniBlock := &block.MiniBlock{ - SenderShardID: core.MetachainShardId, - ReceiverShardID: 0, - Type: block.TxBlock, - TxHashes: [][]byte{ - []byte("a"), - []byte("b"), - []byte("c"), - }, - } - + miniBlock := getMiniBlock(core.MetachainShardId, 0, block.TxBlock) mapShardValidatorInfo := make(map[string]*state.ShardValidatorInfo) vic.setMapShardValidatorInfo(miniBlock, mapShardValidatorInfo) @@ -1050,17 +938,7 @@ func TestEpochValidatorInfoCreator_SaveValidatorInfoShouldWork(t *testing.T) { } vic, _ := NewValidatorInfoCreator(arguments) - miniBlock := &block.MiniBlock{ - SenderShardID: core.MetachainShardId, - ReceiverShardID: 0, - Type: block.TxBlock, - TxHashes: [][]byte{ - []byte("a"), - []byte("b"), - []byte("c"), - }, - } - + miniBlock := getMiniBlock(core.MetachainShardId, 0, block.TxBlock) vic.saveValidatorInfo(miniBlock) msvi1, err := storer.Get([]byte("a")) @@ -1085,30 +963,9 @@ func TestEpochValidatorInfoCreator_RemoveValidatorInfoShouldWork(t *testing.T) { arguments.ValidatorInfoStorage = storer vic, _ := NewValidatorInfoCreator(arguments) - body := &block.Body{ - MiniBlocks: []*block.MiniBlock{ - { - SenderShardID: core.MetachainShardId, - ReceiverShardID: 0, - Type: 
block.TxBlock, - TxHashes: [][]byte{ - []byte("a"), - []byte("b"), - []byte("c"), - }, - }, - { - SenderShardID: core.MetachainShardId, - ReceiverShardID: 0, - Type: block.PeerBlock, - TxHashes: [][]byte{ - []byte("a"), - []byte("b"), - []byte("c"), - }, - }, - }, - } + body := &block.Body{} + body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.TxBlock)) + body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) _ = storer.Put([]byte("a"), []byte("aa")) _ = storer.Put([]byte("b"), []byte("bb")) @@ -1150,30 +1007,9 @@ func TestEpochValidatorInfoCreator_RemoveValidatorInfoFromPoolShouldWork(t *test vic, _ := NewValidatorInfoCreator(arguments) - body := &block.Body{ - MiniBlocks: []*block.MiniBlock{ - { - SenderShardID: core.MetachainShardId, - ReceiverShardID: 0, - Type: block.TxBlock, - TxHashes: [][]byte{ - []byte("a"), - []byte("b"), - []byte("c"), - }, - }, - { - SenderShardID: core.MetachainShardId, - ReceiverShardID: 0, - Type: block.PeerBlock, - TxHashes: [][]byte{ - []byte("a"), - []byte("b"), - []byte("c"), - }, - }, - }, - } + body := &block.Body{} + body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.TxBlock)) + body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) svi1 := &state.ShardValidatorInfo{PublicKey: []byte("aa")} svi2 := &state.ShardValidatorInfo{PublicKey: []byte("bb")} @@ -1203,3 +1039,33 @@ func TestEpochValidatorInfoCreator_RemoveValidatorInfoFromPoolShouldWork(t *test assert.True(t, found) assert.Equal(t, svi4, svi) } + +func getBlockBody(senderShardID, receiverShardID uint32, blockType block.Type) *block.Body { + return &block.Body{ + MiniBlocks: []*block.MiniBlock{ + { + SenderShardID: senderShardID, + ReceiverShardID: receiverShardID, + Type: blockType, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + }, + }, + } +} + +func getMiniBlock(senderShardID, 
receiverShardID uint32, blockType block.Type) *block.MiniBlock { + return &block.MiniBlock{ + SenderShardID: senderShardID, + ReceiverShardID: receiverShardID, + Type: blockType, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + } +} diff --git a/epochStart/shardchain/peerMiniBlocksSyncer_test.go b/epochStart/shardchain/peerMiniBlocksSyncer_test.go index c2197a48ff3..592b43a0e68 100644 --- a/epochStart/shardchain/peerMiniBlocksSyncer_test.go +++ b/epochStart/shardchain/peerMiniBlocksSyncer_test.go @@ -6,6 +6,7 @@ import ( "time" "github.com/ElrondNetwork/elrond-go-core/core" + "github.com/ElrondNetwork/elrond-go-core/core/atomic" "github.com/ElrondNetwork/elrond-go-core/core/check" "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go/epochStart" @@ -372,31 +373,9 @@ func TestValidatorInfoProcessor_SyncValidatorsInfo(t *testing.T) { } syncer, _ := NewPeerMiniBlockSyncer(args) - body := &block.Body{ - MiniBlocks: []*block.MiniBlock{ - { - SenderShardID: core.MetachainShardId, - ReceiverShardID: 0, - Type: block.TxBlock, - TxHashes: [][]byte{ - []byte("a"), - []byte("b"), - []byte("c"), - }, - }, - { - SenderShardID: core.MetachainShardId, - ReceiverShardID: 0, - Type: block.PeerBlock, - TxHashes: [][]byte{ - []byte("a"), - []byte("b"), - []byte("c"), - }, - }, - }, - } - + body := &block.Body{} + body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.TxBlock)) + body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) missingValidatorsInfoHashes, validatorsInfo, err := syncer.SyncValidatorsInfo(body) assert.Equal(t, 3, len(missingValidatorsInfoHashes)) @@ -428,31 +407,9 @@ func TestValidatorInfoProcessor_SyncValidatorsInfo(t *testing.T) { } syncer, _ := NewPeerMiniBlockSyncer(args) - body := &block.Body{ - MiniBlocks: []*block.MiniBlock{ - { - SenderShardID: core.MetachainShardId, - ReceiverShardID: 0, - Type: block.TxBlock, - 
TxHashes: [][]byte{ - []byte("a"), - []byte("b"), - []byte("c"), - }, - }, - { - SenderShardID: core.MetachainShardId, - ReceiverShardID: 0, - Type: block.PeerBlock, - TxHashes: [][]byte{ - []byte("a"), - []byte("b"), - []byte("c"), - }, - }, - }, - } - + body := &block.Body{} + body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.TxBlock)) + body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) missingValidatorsInfoHashes, validatorsInfo, err := syncer.SyncValidatorsInfo(body) assert.Nil(t, err) @@ -548,13 +505,13 @@ func TestValidatorInfoProcessor_ReceivedValidatorInfo(t *testing.T) { syncer.numMissingValidatorsInfo = 1 syncer.mutValidatorsInfoForBlock.Unlock() - var err error + wasWithTimeOut := atomic.Flag{} go func() { select { case <-syncer.chRcvAllValidatorsInfo: return case <-time.After(time.Second): - err = process.ErrTimeIsOut + wasWithTimeOut.SetValue(true) return } }() @@ -565,7 +522,7 @@ func TestValidatorInfoProcessor_ReceivedValidatorInfo(t *testing.T) { numMissingValidatorsInfo := syncer.numMissingValidatorsInfo syncer.mutValidatorsInfoForBlock.RUnlock() - assert.Nil(t, err) + assert.False(t, wasWithTimeOut.IsSet()) assert.Equal(t, uint32(0), numMissingValidatorsInfo) }) } @@ -587,31 +544,9 @@ func TestValidatorInfoProcessor_GetAllValidatorsInfoShouldWork(t *testing.T) { syncer.mapAllValidatorsInfo["c"] = svi3 syncer.mutValidatorsInfoForBlock.Unlock() - body := &block.Body{ - MiniBlocks: []*block.MiniBlock{ - { - SenderShardID: core.MetachainShardId, - ReceiverShardID: 0, - Type: block.TxBlock, - TxHashes: [][]byte{ - []byte("a"), - []byte("b"), - []byte("c"), - }, - }, - { - SenderShardID: core.MetachainShardId, - ReceiverShardID: 0, - Type: block.PeerBlock, - TxHashes: [][]byte{ - []byte("a"), - []byte("b"), - []byte("c"), - }, - }, - }, - } - + body := &block.Body{} + body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.TxBlock)) + 
body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) validatorsInfo := syncer.getAllValidatorsInfo(body) assert.Equal(t, 3, len(validatorsInfo)) @@ -641,31 +576,9 @@ func TestValidatorInfoProcessor_ComputeMissingValidatorsInfoShouldWork(t *testin syncer, _ := NewPeerMiniBlockSyncer(args) syncer.initValidatorsInfo() - body := &block.Body{ - MiniBlocks: []*block.MiniBlock{ - { - SenderShardID: core.MetachainShardId, - ReceiverShardID: 0, - Type: block.TxBlock, - TxHashes: [][]byte{ - []byte("a"), - []byte("b"), - []byte("c"), - }, - }, - { - SenderShardID: core.MetachainShardId, - ReceiverShardID: 0, - Type: block.PeerBlock, - TxHashes: [][]byte{ - []byte("a"), - []byte("b"), - []byte("c"), - }, - }, - }, - } - + body := &block.Body{} + body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.TxBlock)) + body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) syncer.computeMissingValidatorsInfo(body) syncer.mutValidatorsInfoForBlock.RLock() @@ -800,3 +713,16 @@ func TestValidatorInfoProcessor_GetAllMissingValidatorsInfoHashesShouldWork(t *t require.Equal(t, 1, len(missingValidatorsInfoHashes)) assert.Equal(t, []byte("d"), missingValidatorsInfoHashes[0]) } + +func getMiniBlock(senderShardID, receiverShardID uint32, blockType block.Type) *block.MiniBlock { + return &block.MiniBlock{ + SenderShardID: senderShardID, + ReceiverShardID: receiverShardID, + Type: blockType, + TxHashes: [][]byte{ + []byte("a"), + []byte("b"), + []byte("c"), + }, + } +} diff --git a/epochStart/shardchain/trigger_test.go b/epochStart/shardchain/trigger_test.go index 86dd9b7a0ff..04f761071a4 100644 --- a/epochStart/shardchain/trigger_test.go +++ b/epochStart/shardchain/trigger_test.go @@ -630,7 +630,7 @@ func TestTrigger_ClearMissingValidatorsInfoMapShouldWork(t *testing.T) { epochStartTrigger.mutMissingValidatorsInfo.RLock() numMissingValidatorsInfo = 
len(epochStartTrigger.mapMissingValidatorsInfo) epochStartTrigger.mutMissingValidatorsInfo.RUnlock() - assert.Equal(t, 2, len(epochStartTrigger.mapMissingValidatorsInfo)) + assert.Equal(t, 2, numMissingValidatorsInfo) assert.Equal(t, uint32(1), epochStartTrigger.mapMissingValidatorsInfo["c"]) assert.Equal(t, uint32(1), epochStartTrigger.mapMissingValidatorsInfo["d"]) diff --git a/process/block/metablock_test.go b/process/block/metablock_test.go index 6df809e155a..2b683516ed7 100644 --- a/process/block/metablock_test.go +++ b/process/block/metablock_test.go @@ -3560,9 +3560,7 @@ func TestMetaProcessor_getAllMarshalledTxs(t *testing.T) { if miniBlock.Type != block.RewardsBlock { continue } - for _, txHash := range miniBlock.TxHashes { - marshalledData["rewards"] = append(marshalledData["rewards"], txHash) - } + marshalledData["rewards"] = append(marshalledData["rewards"], miniBlock.TxHashes...) } return marshalledData }, @@ -3575,9 +3573,7 @@ func TestMetaProcessor_getAllMarshalledTxs(t *testing.T) { if miniBlock.Type != block.PeerBlock { continue } - for _, txHash := range miniBlock.TxHashes { - marshalledData["validatorInfo"] = append(marshalledData["validatorInfo"], txHash) - } + marshalledData["validatorInfo"] = append(marshalledData["validatorInfo"], miniBlock.TxHashes...) 
} return marshalledData }, diff --git a/update/genesis/export_test.go b/update/genesis/export_test.go index 65a7b1d68f7..2f903fce291 100644 --- a/update/genesis/export_test.go +++ b/update/genesis/export_test.go @@ -260,18 +260,9 @@ func TestExportAll(t *testing.T) { }, } - args := ArgsNewStateExporter{ - ShardCoordinator: mock.NewOneShardCoordinatorMock(), - Marshalizer: &mock.MarshalizerMock{}, - StateSyncer: stateSyncer, - HardforkStorer: hs, - Hasher: &hashingMocks.HasherMock{}, - AddressPubKeyConverter: &mock.PubkeyConverterStub{}, - ValidatorPubKeyConverter: &mock.PubkeyConverterStub{}, - ExportFolder: "test", - GenesisNodesSetupHandler: &mock.GenesisNodesSetupHandlerStub{}, - } - + args := getDefaultStateExporterArgs() + args.StateSyncer = stateSyncer + args.HardforkStorer = hs stateExporter, _ := NewStateExporter(args) require.False(t, check.IfNil(stateExporter)) @@ -301,17 +292,11 @@ func TestStateExport_ExportTrieShouldExportNodesSetupJson(t *testing.T) { }, } - args := ArgsNewStateExporter{ - ShardCoordinator: mock.NewOneShardCoordinatorMock(), - Marshalizer: &mock.MarshalizerMock{}, - StateSyncer: &mock.StateSyncStub{}, - HardforkStorer: hs, - Hasher: &hashingMocks.HasherMock{}, - ExportFolder: testFolderName, - AddressPubKeyConverter: pubKeyConv, - ValidatorPubKeyConverter: pubKeyConv, - GenesisNodesSetupHandler: &mock.GenesisNodesSetupHandlerStub{}, - } + args := getDefaultStateExporterArgs() + args.HardforkStorer = hs + args.ExportFolder = testFolderName + args.AddressPubKeyConverter = pubKeyConv + args.ValidatorPubKeyConverter = pubKeyConv trie := &trieMock.TrieStub{ RootCalled: func() ([]byte, error) { @@ -357,17 +342,11 @@ func TestStateExport_ExportNodesSetupJsonShouldExportKeysInAlphabeticalOrder(t * }, } - args := ArgsNewStateExporter{ - ShardCoordinator: mock.NewOneShardCoordinatorMock(), - Marshalizer: &mock.MarshalizerMock{}, - StateSyncer: &mock.StateSyncStub{}, - HardforkStorer: hs, - Hasher: &hashingMocks.HasherMock{}, - ExportFolder: 
testFolderName, - AddressPubKeyConverter: pubKeyConv, - ValidatorPubKeyConverter: pubKeyConv, - GenesisNodesSetupHandler: &mock.GenesisNodesSetupHandlerStub{}, - } + args := getDefaultStateExporterArgs() + args.HardforkStorer = hs + args.ExportFolder = testFolderName + args.AddressPubKeyConverter = pubKeyConv + args.ValidatorPubKeyConverter = pubKeyConv stateExporter, err := NewStateExporter(args) require.NoError(t, err) @@ -428,17 +407,9 @@ func TestStateExport_ExportUnfinishedMetaBlocksShouldWork(t *testing.T) { }, } - args := ArgsNewStateExporter{ - ShardCoordinator: mock.NewOneShardCoordinatorMock(), - Marshalizer: &mock.MarshalizerMock{}, - StateSyncer: stateSyncer, - HardforkStorer: hs, - Hasher: &hashingMocks.HasherMock{}, - AddressPubKeyConverter: &mock.PubkeyConverterStub{}, - ValidatorPubKeyConverter: &mock.PubkeyConverterStub{}, - ExportFolder: "test", - GenesisNodesSetupHandler: &mock.GenesisNodesSetupHandlerStub{}, - } + args := getDefaultStateExporterArgs() + args.StateSyncer = stateSyncer + args.HardforkStorer = hs stateExporter, _ := NewStateExporter(args) require.False(t, check.IfNil(stateExporter)) @@ -456,20 +427,11 @@ func TestStateExport_ExportAllValidatorsInfo(t *testing.T) { t.Parallel() expectedStateSyncerErr := errors.New("state syncer error") - args := ArgsNewStateExporter{ - Marshalizer: &mock.MarshalizerMock{}, - ShardCoordinator: mock.NewOneShardCoordinatorMock(), - Hasher: &mock.HasherStub{}, - StateSyncer: &mock.StateSyncStub{ - GetAllValidatorsInfoCalled: func() (map[string]*state.ShardValidatorInfo, error) { - return nil, expectedStateSyncerErr - }, + args := getDefaultStateExporterArgs() + args.StateSyncer = &mock.StateSyncStub{ + GetAllValidatorsInfoCalled: func() (map[string]*state.ShardValidatorInfo, error) { + return nil, expectedStateSyncerErr }, - HardforkStorer: &mock.HardforkStorerStub{}, - AddressPubKeyConverter: &mock.PubkeyConverterStub{}, - ValidatorPubKeyConverter: &mock.PubkeyConverterStub{}, - 
GenesisNodesSetupHandler: &mock.GenesisNodesSetupHandlerStub{}, - ExportFolder: "test", } stateExporter, _ := NewStateExporter(args) @@ -481,29 +443,21 @@ func TestStateExport_ExportAllValidatorsInfo(t *testing.T) { t.Parallel() expectedHardforkStorerErr := errors.New("hardfork storer error") - args := ArgsNewStateExporter{ - Marshalizer: &mock.MarshalizerMock{}, - ShardCoordinator: mock.NewOneShardCoordinatorMock(), - Hasher: &mock.HasherStub{}, - StateSyncer: &mock.StateSyncStub{ - GetAllValidatorsInfoCalled: func() (map[string]*state.ShardValidatorInfo, error) { - mapShardValidatorInfo := make(map[string]*state.ShardValidatorInfo) - shardValidatorInfo := &state.ShardValidatorInfo{ - PublicKey: []byte("x"), - } - mapShardValidatorInfo["key"] = shardValidatorInfo - return mapShardValidatorInfo, nil - }, + args := getDefaultStateExporterArgs() + args.StateSyncer = &mock.StateSyncStub{ + GetAllValidatorsInfoCalled: func() (map[string]*state.ShardValidatorInfo, error) { + mapShardValidatorInfo := make(map[string]*state.ShardValidatorInfo) + shardValidatorInfo := &state.ShardValidatorInfo{ + PublicKey: []byte("x"), + } + mapShardValidatorInfo["key"] = shardValidatorInfo + return mapShardValidatorInfo, nil }, - HardforkStorer: &mock.HardforkStorerStub{ - WriteCalled: func(identifier string, key []byte, value []byte) error { - return expectedHardforkStorerErr - }, + } + args.HardforkStorer = &mock.HardforkStorerStub{ + WriteCalled: func(identifier string, key []byte, value []byte) error { + return expectedHardforkStorerErr }, - AddressPubKeyConverter: &mock.PubkeyConverterStub{}, - ValidatorPubKeyConverter: &mock.PubkeyConverterStub{}, - GenesisNodesSetupHandler: &mock.GenesisNodesSetupHandlerStub{}, - ExportFolder: "test", } stateExporter, _ := NewStateExporter(args) @@ -515,21 +469,12 @@ func TestStateExport_ExportAllValidatorsInfo(t *testing.T) { t.Parallel() finishedIdentifierWasCalled := false - args := ArgsNewStateExporter{ - Marshalizer: &mock.MarshalizerMock{}, 
- ShardCoordinator: mock.NewOneShardCoordinatorMock(), - Hasher: &mock.HasherStub{}, - StateSyncer: &mock.StateSyncStub{}, - HardforkStorer: &mock.HardforkStorerStub{ - FinishedIdentifierCalled: func(identifier string) error { - finishedIdentifierWasCalled = true - return nil - }, + args := getDefaultStateExporterArgs() + args.HardforkStorer = &mock.HardforkStorerStub{ + FinishedIdentifierCalled: func(identifier string) error { + finishedIdentifierWasCalled = true + return nil }, - AddressPubKeyConverter: &mock.PubkeyConverterStub{}, - ValidatorPubKeyConverter: &mock.PubkeyConverterStub{}, - GenesisNodesSetupHandler: &mock.GenesisNodesSetupHandlerStub{}, - ExportFolder: "test", } stateExporter, _ := NewStateExporter(args) @@ -546,20 +491,11 @@ func TestStateExport_ExportValidatorInfo(t *testing.T) { t.Parallel() expectedErr := errors.New("error") - args := ArgsNewStateExporter{ - Marshalizer: &mock.MarshalizerMock{}, - ShardCoordinator: mock.NewOneShardCoordinatorMock(), - Hasher: &mock.HasherStub{}, - StateSyncer: &mock.StateSyncStub{}, - HardforkStorer: &mock.HardforkStorerStub{ - WriteCalled: func(identifier string, key []byte, value []byte) error { - return expectedErr - }, + args := getDefaultStateExporterArgs() + args.HardforkStorer = &mock.HardforkStorerStub{ + WriteCalled: func(identifier string, key []byte, value []byte) error { + return expectedErr }, - AddressPubKeyConverter: &mock.PubkeyConverterStub{}, - ValidatorPubKeyConverter: &mock.PubkeyConverterStub{}, - GenesisNodesSetupHandler: &mock.GenesisNodesSetupHandlerStub{}, - ExportFolder: "test", } stateExporter, _ := NewStateExporter(args) @@ -575,17 +511,7 @@ func TestStateExport_ExportValidatorInfo(t *testing.T) { t.Run("export validator info without error", func(t *testing.T) { t.Parallel() - args := ArgsNewStateExporter{ - Marshalizer: &mock.MarshalizerMock{}, - ShardCoordinator: mock.NewOneShardCoordinatorMock(), - Hasher: &mock.HasherStub{}, - StateSyncer: &mock.StateSyncStub{}, - 
HardforkStorer: &mock.HardforkStorerStub{}, - AddressPubKeyConverter: &mock.PubkeyConverterStub{}, - ValidatorPubKeyConverter: &mock.PubkeyConverterStub{}, - GenesisNodesSetupHandler: &mock.GenesisNodesSetupHandlerStub{}, - ExportFolder: "test", - } + args := getDefaultStateExporterArgs() stateExporter, _ := NewStateExporter(args) key := "key" @@ -597,3 +523,17 @@ func TestStateExport_ExportValidatorInfo(t *testing.T) { assert.Nil(t, err) }) } + +func getDefaultStateExporterArgs() ArgsNewStateExporter { + return ArgsNewStateExporter{ + ShardCoordinator: mock.NewOneShardCoordinatorMock(), + Marshalizer: &mock.MarshalizerMock{}, + StateSyncer: &mock.StateSyncStub{}, + HardforkStorer: &mock.HardforkStorerStub{}, + Hasher: &hashingMocks.HasherMock{}, + AddressPubKeyConverter: &mock.PubkeyConverterStub{}, + ValidatorPubKeyConverter: &mock.PubkeyConverterStub{}, + ExportFolder: "test", + GenesisNodesSetupHandler: &mock.GenesisNodesSetupHandlerStub{}, + } +} diff --git a/update/sync/syncTransactions_test.go b/update/sync/syncTransactions_test.go index 36a08acb188..6adbe65ac71 100644 --- a/update/sync/syncTransactions_test.go +++ b/update/sync/syncTransactions_test.go @@ -11,6 +11,7 @@ import ( "testing" "time" + "github.com/ElrondNetwork/elrond-go-core/core/atomic" "github.com/ElrondNetwork/elrond-go-core/core/check" "github.com/ElrondNetwork/elrond-go-core/data/block" dataTransaction "github.com/ElrondNetwork/elrond-go-core/data/transaction" @@ -357,22 +358,7 @@ func TestTransactionsSync_RequestTransactionsForPeerMiniBlockShouldWork(t *testi } args := createMockArgs() - args.DataPools = &dataRetrieverMock.PoolsHolderStub{ - ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { - return &testscommon.ShardedDataStub{ - ShardDataStoreCalled: func(cacheID string) storage.Cacher { - return &testscommon.CacherStub{ - PeekCalled: func(key []byte) (value interface{}, ok bool) { - if bytes.Equal(key, []byte("b")) { - return svi2, true - } - return nil, false - }, 
- } - }, - } - }, - } + args.DataPools = getDataPoolsWithShardValidatorInfoAndTxHash(svi2, []byte("b")) transactionsSyncer, _ := NewTransactionsSyncer(args) miniBlock := &block.MiniBlock{ @@ -436,11 +422,11 @@ func TestTransactionsSync_ReceivedValidatorInfo(t *testing.T) { assert.Equal(t, 0, len(transactionsSyncer.mapValidatorsInfo)) transactionsSyncer.mutPendingTx.Unlock() - wasReceivedAll := false + wasReceivedAll := atomic.Flag{} go func() { select { case <-transactionsSyncer.chReceivedAll: - wasReceivedAll = true + wasReceivedAll.SetValue(true) return case <-time.After(time.Second): } @@ -451,7 +437,7 @@ func TestTransactionsSync_ReceivedValidatorInfo(t *testing.T) { transactionsSyncer.mutPendingTx.Lock() assert.Equal(t, 1, len(transactionsSyncer.mapValidatorsInfo)) transactionsSyncer.mutPendingTx.Unlock() - assert.True(t, wasReceivedAll) + assert.True(t, wasReceivedAll.IsSet()) } func TestTransactionsSync_GetValidatorInfoFromPoolShouldWork(t *testing.T) { @@ -509,19 +495,7 @@ func TestTransactionsSync_GetValidatorInfoFromPoolShouldWork(t *testing.T) { txHash := []byte("hash") args := createMockArgs() - args.DataPools = &dataRetrieverMock.PoolsHolderStub{ - ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { - return &testscommon.ShardedDataStub{ - ShardDataStoreCalled: func(cacheID string) storage.Cacher { - return &testscommon.CacherStub{ - PeekCalled: func(key []byte) (value interface{}, ok bool) { - return nil, false - }, - } - }, - } - }, - } + args.DataPools = getDataPoolsWithShardValidatorInfoAndTxHash(nil, nil) transactionsSyncer, _ := NewTransactionsSyncer(args) miniBlock := &block.MiniBlock{ @@ -589,22 +563,7 @@ func TestTransactionsSync_GetValidatorInfoFromPoolShouldWork(t *testing.T) { } args := createMockArgs() - args.DataPools = &dataRetrieverMock.PoolsHolderStub{ - ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { - return &testscommon.ShardedDataStub{ - ShardDataStoreCalled: func(cacheID string) 
storage.Cacher { - return &testscommon.CacherStub{ - PeekCalled: func(key []byte) (value interface{}, ok bool) { - if bytes.Equal(key, txHash) { - return svi, true - } - return nil, false - }, - } - }, - } - }, - } + args.DataPools = getDataPoolsWithShardValidatorInfoAndTxHash(svi, txHash) transactionsSyncer, _ := NewTransactionsSyncer(args) miniBlock := &block.MiniBlock{ @@ -681,22 +640,7 @@ func TestTransactionsSync_GetValidatorInfoFromPoolOrStorage(t *testing.T) { } args := createMockArgs() - args.DataPools = &dataRetrieverMock.PoolsHolderStub{ - ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { - return &testscommon.ShardedDataStub{ - ShardDataStoreCalled: func(cacheID string) storage.Cacher { - return &testscommon.CacherStub{ - PeekCalled: func(key []byte) (value interface{}, ok bool) { - if bytes.Equal(key, txHash) { - return svi, true - } - return nil, false - }, - } - }, - } - }, - } + args.DataPools = getDataPoolsWithShardValidatorInfoAndTxHash(svi, txHash) transactionsSyncer, _ := NewTransactionsSyncer(args) miniBlock := &block.MiniBlock{ @@ -786,22 +730,7 @@ func TestTransactionsSync_GetValidatorInfoFromPoolOrStorage(t *testing.T) { }, nil }, } - args.DataPools = &dataRetrieverMock.PoolsHolderStub{ - ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { - return &testscommon.ShardedDataStub{ - ShardDataStoreCalled: func(cacheID string) storage.Cacher { - return &testscommon.CacherStub{ - PeekCalled: func(key []byte) (value interface{}, ok bool) { - return nil, false - }, - } - }, - SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { - return nil, false - }, - } - }, - } + args.DataPools = getDataPoolsWithShardValidatorInfoAndTxHash(nil, nil) transactionsSyncer, _ := NewTransactionsSyncer(args) miniBlock := &block.MiniBlock{ @@ -842,22 +771,7 @@ func TestTransactionsSync_GetValidatorInfoFromPoolOrStorage(t *testing.T) { }, nil }, } - args.DataPools = &dataRetrieverMock.PoolsHolderStub{ - 
ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { - return &testscommon.ShardedDataStub{ - ShardDataStoreCalled: func(cacheID string) storage.Cacher { - return &testscommon.CacherStub{ - PeekCalled: func(key []byte) (value interface{}, ok bool) { - return nil, false - }, - } - }, - SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { - return nil, false - }, - } - }, - } + args.DataPools = getDataPoolsWithShardValidatorInfoAndTxHash(nil, nil) transactionsSyncer, _ := NewTransactionsSyncer(args) miniBlock := &block.MiniBlock{ @@ -927,3 +841,25 @@ func TestTransactionsSync_ClearFieldsShouldWork(t *testing.T) { assert.Equal(t, 0, len(transactionsSyncer.mapTxsToMiniBlocks)) assert.Equal(t, 0, len(transactionsSyncer.mapValidatorsInfo)) } + +func getDataPoolsWithShardValidatorInfoAndTxHash(svi *state.ShardValidatorInfo, txHash []byte) dataRetriever.PoolsHolder { + return &dataRetrieverMock.PoolsHolderStub{ + ValidatorsInfoCalled: func() dataRetriever.ShardedDataCacherNotifier { + return &testscommon.ShardedDataStub{ + ShardDataStoreCalled: func(cacheID string) storage.Cacher { + return &testscommon.CacherStub{ + PeekCalled: func(key []byte) (value interface{}, ok bool) { + if bytes.Equal(key, txHash) { + return svi, true + } + return nil, false + }, + } + }, + SearchFirstDataCalled: func(key []byte) (value interface{}, ok bool) { + return nil, false + }, + } + }, + } +} From db2fbffce09797e024eb01713bf34fbbb3affe52 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Tue, 30 Aug 2022 12:02:57 +0300 Subject: [PATCH 68/70] * Refactored test methods name --- epochStart/metachain/validators_test.go | 32 +++++++++---------- .../shardchain/peerMiniBlocksSyncer_test.go | 18 +++++------ 2 files changed, 25 insertions(+), 25 deletions(-) diff --git a/epochStart/metachain/validators_test.go b/epochStart/metachain/validators_test.go index d705871caf5..99f6cf9dfb2 100644 --- a/epochStart/metachain/validators_test.go +++ 
b/epochStart/metachain/validators_test.go @@ -612,7 +612,7 @@ func TestEpochValidatorInfoCreator_CreateMarshalledData(t *testing.T) { } vic, _ := NewValidatorInfoCreator(arguments) - body := getBlockBody(0, 1, block.TxBlock) + body := createMockBlockBody(0, 1, block.TxBlock) marshalledData := vic.CreateMarshalledData(body) assert.Nil(t, marshalledData) }) @@ -639,7 +639,7 @@ func TestEpochValidatorInfoCreator_CreateMarshalledData(t *testing.T) { } vic, _ := NewValidatorInfoCreator(arguments) - body := getBlockBody(0, 1, block.TxBlock) + body := createMockBlockBody(0, 1, block.TxBlock) marshalledData := vic.CreateMarshalledData(body) assert.Equal(t, make(map[string][][]byte), marshalledData) }) @@ -653,7 +653,7 @@ func TestEpochValidatorInfoCreator_CreateMarshalledData(t *testing.T) { } vic, _ := NewValidatorInfoCreator(arguments) - body := getBlockBody(0, 1, block.PeerBlock) + body := createMockBlockBody(0, 1, block.PeerBlock) marshalledData := vic.CreateMarshalledData(body) assert.Equal(t, make(map[string][][]byte), marshalledData) }) @@ -676,7 +676,7 @@ func TestEpochValidatorInfoCreator_CreateMarshalledData(t *testing.T) { } vic, _ := NewValidatorInfoCreator(arguments) - body := getBlockBody(core.MetachainShardId, 0, block.PeerBlock) + body := createMockBlockBody(core.MetachainShardId, 0, block.PeerBlock) marshalledData := vic.CreateMarshalledData(body) assert.Equal(t, make(map[string][][]byte), marshalledData) }) @@ -718,7 +718,7 @@ func TestEpochValidatorInfoCreator_CreateMarshalledData(t *testing.T) { } vic, _ := NewValidatorInfoCreator(arguments) - body := getBlockBody(core.MetachainShardId, 0, block.PeerBlock) + body := createMockBlockBody(core.MetachainShardId, 0, block.PeerBlock) marshalledData := vic.CreateMarshalledData(body) require.Equal(t, 1, len(marshalledData)) require.Equal(t, 3, len(marshalledData[common.ValidatorInfoTopic])) @@ -759,7 +759,7 @@ func TestEpochValidatorInfoCreator_SetMarshalledValidatorInfoTxsShouldWork(t *te } vic, _ := 
NewValidatorInfoCreator(arguments) - miniBlock := getMiniBlock(core.MetachainShardId, 0, block.PeerBlock) + miniBlock := createMockMiniBlock(core.MetachainShardId, 0, block.PeerBlock) marshalledValidatorInfoTxs := vic.getMarshalledValidatorInfoTxs(miniBlock) require.Equal(t, 2, len(marshalledValidatorInfoTxs)) @@ -800,8 +800,8 @@ func TestEpochValidatorInfoCreator_GetValidatorInfoTxsShouldWork(t *testing.T) { vic, _ := NewValidatorInfoCreator(arguments) body := &block.Body{} - body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.TxBlock)) - body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.TxBlock)) + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) mapValidatorInfoTxs := vic.GetValidatorInfoTxs(body) require.Equal(t, 3, len(mapValidatorInfoTxs)) @@ -838,7 +838,7 @@ func TestEpochValidatorInfoCreator_SetMapShardValidatorInfoShouldWork(t *testing } vic, _ := NewValidatorInfoCreator(arguments) - miniBlock := getMiniBlock(core.MetachainShardId, 0, block.TxBlock) + miniBlock := createMockMiniBlock(core.MetachainShardId, 0, block.TxBlock) mapShardValidatorInfo := make(map[string]*state.ShardValidatorInfo) vic.setMapShardValidatorInfo(miniBlock, mapShardValidatorInfo) @@ -938,7 +938,7 @@ func TestEpochValidatorInfoCreator_SaveValidatorInfoShouldWork(t *testing.T) { } vic, _ := NewValidatorInfoCreator(arguments) - miniBlock := getMiniBlock(core.MetachainShardId, 0, block.TxBlock) + miniBlock := createMockMiniBlock(core.MetachainShardId, 0, block.TxBlock) vic.saveValidatorInfo(miniBlock) msvi1, err := storer.Get([]byte("a")) @@ -964,8 +964,8 @@ func TestEpochValidatorInfoCreator_RemoveValidatorInfoShouldWork(t *testing.T) { vic, _ := NewValidatorInfoCreator(arguments) body := &block.Body{} - body.MiniBlocks = append(body.MiniBlocks, 
getMiniBlock(core.MetachainShardId, 0, block.TxBlock)) - body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.TxBlock)) + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) _ = storer.Put([]byte("a"), []byte("aa")) _ = storer.Put([]byte("b"), []byte("bb")) @@ -1008,8 +1008,8 @@ func TestEpochValidatorInfoCreator_RemoveValidatorInfoFromPoolShouldWork(t *test vic, _ := NewValidatorInfoCreator(arguments) body := &block.Body{} - body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.TxBlock)) - body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.TxBlock)) + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) svi1 := &state.ShardValidatorInfo{PublicKey: []byte("aa")} svi2 := &state.ShardValidatorInfo{PublicKey: []byte("bb")} @@ -1040,7 +1040,7 @@ func TestEpochValidatorInfoCreator_RemoveValidatorInfoFromPoolShouldWork(t *test assert.Equal(t, svi4, svi) } -func getBlockBody(senderShardID, receiverShardID uint32, blockType block.Type) *block.Body { +func createMockBlockBody(senderShardID, receiverShardID uint32, blockType block.Type) *block.Body { return &block.Body{ MiniBlocks: []*block.MiniBlock{ { @@ -1057,7 +1057,7 @@ func getBlockBody(senderShardID, receiverShardID uint32, blockType block.Type) * } } -func getMiniBlock(senderShardID, receiverShardID uint32, blockType block.Type) *block.MiniBlock { +func createMockMiniBlock(senderShardID, receiverShardID uint32, blockType block.Type) *block.MiniBlock { return &block.MiniBlock{ SenderShardID: senderShardID, ReceiverShardID: receiverShardID, diff --git 
a/epochStart/shardchain/peerMiniBlocksSyncer_test.go b/epochStart/shardchain/peerMiniBlocksSyncer_test.go index 592b43a0e68..57ebcd61291 100644 --- a/epochStart/shardchain/peerMiniBlocksSyncer_test.go +++ b/epochStart/shardchain/peerMiniBlocksSyncer_test.go @@ -374,8 +374,8 @@ func TestValidatorInfoProcessor_SyncValidatorsInfo(t *testing.T) { syncer, _ := NewPeerMiniBlockSyncer(args) body := &block.Body{} - body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.TxBlock)) - body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.TxBlock)) + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) missingValidatorsInfoHashes, validatorsInfo, err := syncer.SyncValidatorsInfo(body) assert.Equal(t, 3, len(missingValidatorsInfoHashes)) @@ -408,8 +408,8 @@ func TestValidatorInfoProcessor_SyncValidatorsInfo(t *testing.T) { syncer, _ := NewPeerMiniBlockSyncer(args) body := &block.Body{} - body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.TxBlock)) - body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.TxBlock)) + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) missingValidatorsInfoHashes, validatorsInfo, err := syncer.SyncValidatorsInfo(body) assert.Nil(t, err) @@ -545,8 +545,8 @@ func TestValidatorInfoProcessor_GetAllValidatorsInfoShouldWork(t *testing.T) { syncer.mutValidatorsInfoForBlock.Unlock() body := &block.Body{} - body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.TxBlock)) - body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) + 
body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.TxBlock)) + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) validatorsInfo := syncer.getAllValidatorsInfo(body) assert.Equal(t, 3, len(validatorsInfo)) @@ -577,8 +577,8 @@ func TestValidatorInfoProcessor_ComputeMissingValidatorsInfoShouldWork(t *testin syncer.initValidatorsInfo() body := &block.Body{} - body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.TxBlock)) - body.MiniBlocks = append(body.MiniBlocks, getMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.TxBlock)) + body.MiniBlocks = append(body.MiniBlocks, createMockMiniBlock(core.MetachainShardId, 0, block.PeerBlock)) syncer.computeMissingValidatorsInfo(body) syncer.mutValidatorsInfoForBlock.RLock() @@ -714,7 +714,7 @@ func TestValidatorInfoProcessor_GetAllMissingValidatorsInfoHashesShouldWork(t *t assert.Equal(t, []byte("d"), missingValidatorsInfoHashes[0]) } -func getMiniBlock(senderShardID, receiverShardID uint32, blockType block.Type) *block.MiniBlock { +func createMockMiniBlock(senderShardID, receiverShardID uint32, blockType block.Type) *block.MiniBlock { return &block.MiniBlock{ SenderShardID: senderShardID, ReceiverShardID: receiverShardID, From b31f222ed5d12627dcb20c97dd2eee884a2bb652 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Tue, 30 Aug 2022 14:33:50 +0300 Subject: [PATCH 69/70] * Added some nil checks in shardedDataStub.go --- epochStart/metachain/validators_test.go | 13 +--------- testscommon/shardedDataStub.go | 34 +++++++++++++++++++------ 2 files changed, 27 insertions(+), 20 deletions(-) diff --git a/epochStart/metachain/validators_test.go b/epochStart/metachain/validators_test.go index 99f6cf9dfb2..739f367b8ad 100644 --- a/epochStart/metachain/validators_test.go +++ 
b/epochStart/metachain/validators_test.go @@ -1042,18 +1042,7 @@ func TestEpochValidatorInfoCreator_RemoveValidatorInfoFromPoolShouldWork(t *test func createMockBlockBody(senderShardID, receiverShardID uint32, blockType block.Type) *block.Body { return &block.Body{ - MiniBlocks: []*block.MiniBlock{ - { - SenderShardID: senderShardID, - ReceiverShardID: receiverShardID, - Type: blockType, - TxHashes: [][]byte{ - []byte("a"), - []byte("b"), - []byte("c"), - }, - }, - }, + MiniBlocks: []*block.MiniBlock{createMockMiniBlock(senderShardID, receiverShardID, blockType)}, } } diff --git a/testscommon/shardedDataStub.go b/testscommon/shardedDataStub.go index 31713ef0d60..4a076aa0bd6 100644 --- a/testscommon/shardedDataStub.go +++ b/testscommon/shardedDataStub.go @@ -38,7 +38,10 @@ func (sd *ShardedDataStub) RegisterOnAdded(handler func(key []byte, value interf // ShardDataStore - func (sd *ShardedDataStub) ShardDataStore(cacheID string) storage.Cacher { - return sd.ShardDataStoreCalled(cacheID) + if sd.ShardDataStoreCalled != nil { + return sd.ShardDataStoreCalled(cacheID) + } + return nil } // AddData - @@ -50,37 +53,52 @@ func (sd *ShardedDataStub) AddData(key []byte, data interface{}, sizeInBytes int // SearchFirstData - func (sd *ShardedDataStub) SearchFirstData(key []byte) (value interface{}, ok bool) { - return sd.SearchFirstDataCalled(key) + if sd.SearchFirstDataCalled != nil { + return sd.SearchFirstDataCalled(key) + } + return nil, false } // RemoveData - func (sd *ShardedDataStub) RemoveData(key []byte, cacheID string) { - sd.RemoveDataCalled(key, cacheID) + if sd.RemoveDataCalled != nil { + sd.RemoveDataCalled(key, cacheID) + } } // RemoveDataFromAllShards - func (sd *ShardedDataStub) RemoveDataFromAllShards(key []byte) { - sd.RemoveDataFromAllShardsCalled(key) + if sd.RemoveDataFromAllShardsCalled != nil { + sd.RemoveDataFromAllShardsCalled(key) + } } // MergeShardStores - func (sd *ShardedDataStub) MergeShardStores(sourceCacheID, destCacheID string) { - 
sd.MergeShardStoresCalled(sourceCacheID, destCacheID) + if sd.MergeShardStoresCalled != nil { + sd.MergeShardStoresCalled(sourceCacheID, destCacheID) + } } // Clear - func (sd *ShardedDataStub) Clear() { - sd.ClearCalled() + if sd.ClearCalled != nil { + sd.ClearCalled() + } } // ClearShardStore - func (sd *ShardedDataStub) ClearShardStore(cacheID string) { - sd.ClearShardStoreCalled(cacheID) + if sd.ClearShardStoreCalled != nil { + sd.ClearShardStoreCalled(cacheID) + } } // RemoveSetOfDataFromPool - func (sd *ShardedDataStub) RemoveSetOfDataFromPool(keys [][]byte, cacheID string) { - sd.RemoveSetOfDataFromPoolCalled(keys, cacheID) + if sd.RemoveSetOfDataFromPoolCalled != nil { + sd.RemoveSetOfDataFromPoolCalled(keys, cacheID) + } } // ImmunizeSetOfDataAgainstEviction - From 39e1d702c5fabccc5bd2c04ba573a9279b1db8c5 Mon Sep 17 00:00:00 2001 From: SebastianMarian Date: Tue, 30 Aug 2022 16:32:20 +0300 Subject: [PATCH 70/70] * Fixed linter issue --- process/peer/constants.go | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/process/peer/constants.go b/process/peer/constants.go index af3619369bd..217c63d387f 100644 --- a/process/peer/constants.go +++ b/process/peer/constants.go @@ -1,12 +1,10 @@ package peer const ( - minSizeInBytes = 1 - maxSizeInBytes = 128 - interceptedValidatorInfoType = "intercepted validator info" - publicKeyProperty = "public key" - publicKeyPropertyRequiredBytesLen = 96 - listProperty = "list" - rewardAddressProperty = "reward address" - rewardAddressPropertyMaxPropertyBytesLen = 32 + minSizeInBytes = 1 + maxSizeInBytes = 128 + interceptedValidatorInfoType = "intercepted validator info" + publicKeyProperty = "public key" + publicKeyPropertyRequiredBytesLen = 96 + listProperty = "list" )