Skip to content
This repository has been archived by the owner on Nov 15, 2023. It is now read-only.

Commit

Permalink
consensus: remove caching functionality from block import pipeline (#13551)
Browse files Browse the repository at this point in the history

* consensus: remove caching functionality from block import pipeline

* client: update docs on Verifier::verify

* node: fix block production benchmark
  • Loading branch information
andresilva authored Mar 7, 2023
1 parent 8d0b756 commit 8a4746e
Show file tree
Hide file tree
Showing 29 changed files with 103 additions and 214 deletions.
2 changes: 1 addition & 1 deletion bin/node/cli/benches/block_production.rs
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,7 @@ fn import_block(
params.state_action =
StateAction::ApplyChanges(sc_consensus::StorageChanges::Changes(built.storage_changes));
params.fork_choice = Some(ForkChoiceStrategy::LongestChain);
futures::executor::block_on(client.import_block(params, Default::default()))
futures::executor::block_on(client.import_block(params))
.expect("importing a block doesn't fail");
}

Expand Down
2 changes: 1 addition & 1 deletion bin/node/cli/src/service.rs
Original file line number Diff line number Diff line change
Expand Up @@ -760,7 +760,7 @@ mod tests {
);
params.fork_choice = Some(ForkChoiceStrategy::LongestChain);

futures::executor::block_on(block_import.import_block(params, Default::default()))
futures::executor::block_on(block_import.import_block(params))
.expect("error importing test block");
},
|service, _| {
Expand Down
6 changes: 2 additions & 4 deletions bin/node/testing/src/bench.rs
Original file line number Diff line number Diff line change
Expand Up @@ -682,10 +682,8 @@ impl BenchContext {
assert_eq!(self.client.chain_info().best_number, 0);

assert_eq!(
futures::executor::block_on(
self.client.import_block(import_params, Default::default())
)
.expect("Failed to import block"),
futures::executor::block_on(self.client.import_block(import_params))
.expect("Failed to import block"),
ImportResult::Imported(ImportedAux {
header_only: false,
clear_justification_requests: false,
Expand Down
14 changes: 5 additions & 9 deletions client/api/src/backend.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,12 +18,10 @@

//! Substrate Client data backend
use crate::{
blockchain::{well_known_cache_keys, Backend as BlockchainBackend},
UsageInfo,
};
use std::collections::HashSet;

use parking_lot::RwLock;
use sp_blockchain;

use sp_consensus::BlockOrigin;
use sp_core::offchain::OffchainStorage;
use sp_runtime::{
Expand All @@ -35,7 +33,8 @@ use sp_state_machine::{
OffchainChangesCollection, StorageCollection, StorageIterator,
};
use sp_storage::{ChildInfo, StorageData, StorageKey};
use std::collections::{HashMap, HashSet};

use crate::{blockchain::Backend as BlockchainBackend, UsageInfo};

pub use sp_state_machine::{Backend as StateBackend, KeyValueStates};

Expand Down Expand Up @@ -179,9 +178,6 @@ pub trait BlockImportOperation<Block: BlockT> {
state: NewBlockState,
) -> sp_blockchain::Result<()>;

/// Update cached data.
fn update_cache(&mut self, cache: HashMap<well_known_cache_keys::Id, Vec<u8>>);

/// Inject storage data into the database.
fn update_db_storage(
&mut self,
Expand Down
4 changes: 1 addition & 3 deletions client/api/src/in_mem.rs
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ use std::{

use crate::{
backend::{self, NewBlockState},
blockchain::{self, well_known_cache_keys::Id as CacheKeyId, BlockStatus, HeaderBackend},
blockchain::{self, BlockStatus, HeaderBackend},
leaves::LeafSet,
UsageInfo,
};
Expand Down Expand Up @@ -549,8 +549,6 @@ where
Ok(())
}

fn update_cache(&mut self, _cache: HashMap<CacheKeyId, Vec<u8>>) {}

fn update_db_storage(
&mut self,
update: <InMemoryBackend<HashFor<Block>> as StateBackend<HashFor<Block>>>::Transaction,
Expand Down
8 changes: 4 additions & 4 deletions client/consensus/aura/src/import_queue.rs
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ use sc_consensus_slots::{check_equivocation, CheckedHeader, InherentDataProvider
use sc_telemetry::{telemetry, TelemetryHandle, CONSENSUS_DEBUG, CONSENSUS_TRACE};
use sp_api::{ApiExt, ProvideRuntimeApi};
use sp_block_builder::BlockBuilder as BlockBuilderApi;
use sp_blockchain::{well_known_cache_keys::Id as CacheKeyId, HeaderBackend};
use sp_blockchain::HeaderBackend;
use sp_consensus::Error as ConsensusError;
use sp_consensus_aura::{digests::CompatibleDigestItem, inherents::AuraInherentData, AuraApi};
use sp_consensus_slots::Slot;
Expand Down Expand Up @@ -184,7 +184,7 @@ where
async fn verify(
&mut self,
mut block: BlockImportParams<B, ()>,
) -> Result<(BlockImportParams<B, ()>, Option<Vec<(CacheKeyId, Vec<u8>)>>), String> {
) -> Result<BlockImportParams<B, ()>, String> {
// Skip checks that include execution, if being told so or when importing only state.
//
// This is done for example when gap syncing and it is expected that the block after the gap
Expand All @@ -194,7 +194,7 @@ where
// When we are importing only the state of a block, it will be the best block.
block.fork_choice = Some(ForkChoiceStrategy::Custom(block.with_state()));

return Ok((block, Default::default()))
return Ok(block)
}

let hash = block.header.hash();
Expand Down Expand Up @@ -278,7 +278,7 @@ where
block.fork_choice = Some(ForkChoiceStrategy::LongestChain);
block.post_hash = Some(hash);

Ok((block, None))
Ok(block)
},
CheckedHeader::Deferred(a, b) => {
debug!(target: LOG_TARGET, "Checking {:?} failed; {:?}, {:?}.", hash, a, b);
Expand Down
25 changes: 9 additions & 16 deletions client/consensus/babe/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@
#![warn(missing_docs)]

use std::{
collections::{HashMap, HashSet},
collections::HashSet,
future::Future,
pin::Pin,
sync::Arc,
Expand Down Expand Up @@ -114,9 +114,7 @@ use sp_blockchain::{
Backend as _, BlockStatus, Error as ClientError, ForkBackend, HeaderBackend, HeaderMetadata,
Result as ClientResult,
};
use sp_consensus::{
BlockOrigin, CacheKeyId, Environment, Error as ConsensusError, Proposer, SelectChain,
};
use sp_consensus::{BlockOrigin, Environment, Error as ConsensusError, Proposer, SelectChain};
use sp_consensus_babe::inherents::BabeInherentData;
use sp_consensus_slots::Slot;
use sp_core::{crypto::ByteArray, ExecutionContext};
Expand Down Expand Up @@ -1131,9 +1129,6 @@ where
}
}

type BlockVerificationResult<Block> =
Result<(BlockImportParams<Block, ()>, Option<Vec<(CacheKeyId, Vec<u8>)>>), String>;

#[async_trait::async_trait]
impl<Block, Client, SelectChain, CIDP> Verifier<Block>
for BabeVerifier<Block, Client, SelectChain, CIDP>
Expand All @@ -1153,7 +1148,7 @@ where
async fn verify(
&mut self,
mut block: BlockImportParams<Block, ()>,
) -> BlockVerificationResult<Block> {
) -> Result<BlockImportParams<Block, ()>, String> {
trace!(
target: LOG_TARGET,
"Verifying origin: {:?} header: {:?} justification(s): {:?} body: {:?}",
Expand All @@ -1177,7 +1172,7 @@ where
// read it from the state after import. We also skip all verifications
// because there's no parent state and we trust the sync module to verify
// that the state is correct and finalized.
return Ok((block, Default::default()))
return Ok(block)
}

debug!(
Expand Down Expand Up @@ -1296,7 +1291,7 @@ where
);
block.post_hash = Some(hash);

Ok((block, Default::default()))
Ok(block)
},
CheckedHeader::Deferred(a, b) => {
debug!(target: LOG_TARGET, "Checking {:?} failed; {:?}, {:?}.", hash, a, b);
Expand Down Expand Up @@ -1368,7 +1363,6 @@ where
async fn import_state(
&mut self,
mut block: BlockImportParams<Block, sp_api::TransactionFor<Client, Block>>,
new_cache: HashMap<CacheKeyId, Vec<u8>>,
) -> Result<ImportResult, ConsensusError> {
let hash = block.post_hash();
let parent_hash = *block.header.parent_hash();
Expand All @@ -1383,7 +1377,7 @@ where
});

// First make the client import the state.
let import_result = self.inner.import_block(block, new_cache).await;
let import_result = self.inner.import_block(block).await;
let aux = match import_result {
Ok(ImportResult::Imported(aux)) => aux,
Ok(r) =>
Expand Down Expand Up @@ -1433,7 +1427,6 @@ where
async fn import_block(
&mut self,
mut block: BlockImportParams<Block, Self::Transaction>,
new_cache: HashMap<CacheKeyId, Vec<u8>>,
) -> Result<ImportResult, Self::Error> {
let hash = block.post_hash();
let number = *block.header.number();
Expand All @@ -1454,11 +1447,11 @@ where
// In case of initial sync intermediates should not be present...
let _ = block.remove_intermediate::<BabeIntermediate<Block>>(INTERMEDIATE_KEY);
block.fork_choice = Some(ForkChoiceStrategy::Custom(false));
return self.inner.import_block(block, new_cache).await.map_err(Into::into)
return self.inner.import_block(block).await.map_err(Into::into)
}

if block.with_state() {
return self.import_state(block, new_cache).await
return self.import_state(block).await
}

let pre_digest = find_pre_digest::<Block>(&block.header).expect(
Expand Down Expand Up @@ -1694,7 +1687,7 @@ where
epoch_changes.release_mutex()
};

let import_result = self.inner.import_block(block, new_cache).await;
let import_result = self.inner.import_block(block).await;

// revert to the original epoch changes in case there's an error
// importing the block
Expand Down
7 changes: 3 additions & 4 deletions client/consensus/babe/src/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -209,9 +209,8 @@ where
async fn import_block(
&mut self,
block: BlockImportParams<TestBlock, Self::Transaction>,
new_cache: HashMap<CacheKeyId, Vec<u8>>,
) -> Result<ImportResult, Self::Error> {
Ok(self.0.import_block(block, new_cache).await.expect("importing block failed"))
Ok(self.0.import_block(block).await.expect("importing block failed"))
}

async fn check_block(
Expand Down Expand Up @@ -258,7 +257,7 @@ impl Verifier<TestBlock> for TestVerifier {
async fn verify(
&mut self,
mut block: BlockImportParams<TestBlock, ()>,
) -> Result<(BlockImportParams<TestBlock, ()>, Option<Vec<(CacheKeyId, Vec<u8>)>>), String> {
) -> Result<BlockImportParams<TestBlock, ()>, String> {
// apply post-sealing mutations (i.e. stripping seal, if desired).
(self.mutator)(&mut block.header, Stage::PostSeal);
self.inner.verify(block).await
Expand Down Expand Up @@ -743,7 +742,7 @@ async fn propose_and_import_block<Transaction: Send + 'static>(
import
.insert_intermediate(INTERMEDIATE_KEY, BabeIntermediate::<TestBlock> { epoch_descriptor });
import.fork_choice = Some(ForkChoiceStrategy::LongestChain);
let import_result = block_import.import_block(import, Default::default()).await.unwrap();
let import_result = block_import.import_block(import).await.unwrap();

match import_result {
ImportResult::Imported(_) => {},
Expand Down
9 changes: 4 additions & 5 deletions client/consensus/beefy/src/import.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,13 +16,13 @@
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

use std::sync::Arc;

use log::debug;
use sp_consensus_beefy::{BeefyApi, BEEFY_ENGINE_ID};
use std::{collections::HashMap, sync::Arc};

use sp_api::{ProvideRuntimeApi, TransactionFor};
use sp_blockchain::well_known_cache_keys;
use sp_consensus::Error as ConsensusError;
use sp_consensus_beefy::{BeefyApi, BEEFY_ENGINE_ID};
use sp_runtime::{
traits::{Block as BlockT, Header as HeaderT, NumberFor},
EncodedJustification,
Expand Down Expand Up @@ -132,7 +132,6 @@ where
async fn import_block(
&mut self,
mut block: BlockImportParams<Block, Self::Transaction>,
new_cache: HashMap<well_known_cache_keys::Id, Vec<u8>>,
) -> Result<ImportResult, Self::Error> {
let hash = block.post_hash();
let number = *block.header.number();
Expand All @@ -146,7 +145,7 @@ where
});

// Run inner block import.
let inner_import_result = self.inner.import_block(block, new_cache).await?;
let inner_import_result = self.inner.import_block(block).await?;

match (beefy_encoded, &inner_import_result) {
(Some(encoded), ImportResult::Imported(_)) => {
Expand Down
15 changes: 6 additions & 9 deletions client/consensus/beefy/src/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ use sp_runtime::{
traits::{Header as HeaderT, NumberFor},
BuildStorage, DigestItem, EncodedJustification, Justifications, Storage,
};
use std::{collections::HashMap, marker::PhantomData, sync::Arc, task::Poll};
use std::{marker::PhantomData, sync::Arc, task::Poll};
use substrate_test_runtime_client::{runtime::Header, ClientExt};
use tokio::time::Duration;

Expand Down Expand Up @@ -766,14 +766,11 @@ async fn beefy_importing_justifications() {

// Import block 1 without justifications.
assert_eq!(
block_import
.import_block(params(block.clone(), None), HashMap::new())
.await
.unwrap(),
block_import.import_block(params(block.clone(), None)).await.unwrap(),
ImportResult::Imported(ImportedAux { is_new_best: true, ..Default::default() }),
);
assert_eq!(
block_import.import_block(params(block, None), HashMap::new()).await.unwrap(),
block_import.import_block(params(block, None)).await.unwrap(),
ImportResult::AlreadyInChain,
);

Expand All @@ -788,7 +785,7 @@ async fn beefy_importing_justifications() {
let encoded = versioned_proof.encode();
let justif = Some(Justifications::from((BEEFY_ENGINE_ID, encoded)));
assert_eq!(
block_import.import_block(params(block, justif), HashMap::new()).await.unwrap(),
block_import.import_block(params(block, justif)).await.unwrap(),
ImportResult::Imported(ImportedAux {
bad_justification: false,
is_new_best: true,
Expand Down Expand Up @@ -820,7 +817,7 @@ async fn beefy_importing_justifications() {
let justif = Some(Justifications::from((BEEFY_ENGINE_ID, encoded)));
let mut justif_recv = justif_stream.subscribe(100_000);
assert_eq!(
block_import.import_block(params(block, justif), HashMap::new()).await.unwrap(),
block_import.import_block(params(block, justif)).await.unwrap(),
ImportResult::Imported(ImportedAux {
bad_justification: false,
is_new_best: true,
Expand Down Expand Up @@ -856,7 +853,7 @@ async fn beefy_importing_justifications() {
let justif = Some(Justifications::from((BEEFY_ENGINE_ID, encoded)));
let mut justif_recv = justif_stream.subscribe(100_000);
assert_eq!(
block_import.import_block(params(block, justif), HashMap::new()).await.unwrap(),
block_import.import_block(params(block, justif)).await.unwrap(),
ImportResult::Imported(ImportedAux {
// Still `false` because we don't want to fail import on bad BEEFY justifications.
bad_justification: false,
Expand Down
13 changes: 3 additions & 10 deletions client/consensus/common/src/block_import.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ use sp_runtime::{
};
use std::{any::Any, borrow::Cow, collections::HashMap, sync::Arc};

use sp_consensus::{BlockOrigin, CacheKeyId, Error};
use sp_consensus::{BlockOrigin, Error};

/// Block import result.
#[derive(Debug, PartialEq, Eq)]
Expand Down Expand Up @@ -348,12 +348,9 @@ pub trait BlockImport<B: BlockT> {
) -> Result<ImportResult, Self::Error>;

/// Import a block.
///
/// Cached data can be accessed through the blockchain cache.
async fn import_block(
&mut self,
block: BlockImportParams<B, Self::Transaction>,
cache: HashMap<CacheKeyId, Vec<u8>>,
) -> Result<ImportResult, Self::Error>;
}

Expand All @@ -374,14 +371,11 @@ where
}

/// Import a block.
///
/// Cached data can be accessed through the blockchain cache.
async fn import_block(
&mut self,
block: BlockImportParams<B, Transaction>,
cache: HashMap<CacheKeyId, Vec<u8>>,
) -> Result<ImportResult, Self::Error> {
(**self).import_block(block, cache).await
(**self).import_block(block).await
}
}

Expand All @@ -405,9 +399,8 @@ where
async fn import_block(
&mut self,
block: BlockImportParams<B, Transaction>,
cache: HashMap<CacheKeyId, Vec<u8>>,
) -> Result<ImportResult, Self::Error> {
(&**self).import_block(block, cache).await
(&**self).import_block(block).await
}
}

Expand Down
Loading

0 comments on commit 8a4746e

Please sign in to comment.