Remove unused incomplete_processing_components
dapplion committed Mar 17, 2024
1 parent dd8c0a5 commit f88cda7
Showing 3 changed files with 9 additions and 17 deletions.
8 changes: 0 additions & 8 deletions beacon_node/beacon_chain/src/data_availability_checker.rs
@@ -560,14 +560,6 @@ impl<T: BeaconChainTypes> DataAvailabilityChecker<T> {
         self.processing_cache.write().remove(block_root)
     }
 
-    /// Gather all block roots for which we are not currently processing all components for the
-    /// given slot.
-    pub fn incomplete_processing_components(&self, slot: Slot) -> Vec<Hash256> {
-        self.processing_cache
-            .read()
-            .incomplete_processing_components(slot)
-    }
-
     /// The epoch at which we require a data availability check in block processing.
     /// `None` if the `Deneb` fork is disabled.
     pub fn data_availability_boundary(&self) -> Option<Epoch> {
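The deleted method was a thin wrapper that took a read lock on the processing cache and delegated to it. No caller existed (hence the removal), but a hypothetical consumer would have looked roughly like the sketch below; the `SyncHandle` type and `retry_components` helper are invented for illustration and are not real Lighthouse items.

// Sketch of a hypothetical consumer of the removed API. No such caller
// existed in the codebase, which is why the method is being deleted.
fn retry_incomplete<T: BeaconChainTypes>(
    da_checker: &DataAvailabilityChecker<T>,
    sync: &mut SyncHandle, // invented type, for illustration only
    slot: Slot,
) {
    // Block roots at `slot` that still miss one or more components...
    for block_root in da_checker.incomplete_processing_components(slot) {
        // ...get their missing pieces re-requested from peers.
        sync.retry_components(block_root);
    }
}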
9 changes: 0 additions & 9 deletions beacon_node/beacon_chain/src/data_availability_checker/processing_cache.rs
@@ -30,15 +30,6 @@ impl<E: EthSpec> ProcessingCache<E> {
             .get(block_root)
             .map_or(false, |b| b.block_exists())
     }
-    pub fn incomplete_processing_components(&self, slot: Slot) -> Vec<Hash256> {
-        let mut roots_missing_components = vec![];
-        for (&block_root, info) in self.processing_cache.iter() {
-            if info.slot == slot && !info.is_available() {
-                roots_missing_components.push(block_root);
-            }
-        }
-        roots_missing_components
-    }
     pub fn len(&self) -> usize {
         self.processing_cache.len()
     }
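The removed loop walks every cache entry and collects the roots at the given slot whose components are not all available. For reference, an equivalent iterator-based form of the same body (a sketch, not committed code; it assumes `processing_cache` is a map from `Hash256` to an entry exposing `slot` and `is_available()`, as the diff shows):

// Equivalent iterator form of the removed method (sketch only).
pub fn incomplete_processing_components(&self, slot: Slot) -> Vec<Hash256> {
    self.processing_cache
        .iter()
        // Keep entries at `slot` whose block or blobs are still missing...
        .filter(|(_, info)| info.slot == slot && !info.is_available())
        // ...and collect their block roots.
        .map(|(&block_root, _)| block_root)
        .collect()
}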
9 changes: 9 additions & 0 deletions beacon_node/network/src/sync/block_lookups/single_block_lookup.rs
@@ -309,6 +309,15 @@ impl<L: Lookup, T: BeaconChainTypes> SingleBlockLookup<L, T> {
         if let Some(components) = self.child_components.as_ref() {
             self.da_checker.get_missing_blob_ids(block_root, components)
         } else {
+            // TODO(lion): This check is incomplete. The processing cache only reflects blobs
+            // that are starting to be processed (work event started) and are halfway through
+            // the `process_gossip_blob` routine. Is the complexity of the processing cache
+            // justified for the rare case where a block or blob is downloaded from multiple
+            // sources? Gossipsub de-duplicates double downloads. Block lookups already track
+            // the state of the block and blobs being downloaded. This feature seems useful
+            // only in the rare case that a block lookup is triggered while a gossip block is
+            // in the middle of being processed. If that is the use case, why is this
+            // processing-deduplication cache tied to the availability view?
             let Some(processing_availability_view) =
                 self.da_checker.get_processing_components(block_root)
             else {
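The new comment lands in the fallback branch of the lookup's missing-blob computation: when this lookup holds no downloaded child components, it consults the availability checker's processing view instead. A sketch of that control flow under the names visible in the hunk (the function name `missing_blob_ids`, its return type, and the tail after the final `else` are not shown here and are assumed):

// Control-flow sketch of the surrounding routine. Names outside the visible
// context lines are assumptions, not the actual Lighthouse code.
fn missing_blob_ids(&self, block_root: Hash256) -> MissingBlobs {
    if let Some(components) = self.child_components.as_ref() {
        // Prefer components this lookup has already downloaded itself.
        self.da_checker.get_missing_blob_ids(block_root, components)
    } else if let Some(view) = self.da_checker.get_processing_components(block_root) {
        // Otherwise fall back to the processing cache, which (per the TODO)
        // only reflects blobs partway through `process_gossip_blob`.
        self.da_checker.get_missing_blob_ids(block_root, &view)
    } else {
        // The remainder of the function is not shown in this hunk.
        todo!()
    }
}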
