Cleanup TODOs (#933)
* wip TODO cleanup

* Finish sweep

* fmt
austinabell authored Jan 26, 2021
1 parent d7b9b39 commit 3d91af0
Showing 25 changed files with 69 additions and 76 deletions.
2 changes: 0 additions & 2 deletions blockchain/blocks/tests/ticket_test.rs
@@ -31,5 +31,3 @@ fn decode_ticket() {
let decoded_ticket: Ticket = from_slice(&TICKET).unwrap();
assert_eq!(ticket, decoded_ticket);
}

// TODO add EPoStProof serialize vector when we are sure it won't change again
19 changes: 10 additions & 9 deletions blockchain/chain/src/store/chain_store.rs
@@ -31,10 +31,13 @@ use num_traits::Zero;
use rayon::prelude::*;
use serde::Serialize;
use state_tree::StateTree;
use std::collections::{HashMap, HashSet, VecDeque};
use std::error::Error as StdError;
use std::io::Write;
use std::sync::Arc;
use std::{
collections::{HashMap, HashSet, VecDeque},
time::SystemTime,
};

const GENESIS_KEY: &str = "gen_block";
pub const HEAD_KEY: &str = "head";
@@ -148,12 +151,6 @@ where
(sub, ts)
}

/// Sets tip_index tracker
// TODO handle managing tipset in tracker
pub async fn set_tipset_tracker(&self, _: &BlockHeader) -> Result<(), Error> {
Ok(())
}

/// Writes genesis to blockstore
pub fn set_genesis(&self, header: &BlockHeader) -> Result<Cid, Error> {
set_genesis(self.blockstore(), header)
@@ -532,7 +529,7 @@ where
let write_task =
task::spawn(async move { header.write_stream_async(&mut writer, &mut rx).await });

// TODO add timer for export
let global_pre_time = SystemTime::now();
info!("chain export started");
Self::walk_snapshot(tipset, recent_roots, skip_old_msgs, |cid| {
let block = self
@@ -554,7 +551,11 @@ where
write_task
.await
.map_err(|e| Error::Other(format!("Failed to write blocks in export: {}", e)))?;
info!("export finished");

let time = SystemTime::now()
.duration_since(global_pre_time)
.expect("time cannot go backwards");
info!("export finished, took {} seconds", time.as_secs());

Ok(())
}
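The export timer above uses SystemTime, which forces an expect() in case the system clock steps backwards. For reference, a minimal sketch of the same measurement with the monotonic std::time::Instant (names are illustrative, not part of this commit):

use std::time::Instant;

fn timed_export(export: impl FnOnce() -> Result<(), String>) -> Result<(), String> {
    // Instant is monotonic, so elapsed() cannot fail the way duration_since can.
    let start = Instant::now();
    export()?;
    println!("export finished, took {} seconds", start.elapsed().as_secs());
    Ok(())
}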
2 changes: 1 addition & 1 deletion blockchain/chain/src/store/tipset_tracker.rs
@@ -14,7 +14,7 @@ use super::Error;
/// Tracks blocks by their height for the purpose of forming tipsets.
#[derive(Default)]
pub struct TipsetTracker<DB> {
// TODO: consider using the `dashmap` crate here
// TODO: look into optimizing https://github.com/ChainSafe/forest/issues/878
entries: RwLock<HashMap<ChainEpoch, Vec<Cid>>>,
db: Arc<DB>,
}
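The tracker above keeps an epoch-keyed map of block CIDs behind an RwLock; the linked issue (#878) is about optimizing that structure. A rough, self-contained sketch of the underlying bookkeeping, with stand-in types for ChainEpoch and Cid:

use std::collections::HashMap;

type Epoch = i64;       // stand-in for clock::ChainEpoch
type BlockCid = String; // stand-in for cid::Cid

#[derive(Default)]
struct HeightIndex {
    entries: HashMap<Epoch, Vec<BlockCid>>,
}

impl HeightIndex {
    /// Record a block at its epoch, ignoring duplicates.
    fn add(&mut self, epoch: Epoch, cid: BlockCid) {
        let cids = self.entries.entry(epoch).or_default();
        if !cids.contains(&cid) {
            cids.push(cid);
        }
    }

    /// Block CIDs seen at an epoch, the raw material for forming a tipset.
    fn blocks_at(&self, epoch: Epoch) -> &[BlockCid] {
        self.entries.get(&epoch).map(Vec::as_slice).unwrap_or(&[])
    }
}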
31 changes: 28 additions & 3 deletions blockchain/chain_sync/src/sync.rs
@@ -17,6 +17,7 @@ use beacon::{Beacon, BeaconSchedule};
use blocks::{Block, FullTipset, GossipBlock, Tipset, TipsetKeys, TxMeta};
use chain::ChainStore;
use cid::{Cid, Code::Blake2b256};
use clock::ChainEpoch;
use encoding::{Cbor, Error as EncodingError};
use fil_types::verifier::ProofVerifier;
use forest_libp2p::{hello::HelloRequest, NetworkEvent, NetworkMessage};
@@ -28,10 +29,16 @@ use libp2p::core::PeerId;
use log::{debug, error, info, trace, warn};
use message::{SignedMessage, UnsignedMessage};
use message_pool::{MessagePool, Provider};
use networks::BLOCK_DELAY_SECS;
use serde::Deserialize;
use state_manager::StateManager;
use std::marker::PhantomData;
use std::sync::Arc;
use std::{
marker::PhantomData,
time::{SystemTime, UNIX_EPOCH},
};

const MAX_HEIGHT_DRIFT: u64 = 5;

// TODO revisit this type, necessary for two sets of Arc<Mutex<>> because each state is
// on separate thread and needs to be mutated independently, but the vec needs to be read
@@ -308,7 +315,6 @@ where

/// Spawns a network handler and begins the syncing process.
pub async fn start(mut self, worker_tx: Sender<Arc<Tipset>>, worker_rx: Receiver<Arc<Tipset>>) {
// TODO benchmark (1 is temporary value to avoid overlap)
for _ in 0..self.sync_config.worker_tasks {
self.spawn_worker(worker_rx.clone()).await;
}
@@ -399,7 +405,12 @@ where
if ts.blocks().is_empty() {
return Err(Error::NoBlocks);
}
// TODO: Check if tipset has height that is too far ahead to be possible
if self.is_epoch_beyond_curr_max(ts.epoch()) {
error!("Received block with impossibly large height {}", ts.epoch());
return Err(Error::Validation(
"Block has impossibly large height".to_string(),
));
}

for block in ts.blocks() {
if let Some(bad) = self.bad_blocks.peek(block.cid()).await {
@@ -514,6 +525,20 @@ where
Ok(())
}

fn is_epoch_beyond_curr_max(&self, epoch: ChainEpoch) -> bool {
let genesis = if let Ok(Some(gen)) = self.state_manager.chain_store().genesis() {
gen
} else {
return false;
};
let now = SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
.as_secs();

epoch as u64 > ((now - genesis.timestamp()) / BLOCK_DELAY_SECS) + MAX_HEIGHT_DRIFT
}

/// Returns `FullTipset` from store if `TipsetKeys` exist in key-value store otherwise requests
/// `FullTipset` from block sync
async fn fetch_full_tipset(
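The new is_epoch_beyond_curr_max check rejects blocks whose epoch exceeds the number of block intervals that could have elapsed since genesis, plus MAX_HEIGHT_DRIFT. A standalone sketch of the bound with illustrative constants (the real values come from the networks crate; mainnet uses a 30-second block delay):

const BLOCK_DELAY: u64 = 30; // stand-in for networks::BLOCK_DELAY_SECS
const MAX_DRIFT: u64 = 5;    // mirrors MAX_HEIGHT_DRIFT above

fn beyond_current_max(epoch: u64, genesis_ts: u64, now: u64) -> bool {
    epoch > (now - genesis_ts) / BLOCK_DELAY + MAX_DRIFT
}

fn main() {
    // 3000 seconds after genesis at most 100 epochs fit, so 105 is the cutoff:
    assert!(!beyond_current_max(105, 0, 3000));
    assert!(beyond_current_max(106, 0, 3000));
}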
1 change: 0 additions & 1 deletion blockchain/chain_sync/src/sync_worker.rs
@@ -996,7 +996,6 @@ pub fn compute_msg_meta<DB: BlockStore>(
let secp_cids = cids_from_messages(secp_msgs)?;

// generate Amt and batch set message values
// TODO avoid having to clone all cids (from iter function on Amt)
let bls_root = Amt::new_from_slice(blockstore, &bls_cids)?;
let secp_root = Amt::new_from_slice(blockstore, &secp_cids)?;

3 changes: 1 addition & 2 deletions blockchain/message_pool/src/lib.rs
@@ -1,8 +1,7 @@
// Copyright 2020 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT

// TODO Remove public once Message Selection has been implemented
pub mod block_prob;
mod block_prob;
mod config;
mod errors;
mod msg_chain;
4 changes: 2 additions & 2 deletions blockchain/message_pool/src/msg_chain.rs
@@ -141,14 +141,14 @@ impl MsgChain {
.then_with(|| self_curr.gas_reward.cmp(&other_curr.gas_reward))
}

pub(crate) fn trim(&mut self, gas_limit: i64, base_fee: &BigInt, allow_negative: bool) {
pub(crate) fn trim(&mut self, gas_limit: i64, base_fee: &BigInt) {
let mut i = self.chain.len() as i64 - 1;
let prev = match self.prev() {
Some(prev) => Some((prev.eff_perf, prev.gas_limit)),
None => None,
};
let mut mc = self.curr_mut();
while i >= 0 && (mc.gas_limit > gas_limit || (!allow_negative && mc.gas_perf < 0.0)) {
while i >= 0 && (mc.gas_limit > gas_limit || (mc.gas_perf < 0.0)) {
let gas_reward = get_gas_reward(&mc.msgs[i as usize], base_fee);
mc.gas_reward -= gas_reward;
mc.gas_limit -= mc.msgs[i as usize].gas_limit();
27 changes: 7 additions & 20 deletions blockchain/message_pool/src/msgpool/mod.rs
@@ -25,16 +25,15 @@ use futures::{future::select, StreamExt};
use log::{error, warn};
use lru::LruCache;
use message::{ChainMessage, Message, SignedMessage, UnsignedMessage};
use networks::{BLOCK_DELAY_SECS, NEWEST_NETWORK_VERSION, UPGRADE_BREEZE_HEIGHT};
use networks::{BLOCK_DELAY_SECS, NEWEST_NETWORK_VERSION};
use num_bigint::{BigInt, Integer};
use num_rational::BigRational;
use num_traits::cast::ToPrimitive;
use state_manager::StateManager;
use state_tree::StateTree;
use std::borrow::BorrowMut;
use std::cmp::Ordering;
use std::collections::{HashMap, HashSet};
use std::time::Duration;
use std::{borrow::BorrowMut, cmp::Ordering};
use types::verifier::ProofVerifier;
use vm::ActorState;

@@ -47,13 +46,6 @@ const REPUB_MSG_LIMIT: usize = 30;
const PROPAGATION_DELAY_SECS: u64 = 6;
const REPUBLISH_INTERVAL: u64 = 10 * BLOCK_DELAY_SECS + PROPAGATION_DELAY_SECS;

// this is *temporary* mutilation until we have implemented uncapped miner penalties -- it will go
// away in the next fork.
// TODO: Get rid of this?
fn allow_negative_chains(epoch: i64) -> bool {
epoch < UPGRADE_BREEZE_HEIGHT + 5
}

/// Simple struct that contains a hashmap of messages where k: a message from address, v: a message
/// which corresponds to that address
#[derive(Clone, Default, Debug)]
@@ -319,7 +311,6 @@ where
let pending = mp.pending.clone();
let republished = mp.republished.clone();

// TODO: Check this
let cur_tipset = mp.cur_tipset.clone();
let repub_trigger = Arc::new(mp.repub_trigger.clone());

@@ -668,16 +659,14 @@ where

/// Return gas price estimate this has been translated from lotus, a more smart implementation will
/// most likely need to be implemented
// TODO: UPDATE
// TODO: UPDATE https://github.com/ChainSafe/forest/issues/901
pub fn estimate_gas_premium(
&self,
nblocksincl: u64,
_sender: Address,
_gas_limit: u64,
_tsk: TipsetKeys,
) -> Result<BigInt, Error> {
// TODO possibly come up with a smarter way to estimate the gas price
// TODO a smarter way exists now
let min_gas_price = 0;
match nblocksincl {
0 => Ok(BigInt::from(min_gas_price + 2)),
@@ -693,7 +682,6 @@ where
let msg_vec: Vec<SignedMessage> = local_msgs.iter().cloned().collect();

for k in msg_vec.into_iter() {
// TODO no need to clone message, if error, message could be returned in error
self.add(k.clone()).await.unwrap_or_else(|err| {
if err == Error::SequenceTooLow {
warn!("error adding message: {:?}", err);
@@ -939,7 +927,7 @@ where
// check the baseFee lower bound -- only republish messages that can be included in the chain
// within the next 20 blocks.
for m in chain.msgs.iter() {
if !allow_negative_chains(ts.epoch()) && m.gas_fee_cap() < &base_fee_lower_bound {
if m.gas_fee_cap() < &base_fee_lower_bound {
msg_chain.invalidate();
continue 'l;
}
@@ -949,7 +937,7 @@ where
i += 1;
continue;
}
msg_chain.trim(gas_limit, &base_fee, true);
msg_chain.trim(gas_limit, &base_fee);
let mut j = i;
while j < chains.len() - 1 {
if chains[j].compare(&chains[j + 1]) == Ordering::Less {
@@ -1168,9 +1156,8 @@ where

let mut msgs: Vec<SignedMessage> = Vec::new();
for block in ts.blocks() {
let (umsg, mut smsgs) = api.read().await.messages_for_block(&block)?;
msgs.append(smsgs.as_mut());
// TODO: Unsigned messages
let (umsg, smsgs) = api.read().await.messages_for_block(&block)?;
msgs.extend(smsgs);
for msg in umsg {
let mut bls_sig_cache = bls_sig_cache.write().await;
let smsg = recover_sig(&mut bls_sig_cache, msg).await?;
17 changes: 8 additions & 9 deletions blockchain/message_pool/src/msgpool/selection.rs
@@ -1,7 +1,7 @@
// Copyright 2020 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT

use super::{allow_negative_chains, create_message_chains, MessagePool, Provider};
use super::{create_message_chains, MessagePool, Provider};
use crate::msg_chain::MsgChain;
use crate::{run_head_change, Error};
use address::Address;
@@ -59,7 +59,7 @@ where
chains.extend(create_message_chains(&self.api, &actor, &mset, &base_fee, &ts).await?);
}

let (msgs, _) = merge_and_trim(chains, result, &base_fee, &cur_ts, gas_limit, min_gas)?;
let (msgs, _) = merge_and_trim(chains, result, &base_fee, gas_limit, min_gas)?;
Ok(msgs)
}

@@ -122,15 +122,14 @@ where
return Ok((Vec::new(), gas_limit));
}

merge_and_trim(chains, result, base_fee, ts, gas_limit, min_gas)
merge_and_trim(chains, result, base_fee, gas_limit, min_gas)
}
}
/// Returns merged and trimmed messages with the gas limit
fn merge_and_trim(
chains: Vec<MsgChain>,
result: Vec<SignedMessage>,
base_fee: &BigInt,
ts: &Tipset,
gas_limit: i64,
min_gas: i64,
) -> Result<(Vec<SignedMessage>, i64), Error> {
@@ -140,14 +139,14 @@ fn merge_and_trim(
// 2. Sort the chains
chains.sort_by(|a, b| b.compare(&a));

if !allow_negative_chains(ts.epoch()) && !chains.is_empty() && chains[0].curr().gas_perf < 0.0 {
if !chains.is_empty() && chains[0].curr().gas_perf < 0.0 {
return Ok((Vec::new(), gas_limit));
}

// 3. Merge chains until the block limit, as long as they have non-negative gas performance
let mut last = chains.len();
for (i, chain) in chains.iter().enumerate() {
if !allow_negative_chains(ts.epoch()) && chain.curr().gas_perf < 0.0 {
if chain.curr().gas_perf < 0.0 {
break;
}
if chain.curr().gas_limit <= gas_limit {
@@ -159,8 +158,8 @@
break;
}
'tail_loop: while gas_limit >= min_gas && last < chains.len() {
//trim, discard negative performing messages
chains[last].trim(gas_limit, base_fee, allow_negative_chains(ts.epoch()));
// trim, discard negative performing messages
chains[last].trim(gas_limit, base_fee);

// push down if it hasn't been invalidated
if chains[last].curr().valid {
@@ -179,7 +178,7 @@
}

// if gas_perf < 0 then we have no more profitable chains
if !allow_negative_chains(ts.epoch()) && chain.curr().gas_perf < 0.0 {
if chain.curr().gas_perf < 0.0 {
break 'tail_loop;
}

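With allow_negative_chains gone, selection always skips chains with negative gas performance. A simplified sketch of the greedy idea behind merge_and_trim, using stand-in types rather than Forest's MsgChain:

struct Chain {
    gas_perf: f64,
    gas_limit: i64,
}

fn select(mut chains: Vec<Chain>, mut gas_limit: i64) -> Vec<Chain> {
    // Highest gas performance first.
    chains.sort_by(|a, b| b.gas_perf.partial_cmp(&a.gas_perf).unwrap());

    let mut picked = Vec::new();
    for chain in chains {
        // Negative-performing chains are never worth including.
        if chain.gas_perf < 0.0 {
            break;
        }
        // Take a chain only if it still fits under the remaining block gas limit.
        if chain.gas_limit <= gas_limit {
            gas_limit -= chain.gas_limit;
            picked.push(chain);
        }
    }
    picked
}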
2 changes: 1 addition & 1 deletion forest/src/cli/fetch_params_cmd.rs
@@ -37,7 +37,7 @@ impl FetchCommands {

/// Converts a human readable string to a u64 size.
fn ram_to_int(size: &str) -> Result<SectorSize, String> {
// TODO there is no library to do this, but if other sector sizes are supported in future
// * there is no library to do this, but if other sector sizes are supported in future
// this should probably be changed to parse from string to `SectorSize`
let mut trimmed = size.trim_end_matches('B');
trimmed = trimmed.trim_end_matches('b');
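For context, ram_to_int strips a trailing B/b and matches the remainder against the known sector sizes. A hedged sketch of that shape, with the accepted sizes here being illustrative rather than the command's exact list:

fn parse_sector_size(size: &str) -> Result<u64, String> {
    // Drop a trailing byte-unit marker, e.g. "32GiB" -> "32Gi".
    let trimmed = size.trim().trim_end_matches(|c| c == 'B' || c == 'b');
    match trimmed {
        "2Ki" => Ok(2u64 << 10),
        "512Mi" => Ok(512u64 << 20),
        "32Gi" => Ok(32u64 << 30),
        "64Gi" => Ok(64u64 << 30),
        other => Err(format!("could not parse sector size from {:?}", other)),
    }
}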
1 change: 0 additions & 1 deletion forest/src/daemon.rs
@@ -113,7 +113,6 @@ pub(super) async fn start(config: Config) {
);

// Initialize ChainSyncer
// TODO allow for configuring validation strategy (defaulting to full validation)
let chain_syncer = ChainSyncer::<_, _, FullVerifier, _>::new(
Arc::clone(&state_manager),
beacon.clone(),
4 changes: 3 additions & 1 deletion key_management/src/wallet.rs
@@ -182,7 +182,9 @@ pub fn try_find<T: KeyStore>(addr: &Address, keystore: &mut T) -> Result<KeyInfo
Ok(k) => Ok(k),
Err(_) => {
let mut new_addr = addr.to_string();
// TODO: This needs to be handled better. See this issue: https://github.com/ChainSafe/forest/issues/867

// Try to replace prefix with testnet, for backwards compatibility
// * We might be able to remove this, look into variants
new_addr.replace_range(0..1, "t");
let key_string = format!("wallet-{}", new_addr);
let key_info = keystore.get(&key_string)?;
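The fallback above retries a failed keystore lookup with the address prefix rewritten to the testnet "t" form. A minimal sketch of just that rewrite, reusing the wallet- key naming shown in the diff:

fn fallback_key_name(addr: &str) -> String {
    // Addresses differ only in their leading network prefix ("f" mainnet, "t" testnet).
    let mut new_addr = addr.to_string();
    new_addr.replace_range(0..1, "t");
    format!("wallet-{}", new_addr)
}

// e.g. fallback_key_name("f1abc") == "wallet-t1abc"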
1 change: 1 addition & 0 deletions node/forest_libp2p/src/behaviour.rs
@@ -459,6 +459,7 @@ impl ForestBehaviour {
identify: Identify::new(
"ipfs/0.1.0".into(),
// TODO update to include actual version
// https://github.com/ChainSafe/forest/issues/934
format!("forest-{}", "0.1.0"),
local_key.public(),
),
1 change: 0 additions & 1 deletion node/forest_libp2p/src/chain_exchange/message.rs
@@ -201,7 +201,6 @@ fn fts_from_bundle_parts(
secp_msgs,
} = messages.ok_or("Tipset bundle did not contain message bundle")?;

// TODO: we may already want to check this on construction of the bundle
if headers.len() != bls_msg_includes.len() || headers.len() != secp_msg_includes.len() {
return Err(
format!("Invalid formed Tipset bundle, lengths of includes does not match blocks. Header len: {}, bls_msg len: {}, secp_msg len: {}", headers.len(), bls_msg_includes.len(), secp_msg_includes.len()),
3 changes: 2 additions & 1 deletion node/rpc/src/gas_api.rs
@@ -236,6 +236,7 @@ where
return Ok(-1);
}
// TODO: Figure out why we always under estimate the gas calculation so we dont need to add 200000
// https://github.com/ChainSafe/forest/issues/901
Ok(rct.gas_used + 200000)
}
None => Ok(-1),
@@ -284,6 +285,6 @@ where
let gfp = estimate_fee_cap(&data, msg.clone(), 20, tsk).await?;
msg.gas_fee_cap = gfp;
}
// TODO: Cap Gas Fee
// TODO: Cap Gas Fee https://github.com/ChainSafe/forest/issues/901
Ok(msg)
}