diff --git a/clarity/src/vm/analysis/contract_interface_builder/mod.rs b/clarity/src/vm/analysis/contract_interface_builder/mod.rs
index c0ca59bde6..61327605d0 100644
--- a/clarity/src/vm/analysis/contract_interface_builder/mod.rs
+++ b/clarity/src/vm/analysis/contract_interface_builder/mod.rs
@@ -235,7 +235,7 @@ pub struct ContractInterfaceFunctionArg {
 }
 
 impl ContractInterfaceFunctionArg {
-    pub fn from_function_args(fnArgs: &Vec<FunctionArg>) -> Vec<ContractInterfaceFunctionArg> {
+    pub fn from_function_args(fnArgs: &[FunctionArg]) -> Vec<ContractInterfaceFunctionArg> {
         let mut args: Vec<ContractInterfaceFunctionArg> = Vec::new();
         for ref fnArg in fnArgs.iter() {
             args.push(ContractInterfaceFunctionArg {
diff --git a/clarity/src/vm/ast/parser/v1.rs b/clarity/src/vm/ast/parser/v1.rs
index 4400f4b2ae..f62d3e5938 100644
--- a/clarity/src/vm/ast/parser/v1.rs
+++ b/clarity/src/vm/ast/parser/v1.rs
@@ -491,7 +491,7 @@ enum ParseStackItem {
 }
 
 fn handle_expression(
-    parse_stack: &mut Vec<(Vec<PreSymbolicExpression>, u32, u32, ParseContext)>,
+    parse_stack: &mut [(Vec<PreSymbolicExpression>, u32, u32, ParseContext)],
     outputs: &mut Vec<PreSymbolicExpression>,
     expr: PreSymbolicExpression,
 ) {
diff --git a/clarity/src/vm/database/clarity_store.rs b/clarity/src/vm/database/clarity_store.rs
index d1132bd1fa..a6bf017ccc 100644
--- a/clarity/src/vm/database/clarity_store.rs
+++ b/clarity/src/vm/database/clarity_store.rs
@@ -51,7 +51,7 @@ pub type SpecialCaseHandler = &'static dyn Fn(
     // the invoked function name
     &str,
     // the function parameters
-    &Vec<Value>,
+    &[Value],
     // the result of the function call
     &Value,
 ) -> Result<()>;
diff --git a/clarity/src/vm/database/key_value_wrapper.rs b/clarity/src/vm/database/key_value_wrapper.rs
index f2a4cda0d0..bf26670fd4 100644
--- a/clarity/src/vm/database/key_value_wrapper.rs
+++ b/clarity/src/vm/database/key_value_wrapper.rs
@@ -34,10 +34,10 @@ type RollbackValueCheck = String;
 type RollbackValueCheck = ();
 
 #[cfg(not(rollback_value_check))]
-fn rollback_value_check(_value: &String, _check: &RollbackValueCheck) {}
+fn rollback_value_check(_value: &str, _check: &RollbackValueCheck) {}
 
 #[cfg(not(rollback_value_check))]
-fn rollback_edits_push<T>(edits: &mut Vec<(T, RollbackValueCheck)>, key: T, _value: &String) {
+fn rollback_edits_push<T>(edits: &mut Vec<(T, RollbackValueCheck)>, key: T, _value: &str) {
     edits.push((key, ()));
 }
 // this function is used to check the lookup map when committing at the "bottom" of the
diff --git a/src/burnchains/affirmation.rs b/src/burnchains/affirmation.rs
index 56612e36c1..2611d3b541 100644
--- a/src/burnchains/affirmation.rs
+++ b/src/burnchains/affirmation.rs
@@ -621,7 +621,7 @@ pub fn read_prepare_phase_commits(
 pub fn read_parent_block_commits<B: BurnchainHeaderReader>(
     burnchain_tx: &BurnchainDBTransaction,
     indexer: &B,
-    prepare_phase_ops: &Vec<Vec<LeaderBlockCommitOp>>,
+    prepare_phase_ops: &[Vec<LeaderBlockCommitOp>],
 ) -> Result<Vec<LeaderBlockCommitOp>, Error> {
     let mut parents = HashMap::new();
     for ops in prepare_phase_ops.iter() {
@@ -691,7 +691,7 @@ pub fn read_parent_block_commits(
 /// Given a list of prepare-phase block-commits, and a list of parent commits, filter out and remove
 /// the prepare-phase commits that _don't_ have a parent.
 pub fn filter_orphan_block_commits(
-    parents: &Vec<LeaderBlockCommitOp>,
+    parents: &[LeaderBlockCommitOp],
     prepare_phase_ops: Vec<Vec<LeaderBlockCommitOp>>,
 ) -> Vec<Vec<LeaderBlockCommitOp>> {
     let mut parent_set = HashSet::new();
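Note on the recurring change in the hunks above and throughout this diff: parameters typed `&Vec<T>` are relaxed to `&[T]`. The following is a minimal, self-contained sketch, with a toy `CommitOp` type rather than anything from this repository, of why the slice form is more general and why call sites that already pass `&some_vec` keep compiling unchanged:

```rust
// Toy type standing in for something like a block-commit operation.
struct CommitOp {
    vtxindex: u32,
}

// Before: fn count_commits(ops: &Vec<CommitOp>) -> usize
// After: the slice form accepts a Vec, a fixed-size array, or a sub-slice.
fn count_commits(ops: &[CommitOp]) -> usize {
    ops.len()
}

fn main() {
    let from_vec = vec![CommitOp { vtxindex: 0 }, CommitOp { vtxindex: 1 }];
    let from_array = [CommitOp { vtxindex: 7 }];

    // Existing callers that pass `&vec` still compile, because
    // `&Vec<CommitOp>` deref-coerces to `&[CommitOp]`.
    assert_eq!(count_commits(&from_vec), 2);

    // New flexibility: arrays and sub-slices work without allocating a Vec.
    assert_eq!(count_commits(&from_array), 1);
    assert_eq!(count_commits(&from_vec[..1]), 1);
    assert_eq!(from_array[0].vtxindex, 7);
}
```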
@@ -773,7 +773,7 @@ pub fn filter_missed_block_commits(
 /// exists at all.
 /// Returns None otherwise
 fn inner_find_heaviest_block_commit_ptr(
-    prepare_phase_ops: &Vec<Vec<LeaderBlockCommitOp>>,
+    prepare_phase_ops: &[Vec<LeaderBlockCommitOp>],
     anchor_threshold: u32,
 ) -> Option<(PoxAnchorPtr, BTreeMap<(u64, u32), (u64, u32)>)> {
     // sanity check -- must be in order by block height and vtxindex
diff --git a/src/burnchains/bitcoin/bits.rs b/src/burnchains/bitcoin/bits.rs
index 622e24d2c2..0c4f92a720 100644
--- a/src/burnchains/bitcoin/bits.rs
+++ b/src/burnchains/bitcoin/bits.rs
@@ -498,12 +498,12 @@ impl BitcoinTxInputRaw {
     pub fn from_bitcoin_witness_script_sig(
         script_sig: &Script,
-        witness: &Vec<Vec<u8>>,
+        witness: Vec<Vec<u8>>,
         input_txid: (Txid, u32),
     ) -> BitcoinTxInputRaw {
         BitcoinTxInputRaw {
             scriptSig: script_sig.clone().into_bytes(),
-            witness: witness.clone(),
+            witness: witness,
             tx_ref: input_txid,
         }
     }
@@ -1116,7 +1116,7 @@ mod tests {
         for script_fixture in tx_fixtures_strange_scriptsig {
             let tx_input = BitcoinTxInputRaw::from_bitcoin_witness_script_sig(
                 &script_fixture.script,
-                &vec![],
+                vec![],
                 (Txid([0; 32]), 0),
             );
             assert_eq!(Some(tx_input), script_fixture.result);
@@ -1283,7 +1283,7 @@ mod tests {
         for (i, txin) in tx.input.iter().enumerate() {
             let raw_in = BitcoinTxInputRaw::from_bitcoin_witness_script_sig(
                 &txin.script_sig,
-                &txin.witness,
+                txin.witness.clone(),
                 to_txid(&txin),
             );
             assert_eq!(raw_in, inputs[i]);
diff --git a/src/burnchains/bitcoin/mod.rs b/src/burnchains/bitcoin/mod.rs
index 830170aa22..1fb924e772 100644
--- a/src/burnchains/bitcoin/mod.rs
+++ b/src/burnchains/bitcoin/mod.rs
@@ -234,14 +234,14 @@ impl BitcoinBlock {
         height: u64,
         hash: &BurnchainHeaderHash,
         parent: &BurnchainHeaderHash,
-        txs: &Vec<BitcoinTransaction>,
+        txs: Vec<BitcoinTransaction>,
         timestamp: u64,
     ) -> BitcoinBlock {
         BitcoinBlock {
             block_height: height,
             block_hash: hash.clone(),
             parent_block_hash: parent.clone(),
-            txs: txs.clone(),
+            txs: txs,
             timestamp: timestamp,
         }
     }
diff --git a/src/burnchains/bitcoin/spv.rs b/src/burnchains/bitcoin/spv.rs
index fa22a445e1..4518dd3930 100644
--- a/src/burnchains/bitcoin/spv.rs
+++ b/src/burnchains/bitcoin/spv.rs
@@ -381,7 +381,7 @@ impl SpvClient {
     }
 
     /// Calculate the total work over a given interval of headers.
-    fn get_interval_work(interval_headers: &Vec<LoneBlockHeader>) -> Uint256 {
+    fn get_interval_work(interval_headers: &[LoneBlockHeader]) -> Uint256 {
         let mut work = Uint256::from_u64(0);
         for hdr in interval_headers.iter() {
             work = work + hdr.header.work();
diff --git a/src/burnchains/burnchain.rs b/src/burnchains/burnchain.rs
index eeb0b673da..dc0b75de61 100644
--- a/src/burnchains/burnchain.rs
+++ b/src/burnchains/burnchain.rs
@@ -118,7 +118,7 @@ impl BurnchainStateTransition {
         burnchain: &Burnchain,
         parent_snapshot: &BlockSnapshot,
         block_ops: &Vec<BlockstackOperationType>,
-        missed_commits: &Vec<MissedBlockCommit>,
+        missed_commits: &[MissedBlockCommit],
     ) -> Result<BurnchainStateTransition, burnchain_error> {
         // block commits and support burns discovered in this block.
         let mut block_commits: Vec<LeaderBlockCommitOp> = vec![];
diff --git a/src/burnchains/db.rs b/src/burnchains/db.rs
index 3c5c0c8db5..c9f817cb48 100644
--- a/src/burnchains/db.rs
+++ b/src/burnchains/db.rs
@@ -1376,7 +1376,7 @@ impl BurnchainDB {
         burnchain: &Burnchain,
         indexer: &B,
         block_header: &BurnchainBlockHeader,
-        blockstack_ops: &Vec<BlockstackOperationType>,
+        blockstack_ops: &[BlockstackOperationType],
     ) -> Result<(), BurnchainError> {
         let db_tx = self.tx_begin()?;
 
diff --git a/src/burnchains/tests/burnchain.rs b/src/burnchains/tests/burnchain.rs
index 605d5b222f..1b67ccac4d 100644
--- a/src/burnchains/tests/burnchain.rs
+++ b/src/burnchains/tests/burnchain.rs
@@ -613,21 +613,21 @@ fn test_process_block_ops() {
         121,
         &block_121_hash,
         &first_burn_hash,
-        &vec![],
+        vec![],
         121,
     ));
     let block122 = BurnchainBlock::Bitcoin(BitcoinBlock::new(
         122,
         &block_122_hash,
         &block_121_hash,
-        &vec![],
+        vec![],
         122,
     ));
     let block123 = BurnchainBlock::Bitcoin(BitcoinBlock::new(
         123,
         &block_123_hash,
         &block_122_hash,
-        &vec![],
+        vec![],
         123,
     ));
 
@@ -786,7 +786,7 @@ fn test_process_block_ops() {
         124,
         &block_124_hash,
         &block_123_hash,
-        &vec![],
+        vec![],
         124,
     ));
 
@@ -1026,7 +1026,7 @@ fn test_burn_snapshot_sequence() {
             first_block_height + (i + 1) as u64,
             &burn_block_hash,
             &parent_burn_block_hash,
-            &vec![],
+            vec![],
             get_epoch_time_secs(),
         ));
 
diff --git a/src/burnchains/tests/db.rs b/src/burnchains/tests/db.rs
index b5f283c603..f37040fd20 100644
--- a/src/burnchains/tests/db.rs
+++ b/src/burnchains/tests/db.rs
@@ -100,13 +100,8 @@ fn test_store_and_fetch() {
     let headers = vec![first_block_header.clone()];
     let canon_hash = BurnchainHeaderHash([1; 32]);
 
-    let canonical_block = BurnchainBlock::Bitcoin(BitcoinBlock::new(
-        500,
-        &canon_hash,
-        &first_bhh,
-        &vec![],
-        485,
-    ));
+    let canonical_block =
+        BurnchainBlock::Bitcoin(BitcoinBlock::new(500, &canon_hash, &first_bhh, vec![], 485));
     let ops = burnchain_db
         .store_new_burnchain_block(
             &burnchain,
@@ -148,7 +143,7 @@ fn test_store_and_fetch() {
         400,
         &non_canon_hash,
         &first_bhh,
-        &broadcast_ops,
+        broadcast_ops,
         350,
     ));
 
@@ -222,13 +217,8 @@ fn test_classify_stack_stx() {
     let mut headers = vec![first_block_header.clone()];
     let canon_hash = BurnchainHeaderHash([1; 32]);
 
-    let canonical_block = BurnchainBlock::Bitcoin(BitcoinBlock::new(
-        500,
-        &canon_hash,
-        &first_bhh,
-        &vec![],
-        485,
-    ));
+    let canonical_block =
+        BurnchainBlock::Bitcoin(BitcoinBlock::new(500, &canon_hash, &first_bhh, vec![], 485));
     let ops = burnchain_db
         .store_new_burnchain_block(
             &burnchain,
@@ -374,11 +364,12 @@ fn test_classify_stack_stx() {
     let block_height_1 = 502;
     let block_hash_1 = BurnchainHeaderHash([3; 32]);
 
+    let num_txs_ops_0: u64 = ops_0.len() as u64;
     let block_0 = BurnchainBlock::Bitcoin(BitcoinBlock::new(
         block_height_0,
         &block_hash_0,
         &first_bhh,
-        &ops_0,
+        ops_0,
         350,
     ));
 
@@ -386,15 +377,16 @@ fn test_classify_stack_stx() {
         block_height: first_block_header.block_height + 1,
         block_hash: block_hash_0.clone(),
         parent_block_hash: first_bhh.clone(),
-        num_txs: ops_0.len() as u64,
+        num_txs: num_txs_ops_0,
         timestamp: first_block_header.timestamp + 1,
     });
 
+    let num_txs_ops_1: u64 = ops_1.len() as u64;
     let block_1 = BurnchainBlock::Bitcoin(BitcoinBlock::new(
         block_height_1,
         &block_hash_1,
         &block_hash_0,
-        &ops_1,
+        ops_1,
         360,
     ));
 
@@ -402,7 +394,7 @@ fn test_classify_stack_stx() {
         block_height: first_block_header.block_height + 2,
         block_hash: block_hash_1.clone(),
         parent_block_hash: block_hash_0.clone(),
-        num_txs: ops_1.len() as u64,
+        num_txs: num_txs_ops_1,
         timestamp: first_block_header.timestamp + 2,
     });
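The test hunks above also hoist `ops_0.len()` / `ops_1.len()` into locals before the vectors are handed to `BitcoinBlock::new`, which now takes its transaction list by value. A small stand-alone sketch of that move-before-use constraint, using illustrative types only:

```rust
// Toy block type; the real constructor and fields differ.
struct Block {
    txs: Vec<u64>,
}

impl Block {
    // Takes ownership of `txs`, so the constructor never needs to clone.
    fn new(txs: Vec<u64>) -> Block {
        Block { txs }
    }
}

fn main() {
    let ops = vec![1u64, 2, 3];

    // Anything derived from `ops` must be computed before the move...
    let num_txs: u64 = ops.len() as u64;

    // ...because `ops` is consumed here; `ops.len()` below this line
    // would no longer compile.
    let block = Block::new(ops);

    assert_eq!(num_txs, 3);
    assert_eq!(block.txs.len() as u64, num_txs);
}
```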
@@ -1032,13 +1024,8 @@ fn test_classify_delegate_stx() {
     let canon_hash = BurnchainHeaderHash([1; 32]);
 
-    let canonical_block = BurnchainBlock::Bitcoin(BitcoinBlock::new(
-        500,
-        &canon_hash,
-        &first_bhh,
-        &vec![],
-        485,
-    ));
+    let canonical_block =
+        BurnchainBlock::Bitcoin(BitcoinBlock::new(500, &canon_hash, &first_bhh, vec![], 485));
     let mut headers = vec![first_block_header.clone(), canonical_block.header().clone()];
 
     let ops = burnchain_db
@@ -1214,11 +1201,13 @@ fn test_classify_delegate_stx() {
     let block_height_1 = 502;
     let block_hash_1 = BurnchainHeaderHash([3; 32]);
 
+    let ops_0_length = ops_0.len();
+    let ops_1_length = ops_1.len();
     let block_0 = BurnchainBlock::Bitcoin(BitcoinBlock::new(
         block_height_0,
         &block_hash_0,
         &first_bhh,
-        &ops_0,
+        ops_0,
         350,
     ));
 
@@ -1226,14 +1215,14 @@
         block_height_1,
         &block_hash_1,
         &block_hash_0,
-        &ops_1,
+        ops_1,
         360,
     ));
 
     headers.push(block_0.header().clone());
     headers.push(block_1.header().clone());
 
-    test_debug!("store ops ({}) for block 0", &ops_0.len());
+    test_debug!("store ops ({}) for block 0", ops_0_length);
     let processed_ops_0 = burnchain_db
         .store_new_burnchain_block(&burnchain, &headers, &block_0, StacksEpochId::Epoch21)
         .unwrap();
@@ -1244,7 +1233,7 @@
         "Only pre_delegate_stx op should have been accepted"
     );
 
-    test_debug!("store ops ({}) for block 1", &ops_1.len());
+    test_debug!("store ops ({}) for block 1", ops_1_length);
     let processed_ops_1 = burnchain_db
         .store_new_burnchain_block(&burnchain, &headers, &block_1, StacksEpochId::Epoch21)
         .unwrap();
diff --git a/src/burnchains/tests/mod.rs b/src/burnchains/tests/mod.rs
index b74670d302..80d9f78df5 100644
--- a/src/burnchains/tests/mod.rs
+++ b/src/burnchains/tests/mod.rs
@@ -509,7 +509,7 @@ impl TestBurnchainBlock {
             self.block_height,
             &block_hash,
             &self.parent_snapshot.burn_header_hash,
-            &vec![],
+            vec![],
             get_epoch_time_secs(),
         );
         let block = BurnchainBlock::Bitcoin(mock_bitcoin_block);
@@ -582,7 +582,7 @@ impl TestBurnchainBlock {
            self.block_height,
             &block_hash,
             &self.parent_snapshot.burn_header_hash,
-            &vec![],
+            vec![],
             now,
         );
         let block = BurnchainBlock::Bitcoin(mock_bitcoin_block);
diff --git a/src/chainstate/burn/db/sortdb.rs b/src/chainstate/burn/db/sortdb.rs
index c6ae61ddb8..6a4ec37678 100644
--- a/src/chainstate/burn/db/sortdb.rs
+++ b/src/chainstate/burn/db/sortdb.rs
@@ -5092,7 +5092,7 @@ impl<'a> SortitionHandleTx<'a> {
         &mut self,
         parent_snapshot: &mut BlockSnapshot,
         snapshot: &BlockSnapshot,
-        block_ops: &Vec<BlockstackOperationType>,
+        block_ops: &[BlockstackOperationType],
         next_pox_info: Option<RewardCycleInfo>,
         recipient_info: Option<&RewardSetInfo>,
         initialize_bonus: Option<InitialMiningBonus>,
@@ -5679,7 +5679,7 @@ impl<'a> SortitionHandleTx<'a> {
 }
 
 impl ChainstateDB for SortitionDB {
-    fn backup(_backup_path: &String) -> Result<(), db_error> {
+    fn backup(_backup_path: &str) -> Result<(), db_error> {
         return Err(db_error::NotImplemented);
     }
 }
diff --git a/src/chainstate/burn/mod.rs b/src/chainstate/burn/mod.rs
index 59953cbed6..b9b9317614 100644
--- a/src/chainstate/burn/mod.rs
+++ b/src/chainstate/burn/mod.rs
@@ -216,7 +216,7 @@ pub trait ConsensusHashExtensions {
         burn_header_hash: &BurnchainHeaderHash,
         opshash: &OpsHash,
         total_burn: u64,
-        prev_consensus_hashes: &Vec<ConsensusHash>,
+        prev_consensus_hashes: &[ConsensusHash],
         pox_id: &PoxId,
     ) -> ConsensusHash;
 
@@ -255,7 +255,7 @@ impl ConsensusHashExtensions for ConsensusHash {
         burn_header_hash: &BurnchainHeaderHash,
         opshash: &OpsHash,
         total_burn: u64,
-        prev_consensus_hashes: &Vec<ConsensusHash>,
+        prev_consensus_hashes: &[ConsensusHash],
         pox_id: &PoxId,
     ) -> ConsensusHash {
         // NOTE: unlike stacks v1, we calculate the next consensus hash
diff --git a/src/chainstate/coordinator/mod.rs b/src/chainstate/coordinator/mod.rs
index 2783908af6..322efe133f 100644
--- a/src/chainstate/coordinator/mod.rs
+++ b/src/chainstate/coordinator/mod.rs
@@ -142,10 +142,10 @@ pub trait BlockEventDispatcher {
         &self,
         block: &StacksBlock,
         metadata: &StacksHeaderInfo,
-        receipts: &Vec<StacksTransactionReceipt>,
+        receipts: &[StacksTransactionReceipt],
         parent: &StacksBlockId,
         winner_txid: Txid,
-        matured_rewards: &Vec<MinerReward>,
+        matured_rewards: &[MinerReward],
         matured_rewards_info: Option<&MinerRewardInfo>,
         parent_burn_block_hash: BurnchainHeaderHash,
         parent_burn_block_height: u32,
diff --git a/src/chainstate/coordinator/tests.rs b/src/chainstate/coordinator/tests.rs
index cb1b07e31a..aa3ee613e8 100644
--- a/src/chainstate/coordinator/tests.rs
+++ b/src/chainstate/coordinator/tests.rs
@@ -418,10 +418,10 @@ impl BlockEventDispatcher for NullEventDispatcher {
         &self,
         _block: &StacksBlock,
         _metadata: &StacksHeaderInfo,
-        _receipts: &Vec<StacksTransactionReceipt>,
+        _receipts: &[StacksTransactionReceipt],
         _parent: &StacksBlockId,
         _winner_txid: Txid,
-        _rewards: &Vec<MinerReward>,
+        _rewards: &[MinerReward],
         _rewards_info: Option<&MinerRewardInfo>,
         _parent_burn_block_hash: BurnchainHeaderHash,
         _parent_burn_block_height: u32,
diff --git a/src/chainstate/mod.rs b/src/chainstate/mod.rs
index dab58d8b62..8566160f80 100644
--- a/src/chainstate/mod.rs
+++ b/src/chainstate/mod.rs
@@ -19,7 +19,7 @@ use crate::util_lib::db;
 use crate::util_lib::db::Error as db_error;
 
 pub trait ChainstateDB {
-    fn backup(backup_path: &String) -> Result<(), db_error>;
+    fn backup(backup_path: &str) -> Result<(), db_error>;
 }
 
 // needs to come _after_ the macro def above, since they both use this macro
diff --git a/src/chainstate/stacks/auth.rs b/src/chainstate/stacks/auth.rs
index 0b50e4bdac..13304a44c8 100644
--- a/src/chainstate/stacks/auth.rs
+++ b/src/chainstate/stacks/auth.rs
@@ -912,7 +912,7 @@ impl TransactionAuth {
         }
     }
 
-    pub fn from_p2sh(privks: &Vec<StacksPrivateKey>, num_sigs: u16) -> Option<TransactionAuth> {
+    pub fn from_p2sh(privks: &[StacksPrivateKey], num_sigs: u16) -> Option<TransactionAuth> {
         let mut pubks = vec![];
         for privk in privks.iter() {
             pubks.push(StacksPublicKey::from_private(privk));
@@ -933,7 +933,7 @@ impl TransactionAuth {
         }
     }
 
-    pub fn from_p2wsh(privks: &Vec<StacksPrivateKey>, num_sigs: u16) -> Option<TransactionAuth> {
+    pub fn from_p2wsh(privks: &[StacksPrivateKey], num_sigs: u16) -> Option<TransactionAuth> {
         let mut pubks = vec![];
         for privk in privks.iter() {
             pubks.push(StacksPublicKey::from_private(privk));
diff --git a/src/chainstate/stacks/block.rs b/src/chainstate/stacks/block.rs
index f623073fc3..fcd769cb7a 100644
--- a/src/chainstate/stacks/block.rs
+++ b/src/chainstate/stacks/block.rs
@@ -440,7 +440,7 @@ impl StacksBlock {
     }
 
     /// verify no duplicate txids
-    pub fn validate_transactions_unique(txs: &Vec<StacksTransaction>) -> bool {
+    pub fn validate_transactions_unique(txs: &[StacksTransaction]) -> bool {
         // no duplicates
         let mut txids = HashMap::new();
         for (i, tx) in txs.iter().enumerate() {
@@ -515,7 +515,7 @@ impl StacksBlock {
     }
 
     /// verify that a coinbase is present and is on-chain only, or is absent
-    pub fn validate_coinbase(txs: &Vec<StacksTransaction>, check_present: bool) -> bool {
+    pub fn validate_coinbase(txs: &[StacksTransaction], check_present: bool) -> bool {
         let mut found_coinbase = false;
         let mut coinbase_index = 0;
         for (i, tx) in txs.iter().enumerate() {
diff --git a/src/chainstate/stacks/db/accounts.rs b/src/chainstate/stacks/db/accounts.rs
index 2c630af13e..1861b77333 100644
--- a/src/chainstate/stacks/db/accounts.rs
+++ b/src/chainstate/stacks/db/accounts.rs
@@ -555,7 +555,7 @@ impl StacksChainState {
     pub fn insert_miner_payment_schedule<'a>(
         tx: &mut DBTx<'a>,
         block_reward: &MinerPaymentSchedule,
-        user_burns: &Vec<StagingUserBurnSupport>,
+        user_burns: &[StagingUserBurnSupport],
     ) -> Result<(), Error> {
         assert!(block_reward.burnchain_commit_burn < i64::MAX as u64);
         assert!(block_reward.burnchain_sortition_burn < i64::MAX as u64);
@@ -970,7 +970,7 @@ impl StacksChainState {
         parent_block_epoch: StacksEpochId,
         participant: &MinerPaymentSchedule,
         miner: &MinerPaymentSchedule,
-        users: &Vec<MinerPaymentSchedule>,
+        users: &[MinerPaymentSchedule],
         parent: &MinerPaymentSchedule,
         poison_reporter_opt: Option<&StacksAddress>,
     ) -> (MinerReward, MinerReward) {
diff --git a/src/chainstate/stacks/db/blocks.rs b/src/chainstate/stacks/db/blocks.rs
index 7632da6bf8..bafeb503b8 100644
--- a/src/chainstate/stacks/db/blocks.rs
+++ b/src/chainstate/stacks/db/blocks.rs
@@ -193,10 +193,10 @@ impl BlockEventDispatcher for DummyEventDispatcher {
         &self,
         _block: &StacksBlock,
         _metadata: &StacksHeaderInfo,
-        _receipts: &Vec<StacksTransactionReceipt>,
+        _receipts: &[StacksTransactionReceipt],
         _parent: &StacksBlockId,
         _winner_txid: Txid,
-        _rewards: &Vec<MinerReward>,
+        _rewards: &[MinerReward],
         _rewards_info: Option<&MinerRewardInfo>,
         _parent_burn_block_hash: BurnchainHeaderHash,
         _parent_burn_block_height: u32,
@@ -853,7 +853,7 @@ impl StacksChainState {
         Ok(())
     }
 
-    pub fn atomic_file_write(path: &str, bytes: &Vec<u8>) -> Result<(), Error> {
+    pub fn atomic_file_write(path: &str, bytes: &[u8]) -> Result<(), Error> {
         StacksChainState::atomic_file_store(path, false, |ref mut fd| {
             fd.write_all(bytes)
                 .map_err(|e| Error::DBError(db_error::IOError(e)))
@@ -2049,7 +2049,7 @@ impl StacksChainState {
         tx: &mut DBTx<'a>,
         consensus_hash: &ConsensusHash,
         block_hash: &BlockHeaderHash,
-        burn_supports: &Vec<UserBurnSupportOp>,
+        burn_supports: &[UserBurnSupportOp],
     ) -> Result<(), Error> {
         for burn_support in burn_supports.iter() {
             assert!(burn_support.burn_fee < i64::MAX as u64);
@@ -3729,7 +3729,7 @@ impl StacksChainState {
 
     fn extract_signed_microblocks(
         parent_anchored_block_header: &StacksBlockHeader,
-        microblocks: &Vec<StacksMicroblock>,
+        microblocks: &[StacksMicroblock],
     ) -> Vec<StacksMicroblock> {
         let mut signed_microblocks = vec![];
         for microblock in microblocks.iter() {
@@ -3763,7 +3763,7 @@ impl StacksChainState {
     pub fn validate_parent_microblock_stream(
         parent_anchored_block_header: &StacksBlockHeader,
         anchored_block_header: &StacksBlockHeader,
-        microblocks: &Vec<StacksMicroblock>,
+        microblocks: &[StacksMicroblock],
         verify_signatures: bool,
     ) -> Option<(usize, Option<TransactionPayload>)> {
         if anchored_block_header.is_first_mined() {
@@ -3784,7 +3784,7 @@ impl StacksChainState {
         let signed_microblocks = if verify_signatures {
             StacksChainState::extract_signed_microblocks(&parent_anchored_block_header, microblocks)
         } else {
-            microblocks.clone()
+            microblocks.to_owned()
         };
 
         if signed_microblocks.len() == 0 {
@@ -4813,7 +4813,7 @@ impl StacksChainState {
     /// Return the fees and burns.
     pub fn process_microblocks_transactions(
         clarity_tx: &mut ClarityTx,
-        microblocks: &Vec<StacksMicroblock>,
+        microblocks: &[StacksMicroblock],
         ast_rules: ASTRules,
     ) -> Result<(u128, u128, Vec<StacksTransactionReceipt>), (Error, BlockHeaderHash)> {
         let mut fees = 0u128;
@@ -5239,7 +5239,7 @@ impl StacksChainState {
     pub fn process_matured_miner_rewards<'a, 'b>(
         clarity_tx: &mut ClarityTx<'a, 'b>,
         miner_share: &MinerReward,
-        users_share: &Vec<MinerReward>,
+        users_share: &[MinerReward],
         parent_share: &MinerReward,
     ) -> Result<u128, Error> {
         let mut coinbase_reward = miner_share.coinbase;
@@ -5312,7 +5312,7 @@ impl StacksChainState {
     pub fn get_parent_matured_miner(
         conn: &DBConn,
         mainnet: bool,
-        latest_matured_miners: &Vec<MinerPaymentSchedule>,
+        latest_matured_miners: &[MinerPaymentSchedule],
     ) -> Result<MinerPaymentSchedule, Error> {
         let parent_miner = if let Some(ref miner) = latest_matured_miners.first().as_ref() {
             StacksChainState::get_scheduled_block_rewards_at_block(
@@ -5901,7 +5901,7 @@ impl StacksChainState {
         microblocks: &Vec<StacksMicroblock>, // parent microblocks
         burnchain_commit_burn: u64,
         burnchain_sortition_burn: u64,
-        user_burns: &Vec<StagingUserBurnSupport>,
+        user_burns: &[StagingUserBurnSupport],
         affirmation_weight: u64,
     ) -> Result<(StacksEpochReceipt, PreCommitClarityBlock<'a>), Error> {
         debug!(
diff --git a/src/chainstate/stacks/db/mod.rs b/src/chainstate/stacks/db/mod.rs
index 34980ceb83..71853f22e2 100644
--- a/src/chainstate/stacks/db/mod.rs
+++ b/src/chainstate/stacks/db/mod.rs
@@ -2380,7 +2380,7 @@ impl StacksChainState {
         new_burnchain_timestamp: u64,
         microblock_tail_opt: Option<StacksMicroblockHeader>,
         block_reward: &MinerPaymentSchedule,
-        user_burns: &Vec<StagingUserBurnSupport>,
+        user_burns: &[StagingUserBurnSupport],
         mature_miner_payouts: Option<(MinerReward, Vec<MinerReward>, MinerReward, MinerRewardInfo)>, // (miner, [users], parent, matured rewards)
         anchor_block_cost: &ExecutionCost,
         anchor_block_size: u64,
diff --git a/src/chainstate/stacks/index/node.rs b/src/chainstate/stacks/index/node.rs
index d441d37c8b..074623012a 100644
--- a/src/chainstate/stacks/index/node.rs
+++ b/src/chainstate/stacks/index/node.rs
@@ -607,19 +607,19 @@ impl PartialEq for TrieLeaf {
 }
 
 impl TrieLeaf {
-    pub fn new(path: &Vec<u8>, data: &Vec<u8>) -> TrieLeaf {
+    pub fn new(path: &[u8], data: &Vec<u8>) -> TrieLeaf {
         assert!(data.len() <= 40);
         let mut bytes = [0u8; 40];
         bytes.copy_from_slice(&data[..]);
         TrieLeaf {
-            path: path.clone(),
+            path: path.to_owned(),
             data: MARFValue(bytes),
         }
     }
 
-    pub fn from_value(path: &Vec<u8>, value: MARFValue) -> TrieLeaf {
+    pub fn from_value(path: &[u8], value: MARFValue) -> TrieLeaf {
         TrieLeaf {
-            path: path.clone(),
+            path: path.to_owned(),
             data: value,
         }
     }
@@ -669,9 +669,9 @@ impl fmt::Debug for TrieNode4 {
 }
 
 impl TrieNode4 {
-    pub fn new(path: &Vec<u8>) -> TrieNode4 {
+    pub fn new(path: &[u8]) -> TrieNode4 {
         TrieNode4 {
-            path: path.clone(),
+            path: path.to_owned(),
             ptrs: [TriePtr::default(); 4],
         }
     }
@@ -696,9 +696,9 @@ impl fmt::Debug for TrieNode16 {
 }
 
 impl TrieNode16 {
-    pub fn new(path: &Vec<u8>) -> TrieNode16 {
+    pub fn new(path: &[u8]) -> TrieNode16 {
         TrieNode16 {
-            path: path.clone(),
+            path: path.to_owned(),
             ptrs: [TriePtr::default(); 16],
         }
     }
@@ -744,9 +744,9 @@ impl PartialEq for TrieNode48 {
 }
 
 impl TrieNode48 {
-    pub fn new(path: &Vec<u8>) -> TrieNode48 {
+    pub fn new(path: &[u8]) -> TrieNode48 {
         TrieNode48 {
-            path: path.clone(),
+            path: path.to_owned(),
             indexes: [-1; 256],
             ptrs: [TriePtr::default(); 48],
         }
     }
@@ -793,9 +793,9 @@ impl PartialEq for TrieNode256 {
 }
 
 impl TrieNode256 {
-    pub fn new(path: &Vec<u8>) -> TrieNode256 {
+    pub fn new(path: &[u8]) -> TrieNode256 {
         TrieNode256 {
-            path: path.clone(),
+            path: path.to_owned(),
             ptrs: [TriePtr::default(); 256],
         }
     }
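Several hunks in this diff also swap `&String` parameters for `&str` (for example `backup`, `from_entry`, and `at_chaintip`). A short sketch of what the borrowed-string form buys callers; the `backup` function here is a stand-in, not the repository's:

```rust
// Stand-in for the `&String` -> `&str` signature changes; `backup` is
// hypothetical, not the repository's trait method.
fn backup(backup_path: &str) -> Result<(), String> {
    if backup_path.is_empty() {
        return Err("empty path".into());
    }
    // A real implementation would copy the database file here.
    Ok(())
}

fn main() {
    let owned: String = String::from("/tmp/db.bak");

    // `&String` deref-coerces to `&str`, so existing callers still compile...
    backup(&owned).unwrap();

    // ...and new callers can pass literals or string slices without
    // allocating a String first.
    backup("/tmp/other.bak").unwrap();
    backup(&owned[..4]).unwrap();
}
```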
diff --git a/src/chainstate/stacks/index/proofs.rs b/src/chainstate/stacks/index/proofs.rs
index d6165abac8..ddf224b91a 100644
--- a/src/chainstate/stacks/index/proofs.rs
+++ b/src/chainstate/stacks/index/proofs.rs
@@ -1567,8 +1567,8 @@ impl TrieMerkleProof {
     /// If the path doesn't resolve, return an error (NotFoundError)
     pub fn from_entry(
         storage: &mut TrieStorageConnection<T>,
-        key: &String,
-        value: &String,
+        key: &str,
+        value: &str,
         root_block_header: &T,
     ) -> Result<TrieMerkleProof<T>, Error> {
         let marf_value = MARFValue::from_value(value);
diff --git a/src/chainstate/stacks/mod.rs b/src/chainstate/stacks/mod.rs
index 6ddae489b5..934b92ddb0 100644
--- a/src/chainstate/stacks/mod.rs
+++ b/src/chainstate/stacks/mod.rs
@@ -724,7 +724,7 @@ impl NonfungibleConditionCode {
         }
     }
 
-    pub fn was_sent(nft_sent_condition: &Value, nfts_sent: &Vec<Value>) -> bool {
+    pub fn was_sent(nft_sent_condition: &Value, nfts_sent: &[Value]) -> bool {
         for asset_sent in nfts_sent.iter() {
             if *asset_sent == *nft_sent_condition {
                 // asset was sent, and is no longer owned by this principal
@@ -734,7 +734,7 @@ impl NonfungibleConditionCode {
         return false;
     }
 
-    pub fn check(&self, nft_sent_condition: &Value, nfts_sent: &Vec<Value>) -> bool {
+    pub fn check(&self, nft_sent_condition: &Value, nfts_sent: &[Value]) -> bool {
         match *self {
             NonfungibleConditionCode::Sent => {
                 NonfungibleConditionCode::was_sent(nft_sent_condition, nfts_sent)
diff --git a/src/clarity_cli.rs b/src/clarity_cli.rs
index 5edd76703e..bfa8e26c0a 100644
--- a/src/clarity_cli.rs
+++ b/src/clarity_cli.rs
@@ -404,7 +404,7 @@ where
 
 // like in_block, but does _not_ advance the chain tip. Used for read-only queries against the
 // chain tip itself.
-fn at_chaintip<F, R>(db_path: &String, mut marf_kv: MarfedKV, f: F) -> R
+fn at_chaintip<F, R>(db_path: &str, mut marf_kv: MarfedKV, f: F) -> R
 where
     F: FnOnce(WritableMarfStore) -> (WritableMarfStore, R),
 {
diff --git a/src/clarity_vm/special.rs b/src/clarity_vm/special.rs
index 15eb778421..b02b6aba33 100644
--- a/src/clarity_vm/special.rs
+++ b/src/clarity_vm/special.rs
@@ -947,7 +947,7 @@ pub fn handle_contract_call_special_cases(
     _sponsor: Option<&PrincipalData>,
     contract_id: &QualifiedContractIdentifier,
     function_name: &str,
-    args: &Vec<Value>,
+    args: &[Value],
     result: &Value,
 ) -> Result<()> {
     if *contract_id == boot_code_id(POX_1_NAME, global_context.mainnet) {
diff --git a/src/net/chat.rs b/src/net/chat.rs
index 30d97d60e1..1cd1f5b8c2 100644
--- a/src/net/chat.rs
+++ b/src/net/chat.rs
@@ -1629,7 +1629,7 @@ impl ConversationP2P {
     /// Verify that there are no cycles in our relayers list.
     /// Identify relayers by public key hash
-    fn check_relayer_cycles(relayers: &Vec<RelayData>) -> bool {
+    fn check_relayer_cycles(relayers: &[RelayData]) -> bool {
         let mut addrs = HashSet::new();
         for r in relayers.iter() {
             if addrs.contains(&r.peer.public_key_hash) {
@@ -1641,7 +1641,7 @@ impl ConversationP2P {
     }
 
     /// Verify that we aren't in this relayers list
-    fn check_relayers_remote(local_peer: &LocalPeer, relayers: &Vec<RelayData>) -> bool {
+    fn check_relayers_remote(local_peer: &LocalPeer, relayers: &[RelayData]) -> bool {
         let addr = local_peer.to_neighbor_addr();
         for r in relayers.iter() {
             if r.peer.public_key_hash == addr.public_key_hash {
@@ -1659,7 +1659,7 @@ impl ConversationP2P {
         &mut self,
         local_peer: &LocalPeer,
         preamble: &Preamble,
-        relayers: &Vec<RelayData>,
+        relayers: &[RelayData],
     ) -> bool {
         if !ConversationP2P::check_relayer_cycles(relayers) {
             debug!(
diff --git a/src/net/http.rs b/src/net/http.rs
index 73a42cfa11..a0a74fefdb 100644
--- a/src/net/http.rs
+++ b/src/net/http.rs
@@ -3143,7 +3143,7 @@ impl HttpResponseType {
         regex: &Regex,
         request_version: HttpVersion,
         preamble: &HttpResponsePreamble,
-        request_path: &String,
+        request_path: &str,
         fd: &mut R,
         len_hint: Option<usize>,
         parser: F,
diff --git a/src/net/mod.rs b/src/net/mod.rs
index f3d9a07b95..76b5a14782 100644
--- a/src/net/mod.rs
+++ b/src/net/mod.rs
@@ -2401,10 +2401,10 @@ pub mod test {
             &self,
             block: &StacksBlock,
             metadata: &StacksHeaderInfo,
-            receipts: &Vec<events::StacksTransactionReceipt>,
+            receipts: &[events::StacksTransactionReceipt],
             parent: &StacksBlockId,
             winner_txid: Txid,
-            matured_rewards: &Vec<accounts::MinerReward>,
+            matured_rewards: &[accounts::MinerReward],
             matured_rewards_info: Option<&MinerRewardInfo>,
             parent_burn_block_hash: BurnchainHeaderHash,
             parent_burn_block_height: u32,
@@ -2416,10 +2416,10 @@ pub mod test {
             self.blocks.lock().unwrap().push(TestEventObserverBlock {
                 block: block.clone(),
                 metadata: metadata.clone(),
-                receipts: receipts.clone(),
+                receipts: receipts.to_owned(),
                 parent: parent.clone(),
                 winner_txid,
-                matured_rewards: matured_rewards.clone(),
+                matured_rewards: matured_rewards.to_owned(),
                 matured_rewards_info: matured_rewards_info.map(|info| info.clone()),
             })
         }
diff --git a/src/net/relay.rs b/src/net/relay.rs
index 9cf30e2d79..ddacfd38eb 100644
--- a/src/net/relay.rs
+++ b/src/net/relay.rs
@@ -294,7 +294,7 @@ impl RelayerStats {
     /// Map neighbors to the frequency of their AS numbers in the given neighbors list
     fn count_ASNs(
         conn: &DBConn,
-        neighbors: &Vec<NeighborKey>,
+        neighbors: &[NeighborKey],
     ) -> Result<HashMap<u32, u64>, net_error> {
         // look up ASNs
         let mut asns = HashMap::new();
@@ -338,7 +338,7 @@ impl RelayerStats {
     /// to some other peer that's already forwarding it data. Thus, we don't need to do so.
     pub fn get_inbound_relay_rankings<R: RelayPayload>(
         &self,
-        neighbors: &Vec<NeighborKey>,
+        neighbors: &[NeighborKey],
         msg: &R,
         warmup_threshold: usize,
     ) -> HashMap<NeighborKey, usize> {
@@ -374,7 +374,7 @@ impl RelayerStats {
     pub fn get_outbound_relay_rankings(
         &self,
         peerdb: &PeerDB,
-        neighbors: &Vec<NeighborKey>,
+        neighbors: &[NeighborKey],
     ) -> Result<HashMap<NeighborKey, usize>, net_error> {
         let asn_counts = RelayerStats::count_ASNs(peerdb.conn(), neighbors)?;
         let asn_total = asn_counts.values().fold(0, |t, s| t + s);
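The `.clone()` to `.to_owned()` swaps above (and in `src/net/mod.rs`) follow from the parameter change: on a `&[T]` receiver, `.clone()` only copies the borrow itself, so an explicit `.to_owned()` (or `.to_vec()`) is needed to materialize a fresh `Vec<T>`, which is what `Vec::clone` used to produce. A compact sketch with a toy payload type:

```rust
// Toy payload type; the real receipt/reward types live in the chainstate.
#[derive(Clone, Debug, PartialEq)]
struct Reward {
    amount: u64,
}

// With a slice parameter, calling `.clone()` on `rewards` would clone the
// `&[Reward]` borrow (a cheap pointer copy), not the data. `.to_owned()`
// allocates a new Vec<Reward>, matching what `Vec::clone` did before.
fn store_rewards(rewards: &[Reward]) -> Vec<Reward> {
    rewards.to_owned()
}

fn main() {
    let rewards = vec![Reward { amount: 10 }, Reward { amount: 20 }];
    let stored = store_rewards(&rewards);
    assert_eq!(stored, rewards);
}
```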
diff --git a/stacks-common/src/util/hash.rs b/stacks-common/src/util/hash.rs
index e39a8d0d2e..9660210dc9 100644
--- a/stacks-common/src/util/hash.rs
+++ b/stacks-common/src/util/hash.rs
@@ -546,7 +546,7 @@ where
 
     /// Get the path from the given data's leaf up to the root.
     /// will be None if the data isn't a leaf.
-    pub fn path(&self, data: &Vec<u8>) -> Option<MerklePath<H>> {
+    pub fn path(&self, data: &[u8]) -> Option<MerklePath<H>> {
         let leaf_hash = MerkleTree::get_leaf_hash(&data[..]);
         let mut hash_index = match self.find_hash_index(&leaf_hash, 0) {
             None => {
@@ -589,7 +589,7 @@ where
     }
 
     /// Verify a datum and its Merkle path against a Merkle root
-    pub fn path_verify(data: &Vec<u8>, path: &MerklePath<H>, root: &H) -> bool {
+    pub fn path_verify(data: &[u8], path: &MerklePath<H>, root: &H) -> bool {
         if path.len() < 1 {
             // invalid path
             return false;
@@ -680,7 +680,7 @@ pub fn to_bin(s: &[u8]) -> String {
 }
 
 /// Convert a vec of u8 to a hex string
-pub fn bytes_to_hex(s: &Vec<u8>) -> String {
+pub fn bytes_to_hex(s: &[u8]) -> String {
     to_hex(&s[..])
 }
 
diff --git a/stacks-common/src/util/vrf.rs b/stacks-common/src/util/vrf.rs
index b0b80b260c..7a628d1a99 100644
--- a/stacks-common/src/util/vrf.rs
+++ b/stacks-common/src/util/vrf.rs
@@ -156,7 +156,7 @@ impl VRFPrivateKey {
         VRFPrivateKey(keypair.secret)
     }
 
-    pub fn from_hex(h: &String) -> Option<VRFPrivateKey> {
+    pub fn from_hex(h: &str) -> Option<VRFPrivateKey> {
         match hex_bytes(h) {
             Ok(b) => match ed25519_PrivateKey::from_bytes(&b[..]) {
                 Ok(pk) => Some(VRFPrivateKey(pk)),
@@ -356,11 +356,11 @@ impl VRFProof {
         }
     }
 
-    pub fn from_bytes(bytes: &Vec<u8>) -> Option<VRFProof> {
+    pub fn from_bytes(bytes: &[u8]) -> Option<VRFProof> {
         VRFProof::from_slice(&bytes[..])
     }
 
-    pub fn from_hex(hex_str: &String) -> Option<VRFProof> {
+    pub fn from_hex(hex_str: &str) -> Option<VRFProof> {
         match hex_bytes(hex_str) {
             Ok(b) => VRFProof::from_slice(&b[..]),
             Err(_) => None,
@@ -556,11 +556,7 @@ impl VRF {
     /// Return Ok(false) if not
     /// Return Err(Error) if the public key is invalid, or we are unable to do one of the
     /// necessary internal data conversions.
-    pub fn verify(
-        Y_point: &VRFPublicKey,
-        proof: &VRFProof,
-        alpha: &Vec<u8>,
-    ) -> Result<bool, Error> {
+    pub fn verify(Y_point: &VRFPublicKey, proof: &VRFProof, alpha: &[u8]) -> Result<bool, Error> {
         let H_point = VRF::hash_to_curve(Y_point, alpha);
         let s_reduced = proof.s().reduce();
         let Y_point_ed = CompressedEdwardsY(Y_point.to_bytes())
diff --git a/testnet/stacks-node/src/burnchains/mocknet_controller.rs b/testnet/stacks-node/src/burnchains/mocknet_controller.rs
index af8f4b8c1d..9d451d81d0 100644
--- a/testnet/stacks-node/src/burnchains/mocknet_controller.rs
+++ b/testnet/stacks-node/src/burnchains/mocknet_controller.rs
@@ -57,7 +57,7 @@ impl MocknetController {
             current_block.block_height + 1,
             &BurnchainHeaderHash::from_bytes(next_hash.as_bytes()).unwrap(),
             &current_block.burn_header_hash,
-            &vec![],
+            vec![],
             get_epoch_time_secs(),
         ));
         block.header()
diff --git a/testnet/stacks-node/src/event_dispatcher.rs b/testnet/stacks-node/src/event_dispatcher.rs
index a14d8d8399..137ed6f3b0 100644
--- a/testnet/stacks-node/src/event_dispatcher.rs
+++ b/testnet/stacks-node/src/event_dispatcher.rs
@@ -345,7 +345,7 @@ impl EventObserver {
         filtered_events: Vec<(usize, &(bool, Txid, &StacksTransactionEvent))>,
         block: &StacksBlock,
         metadata: &StacksHeaderInfo,
-        receipts: &Vec<StacksTransactionReceipt>,
+        receipts: &[StacksTransactionReceipt],
         parent_index_hash: &StacksBlockId,
         winner_txid: &Txid,
         mature_rewards: &serde_json::Value,
@@ -459,10 +459,10 @@ impl BlockEventDispatcher for EventDispatcher {
         &self,
         block: &StacksBlock,
         metadata: &StacksHeaderInfo,
-        receipts: &Vec<StacksTransactionReceipt>,
+        receipts: &[StacksTransactionReceipt],
         parent: &StacksBlockId,
         winner_txid: Txid,
-        mature_rewards: &Vec<MinerReward>,
+        mature_rewards: &[MinerReward],
         mature_rewards_info: Option<&MinerRewardInfo>,
         parent_burn_block_hash: BurnchainHeaderHash,
         parent_burn_block_height: u32,
@@ -660,10 +660,10 @@ impl EventDispatcher {
         &self,
         block: &StacksBlock,
         metadata: &StacksHeaderInfo,
-        receipts: &Vec<StacksTransactionReceipt>,
+        receipts: &[StacksTransactionReceipt],
         parent_index_hash: &StacksBlockId,
         winner_txid: Txid,
-        mature_rewards: &Vec<MinerReward>,
+        mature_rewards: &[MinerReward],
         mature_rewards_info: Option<&MinerRewardInfo>,
         parent_burn_block_hash: BurnchainHeaderHash,
         parent_burn_block_height: u32,
@@ -672,7 +672,7 @@ impl EventDispatcher {
         mblock_confirmed_consumed: &ExecutionCost,
         pox_constants: &PoxConstants,
     ) {
-        let all_receipts = receipts.clone();
+        let all_receipts = receipts.to_owned();
         let (dispatch_matrix, events) = self.create_dispatch_matrix_and_event_vector(&all_receipts);
 
         if dispatch_matrix.len() > 0 {
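Finally, the constructor-style changes in this diff (for example `BitcoinBlock::new` and `from_bitcoin_witness_script_sig`) stop borrowing and cloning and instead take the `Vec` by value, so any copying is decided at the call site. A closing sketch under illustrative names:

```rust
// Illustrative names only; the real BitcoinBlock has more fields.
struct BitcoinishBlock {
    height: u64,
    txs: Vec<String>,
}

impl BitcoinishBlock {
    // Before: `txs: &Vec<String>` plus `txs: txs.clone()` inside the body.
    // After: ownership moves in, and the constructor never clones.
    fn new(height: u64, txs: Vec<String>) -> BitcoinishBlock {
        BitcoinishBlock { height, txs }
    }
}

fn main() {
    // Callers that used to pass `&vec![]` now pass `vec![]` directly.
    let empty = BitcoinishBlock::new(121, vec![]);
    assert_eq!(empty.txs.len(), 0);
    assert_eq!(empty.height, 121);

    // Callers that still need their vector afterwards clone explicitly at
    // the call site, which keeps the extra allocation visible to the caller.
    let txs = vec!["tx0".to_string(), "tx1".to_string()];
    let block = BitcoinishBlock::new(122, txs.clone());
    assert_eq!(block.txs.len(), txs.len());
}
```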