From 12b6851dd46ada1f653c874a496db26c1bfc828e Mon Sep 17 00:00:00 2001 From: Vincent Geddes Date: Thu, 7 Oct 2021 22:50:42 +0200 Subject: [PATCH] Housekeeping: Update pallets to FRAME 2.0 syntax (#527) --- parachain/pallets/assets/src/benchmarking.rs | 2 +- .../basic-channel/src/inbound/benchmarking.rs | 25 +- .../pallets/basic-channel/src/inbound/mod.rs | 116 ++- .../pallets/basic-channel/src/inbound/test.rs | 17 +- .../basic-channel/src/inbound/weights.rs | 9 + .../src/outbound/benchmarking.rs | 14 +- .../pallets/basic-channel/src/outbound/mod.rs | 295 +++--- .../basic-channel/src/outbound/test.rs | 17 +- .../basic-channel/src/outbound/weights.rs | 15 + parachain/pallets/dispatch/src/lib.rs | 148 +-- parachain/pallets/dot-app/src/benchmarking.rs | 11 +- parachain/pallets/dot-app/src/lib.rs | 25 +- parachain/pallets/dot-app/src/mock.rs | 8 +- parachain/pallets/dot-app/src/tests.rs | 4 +- .../pallets/erc20-app/src/benchmarking.rs | 9 +- parachain/pallets/erc20-app/src/lib.rs | 148 +-- parachain/pallets/erc20-app/src/mock.rs | 3 +- parachain/pallets/erc20-app/src/tests.rs | 10 +- parachain/pallets/erc20-app/src/weights.rs | 11 + parachain/pallets/erc721-app/src/lib.rs | 5 +- parachain/pallets/erc721-app/src/tests.rs | 2 +- parachain/pallets/eth-app/src/benchmarking.rs | 9 +- parachain/pallets/eth-app/src/lib.rs | 143 +-- parachain/pallets/eth-app/src/mock.rs | 3 +- parachain/pallets/eth-app/src/tests.rs | 9 +- parachain/pallets/eth-app/src/weights.rs | 11 + .../src/benchmarking/mod.rs | 47 +- .../pallets/ethereum-light-client/src/lib.rs | 868 +++++++++--------- .../pallets/ethereum-light-client/src/mock.rs | 9 +- .../ethereum-light-client/src/tests.rs | 42 +- .../ethereum-light-client/src/weights.rs | 15 + .../src/inbound/benchmarking.rs | 25 +- .../incentivized-channel/src/inbound/mod.rs | 230 +++-- .../incentivized-channel/src/inbound/test.rs | 16 +- .../src/inbound/weights.rs | 11 + .../src/outbound/benchmarking.rs | 18 +- .../incentivized-channel/src/outbound/mod.rs | 304 +++--- .../incentivized-channel/src/outbound/test.rs | 19 +- .../src/outbound/weights.rs | 14 + .../primitives/ethereum/src/difficulty.rs | 4 +- parachain/runtime/common/src/lib.rs | 8 +- parachain/runtime/local/src/lib.rs | 22 +- parachain/runtime/rococo/src/lib.rs | 20 +- parachain/runtime/snowbridge/src/lib.rs | 20 +- parachain/src/chain_spec/local.rs | 1 - parachain/src/chain_spec/rococo.rs | 1 - parachain/src/chain_spec/snowbridge.rs | 1 - relayer/relays/parachain/beefy-listener.go | 4 +- test/package.json | 2 +- test/src/subclient/index.js | 2 +- test/yarn.lock | 8 +- 51 files changed, 1518 insertions(+), 1262 deletions(-) create mode 100644 parachain/pallets/basic-channel/src/inbound/weights.rs create mode 100644 parachain/pallets/basic-channel/src/outbound/weights.rs create mode 100644 parachain/pallets/erc20-app/src/weights.rs create mode 100644 parachain/pallets/eth-app/src/weights.rs create mode 100644 parachain/pallets/ethereum-light-client/src/weights.rs create mode 100644 parachain/pallets/incentivized-channel/src/inbound/weights.rs create mode 100644 parachain/pallets/incentivized-channel/src/outbound/weights.rs diff --git a/parachain/pallets/assets/src/benchmarking.rs b/parachain/pallets/assets/src/benchmarking.rs index b159196c9de51..0a67dc25e0099 100644 --- a/parachain/pallets/assets/src/benchmarking.rs +++ b/parachain/pallets/assets/src/benchmarking.rs @@ -9,7 +9,7 @@ use frame_benchmarking::{account, benchmarks, whitelisted_caller, impl_benchmark use sp_core::H160; #[allow(unused_imports)] -use 
crate::Module as Assets; +use crate::Pallet as Assets; fn set_balance(asset_id: &AssetId, who: &T::AccountId, amount: &U256) { TotalIssuance::insert(asset_id, amount); diff --git a/parachain/pallets/basic-channel/src/inbound/benchmarking.rs b/parachain/pallets/basic-channel/src/inbound/benchmarking.rs index 87857c0321056..cdac1a20c936b 100644 --- a/parachain/pallets/basic-channel/src/inbound/benchmarking.rs +++ b/parachain/pallets/basic-channel/src/inbound/benchmarking.rs @@ -1,19 +1,16 @@ -//! BasicInboundChannel pallet benchmarking - -#![cfg(feature = "runtime-benchmarks")] - use super::*; use frame_system::{RawOrigin, self, EventRecord}; use frame_benchmarking::{benchmarks, whitelisted_caller, impl_benchmark_test_suite}; use hex_literal::hex; use sp_std::convert::TryInto; +use sp_std::prelude::*; use snowbridge_core::{ChannelId, Message, MessageId, Proof}; use snowbridge_ethereum::{Log, Header}; #[allow(unused_imports)] -use crate::inbound::Module as BasicInboundChannel; +use crate::inbound::Pallet as BasicInboundChannel; fn assert_last_event(system_event: ::Event) { let events = frame_system::Pallet::::events(); @@ -39,8 +36,8 @@ benchmarks! { let envelope: envelope::Envelope = rlp::decode::(&message.data) .map(|log| log.try_into().unwrap()) .unwrap(); - Nonce::put(envelope.nonce - 1); - SourceChannel::put(envelope.channel); + >::put(envelope.nonce - 1); + >::put(envelope.channel); T::Verifier::initialize_storage( vec![header], @@ -50,7 +47,7 @@ benchmarks! { }: _(RawOrigin::Signed(caller.clone()), message) verify { - assert_eq!(envelope.nonce, Nonce::get()); + assert_eq!(envelope.nonce, >::get()); let message_id = MessageId::new(ChannelId::Basic, envelope.nonce); if let Some(event) = T::MessageDispatch::successful_dispatch_event(message_id) { @@ -65,8 +62,8 @@ benchmarks! { let envelope: envelope::Envelope = rlp::decode::(&message.data) .map(|log| log.try_into().unwrap()) .unwrap(); - Nonce::put(envelope.nonce - 1); - SourceChannel::put(envelope.channel); + >::put(envelope.nonce - 1); + >::put(envelope.channel); T::Verifier::initialize_storage( vec![header], @@ -76,7 +73,7 @@ benchmarks! { }: submit(RawOrigin::Signed(caller.clone()), message) verify { - assert_eq!(envelope.nonce, Nonce::get()); + assert_eq!(envelope.nonce, >::get()); let message_id = MessageId::new(ChannelId::Basic, envelope.nonce); if let Some(event) = T::MessageDispatch::successful_dispatch_event(message_id) { @@ -91,8 +88,8 @@ benchmarks! { let envelope: envelope::Envelope = rlp::decode::(&message.data) .map(|log| log.try_into().unwrap()) .unwrap(); - Nonce::put(envelope.nonce - 1); - SourceChannel::put(envelope.channel); + >::put(envelope.nonce - 1); + >::put(envelope.channel); T::Verifier::initialize_storage( vec![header], @@ -102,7 +99,7 @@ benchmarks! 
{ }: submit(RawOrigin::Signed(caller.clone()), message) verify { - assert_eq!(envelope.nonce, Nonce::get()); + assert_eq!(envelope.nonce, >::get()); let message_id = MessageId::new(ChannelId::Basic, envelope.nonce); if let Some(event) = T::MessageDispatch::successful_dispatch_event(message_id) { diff --git a/parachain/pallets/basic-channel/src/inbound/mod.rs b/parachain/pallets/basic-channel/src/inbound/mod.rs index fd50418385d95..96c7b8d1bfc34 100644 --- a/parachain/pallets/basic-channel/src/inbound/mod.rs +++ b/parachain/pallets/basic-channel/src/inbound/mod.rs @@ -1,11 +1,15 @@ -use frame_support::{ - decl_error, decl_event, decl_module, decl_storage, - dispatch::DispatchResult, - weights::Weight, -}; -use frame_system::{self as system, ensure_signed}; +mod envelope; + +#[cfg(feature = "runtime-benchmarks")] +mod benchmarking; + +pub mod weights; + +#[cfg(test)] +mod test; + +use frame_system::ensure_signed; use sp_core::H160; -use sp_std::prelude::*; use sp_std::convert::TryFrom; use snowbridge_core::{ ChannelId, Message, MessageId, @@ -13,51 +17,44 @@ use snowbridge_core::{ }; use envelope::Envelope; +pub use weights::WeightInfo; -mod benchmarking; +pub use pallet::*; -#[cfg(test)] -mod test; +#[frame_support::pallet] +pub mod pallet { -mod envelope; + use super::*; -/// Weight functions needed for this pallet. -pub trait WeightInfo { - fn submit() -> Weight; -} + use frame_support::pallet_prelude::*; + use frame_system::pallet_prelude::*; -impl WeightInfo for () { - fn submit() -> Weight { 0 } -} + #[pallet::pallet] + #[pallet::generate_store(pub(super) trait Store)] + pub struct Pallet(_); -pub trait Config: system::Config { - type Event: From + Into<::Event>; + #[pallet::config] + pub trait Config: frame_system::Config { + type Event: From> + IsType<::Event>; - /// Verifier module for message verification. - type Verifier: Verifier; + /// Verifier module for message verification. + type Verifier: Verifier; - /// Verifier module for message verification. - type MessageDispatch: MessageDispatch; + /// Verifier module for message verification. + type MessageDispatch: MessageDispatch; - /// Weight information for extrinsics in this pallet - type WeightInfo: WeightInfo; -} - -decl_storage! { - trait Store for Module as BasicInboundModule { - pub SourceChannel get(fn source_channel) config(): H160; - pub Nonce: u64; + /// Weight information for extrinsics in this pallet + type WeightInfo: WeightInfo; } -} -decl_event! { - pub enum Event { + #[pallet::hooks] + impl Hooks> for Pallet {} - } -} + #[pallet::event] + pub enum Event {} -decl_error! { - pub enum Error for Module { + #[pallet::error] + pub enum Error { /// Message came from an invalid outbound channel on the Ethereum side. InvalidSourceChannel, /// Message has an invalid envelope. @@ -65,17 +62,40 @@ decl_error! { /// Message has an unexpected nonce. InvalidNonce, } -} -decl_module! 
{ - pub struct Module for enum Call where origin: T::Origin { + /// Source channel on the ethereum side + #[pallet::storage] + #[pallet::getter(fn source_channel)] + pub type SourceChannel = StorageValue<_, H160, ValueQuery>; + + #[pallet::storage] + pub type Nonce = StorageValue<_, u64, ValueQuery>; - type Error = Error; + #[pallet::genesis_config] + pub struct GenesisConfig { + pub source_channel: H160, + } - fn deposit_event() = default; + #[cfg(feature = "std")] + impl Default for GenesisConfig { + fn default() -> Self { + Self { + source_channel: Default::default(), + } + } + } + + #[pallet::genesis_build] + impl GenesisBuild for GenesisConfig { + fn build(&self) { + >::put(self.source_channel); + } + } - #[weight = T::WeightInfo::submit()] - pub fn submit(origin, message: Message) -> DispatchResult { + #[pallet::call] + impl Pallet { + #[pallet::weight(T::WeightInfo::submit())] + pub fn submit(origin: OriginFor, message: Message) -> DispatchResult { ensure_signed(origin)?; // submit message to verifier for verification let log = T::Verifier::verify(&message)?; @@ -85,12 +105,12 @@ decl_module! { // Verify that the message was submitted to us from a known // outbound channel on the ethereum side - if envelope.channel != SourceChannel::get() { + if envelope.channel != >::get() { return Err(Error::::InvalidSourceChannel.into()) } // Verify message nonce - Nonce::try_mutate(|nonce| -> DispatchResult { + >::try_mutate(|nonce| -> DispatchResult { if envelope.nonce != *nonce + 1 { Err(Error::::InvalidNonce.into()) } else { diff --git a/parachain/pallets/basic-channel/src/inbound/test.rs b/parachain/pallets/basic-channel/src/inbound/test.rs index bfd60ab16147b..9de518b89f245 100644 --- a/parachain/pallets/basic-channel/src/inbound/test.rs +++ b/parachain/pallets/basic-channel/src/inbound/test.rs @@ -1,6 +1,6 @@ - use super::*; +use frame_support::traits::GenesisBuild; use sp_core::{H160, H256}; use frame_support::{ assert_ok, assert_noop, @@ -19,7 +19,6 @@ use snowbridge_ethereum::{Header as EthereumHeader, Log, U256}; use hex_literal::hex; use crate::inbound::Error; - use crate::inbound as basic_inbound_channel; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; @@ -32,7 +31,7 @@ frame_support::construct_runtime!( UncheckedExtrinsic = UncheckedExtrinsic, { System: frame_system::{Pallet, Call, Storage, Event}, - BasicInboundChannel: basic_inbound_channel::{Pallet, Call, Storage, Event}, + BasicInboundChannel: basic_inbound_channel::{Pallet, Call, Storage, Event}, } ); @@ -43,7 +42,7 @@ parameter_types! 
{ pub const BlockHashCount: u64 = 250; } -impl system::Config for Test { +impl frame_system::Config for Test { type BaseCallFilter = (); type BlockWeights = (); type BlockLength = (); @@ -89,7 +88,7 @@ impl MessageDispatch for MockMessageDispatch { fn dispatch(_: H160, _: MessageId, _: &[u8]) {} #[cfg(feature = "runtime-benchmarks")] - fn successful_dispatch_event(_: MessageId) -> Option<::Event> { + fn successful_dispatch_event(_: MessageId) -> Option<::Event> { None } } @@ -110,7 +109,7 @@ pub fn new_tester(source_channel: H160) -> sp_io::TestExternalities { pub fn new_tester_with_config(config: basic_inbound_channel::GenesisConfig) -> sp_io::TestExternalities { let mut storage = frame_system::GenesisConfig::default().build_storage::().unwrap(); - config.assimilate_storage(&mut storage).unwrap(); + GenesisBuild::::assimilate_storage(&config, &mut storage).unwrap(); let mut ext: sp_io::TestExternalities = storage.into(); ext.execute_with(|| System::set_block_number(1)); @@ -197,7 +196,7 @@ fn test_submit() { }, }; assert_ok!(BasicInboundChannel::submit(origin.clone(), message_1)); - let nonce: u64 = Nonce::get(); + let nonce: u64 = >::get(); assert_eq!(nonce, 1); // Submit message 2 @@ -210,7 +209,7 @@ fn test_submit() { }, }; assert_ok!(BasicInboundChannel::submit(origin.clone(), message_2)); - let nonce: u64 = Nonce::get(); + let nonce: u64 = >::get(); assert_eq!(nonce, 2); }); } @@ -231,7 +230,7 @@ fn test_submit_with_invalid_nonce() { }, }; assert_ok!(BasicInboundChannel::submit(origin.clone(), message.clone())); - let nonce: u64 = Nonce::get(); + let nonce: u64 = >::get(); assert_eq!(nonce, 1); // Submit the same again diff --git a/parachain/pallets/basic-channel/src/inbound/weights.rs b/parachain/pallets/basic-channel/src/inbound/weights.rs new file mode 100644 index 0000000000000..80a7087d7337f --- /dev/null +++ b/parachain/pallets/basic-channel/src/inbound/weights.rs @@ -0,0 +1,9 @@ +use frame_support::weights::Weight; + +pub trait WeightInfo { + fn submit() -> Weight; +} + +impl WeightInfo for () { + fn submit() -> Weight { 0 } +} diff --git a/parachain/pallets/basic-channel/src/outbound/benchmarking.rs b/parachain/pallets/basic-channel/src/outbound/benchmarking.rs index 7bd1dc97ea51d..1133695c4e1a8 100644 --- a/parachain/pallets/basic-channel/src/outbound/benchmarking.rs +++ b/parachain/pallets/basic-channel/src/outbound/benchmarking.rs @@ -9,7 +9,7 @@ use frame_benchmarking::{ use frame_support::traits::OnInitialize; #[allow(unused_imports)] -use crate::outbound::Module as BasicOutboundChannel; +use crate::outbound::Pallet as BasicOutboundChannel; const SEED: u32 = 0; @@ -22,7 +22,7 @@ benchmarks! { for _ in 0 .. m { let payload: Vec = (0..).take(p as usize).collect(); - MessageQueue::append(Message { + >::append(Message { target: H160::zero(), nonce: 0u64, payload, @@ -33,16 +33,16 @@ benchmarks! { }: { BasicOutboundChannel::::on_initialize(block_number) } verify { - assert_eq!(MessageQueue::get().len(), 0); + assert_eq!(>::get().len(), 0); } // Benchmark 'on_initialize` for the best case, i.e. nothing is done // because it's not a commitment interval. on_initialize_non_interval { - MessageQueue::append(Message { + >::append(Message { target: H160::zero(), nonce: 0u64, - payload: vec![1u8; T::MaxMessagePayloadSize::get()], + payload: vec![1u8; T::MaxMessagePayloadSize::get() as usize], }); Interval::::put::(10u32.into()); @@ -50,13 +50,13 @@ benchmarks! 
{ }: { BasicOutboundChannel::::on_initialize(block_number) } verify { - assert_eq!(MessageQueue::get().len(), 1); + assert_eq!(>::get().len(), 1); } // Benchmark 'on_initialize` for the case where it is a commitment interval // but there are no messages in the queue. on_initialize_no_messages { - MessageQueue::kill(); + >::kill(); let block_number = Interval::::get(); diff --git a/parachain/pallets/basic-channel/src/outbound/mod.rs b/parachain/pallets/basic-channel/src/outbound/mod.rs index 37140c49476c2..9c0388d0c0a2b 100644 --- a/parachain/pallets/basic-channel/src/outbound/mod.rs +++ b/parachain/pallets/basic-channel/src/outbound/mod.rs @@ -1,27 +1,29 @@ +pub mod weights; + +#[cfg(feature = "runtime-benchmarks")] +mod benchmarking; + +#[cfg(test)] +mod test; + use codec::{Encode, Decode}; use ethabi::{self, Token}; use frame_support::{ - decl_error, decl_event, decl_module, decl_storage, - weights::Weight, dispatch::DispatchResult, traits::{Get, EnsureOrigin}, ensure, }; -use frame_system::{self as system}; use sp_core::{H160, H256, RuntimeDebug}; use sp_io::offchain_index; use sp_runtime::{ traits::{Hash, Zero, StaticLookup}, }; + use sp_std::prelude::*; use snowbridge_core::{ChannelId, MessageNonce, types::AuxiliaryDigestItem}; -#[cfg(feature = "runtime-benchmarks")] -mod benchmarking; - -#[cfg(test)] -mod test; +pub use weights::WeightInfo; /// Wire-format for committed messages #[derive(Encode, Decode, Clone, PartialEq, RuntimeDebug)] @@ -34,64 +36,51 @@ pub struct Message { payload: Vec, } -/// Weight functions needed for this pallet. -pub trait WeightInfo { - fn on_initialize(num_messages: u32, avg_payload_bytes: u32) -> Weight; - fn on_initialize_non_interval() -> Weight; - fn on_initialize_no_messages() -> Weight; - fn set_principal() -> Weight; -} +pub use pallet::*; -impl WeightInfo for () { - fn on_initialize(_: u32, _: u32) -> Weight { 0 } - fn on_initialize_non_interval() -> Weight { 0 } - fn on_initialize_no_messages() -> Weight { 0 } - fn set_principal() -> Weight { 0 } -} - -pub trait Config: system::Config { - type Event: From + Into<::Event>; +#[frame_support::pallet] +pub mod pallet { - /// Prefix for offchain storage keys. - const INDEXING_PREFIX: &'static [u8]; + use super::*; - type Hashing: Hash; + use frame_support::pallet_prelude::*; + use frame_system::pallet_prelude::*; - // Max bytes in a message payload - type MaxMessagePayloadSize: Get; + #[pallet::pallet] + #[pallet::generate_store(pub(super) trait Store)] + pub struct Pallet(_); - /// Max number of messages that can be queued and committed in one go for a given channel. - type MaxMessagesPerCommit: Get; + #[pallet::config] + pub trait Config: frame_system::Config { + type Event: From> + IsType<::Event>; - type SetPrincipalOrigin: EnsureOrigin; + /// Prefix for offchain storage keys. + const INDEXING_PREFIX: &'static [u8]; - /// Weight information for extrinsics in this pallet - type WeightInfo: WeightInfo; -} + type Hashing: Hash; -decl_storage! { - trait Store for Module as BasicOutboundModule { - /// Interval between committing messages. - Interval get(fn interval) config(): T::BlockNumber; + /// Max bytes in a message payload + #[pallet::constant] + type MaxMessagePayloadSize: Get; - /// Messages waiting to be committed. 
- MessageQueue: Vec; + /// Max number of messages per commitment + #[pallet::constant] + type MaxMessagesPerCommit: Get; - /// The Account authorized to submit messages - Principal get(fn principal) config(): T::AccountId; + type SetPrincipalOrigin: EnsureOrigin; - pub Nonce: u64; + /// Weight information for extrinsics in this pallet + type WeightInfo: WeightInfo; } -} -decl_event! { - pub enum Event { + #[pallet::event] + #[pallet::generate_deposit(pub(super) fn deposit_event)] + pub enum Event { MessageAccepted(MessageNonce), } -} -decl_error! { - pub enum Error for Module { + #[pallet::error] + pub enum Error { /// The message payload exceeds byte limit. PayloadTooLarge, /// No more messages can be queued for the channel during this commit cycle. @@ -101,13 +90,50 @@ decl_error! { /// Not authorized to send message NotAuthorized, } -} -decl_module! { - pub struct Module for enum Call where origin: T::Origin { - type Error = Error; - fn deposit_event() = default; + /// Interval between commitments + #[pallet::storage] + #[pallet::getter(fn interval)] + pub(super) type Interval = StorageValue<_, T::BlockNumber, ValueQuery>; + + /// Messages waiting to be committed. + #[pallet::storage] + pub(super) type MessageQueue = StorageValue<_, Vec, ValueQuery>; + /// Fee for accepting a message + #[pallet::storage] + #[pallet::getter(fn principal)] + pub(super) type Principal = StorageValue<_, T::AccountId, ValueQuery>; + + #[pallet::storage] + pub type Nonce = StorageValue<_, u64, ValueQuery>; + + #[pallet::genesis_config] + pub struct GenesisConfig { + pub interval: T::BlockNumber, + pub principal: T::AccountId, + } + + #[cfg(feature = "std")] + impl Default for GenesisConfig { + fn default() -> Self { + Self { + interval: Default::default(), + principal: Default::default(), + } + } + } + + #[pallet::genesis_build] + impl GenesisBuild for GenesisConfig { + fn build(&self) { + >::put(self.interval); + >::put(self.principal.clone()); + } + } + + #[pallet::hooks] + impl Hooks> for Pallet { // Generate a message commitment every [`Interval`] blocks. // // The commitment hash is included in an [`AuxiliaryDigestItem`] in the block header, @@ -119,101 +145,104 @@ decl_module! 
{ T::WeightInfo::on_initialize_non_interval() } } + } - #[weight = T::WeightInfo::set_principal()] - pub fn set_principal(origin, principal: ::Source) -> DispatchResult { + #[pallet::call] + impl Pallet { + #[pallet::weight(T::WeightInfo::set_principal())] + pub fn set_principal(origin: OriginFor, principal: ::Source) -> DispatchResult { T::SetPrincipalOrigin::ensure_origin(origin)?; let principal = T::Lookup::lookup(principal)?; >::put(principal); Ok(()) } } -} -impl Module { - - /// Submit message on the outbound channel - pub fn submit(who: &T::AccountId, target: H160, payload: &[u8]) -> DispatchResult { - ensure!( - *who == Self::principal(), - Error::::NotAuthorized, - ); - ensure!( - MessageQueue::decode_len().unwrap_or(0) < T::MaxMessagesPerCommit::get(), - Error::::QueueSizeLimitReached, - ); - ensure!( - payload.len() <= T::MaxMessagePayloadSize::get(), - Error::::PayloadTooLarge, - ); - - Nonce::try_mutate(|nonce| -> DispatchResult { - if let Some(v) = nonce.checked_add(1) { - *nonce = v; - } else { - return Err(Error::::Overflow.into()) - } - - MessageQueue::append( - Message { - target, - nonce: *nonce, - payload: payload.to_vec(), - }, + impl Pallet { + /// Submit message on the outbound channel + pub fn submit(who: &T::AccountId, target: H160, payload: &[u8]) -> DispatchResult { + ensure!( + *who == Self::principal(), + Error::::NotAuthorized, + ); + ensure!( + >::decode_len().unwrap_or(0) < T::MaxMessagesPerCommit::get() as usize, + Error::::QueueSizeLimitReached, + ); + ensure!( + payload.len() <= T::MaxMessagePayloadSize::get() as usize, + Error::::PayloadTooLarge, ); - >::deposit_event(Event::MessageAccepted(*nonce)); - Ok(()) - }) - } - fn commit() -> Weight { - let messages: Vec = MessageQueue::take(); - if messages.is_empty() { - return T::WeightInfo::on_initialize_no_messages(); + >::try_mutate(|nonce| -> DispatchResult { + if let Some(v) = nonce.checked_add(1) { + *nonce = v; + } else { + return Err(Error::::Overflow.into()) + } + + >::append( + Message { + target, + nonce: *nonce, + payload: payload.to_vec(), + }, + ); + Self::deposit_event(Event::MessageAccepted(*nonce)); + Ok(()) + }) } - let commitment_hash = Self::make_commitment_hash(&messages); - let average_payload_size = Self::average_payload_size(&messages); + fn commit() -> Weight { + let messages: Vec = >::take(); + if messages.is_empty() { + return T::WeightInfo::on_initialize_no_messages(); + } - let digest_item = AuxiliaryDigestItem::Commitment( - ChannelId::Basic, - commitment_hash.clone() - ).into(); - >::deposit_log(digest_item); + let commitment_hash = Self::make_commitment_hash(&messages); + let average_payload_size = Self::average_payload_size(&messages); - let key = Self::make_offchain_key(commitment_hash); - offchain_index::set(&*key, &messages.encode()); + let digest_item = AuxiliaryDigestItem::Commitment( + ChannelId::Basic, + commitment_hash.clone() + ).into(); + >::deposit_log(digest_item); - T::WeightInfo::on_initialize( - messages.len() as u32, - average_payload_size as u32 - ) - } + let key = Self::make_offchain_key(commitment_hash); + offchain_index::set(&*key, &messages.encode()); - fn make_commitment_hash(messages: &[Message]) -> H256 { - let messages: Vec = messages - .iter() - .map(|message| { - Token::Tuple(vec![ - Token::Address(message.target), - Token::Uint(message.nonce.into()), - Token::Bytes(message.payload.clone()) - ]) - }) - .collect(); - let input = ethabi::encode(&vec![Token::Array(messages)]); - ::Hashing::hash(&input) - } + T::WeightInfo::on_initialize( + 
messages.len() as u32, + average_payload_size as u32 + ) + } - fn average_payload_size(messages: &[Message]) -> usize { - let sum: usize = messages.iter() - .fold(0, |acc, x| acc + x.payload.len()); - // We overestimate message payload size rather than underestimate. - // So add 1 here to account for integer division truncation. - (sum / messages.len()).saturating_add(1) - } + fn make_commitment_hash(messages: &[Message]) -> H256 { + let messages: Vec = messages + .iter() + .map(|message| { + Token::Tuple(vec![ + Token::Address(message.target), + Token::Uint(message.nonce.into()), + Token::Bytes(message.payload.clone()) + ]) + }) + .collect(); + let input = ethabi::encode(&vec![Token::Array(messages)]); + ::Hashing::hash(&input) + } + + fn average_payload_size(messages: &[Message]) -> usize { + let sum: usize = messages.iter() + .fold(0, |acc, x| acc + x.payload.len()); + // We overestimate message payload size rather than underestimate. + // So add 1 here to account for integer division truncation. + (sum / messages.len()).saturating_add(1) + } - fn make_offchain_key(hash: H256) -> Vec { - (T::INDEXING_PREFIX, ChannelId::Basic, hash).encode() + fn make_offchain_key(hash: H256) -> Vec { + (T::INDEXING_PREFIX, ChannelId::Basic, hash).encode() + } } } + diff --git a/parachain/pallets/basic-channel/src/outbound/test.rs b/parachain/pallets/basic-channel/src/outbound/test.rs index f8d2b42a29dab..8552b0fde233b 100644 --- a/parachain/pallets/basic-channel/src/outbound/test.rs +++ b/parachain/pallets/basic-channel/src/outbound/test.rs @@ -1,5 +1,6 @@ use super::*; +use frame_support::traits::GenesisBuild; use sp_core::{H160, H256}; use frame_support::{ assert_ok, assert_noop, @@ -24,7 +25,7 @@ frame_support::construct_runtime!( UncheckedExtrinsic = UncheckedExtrinsic, { System: frame_system::{Pallet, Call, Storage, Event}, - BasicOutboundChannel: basic_outbound_channel::{Pallet, Call, Storage, Event}, + BasicOutboundChannel: basic_outbound_channel::{Pallet, Call, Config, Storage, Event}, } ); @@ -35,7 +36,7 @@ parameter_types! { pub const BlockHashCount: u64 = 250; } -impl system::Config for Test { +impl frame_system::Config for Test { type BaseCallFilter = (); type BlockWeights = (); type BlockLength = (); @@ -62,8 +63,8 @@ impl system::Config for Test { } parameter_types! 
{ - pub const MaxMessagePayloadSize: usize = 128; - pub const MaxMessagesPerCommit: usize = 5; + pub const MaxMessagePayloadSize: u64 = 128; + pub const MaxMessagesPerCommit: u64 = 5; } impl basic_outbound_channel::Config for Test { @@ -98,10 +99,10 @@ fn test_submit() { let who: AccountId = Keyring::Bob.into(); assert_ok!(BasicOutboundChannel::submit(&who, target, &vec![0, 1, 2])); - assert_eq!(Nonce::get(), 1); + assert_eq!(>::get(), 1); assert_ok!(BasicOutboundChannel::submit(&who, target, &vec![0, 1, 2])); - assert_eq!(Nonce::get(), 2); + assert_eq!(>::get(), 2); }); } @@ -130,7 +131,7 @@ fn test_submit_exceeds_payload_limit() { let who: AccountId = Keyring::Bob.into(); let max_payload_bytes = MaxMessagePayloadSize::get(); - let payload: Vec = (0..).take(max_payload_bytes + 1).collect(); + let payload: Vec = (0..).take(max_payload_bytes as usize + 1).collect(); assert_noop!( BasicOutboundChannel::submit(&who, target, payload.as_slice()), @@ -145,7 +146,7 @@ fn test_submit_fails_on_nonce_overflow() { let target = H160::zero(); let who: AccountId = Keyring::Bob.into(); - Nonce::set(u64::MAX); + >::set(u64::MAX); assert_noop!( BasicOutboundChannel::submit(&who, target, &vec![0, 1, 2]), Error::::Overflow, diff --git a/parachain/pallets/basic-channel/src/outbound/weights.rs b/parachain/pallets/basic-channel/src/outbound/weights.rs new file mode 100644 index 0000000000000..93b23e3a3344f --- /dev/null +++ b/parachain/pallets/basic-channel/src/outbound/weights.rs @@ -0,0 +1,15 @@ +use frame_support::weights::Weight; + +pub trait WeightInfo { + fn on_initialize(num_messages: u32, avg_payload_bytes: u32) -> Weight; + fn on_initialize_non_interval() -> Weight; + fn on_initialize_no_messages() -> Weight; + fn set_principal() -> Weight; +} + +impl WeightInfo for () { + fn on_initialize(_: u32, _: u32) -> Weight { 0 } + fn on_initialize_non_interval() -> Weight { 0 } + fn on_initialize_no_messages() -> Weight { 0 } + fn set_principal() -> Weight { 0 } +} diff --git a/parachain/pallets/dispatch/src/lib.rs b/parachain/pallets/dispatch/src/lib.rs index 6050270505d95..240442a22eb5b 100644 --- a/parachain/pallets/dispatch/src/lib.rs +++ b/parachain/pallets/dispatch/src/lib.rs @@ -1,7 +1,6 @@ #![cfg_attr(not(feature = "std"), no_std)] use frame_support::{ - decl_event, decl_module, decl_storage, dispatch::{Parameter, Dispatchable, DispatchResult}, traits::{EnsureOrigin, Contains}, weights::GetDispatchInfo, @@ -9,7 +8,6 @@ use frame_support::{ use sp_core::RuntimeDebug; -use frame_system::{self as system}; use sp_core::H160; use sp_std::prelude::*; @@ -18,11 +16,11 @@ use snowbridge_core::MessageDispatch; use codec::{Encode, Decode}; #[derive(Copy, Clone, PartialEq, Eq, Encode, Decode, RuntimeDebug)] -pub struct Origin(pub H160); +pub struct RawOrigin(pub H160); -impl From for Origin { - fn from(hash: H160) -> Origin { - Origin(hash) +impl From for RawOrigin { + fn from(hash: H160) -> RawOrigin { + RawOrigin(hash) } } @@ -30,7 +28,7 @@ pub struct EnsureEthereumAccount; impl EnsureOrigin for EnsureEthereumAccount where - OuterOrigin: Into> + From + OuterOrigin: Into> + From { type Success = H160; @@ -40,90 +38,104 @@ where #[cfg(feature = "runtime-benchmarks")] fn successful_origin() -> OuterOrigin { - OuterOrigin::from(Origin(H160::repeat_byte(2))) + OuterOrigin::from(RawOrigin(H160::repeat_byte(2))) } } -pub trait Config: system::Config { +pub use pallet::*; - /// The overarching event type. - type Event: From> + Into<::Event>; +#[frame_support::pallet] +pub mod pallet { - /// The overarching origin type. 
- type Origin: From; + use super::*; + use frame_support::pallet_prelude::*; + use frame_system::pallet_prelude::*; + + #[pallet::pallet] + #[pallet::generate_store(pub(super) trait Store)] + pub struct Pallet(_); + + #[pallet::config] + pub trait Config: frame_system::Config { + /// The overarching event type. + type Event: From> + IsType<::Event>; + + /// The overarching origin type. + type Origin: From; + + /// Id of the message. Whenever message is passed to the dispatch module, it emits + /// event with this id + dispatch result. + type MessageId: Parameter; + + /// The overarching dispatch call type. + type Call: Parameter + + GetDispatchInfo + + Dispatchable< + Origin = ::Origin, + PostInfo = frame_support::dispatch::PostDispatchInfo, + >; + + /// The pallet will filter all incoming calls right before they're dispatched. If this filter + /// rejects the call, special event (`Event::MessageRejected`) is emitted. + type CallFilter: Contains<::Call>; + } - /// Id of the message. Whenever message is passed to the dispatch module, it emits - /// event with this id + dispatch result. - type MessageId: Parameter; + #[pallet::hooks] + impl Hooks> for Pallet {} - /// The overarching dispatch call type. - type Call: Parameter - + GetDispatchInfo - + Dispatchable< - Origin = ::Origin, - PostInfo = frame_support::dispatch::PostDispatchInfo, - >; + #[pallet::call] + impl Pallet {} - /// The pallet will filter all incoming calls right before they're dispatched. If this filter - /// rejects the call, special event (`Event::MessageRejected`) is emitted. - type CallFilter: Contains<::Call>; -} - -decl_storage! { - trait Store for Module as Dispatch {} -} - -decl_event! { - /// Events for the Bridge module. - pub enum Event where ::MessageId { + #[pallet::event] + #[pallet::generate_deposit(pub(super) fn deposit_event)] + #[pallet::metadata(T::MessageId = "MessageId")] + pub enum Event { /// Message has been dispatched with given result. - MessageDispatched(MessageId, DispatchResult), + MessageDispatched(T::MessageId, DispatchResult), /// Message has been rejected - MessageRejected(MessageId), + MessageRejected(T::MessageId), /// We have failed to decode a Call from the message. - MessageDecodeFailed(MessageId), + MessageDecodeFailed(T::MessageId), } -} -decl_module! 
{ - pub struct Module for enum Call where origin: ::Origin { - fn deposit_event() = default; - } -} + #[pallet::origin] + pub type Origin = RawOrigin; -pub type MessageIdOf = ::MessageId; + pub type MessageIdOf = ::MessageId; -impl MessageDispatch> for Module { - fn dispatch(source: H160, id: MessageIdOf, payload: &[u8]) { - let call = match ::Call::decode(&mut &payload[..]) { - Ok(call) => call, - Err(_) => { - Self::deposit_event(RawEvent::MessageDecodeFailed(id)); + impl MessageDispatch> for Pallet { + fn dispatch(source: H160, id: MessageIdOf, payload: &[u8]) { + let call = match ::Call::decode(&mut &payload[..]) { + Ok(call) => call, + Err(_) => { + Self::deposit_event(Event::MessageDecodeFailed(id)); + return; + } + }; + + if !T::CallFilter::contains(&call) { + Self::deposit_event(Event::MessageRejected(id)); return; } - }; - - if !T::CallFilter::contains(&call) { - Self::deposit_event(RawEvent::MessageRejected(id)); - return; - } - let origin = Origin(source).into(); - let result = call.dispatch(origin); + let origin = RawOrigin(source).into(); + let result = call.dispatch(origin); - Self::deposit_event(RawEvent::MessageDispatched( - id, - result.map(drop).map_err(|e| e.error), - )); - } + Self::deposit_event(Event::MessageDispatched( + id, + result.map(drop).map_err(|e| e.error), + )); + } - #[cfg(feature = "runtime-benchmarks")] - fn successful_dispatch_event(id: MessageIdOf) -> Option<::Event> { - let event: ::Event = RawEvent::MessageDispatched(id, Ok(())).into(); - Some(event.into()) + #[cfg(feature = "runtime-benchmarks")] + fn successful_dispatch_event(id: MessageIdOf) -> Option<::Event> { + let event: ::Event = Event::MessageDispatched(id, Ok(())).into(); + Some(event.into()) + } } } + #[cfg(test)] mod tests { use super::*; diff --git a/parachain/pallets/dot-app/src/benchmarking.rs b/parachain/pallets/dot-app/src/benchmarking.rs index 2acb8e7b67258..c1a1a134c9c29 100644 --- a/parachain/pallets/dot-app/src/benchmarking.rs +++ b/parachain/pallets/dot-app/src/benchmarking.rs @@ -1,7 +1,4 @@ //! DotApp pallet benchmarking - -#![cfg(feature = "runtime-benchmarks")] - use super::*; use frame_system::RawOrigin; @@ -27,7 +24,7 @@ benchmarks! { // The amount is chosen such that balance - amount < existential_deposit // so that the account is reaped let amount = existential_deposit * 9u32.into() + 1u32.into(); - + T::Currency::make_free_balance_be(&caller, balance); T::Currency::make_free_balance_be(&lock_account, 0u32.into()); @@ -51,7 +48,7 @@ benchmarks! { let balance = existential_deposit * 10u32.into(); let amount = existential_deposit * 8u32.into(); - + T::Currency::make_free_balance_be(&caller, balance); T::Currency::make_free_balance_be(&lock_account, 0u32.into()); @@ -77,11 +74,11 @@ benchmarks! 
{ let recipient: T::AccountId = account("recipient", 0, 0); let recipient_lookup: ::Source = T::Lookup::unlookup(recipient.clone()); let sender = H160::zero(); - + let balance = existential_deposit * 10u32.into(); let amount = existential_deposit * 8u32.into(); let amount_wrapped = wrap::(amount, T::Decimals::get()).unwrap(); - + T::Currency::make_free_balance_be(&lock_account, balance); let call = Call::::unlock(sender, recipient_lookup, amount_wrapped); diff --git a/parachain/pallets/dot-app/src/lib.rs b/parachain/pallets/dot-app/src/lib.rs index 9e1667eca702c..a5d55f5217ac9 100644 --- a/parachain/pallets/dot-app/src/lib.rs +++ b/parachain/pallets/dot-app/src/lib.rs @@ -1,10 +1,12 @@ #![cfg_attr(not(feature = "std"), no_std)] -mod benchmarking; mod payload; pub mod primitives; pub mod weights; +#[cfg(feature = "runtime-benchmarks")] +mod benchmarking; + #[cfg(test)] mod mock; @@ -68,19 +70,6 @@ pub mod pallet { type WeightInfo: WeightInfo; } - #[pallet::hooks] - impl Hooks> for Pallet { - // Verify that `T::Decimals` is 10 (DOT), or 12 (KSM) to guarantee - // safe conversions between native and wrapped DOT. - #[cfg(feature = "std")] - fn integrity_test() { - sp_io::TestExternalities::new_empty().execute_with(|| { - let allowed_decimals: &[u32] = &[10, 12]; - let decimals = T::Decimals::get(); - assert!(allowed_decimals.contains(&decimals)) - }); - } - } #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] @@ -105,23 +94,21 @@ pub mod pallet { } #[pallet::genesis_config] - pub struct GenesisConfig { + pub struct GenesisConfig { pub address: H160, - pub phantom: sp_std::marker::PhantomData, } #[cfg(feature = "std")] - impl Default for GenesisConfig { + impl Default for GenesisConfig { fn default() -> Self { Self { address: Default::default(), - phantom: Default::default(), } } } #[pallet::genesis_build] - impl GenesisBuild for GenesisConfig { + impl GenesisBuild for GenesisConfig { fn build(&self) { >::put(self.address); } diff --git a/parachain/pallets/dot-app/src/mock.rs b/parachain/pallets/dot-app/src/mock.rs index 19d4a94c8374a..7689b4f2c0329 100644 --- a/parachain/pallets/dot-app/src/mock.rs +++ b/parachain/pallets/dot-app/src/mock.rs @@ -30,7 +30,7 @@ frame_support::construct_runtime!( System: frame_system::{Pallet, Call, Storage, Event}, Balances: pallet_balances::{Pallet, Call, Storage, Event}, Dispatch: snowbridge_dispatch::{Pallet, Call, Storage, Origin, Event}, - DotApp: dot_app::{Pallet, Call, Config, Storage, Event}, + DotApp: dot_app::{Pallet, Call, Config, Storage, Event}, } ); @@ -129,12 +129,10 @@ pub fn new_tester() -> sp_io::TestExternalities { .build_storage::() .unwrap(); - let config: dot_app::GenesisConfig = dot_app::GenesisConfig { + let config = dot_app::GenesisConfig { address: H160::repeat_byte(1), - phantom: Default::default(), }; - - config.assimilate_storage(&mut storage).unwrap(); + GenesisBuild::::assimilate_storage(&config, &mut storage).unwrap(); let mut ext: sp_io::TestExternalities = storage.into(); ext.execute_with(|| System::set_block_number(1)); diff --git a/parachain/pallets/dot-app/src/tests.rs b/parachain/pallets/dot-app/src/tests.rs index dcd24116059bb..a9af371d8e37c 100644 --- a/parachain/pallets/dot-app/src/tests.rs +++ b/parachain/pallets/dot-app/src/tests.rs @@ -53,7 +53,7 @@ fn should_unlock() { assert_ok!( DotApp::unlock( - snowbridge_dispatch::Origin(peer_contract).into(), + snowbridge_dispatch::RawOrigin(peer_contract).into(), sender, recipient.clone(), amount_wrapped, @@ -83,7 +83,7 @@ fn 
should_not_unlock_on_bad_origin_failure() { assert_noop!( DotApp::unlock( - snowbridge_dispatch::Origin(unknown_peer_contract).into(), + snowbridge_dispatch::RawOrigin(unknown_peer_contract).into(), sender, recipient.clone(), amount_wrapped, diff --git a/parachain/pallets/erc20-app/src/benchmarking.rs b/parachain/pallets/erc20-app/src/benchmarking.rs index 361d05a6049b5..c86954e4469e5 100644 --- a/parachain/pallets/erc20-app/src/benchmarking.rs +++ b/parachain/pallets/erc20-app/src/benchmarking.rs @@ -1,7 +1,4 @@ //! ERC20App pallet benchmarking - -#![cfg(feature = "runtime-benchmarks")] - use super::*; use frame_system::RawOrigin; @@ -10,7 +7,7 @@ use frame_benchmarking::{account, benchmarks, whitelisted_caller, impl_benchmark use sp_core::H160; #[allow(unused_imports)] -use crate::Module as ERC20App; +use crate::Pallet as ERC20App; benchmarks! { // Benchmark `burn` extrinsic under worst case conditions: @@ -28,13 +25,13 @@ benchmarks! { verify { assert_eq!(T::Assets::balance(AssetId::Token(token), &caller), U256::zero()); } - + // Benchmark `mint` extrinsic under worst case conditions: // * `mint` successfully adds amount to recipient account mint { let origin = T::CallOrigin::successful_origin(); if let Ok(caller) = T::CallOrigin::try_origin(origin.clone()) { - Address::put(caller); + >::put(caller); } else { return Err("Failed to extract caller address from origin"); } diff --git a/parachain/pallets/erc20-app/src/lib.rs b/parachain/pallets/erc20-app/src/lib.rs index 57c8e42d27e41..4618a76de7d14 100644 --- a/parachain/pallets/erc20-app/src/lib.rs +++ b/parachain/pallets/erc20-app/src/lib.rs @@ -16,13 +16,23 @@ //! #![cfg_attr(not(feature = "std"), no_std)] -use frame_system::{self as system, ensure_signed}; +mod payload; +mod weights; + +#[cfg(feature = "runtime-benchmarks")] +mod benchmarking; + +#[cfg(test)] +mod mock; + +#[cfg(test)] +mod tests; + +use frame_system::{ensure_signed}; use frame_support::{ - decl_error, decl_event, decl_module, decl_storage, dispatch::{DispatchError, DispatchResult}, traits::EnsureOrigin, transactional, - weights::Weight, }; use sp_runtime::traits::StaticLookup; use sp_std::prelude::*; @@ -30,77 +40,87 @@ use sp_core::{H160, U256}; use snowbridge_core::{ChannelId, OutboundRouter, AssetId, MultiAsset}; -mod payload; use payload::OutboundPayload; +pub use weights::WeightInfo; -mod benchmarking; +pub use pallet::*; -#[cfg(test)] -mod mock; +#[frame_support::pallet] +pub mod pallet { -#[cfg(test)] -mod tests; - -/// Weight functions needed for this pallet. -pub trait WeightInfo { - fn burn() -> Weight; - fn mint() -> Weight; -} + use super::*; -impl WeightInfo for () { - fn burn() -> Weight { 0 } - fn mint() -> Weight { 0 } -} + use frame_support::pallet_prelude::*; + use frame_system::pallet_prelude::*; -pub trait Config: system::Config { - type Event: From> + Into<::Event>; + #[pallet::pallet] + #[pallet::generate_store(pub(super) trait Store)] + pub struct Pallet(_); - type Assets: MultiAsset<::AccountId>; + #[pallet::config] + pub trait Config: frame_system::Config { + type Event: From> + IsType<::Event>; - type OutboundRouter: OutboundRouter; + type Assets: MultiAsset<::AccountId>; - type CallOrigin: EnsureOrigin; + type OutboundRouter: OutboundRouter; - type WeightInfo: WeightInfo; -} + type CallOrigin: EnsureOrigin; -decl_storage! { - trait Store for Module as Erc20Module { - /// Address of the peer application on the Ethereum side. - Address get(fn address) config(): H160; + type WeightInfo: WeightInfo; } -} -decl_event! 
{ - /// Events for the ERC20 module. - pub enum Event - where - AccountId = ::AccountId, - { - Burned(H160, AccountId, H160, U256), - Minted(H160, H160, AccountId, U256), - } -} + #[pallet::hooks] + impl Hooks> for Pallet {} -decl_error! { - pub enum Error for Module { - /// The submitted payload could not be decoded. - InvalidPayload, + + #[pallet::event] + #[pallet::generate_deposit(pub(super) fn deposit_event)] + #[pallet::metadata(T::AccountId = "AccountId")] + pub enum Event { + Burned(H160, T::AccountId, H160, U256), + Minted(H160, H160, T::AccountId, U256), } -} -decl_module! { + #[pallet::storage] + #[pallet::getter(fn address)] + pub(super) type Address = StorageValue<_, H160, ValueQuery>; - pub struct Module for enum Call where origin: T::Origin { + #[pallet::error] + pub enum Error {} + + #[pallet::genesis_config] + pub struct GenesisConfig { + pub address: H160, + } - type Error = Error; + #[cfg(feature = "std")] + impl Default for GenesisConfig { + fn default() -> Self { + Self { + address: Default::default(), + } + } + } - fn deposit_event() = default; + #[pallet::genesis_build] + impl GenesisBuild for GenesisConfig { + fn build(&self) { + >::put(self.address); + } + } - /// Burn an ERC20 token balance - #[weight = T::WeightInfo::burn()] + #[pallet::call] + impl Pallet { + #[pallet::weight(T::WeightInfo::burn())] #[transactional] - pub fn burn(origin, channel_id: ChannelId, token: H160, recipient: H160, amount: U256) -> DispatchResult { + pub fn burn( + origin: OriginFor, + channel_id: ChannelId, + token: H160, + recipient: H160, + amount: U256 + ) -> DispatchResult { let who = ensure_signed(origin)?; T::Assets::withdraw(AssetId::Token(token), &who, amount)?; @@ -112,26 +132,34 @@ decl_module! { amount: amount }; - T::OutboundRouter::submit(channel_id, &who, Address::get(), &message.encode())?; - Self::deposit_event(RawEvent::Burned(token, who.clone(), recipient, amount)); + T::OutboundRouter::submit(channel_id, &who, >::get(), &message.encode())?; + Self::deposit_event(Event::Burned(token, who.clone(), recipient, amount)); Ok(()) } - #[weight = T::WeightInfo::mint()] + #[pallet::weight(T::WeightInfo::mint())] #[transactional] - pub fn mint(origin, token: H160, sender: H160, recipient: ::Source, amount: U256) -> DispatchResult { + pub fn mint( + origin: OriginFor, + token: H160, + sender: H160, + recipient: ::Source, + amount: U256 + ) -> DispatchResult { let who = T::CallOrigin::ensure_origin(origin)?; - if who != Address::get() { + if who != >::get() { return Err(DispatchError::BadOrigin.into()); } let recipient = T::Lookup::lookup(recipient)?; T::Assets::deposit(AssetId::Token(token), &recipient, amount)?; - Self::deposit_event(RawEvent::Minted(token, sender, recipient, amount)); + Self::deposit_event(Event::Minted(token, sender, recipient, amount)); Ok(()) } - } } + + + diff --git a/parachain/pallets/erc20-app/src/mock.rs b/parachain/pallets/erc20-app/src/mock.rs index a24f6819bc58f..19b224a9e1b9a 100644 --- a/parachain/pallets/erc20-app/src/mock.rs +++ b/parachain/pallets/erc20-app/src/mock.rs @@ -1,6 +1,7 @@ // Mock runtime use sp_std::marker::PhantomData; +use frame_support::traits::GenesisBuild; use sp_core::{H160, H256}; use frame_support::{ parameter_types, @@ -109,7 +110,7 @@ pub fn new_tester() -> sp_io::TestExternalities { let config = erc20_app::GenesisConfig { address: H160::repeat_byte(1), }; - config.assimilate_storage(&mut storage).unwrap(); + GenesisBuild::::assimilate_storage(&config, &mut storage).unwrap(); let mut ext: sp_io::TestExternalities = 
storage.into(); ext.execute_with(|| System::set_block_number(1)); diff --git a/parachain/pallets/erc20-app/src/tests.rs b/parachain/pallets/erc20-app/src/tests.rs index 3401bbddc12af..9e5c398235481 100644 --- a/parachain/pallets/erc20-app/src/tests.rs +++ b/parachain/pallets/erc20-app/src/tests.rs @@ -1,11 +1,9 @@ -use crate::mock::{new_tester, Event, System, AccountId, Origin, Assets, Erc20App}; +use crate::mock::{Test, new_tester, Event, System, AccountId, Origin, Assets, Erc20App}; use frame_support::{assert_ok, assert_noop, dispatch::DispatchError}; use sp_keyring::AccountKeyring as Keyring; use sp_core::H160; use snowbridge_core::{ChannelId, AssetId, MultiAsset}; -use crate::RawEvent; - fn last_event() -> Event { System::events().pop().expect("Event expected").event } @@ -20,7 +18,7 @@ fn mints_after_handling_ethereum_event() { let amount = 10; assert_ok!( Erc20App::mint( - snowbridge_dispatch::Origin(peer_contract).into(), + snowbridge_dispatch::RawOrigin(peer_contract).into(), token, sender, recipient.clone(), @@ -30,7 +28,7 @@ fn mints_after_handling_ethereum_event() { assert_eq!(Assets::balance(AssetId::Token(token), &recipient), amount.into()); assert_eq!( - Event::Erc20App(RawEvent::Minted(token, sender, recipient, amount.into())), + Event::Erc20App(crate::Event::::Minted(token, sender, recipient, amount.into())), last_event() ); }); @@ -52,7 +50,7 @@ fn burn_should_emit_bridge_event() { 20.into())); assert_eq!( - Event::Erc20App(RawEvent::Burned(token_id, bob, recipient, 20.into())), + Event::Erc20App(crate::Event::::Burned(token_id, bob, recipient, 20.into())), last_event() ); }); diff --git a/parachain/pallets/erc20-app/src/weights.rs b/parachain/pallets/erc20-app/src/weights.rs new file mode 100644 index 0000000000000..28f8eba6776e8 --- /dev/null +++ b/parachain/pallets/erc20-app/src/weights.rs @@ -0,0 +1,11 @@ +use frame_support::weights::Weight; + +pub trait WeightInfo { + fn burn() -> Weight; + fn mint() -> Weight; +} + +impl WeightInfo for () { + fn burn() -> Weight { 0 } + fn mint() -> Weight { 0 } +} diff --git a/parachain/pallets/erc721-app/src/lib.rs b/parachain/pallets/erc721-app/src/lib.rs index 2fc0694e9d7b9..dc77379d40fe0 100644 --- a/parachain/pallets/erc721-app/src/lib.rs +++ b/parachain/pallets/erc721-app/src/lib.rs @@ -120,10 +120,7 @@ pub mod module { #[pallet::genesis_build] impl GenesisBuild for GenesisConfig { fn build(&self) { - Address::::try_mutate(|addr| -> Result { - *addr = self.address; - Ok(*addr) - }).expect("Setting address cannot fail during genesis"); + >::put(self.address); } } diff --git a/parachain/pallets/erc721-app/src/tests.rs b/parachain/pallets/erc721-app/src/tests.rs index 1f27c8618a28e..e2e0b42d3d930 100644 --- a/parachain/pallets/erc721-app/src/tests.rs +++ b/parachain/pallets/erc721-app/src/tests.rs @@ -22,7 +22,7 @@ fn mints_after_handling_ethereum_event() { let recipient: AccountId = Keyring::Bob.into(); assert_ok!(Erc721App::mint( - snowbridge_dispatch::Origin(peer_contract).into(), + snowbridge_dispatch::RawOrigin(peer_contract).into(), sender, recipient.clone(), token_contract, diff --git a/parachain/pallets/eth-app/src/benchmarking.rs b/parachain/pallets/eth-app/src/benchmarking.rs index 028a3fd98f498..98c4d7eda7070 100644 --- a/parachain/pallets/eth-app/src/benchmarking.rs +++ b/parachain/pallets/eth-app/src/benchmarking.rs @@ -1,7 +1,4 @@ //! 
ETHApp pallet benchmarking - -#![cfg(feature = "runtime-benchmarks")] - use super::*; use frame_system::RawOrigin; @@ -10,7 +7,7 @@ use frame_benchmarking::{account, benchmarks, whitelisted_caller, impl_benchmark use sp_core::H160; #[allow(unused_imports)] -use crate::Module as ETHApp; +use crate::Pallet as ETHApp; benchmarks! { // Benchmark `burn` extrinsic under worst case conditions: @@ -27,13 +24,13 @@ benchmarks! { verify { assert_eq!(T::Asset::balance(&caller), U256::zero()); } - + // Benchmark `mint` extrinsic under worst case conditions: // * `mint` successfully adds amount to recipient account mint { let origin = T::CallOrigin::successful_origin(); if let Ok(caller) = T::CallOrigin::try_origin(origin.clone()) { - Address::put(caller); + >::put(caller); } else { return Err("Failed to extract caller address from origin"); } diff --git a/parachain/pallets/eth-app/src/lib.rs b/parachain/pallets/eth-app/src/lib.rs index 8419ce32874a5..b982747598be2 100644 --- a/parachain/pallets/eth-app/src/lib.rs +++ b/parachain/pallets/eth-app/src/lib.rs @@ -16,13 +16,23 @@ //! #![cfg_attr(not(feature = "std"), no_std)] -use frame_system::{self as system, ensure_signed}; +mod payload; +pub mod weights; + +#[cfg(feature = "runtime-benchmarks")] +mod benchmarking; + +#[cfg(test)] +mod mock; + +#[cfg(test)] +mod tests; + +use frame_system::ensure_signed; use frame_support::{ - decl_error, decl_event, decl_module, decl_storage, dispatch::{DispatchError, DispatchResult}, traits::EnsureOrigin, transactional, - weights::Weight, }; use sp_runtime::traits::StaticLookup; use sp_std::prelude::*; @@ -30,76 +40,86 @@ use sp_core::{H160, U256}; use snowbridge_core::{ChannelId, SingleAsset, OutboundRouter}; -mod payload; use payload::OutboundPayload; +pub use weights::WeightInfo; -mod benchmarking; +pub use pallet::*; -#[cfg(test)] -mod mock; +#[frame_support::pallet] +pub mod pallet { -#[cfg(test)] -mod tests; + use super::*; -/// Weight functions needed for this pallet. -pub trait WeightInfo { - fn burn() -> Weight; - fn mint() -> Weight; -} + use frame_support::pallet_prelude::*; + use frame_system::pallet_prelude::*; -impl WeightInfo for () { - fn burn() -> Weight { 0 } - fn mint() -> Weight { 0 } -} - -pub trait Config: system::Config { - type Event: From> + Into<::Event>; + #[pallet::pallet] + #[pallet::generate_store(pub(super) trait Store)] + pub struct Pallet(_); - type Asset: SingleAsset<::AccountId>; + #[pallet::config] + pub trait Config: frame_system::Config { + type Event: From> + IsType<::Event>; - type OutboundRouter: OutboundRouter; + type Asset: SingleAsset<::AccountId>; - type CallOrigin: EnsureOrigin; + type OutboundRouter: OutboundRouter; - type WeightInfo: WeightInfo; -} + type CallOrigin: EnsureOrigin; -decl_storage! { - trait Store for Module as EthModule { - /// Address of the peer application on the Ethereum side. - Address get(fn address) config(): H160; + type WeightInfo: WeightInfo; } -} -decl_event!( - /// Events for the ETH module. - pub enum Event - where - AccountId = ::AccountId - { - Burned(AccountId, H160, U256), - Minted(H160, AccountId, U256), - } -); + #[pallet::hooks] + impl Hooks> for Pallet {} -decl_error! { - pub enum Error for Module { - /// The submitted payload could not be decoded. - InvalidPayload, + #[pallet::event] + #[pallet::generate_deposit(pub(super) fn deposit_event)] + #[pallet::metadata(T::AccountId = "AccountId")] + pub enum Event { + Burned(T::AccountId, H160, U256), + Minted(H160, T::AccountId, U256), } -} -decl_module! 
{ - pub struct Module for enum Call where origin: T::Origin { + #[pallet::storage] + #[pallet::getter(fn address)] + pub(super) type Address = StorageValue<_, H160, ValueQuery>; + + #[pallet::error] + pub enum Error {} + + #[pallet::genesis_config] + pub struct GenesisConfig { + pub address: H160, + } - type Error = Error; + #[cfg(feature = "std")] + impl Default for GenesisConfig { + fn default() -> Self { + Self { + address: Default::default(), + } + } + } - fn deposit_event() = default; + #[pallet::genesis_build] + impl GenesisBuild for GenesisConfig { + fn build(&self) { + >::put(self.address); + } + } + #[pallet::call] + impl Pallet { // Users should burn their holdings to release funds on the Ethereum side - #[weight = T::WeightInfo::burn()] + #[pallet::weight(T::WeightInfo::burn())] #[transactional] - pub fn burn(origin, channel_id: ChannelId, recipient: H160, amount: U256) -> DispatchResult { + pub fn burn( + origin: OriginFor, + channel_id: ChannelId, + recipient: H160, + amount: U256 + ) -> DispatchResult { let who = ensure_signed(origin)?; T::Asset::withdraw(&who, amount)?; @@ -110,25 +130,32 @@ decl_module! { amount: amount }; - T::OutboundRouter::submit(channel_id, &who, Address::get(), &message.encode())?; - Self::deposit_event(RawEvent::Burned(who.clone(), recipient, amount)); + T::OutboundRouter::submit(channel_id, &who, >::get(), &message.encode())?; + Self::deposit_event(Event::Burned(who.clone(), recipient, amount)); Ok(()) } - #[weight = T::WeightInfo::mint()] + #[pallet::weight(T::WeightInfo::mint())] #[transactional] - pub fn mint(origin, sender: H160, recipient: ::Source, amount: U256) -> DispatchResult { + pub fn mint( + origin: OriginFor, + sender: H160, + recipient: ::Source, + amount: U256 + ) -> DispatchResult { let who = T::CallOrigin::ensure_origin(origin)?; - if who != Address::get() { + if who != >::get() { return Err(DispatchError::BadOrigin.into()); } let recipient = T::Lookup::lookup(recipient)?; T::Asset::deposit(&recipient, amount)?; - Self::deposit_event(RawEvent::Minted(sender, recipient.clone(), amount)); + Self::deposit_event(Event::Minted(sender, recipient.clone(), amount)); Ok(()) } } + } + diff --git a/parachain/pallets/eth-app/src/mock.rs b/parachain/pallets/eth-app/src/mock.rs index f3d1a3d8c652b..85fdc2201492c 100644 --- a/parachain/pallets/eth-app/src/mock.rs +++ b/parachain/pallets/eth-app/src/mock.rs @@ -1,6 +1,7 @@ use sp_std::marker::PhantomData; // Mock runtime +use frame_support::traits::GenesisBuild; use sp_core::{H160, H256}; use frame_support::{ parameter_types, @@ -112,7 +113,7 @@ pub fn new_tester() -> sp_io::TestExternalities { let config = eth_app::GenesisConfig { address: H160::repeat_byte(1), }; - config.assimilate_storage(&mut storage).unwrap(); + GenesisBuild::::assimilate_storage(&config, &mut storage).unwrap(); let mut ext: sp_io::TestExternalities = storage.into(); ext.execute_with(|| System::set_block_number(1)); diff --git a/parachain/pallets/eth-app/src/tests.rs b/parachain/pallets/eth-app/src/tests.rs index 0f3b65bf8ffc5..69d5c9f00d557 100644 --- a/parachain/pallets/eth-app/src/tests.rs +++ b/parachain/pallets/eth-app/src/tests.rs @@ -1,8 +1,7 @@ -use crate::mock::{new_tester, AccountId, Origin, Event, System, Asset, EthApp}; +use crate::mock::{Test, new_tester, AccountId, Origin, Event, System, Asset, EthApp}; use frame_support::{assert_ok, assert_noop, dispatch::DispatchError}; use sp_keyring::AccountKeyring as Keyring; use sp_core::H160; -use crate::RawEvent; use snowbridge_core::{SingleAsset, ChannelId}; @@ -19,7 
+18,7 @@ fn mints_after_handling_ethereum_event() { let amount = 10; assert_ok!( EthApp::mint( - snowbridge_dispatch::Origin(peer_contract).into(), + snowbridge_dispatch::RawOrigin(peer_contract).into(), sender, recipient.clone(), amount.into() @@ -28,7 +27,7 @@ fn mints_after_handling_ethereum_event() { assert_eq!(Asset::balance(&recipient), amount.into()); assert_eq!( - Event::EthApp(RawEvent::Minted(sender, recipient, amount.into())), + Event::EthApp(crate::Event::::Minted(sender, recipient, amount.into())), last_event() ); }); @@ -48,7 +47,7 @@ fn burn_should_emit_bridge_event() { 20.into())); assert_eq!( - Event::EthApp(RawEvent::Burned(bob, recipient, 20.into())), + Event::EthApp(crate::Event::::Burned(bob, recipient, 20.into())), last_event() ); }); diff --git a/parachain/pallets/eth-app/src/weights.rs b/parachain/pallets/eth-app/src/weights.rs new file mode 100644 index 0000000000000..28f8eba6776e8 --- /dev/null +++ b/parachain/pallets/eth-app/src/weights.rs @@ -0,0 +1,11 @@ +use frame_support::weights::Weight; + +pub trait WeightInfo { + fn burn() -> Weight; + fn mint() -> Weight; +} + +impl WeightInfo for () { + fn burn() -> Weight { 0 } + fn mint() -> Weight { 0 } +} diff --git a/parachain/pallets/ethereum-light-client/src/benchmarking/mod.rs b/parachain/pallets/ethereum-light-client/src/benchmarking/mod.rs index c7bf2911f2973..300f0a1f4910a 100644 --- a/parachain/pallets/ethereum-light-client/src/benchmarking/mod.rs +++ b/parachain/pallets/ethereum-light-client/src/benchmarking/mod.rs @@ -1,14 +1,11 @@ //! EthereumLightClient pallet benchmarking - -#![cfg(feature = "runtime-benchmarks")] - use super::*; use frame_system::RawOrigin; use frame_benchmarking::{benchmarks, whitelisted_caller, impl_benchmark_test_suite}; #[allow(unused_imports)] -use crate::Module as EthereumLightClient; +use crate::Pallet as EthereumLightClient; mod data; @@ -18,16 +15,16 @@ mod data; /// contain strictly increasing block numbers. const RESERVED_FOR_PRUNING: usize = HEADERS_TO_PRUNE_IN_SINGLE_IMPORT as usize; -fn get_best_block() -> (EthereumHeaderId, U256) { - BestBlock::get() +fn get_best_block() -> (EthereumHeaderId, U256) { + >::get() } -fn get_blocks_to_prune() -> PruningRange { - BlocksToPrune::get() +fn get_blocks_to_prune() -> PruningRange { + >::get() } -fn set_blocks_to_prune(oldest_unpruned: u64, oldest_to_keep: u64) { - BlocksToPrune::put(PruningRange { +fn set_blocks_to_prune(oldest_unpruned: u64, oldest_to_keep: u64) { + >::put(PruningRange { oldest_unpruned_block: oldest_unpruned, oldest_block_to_keep: oldest_to_keep, }); @@ -36,7 +33,7 @@ fn set_blocks_to_prune(oldest_unpruned: u64, oldest_to_keep: u64) { fn assert_header_pruned(hash: H256, number: u64) { assert!(Headers::::get(hash).is_none()); - let hashes_at_number = HeadersByNumber::get(number); + let hashes_at_number = >::get(number); assert!( hashes_at_number.is_none() || !hashes_at_number.unwrap().contains(&hash), ); @@ -52,7 +49,7 @@ benchmarks! { // * Pruned headers will come from distinct block numbers so that we have the max // number of HeaderByNumber::take calls. // * The last pruned header will have siblings that we don't prune and have to - // re-insert using HeadersByNumber::insert. + // re-insert using >::insert. import_header { let caller: T::AccountId = whitelisted_caller(); let descendants_until_final = T::DescendantsUntilFinalized::get(); @@ -69,7 +66,7 @@ benchmarks! 
{ descendants_until_final, )?; - set_blocks_to_prune( + set_blocks_to_prune::( headers[0].number, headers[next_finalized_idx].number, ); @@ -79,7 +76,7 @@ benchmarks! { // Check that the best header has been updated let best = &headers[next_tip_idx]; assert_eq!( - get_best_block().0, + get_best_block::().0, EthereumHeaderId { number: best.number, hash: best.compute_hash(), @@ -93,7 +90,7 @@ benchmarks! { .for_each(|h| assert_header_pruned::(h.compute_hash(), h.number)); let last_pruned_sibling = &headers[RESERVED_FOR_PRUNING]; assert_eq!( - get_blocks_to_prune().oldest_unpruned_block, + get_blocks_to_prune::().oldest_unpruned_block, last_pruned_sibling.number, ); } @@ -107,7 +104,7 @@ benchmarks! { // * Pruned headers will come from distinct block numbers so that we have the max // number of HeaderByNumber::take calls. // * The last pruned header will have siblings that we don't prune and have to - // re-insert using HeadersByNumber::insert. + // re-insert using >::insert. import_header_not_new_finalized_with_max_prune { let caller: T::AccountId = whitelisted_caller(); let descendants_until_final = T::DescendantsUntilFinalized::get(); @@ -129,7 +126,7 @@ benchmarks! { descendants_until_final, )?; - set_blocks_to_prune( + set_blocks_to_prune::( headers[0].number, headers[finalized_idx].number, ); @@ -139,7 +136,7 @@ benchmarks! { // Check that the best header has been updated let best = &headers[next_tip_idx]; assert_eq!( - get_best_block().0, + get_best_block::().0, EthereumHeaderId { number: best.number, hash: best.compute_hash(), @@ -153,7 +150,7 @@ benchmarks! { .for_each(|h| assert_header_pruned::(h.compute_hash(), h.number)); let last_pruned_sibling = &headers[RESERVED_FOR_PRUNING]; assert_eq!( - get_blocks_to_prune().oldest_unpruned_block, + get_blocks_to_prune::().oldest_unpruned_block, last_pruned_sibling.number, ); } @@ -180,7 +177,7 @@ benchmarks! { descendants_until_final, )?; - set_blocks_to_prune( + set_blocks_to_prune::( headers[0].number, headers[0].number + 1, ); @@ -190,7 +187,7 @@ benchmarks! { // Check that the best header has been updated let best = &headers[next_tip_idx]; assert_eq!( - get_best_block().0, + get_best_block::().0, EthereumHeaderId { number: best.number, hash: best.compute_hash(), @@ -201,7 +198,7 @@ benchmarks! { let oldest_header = &headers[0]; assert_header_pruned::(oldest_header.compute_hash(), oldest_header.number); assert_eq!( - get_blocks_to_prune().oldest_unpruned_block, + get_blocks_to_prune::().oldest_unpruned_block, oldest_header.number + 1, ); } @@ -233,7 +230,7 @@ benchmarks! { descendants_until_final, )?; - set_blocks_to_prune( + set_blocks_to_prune::( headers[0].number, headers[0].number + 1, ); @@ -243,7 +240,7 @@ benchmarks! { // Check that the best header has been updated let best = &headers[next_tip_idx]; assert_eq!( - get_best_block().0, + get_best_block::().0, EthereumHeaderId { number: best.number, hash: best.compute_hash(), @@ -254,7 +251,7 @@ benchmarks! 
{ let oldest_header = &headers[0]; assert_header_pruned::(oldest_header.compute_hash(), oldest_header.number); assert_eq!( - get_blocks_to_prune().oldest_unpruned_block, + get_blocks_to_prune::().oldest_unpruned_block, oldest_header.number + 1, ); } diff --git a/parachain/pallets/ethereum-light-client/src/lib.rs b/parachain/pallets/ethereum-light-client/src/lib.rs index 8b6cce2bfea99..0ac98c2829d6b 100644 --- a/parachain/pallets/ethereum-light-client/src/lib.rs +++ b/parachain/pallets/ethereum-light-client/src/lib.rs @@ -19,11 +19,22 @@ #![allow(unused_variables)] #![cfg_attr(not(feature = "std"), no_std)] -use frame_system::{self as system, ensure_signed}; +mod weights; + +#[cfg(feature = "runtime-benchmarks")] +mod benchmarking; + +#[cfg(test)] +mod mock; + +#[cfg(test)] +mod tests; + +use frame_system::ensure_signed; use frame_support::{ - decl_module, decl_storage, decl_event, decl_error, ensure, log, dispatch::{DispatchError, DispatchResult}, - traits::Get, weights::Weight, + traits::Get, + log, }; use sp_runtime::RuntimeDebug; use sp_std::prelude::*; @@ -39,13 +50,7 @@ pub use snowbridge_ethereum::{ Header as EthereumHeader, difficulty::DifficultyConfig as EthereumDifficultyConfig, }; -mod benchmarking; - -#[cfg(test)] -mod mock; - -#[cfg(test)] -mod tests; +pub use weights::WeightInfo; /// Max number of finalized headers to keep. const FINALIZED_HEADERS_TO_KEEP: u64 = 50_000; @@ -78,78 +83,43 @@ struct PruningRange { pub oldest_block_to_keep: u64, } -/// Weight functions needed for this pallet. -pub trait WeightInfo { - fn import_header() -> Weight; - fn import_header_not_new_finalized_with_max_prune() -> Weight; - fn import_header_new_finalized_with_single_prune() -> Weight; - fn import_header_not_new_finalized_with_single_prune() -> Weight; -} - -impl WeightInfo for () { - fn import_header() -> Weight { 0 } - fn import_header_not_new_finalized_with_max_prune() -> Weight { 0 } - fn import_header_new_finalized_with_single_prune() -> Weight { 0 } - fn import_header_not_new_finalized_with_single_prune() -> Weight { 0 } -} - -pub trait Config: system::Config { - type Event: From + Into<::Event>; - /// The number of descendants, in the highest difficulty chain, a block - /// needs to have in order to be considered final. - type DescendantsUntilFinalized: Get; - /// Ethereum network parameters for header difficulty - type DifficultyConfig: Get; - /// Determines whether Ethash PoW is verified for headers - /// NOTE: Should only be false for dev - type VerifyPoW: Get; - /// Weight information for extrinsics in this pallet - type WeightInfo: WeightInfo; -} - -decl_storage! { - trait Store for Module as EthereumLightClient { - /// Best known block. - BestBlock: (EthereumHeaderId, U256); - /// Range of blocks that we want to prune. - BlocksToPrune: PruningRange; - /// Best finalized block. - FinalizedBlock: EthereumHeaderId; - /// Map of imported headers by hash. - Headers: map hasher(identity) H256 => Option>; - /// Map of imported header hashes by number. 
- HeadersByNumber: map hasher(blake2_128_concat) u64 => Option>; +pub use pallet::*; + +#[frame_support::pallet] +pub mod pallet { + + use super::*; + + use frame_support::pallet_prelude::*; + use frame_system::pallet_prelude::*; + + #[pallet::pallet] + #[pallet::generate_store(pub(super) trait Store)] + pub struct Pallet(_); + + #[pallet::config] + pub trait Config: frame_system::Config { + type Event: From> + IsType<::Event>; + /// The number of descendants, in the highest difficulty chain, a block + /// needs to have in order to be considered final. + #[pallet::constant] + type DescendantsUntilFinalized: Get; + /// Ethereum network parameters for header difficulty + #[pallet::constant] + type DifficultyConfig: Get; + /// Determines whether Ethash PoW is verified for headers + /// NOTE: Should only be false for dev + #[pallet::constant] + type VerifyPoW: Get; + /// Weight information for extrinsics in this pallet + type WeightInfo: WeightInfo; } - add_extra_genesis { - config(initial_header): EthereumHeader; - config(initial_difficulty): U256; + #[pallet::event] + pub enum Event {} - build(|config| { - let initial_header = &config.initial_header; - - Module::::initialize_storage( - vec![initial_header.clone()], - config.initial_difficulty, - 0, // descendants_until_final = 0 forces the initial header to be finalized - ).unwrap(); - - BlocksToPrune::put(PruningRange { - oldest_unpruned_block: initial_header.number, - oldest_block_to_keep: initial_header.number, - }); - }) - } -} - -decl_event!( - /// This module has no events - pub enum Event { - } -); - -decl_error! { - pub enum Error for Module { + #[pallet::error] + pub enum Error { /// Header is same height or older than finalized block (we don't support forks). AncientHeader, /// Header referenced in inclusion proof doesn't exist, e.g. because it's @@ -172,14 +142,64 @@ decl_error! { /// This should never be returned - indicates a bug Unknown, } -} -decl_module! { - pub struct Module for enum Call where origin: T::Origin { - type Error = Error; + #[pallet::hooks] + impl Hooks> for Pallet {} + + /// Best known block. + #[pallet::storage] + pub(super) type BestBlock = StorageValue<_, (EthereumHeaderId, U256), ValueQuery>; + + /// Range of blocks that we want to prune. + #[pallet::storage] + pub(super) type BlocksToPrune = StorageValue<_, PruningRange, ValueQuery>; + + /// Best finalized block. + #[pallet::storage] + pub(super) type FinalizedBlock = StorageValue<_, EthereumHeaderId, ValueQuery>; + + /// Map of imported headers by hash. + #[pallet::storage] + pub(super) type Headers = StorageMap<_, Identity, H256, StoredHeader, OptionQuery>; - fn deposit_event() = default; + /// Map of imported header hashes by number. 
+ #[pallet::storage] + pub(super) type HeadersByNumber = StorageMap<_, Twox64Concat, u64, Vec, OptionQuery>; + #[pallet::genesis_config] + pub struct GenesisConfig { + pub initial_header: EthereumHeader, + pub initial_difficulty: U256, + } + + #[cfg(feature = "std")] + impl Default for GenesisConfig { + fn default() -> Self { + Self { + initial_header: Default::default(), + initial_difficulty: Default::default(), + } + } + } + + #[pallet::genesis_build] + impl GenesisBuild for GenesisConfig { + fn build(&self) { + Pallet::::initialize_storage( + vec![self.initial_header.clone()], + self.initial_difficulty, + 0, // descendants_until_final = 0 forces the initial header to be finalized + ).unwrap(); + + >::put(PruningRange { + oldest_unpruned_block: self.initial_header.number, + oldest_block_to_keep: self.initial_header.number, + }); + } + } + + #[pallet::call] + impl Pallet { /// Import a single Ethereum PoW header. /// /// Note that this extrinsic has a very high weight. The weight is affected by the @@ -192,8 +212,12 @@ decl_module! { /// for each DAG node selected in the "hashimoto"-loop. /// - Iterating over ancestors: min `DescendantsUntilFinalized` reads to find the /// newly finalized ancestor of a header. - #[weight = T::WeightInfo::import_header()] - pub fn import_header(origin, header: EthereumHeader, proof: Vec) -> DispatchResult { + #[pallet::weight(T::WeightInfo::import_header())] + pub fn import_header( + origin: OriginFor, + header: EthereumHeader, + proof: Vec + ) -> DispatchResult { let sender = ensure_signed(origin)?; log::trace!( @@ -235,392 +259,398 @@ decl_module! { Ok(()) } } -} -impl Module { - // Validate an Ethereum header for import - fn validate_header_to_import(header: &EthereumHeader, proof: &[EthashProofData]) -> DispatchResult { - let hash = header.compute_hash(); - ensure!( - !Headers::::contains_key(hash), - Error::::DuplicateHeader, - ); - - let parent = Headers::::get(header.parent_hash) - .ok_or(Error::::MissingParentHeader)? - .header; - - let finalized_header_id = FinalizedBlock::get(); - ensure!( - header.number > finalized_header_id.number, - Error::::AncientHeader, - ); - - // This iterates over DescendantsUntilFinalized headers in both the worst and - // average case. Since we know that the parent header was imported successfully, - // we know that the newest finalized header is at most, and on average, - // DescendantsUntilFinalized headers before the parent. - let ancestor_at_finalized_number = ancestry::(header.parent_hash) - .find(|(_, ancestor)| ancestor.number == finalized_header_id.number); - // We must find a matching ancestor above since AncientHeader check ensures - // that iteration starts at or after the latest finalized block. 
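// Hedged sketch of the runtime side; the values and pallet path below are placeholders, not
// taken from this patch. Marking a `Get<_>` associated type with `#[pallet::constant]`, as the
// new Config trait above does for `DescendantsUntilFinalized`, `DifficultyConfig` and
// `VerifyPoW`, only exposes the value in the pallet metadata; the runtime still supplies it
// through `parameter_types!` as before:
use frame_support::parameter_types;

parameter_types! {
    pub const DescendantsUntilFinalized: u8 = 3;
    pub const VerifyPoW: bool = true;
}

// impl ethereum_light_client::Config for Runtime {
//     type Event = Event;
//     type DescendantsUntilFinalized = DescendantsUntilFinalized;
//     type DifficultyConfig = Difficulty; // a `Get<EthereumDifficultyConfig>` defined similarly
//     type VerifyPoW = VerifyPoW;
//     type WeightInfo = ();
// }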
- ensure!( - ancestor_at_finalized_number.is_some(), - Error::::Unknown, - ); - ensure!( - ancestor_at_finalized_number.unwrap().0 == finalized_header_id.hash, - Error::::HeaderOnStaleFork, - ); - - if !T::VerifyPoW::get() { - return Ok(()); - } + impl Pallet { + // Validate an Ethereum header for import + fn validate_header_to_import(header: &EthereumHeader, proof: &[EthashProofData]) -> DispatchResult { + let hash = header.compute_hash(); + ensure!( + !>::contains_key(hash), + Error::::DuplicateHeader, + ); - // See YellowPaper formula (50) in section 4.3.4 - ensure!( - header.gas_used <= header.gas_limit - && header.gas_limit < parent.gas_limit * 1025 / 1024 - && header.gas_limit > parent.gas_limit * 1023 / 1024 - && header.gas_limit >= 5000.into() - && header.timestamp > parent.timestamp - && header.number == parent.number + 1 - && header.extra_data.len() <= 32, - Error::::InvalidHeader, - ); - - log::trace!( - target: "ethereum-light-client", - "Header {} passed basic verification", - header.number - ); - - let difficulty_config = T::DifficultyConfig::get(); - let header_difficulty = calc_difficulty(&difficulty_config, header.timestamp, &parent) - .map_err(|_| Error::::InvalidHeader)?; - ensure!( - header.difficulty == header_difficulty, - Error::::InvalidHeader, - ); - - log::trace!( - target: "ethereum-light-client", - "Header {} passed difficulty verification", - header.number - ); - - let header_mix_hash = header.mix_hash().ok_or(Error::::InvalidHeader)?; - let header_nonce = header.nonce().ok_or(Error::::InvalidHeader)?; - let (mix_hash, result) = EthashProver::new().hashimoto_merkle( - header.compute_partial_hash(), - header_nonce, - header.number, - proof, - ).map_err(|_| Error::::InvalidHeader)?; - - log::trace!( - target: "ethereum-light-client", - "Header {} passed PoW verification", - header.number - ); - ensure!( - mix_hash == header_mix_hash - && U256::from(result.0) < ethash::cross_boundary(header.difficulty), - Error::::InvalidHeader, - ); - - Ok(()) - } + let parent = >::get(header.parent_hash) + .ok_or(Error::::MissingParentHeader)? + .header; - // Import a new, validated Ethereum header - fn import_validated_header(sender: &T::AccountId, header: &EthereumHeader) -> DispatchResult { - let hash = header.compute_hash(); - let stored_parent_header = Headers::::get(header.parent_hash) - .ok_or(Error::::MissingParentHeader)?; - let total_difficulty = stored_parent_header.total_difficulty - .checked_add(header.difficulty) - .ok_or("Total difficulty overflow")?; - let header_to_store = StoredHeader { - submitter: Some(sender.clone()), - header: header.clone(), - total_difficulty, - finalized: false, - }; - - Headers::::insert(hash, header_to_store); - - if HeadersByNumber::contains_key(header.number) { - HeadersByNumber::mutate(header.number, |option| -> DispatchResult { - if let Some(hashes) = option { - hashes.push(hash); - return Ok(()); - } - Err(Error::::Unknown.into()) - })?; - } else { - HeadersByNumber::insert(header.number, vec![hash]); + let finalized_header_id = >::get(); + ensure!( + header.number > finalized_header_id.number, + Error::::AncientHeader, + ); + + // This iterates over DescendantsUntilFinalized headers in both the worst and + // average case. Since we know that the parent header was imported successfully, + // we know that the newest finalized header is at most, and on average, + // DescendantsUntilFinalized headers before the parent. 
+ let ancestor_at_finalized_number = ancestry::(header.parent_hash) + .find(|(_, ancestor)| ancestor.number == finalized_header_id.number); + // We must find a matching ancestor above since AncientHeader check ensures + // that iteration starts at or after the latest finalized block. + ensure!( + ancestor_at_finalized_number.is_some(), + Error::::Unknown, + ); + ensure!( + ancestor_at_finalized_number.unwrap().0 == finalized_header_id.hash, + Error::::HeaderOnStaleFork, + ); + + if !T::VerifyPoW::get() { + return Ok(()); + } + + // See YellowPaper formula (50) in section 4.3.4 + ensure!( + header.gas_used <= header.gas_limit + && header.gas_limit < parent.gas_limit * 1025 / 1024 + && header.gas_limit > parent.gas_limit * 1023 / 1024 + && header.gas_limit >= 5000.into() + && header.timestamp > parent.timestamp + && header.number == parent.number + 1 + && header.extra_data.len() <= 32, + Error::::InvalidHeader, + ); + + log::trace!( + target: "ethereum-light-client", + "Header {} passed basic verification", + header.number + ); + + let difficulty_config = T::DifficultyConfig::get(); + let header_difficulty = calc_difficulty(&difficulty_config, header.timestamp, &parent) + .map_err(|_| Error::::InvalidHeader)?; + ensure!( + header.difficulty == header_difficulty, + Error::::InvalidHeader, + ); + + log::trace!( + target: "ethereum-light-client", + "Header {} passed difficulty verification", + header.number + ); + + let header_mix_hash = header.mix_hash().ok_or(Error::::InvalidHeader)?; + let header_nonce = header.nonce().ok_or(Error::::InvalidHeader)?; + let (mix_hash, result) = EthashProver::new().hashimoto_merkle( + header.compute_partial_hash(), + header_nonce, + header.number, + proof, + ).map_err(|_| Error::::InvalidHeader)?; + + log::trace!( + target: "ethereum-light-client", + "Header {} passed PoW verification", + header.number + ); + ensure!( + mix_hash == header_mix_hash + && U256::from(result.0) < ethash::cross_boundary(header.difficulty), + Error::::InvalidHeader, + ); + + Ok(()) } - // Maybe track new highest difficulty chain - let (_, highest_difficulty) = BestBlock::get(); - if total_difficulty > highest_difficulty || (!T::VerifyPoW::get() && total_difficulty == U256::zero()) { - let best_block_id = EthereumHeaderId { - number: header.number, - hash, + // Import a new, validated Ethereum header + fn import_validated_header(sender: &T::AccountId, header: &EthereumHeader) -> DispatchResult { + let hash = header.compute_hash(); + let stored_parent_header = >::get(header.parent_hash) + .ok_or(Error::::MissingParentHeader)?; + let total_difficulty = stored_parent_header.total_difficulty + .checked_add(header.difficulty) + .ok_or("Total difficulty overflow")?; + let header_to_store = StoredHeader { + submitter: Some(sender.clone()), + header: header.clone(), + total_difficulty, + finalized: false, }; - BestBlock::put((best_block_id, total_difficulty)); - - // Finalize blocks if possible - let finalized_block_id = FinalizedBlock::get(); - let new_finalized_block_id = Self::get_best_finalized_header( - &best_block_id, - &finalized_block_id, - )?; - if new_finalized_block_id != finalized_block_id { - FinalizedBlock::put(new_finalized_block_id); - Headers::::mutate(new_finalized_block_id.hash, |option| -> DispatchResult { - if let Some(header) = option { - header.finalized = true; + + >::insert(hash, header_to_store); + + if >::contains_key(header.number) { + >::mutate(header.number, |option| -> DispatchResult { + if let Some(hashes) = option { + hashes.push(hash); return Ok(()); } 
Err(Error::::Unknown.into()) })?; + } else { + >::insert(header.number, vec![hash]); } - // Clean up old headers - let pruning_range = BlocksToPrune::get(); - let new_pruning_range = Self::prune_header_range( - &pruning_range, - HEADERS_TO_PRUNE_IN_SINGLE_IMPORT, - new_finalized_block_id.number.saturating_sub(FINALIZED_HEADERS_TO_KEEP), - ); - if new_pruning_range != pruning_range { - BlocksToPrune::put(new_pruning_range); - } - } - - Ok(()) - } - - // Return the latest block that can be finalized based on the given - // highest difficulty chain and previously finalized block. - fn get_best_finalized_header( - best_block_id: &EthereumHeaderId, - finalized_block_id: &EthereumHeaderId, - ) -> Result { - let required_descendants = T::DescendantsUntilFinalized::get() as usize; - let maybe_newly_finalized_ancestor = ancestry::(best_block_id.hash) - .enumerate() - .find_map(|(i, pair)| if i < required_descendants { None } else { Some(pair) }); - - match maybe_newly_finalized_ancestor { - Some((hash, header)) => { - // The header is newly finalized if it is younger than the current - // finalized block - if header.number > finalized_block_id.number { - return Ok(EthereumHeaderId { - hash: hash, - number: header.number, - }); + // Maybe track new highest difficulty chain + let (_, highest_difficulty) = >::get(); + if total_difficulty > highest_difficulty || (!T::VerifyPoW::get() && total_difficulty == U256::zero()) { + let best_block_id = EthereumHeaderId { + number: header.number, + hash, + }; + >::put((best_block_id, total_difficulty)); + + // Finalize blocks if possible + let finalized_block_id = >::get(); + let new_finalized_block_id = Self::get_best_finalized_header( + &best_block_id, + &finalized_block_id, + )?; + if new_finalized_block_id != finalized_block_id { + >::put(new_finalized_block_id); + >::mutate(new_finalized_block_id.hash, |option| -> DispatchResult { + if let Some(header) = option { + header.finalized = true; + return Ok(()); + } + Err(Error::::Unknown.into()) + })?; } - if hash != finalized_block_id.hash { - return Err(Error::::Unknown.into()); + + // Clean up old headers + let pruning_range = >::get(); + let new_pruning_range = Self::prune_header_range( + &pruning_range, + HEADERS_TO_PRUNE_IN_SINGLE_IMPORT, + new_finalized_block_id.number.saturating_sub(FINALIZED_HEADERS_TO_KEEP), + ); + if new_pruning_range != pruning_range { + >::put(new_pruning_range); } - Ok(finalized_block_id.clone()) } - None => Ok(finalized_block_id.clone()) + + Ok(()) } - } - // Remove old headers, from oldest to newest, in the provided range - // (adjusted to `prune_end` if newer). Only up to `max_headers_to_prune` - // will be removed. - fn prune_header_range( - pruning_range: &PruningRange, - max_headers_to_prune: u64, - prune_end: u64, - ) -> PruningRange { - let mut new_pruning_range = pruning_range.clone(); - - // We can only increase this since pruning cannot be reverted... - if prune_end > new_pruning_range.oldest_block_to_keep { - new_pruning_range.oldest_block_to_keep = prune_end; + // Return the latest block that can be finalized based on the given + // highest difficulty chain and previously finalized block. 
+ fn get_best_finalized_header( + best_block_id: &EthereumHeaderId, + finalized_block_id: &EthereumHeaderId, + ) -> Result { + let required_descendants = T::DescendantsUntilFinalized::get() as usize; + let maybe_newly_finalized_ancestor = ancestry::(best_block_id.hash) + .enumerate() + .find_map(|(i, pair)| if i < required_descendants { None } else { Some(pair) }); + + match maybe_newly_finalized_ancestor { + Some((hash, header)) => { + // The header is newly finalized if it is younger than the current + // finalized block + if header.number > finalized_block_id.number { + return Ok(EthereumHeaderId { + hash: hash, + number: header.number, + }); + } + if hash != finalized_block_id.hash { + return Err(Error::::Unknown.into()); + } + Ok(finalized_block_id.clone()) + } + None => Ok(finalized_block_id.clone()) + } } - let start = new_pruning_range.oldest_unpruned_block; - let end = new_pruning_range.oldest_block_to_keep; - let mut blocks_pruned = 0; - for number in start..end { - if blocks_pruned == max_headers_to_prune { - break; + // Remove old headers, from oldest to newest, in the provided range + // (adjusted to `prune_end` if newer). Only up to `max_headers_to_prune` + // will be removed. + pub(super) fn prune_header_range( + pruning_range: &PruningRange, + max_headers_to_prune: u64, + prune_end: u64, + ) -> PruningRange { + let mut new_pruning_range = pruning_range.clone(); + + // We can only increase this since pruning cannot be reverted... + if prune_end > new_pruning_range.oldest_block_to_keep { + new_pruning_range.oldest_block_to_keep = prune_end; } - if let Some(hashes_at_number) = HeadersByNumber::take(number) { - let mut remaining = hashes_at_number.len(); - for hash in hashes_at_number.iter() { - Headers::::remove(hash); - blocks_pruned += 1; - remaining -= 1; - if blocks_pruned == max_headers_to_prune { - break; - } + let start = new_pruning_range.oldest_unpruned_block; + let end = new_pruning_range.oldest_block_to_keep; + let mut blocks_pruned = 0; + for number in start..end { + if blocks_pruned == max_headers_to_prune { + break; } - if remaining > 0 { - let remainder = &hashes_at_number[hashes_at_number.len() - remaining..]; - HeadersByNumber::insert(number, remainder); + if let Some(hashes_at_number) = >::take(number) { + let mut remaining = hashes_at_number.len(); + for hash in hashes_at_number.iter() { + >::remove(hash); + blocks_pruned += 1; + remaining -= 1; + if blocks_pruned == max_headers_to_prune { + break; + } + } + + if remaining > 0 { + let remainder = &hashes_at_number[hashes_at_number.len() - remaining..]; + >::insert(number, remainder); + } else { + new_pruning_range.oldest_unpruned_block = number + 1; + } } else { new_pruning_range.oldest_unpruned_block = number + 1; } - } else { - new_pruning_range.oldest_unpruned_block = number + 1; } - } - - new_pruning_range - } - - // Verifies that the receipt encoded in proof.data is included - // in the block given by proof.block_hash. Inclusion is only - // recognized if the block has been finalized. 
- fn verify_receipt_inclusion(proof: &Proof) -> Result { - let stored_header = Headers::::get(proof.block_hash) - .ok_or(Error::::MissingHeader)?; - ensure!(stored_header.finalized, Error::::HeaderNotFinalized); - - let result = stored_header.header.check_receipt_proof(&proof.data.1) - .ok_or(Error::::InvalidProof)?; + new_pruning_range + } - match result { - Ok(receipt) => Ok(receipt), - Err(err) => { - log::trace!( - target: "ethereum-light-client", - "Failed to decode transaction receipt: {}", - err - ); - Err(Error::::InvalidProof.into()) + // Verifies that the receipt encoded in proof.data is included + // in the block given by proof.block_hash. Inclusion is only + // recognized if the block has been finalized. + fn verify_receipt_inclusion(proof: &Proof) -> Result { + let stored_header = >::get(proof.block_hash) + .ok_or(Error::::MissingHeader)?; + + ensure!(stored_header.finalized, Error::::HeaderNotFinalized); + + let result = stored_header.header.check_receipt_proof(&proof.data.1) + .ok_or(Error::::InvalidProof)?; + + match result { + Ok(receipt) => Ok(receipt), + Err(err) => { + log::trace!( + target: "ethereum-light-client", + "Failed to decode transaction receipt: {}", + err + ); + Err(Error::::InvalidProof.into()) + } } } } -} - -/// Return iterator over header ancestors, starting at given hash -fn ancestry(mut hash: H256) -> impl Iterator { - sp_std::iter::from_fn(move || { - let header = Headers::::get(&hash)?.header; - let current_hash = hash; - hash = header.parent_hash; - Some((current_hash, header)) - }) -} - -impl Verifier for Module { - /// Verify a message by verifying the existence of the corresponding - /// Ethereum log in a block. Returns the log if successful. - fn verify(message: &Message) -> Result { - let receipt = Self::verify_receipt_inclusion(&message.proof)?; + /// Return iterator over header ancestors, starting at given hash + fn ancestry(mut hash: H256) -> impl Iterator { + sp_std::iter::from_fn(move || { + let header = >::get(&hash)?.header; + let current_hash = hash; + hash = header.parent_hash; + Some((current_hash, header)) + }) + } - log::trace!( - target: "ethereum-light-client", - "Verified receipt inclusion for transaction at index {} in block {}", - message.proof.tx_index, message.proof.block_hash, - ); + impl Verifier for Pallet { - let log: Log = rlp::decode(&message.data) - .map_err(|_| Error::::DecodeFailed)?; + /// Verify a message by verifying the existence of the corresponding + /// Ethereum log in a block. Returns the log if successful. + fn verify(message: &Message) -> Result { + let receipt = Self::verify_receipt_inclusion(&message.proof)?; - if !receipt.contains_log(&log) { log::trace!( target: "ethereum-light-client", - "Event log not found in receipt for transaction at index {} in block {}", + "Verified receipt inclusion for transaction at index {} in block {}", message.proof.tx_index, message.proof.block_hash, ); - return Err(Error::::InvalidProof.into()); - } - Ok(log) - } + let log: Log = rlp::decode(&message.data) + .map_err(|_| Error::::DecodeFailed)?; - /// Import an ordered vec of Ethereum headers without performing - /// validation. - /// - /// NOTE: This should only be used to initialize empty storage. 
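// Self-contained illustration (plain std collections and numeric "hashes" instead of pallet
// storage) of the `sp_std::iter::from_fn` technique used by the `ancestry` helper above: each
// step looks up the current hash, yields it with its header, then moves to the parent;
// iteration stops as soon as a hash is missing, so callers can lazily walk back towards genesis.
use std::collections::BTreeMap;

#[derive(Clone)]
struct MockHeader {
    parent: u64,
}

fn ancestry(headers: &BTreeMap<u64, MockHeader>, mut hash: u64) -> impl Iterator<Item = (u64, MockHeader)> + '_ {
    std::iter::from_fn(move || {
        let header = headers.get(&hash)?.clone();
        let current = hash;
        hash = header.parent;
        Some((current, header))
    })
}

fn main() {
    let mut headers = BTreeMap::new();
    headers.insert(1, MockHeader { parent: 0 }); // parent 0 is absent, so the walk ends here
    headers.insert(2, MockHeader { parent: 1 });
    headers.insert(3, MockHeader { parent: 2 });
    let chain: Vec<u64> = ancestry(&headers, 3).map(|(hash, _)| hash).collect();
    assert_eq!(chain, vec![3, 2, 1]);
}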
- fn initialize_storage( - headers: Vec, - initial_difficulty: U256, - descendants_until_final: u8, - ) -> Result<(), &'static str> { - let insert_header_fn = |header: &EthereumHeader, total_difficulty: U256| { - let hash = header.compute_hash(); - Headers::::insert( - hash, - StoredHeader { - submitter: None, - header: header.clone(), - total_difficulty: total_difficulty, - finalized: false, - }, - ); - HeadersByNumber::append(header.number, hash); - - EthereumHeaderId { - number: header.number, - hash: hash, + if !receipt.contains_log(&log) { + log::trace!( + target: "ethereum-light-client", + "Event log not found in receipt for transaction at index {} in block {}", + message.proof.tx_index, message.proof.block_hash, + ); + return Err(Error::::InvalidProof.into()); } - }; - let oldest_header = headers.get(0).ok_or("Need at least one header")?; - let mut best_block_difficulty = initial_difficulty; - let mut best_block_id = insert_header_fn(&oldest_header, best_block_difficulty); + Ok(log) + } - for (i, header) in headers.iter().enumerate().skip(1) { - let prev_block_num = headers[i - 1].number; - ensure!( - header.number == prev_block_num || header.number == prev_block_num + 1, - "Headers must be in order", - ); + /// Import an ordered vec of Ethereum headers without performing + /// validation. + /// + /// NOTE: This should only be used to initialize empty storage. + fn initialize_storage( + headers: Vec, + initial_difficulty: U256, + descendants_until_final: u8, + ) -> Result<(), &'static str> { + let insert_header_fn = |header: &EthereumHeader, total_difficulty: U256| { + let hash = header.compute_hash(); + >::insert( + hash, + StoredHeader { + submitter: None, + header: header.clone(), + total_difficulty: total_difficulty, + finalized: false, + }, + ); + >::append(header.number, hash); - let total_difficulty = { - let parent = Headers::::get(header.parent_hash).ok_or("Missing parent header")?; - parent.total_difficulty + header.difficulty + EthereumHeaderId { + number: header.number, + hash: hash, + } }; - let block_id = insert_header_fn(&header, total_difficulty); + let oldest_header = headers.get(0).ok_or("Need at least one header")?; + let mut best_block_difficulty = initial_difficulty; + let mut best_block_id = insert_header_fn(&oldest_header, best_block_difficulty); - if total_difficulty > best_block_difficulty { - best_block_difficulty = total_difficulty; - best_block_id = block_id; - } - } + for (i, header) in headers.iter().enumerate().skip(1) { + let prev_block_num = headers[i - 1].number; + ensure!( + header.number == prev_block_num || header.number == prev_block_num + 1, + "Headers must be in order", + ); - BestBlock::put((best_block_id, best_block_difficulty)); + let total_difficulty = { + let parent = >::get(header.parent_hash).ok_or("Missing parent header")?; + parent.total_difficulty + header.difficulty + }; - let maybe_finalized_ancestor = ancestry::(best_block_id.hash) - .enumerate() - .find_map(|(i, pair)| if i < descendants_until_final as usize { None } else { Some(pair) }); - if let Some((hash, header)) = maybe_finalized_ancestor { - FinalizedBlock::put(EthereumHeaderId { - hash: hash, - number: header.number, - }); - let mut next_hash = Ok(hash); - loop { - match next_hash { - Ok(hash) => next_hash = Headers::::mutate(hash, |option| { - if let Some(header) = option { - header.finalized = true; - return Ok(header.header.parent_hash); - } - Err("No header at hash") - }), - _ => break, + let block_id = insert_header_fn(&header, total_difficulty); + + if 
total_difficulty > best_block_difficulty { + best_block_difficulty = total_difficulty; + best_block_id = block_id; } } - } - Ok(()) + >::put((best_block_id, best_block_difficulty)); + + let maybe_finalized_ancestor = ancestry::(best_block_id.hash) + .enumerate() + .find_map(|(i, pair)| if i < descendants_until_final as usize { None } else { Some(pair) }); + if let Some((hash, header)) = maybe_finalized_ancestor { + >::put(EthereumHeaderId { + hash: hash, + number: header.number, + }); + let mut next_hash = Ok(hash); + loop { + match next_hash { + Ok(hash) => next_hash = >::mutate(hash, |option| { + if let Some(header) = option { + header.finalized = true; + return Ok(header.header.parent_hash); + } + Err("No header at hash") + }), + _ => break, + } + } + } + + Ok(()) + } } + } + + + + + diff --git a/parachain/pallets/ethereum-light-client/src/mock.rs b/parachain/pallets/ethereum-light-client/src/mock.rs index 179bd932ccdfe..7ab8d5a724fd3 100644 --- a/parachain/pallets/ethereum-light-client/src/mock.rs +++ b/parachain/pallets/ethereum-light-client/src/mock.rs @@ -1,4 +1,5 @@ // Mock runtime +use frame_support::traits::GenesisBuild; use snowbridge_core::{Message, Proof}; use snowbridge_testutils::BlockWithProofs; use crate::{EthashProofData, EthereumHeader, EthereumDifficultyConfig}; @@ -36,11 +37,11 @@ pub mod mock_verifier { UncheckedExtrinsic = UncheckedExtrinsic, { System: frame_system::{Pallet, Call, Storage, Event}, - Verifier: verifier::{Pallet, Call, Storage, Event}, + Verifier: verifier::{Pallet, Call, Config, Storage, Event}, } ); - impl system::Config for Test { + impl frame_system::Config for Test { type BaseCallFilter = (); type BlockWeights = (); type BlockLength = (); @@ -95,7 +96,7 @@ pub mod mock_verifier_with_pow { UncheckedExtrinsic = UncheckedExtrinsic, { System: frame_system::{Pallet, Call, Storage, Event}, - Verifier: verifier::{Pallet, Call, Storage, Event}, + Verifier: verifier::{Pallet, Call, Config, Storage, Event}, } ); @@ -269,7 +270,7 @@ pub fn new_tester() -> sp_io::TestExternalities { pub fn new_tester_with_config(config: crate::GenesisConfig) -> sp_io::TestExternalities { let mut storage = system::GenesisConfig::default().build_storage::().unwrap(); - config.assimilate_storage::(&mut storage).unwrap(); + GenesisBuild::::assimilate_storage(&config, &mut storage).unwrap(); let ext: sp_io::TestExternalities = storage.into(); //ext.execute_with(|| >::set_block_number(); diff --git a/parachain/pallets/ethereum-light-client/src/tests.rs b/parachain/pallets/ethereum-light-client/src/tests.rs index 7446543a38722..4a31f96fbcb98 100644 --- a/parachain/pallets/ethereum-light-client/src/tests.rs +++ b/parachain/pallets/ethereum-light-client/src/tests.rs @@ -16,7 +16,6 @@ use crate::mock::mock_verifier::{ Origin, }; -use crate::sp_api_hidden_includes_decl_storage::hidden_include::{StorageMap, StorageValue}; use frame_support::{assert_err, assert_ok}; use sp_keyring::AccountKeyring as Keyring; use sp_runtime::DispatchError; @@ -25,6 +24,7 @@ use crate::{ HeadersByNumber, PruningRange, }; + #[test] fn it_tracks_highest_difficulty_ethereum_chain() { new_tester::().execute_with(|| { @@ -46,7 +46,7 @@ fn it_tracks_highest_difficulty_ethereum_chain() { Default::default(), )); - let (header_id, highest_difficulty) = BestBlock::get(); + let (header_id, highest_difficulty) = >::get(); assert_eq!(header_id.hash, child1_hash); assert_eq!(highest_difficulty, 0xbc140caa61087i64.into()); }); @@ -74,9 +74,9 @@ fn it_tracks_multiple_unfinalized_ethereum_forks() { Default::default(), )); - 
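// Hedged sketch of the test-externalities pattern the mock changes above adopt; `Test` and
// `example` stand in for a concrete mock runtime and pallet, and the pallet's `GenesisConfig`
// is assumed to be non-generic (as for eth-app). Under FRAME 2.0 the generated `GenesisConfig`
// no longer has an inherent `assimilate_storage`, so mocks import the `GenesisBuild` trait,
// list `Config` for the pallet in `construct_runtime!`, and call the trait method instead.
use frame_support::traits::GenesisBuild;

pub fn new_test_ext(config: example::GenesisConfig) -> sp_io::TestExternalities {
    let mut storage = frame_system::GenesisConfig::default()
        .build_storage::<Test>()
        .unwrap();
    GenesisBuild::<Test>::assimilate_storage(&config, &mut storage).unwrap();
    storage.into()
}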
assert!(Headers::::contains_key(child1_hash)); - assert!(Headers::::contains_key(child2_hash)); - assert_eq!(HeadersByNumber::get(1).unwrap(), vec![child1_hash, child2_hash]); + assert!(>::contains_key(child1_hash)); + assert!(>::contains_key(child2_hash)); + assert_eq!(>::get(1).unwrap(), vec![child1_hash, child2_hash]); }); } @@ -113,10 +113,10 @@ fn it_tracks_only_one_finalized_ethereum_fork() { )); } // Relies on DescendantsUntilFinalized = 2 - assert_eq!(FinalizedBlock::get().hash, block1_hash); - assert!(Headers::::get(block1_hash).unwrap().finalized); - assert!(Headers::::get(block2_hash).unwrap().finalized == false); - assert_eq!(BestBlock::get().0.hash, block3_hash); + assert_eq!(>::get().hash, block1_hash); + assert!(>::get(block1_hash).unwrap().finalized); + assert!(>::get(block2_hash).unwrap().finalized == false); + assert_eq!(BestBlock::::get().0.hash, block3_hash); // With invalid forks (invalid since B1 is final): // B0 @@ -185,8 +185,8 @@ fn it_prunes_ethereum_headers_correctly() { new_range, PruningRange { oldest_unpruned_block: 1, oldest_block_to_keep: 1 }, ); - assert!(!Headers::::contains_key(genesis_ethereum_block_hash())); - assert!(!HeadersByNumber::contains_key(0)); + assert!(!>::contains_key(genesis_ethereum_block_hash())); + assert!(!>::contains_key(0)); // Prune next block (B1) let new_range = Verifier::prune_header_range( @@ -198,9 +198,9 @@ fn it_prunes_ethereum_headers_correctly() { new_range, PruningRange { oldest_unpruned_block: 1, oldest_block_to_keep: 2 }, ); - assert!(!Headers::::contains_key(block1_hash)); - assert!(Headers::::contains_key(block4_hash)); - assert_eq!(HeadersByNumber::get(1).unwrap(), vec![block4_hash]); + assert!(!>::contains_key(block1_hash)); + assert!(>::contains_key(block4_hash)); + assert_eq!(>::get(1).unwrap(), vec![block4_hash]); // Prune next two blocks (B4, B2) let new_range = Verifier::prune_header_range( @@ -212,14 +212,14 @@ fn it_prunes_ethereum_headers_correctly() { new_range, PruningRange { oldest_unpruned_block: 3, oldest_block_to_keep: 4 }, ); - assert!(!Headers::::contains_key(block4_hash)); - assert!(!HeadersByNumber::contains_key(1)); - assert!(!Headers::::contains_key(block2_hash)); - assert!(!HeadersByNumber::contains_key(2)); + assert!(!>::contains_key(block4_hash)); + assert!(!>::contains_key(1)); + assert!(!>::contains_key(block2_hash)); + assert!(!>::contains_key(2)); // Finally, we're left with B3 - assert!(Headers::::contains_key(block3_hash)); - assert_eq!(HeadersByNumber::get(3).unwrap(), vec![block3_hash]); + assert!(>::contains_key(block3_hash)); + assert_eq!(HeadersByNumber::::get(3).unwrap(), vec![block3_hash]); }); } @@ -451,7 +451,7 @@ fn it_denies_receipt_inclusion_for_invalid_header() { Default::default(), )); } - assert_eq!(FinalizedBlock::get().hash, block1_alt_hash); + assert_eq!(>::get().hash, block1_alt_hash); // A finalized header at this height exists, but it's not block1 assert_err!( diff --git a/parachain/pallets/ethereum-light-client/src/weights.rs b/parachain/pallets/ethereum-light-client/src/weights.rs new file mode 100644 index 0000000000000..578c864ba4705 --- /dev/null +++ b/parachain/pallets/ethereum-light-client/src/weights.rs @@ -0,0 +1,15 @@ +use frame_support::weights::Weight; + +pub trait WeightInfo { + fn import_header() -> Weight; + fn import_header_not_new_finalized_with_max_prune() -> Weight; + fn import_header_new_finalized_with_single_prune() -> Weight; + fn import_header_not_new_finalized_with_single_prune() -> Weight; +} + +impl WeightInfo for () { + fn import_header() -> 
Weight { 0 } + fn import_header_not_new_finalized_with_max_prune() -> Weight { 0 } + fn import_header_new_finalized_with_single_prune() -> Weight { 0 } + fn import_header_not_new_finalized_with_single_prune() -> Weight { 0 } +} diff --git a/parachain/pallets/incentivized-channel/src/inbound/benchmarking.rs b/parachain/pallets/incentivized-channel/src/inbound/benchmarking.rs index feb0c287f6204..6ff3107237272 100644 --- a/parachain/pallets/incentivized-channel/src/inbound/benchmarking.rs +++ b/parachain/pallets/incentivized-channel/src/inbound/benchmarking.rs @@ -8,12 +8,13 @@ use frame_system::{RawOrigin, self, EventRecord}; use frame_benchmarking::{benchmarks, whitelisted_caller, impl_benchmark_test_suite}; use hex_literal::hex; use sp_std::convert::TryInto; +use sp_std::prelude::*; use snowbridge_core::{ChannelId, Message, MessageId, Proof}; use snowbridge_ethereum::{Log, Header}; #[allow(unused_imports)] -use crate::inbound::Module as IncentivizedInboundChannel; +use crate::inbound::Pallet as IncentivizedInboundChannel; fn assert_last_event(system_event: ::Event) { let events = frame_system::Pallet::::events(); @@ -39,8 +40,8 @@ benchmarks! { let envelope: envelope::Envelope = rlp::decode::(&message.data) .map(|log| log.try_into().unwrap()) .unwrap(); - Nonce::put(envelope.nonce - 1); - SourceChannel::put(envelope.channel); + >::put(envelope.nonce - 1); + >::put(envelope.channel); T::Verifier::initialize_storage( vec![header], @@ -50,7 +51,7 @@ benchmarks! { }: _(RawOrigin::Signed(caller.clone()), message) verify { - assert_eq!(envelope.nonce, Nonce::get()); + assert_eq!(envelope.nonce, >::get()); let message_id = MessageId::new(ChannelId::Incentivized, envelope.nonce); if let Some(event) = T::MessageDispatch::successful_dispatch_event(message_id) { @@ -68,11 +69,11 @@ benchmarks! { // Pick a value that is different from the initial RewardFraction let fraction = Perbill::from_percent(50); - assert!(RewardFraction::get() != fraction); + assert!(>::get() != fraction); }: _(authorized_origin, fraction) verify { - assert_eq!(RewardFraction::get(), fraction); + assert_eq!(>::get(), fraction); } #[extra] @@ -82,8 +83,8 @@ benchmarks! { let envelope: envelope::Envelope = rlp::decode::(&message.data) .map(|log| log.try_into().unwrap()) .unwrap(); - Nonce::put(envelope.nonce - 1); - SourceChannel::put(envelope.channel); + >::put(envelope.nonce - 1); + >::put(envelope.channel); T::Verifier::initialize_storage( vec![header], @@ -93,7 +94,7 @@ benchmarks! { }: submit(RawOrigin::Signed(caller.clone()), message) verify { - assert_eq!(envelope.nonce, Nonce::get()); + assert_eq!(envelope.nonce, >::get()); let message_id = MessageId::new(ChannelId::Incentivized, envelope.nonce); if let Some(event) = T::MessageDispatch::successful_dispatch_event(message_id) { @@ -108,8 +109,8 @@ benchmarks! { let envelope: envelope::Envelope = rlp::decode::(&message.data) .map(|log| log.try_into().unwrap()) .unwrap(); - Nonce::put(envelope.nonce - 1); - SourceChannel::put(envelope.channel); + >::put(envelope.nonce - 1); + >::put(envelope.channel); T::Verifier::initialize_storage( vec![header], @@ -119,7 +120,7 @@ benchmarks! 
{ }: submit(RawOrigin::Signed(caller.clone()), message) verify { - assert_eq!(envelope.nonce, Nonce::get()); + assert_eq!(envelope.nonce, >::get()); let message_id = MessageId::new(ChannelId::Incentivized, envelope.nonce); if let Some(event) = T::MessageDispatch::successful_dispatch_event(message_id) { diff --git a/parachain/pallets/incentivized-channel/src/inbound/mod.rs b/parachain/pallets/incentivized-channel/src/inbound/mod.rs index 21756515d3cd6..1affc4b51348a 100644 --- a/parachain/pallets/incentivized-channel/src/inbound/mod.rs +++ b/parachain/pallets/incentivized-channel/src/inbound/mod.rs @@ -1,18 +1,20 @@ +mod envelope; +mod benchmarking; +pub mod weights; + +#[cfg(test)] +mod test; + use frame_support::{ - decl_error, decl_event, decl_module, decl_storage, - dispatch::DispatchResult, traits::{ Currency, Get, ExistenceRequirement::KeepAlive, WithdrawReasons, Imbalance, EnsureOrigin, }, - storage::StorageValue, log, - weights::Weight, }; -use frame_system::{self as system, ensure_signed}; +use frame_system::{ensure_signed}; use sp_core::{U256, H160}; -use sp_std::prelude::*; use sp_std::convert::TryFrom; use snowbridge_core::{ ChannelId, Message, MessageId, @@ -20,72 +22,61 @@ use snowbridge_core::{ }; use envelope::Envelope; +pub use weights::WeightInfo; use sp_runtime::{Perbill, traits::{Zero, Convert}}; -mod benchmarking; - -#[cfg(test)] -mod test; - -mod envelope; - -type BalanceOf = <::Currency as Currency<::AccountId>>::Balance; -type PositiveImbalanceOf = <::Currency as Currency<::AccountId>>::PositiveImbalance; +pub use pallet::*; -/// Weight functions needed for this pallet. -pub trait WeightInfo { - fn submit() -> Weight; - fn set_reward_fraction() -> Weight; -} +#[frame_support::pallet] +pub mod pallet { -impl WeightInfo for () { - fn submit() -> Weight { 0 } - fn set_reward_fraction() -> Weight { 0 } -} + use super::*; -pub trait Config: system::Config { - type Event: From + Into<::Event>; + use frame_support::pallet_prelude::*; + use frame_system::pallet_prelude::*; - /// Verifier module for message verification. - type Verifier: Verifier; + #[pallet::pallet] + #[pallet::generate_store(pub(super) trait Store)] + pub struct Pallet(_); - /// Verifier module for message verification. - type MessageDispatch: MessageDispatch; + #[pallet::config] + pub trait Config: frame_system::Config { + type Event: From> + IsType<::Event>; - type Currency: Currency; + /// Verifier module for message verification. + type Verifier: Verifier; - /// Source of funds to pay relayers - type SourceAccount: Get; + /// Verifier module for message verification. + type MessageDispatch: MessageDispatch; - /// Treasury Account - type TreasuryAccount: Get; + type Currency: Currency; - type FeeConverter: Convert>; + /// Source of funds to pay relayers + #[pallet::constant] + type SourceAccount: Get; - /// The origin which may update reward related params - type UpdateOrigin: EnsureOrigin; + /// Treasury Account + #[pallet::constant] + type TreasuryAccount: Get; - /// Weight information for extrinsics in this pallet - type WeightInfo: WeightInfo; -} + type FeeConverter: Convert>; -decl_storage! { - trait Store for Module as IncentivizedInboundModule { - pub SourceChannel get(fn source_channel) config(): H160; - pub Nonce: u64; - pub RewardFraction get(fn reward_fraction) config(): Perbill; + /// The origin which may update reward related params + type UpdateOrigin: EnsureOrigin; + /// Weight information for extrinsics in this pallet + type WeightInfo: WeightInfo; } -} -decl_event! 
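// Hedged sketch of how a runtime might satisfy the Config trait above; every concrete choice
// here (root as the update origin, hard-coded account constants, `()` weights) is a placeholder
// for illustration, not taken from this patch.
use frame_support::parameter_types;
use sp_runtime::AccountId32;

parameter_types! {
    pub SourceAccount: AccountId32 = AccountId32::new([0u8; 32]);
    pub TreasuryAccount: AccountId32 = AccountId32::new([1u8; 32]);
}

// impl incentivized_inbound_channel::Config for Runtime {
//     type Event = Event;
//     type Verifier = EthereumLightClient;
//     type MessageDispatch = Dispatch;
//     type Currency = Balances;
//     type SourceAccount = SourceAccount;
//     type TreasuryAccount = TreasuryAccount;
//     type FeeConverter = FeeConverter;
//     type UpdateOrigin = frame_system::EnsureRoot<AccountId32>;
//     type WeightInfo = ();
// }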
{ - pub enum Event { - } -} + #[pallet::hooks] + impl Hooks> for Pallet {} + + #[pallet::event] + pub enum Event {} -decl_error! { - pub enum Error for Module { + #[pallet::error] + pub enum Error { /// Message came from an invalid outbound channel on the Ethereum side. InvalidSourceChannel, /// Message has an invalid envelope. @@ -93,17 +84,51 @@ decl_error! { /// Message has an unexpected nonce. InvalidNonce, } -} -decl_module! { - pub struct Module for enum Call where origin: T::Origin { + /// Source channel on the ethereum side + #[pallet::storage] + #[pallet::getter(fn source_channel)] + pub type SourceChannel = StorageValue<_, H160, ValueQuery>; - type Error = Error; + #[pallet::storage] + pub type Nonce = StorageValue<_, u64, ValueQuery>; - fn deposit_event() = default; + /// Fraction of reward going to relayer + #[pallet::storage] + #[pallet::getter(fn reward_fraction)] + pub type RewardFraction = StorageValue<_, Perbill, ValueQuery>; - #[weight = T::WeightInfo::submit()] - pub fn submit(origin, message: Message) -> DispatchResult { + #[pallet::genesis_config] + pub struct GenesisConfig { + pub source_channel: H160, + pub reward_fraction: Perbill, + } + + #[cfg(feature = "std")] + impl Default for GenesisConfig { + fn default() -> Self { + Self { + source_channel: Default::default(), + reward_fraction: Perbill::one() + } + } + } + + #[pallet::genesis_build] + impl GenesisBuild for GenesisConfig { + fn build(&self) { + >::put(self.source_channel); + >::put(self.reward_fraction); + } + } + + #[pallet::call] + impl Pallet { + #[pallet::weight(T::WeightInfo::submit())] + pub fn submit( + origin: OriginFor, + message: Message + ) -> DispatchResult { let relayer = ensure_signed(origin)?; // submit message to verifier for verification let log = T::Verifier::verify(&message)?; @@ -113,12 +138,12 @@ decl_module! { // Verify that the message was submitted to us from a known // outbound channel on the ethereum side - if envelope.channel != SourceChannel::get() { + if envelope.channel != >::get() { return Err(Error::::InvalidSourceChannel.into()) } // Verify message nonce - Nonce::try_mutate(|nonce| -> DispatchResult { + >::try_mutate(|nonce| -> DispatchResult { if envelope.nonce != *nonce + 1 { Err(Error::::InvalidNonce.into()) } else { @@ -135,55 +160,58 @@ decl_module! { Ok(()) } - #[weight = T::WeightInfo::set_reward_fraction()] - pub fn set_reward_fraction(origin, fraction: Perbill) -> DispatchResult { + #[pallet::weight(T::WeightInfo::set_reward_fraction())] + pub fn set_reward_fraction(origin: OriginFor, fraction: Perbill) -> DispatchResult { T::UpdateOrigin::ensure_origin(origin)?; - RewardFraction::set(fraction); + >::set(fraction); Ok(()) } - } -} -impl Module { - /* - * Pay the message submission fee into the relayer and treasury account. - * - * - If the fee is zero, do nothing - * - Otherwise, withdraw the fee amount from the DotApp module account, returning a negative imbalance - * - Figure out the fraction of the fee amount that should be paid to the relayer - * - Pay the relayer if their account exists, returning a positive imbalance. 
- * - Adjust the negative imbalance by offsetting the amount paid to the relayer - * - Resolve the negative imbalance by depositing it into the treasury account - */ - fn handle_fee(amount: BalanceOf, relayer: &T::AccountId) { - if amount.is_zero() { - return; - } - - let imbalance = match T::Currency::withdraw(&T::SourceAccount::get(), amount, WithdrawReasons::TRANSFER, KeepAlive) { - Ok(imbalance) => imbalance, - Err(err) => { - log::error!("Unable to withdraw from source account: {:?}", err); + pub type BalanceOf = <::Currency as Currency<::AccountId>>::Balance; + pub type PositiveImbalanceOf = <::Currency as Currency<::AccountId>>::PositiveImbalance; + + impl Pallet { + /* + * Pay the message submission fee into the relayer and treasury account. + * + * - If the fee is zero, do nothing + * - Otherwise, withdraw the fee amount from the DotApp module account, returning a negative imbalance + * - Figure out the fraction of the fee amount that should be paid to the relayer + * - Pay the relayer if their account exists, returning a positive imbalance. + * - Adjust the negative imbalance by offsetting the amount paid to the relayer + * - Resolve the negative imbalance by depositing it into the treasury account + */ + pub(super) fn handle_fee(amount: BalanceOf, relayer: &T::AccountId) { + if amount.is_zero() { return; } - }; - let reward_fraction: Perbill = RewardFraction::get(); - let reward_amount = reward_fraction.mul_ceil(amount); + let imbalance = match T::Currency::withdraw(&T::SourceAccount::get(), amount, WithdrawReasons::TRANSFER, KeepAlive) { + Ok(imbalance) => imbalance, + Err(err) => { + log::error!("Unable to withdraw from source account: {:?}", err); + return; + } + }; - let rewarded = T::Currency::deposit_into_existing(relayer, reward_amount) - .unwrap_or_else(|_| PositiveImbalanceOf::::zero()); + let reward_fraction: Perbill = >::get(); + let reward_amount = reward_fraction.mul_ceil(amount); - let adjusted_imbalance = match imbalance.offset(rewarded).same() { - Ok(imbalance) => imbalance, - Err(_) => { - log::error!("Unable to offset imbalance"); - return; - } - }; + let rewarded = T::Currency::deposit_into_existing(relayer, reward_amount) + .unwrap_or_else(|_| PositiveImbalanceOf::::zero()); - T::Currency::resolve_creating(&T::TreasuryAccount::get(), adjusted_imbalance); - } + let adjusted_imbalance = match imbalance.offset(rewarded).same() { + Ok(imbalance) => imbalance, + Err(_) => { + log::error!("Unable to offset imbalance"); + return; + } + }; + T::Currency::resolve_creating(&T::TreasuryAccount::get(), adjusted_imbalance); + } + } } + + diff --git a/parachain/pallets/incentivized-channel/src/inbound/test.rs b/parachain/pallets/incentivized-channel/src/inbound/test.rs index 4d7f98161a250..b7bec04439a3f 100644 --- a/parachain/pallets/incentivized-channel/src/inbound/test.rs +++ b/parachain/pallets/incentivized-channel/src/inbound/test.rs @@ -1,6 +1,6 @@ - use super::*; +use frame_support::traits::GenesisBuild; use sp_core::{H160, H256}; use frame_support::{ assert_ok, assert_noop, @@ -35,7 +35,7 @@ frame_support::construct_runtime!( { System: frame_system::{Pallet, Call, Storage, Event}, Balances: pallet_balances::{Pallet, Call, Storage, Event}, - IncentivizedInboundChannel: incentivized_inbound_channel::{Pallet, Call, Storage, Event}, + IncentivizedInboundChannel: incentivized_inbound_channel::{Pallet, Call, Storage, Event}, } ); @@ -47,7 +47,7 @@ parameter_types! 
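// Self-contained illustration of the reward split computed by `handle_fee` above: the relayer
// receives `RewardFraction` of the fee, rounded up with `mul_ceil`, and the remainder is
// resolved into the treasury account. The 80% fraction and fee value are arbitrary examples.
use sp_runtime::Perbill;

fn main() {
    let fee: u128 = 10_000_000_000;
    let reward_fraction = Perbill::from_percent(80);

    let relayer_reward = reward_fraction.mul_ceil(fee);
    let treasury_share = fee - relayer_reward;

    assert_eq!(relayer_reward, 8_000_000_000);
    assert_eq!(treasury_share, 2_000_000_000);
}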
{ pub const BlockHashCount: u64 = 250; } -impl system::Config for Test { +impl frame_system::Config for Test { type BaseCallFilter = (); type BlockWeights = (); type BlockLength = (); @@ -115,7 +115,7 @@ impl MessageDispatch for MockMessageDispatch { fn dispatch(_: H160, _: MessageId, _: &[u8]) {} #[cfg(feature = "runtime-benchmarks")] - fn successful_dispatch_event(_: MessageId) -> Option<::Event> { + fn successful_dispatch_event(_: MessageId) -> Option<::Event> { None } } @@ -155,7 +155,7 @@ pub fn new_tester(source_channel: H160) -> sp_io::TestExternalities { pub fn new_tester_with_config(config: incentivized_inbound_channel::GenesisConfig) -> sp_io::TestExternalities { let mut storage = frame_system::GenesisConfig::default().build_storage::().unwrap(); - config.assimilate_storage(&mut storage).unwrap(); + GenesisBuild::::assimilate_storage(&config, &mut storage).unwrap(); let mut ext: sp_io::TestExternalities = storage.into(); ext.execute_with(|| System::set_block_number(1)); @@ -232,7 +232,7 @@ fn test_submit() { }, }; assert_ok!(IncentivizedInboundChannel::submit(origin.clone(), message_1)); - let nonce: u64 = Nonce::get(); + let nonce: u64 = >::get(); assert_eq!(nonce, 1); // Submit message 2 @@ -245,7 +245,7 @@ fn test_submit() { }, }; assert_ok!(IncentivizedInboundChannel::submit(origin.clone(), message_2)); - let nonce: u64 = Nonce::get(); + let nonce: u64 = >::get(); assert_eq!(nonce, 2); }); } @@ -266,7 +266,7 @@ fn test_submit_with_invalid_nonce() { }, }; assert_ok!(IncentivizedInboundChannel::submit(origin.clone(), message.clone())); - let nonce: u64 = Nonce::get(); + let nonce: u64 = >::get(); assert_eq!(nonce, 1); // Submit the same again diff --git a/parachain/pallets/incentivized-channel/src/inbound/weights.rs b/parachain/pallets/incentivized-channel/src/inbound/weights.rs new file mode 100644 index 0000000000000..ddf2a42f2008c --- /dev/null +++ b/parachain/pallets/incentivized-channel/src/inbound/weights.rs @@ -0,0 +1,11 @@ +use frame_support::weights::Weight; + +pub trait WeightInfo { + fn submit() -> Weight; + fn set_reward_fraction() -> Weight; +} + +impl WeightInfo for () { + fn submit() -> Weight { 0 } + fn set_reward_fraction() -> Weight { 0 } +} diff --git a/parachain/pallets/incentivized-channel/src/outbound/benchmarking.rs b/parachain/pallets/incentivized-channel/src/outbound/benchmarking.rs index b95e1fa1da8ee..423c4cee201ff 100644 --- a/parachain/pallets/incentivized-channel/src/outbound/benchmarking.rs +++ b/parachain/pallets/incentivized-channel/src/outbound/benchmarking.rs @@ -6,7 +6,7 @@ use frame_benchmarking::{benchmarks, impl_benchmark_test_suite}; use frame_support::traits::OnInitialize; #[allow(unused_imports)] -use crate::outbound::Module as IncentivizedOutboundChannel; +use crate::outbound::Pallet as IncentivizedOutboundChannel; benchmarks! { // Benchmark `on_initialize` under worst case conditions, i.e. messages @@ -17,7 +17,7 @@ benchmarks! { for _ in 0 .. m { let payload: Vec = (0..).take(p as usize).collect(); - MessageQueue::append(Message { + >::append(Message { target: H160::zero(), nonce: 0u64, fee: U256::zero(), @@ -29,17 +29,17 @@ benchmarks! { }: { IncentivizedOutboundChannel::::on_initialize(block_number) } verify { - assert_eq!(MessageQueue::get().len(), 0); + assert_eq!(>::get().len(), 0); } // Benchmark 'on_initialize` for the best case, i.e. nothing is done // because it's not a commitment interval. 
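// The `weights.rs` stubs added throughout this patch keep the zero-weight `impl WeightInfo for ()`.
// A production runtime would normally substitute an implementation generated by
// `frame-benchmarking` along these lines (the struct name and numbers are hypothetical) and set
// `type WeightInfo = IncentivizedInboundWeight<Runtime>;` in its Config impl.
use frame_support::weights::Weight;
use sp_std::marker::PhantomData;

pub struct IncentivizedInboundWeight<T>(PhantomData<T>);

impl<T: frame_system::Config> WeightInfo for IncentivizedInboundWeight<T> {
    fn submit() -> Weight {
        (90_000_000 as Weight)
            .saturating_add(T::DbWeight::get().reads(4 as Weight))
            .saturating_add(T::DbWeight::get().writes(2 as Weight))
    }
    fn set_reward_fraction() -> Weight {
        (15_000_000 as Weight)
            .saturating_add(T::DbWeight::get().writes(1 as Weight))
    }
}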
on_initialize_non_interval { - MessageQueue::append(Message { + >::append(Message { target: H160::zero(), nonce: 0u64, fee: U256::zero(), - payload: vec![1u8; T::MaxMessagePayloadSize::get()], + payload: vec![1u8; T::MaxMessagePayloadSize::get() as usize], }); Interval::::put::(10u32.into()); @@ -47,13 +47,13 @@ benchmarks! { }: { IncentivizedOutboundChannel::::on_initialize(block_number) } verify { - assert_eq!(MessageQueue::get().len(), 1); + assert_eq!(>::get().len(), 1); } // Benchmark 'on_initialize` for the case where it is a commitment interval // but there are no messages in the queue. on_initialize_no_messages { - MessageQueue::kill(); + >::kill(); let block_number = Interval::::get(); @@ -68,11 +68,11 @@ benchmarks! { }; let new_fee : U256 = 32000000.into(); - assert!(Fee::get() != new_fee); + assert!(>::get() != new_fee); }: _(authorized_origin, new_fee) verify { - assert_eq!(Fee::get(), new_fee); + assert_eq!(>::get(), new_fee); } } diff --git a/parachain/pallets/incentivized-channel/src/outbound/mod.rs b/parachain/pallets/incentivized-channel/src/outbound/mod.rs index 7d8ab223ed405..9298c73b7488e 100644 --- a/parachain/pallets/incentivized-channel/src/outbound/mod.rs +++ b/parachain/pallets/incentivized-channel/src/outbound/mod.rs @@ -1,27 +1,29 @@ +pub mod weights; + +#[cfg(feature = "runtime-benchmarks")] +mod benchmarking; + +#[cfg(test)] +mod test; + use codec::{Encode, Decode}; use ethabi::{self, Token}; use frame_support::{ - decl_error, decl_event, decl_module, decl_storage, - weights::Weight, dispatch::DispatchResult, traits::{Get, EnsureOrigin}, ensure, }; -use frame_system::{self as system}; use sp_core::{H160, H256, U256, RuntimeDebug}; use sp_io::offchain_index; use sp_runtime::{ traits::{Hash, Zero}, }; + use sp_std::prelude::*; use snowbridge_core::{SingleAsset, ChannelId, MessageNonce, types::AuxiliaryDigestItem}; -#[cfg(feature = "runtime-benchmarks")] -mod benchmarking; - -#[cfg(test)] -mod test; +pub use weights::WeightInfo; /// Wire-format for committed messages #[derive(Encode, Decode, Clone, PartialEq, RuntimeDebug)] @@ -36,66 +38,54 @@ pub struct Message { payload: Vec, } -/// Weight functions needed for this pallet. -pub trait WeightInfo { - fn on_initialize(num_messages: u32, avg_payload_bytes: u32) -> Weight; - fn on_initialize_non_interval() -> Weight; - fn on_initialize_no_messages() -> Weight; - fn set_fee() -> Weight; -} - -impl WeightInfo for () { - fn on_initialize(_: u32, _: u32) -> Weight { 0 } - fn on_initialize_non_interval() -> Weight { 0 } - fn on_initialize_no_messages() -> Weight { 0 } - fn set_fee() -> Weight { 0 } -} +pub use pallet::*; -pub trait Config: system::Config { - type Event: From + Into<::Event>; +#[frame_support::pallet] +pub mod pallet { - /// Prefix for offchain storage keys. - const INDEXING_PREFIX: &'static [u8]; + use super::*; - type Hashing: Hash; + use frame_support::pallet_prelude::*; + use frame_system::pallet_prelude::*; - // Max bytes in a message payload - type MaxMessagePayloadSize: Get; + #[pallet::pallet] + #[pallet::generate_store(pub(super) trait Store)] + pub struct Pallet(_); - /// Max number of messages that can be queued and committed in one go for a given channel. - type MaxMessagesPerCommit: Get; + #[pallet::config] + pub trait Config: frame_system::Config { + type Event: From> + IsType<::Event>; - type FeeCurrency: SingleAsset<::AccountId>; + /// Prefix for offchain storage keys. 
+		const INDEXING_PREFIX: &'static [u8];
-	/// The origin which may update reward related params
-	type SetFeeOrigin: EnsureOrigin<Self::Origin>;
+		type Hashing: Hash<Output = H256>;
-	/// Weight information for extrinsics in this pallet
-	type WeightInfo: WeightInfo;
-}
+		/// Max bytes in a message payload
+		#[pallet::constant]
+		type MaxMessagePayloadSize: Get<u64>;
-decl_storage! {
-	trait Store for Module<T: Config> as IncentivizedOutboundModule {
-		/// Interval between committing messages.
-		Interval get(fn interval) config(): T::BlockNumber;
+		/// Max number of messages per commitment
+		#[pallet::constant]
+		type MaxMessagesPerCommit: Get<u64>;
-		/// Messages waiting to be committed.
-		MessageQueue: Vec<Message>;
+		type FeeCurrency: SingleAsset<<Self as frame_system::Config>::AccountId>;
-		pub Nonce: u64;
+		/// The origin which may update reward related params
+		type SetFeeOrigin: EnsureOrigin<Self::Origin>;
-		pub Fee get(fn fee) config(): U256;
+		/// Weight information for extrinsics in this pallet
+		type WeightInfo: WeightInfo;
 	}
-}
-decl_event! {
-	pub enum Event {
+	#[pallet::event]
+	#[pallet::generate_deposit(pub(super) fn deposit_event)]
+	pub enum Event<T: Config> {
 		MessageAccepted(MessageNonce),
 	}
-}
-decl_error! {
-	pub enum Error for Module<T: Config> {
+	#[pallet::error]
+	pub enum Error<T> {
 		/// The message payload exceeds byte limit.
 		PayloadTooLarge,
 		/// No more messages can be queued for the channel during this commit cycle.
@@ -105,13 +95,50 @@ decl_error! {
 		/// Cannot increment nonce
 		Overflow,
 	}
-}
-decl_module! {
-	pub struct Module<T: Config> for enum Call where origin: T::Origin {
-		type Error = Error<T>;
-		fn deposit_event() = default;
+	/// Interval between commitments
+	#[pallet::storage]
+	#[pallet::getter(fn interval)]
+	pub(super) type Interval<T: Config> = StorageValue<_, T::BlockNumber, ValueQuery>;
+
+	/// Messages waiting to be committed.
+	#[pallet::storage]
+	pub(super) type MessageQueue<T: Config> = StorageValue<_, Vec<Message>, ValueQuery>;
+
+	/// Fee for accepting a message
+	#[pallet::storage]
+	#[pallet::getter(fn fee)]
+	pub(super) type Fee<T: Config> = StorageValue<_, U256, ValueQuery>;
+
+	#[pallet::storage]
+	pub type Nonce<T: Config> = StorageValue<_, u64, ValueQuery>;
+
+	#[pallet::genesis_config]
+	pub struct GenesisConfig<T: Config> {
+		pub interval: T::BlockNumber,
+		pub fee: U256,
+	}
+
+	#[cfg(feature = "std")]
+	impl<T: Config> Default for GenesisConfig<T> {
+		fn default() -> Self {
+			Self {
+				interval: Default::default(),
+				fee: Default::default(),
+			}
+		}
+	}
+	#[pallet::genesis_build]
+	impl<T: Config> GenesisBuild<T> for GenesisConfig<T> {
+		fn build(&self) {
+			<Interval<T>>::put(self.interval);
+			<Fee<T>>::put(self.fee);
+		}
+	}
+
+	#[pallet::hooks]
+	impl<T: Config> Hooks<BlockNumberFor<T>> for Pallet<T> {
 		// Generate a message commitment every [`Interval`] blocks.
 		//
 		// The commitment hash is included in an [`AuxiliaryDigestItem`] in the block header,
@@ -123,102 +150,105 @@ decl_module!
{ T::WeightInfo::on_initialize_non_interval() } } + } - #[weight = T::WeightInfo::set_fee()] - pub fn set_fee(origin, amount: U256) -> DispatchResult { + #[pallet::call] + impl Pallet { + #[pallet::weight(T::WeightInfo::set_fee())] + pub fn set_fee(origin: OriginFor, amount: U256) -> DispatchResult { T::SetFeeOrigin::ensure_origin(origin)?; - Fee::set(amount); + >::put(amount); Ok(()) } } -} - -impl Module { - - /// Submit message on the outbound channel - pub fn submit(who: &T::AccountId, target: H160, payload: &[u8]) -> DispatchResult { - ensure!( - MessageQueue::decode_len().unwrap_or(0) < T::MaxMessagesPerCommit::get(), - Error::::QueueSizeLimitReached, - ); - ensure!( - payload.len() <= T::MaxMessagePayloadSize::get(), - Error::::PayloadTooLarge, - ); - - Nonce::try_mutate(|nonce| -> DispatchResult { - if let Some(v) = nonce.checked_add(1) { - *nonce = v; - } else { - return Err(Error::::Overflow.into()) - } - // Attempt to charge a fee for message submission - let fee = Self::fee(); - T::FeeCurrency::withdraw(who, fee).map_err(|_| Error::::NoFunds)?; - - MessageQueue::append( - Message { - target, - nonce: *nonce, - fee, - payload: payload.to_vec(), - }, + impl Pallet { + /// Submit message on the outbound channel + pub fn submit(who: &T::AccountId, target: H160, payload: &[u8]) -> DispatchResult { + ensure!( + >::decode_len().unwrap_or(0) < T::MaxMessagesPerCommit::get() as usize, + Error::::QueueSizeLimitReached, + ); + ensure!( + payload.len() <= T::MaxMessagePayloadSize::get() as usize, + Error::::PayloadTooLarge, ); - >::deposit_event(Event::MessageAccepted(*nonce)); - Ok(()) - }) - } - fn commit() -> Weight { - let messages: Vec = MessageQueue::take(); - if messages.is_empty() { - return T::WeightInfo::on_initialize_no_messages(); + >::try_mutate(|nonce| -> DispatchResult { + if let Some(v) = nonce.checked_add(1) { + *nonce = v; + } else { + return Err(Error::::Overflow.into()) + } + + // Attempt to charge a fee for message submission + let fee = Self::fee(); + T::FeeCurrency::withdraw(who, fee).map_err(|_| Error::::NoFunds)?; + + >::append( + Message { + target, + nonce: *nonce, + fee, + payload: payload.to_vec(), + }, + ); + Self::deposit_event(Event::MessageAccepted(*nonce)); + Ok(()) + }) } - let commitment_hash = Self::make_commitment_hash(&messages); - let average_payload_size = Self::average_payload_size(&messages); + fn commit() -> Weight { + let messages: Vec = >::take(); + if messages.is_empty() { + return T::WeightInfo::on_initialize_no_messages(); + } - let digest_item = AuxiliaryDigestItem::Commitment( - ChannelId::Incentivized, - commitment_hash.clone() - ).into(); - >::deposit_log(digest_item); + let commitment_hash = Self::make_commitment_hash(&messages); + let average_payload_size = Self::average_payload_size(&messages); - let key = Self::make_offchain_key(commitment_hash); - offchain_index::set(&*key, &messages.encode()); + let digest_item = AuxiliaryDigestItem::Commitment( + ChannelId::Incentivized, + commitment_hash.clone() + ).into(); + >::deposit_log(digest_item); - T::WeightInfo::on_initialize( - messages.len() as u32, - average_payload_size as u32 - ) - } + let key = Self::make_offchain_key(commitment_hash); + offchain_index::set(&*key, &messages.encode()); - fn make_commitment_hash(messages: &[Message]) -> H256 { - let messages: Vec = messages - .iter() - .map(|message| { - Token::Tuple(vec![ - Token::Address(message.target), - Token::Uint(message.nonce.into()), - Token::Uint(message.fee.into()), - Token::Bytes(message.payload.clone()) - ]) - }) - 
.collect(); - let input = ethabi::encode(&vec![Token::Array(messages)]); - ::Hashing::hash(&input) - } + T::WeightInfo::on_initialize( + messages.len() as u32, + average_payload_size as u32 + ) + } - fn average_payload_size(messages: &[Message]) -> usize { - let sum: usize = messages.iter() - .fold(0, |acc, x| acc + x.payload.len()); - // We overestimate message payload size rather than underestimate. - // So add 1 here to account for integer division truncation. - (sum / messages.len()).saturating_add(1) - } + fn make_commitment_hash(messages: &[Message]) -> H256 { + let messages: Vec = messages + .iter() + .map(|message| { + Token::Tuple(vec![ + Token::Address(message.target), + Token::Uint(message.nonce.into()), + Token::Uint(message.fee.into()), + Token::Bytes(message.payload.clone()) + ]) + }) + .collect(); + let input = ethabi::encode(&vec![Token::Array(messages)]); + ::Hashing::hash(&input) + } + + fn average_payload_size(messages: &[Message]) -> usize { + let sum: usize = messages.iter() + .fold(0, |acc, x| acc + x.payload.len()); + // We overestimate message payload size rather than underestimate. + // So add 1 here to account for integer division truncation. + (sum / messages.len()).saturating_add(1) + } - fn make_offchain_key(hash: H256) -> Vec { - (T::INDEXING_PREFIX, ChannelId::Incentivized, hash).encode() + fn make_offchain_key(hash: H256) -> Vec { + (T::INDEXING_PREFIX, ChannelId::Incentivized, hash).encode() + } } } + diff --git a/parachain/pallets/incentivized-channel/src/outbound/test.rs b/parachain/pallets/incentivized-channel/src/outbound/test.rs index dc4e1ef3ed33d..a06dd6c940c05 100644 --- a/parachain/pallets/incentivized-channel/src/outbound/test.rs +++ b/parachain/pallets/incentivized-channel/src/outbound/test.rs @@ -1,5 +1,6 @@ use super::*; +use frame_support::traits::GenesisBuild; use sp_core::{H160, H256}; use frame_support::{ assert_ok, assert_noop, @@ -28,7 +29,7 @@ frame_support::construct_runtime!( { System: frame_system::{Pallet, Call, Storage, Event}, Assets: snowbridge_assets::{Pallet, Call, Storage, Event}, - IncentivizedOutboundChannel: incentivized_outbound_channel::{Pallet, Call, Storage, Event}, + IncentivizedOutboundChannel: incentivized_outbound_channel::{Pallet, Call, Config, Storage, Event}, } ); @@ -39,7 +40,7 @@ parameter_types! { pub const BlockHashCount: u64 = 250; } -impl system::Config for Test { +impl frame_system::Config for Test { type BaseCallFilter = (); type BlockWeights = (); type BlockLength = (); @@ -71,8 +72,8 @@ impl snowbridge_assets::Config for Test { } parameter_types! 
{ - pub const MaxMessagePayloadSize: usize = 128; - pub const MaxMessagesPerCommit: usize = 5; + pub const MaxMessagePayloadSize: u64 = 128; + pub const MaxMessagesPerCommit: u64 = 5; pub const Ether: AssetId = AssetId::ETH; } @@ -93,8 +94,8 @@ pub fn new_tester() -> sp_io::TestExternalities { let mut storage = frame_system::GenesisConfig::default().build_storage::().unwrap(); let config: incentivized_outbound_channel::GenesisConfig = incentivized_outbound_channel::GenesisConfig { + interval: 1u64, fee: 100.into(), - interval: 1u64 }; config.assimilate_storage(&mut storage).unwrap(); @@ -114,10 +115,10 @@ fn test_submit() { FeeCurrency::deposit(&who, 300.into()).unwrap(); assert_ok!(IncentivizedOutboundChannel::submit(&who, target, &vec![0, 1, 2])); - assert_eq!(Nonce::get(), 1); + assert_eq!(>::get(), 1); assert_ok!(IncentivizedOutboundChannel::submit(&who, target, &vec![0, 1, 2])); - assert_eq!(Nonce::get(), 2); + assert_eq!(>::get(), 2); }); } @@ -194,7 +195,7 @@ fn test_submit_exceeds_payload_limit() { let who: AccountId = Keyring::Bob.into(); let max_payload_bytes = MaxMessagePayloadSize::get(); - let payload: Vec = (0..).take(max_payload_bytes + 1).collect(); + let payload: Vec = (0..).take(max_payload_bytes as usize + 1).collect(); assert_noop!( IncentivizedOutboundChannel::submit(&who, target, payload.as_slice()), @@ -209,7 +210,7 @@ fn test_submit_fails_on_nonce_overflow() { let target = H160::zero(); let who: AccountId = Keyring::Bob.into(); - Nonce::set(u64::MAX); + >::set(u64::MAX); assert_noop!( IncentivizedOutboundChannel::submit(&who, target, &vec![0, 1, 2]), Error::::Overflow, diff --git a/parachain/pallets/incentivized-channel/src/outbound/weights.rs b/parachain/pallets/incentivized-channel/src/outbound/weights.rs new file mode 100644 index 0000000000000..2cac8a7d07721 --- /dev/null +++ b/parachain/pallets/incentivized-channel/src/outbound/weights.rs @@ -0,0 +1,14 @@ +use frame_support::weights::Weight; +pub trait WeightInfo { + fn on_initialize(num_messages: u32, avg_payload_bytes: u32) -> Weight; + fn on_initialize_non_interval() -> Weight; + fn on_initialize_no_messages() -> Weight; + fn set_fee() -> Weight; +} + +impl WeightInfo for () { + fn on_initialize(_: u32, _: u32) -> Weight { 0 } + fn on_initialize_non_interval() -> Weight { 0 } + fn on_initialize_no_messages() -> Weight { 0 } + fn set_fee() -> Weight { 0 } +} diff --git a/parachain/primitives/ethereum/src/difficulty.rs b/parachain/primitives/ethereum/src/difficulty.rs index e87deb61c193b..39eab013d7282 100644 --- a/parachain/primitives/ethereum/src/difficulty.rs +++ b/parachain/primitives/ethereum/src/difficulty.rs @@ -3,6 +3,8 @@ use ethereum_types::U256; use sp_runtime::RuntimeDebug; use sp_std::convert::TryFrom; +use codec::{Encode, Decode}; + const DIFFICULTY_BOUND_DIVISOR: u32 = 11; // right-shifts equivalent to division by 2048 const EXP_DIFFICULTY_PERIOD: u64 = 100000; const MINIMUM_DIFFICULTY: u32 = 131072; @@ -21,7 +23,7 @@ pub enum BombDelay { /// Describes when hard forks occurred that affect difficulty calculations. These /// values are network-specific. 
-#[derive(PartialEq, RuntimeDebug)] +#[derive(Copy, Clone, Encode, Decode, PartialEq, RuntimeDebug)] pub struct DifficultyConfig { // Block number on which Byzantium (EIP-649) rules activated pub byzantium_fork_block: u64, diff --git a/parachain/runtime/common/src/lib.rs b/parachain/runtime/common/src/lib.rs index 1340118a7eab6..b4bfef1e197ac 100644 --- a/parachain/runtime/common/src/lib.rs +++ b/parachain/runtime/common/src/lib.rs @@ -50,16 +50,16 @@ where { fn submit(channel_id: ChannelId, who: &T::AccountId, target: H160, payload: &[u8]) -> DispatchResult { match channel_id { - ChannelId::Basic => basic_channel::outbound::Module::::submit(who, target, payload), - ChannelId::Incentivized => incentivized_channel::outbound::Module::::submit(who, target, payload), + ChannelId::Basic => basic_channel::outbound::Pallet::::submit(who, target, payload), + ChannelId::Incentivized => incentivized_channel::outbound::Pallet::::submit(who, target, payload), } } } parameter_types! { pub const Ether: AssetId = AssetId::ETH; - pub const MaxMessagePayloadSize: usize = 256; - pub const MaxMessagesPerCommit: usize = 20; + pub const MaxMessagePayloadSize: u64 = 256; + pub const MaxMessagesPerCommit: u64 = 20; } parameter_types! { diff --git a/parachain/runtime/local/src/lib.rs b/parachain/runtime/local/src/lib.rs index 0ea6d4d87ccd9..791e72428b426 100644 --- a/parachain/runtime/local/src/lib.rs +++ b/parachain/runtime/local/src/lib.rs @@ -46,8 +46,6 @@ pub use frame_support::{ match_type, }; use frame_system::{EnsureOneOf, EnsureRoot}; -pub use pallet_balances::Call as BalancesCall; -pub use pallet_timestamp::Call as TimestampCall; use pallet_transaction_payment::FeeDetails; use pallet_transaction_payment_rpc_runtime_api::RuntimeDispatchInfo; #[cfg(any(feature = "std", test))] @@ -508,8 +506,8 @@ use incentivized_channel::outbound as incentivized_channel_outbound; impl basic_channel_inbound::Config for Runtime { type Event = Event; - type Verifier = ethereum_light_client::Module; - type MessageDispatch = dispatch::Module; + type Verifier = ethereum_light_client::Pallet; + type MessageDispatch = dispatch::Pallet; type WeightInfo = (); } @@ -538,8 +536,8 @@ impl Convert for FeeConverter { impl incentivized_channel_inbound::Config for Runtime { type Event = Event; - type Verifier = ethereum_light_client::Module; - type MessageDispatch = dispatch::Module; + type Verifier = ethereum_light_client::Pallet; + type MessageDispatch = dispatch::Pallet; type Currency = Balances; type SourceAccount = SourceAccount; type TreasuryAccount = TreasuryAccount; @@ -651,12 +649,12 @@ construct_runtime!( LocalCouncilMembership: pallet_membership::::{Pallet, Call, Storage, Event, Config} = 9, // Bridge Infrastructure - BasicInboundChannel: basic_channel_inbound::{Pallet, Call, Config, Storage, Event} = 10, - BasicOutboundChannel: basic_channel_outbound::{Pallet, Config, Storage, Event} = 11, - IncentivizedInboundChannel: incentivized_channel_inbound::{Pallet, Call, Config, Storage, Event} = 12, - IncentivizedOutboundChannel: incentivized_channel_outbound::{Pallet, Config, Storage, Event} = 13, + BasicInboundChannel: basic_channel_inbound::{Pallet, Call, Config, Storage, Event} = 10, + BasicOutboundChannel: basic_channel_outbound::{Pallet, Call, Config, Storage, Event} = 11, + IncentivizedInboundChannel: incentivized_channel_inbound::{Pallet, Call, Config, Storage, Event} = 12, + IncentivizedOutboundChannel: incentivized_channel_outbound::{Pallet, Call, Config, Storage, Event} = 13, Dispatch: dispatch::{Pallet, Call, Storage, 
Event, Origin} = 14, - EthereumLightClient: ethereum_light_client::{Pallet, Call, Storage, Event, Config} = 15, + EthereumLightClient: ethereum_light_client::{Pallet, Call, Config, Storage, Event} = 15, Assets: assets::{Pallet, Call, Config, Storage, Event} = 16, NFT: nft::{Pallet, Call, Config, Storage} = 24, @@ -675,7 +673,7 @@ construct_runtime!( // Bridge applications // NOTE: Do not change the following pallet indices without updating // the peer apps (smart contracts) on the Ethereum side. - DotApp: dot_app::{Pallet, Call, Config, Storage, Event} = 64, + DotApp: dot_app::{Pallet, Call, Config, Storage, Event} = 64, EthApp: eth_app::{Pallet, Call, Config, Storage, Event} = 65, Erc20App: erc20_app::{Pallet, Call, Config, Storage, Event} = 66, Erc721App: erc721_app::{Pallet, Call, Config, Storage, Event} = 67, diff --git a/parachain/runtime/rococo/src/lib.rs b/parachain/runtime/rococo/src/lib.rs index dfc192eda2627..1be111d50fbf3 100644 --- a/parachain/runtime/rococo/src/lib.rs +++ b/parachain/runtime/rococo/src/lib.rs @@ -510,8 +510,8 @@ use incentivized_channel::outbound as incentivized_channel_outbound; impl basic_channel_inbound::Config for Runtime { type Event = Event; - type Verifier = ethereum_light_client::Module; - type MessageDispatch = dispatch::Module; + type Verifier = ethereum_light_client::Pallet; + type MessageDispatch = dispatch::Pallet; type WeightInfo = weights::basic_channel_inbound_weights::WeightInfo; } @@ -540,8 +540,8 @@ impl Convert for FeeConverter { impl incentivized_channel_inbound::Config for Runtime { type Event = Event; - type Verifier = ethereum_light_client::Module; - type MessageDispatch = dispatch::Module; + type Verifier = ethereum_light_client::Pallet; + type MessageDispatch = dispatch::Pallet; type Currency = Balances; type SourceAccount = SourceAccount; type TreasuryAccount = TreasuryAccount; @@ -653,12 +653,12 @@ construct_runtime!( LocalCouncilMembership: pallet_membership::::{Pallet, Call, Storage, Event, Config} = 9, // Bridge Infrastructure - BasicInboundChannel: basic_channel_inbound::{Pallet, Call, Config, Storage, Event} = 10, - BasicOutboundChannel: basic_channel_outbound::{Pallet, Config, Storage, Event} = 11, - IncentivizedInboundChannel: incentivized_channel_inbound::{Pallet, Call, Config, Storage, Event} = 12, - IncentivizedOutboundChannel: incentivized_channel_outbound::{Pallet, Config, Storage, Event} = 13, + BasicInboundChannel: basic_channel_inbound::{Pallet, Call, Config, Storage, Event} = 10, + BasicOutboundChannel: basic_channel_outbound::{Pallet, Call, Config, Storage, Event} = 11, + IncentivizedInboundChannel: incentivized_channel_inbound::{Pallet, Call, Config, Storage, Event} = 12, + IncentivizedOutboundChannel: incentivized_channel_outbound::{Pallet, Call, Config, Storage, Event} = 13, Dispatch: dispatch::{Pallet, Call, Storage, Event, Origin} = 14, - EthereumLightClient: ethereum_light_client::{Pallet, Call, Storage, Event, Config} = 15, + EthereumLightClient: ethereum_light_client::{Pallet, Call, Config, Storage, Event} = 15, Assets: assets::{Pallet, Call, Config, Storage, Event} = 16, NFT: nft::{Pallet, Call, Config, Storage} = 24, @@ -677,7 +677,7 @@ construct_runtime!( // Bridge applications // NOTE: Do not change the following pallet indices without updating // the peer apps (smart contracts) on the Ethereum side. 
- DotApp: dot_app::{Pallet, Call, Config, Storage, Event} = 64, + DotApp: dot_app::{Pallet, Call, Config, Storage, Event} = 64, EthApp: eth_app::{Pallet, Call, Config, Storage, Event} = 65, Erc20App: erc20_app::{Pallet, Call, Config, Storage, Event} = 66, Erc721App: erc721_app::{Pallet, Call, Config, Storage, Event} = 67, diff --git a/parachain/runtime/snowbridge/src/lib.rs b/parachain/runtime/snowbridge/src/lib.rs index 1caa5315189b8..a7392fa67ac85 100644 --- a/parachain/runtime/snowbridge/src/lib.rs +++ b/parachain/runtime/snowbridge/src/lib.rs @@ -510,8 +510,8 @@ use incentivized_channel::outbound as incentivized_channel_outbound; impl basic_channel_inbound::Config for Runtime { type Event = Event; - type Verifier = ethereum_light_client::Module; - type MessageDispatch = dispatch::Module; + type Verifier = ethereum_light_client::Pallet; + type MessageDispatch = dispatch::Pallet; type WeightInfo = weights::basic_channel_inbound_weights::WeightInfo; } @@ -540,8 +540,8 @@ impl Convert for FeeConverter { impl incentivized_channel_inbound::Config for Runtime { type Event = Event; - type Verifier = ethereum_light_client::Module; - type MessageDispatch = dispatch::Module; + type Verifier = ethereum_light_client::Pallet; + type MessageDispatch = dispatch::Pallet; type Currency = Balances; type SourceAccount = SourceAccount; type TreasuryAccount = TreasuryAccount; @@ -653,12 +653,12 @@ construct_runtime!( LocalCouncilMembership: pallet_membership::::{Pallet, Call, Storage, Event, Config} = 9, // Bridge Infrastructure - BasicInboundChannel: basic_channel_inbound::{Pallet, Call, Config, Storage, Event} = 10, - BasicOutboundChannel: basic_channel_outbound::{Pallet, Config, Storage, Event} = 11, - IncentivizedInboundChannel: incentivized_channel_inbound::{Pallet, Call, Config, Storage, Event} = 12, - IncentivizedOutboundChannel: incentivized_channel_outbound::{Pallet, Config, Storage, Event} = 13, + BasicInboundChannel: basic_channel_inbound::{Pallet, Call, Config, Storage, Event} = 10, + BasicOutboundChannel: basic_channel_outbound::{Pallet, Call, Config, Storage, Event} = 11, + IncentivizedInboundChannel: incentivized_channel_inbound::{Pallet, Call, Config, Storage, Event} = 12, + IncentivizedOutboundChannel: incentivized_channel_outbound::{Pallet, Call, Config, Storage, Event} = 13, Dispatch: dispatch::{Pallet, Call, Storage, Event, Origin} = 14, - EthereumLightClient: ethereum_light_client::{Pallet, Call, Storage, Event, Config} = 15, + EthereumLightClient: ethereum_light_client::{Pallet, Call, Config, Storage, Event} = 15, Assets: assets::{Pallet, Call, Config, Storage, Event} = 16, NFT: nft::{Pallet, Call, Config, Storage} = 24, @@ -677,7 +677,7 @@ construct_runtime!( // Bridge applications // NOTE: Do not change the following pallet indices without updating // the peer apps (smart contracts) on the Ethereum side. 
- DotApp: dot_app::{Pallet, Call, Config, Storage, Event} = 64, + DotApp: dot_app::{Pallet, Call, Config, Storage, Event} = 64, EthApp: eth_app::{Pallet, Call, Config, Storage, Event} = 65, Erc20App: erc20_app::{Pallet, Call, Config, Storage, Event} = 66, Erc721App: erc721_app::{Pallet, Call, Config, Storage, Event} = 67, diff --git a/parachain/src/chain_spec/local.rs b/parachain/src/chain_spec/local.rs index d01cfa1a698e3..a2e9bebd7f82b 100644 --- a/parachain/src/chain_spec/local.rs +++ b/parachain/src/chain_spec/local.rs @@ -130,7 +130,6 @@ fn testnet_genesis( }, dot_app: local_runtime::DotAppConfig { address: hex!["8cF6147918A5CBb672703F879f385036f8793a24"].into(), - phantom: Default::default(), }, eth_app: local_runtime::EthAppConfig { address: hex!["B1185EDE04202fE62D38F5db72F71e38Ff3E8305"].into() diff --git a/parachain/src/chain_spec/rococo.rs b/parachain/src/chain_spec/rococo.rs index 64666b6285ed3..5ec2ce35f4496 100644 --- a/parachain/src/chain_spec/rococo.rs +++ b/parachain/src/chain_spec/rococo.rs @@ -130,7 +130,6 @@ fn testnet_genesis( }, dot_app: rococo_runtime::DotAppConfig { address: hex!["3f839E70117C64744930De8567Ae7A5363487cA3"].into(), - phantom: Default::default(), }, eth_app: rococo_runtime::EthAppConfig { address: hex!["3f0839385DB9cBEa8E73AdA6fa0CFe07E321F61d"].into() diff --git a/parachain/src/chain_spec/snowbridge.rs b/parachain/src/chain_spec/snowbridge.rs index 508e088e2bec1..ef0bc9361946e 100644 --- a/parachain/src/chain_spec/snowbridge.rs +++ b/parachain/src/chain_spec/snowbridge.rs @@ -130,7 +130,6 @@ fn testnet_genesis( }, dot_app: snowbridge_runtime::DotAppConfig { address: hex!["3f839E70117C64744930De8567Ae7A5363487cA3"].into(), - phantom: Default::default(), }, eth_app: snowbridge_runtime::EthAppConfig { address: hex!["3f0839385DB9cBEa8E73AdA6fa0CFe07E321F61d"].into() diff --git a/relayer/relays/parachain/beefy-listener.go b/relayer/relays/parachain/beefy-listener.go index c2b2281352ce2..26d407cf7486e 100644 --- a/relayer/relays/parachain/beefy-listener.go +++ b/relayer/relays/parachain/beefy-listener.go @@ -340,7 +340,7 @@ func (li *BeefyListener) discoverCatchupTasks( "nonce": ethIncentivizedNonce, }).Info("Checked latest nonce delivered to ethereum incentivized channel") - paraBasicNonceKey, err := types.CreateStorageKey(li.parachainConnection.Metadata(), "BasicOutboundModule", "Nonce", nil, nil) + paraBasicNonceKey, err := types.CreateStorageKey(li.parachainConnection.Metadata(), "BasicOutboundChannel", "Nonce", nil, nil) if err != nil { return nil, err } @@ -357,7 +357,7 @@ func (li *BeefyListener) discoverCatchupTasks( "nonce": uint64(paraBasicNonce), }).Info("Checked latest nonce generated by parachain basic channel") - paraIncentivizedNonceKey, err := types.CreateStorageKey(li.parachainConnection.Metadata(), "IncentivizedOutboundModule", "Nonce", nil, nil) + paraIncentivizedNonceKey, err := types.CreateStorageKey(li.parachainConnection.Metadata(), "IncentivizedOutboundChannel", "Nonce", nil, nil) if err != nil { log.Error(err) return nil, err diff --git a/test/package.json b/test/package.json index 1a4597867b2f1..2710f40b44c29 100644 --- a/test/package.json +++ b/test/package.json @@ -10,7 +10,7 @@ "@polkadot/api": "^5.3.2", "@polkadot/util": "^7.1.1", "@polkadot/util-crypto": "^7.1.1", - "@snowfork/snowbridge-types": "^0.2.5", + "@snowfork/snowbridge-types": "0.2.6", "@types/node": "^16.4.2", "@types/yargs": "^17.0.2", "bignumber.js": "^9.0.0", diff --git a/test/src/subclient/index.js b/test/src/subclient/index.js index 
0a92e621b21e3..60c65f71c6131 100644 --- a/test/src/subclient/index.js +++ b/test/src/subclient/index.js @@ -14,7 +14,7 @@ class SubClient { const provider = new WsProvider(this.endpoint); this.api = await ApiPromise.create({ provider, - typesBundle: bundle + typesBundle: bundle, }) this.keyring = new Keyring({ type: 'sr25519' }); diff --git a/test/yarn.lock b/test/yarn.lock index 7e9a449d96239..4e3a759c97f79 100644 --- a/test/yarn.lock +++ b/test/yarn.lock @@ -585,10 +585,10 @@ resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.14.0.tgz#9fb3a3cf3132328151f353de4632e01e52102bea" integrity sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ== -"@snowfork/snowbridge-types@^0.2.5": - version "0.2.5" - resolved "https://registry.yarnpkg.com/@snowfork/snowbridge-types/-/snowbridge-types-0.2.5.tgz#59b86e7f6f4de7636a5f25ba97cea19ca1182e14" - integrity sha512-wqUgfDd1frjzUpbAsOisaezoBLNOgIJiSRhSknKZYDegEqrg+/0jcPhrgXKqJxqMiQnrYmh0KXH9lo2FaadyzQ== +"@snowfork/snowbridge-types@0.2.6": + version "0.2.6" + resolved "https://registry.yarnpkg.com/@snowfork/snowbridge-types/-/snowbridge-types-0.2.6.tgz#b195d3e37ad3f4d686a38bbc4fb32eeb47163724" + integrity sha512-a05XjY0Gt/PX2sB0osuJIyMVY/UGH6fRRJf4xhLRB/6YfstysMkrnMC4nTWGnM9z73FOJB2UiATehV9Frj4I5A== dependencies: "@polkadot/keyring" "^7.1.1" "@polkadot/types" "^5.3.2"
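
Note on the commitment scheme carried over in the incentivized outbound channel above: `make_commitment_hash` ABI-encodes the queued messages as a single array of `(address, uint256, uint256, bytes)` tuples and hashes the encoding with the runtime-configured `Hashing`, while the raw message batch is persisted via `offchain_index::set` for relayers to fetch. The sketch below is a minimal, hypothetical off-chain reproduction of that encoding, not code from this patch: it assumes the `ethabi` and `tiny-keccak` crates, assumes Keccak-256 as the concrete hash (what an Ethereum-side verifier would be expected to recompute), and uses a local `Message` struct that merely mirrors the pallet's wire format.

// Minimal sketch (not part of this patch). Assumed crates:
//   ethabi = "14"
//   tiny-keccak = { version = "2", features = ["keccak"] }
use ethabi::{Address, Token, Uint};
use tiny_keccak::{Hasher, Keccak};

// Hypothetical stand-in for the pallet's `Message` wire format.
struct Message {
    target: Address,
    nonce: u64,
    fee: Uint,
    payload: Vec<u8>,
}

// ABI-encode the whole queue as one array of (address, uint256, uint256, bytes)
// tuples and hash the encoding, mirroring `make_commitment_hash`.
// Keccak-256 is an assumption here; the pallet defers to the runtime's `T::Hashing`.
fn commitment_hash(messages: &[Message]) -> [u8; 32] {
    let tokens: Vec<Token> = messages
        .iter()
        .map(|m| {
            Token::Tuple(vec![
                Token::Address(m.target),
                Token::Uint(Uint::from(m.nonce)),
                Token::Uint(m.fee),
                Token::Bytes(m.payload.clone()),
            ])
        })
        .collect();
    let encoded = ethabi::encode(&[Token::Array(tokens)]);

    // Hash the full encoding once, so a batch of messages maps to a single 32-byte value.
    let mut hasher = Keccak::v256();
    let mut output = [0u8; 32];
    hasher.update(&encoded);
    hasher.finalize(&mut output);
    output
}

fn main() {
    let queue = vec![Message {
        target: Address::zero(),
        nonce: 1,
        fee: Uint::zero(),
        payload: vec![0, 1, 2],
    }];
    println!("commitment: {:x?}", commitment_hash(&queue));
}

If the recomputed value matches the hash the runtime places in `AuxiliaryDigestItem::Commitment`, a relayer can submit the full message batch to Ethereum and have it verified against that single commitment; pairing the digest item with the offchain-indexed payload appears to be the intent of `commit()` in `outbound/mod.rs`.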