[stable2409] Backport #5644 (#5651)
Backport #5644 into `stable2409` (cc @bkchr).

<!--
  # To be used by other automation, do not modify:
  original-pr-number: #${pull_number}
-->

---------

Signed-off-by: Oliver Tale-Yazdi <oliver.tale-yazdi@parity.io>
Co-authored-by: Bastian Köcher <git@kchr.de>
Co-authored-by: Oliver Tale-Yazdi <oliver.tale-yazdi@parity.io>
3 people authored Sep 9, 2024
1 parent 823ecee commit 3b9bb9b
Showing 2 changed files with 39 additions and 49 deletions.
8 changes: 8 additions & 0 deletions prdoc/pr_5644.prdoc
@@ -0,0 +1,8 @@
+title: 'pallet-utility: Improve weight annotations'
+doc:
+- audience: Runtime Dev
+  description: |-
+    Prevent allocations when calculating the weights.
+crates:
+- name: pallet-utility
+  bump: patch
80 changes: 31 additions & 49 deletions substrate/frame/utility/src/lib.rs
@@ -74,7 +74,7 @@ pub use pallet::*;
 #[frame_support::pallet]
 pub mod pallet {
     use super::*;
-    use frame_support::pallet_prelude::*;
+    use frame_support::{dispatch::DispatchClass, pallet_prelude::*};
     use frame_system::pallet_prelude::*;
 
     #[pallet::pallet]
@@ -183,21 +183,8 @@ pub mod pallet {
         /// event is deposited.
         #[pallet::call_index(0)]
         #[pallet::weight({
-            let dispatch_infos = calls.iter().map(|call| call.get_dispatch_info()).collect::<Vec<_>>();
-            let dispatch_weight = dispatch_infos.iter()
-                .map(|di| di.weight)
-                .fold(Weight::zero(), |total: Weight, weight: Weight| total.saturating_add(weight))
-                .saturating_add(T::WeightInfo::batch(calls.len() as u32));
-            let dispatch_class = {
-                let all_operational = dispatch_infos.iter()
-                    .map(|di| di.class)
-                    .all(|class| class == DispatchClass::Operational);
-                if all_operational {
-                    DispatchClass::Operational
-                } else {
-                    DispatchClass::Normal
-                }
-            };
+            let (dispatch_weight, dispatch_class) = Pallet::<T>::weight_and_dispatch_class(&calls);
+            let dispatch_weight = dispatch_weight.saturating_add(T::WeightInfo::batch(calls.len() as u32));
             (dispatch_weight, dispatch_class)
         })]
         pub fn batch(
@@ -233,13 +220,13 @@ pub mod pallet {
                     // Take the weight of this function itself into account.
                     let base_weight = T::WeightInfo::batch(index.saturating_add(1) as u32);
                     // Return the actual used weight + base_weight of this call.
-                    return Ok(Some(base_weight + weight).into())
+                    return Ok(Some(base_weight.saturating_add(weight)).into())
                 }
                 Self::deposit_event(Event::ItemCompleted);
             }
             Self::deposit_event(Event::BatchCompleted);
             let base_weight = T::WeightInfo::batch(calls_len as u32);
-            Ok(Some(base_weight + weight).into())
+            Ok(Some(base_weight.saturating_add(weight)).into())
         }
 
         /// Send a call through an indexed pseudonym of the sender.
@@ -305,21 +292,8 @@ pub mod pallet {
         /// - O(C) where C is the number of calls to be batched.
         #[pallet::call_index(2)]
         #[pallet::weight({
-            let dispatch_infos = calls.iter().map(|call| call.get_dispatch_info()).collect::<Vec<_>>();
-            let dispatch_weight = dispatch_infos.iter()
-                .map(|di| di.weight)
-                .fold(Weight::zero(), |total: Weight, weight: Weight| total.saturating_add(weight))
-                .saturating_add(T::WeightInfo::batch_all(calls.len() as u32));
-            let dispatch_class = {
-                let all_operational = dispatch_infos.iter()
-                    .map(|di| di.class)
-                    .all(|class| class == DispatchClass::Operational);
-                if all_operational {
-                    DispatchClass::Operational
-                } else {
-                    DispatchClass::Normal
-                }
-            };
+            let (dispatch_weight, dispatch_class) = Pallet::<T>::weight_and_dispatch_class(&calls);
+            let dispatch_weight = dispatch_weight.saturating_add(T::WeightInfo::batch_all(calls.len() as u32));
             (dispatch_weight, dispatch_class)
         })]
         pub fn batch_all(
@@ -359,7 +333,7 @@ pub mod pallet {
                     // Take the weight of this function itself into account.
                     let base_weight = T::WeightInfo::batch_all(index.saturating_add(1) as u32);
                     // Return the actual used weight + base_weight of this call.
-                    err.post_info = Some(base_weight + weight).into();
+                    err.post_info = Some(base_weight.saturating_add(weight)).into();
                     err
                 })?;
                 Self::deposit_event(Event::ItemCompleted);
@@ -414,21 +388,8 @@ pub mod pallet {
         /// - O(C) where C is the number of calls to be batched.
         #[pallet::call_index(4)]
         #[pallet::weight({
-            let dispatch_infos = calls.iter().map(|call| call.get_dispatch_info()).collect::<Vec<_>>();
-            let dispatch_weight = dispatch_infos.iter()
-                .map(|di| di.weight)
-                .fold(Weight::zero(), |total: Weight, weight: Weight| total.saturating_add(weight))
-                .saturating_add(T::WeightInfo::force_batch(calls.len() as u32));
-            let dispatch_class = {
-                let all_operational = dispatch_infos.iter()
-                    .map(|di| di.class)
-                    .all(|class| class == DispatchClass::Operational);
-                if all_operational {
-                    DispatchClass::Operational
-                } else {
-                    DispatchClass::Normal
-                }
-            };
+            let (dispatch_weight, dispatch_class) = Pallet::<T>::weight_and_dispatch_class(&calls);
+            let dispatch_weight = dispatch_weight.saturating_add(T::WeightInfo::force_batch(calls.len() as u32));
             (dispatch_weight, dispatch_class)
         })]
         pub fn force_batch(
@@ -494,6 +455,27 @@ pub mod pallet {
             res.map(|_| ()).map_err(|e| e.error)
         }
     }
+
+    impl<T: Config> Pallet<T> {
+        /// Get the accumulated `weight` and the dispatch class for the given `calls`.
+        fn weight_and_dispatch_class(
+            calls: &[<T as Config>::RuntimeCall],
+        ) -> (Weight, DispatchClass) {
+            let dispatch_infos = calls.iter().map(|call| call.get_dispatch_info());
+            let (dispatch_weight, dispatch_class) = dispatch_infos.fold(
+                (Weight::zero(), DispatchClass::Operational),
+                |(total_weight, dispatch_class): (Weight, DispatchClass), di| {
+                    (
+                        total_weight.saturating_add(di.weight),
+                        // If not all are `Operational`, we want to use `DispatchClass::Normal`.
+                        if di.class == DispatchClass::Normal { di.class } else { dispatch_class },
+                    )
+                },
+            );
+
+            (dispatch_weight, dispatch_class)
+        }
+    }
 }
 
 /// A pallet identifier. These are per pallet and should be stored in a registry somewhere.
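For readers skimming the diff: the new `Pallet::<T>::weight_and_dispatch_class` helper replaces the per-annotation `collect::<Vec<_>>()` with a single fold over the dispatch infos, which is the allocation the prdoc refers to. Below is a minimal, self-contained sketch of that fold pattern. It uses simplified stand-in types (a plain `u64` for `Weight` and toy `DispatchClass`/`DispatchInfo` definitions), not the real FRAME types.

// Illustrative sketch only: the same single-pass fold, no intermediate Vec.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum DispatchClass {
    Normal,
    Operational,
}

#[derive(Clone, Copy)]
struct DispatchInfo {
    weight: u64,
    class: DispatchClass,
}

fn weight_and_dispatch_class(infos: &[DispatchInfo]) -> (u64, DispatchClass) {
    infos.iter().fold((0u64, DispatchClass::Operational), |(total, class), di| {
        (
            // Weights are summed with saturating arithmetic, as in the pallet.
            total.saturating_add(di.weight),
            // A single `Normal` call downgrades the whole batch to `Normal`.
            if di.class == DispatchClass::Normal { di.class } else { class },
        )
    })
}

fn main() {
    let calls = [
        DispatchInfo { weight: 10, class: DispatchClass::Operational },
        DispatchInfo { weight: 5, class: DispatchClass::Normal },
    ];
    assert_eq!(weight_and_dispatch_class(&calls), (15, DispatchClass::Normal));
}

As in the pallet code above, the batch stays `Operational` only if every call in it is `Operational`; otherwise the combined class falls back to `Normal`.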
