fix(analytics)!: properly take the base token analytics after each milestone (#1179)

* Properly take the base token analytics after each milestone

* Ensure that test analytics do not contain previous milestone data unless required. Fix expected base token values.

* Oops, included a change I didn't mean to
Alexandcoats authored Mar 10, 2023
1 parent e30ca37 commit 02d28b3
Showing 11 changed files with 76 additions and 59 deletions.
2 changes: 1 addition & 1 deletion src/analytics/ledger/active_addresses.rs
@@ -17,7 +17,7 @@ pub(crate) struct AddressActivityMeasurement {

/// Computes the number of addresses that were active during a given time interval.
#[allow(missing_docs)]
-#[derive(Debug, Default, Serialize, Deserialize)]
+#[derive(Debug, Default)]
pub(crate) struct AddressActivityAnalytics {
    addresses: HashSet<Address>,
}
4 changes: 2 additions & 2 deletions src/analytics/ledger/base_token.rs
@@ -7,7 +7,7 @@ use super::*;
use crate::model::utxo::{Address, TokenAmount};

/// Measures activity of the base token, such as Shimmer or IOTA.
-#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize)]
+#[derive(Copy, Clone, Debug, Default)]
pub(crate) struct BaseTokenActivityMeasurement {
    /// Represents the amount of tokens transferred. Tokens that are sent back to an address are not counted.
    pub(crate) booked_amount: TokenAmount,
@@ -45,6 +45,6 @@ impl Analytics for BaseTokenActivityMeasurement {
    }

    fn take_measurement(&mut self, _ctx: &dyn AnalyticsContext) -> Self::Measurement {
-        *self
+        std::mem::take(self)
    }
}
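
This take_measurement change is the heart of the fix: returning *self hands the caller a copy while leaving the accumulated totals in place, so each milestone reported totals accumulated since the analytics run began, whereas std::mem::take swaps the accumulator back to its Default value and returns the previous contents, so each milestone starts from zero. A minimal standalone sketch of the difference, using an illustrative stand-in type rather than the crate's own:

// Illustrative stand-in for a per-milestone accumulator such as BaseTokenActivityMeasurement.
#[derive(Copy, Clone, Debug, Default, PartialEq)]
struct Measurement {
    booked_amount: u64,
}

fn main() {
    let mut acc = Measurement { booked_amount: 96_847_628_508 };

    // Old behaviour: copying with `*self` leaves the accumulator untouched,
    // so the next milestone keeps adding onto the previous totals.
    let copied = acc;
    assert_eq!(acc.booked_amount, 96_847_628_508);

    // New behaviour: std::mem::take returns the current value and resets the
    // accumulator to Default::default(), so each milestone is measured on its own.
    let taken = std::mem::take(&mut acc);
    assert_eq!(taken, copied);
    assert_eq!(acc, Measurement::default());
}
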
2 changes: 1 addition & 1 deletion src/analytics/ledger/output_activity.rs
@@ -7,7 +7,7 @@ use super::*;
use crate::model::utxo::{Address, AliasId, NftId};

/// Nft activity statistics.
-#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize, Deserialize)]
+#[derive(Copy, Clone, Debug, Default, PartialEq)]
pub(crate) struct OutputActivityMeasurement {
    pub(crate) nft: NftActivityMeasurement,
    pub(crate) alias: AliasActivityMeasurement,
2 changes: 1 addition & 1 deletion src/analytics/ledger/transaction_size.rs
@@ -44,7 +44,7 @@ impl TransactionSizeBuckets {
    }
}

-#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize)]
+#[derive(Copy, Clone, Debug, Default)]
pub(crate) struct TransactionSizeMeasurement {
    pub(crate) input_buckets: TransactionSizeBuckets,
    pub(crate) output_buckets: TransactionSizeBuckets,
7 changes: 6 additions & 1 deletion src/analytics/mod.rs
@@ -419,16 +419,22 @@ mod test {

    #[derive(Serialize, Deserialize)]
    struct TestAnalytics {
+        #[serde(skip)]
        active_addresses: AddressActivityAnalytics,
        address_balance: AddressBalancesAnalytics,
+        #[serde(skip)]
        base_tokens: BaseTokenActivityMeasurement,
        ledger_outputs: LedgerOutputMeasurement,
        ledger_size: LedgerSizeAnalytics,
+        #[serde(skip)]
        output_activity: OutputActivityMeasurement,
+        #[serde(skip)]
        transaction_size: TransactionSizeMeasurement,
        unclaimed_tokens: UnclaimedTokenMeasurement,
        unlock_conditions: UnlockConditionMeasurement,
+        #[serde(skip)]
        block_activity: BlockActivityMeasurement,
+        #[serde(skip)]
        milestone_size: MilestoneSizeMeasurement,
    }

@@ -529,7 +535,6 @@ mod test {
            ron::de::from_reader(File::open("tests/data/measurements.ron").unwrap()).unwrap();
        for (milestone, analytics) in analytics_map {
            let expected = &expected[&milestone];
-            println!("{milestone} - {analytics:#?}");

            macro_rules! assert_expected {
                ($path:expr) => {
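
The #[serde(skip)] attributes above keep the test snapshot round-trip consistent with the derive changes in the other files: a skipped field is omitted when the struct is serialized and repopulated from Default::default() when it is read back, so the per-milestone measurements start empty instead of carrying data from an earlier milestone. A small sketch of that behaviour, with illustrative field names and using the ron crate that the test already reads its expected values with:

use serde::{Deserialize, Serialize};

#[derive(Debug, Default, PartialEq, Serialize, Deserialize)]
struct Snapshot {
    // Persisted normally across the round trip.
    address_with_balance_count: u64,
    // Not written out; refilled from Default::default() when loading.
    #[serde(skip)]
    booked_amount: u64,
}

fn main() {
    let before = Snapshot { address_with_balance_count: 111_983, booked_amount: 42 };
    let text = ron::ser::to_string(&before).unwrap();
    assert!(!text.contains("booked_amount")); // the skipped field is absent from the output
    let after: Snapshot = ron::de::from_str(&text).unwrap();
    assert_eq!(after.address_with_balance_count, 111_983);
    assert_eq!(after.booked_amount, 0); // reset, as intended for per-milestone data
}
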
2 changes: 1 addition & 1 deletion src/analytics/tangle/block_activity.rs
@@ -5,7 +5,7 @@ use super::*;
use crate::model::metadata::LedgerInclusionState;

/// The type of payloads that occurred within a single milestone.
-#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize)]
+#[derive(Copy, Clone, Debug, Default)]
pub(crate) struct BlockActivityMeasurement {
    pub(crate) milestone_count: usize,
    pub(crate) no_payload_count: usize,
2 changes: 1 addition & 1 deletion src/analytics/tangle/milestone_size.rs
@@ -4,7 +4,7 @@
use super::*;

/// Milestone size statistics.
-#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize)]
+#[derive(Copy, Clone, Debug, Default)]
pub(crate) struct MilestoneSizeMeasurement {
    pub(crate) total_milestone_payload_bytes: usize,
    pub(crate) total_tagged_data_payload_bytes: usize,
2 changes: 0 additions & 2 deletions src/analytics/tangle/mod.rs
@@ -3,8 +3,6 @@

//! Statistics about the tangle.
-use serde::{Deserialize, Serialize};
-
pub(crate) use self::{
    block_activity::BlockActivityMeasurement, milestone_size::MilestoneSizeMeasurement,
    protocol_params::ProtocolParamsAnalytics,
102 changes: 58 additions & 44 deletions tests/data/measurement_gatherer.mongodb
@@ -27,6 +27,20 @@ for (let ledger_index = start_index; ledger_index<=end_index; ++ledger_index) {
// { "$count" : "address_with_balance_count" },
// ]).toArray()[0];

+// ms.base_tokens = db.stardust_outputs.aggregate([
+// { "$match": {
+// "metadata.booked.milestone_index": ledger_index,
+// } },
+// { "$group": {
+// "_id": null,
+// "booked_amount": { "$sum": { "$toDecimal": "$output.amount" } }
+// } },
+// { "$project": {
+// "_id": 0,
+// "booked_amount": 1,
+// }}
+// ]).toArray()[0];

// ms.ledger_outputs = db.stardust_outputs.aggregate([
// { "$match": {
// "metadata.booked.milestone_index": { "$lte": ledger_index },
@@ -92,50 +106,50 @@
// } },
// ]).toArray()[0];

ms.transaction_size = db.stardust_blocks.aggregate([
{ "$match": {
"metadata.referenced_by_milestone_index": ledger_index,
"block.payload.kind": "transaction",
} },
{ "$lookup": {
"from": "stardust_outputs",
"localField": "block.payload.transaction_id",
"foreignField": "_id.transaction_id",
"as": "outputs",
} },
{ "$set": {
"input_count": { "$size": "$block.payload.essence.inputs" },
"output_count": { "$size": "$outputs" },
} },
{ "$group": {
"_id": null,
"inputs_1": { "$sum": { "$cond": [ { "$eq": [ "$input_count", 1 ]}, 1, 0 ] } },
"inputs_2": { "$sum": { "$cond": [ { "$eq": [ "$input_count", 2 ]}, 1, 0 ] } },
"inputs_3": { "$sum": { "$cond": [ { "$eq": [ "$input_count", 3 ]}, 1, 0 ] } },
"inputs_4": { "$sum": { "$cond": [ { "$eq": [ "$input_count", 4 ]}, 1, 0 ] } },
"inputs_5": { "$sum": { "$cond": [ { "$eq": [ "$input_count", 5 ]}, 1, 0 ] } },
"inputs_6": { "$sum": { "$cond": [ { "$eq": [ "$input_count", 6 ]}, 1, 0 ] } },
"inputs_7": { "$sum": { "$cond": [ { "$eq": [ "$input_count", 7 ]}, 1, 0 ] } },
"inputs_small": { "$sum": { "$cond": [ { "$and": [ { "$gte": [ "$input_count", 8 ]}, { "$lt": [ "$input_count", 16 ] } ] }, 1, 0 ] } },
"inputs_medium": { "$sum": { "$cond": [ { "$and": [ { "$gte": [ "$input_count", 16 ]}, { "$lt": [ "$input_count", 32 ] } ] }, 1, 0 ] } },
"inputs_large": { "$sum": { "$cond": [ { "$and": [ { "$gte": [ "$input_count", 32 ]}, { "$lt": [ "$input_count", 64 ] } ] }, 1, 0 ] } },
"inputs_huge": { "$sum": { "$cond": [ { "$and": [ { "$gte": [ "$input_count", 64 ]}, { "$lt": [ "$input_count", 128 ] } ] }, 1, 0 ] } },
"outputs_1": { "$sum": { "$cond": [ { "$eq": [ "$output_count", 1 ]}, 1, 0 ] } },
"outputs_2": { "$sum": { "$cond": [ { "$eq": [ "$output_count", 2 ]}, 1, 0 ] } },
"outputs_3": { "$sum": { "$cond": [ { "$eq": [ "$output_count", 3 ]}, 1, 0 ] } },
"outputs_4": { "$sum": { "$cond": [ { "$eq": [ "$output_count", 4 ]}, 1, 0 ] } },
"outputs_5": { "$sum": { "$cond": [ { "$eq": [ "$output_count", 5 ]}, 1, 0 ] } },
"outputs_6": { "$sum": { "$cond": [ { "$eq": [ "$output_count", 6 ]}, 1, 0 ] } },
"outputs_7": { "$sum": { "$cond": [ { "$eq": [ "$output_count", 7 ]}, 1, 0 ] } },
"outputs_small": { "$sum": { "$cond": [ { "$and": [ { "$gte": [ "$output_count", 8 ]}, { "$lt": [ "$output_count", 16 ] } ] }, 1, 0 ] } },
"outputs_medium": { "$sum": { "$cond": [ { "$and": [ { "$gte": [ "$output_count", 16 ]}, { "$lt": [ "$output_count", 32 ] } ] }, 1, 0 ] } },
"outputs_large": { "$sum": { "$cond": [ { "$and": [ { "$gte": [ "$output_count", 32 ]}, { "$lt": [ "$output_count", 64 ] } ] }, 1, 0 ] } },
"outputs_huge": { "$sum": { "$cond": [ { "$and": [ { "$gte": [ "$output_count", 64 ]}, { "$lt": [ "$output_count", 128 ] } ] }, 1, 0 ] } },
} },
{ "$project": {
"_id": 0,
}}
]).toArray()[0];
// ms.transaction_size = db.stardust_blocks.aggregate([
// { "$match": {
// "metadata.referenced_by_milestone_index": ledger_index,
// "block.payload.kind": "transaction",
// } },
// { "$lookup": {
// "from": "stardust_outputs",
// "localField": "block.payload.transaction_id",
// "foreignField": "_id.transaction_id",
// "as": "outputs",
// } },
// { "$set": {
// "input_count": { "$size": "$block.payload.essence.inputs" },
// "output_count": { "$size": "$outputs" },
// } },
// { "$group": {
// "_id": null,
// "inputs_1": { "$sum": { "$cond": [ { "$eq": [ "$input_count", 1 ]}, 1, 0 ] } },
// "inputs_2": { "$sum": { "$cond": [ { "$eq": [ "$input_count", 2 ]}, 1, 0 ] } },
// "inputs_3": { "$sum": { "$cond": [ { "$eq": [ "$input_count", 3 ]}, 1, 0 ] } },
// "inputs_4": { "$sum": { "$cond": [ { "$eq": [ "$input_count", 4 ]}, 1, 0 ] } },
// "inputs_5": { "$sum": { "$cond": [ { "$eq": [ "$input_count", 5 ]}, 1, 0 ] } },
// "inputs_6": { "$sum": { "$cond": [ { "$eq": [ "$input_count", 6 ]}, 1, 0 ] } },
// "inputs_7": { "$sum": { "$cond": [ { "$eq": [ "$input_count", 7 ]}, 1, 0 ] } },
// "inputs_small": { "$sum": { "$cond": [ { "$and": [ { "$gte": [ "$input_count", 8 ]}, { "$lt": [ "$input_count", 16 ] } ] }, 1, 0 ] } },
// "inputs_medium": { "$sum": { "$cond": [ { "$and": [ { "$gte": [ "$input_count", 16 ]}, { "$lt": [ "$input_count", 32 ] } ] }, 1, 0 ] } },
// "inputs_large": { "$sum": { "$cond": [ { "$and": [ { "$gte": [ "$input_count", 32 ]}, { "$lt": [ "$input_count", 64 ] } ] }, 1, 0 ] } },
// "inputs_huge": { "$sum": { "$cond": [ { "$and": [ { "$gte": [ "$input_count", 64 ]}, { "$lt": [ "$input_count", 128 ] } ] }, 1, 0 ] } },
// "outputs_1": { "$sum": { "$cond": [ { "$eq": [ "$output_count", 1 ]}, 1, 0 ] } },
// "outputs_2": { "$sum": { "$cond": [ { "$eq": [ "$output_count", 2 ]}, 1, 0 ] } },
// "outputs_3": { "$sum": { "$cond": [ { "$eq": [ "$output_count", 3 ]}, 1, 0 ] } },
// "outputs_4": { "$sum": { "$cond": [ { "$eq": [ "$output_count", 4 ]}, 1, 0 ] } },
// "outputs_5": { "$sum": { "$cond": [ { "$eq": [ "$output_count", 5 ]}, 1, 0 ] } },
// "outputs_6": { "$sum": { "$cond": [ { "$eq": [ "$output_count", 6 ]}, 1, 0 ] } },
// "outputs_7": { "$sum": { "$cond": [ { "$eq": [ "$output_count", 7 ]}, 1, 0 ] } },
// "outputs_small": { "$sum": { "$cond": [ { "$and": [ { "$gte": [ "$output_count", 8 ]}, { "$lt": [ "$output_count", 16 ] } ] }, 1, 0 ] } },
// "outputs_medium": { "$sum": { "$cond": [ { "$and": [ { "$gte": [ "$output_count", 16 ]}, { "$lt": [ "$output_count", 32 ] } ] }, 1, 0 ] } },
// "outputs_large": { "$sum": { "$cond": [ { "$and": [ { "$gte": [ "$output_count", 32 ]}, { "$lt": [ "$output_count", 64 ] } ] }, 1, 0 ] } },
// "outputs_huge": { "$sum": { "$cond": [ { "$and": [ { "$gte": [ "$output_count", 64 ]}, { "$lt": [ "$output_count", 128 ] } ] }, 1, 0 ] } },
// } },
// { "$project": {
// "_id": 0,
// }}
// ]).toArray()[0];

// ms.unclaimed_tokens = db.stardust_outputs.aggregate([
// { "$match": {
10 changes: 5 additions & 5 deletions tests/data/measurements.ron
@@ -3,7 +3,7 @@
17339: {
    "analytics.active_addresses.count": 32,
    "analytics.address_balance.address_with_balance_count": 111983,
-    /* UNREPRESENTED */ "analytics.base_tokens.booked_amount.0": 96847628508,
+    "analytics.base_tokens.booked_amount.0": 96847628508,
    /* UNREPRESENTED */ "analytics.base_tokens.transferred_amount.0": 95428996456,
    "analytics.ledger_outputs.basic.count": 99398,
    "analytics.ledger_outputs.basic.amount.0": 1813618032119665,
@@ -74,8 +74,8 @@
17340: {
    "analytics.active_addresses.count": 14,
    "analytics.address_balance.address_with_balance_count": 111981,
-    /* UNREPRESENTED */ "analytics.base_tokens.booked_amount.0": 97738659308,
-    /* UNREPRESENTED */ "analytics.base_tokens.transferred_amount.0": 96318557256,
+    "analytics.base_tokens.booked_amount.0": 891030800,
+    /* UNREPRESENTED */ "analytics.base_tokens.transferred_amount.0": 889560800,
    "analytics.ledger_outputs.basic.count": 99398,
    "analytics.ledger_outputs.basic.amount.0": 1813618032119665,
    "analytics.ledger_outputs.alias.count": 40,
@@ -145,8 +145,8 @@
17341: {
    "analytics.active_addresses.count": 30,
    "analytics.address_balance.address_with_balance_count": 112005,
-    /* UNREPRESENTED */ "analytics.base_tokens.booked_amount.0": 98413982908,
-    /* UNREPRESENTED */ "analytics.base_tokens.transferred_amount.0": 96324042456,
+    "analytics.base_tokens.booked_amount.0": 675323600,
+    /* UNREPRESENTED */ "analytics.base_tokens.transferred_amount.0": 5485200,
    "analytics.ledger_outputs.basic.count": 99400,
    "analytics.ledger_outputs.basic.amount.0": 1813618028256565,
    "analytics.ledger_outputs.alias.count": 40,
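
The corrected expectations line up with the switch from cumulative to per-milestone accounting: each new base token value for milestones 17340 and 17341 equals the difference between consecutive cumulative values from before the fix, while 17339, the first milestone listed, is unchanged. A quick arithmetic check, not part of the test suite:

fn main() {
    // Milestone 17340: previous cumulative expectation minus milestone 17339's.
    assert_eq!(97_738_659_308_u64 - 96_847_628_508, 891_030_800); // booked_amount
    assert_eq!(96_318_557_256_u64 - 95_428_996_456, 889_560_800); // transferred_amount
    // Milestone 17341: previous cumulative expectation minus milestone 17340's.
    assert_eq!(98_413_982_908_u64 - 97_738_659_308, 675_323_600); // booked_amount
    assert_eq!(96_324_042_456_u64 - 96_318_557_256, 5_485_200); // transferred_amount
}
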
Binary file modified tests/data/ms_17338_analytics_compressed
