
Commit

feat(types): add Copy and Into<Bson> impls (#230)
* Add Into<Bson> for common types and make many types Copy
Alexandcoats authored and grtlr committed May 31, 2022
1 parent 0edf21e commit 165303c
Showing 26 changed files with 173 additions and 54 deletions.
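
Every file below follows the same pattern: values that previously had to be wrapped in a fallible bson::to_bson(...)? call (and often cloned first) are now passed to the doc! macro directly, because doc! converts its values through Into<Bson> and the touched ID/address types are now Copy. A minimal sketch of that mechanism, illustrative only and not code from this repository; MyId is a hypothetical stand-in for types such as BlockId or OutputId:

use mongodb::bson::{doc, to_bson, Bson, Document};
use serde::Serialize;

// Hypothetical stand-in for the repository's ID types.
#[derive(Copy, Clone, Serialize)]
struct MyId([u8; 4]);

impl From<MyId> for Bson {
    fn from(val: MyId) -> Self {
        // Same reasoning as in this commit: serializing a well-defined
        // type cannot fail, so the unwrap is safe.
        to_bson(&val).unwrap()
    }
}

fn main() {
    let id = MyId([1, 2, 3, 4]);

    // Old style: manual, fallible conversion at every call site.
    let before: Document = doc! { "_id": to_bson(&id).unwrap() };

    // New style: `doc!` calls `.into()` on its values, so the `Copy` id
    // can be passed as-is, with no `to_bson` and no `.clone()`.
    let after: Document = doc! { "_id": id };

    assert_eq!(before, after);
}
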
32 changes: 16 additions & 16 deletions src/db/collections/block.rs
@@ -3,7 +3,7 @@

use futures::{Stream, StreamExt};
use mongodb::{
- bson::{self, doc, to_bson, to_document},
+ bson::{self, doc},
error::Error,
options::{FindOneOptions, FindOptions, UpdateOptions},
};
@@ -66,7 +66,7 @@ impl MongoDb {
self.0
.collection::<Block>(BlockDocument::COLLECTION)
.find_one(
- doc! {"_id": bson::to_bson(block_id)?},
+ doc! {"_id": block_id},
FindOneOptions::builder().projection(doc! {"block": 1 }).build(),
)
.await
@@ -77,7 +77,7 @@ impl MongoDb {
self.0
.collection::<Vec<u8>>(BlockDocument::COLLECTION)
.find_one(
- doc! {"_id": bson::to_bson(block_id)?},
+ doc! {"_id": block_id},
FindOneOptions::builder().projection(doc! {"raw": 1 }).build(),
)
.await
@@ -88,7 +88,7 @@ impl MongoDb {
self.0
.collection::<BlockMetadata>(BlockDocument::COLLECTION)
.find_one(
- doc! {"_id": bson::to_bson(block_id)?},
+ doc! {"_id": block_id},
FindOneOptions::builder().projection(doc! {"metadata": 1 }).build(),
)
.await
@@ -104,7 +104,7 @@ impl MongoDb {
self.0
.collection::<BlockId>(BlockDocument::COLLECTION)
.find(
- doc! {"block.parents": bson::to_bson(block_id)?},
+ doc! {"block.parents": block_id},
FindOptions::builder()
.skip((page_size * page) as u64)
.sort(doc! {"metadata.referenced_by_milestone_index": -1})
@@ -125,7 +125,7 @@ impl MongoDb {
white_flag_index: u32,
) -> Result<(), Error> {
let block_document = BlockDocument {
- block_id: block_id.clone(),
+ block_id,
block,
raw,
metadata,
@@ -135,8 +135,8 @@ impl MongoDb {
self.0
.collection::<BlockDocument>(BlockDocument::COLLECTION)
.update_one(
- doc! { "_id": to_bson(&block_id)? },
- doc! { "$set": to_document(&block_document)? },
+ doc! { "_id": block_id },
+ doc! { "$set": bson::to_document(&block_document)? },
UpdateOptions::builder().upsert(true).build(),
)
.await?;
@@ -150,8 +150,8 @@ impl MongoDb {
.collection::<Block>(BlockDocument::COLLECTION)
.find_one(
doc! {
"inclusion_state": bson::to_bson(&LedgerInclusionState::Included)?,
"block.payload.transaction_id": bson::to_bson(transaction_id)?,
"inclusion_state": LedgerInclusionState::Included,
"block.payload.transaction_id": transaction_id,
},
FindOneOptions::builder().projection(doc! {"block": 1 }).build(),
)
@@ -173,15 +173,15 @@ impl MongoDb {
// Only outputs for this address
doc! { "$match": {
"milestone_index": { "$gt": start_milestone, "$lt": end_milestone },
"inclusion_state": bson::to_bson(&LedgerInclusionState::Included)?,
"block.payload.essence.outputs.unlocks": bson::to_bson(&address)?
"inclusion_state": LedgerInclusionState::Included,
"block.payload.essence.outputs.unlocks": &address
} },
doc! { "$set": {
"block.payload.essence.outputs": {
"$filter": {
"input": "$block.payload.essence.outputs",
"as": "output",
"cond": { "$eq": [ "$$output.unlock_conditions", bson::to_bson(&address)? ] }
"cond": { "$eq": [ "$$output.unlock_conditions", &address ] }
}
}
} },
Expand All @@ -195,7 +195,7 @@ impl MongoDb {
"pipeline": [
// Match using the output's index
{ "$match": {
"inclusion_state": bson::to_bson(&LedgerInclusionState::Included)?,
"inclusion_state": LedgerInclusionState::Included,
"block.payload.essence.inputs.transaction_id": "$$transaction_id",
"block.payload.essence.inputs.index": "$$index"
} },
@@ -266,7 +266,7 @@ impl MongoDb {
.aggregate(
vec![
doc! { "$match": {
"inclusion_state": bson::to_bson(&LedgerInclusionState::Included)?,
"inclusion_state": LedgerInclusionState::Included,
"milestone_index": { "$gt": start_milestone, "$lt": end_milestone },
"block.payload.kind": "transaction",
} },
@@ -276,7 +276,7 @@ impl MongoDb {
"let": { "transaction_id": "$block.payload.essence.inputs.transaction_id", "index": "$block.payload.essence.inputs.index" },
"pipeline": [
{ "$match": {
"inclusion_state": bson::to_bson(&LedgerInclusionState::Included)?,
"inclusion_state": LedgerInclusionState::Included,
"block.payload.transaction_id": "$$transaction_id",
} },
{ "$set": {
12 changes: 6 additions & 6 deletions src/db/collections/ledger_update.rs
@@ -3,7 +3,7 @@

use futures::Stream;
use mongodb::{
- bson::{doc, to_bson, Bson},
+ bson::{doc, Bson},
error::Error,
options::FindOptions,
};
@@ -71,7 +71,7 @@ impl MongoDb {
for owner in output.owning_addresses() {
let ledger_update_document = LedgerUpdateDocument {
owner,
- output_id: metadata.output_id.clone(),
+ output_id: metadata.output_id,
at: metadata.spent.clone().map_or(metadata.booked.clone(), |s| s.spent),
is_spent: metadata.spent.is_some(),
};
@@ -84,7 +84,7 @@
}

// Upsert outputs
- self.upsert_output_with_metadata(metadata.output_id.clone(), output, metadata)
+ self.upsert_output_with_metadata(metadata.output_id, output, metadata)
.await?;
}

@@ -107,9 +107,9 @@
.collection::<LedgerUpdateRecord>(LedgerUpdateDocument::COLLECTION)
.find(
doc! {
"address": { "$eq": to_bson(&address)? },
"at.milestone_index": { "$gte": to_bson(&cursor.0)? },
"output_id": { "$gte": to_bson(&cursor.1)? },
"address": { "$eq": &address },
"at.milestone_index": { "$gte": &cursor.0 },
"output_id": { "$gte": &cursor.1 },
},
options,
)
2 changes: 1 addition & 1 deletion src/db/collections/milestone.rs
@@ -65,7 +65,7 @@ impl MongoDb {
self.0
.collection::<Payload>(MilestoneDocument::COLLECTION)
.find_one(
- doc! {"milestone_id": bson::to_bson(milestone_id)?},
+ doc! {"milestone_id": milestone_id},
FindOneOptions::builder().projection(doc! {"payload": 1 }).build(),
)
.await
12 changes: 6 additions & 6 deletions src/db/collections/output.rs
@@ -2,7 +2,7 @@
// SPDX-License-Identifier: Apache-2.0

use mongodb::{
- bson::{self, doc, to_bson, to_document},
+ bson::{self, doc},
error::Error,
options::{FindOneOptions, UpdateOptions},
};
@@ -40,16 +40,16 @@ impl MongoDb {
metadata: OutputMetadata,
) -> Result<(), Error> {
let output_document = OutputDocument {
- output_id: output_id.clone(),
+ output_id,
output,
metadata,
};

self.0
.collection::<OutputDocument>(OutputDocument::COLLECTION)
.update_one(
- doc! { "_id": to_bson(&output_id)?},
- doc! {"$set": to_document(&output_document)? },
+ doc! { "_id": &output_id},
+ doc! {"$set": bson::to_document(&output_document)? },
UpdateOptions::builder().upsert(true).build(),
)
.await?;
@@ -62,7 +62,7 @@ impl MongoDb {
self.0
.collection::<Output>(OutputDocument::COLLECTION)
.find_one(
- doc! {"_id": bson::to_bson(output_id)?},
+ doc! {"_id": output_id},
Some(FindOneOptions::builder().projection(doc! {"output": 1 }).build()),
)
.await
@@ -90,7 +90,7 @@ impl MongoDb {
self.0
.collection::<OutputMetadata>(OutputDocument::COLLECTION)
.find_one(
- doc! {"_id": bson::to_bson(output_id)?},
+ doc! {"_id": output_id},
Some(FindOneOptions::builder().projection(doc! {"metadata": 1 }).build()),
)
.await
8 changes: 8 additions & 0 deletions src/types/ledger/inclusion_state.rs
@@ -2,6 +2,7 @@
// SPDX-License-Identifier: Apache-2.0

use bee_rest_api_stardust::types::dtos as bee;
+ use mongodb::bson::Bson;
use serde::{Deserialize, Serialize};

/// A block's ledger inclusion state.
@@ -18,6 +19,13 @@ pub enum LedgerInclusionState {
NoTransaction,
}

+ impl From<LedgerInclusionState> for Bson {
+ fn from(val: LedgerInclusionState) -> Self {
+ // Unwrap: Cannot fail as type is well defined
+ mongodb::bson::to_bson(&val).unwrap()
+ }
+ }

#[cfg(feature = "stardust")]
impl From<bee::LedgerInclusionStateDto> for LedgerInclusionState {
fn from(value: bee::LedgerInclusionStateDto) -> Self {
3 changes: 1 addition & 2 deletions src/types/ledger/output_metadata.rs
@@ -42,11 +42,10 @@ pub struct OutputWithMetadata {
impl From<inx::LedgerOutput> for OutputWithMetadata {
fn from(value: inx::LedgerOutput) -> Self {
let output_id = OutputId::from(value.output_id);
- let transaction_id = output_id.transaction_id.clone();
let metadata = OutputMetadata {
output_id,
block_id: value.block_id.into(),
- transaction_id,
+ transaction_id: output_id.transaction_id,
booked: MilestoneIndexTimestamp {
milestone_index: value.milestone_index_booked.into(),
milestone_timestamp: value.milestone_timestamp_booked.into(),
10 changes: 9 additions & 1 deletion src/types/stardust/block/address/alias.rs
@@ -4,11 +4,12 @@
use std::str::FromStr;

use bee_block_stardust::address as bee;
+ use mongodb::bson::Bson;
use serde::{Deserialize, Serialize};

use crate::types::stardust::block::AliasId;

- #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
+ #[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(transparent)]
pub struct AliasAddress(pub AliasId);

@@ -31,3 +32,10 @@ impl FromStr for AliasAddress {
Ok(bee::AliasAddress::from_str(s)?.into())
}
}

+ impl From<AliasAddress> for Bson {
+ fn from(val: AliasAddress) -> Self {
+ // Unwrap: Cannot fail as type is well defined
+ mongodb::bson::to_bson(&val).unwrap()
+ }
+ }
13 changes: 12 additions & 1 deletion src/types/stardust/block/address/ed25519.rs
@@ -4,11 +4,12 @@
use std::str::FromStr;

use bee_block_stardust::address as bee;
+ use mongodb::bson::{spec::BinarySubtype, Binary, Bson};
use serde::{Deserialize, Serialize};

use crate::types::util::bytify;

- #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
+ #[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(transparent)]
pub struct Ed25519Address(#[serde(with = "bytify")] pub [u8; Self::LENGTH]);

@@ -35,3 +36,13 @@ impl FromStr for Ed25519Address {
Ok(bee::Ed25519Address::from_str(s)?.into())
}
}

+ impl From<Ed25519Address> for Bson {
+ fn from(val: Ed25519Address) -> Self {
+ Binary {
+ subtype: BinarySubtype::Generic,
+ bytes: val.0.to_vec(),
+ }
+ .into()
+ }
+ }
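
Unlike the to_bson-based impls for the other types in this commit, Ed25519Address converts to Bson by building a Binary value straight from its raw bytes, so there is no fallible serialization step and no unwrap. A usage sketch only, assuming the Ed25519Address type above with an accessible LENGTH constant; the "address" field name is purely illustrative:

use mongodb::bson::{doc, spec::BinarySubtype, Bson};

fn ed25519_address_as_bson_sketch() {
    // All-zero address purely for illustration.
    let addr = Ed25519Address([0u8; Ed25519Address::LENGTH]);

    // `doc!` converts the value via the `From<Ed25519Address> for Bson` impl above.
    let filter = doc! { "address": addr };

    // The stored value is a generic BSON binary holding the address bytes.
    match filter.get("address") {
        Some(Bson::Binary(bin)) => {
            assert_eq!(bin.subtype, BinarySubtype::Generic);
            assert_eq!(bin.bytes.len(), Ed25519Address::LENGTH);
        }
        other => unreachable!("expected a binary value, got {:?}", other),
    }
}
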
13 changes: 12 additions & 1 deletion src/types/stardust/block/address/mod.rs
@@ -4,6 +4,7 @@
use std::str::FromStr;

use bee_block_stardust::address as bee;
+ use mongodb::bson::{doc, Bson};
use serde::{Deserialize, Serialize};

mod alias;
@@ -12,7 +13,7 @@ mod nft;

pub use self::{alias::AliasAddress, ed25519::Ed25519Address, nft::NftAddress};

- #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
+ #[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum Address {
#[serde(rename = "ed25519")]
Ed25519(Ed25519Address),
@@ -50,6 +51,13 @@ impl FromStr for Address {
}
}

+ impl From<Address> for Bson {
+ fn from(val: Address) -> Self {
+ // Unwrap: Cannot fail as type is well defined
+ mongodb::bson::to_bson(&val).unwrap()
+ }
+ }

#[cfg(test)]
pub(crate) mod test {
use mongodb::bson::{from_bson, to_bson};
@@ -60,14 +68,17 @@ pub(crate) mod test {
fn test_address_bson() {
let address = Address::from(bee::Address::Ed25519(bee_test::rand::address::rand_ed25519_address()));
let bson = to_bson(&address).unwrap();
+ assert_eq!(Bson::from(address), bson);
assert_eq!(address, from_bson::<Address>(bson).unwrap());

let address = Address::from(bee::Address::Alias(bee_test::rand::address::rand_alias_address()));
let bson = to_bson(&address).unwrap();
+ assert_eq!(Bson::from(address), bson);
assert_eq!(address, from_bson::<Address>(bson).unwrap());

let address = Address::from(bee::Address::Nft(bee_test::rand::address::rand_nft_address()));
let bson = to_bson(&address).unwrap();
+ assert_eq!(Bson::from(address), bson);
assert_eq!(address, from_bson::<Address>(bson).unwrap());
}
}
10 changes: 9 additions & 1 deletion src/types/stardust/block/address/nft.rs
@@ -4,11 +4,12 @@
use std::str::FromStr;

use bee_block_stardust::address as bee;
+ use mongodb::bson::Bson;
use serde::{Deserialize, Serialize};

use crate::types::stardust::block::NftId;

- #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
+ #[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(transparent)]
pub struct NftAddress(pub NftId);

@@ -31,3 +32,10 @@ impl FromStr for NftAddress {
Ok(bee::NftAddress::from_str(s)?.into())
}
}

+ impl From<NftAddress> for Bson {
+ fn from(val: NftAddress) -> Self {
+ // Unwrap: Cannot fail as type is well defined
+ mongodb::bson::to_bson(&val).unwrap()
+ }
+ }