diff --git a/Cargo.lock b/Cargo.lock index 6856c684bc4..8dd0b4c850f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1490,8 +1490,8 @@ dependencies = [ "slog-async", "slog-envlogger", "slog-term", - "stable-hash 0.3.2", - "stable-hash 0.4.1", + "stable-hash 0.3.3", + "stable-hash 0.4.2", "structopt", "strum", "strum_macros", @@ -1642,8 +1642,8 @@ dependencies = [ "once_cell", "parking_lot 0.12.1", "pretty_assertions 1.2.1", - "stable-hash 0.3.2", - "stable-hash 0.4.1", + "stable-hash 0.3.3", + "stable-hash 0.4.2", "test-store", ] @@ -1847,7 +1847,7 @@ dependencies = [ "postgres-openssl", "rand", "serde", - "stable-hash 0.3.2", + "stable-hash 0.3.3", "test-store", "uuid 1.1.2", ] @@ -3915,9 +3915,9 @@ checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" [[package]] name = "stable-hash" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2dfeb73d08f4ae1f48059f31f53a3768fad3dc54622631e0a3eff1d07a4ca19" +checksum = "10196e68950ed99c0d2db7a30ffaf4dfe0bbf2f9af2ae0457ee8ad396e0a2dd7" dependencies = [ "blake3 0.3.8", "firestorm 0.4.6", @@ -3929,9 +3929,9 @@ dependencies = [ [[package]] name = "stable-hash" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "442fcfd15605146b373dafa4bea8368e98d9b51350e5da5a44ab2923c5ac3ca8" +checksum = "af75bd21beb162eab69de76abbb803d4111735ead00d5086dcc6f4ddb3b53cc9" dependencies = [ "blake3 0.3.8", "firestorm 0.5.0", diff --git a/chain/near/src/trigger.rs b/chain/near/src/trigger.rs index 747fdf1503a..ecdc8a3d2f7 100644 --- a/chain/near/src/trigger.rs +++ b/chain/near/src/trigger.rs @@ -5,11 +5,7 @@ use graph::cheap_clone::CheapClone; use graph::prelude::hex; use graph::prelude::web3::types::H256; use graph::prelude::BlockNumber; -use graph::runtime::asc_new; -use graph::runtime::gas::GasCounter; -use graph::runtime::AscHeap; -use graph::runtime::AscPtr; -use 
graph::runtime::DeterministicHostError; +use graph::runtime::{asc_new, gas::GasCounter, AscHeap, AscPtr, DeterministicHostError}; use std::{cmp::Ordering, sync::Arc}; use crate::codec; @@ -155,6 +151,7 @@ mod tests { data::subgraph::API_VERSION_0_0_5, prelude::{hex, BigInt}, runtime::gas::GasCounter, + util::mem::init_slice, }; #[test] @@ -448,12 +445,18 @@ mod tests { Ok((self.memory.len() - bytes.len()) as u32) } - fn get( + fn read_u32(&self, offset: u32, gas: &GasCounter) -> Result { + let mut data = [std::mem::MaybeUninit::::uninit(); 4]; + let init = self.read(offset, &mut data, gas)?; + Ok(u32::from_le_bytes(init.try_into().unwrap())) + } + + fn read<'a>( &self, offset: u32, - size: u32, + buffer: &'a mut [std::mem::MaybeUninit], _gas: &GasCounter, - ) -> Result, DeterministicHostError> { + ) -> Result<&'a mut [u8], DeterministicHostError> { let memory_byte_count = self.memory.len(); if memory_byte_count == 0 { return Err(DeterministicHostError::from(anyhow!( @@ -462,7 +465,7 @@ mod tests { } let start_offset = offset as usize; - let end_offset_exclusive = start_offset + size as usize; + let end_offset_exclusive = start_offset + buffer.len(); if start_offset >= memory_byte_count { return Err(DeterministicHostError::from(anyhow!( @@ -480,7 +483,9 @@ mod tests { ))); } - return Ok(Vec::from(&self.memory[start_offset..end_offset_exclusive])); + let src = &self.memory[start_offset..end_offset_exclusive]; + + Ok(init_slice(src, buffer)) } fn api_version(&self) -> graph::semver::Version { diff --git a/core/src/subgraph/runner.rs b/core/src/subgraph/runner.rs index 8dcc44a1753..bbde93fbae2 100644 --- a/core/src/subgraph/runner.rs +++ b/core/src/subgraph/runner.rs @@ -10,7 +10,10 @@ use graph::blockchain::block_stream::{BlockStreamEvent, BlockWithTriggers}; use graph::blockchain::{Block, Blockchain, DataSource, TriggerFilter as _}; use graph::components::{ store::ModificationsAndCache, - subgraph::{CausalityRegion, MappingError, ProofOfIndexing, 
SharedProofOfIndexing}, +    subgraph::{ +        CausalityRegion, MappingError, ProofOfIndexing, ProofOfIndexingVersion, +        SharedProofOfIndexing, +    }, }; use graph::data::store::scalar::Bytes; use graph::data::subgraph::{ @@ -162,6 +165,10 @@ where let proof_of_indexing = if self.inputs.store.supports_proof_of_indexing().await? { Some(Arc::new(AtomicRefCell::new(ProofOfIndexing::new( block_ptr.number, + // TODO: (Fast PoI) This should also support the Fast + // variant when indicated by the subgraph manifest. + // See also a0a79c0f-919f-4d97-a82a-439a1ff78230 + ProofOfIndexingVersion::Legacy, )))) } else { None diff --git a/graph/Cargo.toml b/graph/Cargo.toml index 21419023363..58beaa26fe9 100644 --- a/graph/Cargo.toml +++ b/graph/Cargo.toml @@ -33,8 +33,8 @@ serde_derive = "1.0.125" serde_json = { version = "1.0", features = ["arbitrary_precision"] } serde_yaml = "0.8" slog = { version = "2.7.0", features = ["release_max_level_trace", "max_level_trace"] } -stable-hash_legacy = { version = "0.3.2", package = "stable-hash" } -stable-hash = { version = "0.4.1"} +stable-hash_legacy = { version = "0.3.3", package = "stable-hash" } +stable-hash = { version = "0.4.2"} strum = "0.21.0" strum_macros = "0.21.1" slog-async = "2.5.0" diff --git a/graph/src/blockchain/types.rs b/graph/src/blockchain/types.rs index 72a7cd79867..de7bbce0ed7 100644 --- a/graph/src/blockchain/types.rs +++ b/graph/src/blockchain/types.rs @@ -1,6 +1,4 @@ use anyhow::anyhow; -use stable_hash::{FieldAddress, StableHash}; -use stable_hash_legacy::SequenceNumber; use std::convert::TryFrom; use std::{fmt, str::FromStr}; use web3::types::{Block, H256}; @@ -8,12 +6,15 @@ use web3::types::{Block, H256}; use crate::data::graphql::IntoValue; use crate::object; use crate::prelude::{r, BigInt, TryFromValue, ValueMap}; +use crate::util::stable_hash_glue::{impl_stable_hash, AsBytes}; use crate::{cheap_clone::CheapClone, components::store::BlockNumber}; /// A simple marker for byte arrays that are really block hashes
#[derive(Clone, Default, PartialEq, Eq, Hash)] pub struct BlockHash(pub Box<[u8]>); +impl_stable_hash!(BlockHash(transparent: AsBytes)); + impl BlockHash { pub fn as_slice(&self) -> &[u8] { &self.0 @@ -94,28 +95,7 @@ pub struct BlockPtr { impl CheapClone for BlockPtr {} -impl stable_hash_legacy::StableHash for BlockPtr { - fn stable_hash( - &self, - mut sequence_number: H::Seq, - state: &mut H, - ) { - let BlockPtr { hash, number } = self; - - stable_hash_legacy::utils::AsBytes(hash.0.as_ref()) - .stable_hash(sequence_number.next_child(), state); - stable_hash_legacy::StableHash::stable_hash(number, sequence_number.next_child(), state); - } -} - -impl StableHash for BlockPtr { - fn stable_hash(&self, field_address: H::Addr, state: &mut H) { - let BlockPtr { hash, number } = self; - - stable_hash::utils::AsBytes(hash.0.as_ref()).stable_hash(field_address.child(0), state); - stable_hash::StableHash::stable_hash(number, field_address.child(1), state); - } -} +impl_stable_hash!(BlockPtr { hash, number }); impl BlockPtr { pub fn new(hash: BlockHash, number: BlockNumber) -> Self { diff --git a/graph/src/components/store/mod.rs b/graph/src/components/store/mod.rs index a8204e69640..285dde2590c 100644 --- a/graph/src/components/store/mod.rs +++ b/graph/src/components/store/mod.rs @@ -5,8 +5,6 @@ mod traits; pub use cache::{CachedEthereumCall, EntityCache, ModificationsAndCache}; pub use err::StoreError; use itertools::Itertools; -use stable_hash::{FieldAddress, StableHash}; -use stable_hash_legacy::SequenceNumber; pub use traits::*; use futures::stream::poll_fn; @@ -26,6 +24,7 @@ use crate::blockchain::{Block, Blockchain}; use crate::data::store::scalar::Bytes; use crate::data::store::*; use crate::prelude::*; +use crate::util::stable_hash_glue::impl_stable_hash; /// The type name of an entity. This is the string that is used in the /// subgraph's GraphQL schema as `type NAME @entity { .. 
}` @@ -110,46 +109,11 @@ pub struct EntityKey { pub entity_id: String, } -impl stable_hash_legacy::StableHash for EntityKey { - fn stable_hash( - &self, - mut sequence_number: H::Seq, - state: &mut H, - ) { - let Self { - subgraph_id, - entity_type, - entity_id, - } = self; - - stable_hash_legacy::StableHash::stable_hash( - subgraph_id, - sequence_number.next_child(), - state, - ); - stable_hash_legacy::StableHash::stable_hash( - &entity_type.as_str(), - sequence_number.next_child(), - state, - ); - stable_hash_legacy::StableHash::stable_hash(entity_id, sequence_number.next_child(), state); - } -} - -impl StableHash for EntityKey { - fn stable_hash(&self, field_address: H::Addr, state: &mut H) { - let Self { - subgraph_id, - entity_type, - entity_id, - } = self; - - subgraph_id.stable_hash(field_address.child(0), state); - - stable_hash::StableHash::stable_hash(&entity_type.as_str(), field_address.child(1), state); - stable_hash::StableHash::stable_hash(entity_id, field_address.child(2), state); - } -} +impl_stable_hash!(EntityKey { + subgraph_id, + entity_type: EntityType::as_str, + entity_id +}); impl EntityKey { pub fn data(subgraph_id: DeploymentHash, entity_type: String, entity_id: String) -> Self { diff --git a/graph/src/components/subgraph/mod.rs b/graph/src/components/subgraph/mod.rs index 535da0b0caa..cd930c85924 100644 --- a/graph/src/components/subgraph/mod.rs +++ b/graph/src/components/subgraph/mod.rs @@ -11,8 +11,8 @@ pub use self::host::{HostMetrics, MappingError, RuntimeHost, RuntimeHostBuilder} pub use self::instance::{BlockState, DataSourceTemplateInfo}; pub use self::instance_manager::SubgraphInstanceManager; pub use self::proof_of_indexing::{ - BlockEventStream, CausalityRegion, ProofOfIndexing, ProofOfIndexingEvent, - ProofOfIndexingFinisher, SharedProofOfIndexing, + CausalityRegion, ProofOfIndexing, ProofOfIndexingEvent, ProofOfIndexingFinisher, + ProofOfIndexingVersion, SharedProofOfIndexing, }; pub use 
self::provider::SubgraphAssignmentProvider; pub use self::registrar::{SubgraphRegistrar, SubgraphVersionSwitchingMode}; diff --git a/graph/src/components/subgraph/proof_of_indexing/mod.rs b/graph/src/components/subgraph/proof_of_indexing/mod.rs index da08584859c..712b84d0694 100644 --- a/graph/src/components/subgraph/proof_of_indexing/mod.rs +++ b/graph/src/components/subgraph/proof_of_indexing/mod.rs @@ -3,12 +3,18 @@ mod online; mod reference; pub use event::ProofOfIndexingEvent; -pub use online::{BlockEventStream, ProofOfIndexing, ProofOfIndexingFinisher}; +pub use online::{ProofOfIndexing, ProofOfIndexingFinisher}; pub use reference::CausalityRegion; use atomic_refcell::AtomicRefCell; use std::sync::Arc; +#[derive(Copy, Clone, Debug)] +pub enum ProofOfIndexingVersion { + Fast, + Legacy, +} + /// This concoction of types is to allow MappingContext to be static, yet still /// have shared mutable data for derive_with_empty_block_state. The static /// requirement is so that host exports can be static for wasmtime. @@ -34,61 +40,90 @@ mod tests { use online::ProofOfIndexingFinisher; use reference::*; use slog::{o, Discard, Logger}; + use stable_hash::{fast_stable_hash, utils::check_for_child_errors}; use stable_hash_legacy::crypto::SetHasher; - use stable_hash_legacy::utils::stable_hash; + use stable_hash_legacy::utils::stable_hash as stable_hash_legacy; use std::collections::HashMap; use std::convert::TryInto; use web3::types::{Address, H256}; /// Verify that the stable hash of a reference and online implementation match - fn check_equal(reference: &PoI) -> String { + fn check(case: Case, cache: &mut HashMap) { let logger = Logger::root(Discard, o!()); - // The code is meant to approximate what happens during indexing as - // close as possible. The API for the online PoI is meant to be - // pretty foolproof so that the actual usage will also match. 
+ // Does a sanity check to ensure that the schema itself is correct, + // which is separate to verifying that the online/offline version + // return the same result. + check_for_child_errors(&case.data).expect("Found child errors"); - // Create a database which stores intermediate PoIs - let mut db = HashMap::>::new(); + let offline_fast = tiny_keccak::keccak256(&fast_stable_hash(&case.data).to_le_bytes()); + let offline_legacy = stable_hash_legacy::(&case.data); - let mut block_count = 1; - for causality_region in reference.causality_regions.values() { - block_count = causality_region.blocks.len(); - break; - } + for (version, offline, hardcoded) in [ + (ProofOfIndexingVersion::Legacy, offline_legacy, case.legacy), + (ProofOfIndexingVersion::Fast, offline_fast, case.fast), + ] { + // The code is meant to approximate what happens during indexing as + // close as possible. The API for the online PoI is meant to be + // pretty foolproof so that the actual usage will also match. + + // Create a database which stores intermediate PoIs + let mut db = HashMap::>::new(); + + let mut block_count = 1; + for causality_region in case.data.causality_regions.values() { + block_count = causality_region.blocks.len(); + break; + } + + for block_i in 0..block_count { + let mut stream = ProofOfIndexing::new(block_i.try_into().unwrap(), version); - for block_i in 0..block_count { - let mut stream = ProofOfIndexing::new(block_i.try_into().unwrap()); + for (name, region) in case.data.causality_regions.iter() { + let block = ®ion.blocks[block_i]; - for (name, region) in reference.causality_regions.iter() { - let block = ®ion.blocks[block_i]; + for evt in block.events.iter() { + stream.write(&logger, name, evt); + } + } - for evt in block.events.iter() { - stream.write(&logger, name, evt); + for (name, region) in stream.take() { + let prev = db.get(&name); + let update = region.pause(prev.map(|v| &v[..])); + db.insert(name, update); } } - for (name, region) in stream.take() { - let prev 
= db.get(&name); - let update = region.pause(prev.map(|v| &v[..])); - db.insert(name, update); + let block_number = (block_count - 1) as u64; + let block_ptr = BlockPtr::from((case.data.block_hash, block_number)); + + // This region emulates the request + let mut finisher = ProofOfIndexingFinisher::new( + &block_ptr, + &case.data.subgraph_id, + &case.data.indexer, + version, + ); + for (name, region) in db.iter() { + finisher.add_causality_region(name, region); } - } - let block_number = (block_count - 1) as u64; - let block_ptr = BlockPtr::from((reference.block_hash, block_number)); + let online = hex::encode(finisher.finish()); + let offline = hex::encode(&offline); + assert_eq!(&online, &offline); + assert_eq!(&online, hardcoded); - // This region emulates the request - let mut finisher = - ProofOfIndexingFinisher::new(&block_ptr, &reference.subgraph_id, &reference.indexer); - for (name, region) in db.iter() { - finisher.add_causality_region(name, region); + if let Some(prev) = cache.insert(offline, case.name) { + panic!("Found conflict for case: {} == {}", case.name, prev); + } } + } - let online = hex::encode(finisher.finish()); - let offline = hex::encode(stable_hash::(reference)); - assert_eq!(&online, &offline); - offline + struct Case<'a> { + name: &'static str, + legacy: &'static str, + fast: &'static str, + data: PoI<'a>, } /// This test checks that each case resolves to a unique hash, and that @@ -104,162 +139,180 @@ mod tests { "null".to_owned() => Value::Null, }; - let mut cases = hashmap! 
{ + let mut cases = vec![ // Simple case of basically nothing - "genesis" => PoI { - subgraph_id: DeploymentHash::new("test").unwrap(), - block_hash: H256::repeat_byte(1), - causality_regions: HashMap::new(), - indexer: None, + Case { + name: "genesis", + legacy: "401e5bef572bc3a56b0ced0eb6cb4619d2ca748db6af8855828d16ff3446cfdd", + fast: "dced49c45eac68e8b3d8f857928e7be6c270f2db8b56b0d7f27ce725100bae01", + data: PoI { + subgraph_id: DeploymentHash::new("test").unwrap(), + block_hash: H256::repeat_byte(1), + causality_regions: HashMap::new(), + indexer: None, + }, }, - // Add an event - "one_event" => PoI { - subgraph_id: DeploymentHash::new("test").unwrap(), - block_hash: H256::repeat_byte(1), - causality_regions: hashmap! { - "eth".to_owned() => CausalityRegion { - blocks: vec! [ - Block::default(), - Block { - events: vec![ - ProofOfIndexingEvent::SetEntity { - entity_type: "t", - id: "id", - data: &data_empty, - } - ] - } - ], + Case { + name: "one_event", + legacy: "9241634bfc8a9a12c796a0de6f326326a74967cd477ee7ce78fbac20a9e9c303", + fast: "bb3c37659d4bc799b9dcf3d17b1b1e93847f5fc0b2c50ee6a27f13b5c07f7e97", + data: PoI { + subgraph_id: DeploymentHash::new("test").unwrap(), + block_hash: H256::repeat_byte(1), + causality_regions: hashmap! { + "eth".to_owned() => CausalityRegion { + blocks: vec! [ + Block::default(), + Block { + events: vec![ + ProofOfIndexingEvent::SetEntity { + entity_type: "t", + id: "id", + data: &data_empty, + } + ] + } + ], + }, }, + indexer: Some(Address::repeat_byte(1)), }, - indexer: Some(Address::repeat_byte(1)), }, - // Try adding a couple more blocks, including an empty block on the end - "multiple_blocks" => PoI { - subgraph_id: DeploymentHash::new("b").unwrap(), - block_hash: H256::repeat_byte(3), - causality_regions: hashmap! { - "eth".to_owned() => CausalityRegion { - blocks: vec! 
[ - Block::default(), - Block { - events: vec![ - ProofOfIndexingEvent::SetEntity { - entity_type: "type", - id: "id", - data: &data, - } - ] - }, - Block::default(), - Block { - events: vec![ - ProofOfIndexingEvent::SetEntity { - entity_type: "type", - id: "id", - data: &data_empty, - } - ] - }, - Block::default(), - ], + Case { + name: "multiple_blocks", + legacy: "775fa30bbaef2a8659456a317923a36f46e3715e6c9cf43203dd3486af4e361f", + fast: "3bb882049e8f4a11cd4a7a005c6ce3b3c779a0e90057a9556c595660e626268d", + data: PoI { + subgraph_id: DeploymentHash::new("b").unwrap(), + block_hash: H256::repeat_byte(3), + causality_regions: hashmap! { + "eth".to_owned() => CausalityRegion { + blocks: vec! [ + Block::default(), + Block { + events: vec![ + ProofOfIndexingEvent::SetEntity { + entity_type: "type", + id: "id", + data: &data, + } + ] + }, + Block::default(), + Block { + events: vec![ + ProofOfIndexingEvent::SetEntity { + entity_type: "type", + id: "id", + data: &data_empty, + } + ] + }, + Block::default(), + ], + }, }, + indexer: Some(Address::repeat_byte(1)), }, - indexer: Some(Address::repeat_byte(1)), }, - // Try adding another causality region - "causality_regions" => PoI { - subgraph_id: DeploymentHash::new("b").unwrap(), - block_hash: H256::repeat_byte(3), - causality_regions: hashmap! { - "eth".to_owned() => CausalityRegion { - blocks: vec! [ - Block::default(), - Block { - events: vec![ - ProofOfIndexingEvent::SetEntity { - entity_type: "type", - id: "id", - data: &data2, - } - ] - }, - Block::default(), - Block { - events: vec![ - ProofOfIndexingEvent::RemoveEntity { - entity_type: "type", - id: "id", - } - ] - }, - Block::default(), - ], - }, - "ipfs".to_owned() => CausalityRegion { - blocks: vec! 
[ - Block::default(), - Block { - events: vec![ - ProofOfIndexingEvent::SetEntity { - entity_type: "type", - id: "id", - data: &data, - } - ] - }, - Block::default(), - Block { - events: vec![ - ProofOfIndexingEvent::SetEntity { - entity_type: "type", - id: "id", - data: &data, - } - ] - }, - Block::default(), - ], + Case { + name: "causality_regions", + legacy: "13e6fd2b581911c80d935d4f098b40ef3d87cbc564b5a635c81b06091a381e54", + fast: "b2cb70acd4a1337a67df810fe4c5c2fb3d3a3b2b8eb137dbb592bd6014869362", + data: PoI { + subgraph_id: DeploymentHash::new("b").unwrap(), + block_hash: H256::repeat_byte(3), + causality_regions: hashmap! { + "eth".to_owned() => CausalityRegion { + blocks: vec! [ + Block::default(), + Block { + events: vec![ + ProofOfIndexingEvent::SetEntity { + entity_type: "type", + id: "id", + data: &data2, + } + ] + }, + Block::default(), + Block { + events: vec![ + ProofOfIndexingEvent::RemoveEntity { + entity_type: "type", + id: "id", + } + ] + }, + Block::default(), + ], + }, + "ipfs".to_owned() => CausalityRegion { + blocks: vec! [ + Block::default(), + Block { + events: vec![ + ProofOfIndexingEvent::SetEntity { + entity_type: "type", + id: "id", + data: &data, + } + ] + }, + Block::default(), + Block { + events: vec![ + ProofOfIndexingEvent::SetEntity { + entity_type: "type", + id: "id", + data: &data, + } + ] + }, + Block::default(), + ], + }, }, + indexer: Some(Address::repeat_byte(1)), }, - indexer: Some(Address::repeat_byte(1)), }, - // Back to the one event case, but try adding some data. - "data" => PoI { - subgraph_id: DeploymentHash::new("test").unwrap(), - block_hash: H256::repeat_byte(1), - causality_regions: hashmap! { - "eth".to_owned() => CausalityRegion { - blocks: vec! 
[ - Block::default(), - Block { - events: vec![ - ProofOfIndexingEvent::SetEntity { - entity_type: "type", - id: "id", - data: &data, - } - ] - } - ], + Case { + name: "data", + legacy: "cd3020511cf4c88dd2be542aca4f95bb2a67b06e29f444bcdf44009933b8ff31", + fast: "a992ba24702615a3f591014f7351acf85a35b75e1f8646fc8d77509c4b5d31ed", + data: PoI { + subgraph_id: DeploymentHash::new("test").unwrap(), + block_hash: H256::repeat_byte(1), + causality_regions: hashmap! { + "eth".to_owned() => CausalityRegion { + blocks: vec! [ + Block::default(), + Block { + events: vec![ + ProofOfIndexingEvent::SetEntity { + entity_type: "type", + id: "id", + data: &data, + } + ] + } + ], + }, }, + indexer: Some(Address::repeat_byte(4)), }, - indexer: Some(Address::repeat_byte(4)), }, - }; + ]; // Lots of data up there ⬆️ to test. Finally, loop over each case, comparing the reference and // online version, then checking that there are no conflicts for the reference versions. let mut results = HashMap::new(); - for (name, data) in cases.drain() { - let result = check_equal(&data); - if let Some(prev) = results.insert(result, name) { - assert!(false, "Found conflict for case: {} == {}", name, prev); - } + for case in cases.drain(..) { + check(case, &mut results); } } } diff --git a/graph/src/components/subgraph/proof_of_indexing/online.rs b/graph/src/components/subgraph/proof_of_indexing/online.rs index 5f4c6c4469e..013f2a8f9bb 100644 --- a/graph/src/components/subgraph/proof_of_indexing/online.rs +++ b/graph/src/components/subgraph/proof_of_indexing/online.rs @@ -2,26 +2,64 @@ //! Any hash constructed from here should be the same as if the same data was given //! 
to the reference implementation, but this is updated incrementally -use super::ProofOfIndexingEvent; +use super::{ProofOfIndexingEvent, ProofOfIndexingVersion}; use crate::{ blockchain::BlockPtr, prelude::{debug, BlockNumber, DeploymentHash, Logger, ENV_VARS}, + util::stable_hash_glue::AsBytes, }; +use stable_hash::{fast::FastStableHasher, FieldAddress, StableHash, StableHasher}; use stable_hash_legacy::crypto::{Blake3SeqNo, SetHasher}; -use stable_hash_legacy::prelude::*; -use stable_hash_legacy::utils::AsBytes; +use stable_hash_legacy::prelude::{ + StableHash as StableHashLegacy, StableHasher as StableHasherLegacy, *, +}; use std::collections::HashMap; use std::convert::TryInto; use std::fmt; use web3::types::Address; -pub enum BlockEventStream { - LegacyHasherStream { - vec_length: u64, - handler_start: u64, - seq_no: Blake3SeqNo, - digest: SetHasher, - }, +pub struct BlockEventStream { + vec_length: u64, + handler_start: u64, + block_index: u64, + hasher: Hashers, +} + +enum Hashers { + Fast(FastStableHasher), + Legacy(SetHasher), +} + +impl Hashers { + fn new(version: ProofOfIndexingVersion) -> Self { + match version { + ProofOfIndexingVersion::Legacy => Hashers::Legacy(SetHasher::new()), + ProofOfIndexingVersion::Fast => Hashers::Fast(FastStableHasher::new()), + } + } + + fn from_bytes(bytes: &[u8]) -> Self { + match bytes.try_into() { + Ok(bytes) => Hashers::Fast(FastStableHasher::from_bytes(bytes)), + Err(_) => Hashers::Legacy(SetHasher::from_bytes(bytes)), + } + } + + fn write(&mut self, value: &T, children: &[u64]) + where + T: StableHash + StableHashLegacy, + { + match self { + Hashers::Fast(fast) => { + let addr = children.iter().fold(u128::root(), |s, i| s.child(*i)); + StableHash::stable_hash(value, addr, fast); + } + Hashers::Legacy(legacy) => { + let seq_no = traverse_seq_no(children); + StableHashLegacy::stable_hash(value, seq_no, legacy); + } + } + } } /// Go directly to a SequenceNumber identifying a field within a struct. 
@@ -54,27 +92,23 @@ pub enum BlockEventStream { /// 0, // Vec[0] /// 1, // Inner.inner_str ///]) -// Performance: Could write a specialized function for this easily, avoiding a bunch of clones of Blake3SeqNo -fn traverse_seq_no(counts: &[usize]) -> Blake3SeqNo { +// Performance: Could write a specialized function for this, avoiding a bunch of clones of Blake3SeqNo +fn traverse_seq_no(counts: &[u64]) -> Blake3SeqNo { counts.iter().fold(Blake3SeqNo::root(), |mut s, i| { - s.skip(*i); + s.skip(*i as usize); s.next_child() }) } impl BlockEventStream { - fn new_legacy(block_number: BlockNumber) -> Self { - let events = traverse_seq_no(&[ - 1, // kvp -> v - 0, // CausalityRegion.blocks: Vec - block_number.try_into().unwrap(), // Vec -> [i] - 0, // Block.events -> Vec - ]); - Self::LegacyHasherStream { + fn new(block_number: BlockNumber, version: ProofOfIndexingVersion) -> Self { + let block_index: u64 = block_number.try_into().unwrap(); + + Self { vec_length: 0, handler_start: 0, - seq_no: events, - digest: SetHasher::new(), + block_index, + hasher: Hashers::new(version), } } @@ -82,54 +116,51 @@ impl BlockEventStream { /// be resumed later. Cases in which the hash function is resumed include /// when asking for the final PoI, or when combining with the next modified /// block via the argument `prev` - pub fn pause(self, prev: Option<&[u8]>) -> Vec { - match self { - BlockEventStream::LegacyHasherStream { - vec_length, - handler_start: _, - seq_no, - mut digest, - } => { - vec_length.stable_hash(seq_no, &mut digest); - let mut state = digest; + pub fn pause(mut self, prev: Option<&[u8]>) -> Vec { + self.hasher + .write(&self.vec_length, &[1, 0, self.block_index, 0]); + match self.hasher { + Hashers::Legacy(mut digest) => { if let Some(prev) = prev { let prev = SetHasher::from_bytes(prev); - state.finish_unordered(prev, SequenceNumber::root()); + // SequenceNumber::root() is misleading here since the parameter + // is unused. 
+ digest.finish_unordered(prev, SequenceNumber::root()); } - state.to_bytes() + digest.to_bytes() + } + Hashers::Fast(mut digest) => { + if let Some(prev) = prev { + let prev = prev + .try_into() + .expect("Expected valid fast stable hash representation"); + let prev = FastStableHasher::from_bytes(prev); + digest.mixin(&prev); + } + digest.to_bytes().to_vec() } } } fn write(&mut self, event: &ProofOfIndexingEvent<'_>) { - match self { - BlockEventStream::LegacyHasherStream { - ref mut vec_length, - handler_start: _, - seq_no, - ref mut digest, - } => { - *vec_length += 1; - event.stable_hash(seq_no.next_child(), digest); - } - } + let children = &[ + 1, // kvp -> v + 0, // CausalityRegion.blocks: Vec + self.block_index, // Vec -> [i] + 0, // Block.events -> Vec + self.vec_length, + ]; + self.hasher.write(&event, children); + self.vec_length += 1; } fn start_handler(&mut self) { - match self { - BlockEventStream::LegacyHasherStream { - ref vec_length, - ref mut handler_start, - seq_no: _, - digest: _, - } => { - *handler_start = *vec_length; - } - } + self.handler_start = self.vec_length; } } -#[derive(Default)] + pub struct ProofOfIndexing { + version: ProofOfIndexingVersion, block_number: BlockNumber, /// The POI is updated for each data source independently. 
This is necessary because /// some data sources (eg: IPFS files) may be unreliable and therefore cannot mix @@ -145,8 +176,9 @@ impl fmt::Debug for ProofOfIndexing { } impl ProofOfIndexing { - pub fn new(block_number: BlockNumber) -> Self { + pub fn new(block_number: BlockNumber, version: ProofOfIndexingVersion) -> Self { Self { + version, block_number, per_causality_region: HashMap::new(), } @@ -155,13 +187,8 @@ impl ProofOfIndexing { impl ProofOfIndexing { pub fn write_deterministic_error(&mut self, logger: &Logger, causality_region: &str) { - let redacted_events = self.with_causality_region(causality_region, |entry| match entry { - BlockEventStream::LegacyHasherStream { - ref vec_length, - ref handler_start, - seq_no: _, - digest: _, - } => vec_length - handler_start, + let redacted_events = self.with_causality_region(causality_region, |entry| { + entry.vec_length - entry.handler_start }); self.write( @@ -195,14 +222,14 @@ impl ProofOfIndexing { } // This is just here because the raw_entry API is not stabilized. - pub fn with_causality_region(&mut self, causality_region: &str, f: F) -> T + fn with_causality_region(&mut self, causality_region: &str, f: F) -> T where F: FnOnce(&mut BlockEventStream) -> T, { if let Some(causality_region) = self.per_causality_region.get_mut(causality_region) { f(causality_region) } else { - let mut entry = BlockEventStream::new_legacy(self.block_number); + let mut entry = BlockEventStream::new(self.block_number, self.version); let result = f(&mut entry); self.per_causality_region .insert(causality_region.to_owned(), entry); @@ -217,34 +244,27 @@ impl ProofOfIndexing { pub struct ProofOfIndexingFinisher { block_number: BlockNumber, - state: SetHasher, + state: Hashers, causality_count: usize, } impl ProofOfIndexingFinisher { - pub fn new(block: &BlockPtr, subgraph_id: &DeploymentHash, indexer: &Option
) -> Self { - let mut state = SetHasher::new(); - - // Add the subgraph id - let subgraph_id_seq_no = traverse_seq_no(&[ - 1, // PoI.subgraph_id - ]); - subgraph_id.stable_hash(subgraph_id_seq_no, &mut state); - - // Add the block hash - let block_hash_seq_no = traverse_seq_no(&[ - 2, // PoI.block_hash - ]); - AsBytes(block.hash_slice()).stable_hash(block_hash_seq_no, &mut state); - - // Add the indexer - let indexer_seq_no = traverse_seq_no(&[ - 3, // PoI.indexer - ]); - indexer - .as_ref() - .map(|i| AsBytes(i.as_bytes())) - .stable_hash(indexer_seq_no, &mut state); + pub fn new( + block: &BlockPtr, + subgraph_id: &DeploymentHash, + indexer: &Option
, + version: ProofOfIndexingVersion, + ) -> Self { + let mut state = Hashers::new(version); + + // Add PoI.subgraph_id + state.write(&subgraph_id, &[1]); + + // Add PoI.block_hash + state.write(&AsBytes(block.hash_slice()), &[2]); + + // Add PoI.indexer + state.write(&indexer.as_ref().map(|i| AsBytes(i.as_bytes())), &[3]); ProofOfIndexingFinisher { block_number: block.number, @@ -254,48 +274,43 @@ impl ProofOfIndexingFinisher { } pub fn add_causality_region(&mut self, name: &str, region: &[u8]) { - let mut state = SetHasher::from_bytes(region); + let mut state = Hashers::from_bytes(region); - // Finish the blocks vec - let blocks_seq_no = traverse_seq_no(&[ - 1, // kvp -> v - 0, // CausalityRegion.blocks: Vec - ]); + // Finish the blocks vec by writing kvp[v], CausalityRegion.blocks.len() // + 1 is to account that the length of the blocks array for the genesis block is 1, not 0. - (self.block_number + 1).stable_hash(blocks_seq_no, &mut state); - - // Add the name. - let name_seq_no = traverse_seq_no(&[ - 0, // kvp -> k - ]); - name.stable_hash(name_seq_no, &mut state); + state.write(&(self.block_number + 1), &[1, 0]); - let state = state.finish(); + // Add the name (kvp[k]). + state.write(&name, &[0]); - // Mixin the region with the final value - let causality_regions_member_seq_no = traverse_seq_no(&[ - 0, // Poi.causality_regions - 1, // unordered collection member - ]); + // Mixin the region into PoI.causality_regions. 
+ match state { + Hashers::Legacy(legacy) => { + let state = legacy.finish(); + self.state.write(&AsBytes(&state), &[0, 1]); + } + Hashers::Fast(fast) => { + let state = fast.to_bytes(); + self.state.write(&AsBytes(&state), &[0]); + } + } - self.state.write(causality_regions_member_seq_no, &state); self.causality_count += 1; } - pub fn finish(mut self) -> ::Out { - let causality_regions_count_seq_no = traverse_seq_no(&[ - 0, // Poi.causality_regions - 2, // unordered collection count - ]); - - // Note that technically to get the same sequence number one would need - // to call causality_regions_count_seq_no.skip(self.causality_count); - // but it turns out that the result happens to be the same for - // non-negative numbers. - - self.causality_count - .stable_hash(causality_regions_count_seq_no, &mut self.state); + pub fn finish(mut self) -> [u8; 32] { + if let Hashers::Legacy(_) = self.state { + // Add PoI.causality_regions.len() + // Note that technically to get the same sequence number one would need + // to call causality_regions_count_seq_no.skip(self.causality_count); + // but it turns out that the result happens to be the same for + // non-negative numbers. 
+ self.state.write(&self.causality_count, &[0, 2]); + } - self.state.finish() + match self.state { + Hashers::Legacy(legacy) => legacy.finish(), + Hashers::Fast(fast) => tiny_keccak::keccak256(&fast.finish().to_le_bytes()), + } } } diff --git a/graph/src/components/subgraph/proof_of_indexing/reference.rs b/graph/src/components/subgraph/proof_of_indexing/reference.rs index e63e4ba8d97..63d9703cc85 100644 --- a/graph/src/components/subgraph/proof_of_indexing/reference.rs +++ b/graph/src/components/subgraph/proof_of_indexing/reference.rs @@ -1,7 +1,6 @@ use super::ProofOfIndexingEvent; use crate::prelude::DeploymentHash; -use stable_hash::{utils::AsBytes, FieldAddress, StableHash}; -use stable_hash_legacy::SequenceNumber; +use crate::util::stable_hash_glue::{impl_stable_hash, AsBytes}; use std::collections::HashMap; use web3::types::{Address, H256}; @@ -17,80 +16,26 @@ pub struct PoI<'a> { pub indexer: Option
, } -impl stable_hash_legacy::StableHash for PoI<'_> { - fn stable_hash( - &self, - mut sequence_number: H::Seq, - state: &mut H, - ) { - let PoI { - causality_regions, - subgraph_id, - block_hash, - indexer, - } = self; - - stable_hash_legacy::StableHash::stable_hash( - causality_regions, - sequence_number.next_child(), - state, - ); - stable_hash_legacy::StableHash::stable_hash( - subgraph_id, - sequence_number.next_child(), - state, - ); - stable_hash_legacy::utils::AsBytes(block_hash.as_bytes()) - .stable_hash(sequence_number.next_child(), state); - indexer - .as_ref() - .map(|i| stable_hash_legacy::utils::AsBytes(i.as_bytes())) - .stable_hash(sequence_number.next_child(), state); - } +fn h256_as_bytes(val: &H256) -> AsBytes<&[u8]> { + AsBytes(val.as_bytes()) } -impl StableHash for PoI<'_> { - fn stable_hash(&self, field_address: H::Addr, state: &mut H) { - let PoI { - causality_regions, - subgraph_id, - block_hash, - indexer: _, - } = self; - - StableHash::stable_hash(causality_regions, field_address.child(0), state); - subgraph_id.stable_hash(field_address.child(1), state); - AsBytes(block_hash.as_bytes()).stable_hash(field_address.child(2), state); - self.indexer - .as_ref() - .map(|i| AsBytes(i.as_bytes())) - .stable_hash(field_address.child(3), state); - } +fn indexer_opt_as_bytes(val: &Option
) -> Option> { + val.as_ref().map(|v| AsBytes(v.as_bytes())) } +impl_stable_hash!(PoI<'_> { + causality_regions, + subgraph_id, + block_hash: h256_as_bytes, + indexer: indexer_opt_as_bytes +}); + pub struct CausalityRegion<'a> { pub blocks: Vec>, } -impl stable_hash_legacy::StableHash for CausalityRegion<'_> { - fn stable_hash( - &self, - mut sequence_number: H::Seq, - state: &mut H, - ) { - let CausalityRegion { blocks } = self; - - stable_hash_legacy::StableHash::stable_hash(blocks, sequence_number.next_child(), state); - } -} - -impl StableHash for CausalityRegion<'_> { - fn stable_hash(&self, field_address: H::Addr, state: &mut H) { - let CausalityRegion { blocks } = self; - - StableHash::stable_hash(blocks, field_address.child(0), state); - } -} +impl_stable_hash!(CausalityRegion<'_> {blocks}); impl CausalityRegion<'_> { pub fn from_network(network: &str) -> String { @@ -103,22 +48,4 @@ pub struct Block<'a> { pub events: Vec>, } -impl stable_hash_legacy::StableHash for Block<'_> { - fn stable_hash( - &self, - mut sequence_number: H::Seq, - state: &mut H, - ) { - let Block { events } = self; - - stable_hash_legacy::StableHash::stable_hash(events, sequence_number.next_child(), state); - } -} - -impl StableHash for Block<'_> { - fn stable_hash(&self, field_address: H::Addr, state: &mut H) { - let Block { events } = self; - - StableHash::stable_hash(events, field_address.child(0), state); - } -} +impl_stable_hash!(Block<'_> {events}); diff --git a/graph/src/data/store/mod.rs b/graph/src/data/store/mod.rs index 773d2425c63..4e5668a4877 100644 --- a/graph/src/data/store/mod.rs +++ b/graph/src/data/store/mod.rs @@ -585,9 +585,6 @@ where } } -// Note: Do not modify fields without making a backward compatible change to the -// StableHash impl (below) An entity is represented as a map of attribute names -// to values. /// An entity is represented as a map of attribute names to values. 
#[derive(Clone, Debug, Default, Deserialize, PartialEq, Eq, Serialize)] pub struct Entity(HashMap); @@ -600,13 +597,15 @@ impl stable_hash_legacy::StableHash for Entity { state: &mut H, ) { use stable_hash_legacy::SequenceNumber; - stable_hash_legacy::StableHash::stable_hash(&self.0, sequence_number.next_child(), state); + let Self(inner) = self; + stable_hash_legacy::StableHash::stable_hash(inner, sequence_number.next_child(), state); } } impl StableHash for Entity { fn stable_hash(&self, field_address: H::Addr, state: &mut H) { - StableHash::stable_hash(&self.0, field_address.child(0), state); + let Self(inner) = self; + StableHash::stable_hash(inner, field_address.child(0), state); } } diff --git a/graph/src/data/store/scalar.rs b/graph/src/data/store/scalar.rs index da9ba89accf..33e6925ecba 100644 --- a/graph/src/data/store/scalar.rs +++ b/graph/src/data/store/scalar.rs @@ -4,7 +4,7 @@ use diesel_derives::{AsExpression, FromSqlRow}; use hex; use num_bigint; use serde::{self, Deserialize, Serialize}; -use stable_hash::utils::{AsBytes, AsInt}; +use stable_hash::utils::AsInt; use stable_hash::{FieldAddress, StableHash}; use stable_hash_legacy::SequenceNumber; use thiserror::Error; @@ -19,6 +19,7 @@ use std::str::FromStr; pub use num_bigint::Sign as BigIntSign; use crate::blockchain::BlockHash; +use crate::util::stable_hash_glue::{impl_stable_hash, AsBytes}; /// All operations on `BigDecimal` return a normalized value. // Caveat: The exponent is currently an i64 and may overflow. 
See @@ -542,21 +543,7 @@ impl fmt::Debug for Bytes { } } -impl stable_hash_legacy::StableHash for Bytes { - fn stable_hash( - &self, - sequence_number: H::Seq, - state: &mut H, - ) { - stable_hash_legacy::utils::AsBytes(&self.0).stable_hash(sequence_number, state); - } -} - -impl StableHash for Bytes { - fn stable_hash(&self, field_address: H::Addr, state: &mut H) { - AsBytes(&self.0).stable_hash(field_address, state); - } -} +impl_stable_hash!(Bytes(transparent: AsBytes)); impl Bytes { pub fn as_slice(&self) -> &[u8] { diff --git a/graph/src/data/subgraph/mod.rs b/graph/src/data/subgraph/mod.rs index a61d81a2730..a8033b9c0d8 100644 --- a/graph/src/data/subgraph/mod.rs +++ b/graph/src/data/subgraph/mod.rs @@ -63,8 +63,6 @@ where .map(Some) } -// Note: This has a StableHash impl. Do not modify fields without a backward -// compatible change to the StableHash impl (below) /// The IPFS hash used to identifiy a deployment externally, i.e., the /// `Qm..` string that `graph-cli` prints when deploying to a subgraph #[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] @@ -77,13 +75,15 @@ impl stable_hash_legacy::StableHash for DeploymentHash { mut sequence_number: H::Seq, state: &mut H, ) { - stable_hash_legacy::StableHash::stable_hash(&self.0, sequence_number.next_child(), state); + let Self(inner) = self; + stable_hash_legacy::StableHash::stable_hash(inner, sequence_number.next_child(), state); } } impl StableHash for DeploymentHash { fn stable_hash(&self, field_address: H::Addr, state: &mut H) { - stable_hash::StableHash::stable_hash(&self.0, field_address.child(0), state); + let Self(inner) = self; + stable_hash::StableHash::stable_hash(inner, field_address.child(0), state); } } diff --git a/graph/src/data/subgraph/schema.rs b/graph/src/data/subgraph/schema.rs index 6bba4e3a3ff..c8e5d7f80df 100644 --- a/graph/src/data/subgraph/schema.rs +++ b/graph/src/data/subgraph/schema.rs @@ -5,8 +5,6 @@ use hex; use lazy_static::lazy_static; use rand::rngs::OsRng; use 
rand::Rng; -use stable_hash::{FieldAddress, StableHash}; -use stable_hash_legacy::SequenceNumber; use std::str::FromStr; use std::{fmt, fmt::Display}; @@ -15,6 +13,7 @@ use crate::data::graphql::TryFromValue; use crate::data::store::Value; use crate::data::subgraph::SubgraphManifest; use crate::prelude::*; +use crate::util::stable_hash_glue::impl_stable_hash; use crate::{blockchain::Blockchain, components::store::EntityType}; pub const POI_TABLE: &str = "poi2$"; @@ -202,55 +201,13 @@ impl Display for SubgraphError { } } -impl stable_hash_legacy::StableHash for SubgraphError { - fn stable_hash( - &self, - mut sequence_number: H::Seq, - state: &mut H, - ) { - let SubgraphError { - subgraph_id, - message, - block_ptr, - handler, - deterministic, - } = self; - stable_hash_legacy::StableHash::stable_hash( - &subgraph_id, - sequence_number.next_child(), - state, - ); - stable_hash_legacy::StableHash::stable_hash(&message, sequence_number.next_child(), state); - stable_hash_legacy::StableHash::stable_hash( - &block_ptr, - sequence_number.next_child(), - state, - ); - stable_hash_legacy::StableHash::stable_hash(&handler, sequence_number.next_child(), state); - stable_hash_legacy::StableHash::stable_hash( - &deterministic, - sequence_number.next_child(), - state, - ); - } -} - -impl StableHash for SubgraphError { - fn stable_hash(&self, field_address: H::Addr, state: &mut H) { - let SubgraphError { - subgraph_id, - message, - block_ptr, - handler, - deterministic, - } = self; - StableHash::stable_hash(subgraph_id, field_address.child(0), state); - StableHash::stable_hash(message, field_address.child(1), state); - StableHash::stable_hash(block_ptr, field_address.child(2), state); - StableHash::stable_hash(handler, field_address.child(3), state); - StableHash::stable_hash(deterministic, field_address.child(4), state); - } -} +impl_stable_hash!(SubgraphError { + subgraph_id, + message, + block_ptr, + handler, + deterministic +}); pub fn generate_entity_id() -> String { // Fast 
crypto RNG from operating system diff --git a/graph/src/runtime/asc_heap.rs b/graph/src/runtime/asc_heap.rs index 80bd2ea55fa..e5284b0f7c6 100644 --- a/graph/src/runtime/asc_heap.rs +++ b/graph/src/runtime/asc_heap.rs @@ -1,3 +1,5 @@ +use std::mem::MaybeUninit; + use semver::Version; use super::{ @@ -11,12 +13,14 @@ pub trait AscHeap { /// Allocate new space and write `bytes`, return the allocated address. fn raw_new(&mut self, bytes: &[u8], gas: &GasCounter) -> Result; - fn get( + fn read<'a>( &self, offset: u32, - size: u32, + buffer: &'a mut [MaybeUninit], gas: &GasCounter, - ) -> Result, DeterministicHostError>; + ) -> Result<&'a mut [u8], DeterministicHostError>; + + fn read_u32(&self, offset: u32, gas: &GasCounter) -> Result; fn api_version(&self) -> Version; @@ -60,18 +64,6 @@ where T::from_asc_obj(asc_ptr.read_ptr(heap, gas)?, heap, gas) } -pub fn try_asc_get( - heap: &H, - asc_ptr: AscPtr, - gas: &GasCounter, -) -> Result -where - C: AscType + AscIndexId, - T: TryFromAscObj, -{ - T::try_from_asc_obj(asc_ptr.read_ptr(heap, gas)?, heap, gas) -} - /// Type that can be converted to an Asc object of class `C`. pub trait ToAscObj { fn to_asc_obj( @@ -102,20 +94,10 @@ impl> ToAscObj for &T { } /// Type that can be converted from an Asc object of class `C`. 
-pub trait FromAscObj { +pub trait FromAscObj: Sized { fn from_asc_obj( obj: C, heap: &H, gas: &GasCounter, - ) -> Result - where - Self: Sized; -} - -pub trait TryFromAscObj: Sized { - fn try_from_asc_obj( - obj: C, - heap: &H, - gas: &GasCounter, ) -> Result; } diff --git a/graph/src/runtime/asc_ptr.rs b/graph/src/runtime/asc_ptr.rs index 3f620aeb7d1..aa7c1c6c0e3 100644 --- a/graph/src/runtime/asc_ptr.rs +++ b/graph/src/runtime/asc_ptr.rs @@ -5,7 +5,7 @@ use super::{AscHeap, AscIndexId, AscType, IndexForAscTypeId}; use semver::Version; use std::fmt; use std::marker::PhantomData; -use std::mem::size_of; +use std::mem::MaybeUninit; /// The `rt_size` field contained in an AssemblyScript header has a size of 4 bytes. const SIZE_OF_RT_SIZE: u32 = 4; @@ -59,11 +59,26 @@ impl AscPtr { gas: &GasCounter, ) -> Result { let len = match heap.api_version() { + // TODO: The version check here conflicts with the comment on C::asc_size, + // which states "Only used for version <= 0.0.3." version if version <= Version::new(0, 0, 4) => C::asc_size(self, heap, gas), _ => self.read_len(heap, gas), }?; - let bytes = heap.get(self.0, len, gas)?; - C::from_asc_bytes(&bytes, &heap.api_version()) + + let using_buffer = |buffer: &mut [MaybeUninit]| { + let buffer = heap.read(self.0, buffer, gas)?; + C::from_asc_bytes(buffer, &heap.api_version()) + }; + + let len = len as usize; + + if len <= 32 { + let mut buffer = [MaybeUninit::::uninit(); 32]; + using_buffer(&mut buffer[..len]) + } else { + let mut buffer = Vec::with_capacity(len); + using_buffer(buffer.spare_capacity_mut()) + } } /// Allocate `asc_obj` as an Asc object of class `C`. @@ -112,10 +127,7 @@ impl AscPtr { gas: &GasCounter, ) -> Result { // Read the bytes pointed to by `self` as the bytes of a `u32`. 
- let raw_bytes = heap.get(self.0, size_of::() as u32, gas)?; - let mut u32_bytes: [u8; size_of::()] = [0; size_of::()]; - u32_bytes.copy_from_slice(&raw_bytes); - Ok(u32::from_le_bytes(u32_bytes)) + heap.read_u32(self.0, gas) } /// Helper that generates an AssemblyScript header. @@ -177,10 +189,8 @@ impl AscPtr { self.0 )) })?; - let raw_bytes = heap.get(start_of_rt_size, size_of::() as u32, gas)?; - let mut u32_bytes: [u8; size_of::()] = [0; size_of::()]; - u32_bytes.copy_from_slice(&raw_bytes); - Ok(u32::from_le_bytes(u32_bytes)) + + heap.read_u32(start_of_rt_size, gas) } /// Conversion to `u64` for use with `AscEnum`. diff --git a/graph/src/runtime/mod.rs b/graph/src/runtime/mod.rs index 3ec15573ccd..65fc7c4c8c0 100644 --- a/graph/src/runtime/mod.rs +++ b/graph/src/runtime/mod.rs @@ -8,7 +8,7 @@ pub mod gas; mod asc_heap; mod asc_ptr; -pub use asc_heap::{asc_get, asc_new, try_asc_get, AscHeap, FromAscObj, ToAscObj, TryFromAscObj}; +pub use asc_heap::{asc_get, asc_new, AscHeap, FromAscObj, ToAscObj}; pub use asc_ptr::AscPtr; use anyhow::Error; diff --git a/graph/src/util/mem.rs b/graph/src/util/mem.rs new file mode 100644 index 00000000000..b98b7d5ed87 --- /dev/null +++ b/graph/src/util/mem.rs @@ -0,0 +1,13 @@ +use std::mem::{transmute, MaybeUninit}; + +/// Temporarily needed until MaybeUninit::write_slice is stabilized. 
+pub fn init_slice<'a, T>(src: &[T], dst: &'a mut [MaybeUninit]) -> &'a mut [T] +where + T: Copy, +{ + unsafe { + let uninit_src: &[MaybeUninit] = transmute(src); + dst.copy_from_slice(uninit_src); + &mut *(dst as *mut [MaybeUninit] as *mut [T]) + } +} diff --git a/graph/src/util/mod.rs b/graph/src/util/mod.rs index 21506f80cfc..a7bc8ccb034 100644 --- a/graph/src/util/mod.rs +++ b/graph/src/util/mod.rs @@ -22,3 +22,7 @@ pub mod jobs; pub mod backoff; pub mod bounded_queue; + +pub mod stable_hash_glue; + +pub mod mem; diff --git a/graph/src/util/stable_hash_glue.rs b/graph/src/util/stable_hash_glue.rs new file mode 100644 index 00000000000..8c872c4bcdd --- /dev/null +++ b/graph/src/util/stable_hash_glue.rs @@ -0,0 +1,40 @@ +use stable_hash::{StableHash, StableHasher}; +use stable_hash_legacy::prelude::{ + StableHash as StableHashLegacy, StableHasher as StableHasherLegacy, +}; + +/// Implements StableHash and StableHashLegacy. This macro supports two forms: +/// Struct { field1, field2, ... } and Tuple(transparent). Each field supports +/// an optional modifier. For example: Tuple(transparent: AsBytes) +#[macro_export] +macro_rules! _impl_stable_hash { + ($T:ident$(<$lt:lifetime>)? {$($field:ident$(:$e:path)?),*}) => { + ::stable_hash::impl_stable_hash!($T$(<$lt>)? {$($field$(:$e)?),*}); + ::stable_hash_legacy::impl_stable_hash!($T$(<$lt>)? {$($field$(:$e)?),*}); + }; + ($T:ident$(<$lt:lifetime>)? (transparent$(:$e:path)?)) => { + ::stable_hash::impl_stable_hash!($T$(<$lt>)? (transparent$(:$e)?)); + ::stable_hash_legacy::impl_stable_hash!($T$(<$lt>)? 
(transparent$(:$e)?)); + }; +} +pub use crate::_impl_stable_hash as impl_stable_hash; + +pub struct AsBytes(pub T); + +impl StableHashLegacy for AsBytes +where + T: AsRef<[u8]>, +{ + fn stable_hash(&self, sequence_number: H::Seq, state: &mut H) { + stable_hash_legacy::utils::AsBytes(self.0.as_ref()).stable_hash(sequence_number, state); + } +} + +impl StableHash for AsBytes +where + T: AsRef<[u8]>, +{ + fn stable_hash(&self, field_address: H::Addr, state: &mut H) { + stable_hash::utils::AsBytes(self.0.as_ref()).stable_hash(field_address, state); + } +} diff --git a/graphql/Cargo.toml b/graphql/Cargo.toml index 24470c00f14..b21113e736b 100644 --- a/graphql/Cargo.toml +++ b/graphql/Cargo.toml @@ -11,9 +11,9 @@ graphql-tools = "0.0.19" indexmap = "1.9" Inflector = "0.11.3" lazy_static = "1.2.0" -stable-hash_legacy = { version = "0.3.2", package = "stable-hash" } -stable-hash = { version = "0.4.1"} once_cell = "1.12.1" +stable-hash_legacy = { version = "0.3.3", package = "stable-hash" } +stable-hash = { version = "0.4.2"} defer = "0.1" parking_lot = "0.12" anyhow = "1.0" diff --git a/graphql/src/execution/execution.rs b/graphql/src/execution/execution.rs index 26cbe052f54..85778321414 100644 --- a/graphql/src/execution/execution.rs +++ b/graphql/src/execution/execution.rs @@ -7,8 +7,6 @@ use graph::{ }; use lazy_static::lazy_static; use parking_lot::MutexGuard; -use stable_hash::{FieldAddress, StableHash, StableHasher}; -use stable_hash_legacy::SequenceNumber; use std::time::Instant; use std::{borrow::ToOwned, collections::HashSet}; @@ -16,7 +14,7 @@ use graph::data::graphql::*; use graph::data::query::CacheStatus; use graph::env::CachedSubgraphIds; use graph::prelude::*; -use graph::util::lfu_cache::LfuCache; +use graph::util::{lfu_cache::LfuCache, stable_hash_glue::impl_stable_hash}; use super::QueryHash; use crate::execution::ast as a; @@ -69,62 +67,33 @@ struct HashableQuery<'a> { block_ptr: &'a BlockPtr, } -/// Note that the use of StableHash here is a little bit 
loose. In particular, -/// we are converting items to a string inside here as a quick-and-dirty -/// implementation. This precludes the ability to add new fields (unlikely -/// anyway). So, this hash isn't really Stable in the way that the StableHash -/// crate defines it. Since hashes are only persisted for this process, we don't -/// need that property. The reason we are using StableHash is to get collision -/// resistance and use it's foolproof API to prevent easy mistakes instead. -/// -/// This is also only as collision resistant insofar as the to_string impls are -/// collision resistant. It is highly likely that this is ok, since these come -/// from an ast. -/// -/// It is possible that multiple asts that are effectively the same query with -/// different representations. This is considered not an issue. The worst -/// possible outcome is that the same query will have multiple cache entries. -/// But, the wrong result should not be served. -impl stable_hash_legacy::StableHash for HashableQuery<'_> { - fn stable_hash( - &self, - mut sequence_number: H::Seq, - state: &mut H, - ) { - stable_hash_legacy::StableHash::stable_hash( - &self.query_schema_id, - sequence_number.next_child(), - state, - ); - - // Not stable! Uses to_string - stable_hash_legacy::StableHash::stable_hash( - &format!("{:?}", self.selection_set), - sequence_number.next_child(), - state, - ); - - stable_hash_legacy::StableHash::stable_hash( - &self.block_ptr, - sequence_number.next_child(), - state, - ); - } -} - -impl StableHash for HashableQuery<'_> { - fn stable_hash(&self, field_address: H::Addr, state: &mut H) { - StableHash::stable_hash(&self.query_schema_id, field_address.child(0), state); - - // Not stable! Uses to_string - StableHash::stable_hash( - &format!("{:?}", self.selection_set), - field_address.child(1), - state, - ); - - StableHash::stable_hash(&self.block_ptr, field_address.child(2), state); - } +// Note that the use of StableHash here is a little bit loose. 
In particular, +// we are converting items to a string inside here as a quick-and-dirty +// implementation. This precludes the ability to add new fields (unlikely +// anyway). So, this hash isn't really Stable in the way that the StableHash +// crate defines it. Since hashes are only persisted for this process, we don't +// need that property. The reason we are using StableHash is to get collision +// resistance and use it's foolproof API to prevent easy mistakes instead. +// +// This is also only as collision resistant insofar as the to_string impls are +// collision resistant. It is highly likely that this is ok, since these come +// from an ast. +// +// It is possible that multiple asts that are effectively the same query with +// different representations. This is considered not an issue. The worst +// possible outcome is that the same query will have multiple cache entries. +// But, the wrong result should not be served. +impl_stable_hash!(HashableQuery<'_> { + query_schema_id, + // Not stable! Uses to_string + // TODO: Performance: Save a cryptographic hash (Blake3) of the original query + // and pass it through, rather than formatting the selection set. + selection_set: format_selection_set, + block_ptr +}); + +fn format_selection_set(s: &a::SelectionSet) -> String { + format!("{:?}", s) } // The key is: subgraph id + selection set + variables + fragment definitions @@ -140,7 +109,13 @@ fn cache_key( selection_set, block_ptr, }; - stable_hash_legacy::utils::stable_hash::(&query) + // Security: + // This uses the crypo stable hash because a collision would + // cause us to fetch the incorrect query response and possibly + // attest to it. A collision should be impossibly rare with the + // non-crypto version, but a determined attacker should be able + // to find one and cause disputes which we must avoid. 
+ stable_hash::crypto_stable_hash(&query) } fn lfu_cache( diff --git a/runtime/test/src/lib.rs b/runtime/test/src/lib.rs index 0853bb05ef6..d5a8505d1e5 100644 --- a/runtime/test/src/lib.rs +++ b/runtime/test/src/lib.rs @@ -1,3 +1,3 @@ +#![cfg(test)] pub mod common; -#[cfg(test)] mod test; diff --git a/runtime/test/src/test.rs b/runtime/test/src/test.rs index f312a517305..20e9d2f642a 100644 --- a/runtime/test/src/test.rs +++ b/runtime/test/src/test.rs @@ -2,7 +2,7 @@ use graph::data::store::scalar; use graph::data::subgraph::*; use graph::prelude::web3::types::U256; use graph::prelude::*; -use graph::runtime::{asc_get, asc_new, try_asc_get, AscIndexId, AscType}; +use graph::runtime::{AscIndexId, AscType}; use graph::runtime::{AscPtr, ToAscObj}; use graph::{components::store::*, ipfs_client::IpfsClient}; use graph_chain_ethereum::{Chain, DataSource}; @@ -197,8 +197,7 @@ impl WasmInstanceExt for WasmInstance { T: ToAscObj + ?Sized, { let func = self.get_func(f).typed().unwrap().clone(); - let gas = self.gas.cheap_clone(); - let ptr = asc_new(self, arg, &gas).unwrap(); + let ptr = self.asc_new(arg).unwrap(); let ptr: u32 = func.call(ptr.wasm_ptr()).unwrap(); ptr.into() } @@ -221,9 +220,8 @@ impl WasmInstanceExt for WasmInstance { T2: ToAscObj + ?Sized, { let func = self.get_func(f).typed().unwrap().clone(); - let gas = self.gas.cheap_clone(); - let arg0 = asc_new(self, arg0, &gas).unwrap(); - let arg1 = asc_new(self, arg1, &gas).unwrap(); + let arg0 = self.asc_new(arg0).unwrap(); + let arg1 = self.asc_new(arg1).unwrap(); let ptr: u32 = func.call((arg0.wasm_ptr(), arg1.wasm_ptr())).unwrap(); ptr.into() } @@ -241,9 +239,8 @@ impl WasmInstanceExt for WasmInstance { T2: ToAscObj + ?Sized, { let func = self.get_func(f).typed().unwrap().clone(); - let gas = self.gas.cheap_clone(); - let arg0 = asc_new(self, arg0, &gas).unwrap(); - let arg1 = asc_new(self, arg1, &gas).unwrap(); + let arg0 = self.asc_new(arg0).unwrap(); + let arg1 = self.asc_new(arg1).unwrap(); 
func.call((arg0.wasm_ptr(), arg1.wasm_ptr())) } @@ -258,8 +255,7 @@ impl WasmInstanceExt for WasmInstance { T: ToAscObj + ?Sized, { let func = self.get_func(func).typed().unwrap().clone(); - let gas = self.gas.cheap_clone(); - let ptr = asc_new(self, v, &gas).unwrap(); + let ptr = self.asc_new(v).unwrap(); func.call(ptr.wasm_ptr()).unwrap() } @@ -299,7 +295,7 @@ async fn test_json_conversions(api_version: Version, gas_used: u64) { // test BigInt conversion let number = "-922337203685077092345034"; let big_int_obj: AscPtr = module.invoke_export1("testToBigInt", number); - let bytes: Vec = asc_get(&module, big_int_obj, &module.gas).unwrap(); + let bytes: Vec = module.asc_get(big_int_obj).unwrap(); assert_eq!( scalar::BigInt::from_str(number).unwrap(), @@ -334,7 +330,7 @@ async fn test_json_parsing(api_version: Version, gas_used: u64) { let s = "foo"; // Invalid because there are no quotes around `foo` let bytes: &[u8] = s.as_ref(); let return_value: AscPtr = module.invoke_export1("handleJsonError", bytes); - let output: String = asc_get(&module, return_value, &module.gas).unwrap(); + let output: String = module.asc_get(return_value).unwrap(); assert_eq!(output, "ERROR: true"); // Parse valid JSON and get it back @@ -342,7 +338,7 @@ async fn test_json_parsing(api_version: Version, gas_used: u64) { let bytes: &[u8] = s.as_ref(); let return_value: AscPtr = module.invoke_export1("handleJsonError", bytes); - let output: String = asc_get(&module, return_value, &module.gas).unwrap(); + let output: String = module.asc_get(return_value).unwrap(); assert_eq!(output, "OK: foo, ERROR: false"); assert_eq!(module.gas_used(), gas_used); } @@ -376,7 +372,7 @@ async fn test_ipfs_cat(api_version: Version) { api_version, )); let converted: AscPtr = module.invoke_export1("ipfsCatString", &hash); - let data: String = asc_get(&module, converted, &module.gas).unwrap(); + let data: String = module.asc_get(converted).unwrap(); assert_eq!(data, "42"); }) .join() @@ -412,7 +408,7 @@ async fn 
test_ipfs_block() { API_VERSION_0_0_5, )); let converted: AscPtr = module.invoke_export1("ipfsBlockHex", &hash); - let data: String = asc_get(&module, converted, &module.gas).unwrap(); + let data: String = module.asc_get(converted).unwrap(); assert_eq!(data, "0x0a080802120234321802"); }) .join() @@ -473,9 +469,9 @@ async fn run_ipfs_map( ), api_version, )); - let gas = module.gas.cheap_clone(); - let value = asc_new(&mut module, &hash, &gas).unwrap(); - let user_data = asc_new(&mut module, USER_DATA, &gas).unwrap(); + + let value = module.asc_new(&hash).unwrap(); + let user_data = module.asc_new(USER_DATA).unwrap(); // Invoke the callback let func = module.get_func("ipfsMap").typed().unwrap().clone(); @@ -643,7 +639,7 @@ async fn test_crypto_keccak256(api_version: Version) { let input: &[u8] = "eth".as_ref(); let hash: AscPtr = module.invoke_export1("hash", input); - let hash: Vec = asc_get(&module, hash, &module.gas).unwrap(); + let hash: Vec = module.asc_get(hash).unwrap(); assert_eq!( hex::encode(hash), "4f5b812789fc606be1b3b16908db13fc7a9adf7ca72641f84d75b47069d3d7f0" @@ -674,19 +670,19 @@ async fn test_big_int_to_hex(api_version: Version, gas_used: u64) { // Convert zero to hex let zero = BigInt::from_unsigned_u256(&U256::zero()); let zero_hex_ptr: AscPtr = module.invoke_export1("big_int_to_hex", &zero); - let zero_hex_str: String = asc_get(&module, zero_hex_ptr, &module.gas).unwrap(); + let zero_hex_str: String = module.asc_get(zero_hex_ptr).unwrap(); assert_eq!(zero_hex_str, "0x0"); // Convert 1 to hex let one = BigInt::from_unsigned_u256(&U256::one()); let one_hex_ptr: AscPtr = module.invoke_export1("big_int_to_hex", &one); - let one_hex_str: String = asc_get(&module, one_hex_ptr, &module.gas).unwrap(); + let one_hex_str: String = module.asc_get(one_hex_ptr).unwrap(); assert_eq!(one_hex_str, "0x1"); // Convert U256::max_value() to hex let u256_max = BigInt::from_unsigned_u256(&U256::max_value()); let u256_max_hex_ptr: AscPtr = 
module.invoke_export1("big_int_to_hex", &u256_max); - let u256_max_hex_str: String = asc_get(&module, u256_max_hex_ptr, &module.gas).unwrap(); + let u256_max_hex_str: String = module.asc_get(u256_max_hex_ptr).unwrap(); assert_eq!( u256_max_hex_str, "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" @@ -720,42 +716,42 @@ async fn test_big_int_arithmetic(api_version: Version, gas_used: u64) { let zero = BigInt::from(0); let one = BigInt::from(1); let result_ptr: AscPtr = module.invoke_export2("plus", &zero, &one); - let result: BigInt = asc_get(&module, result_ptr, &module.gas).unwrap(); + let result: BigInt = module.asc_get(result_ptr).unwrap(); assert_eq!(result, BigInt::from(1)); // 127 + 1 = 128 let zero = BigInt::from(127); let one = BigInt::from(1); let result_ptr: AscPtr = module.invoke_export2("plus", &zero, &one); - let result: BigInt = asc_get(&module, result_ptr, &module.gas).unwrap(); + let result: BigInt = module.asc_get(result_ptr).unwrap(); assert_eq!(result, BigInt::from(128)); // 5 - 10 = -5 let five = BigInt::from(5); let ten = BigInt::from(10); let result_ptr: AscPtr = module.invoke_export2("minus", &five, &ten); - let result: BigInt = asc_get(&module, result_ptr, &module.gas).unwrap(); + let result: BigInt = module.asc_get(result_ptr).unwrap(); assert_eq!(result, BigInt::from(-5)); // -20 * 5 = -100 let minus_twenty = BigInt::from(-20); let five = BigInt::from(5); let result_ptr: AscPtr = module.invoke_export2("times", &minus_twenty, &five); - let result: BigInt = asc_get(&module, result_ptr, &module.gas).unwrap(); + let result: BigInt = module.asc_get(result_ptr).unwrap(); assert_eq!(result, BigInt::from(-100)); // 5 / 2 = 2 let five = BigInt::from(5); let two = BigInt::from(2); let result_ptr: AscPtr = module.invoke_export2("dividedBy", &five, &two); - let result: BigInt = asc_get(&module, result_ptr, &module.gas).unwrap(); + let result: BigInt = module.asc_get(result_ptr).unwrap(); assert_eq!(result, BigInt::from(2)); // 5 % 2 
= 1 let five = BigInt::from(5); let two = BigInt::from(2); let result_ptr: AscPtr = module.invoke_export2("mod", &five, &two); - let result: BigInt = asc_get(&module, result_ptr, &module.gas).unwrap(); + let result: BigInt = module.asc_get(result_ptr).unwrap(); assert_eq!(result, BigInt::from(1)); assert_eq!(module.gas_used(), gas_used); @@ -816,7 +812,7 @@ async fn test_bytes_to_base58(api_version: Version, gas_used: u64) { let bytes = hex::decode("12207D5A99F603F231D53A4F39D1521F98D2E8BB279CF29BEBFD0687DC98458E7F89") .unwrap(); let result_ptr: AscPtr = module.invoke_export1("bytes_to_base58", bytes.as_slice()); - let base58: String = asc_get(&module, result_ptr, &module.gas).unwrap(); + let base58: String = module.asc_get(result_ptr).unwrap(); assert_eq!(base58, "QmWmyoMoctfbAaiEs2G46gpeUmhqFRDW6KWo64y5r581Vz"); assert_eq!(module.gas_used(), gas_used); @@ -910,7 +906,7 @@ async fn test_ens_name_by_hash(api_version: Version) { let name = "dealdrafts"; test_store::insert_ens_name(hash, name); let converted: AscPtr = module.invoke_export1("nameByHash", hash); - let data: String = asc_get(&module, converted, &module.gas).unwrap(); + let data: String = module.asc_get(converted).unwrap(); assert_eq!(data, name); assert!(module @@ -959,7 +955,8 @@ async fn test_entity_store(api_version: Version) { None } else { Some(Entity::from( - try_asc_get::, _, _>(module, entity_ptr, &module.gas) + module + .asc_get::, _>(entity_ptr) .unwrap(), )) } diff --git a/runtime/test/src/test/abi.rs b/runtime/test/src/test/abi.rs index 9d7a4ecf7ee..dc62f44bd62 100644 --- a/runtime/test/src/test/abi.rs +++ b/runtime/test/src/test/abi.rs @@ -79,7 +79,7 @@ async fn test_abi_array(api_version: Version, gas_used: u64) { "4".to_owned(), ]; let new_vec_obj: AscPtr>> = module.invoke_export1("test_array", &vec); - let new_vec: Vec = asc_get(&module, new_vec_obj, &module.gas).unwrap(); + let new_vec: Vec = module.asc_get(new_vec_obj).unwrap(); assert_eq!(module.gas_used(), gas_used); assert_eq!( @@ 
-118,7 +118,7 @@ async fn test_abi_subarray(api_version: Version) { let vec: Vec = vec![1, 2, 3, 4]; let new_vec_obj: AscPtr> = module.invoke_export1("byte_array_third_quarter", vec.as_slice()); - let new_vec: Vec = asc_get(&module, new_vec_obj, &module.gas).unwrap(); + let new_vec: Vec = module.asc_get(new_vec_obj).unwrap(); assert_eq!(new_vec, vec![3]); } @@ -148,7 +148,7 @@ async fn test_abi_bytes_and_fixed_bytes(api_version: Version) { let new_vec_obj: AscPtr = module.invoke_export2("concat", &*bytes1, &*bytes2); // This should be bytes1 and bytes2 concatenated. - let new_vec: Vec = asc_get(&module, new_vec_obj, &module.gas).unwrap(); + let new_vec: Vec = module.asc_get(new_vec_obj).unwrap(); let mut concated = bytes1.clone(); concated.extend(bytes2.clone()); @@ -175,7 +175,6 @@ async fn test_abi_ethabi_token_identity(api_version: Version) { api_version, ) .await; - let gas = module.gas.cheap_clone(); // Token::Address let address = H160([1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]); @@ -185,7 +184,7 @@ async fn test_abi_ethabi_token_identity(api_version: Version) { module.invoke_export1("token_to_address", &token_address); let new_token_ptr = module.takes_ptr_returns_ptr("token_from_address", new_address_obj); - let new_token = asc_get(&module, new_token_ptr, &gas).unwrap(); + let new_token = module.asc_get(new_token_ptr).unwrap(); assert_eq!(token_address, new_token); @@ -193,7 +192,7 @@ async fn test_abi_ethabi_token_identity(api_version: Version) { let token_bytes = Token::Bytes(vec![42, 45, 7, 245, 45]); let new_bytes_obj: AscPtr = module.invoke_export1("token_to_bytes", &token_bytes); let new_token_ptr = module.takes_ptr_returns_ptr("token_from_bytes", new_bytes_obj); - let new_token = asc_get(&module, new_token_ptr, &gas).unwrap(); + let new_token = module.asc_get(new_token_ptr).unwrap(); assert_eq!(token_bytes, new_token); @@ -202,7 +201,7 @@ async fn test_abi_ethabi_token_identity(api_version: Version) { let new_int_obj: AscPtr = 
module.invoke_export1("token_to_int", &int_token); let new_token_ptr = module.takes_ptr_returns_ptr("token_from_int", new_int_obj); - let new_token = asc_get(&module, new_token_ptr, &gas).unwrap(); + let new_token = module.asc_get(new_token_ptr).unwrap(); assert_eq!(int_token, new_token); @@ -211,7 +210,7 @@ async fn test_abi_ethabi_token_identity(api_version: Version) { let new_uint_obj: AscPtr = module.invoke_export1("token_to_uint", &uint_token); let new_token_ptr = module.takes_ptr_returns_ptr("token_from_uint", new_uint_obj); - let new_token = asc_get(&module, new_token_ptr, &module.gas).unwrap(); + let new_token = module.asc_get(new_token_ptr).unwrap(); assert_eq!(uint_token, new_token); assert_ne!(uint_token, int_token); @@ -219,12 +218,12 @@ async fn test_abi_ethabi_token_identity(api_version: Version) { // Token::Bool let token_bool = Token::Bool(true); - let token_bool_ptr = asc_new(&mut module, &token_bool, &gas).unwrap(); + let token_bool_ptr = module.asc_new(&token_bool).unwrap(); let func = module.get_func("token_to_bool").typed().unwrap().clone(); let boolean: i32 = func.call(token_bool_ptr.wasm_ptr()).unwrap(); let new_token_ptr = module.takes_val_returns_ptr("token_from_bool", boolean); - let new_token = asc_get(&module, new_token_ptr, &module.gas).unwrap(); + let new_token = module.asc_get(new_token_ptr).unwrap(); assert_eq!(token_bool, new_token); @@ -232,7 +231,7 @@ async fn test_abi_ethabi_token_identity(api_version: Version) { let token_string = Token::String("漢字Go🇧🇷".into()); let new_string_obj: AscPtr = module.invoke_export1("token_to_string", &token_string); let new_token_ptr = module.takes_ptr_returns_ptr("token_from_string", new_string_obj); - let new_token = asc_get(&module, new_token_ptr, &module.gas).unwrap(); + let new_token = module.asc_get(new_token_ptr).unwrap(); assert_eq!(token_string, new_token); @@ -243,7 +242,7 @@ async fn test_abi_ethabi_token_identity(api_version: Version) { module.invoke_export1("token_to_array", 
&token_array_nested); let new_token_ptr = module.takes_ptr_returns_ptr("token_from_array", new_array_obj); - let new_token: Token = asc_get(&module, new_token_ptr, &module.gas).unwrap(); + let new_token: Token = module.asc_get(new_token_ptr).unwrap(); assert_eq!(new_token, token_array_nested); } @@ -277,51 +276,50 @@ async fn test_abi_store_value(api_version: Version) { let func = module.get_func("value_null").typed().unwrap().clone(); let ptr: u32 = func.call(()).unwrap(); let null_value_ptr: AscPtr> = ptr.into(); - let null_value: Value = try_asc_get(&module, null_value_ptr, &module.gas).unwrap(); + let null_value: Value = module.asc_get(null_value_ptr).unwrap(); assert_eq!(null_value, Value::Null); // Value::String let string = "some string"; let new_value_ptr = module.invoke_export1("value_from_string", string); - let new_value: Value = try_asc_get(&module, new_value_ptr, &module.gas).unwrap(); + let new_value: Value = module.asc_get(new_value_ptr).unwrap(); assert_eq!(new_value, Value::from(string)); // Value::Int let int = i32::min_value(); let new_value_ptr = module.takes_val_returns_ptr("value_from_int", int); - let new_value: Value = try_asc_get(&module, new_value_ptr, &module.gas).unwrap(); + let new_value: Value = module.asc_get(new_value_ptr).unwrap(); assert_eq!(new_value, Value::Int(int)); // Value::BigDecimal let big_decimal = BigDecimal::from_str("3.14159001").unwrap(); let new_value_ptr = module.invoke_export1("value_from_big_decimal", &big_decimal); - let new_value: Value = try_asc_get(&module, new_value_ptr, &module.gas).unwrap(); + let new_value: Value = module.asc_get(new_value_ptr).unwrap(); assert_eq!(new_value, Value::BigDecimal(big_decimal)); let big_decimal = BigDecimal::new(10.into(), 5); let new_value_ptr = module.invoke_export1("value_from_big_decimal", &big_decimal); - let new_value: Value = try_asc_get(&module, new_value_ptr, &module.gas).unwrap(); + let new_value: Value = module.asc_get(new_value_ptr).unwrap(); assert_eq!(new_value, 
Value::BigDecimal(1_000_000.into())); // Value::Bool let boolean = true; let new_value_ptr = module.takes_val_returns_ptr("value_from_bool", if boolean { 1 } else { 0 }); - let new_value: Value = try_asc_get(&module, new_value_ptr, &module.gas).unwrap(); + let new_value: Value = module.asc_get(new_value_ptr).unwrap(); assert_eq!(new_value, Value::Bool(boolean)); // Value::List - let gas = module.gas.cheap_clone(); let func = module .get_func("array_from_values") .typed() .unwrap() .clone(); let new_value_ptr: u32 = func - .call((asc_new(&mut module, string, &gas).unwrap().wasm_ptr(), int)) + .call((module.asc_new(string).unwrap().wasm_ptr(), int)) .unwrap(); let new_value_ptr = AscPtr::from(new_value_ptr); - let new_value: Value = try_asc_get(&module, new_value_ptr, &gas).unwrap(); + let new_value: Value = module.asc_get(new_value_ptr).unwrap(); assert_eq!( new_value, Value::List(vec![Value::from(string), Value::Int(int)]) @@ -332,7 +330,7 @@ async fn test_abi_store_value(api_version: Version) { Value::String("bar".to_owned()), ]; let new_value_ptr = module.invoke_export1("value_from_array", array); - let new_value: Value = try_asc_get(&module, new_value_ptr, &module.gas).unwrap(); + let new_value: Value = module.asc_get(new_value_ptr).unwrap(); assert_eq!( new_value, Value::List(vec![ @@ -344,13 +342,13 @@ async fn test_abi_store_value(api_version: Version) { // Value::Bytes let bytes: &[u8] = &[0, 2, 5]; let new_value_ptr = module.invoke_export1("value_from_bytes", bytes); - let new_value: Value = try_asc_get(&module, new_value_ptr, &module.gas).unwrap(); + let new_value: Value = module.asc_get(new_value_ptr).unwrap(); assert_eq!(new_value, Value::Bytes(bytes.into())); // Value::BigInt let bytes: &[u8] = &[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]; let new_value_ptr = module.invoke_export1("value_from_bigint", bytes); - let new_value: Value = try_asc_get(&module, new_value_ptr, &module.gas).unwrap(); + let new_value: Value = 
module.asc_get(new_value_ptr).unwrap(); assert_eq!( new_value, Value::BigInt(::graph::data::store::scalar::BigInt::from_unsigned_bytes_le(bytes)) @@ -383,7 +381,7 @@ async fn test_abi_h160(api_version: Version) { let new_address_obj: AscPtr = module.invoke_export1("test_address", &address); // This should have 1 added to the first and last byte. - let new_address: H160 = asc_get(&module, new_address_obj, &module.gas).unwrap(); + let new_address: H160 = module.asc_get(new_address_obj).unwrap(); assert_eq!( new_address, @@ -413,7 +411,7 @@ async fn test_string(api_version: Version) { .await; let string = " 漢字Double_Me🇧🇷 "; let trimmed_string_obj: AscPtr = module.invoke_export1("repeat_twice", string); - let doubled_string: String = asc_get(&module, trimmed_string_obj, &module.gas).unwrap(); + let doubled_string: String = module.asc_get(trimmed_string_obj).unwrap(); assert_eq!(doubled_string, string.repeat(2)); } @@ -442,7 +440,7 @@ async fn test_abi_big_int(api_version: Version) { let old_uint = U256::zero(); let new_uint_obj: AscPtr = module.invoke_export1("test_uint", &BigInt::from_unsigned_u256(&old_uint)); - let new_uint: BigInt = asc_get(&module, new_uint_obj, &module.gas).unwrap(); + let new_uint: BigInt = module.asc_get(new_uint_obj).unwrap(); assert_eq!(new_uint, BigInt::from(1 as i32)); let new_uint = new_uint.to_unsigned_u256(); assert_eq!(new_uint, U256([1, 0, 0, 0])); @@ -450,7 +448,7 @@ async fn test_abi_big_int(api_version: Version) { // Test passing in -50 and increment it by 1 let old_uint = BigInt::from(-50); let new_uint_obj: AscPtr = module.invoke_export1("test_uint", &old_uint); - let new_uint: BigInt = asc_get(&module, new_uint_obj, &module.gas).unwrap(); + let new_uint: BigInt = module.asc_get(new_uint_obj).unwrap(); assert_eq!(new_uint, BigInt::from(-49 as i32)); let new_uint_from_u256 = BigInt::from_signed_u256(&new_uint.to_signed_u256()); assert_eq!(new_uint, new_uint_from_u256); @@ -480,7 +478,7 @@ async fn 
test_big_int_to_string(api_version: Version) { let big_int_str = "30145144166666665000000000000000000"; let big_int = BigInt::from_str(big_int_str).unwrap(); let string_obj: AscPtr = module.invoke_export1("big_int_to_string", &big_int); - let string: String = asc_get(&module, string_obj, &module.gas).unwrap(); + let string: String = module.asc_get(string_obj).unwrap(); assert_eq!(string, big_int_str); } @@ -511,7 +509,7 @@ async fn test_invalid_discriminant(api_version: Version) { .unwrap() .clone(); let ptr: u32 = func.call(()).unwrap(); - let _value: Value = try_asc_get(&module, ptr.into(), &module.gas).unwrap(); + let _value: Value = module.asc_get(ptr.into()).unwrap(); } // This should panic rather than exhibiting UB. It's hard to test for UB, but diff --git a/runtime/wasm/src/module/mod.rs b/runtime/wasm/src/module/mod.rs index e82a4ba0a47..46a3ef11855 100644 --- a/runtime/wasm/src/module/mod.rs +++ b/runtime/wasm/src/module/mod.rs @@ -1,6 +1,8 @@ -use std::cell::{RefCell, RefMut}; +use std::cell::RefCell; use std::collections::HashMap; use std::convert::TryFrom; +use std::mem::MaybeUninit; +use std::ops::{Deref, DerefMut}; use std::rc::Rc; use std::sync::atomic::{AtomicBool, Ordering}; use std::time::Instant; @@ -13,15 +15,16 @@ use wasmtime::{Memory, Trap}; use graph::blockchain::{Blockchain, HostFnCtx, TriggerWithHandler}; use graph::data::store; +use graph::data::subgraph::schema::SubgraphError; use graph::prelude::*; -use graph::runtime::gas::{self, Gas, GasCounter, SaturatingInto}; -use graph::runtime::HostExportError; -use graph::runtime::{AscHeap, IndexForAscTypeId}; -use graph::{components::subgraph::MappingError, runtime::AscPtr}; -use graph::{ - data::subgraph::schema::SubgraphError, - runtime::{asc_get, asc_new, try_asc_get, DeterministicHostError}, +use graph::runtime::{ + asc_get, asc_new, + gas::{self, Gas, GasCounter, SaturatingInto}, + AscHeap, AscIndexId, AscType, DeterministicHostError, FromAscObj, HostExportError, + IndexForAscTypeId, 
ToAscObj, }; +use graph::util::mem::init_slice; +use graph::{components::subgraph::MappingError, runtime::AscPtr}; pub use into_wasm_ret::IntoWasmRet; pub use stopwatch::TimeoutStopwatch; @@ -65,31 +68,24 @@ impl Drop for WasmInstance { } } -/// Proxies to the WasmInstanceContext. -impl AscHeap for WasmInstance { - fn raw_new(&mut self, bytes: &[u8], gas: &GasCounter) -> Result { - let mut ctx = RefMut::map(self.instance_ctx.borrow_mut(), |i| i.as_mut().unwrap()); - ctx.raw_new(bytes, gas) - } - - fn get( - &self, - offset: u32, - size: u32, - gas: &GasCounter, - ) -> Result, DeterministicHostError> { - self.instance_ctx().get(offset, size, gas) - } - - fn api_version(&self) -> Version { - self.instance_ctx().api_version() +impl WasmInstance { + pub fn asc_get(&self, asc_ptr: AscPtr

) -> Result + where + P: AscType + AscIndexId, + T: FromAscObj

, + { + asc_get(self.instance_ctx().deref(), asc_ptr, &self.gas) } - fn asc_type_id( + pub fn asc_new( &mut self, - type_id_index: IndexForAscTypeId, - ) -> Result { - self.instance_ctx_mut().asc_type_id(type_id_index) + rust_obj: &T, + ) -> Result, DeterministicHostError> + where + P: AscType + AscIndexId, + T: ToAscObj

, + { + asc_new(self.instance_ctx_mut().deref_mut(), rust_obj, &self.gas) } } @@ -101,8 +97,8 @@ impl WasmInstance { user_data: &store::Value, ) -> Result, anyhow::Error> { let gas = GasCounter::default(); - let value = asc_new(&mut self, value, &gas)?; - let user_data = asc_new(&mut self, user_data, &gas)?; + let value = asc_new(self.instance_ctx_mut().deref_mut(), value, &gas)?; + let user_data = asc_new(self.instance_ctx_mut().deref_mut(), user_data, &gas)?; self.instance_ctx_mut().ctx.state.enter_handler(); @@ -125,7 +121,7 @@ impl WasmInstance { ) -> Result<(BlockState, Gas), MappingError> { let handler_name = trigger.handler_name().to_owned(); let gas = self.gas.clone(); - let asc_trigger = trigger.to_asc_ptr(&mut self, &gas)?; + let asc_trigger = trigger.to_asc_ptr(self.instance_ctx_mut().deref_mut(), &gas)?; self.invoke_handler(&handler_name, asc_trigger) } @@ -652,30 +648,48 @@ impl AscHeap for WasmInstanceContext { Ok(ptr as u32) } - fn get( + fn read_u32(&self, offset: u32, gas: &GasCounter) -> Result { + gas.consume_host_fn(Gas::new(GAS_COST_LOAD as u64 * 4))?; + let mut bytes = [0; 4]; + self.memory.read(offset as usize, &mut bytes).map_err(|_| { + DeterministicHostError::from(anyhow!( + "Heap access out of bounds. Offset: {} Size: {}", + offset, + 4 + )) + })?; + Ok(u32::from_le_bytes(bytes)) + } + + fn read<'a>( &self, offset: u32, - size: u32, + buffer: &'a mut [MaybeUninit], gas: &GasCounter, - ) -> Result, DeterministicHostError> { + ) -> Result<&'a mut [u8], DeterministicHostError> { // The cost of reading wasm memory from the host is the same as of reading from wasm using // load instructions. 
- gas.consume_host_fn(Gas::new(GAS_COST_LOAD as u64 * size as u64))?; + gas.consume_host_fn(Gas::new(GAS_COST_LOAD as u64 * (buffer.len() as u64)))?; let offset = offset as usize; - let size = size as usize; - let mut data = vec![0; size]; - - self.memory.read(offset, &mut data).map_err(|_| { - DeterministicHostError::from(anyhow!( - "Heap access out of bounds. Offset: {} Size: {}", - offset, - size - )) - })?; - - Ok(data) + unsafe { + // Safety: This was copy-pasted from Memory::read, and we ensure + // nothing else is writing this memory because we don't call into + // WASM here. + let src = self + .memory + .data_unchecked() + .get(offset..) + .and_then(|s| s.get(..buffer.len())) + .ok_or(DeterministicHostError::from(anyhow!( + "Heap access out of bounds. Offset: {} Size: {}", + offset, + buffer.len() + )))?; + + Ok(init_slice(src, buffer)) + } } fn api_version(&self) -> Version { @@ -855,7 +869,7 @@ impl WasmInstanceContext { let entity = asc_get(self, entity_ptr, gas)?; let id = asc_get(self, id_ptr, gas)?; - let data = try_asc_get(self, data_ptr, gas)?; + let data = asc_get(self, data_ptr, gas)?; self.ctx.host_exports.store_set( &self.ctx.logger, @@ -1155,7 +1169,7 @@ impl WasmInstanceContext { let link: String = asc_get(self, link_ptr, gas)?; let callback: String = asc_get(self, callback, gas)?; - let user_data: store::Value = try_asc_get(self, user_data, gas)?; + let user_data: store::Value = asc_get(self, user_data, gas)?; let flags = asc_get(self, flags, gas)?; @@ -1323,7 +1337,7 @@ impl WasmInstanceContext { let result = self.ctx .host_exports - .big_decimal_divided_by(x, try_asc_get(self, y_ptr, gas)?, gas)?; + .big_decimal_divided_by(x, asc_get(self, y_ptr, gas)?, gas)?; asc_new(self, &result, gas) } @@ -1439,7 +1453,7 @@ impl WasmInstanceContext { let result = self .ctx .host_exports - .big_decimal_to_string(try_asc_get(self, big_decimal_ptr, gas)?, gas)?; + .big_decimal_to_string(asc_get(self, big_decimal_ptr, gas)?, gas)?; asc_new(self, &result, 
gas) } @@ -1464,8 +1478,8 @@ impl WasmInstanceContext { y_ptr: AscPtr, ) -> Result, DeterministicHostError> { let result = self.ctx.host_exports.big_decimal_plus( - try_asc_get(self, x_ptr, gas)?, - try_asc_get(self, y_ptr, gas)?, + asc_get(self, x_ptr, gas)?, + asc_get(self, y_ptr, gas)?, gas, )?; asc_new(self, &result, gas) @@ -1479,8 +1493,8 @@ impl WasmInstanceContext { y_ptr: AscPtr, ) -> Result, DeterministicHostError> { let result = self.ctx.host_exports.big_decimal_minus( - try_asc_get(self, x_ptr, gas)?, - try_asc_get(self, y_ptr, gas)?, + asc_get(self, x_ptr, gas)?, + asc_get(self, y_ptr, gas)?, gas, )?; asc_new(self, &result, gas) @@ -1494,8 +1508,8 @@ impl WasmInstanceContext { y_ptr: AscPtr, ) -> Result, DeterministicHostError> { let result = self.ctx.host_exports.big_decimal_times( - try_asc_get(self, x_ptr, gas)?, - try_asc_get(self, y_ptr, gas)?, + asc_get(self, x_ptr, gas)?, + asc_get(self, y_ptr, gas)?, gas, )?; asc_new(self, &result, gas) @@ -1509,8 +1523,8 @@ impl WasmInstanceContext { y_ptr: AscPtr, ) -> Result, DeterministicHostError> { let result = self.ctx.host_exports.big_decimal_divided_by( - try_asc_get(self, x_ptr, gas)?, - try_asc_get(self, y_ptr, gas)?, + asc_get(self, x_ptr, gas)?, + asc_get(self, y_ptr, gas)?, gas, )?; asc_new(self, &result, gas) @@ -1524,8 +1538,8 @@ impl WasmInstanceContext { y_ptr: AscPtr, ) -> Result { self.ctx.host_exports.big_decimal_equals( - try_asc_get(self, x_ptr, gas)?, - try_asc_get(self, y_ptr, gas)?, + asc_get(self, x_ptr, gas)?, + asc_get(self, y_ptr, gas)?, gas, ) } @@ -1560,7 +1574,7 @@ impl WasmInstanceContext { ) -> Result<(), HostExportError> { let name: String = asc_get(self, name_ptr, gas)?; let params: Vec = asc_get(self, params_ptr, gas)?; - let context: HashMap<_, _> = try_asc_get(self, context_ptr, gas)?; + let context: HashMap<_, _> = asc_get(self, context_ptr, gas)?; self.ctx.host_exports.data_source_create( &self.ctx.logger, &mut self.ctx.state, diff --git 
a/runtime/wasm/src/to_from/external.rs b/runtime/wasm/src/to_from/external.rs index 54ae6442f09..96e99d731e5 100644 --- a/runtime/wasm/src/to_from/external.rs +++ b/runtime/wasm/src/to_from/external.rs @@ -1,16 +1,11 @@ use ethabi; +use graph::prelude::{BigDecimal, BigInt}; use graph::runtime::gas::GasCounter; -use graph::runtime::{ - asc_get, asc_new, try_asc_get, AscIndexId, AscPtr, AscType, AscValue, ToAscObj, -}; +use graph::runtime::{asc_get, asc_new, AscIndexId, AscPtr, AscType, AscValue, ToAscObj}; use graph::{data::store, runtime::DeterministicHostError}; use graph::{prelude::serde_json, runtime::FromAscObj}; use graph::{prelude::web3::types as web3, runtime::AscHeap}; -use graph::{ - prelude::{BigDecimal, BigInt}, - runtime::TryFromAscObj, -}; use crate::asc_abi::class::*; @@ -106,8 +101,8 @@ impl ToAscObj for BigDecimal { } } -impl TryFromAscObj for BigDecimal { - fn try_from_asc_obj( +impl FromAscObj for BigDecimal { + fn from_asc_obj( big_decimal: AscBigDecimal, heap: &H, gas: &GasCounter, @@ -238,8 +233,8 @@ impl FromAscObj> for ethabi::Token { } } -impl TryFromAscObj> for store::Value { - fn try_from_asc_obj( +impl FromAscObj> for store::Value { + fn from_asc_obj( asc_enum: AscEnum, heap: &H, gas: &GasCounter, @@ -255,12 +250,12 @@ impl TryFromAscObj> for store::Value { StoreValueKind::Int => Value::Int(i32::from(payload)), StoreValueKind::BigDecimal => { let ptr: AscPtr = AscPtr::from(payload); - Value::BigDecimal(try_asc_get(heap, ptr, gas)?) + Value::BigDecimal(asc_get(heap, ptr, gas)?) } StoreValueKind::Bool => Value::Bool(bool::from(payload)), StoreValueKind::Array => { let ptr: AscEnumArray = AscPtr::from(payload); - Value::List(try_asc_get(heap, ptr, gas)?) + Value::List(asc_get(heap, ptr, gas)?) 
} StoreValueKind::Null => Value::Null, StoreValueKind::Bytes => { diff --git a/runtime/wasm/src/to_from/mod.rs b/runtime/wasm/src/to_from/mod.rs index a8262c9c50f..a43ecbab2ff 100644 --- a/runtime/wasm/src/to_from/mod.rs +++ b/runtime/wasm/src/to_from/mod.rs @@ -2,13 +2,9 @@ use std::collections::HashMap; use std::hash::Hash; use std::iter::FromIterator; -use graph::runtime::asc_get; -use graph::runtime::asc_new; -use graph::runtime::gas::GasCounter; -use graph::runtime::try_asc_get; use graph::runtime::{ - AscHeap, AscIndexId, AscPtr, AscType, AscValue, DeterministicHostError, FromAscObj, ToAscObj, - TryFromAscObj, + asc_get, asc_new, gas::GasCounter, AscHeap, AscIndexId, AscPtr, AscType, AscValue, + DeterministicHostError, FromAscObj, ToAscObj, }; use crate::asc_abi::class::*; @@ -37,52 +33,13 @@ impl FromAscObj> for Vec { } } -impl FromAscObj> for [T; 32] { +impl FromAscObj> for [T; LEN] { fn from_asc_obj( typed_array: TypedArray, heap: &H, gas: &GasCounter, ) -> Result { - let mut array: [T; 32] = [T::default(); 32]; - let v = typed_array.to_vec(heap, gas)?; - array.copy_from_slice(&v); - Ok(array) - } -} - -impl FromAscObj> for [T; 20] { - fn from_asc_obj( - typed_array: TypedArray, - heap: &H, - gas: &GasCounter, - ) -> Result { - let mut array: [T; 20] = [T::default(); 20]; - let v = typed_array.to_vec(heap, gas)?; - array.copy_from_slice(&v); - Ok(array) - } -} - -impl FromAscObj> for [T; 16] { - fn from_asc_obj( - typed_array: TypedArray, - heap: &H, - gas: &GasCounter, - ) -> Result { - let mut array: [T; 16] = [T::default(); 16]; - let v = typed_array.to_vec(heap, gas)?; - array.copy_from_slice(&v); - Ok(array) - } -} - -impl FromAscObj> for [T; 4] { - fn from_asc_obj( - typed_array: TypedArray, - heap: &H, - gas: &GasCounter, - ) -> Result { - let mut array: [T; 4] = [T::default(); 4]; + let mut array: [T; LEN] = [T::default(); LEN]; let v = typed_array.to_vec(heap, gas)?; array.copy_from_slice(&v); Ok(array) @@ -126,16 +83,6 @@ impl FromAscObj for 
String { } } -impl TryFromAscObj for String { - fn try_from_asc_obj( - asc_string: AscString, - heap: &H, - gas: &GasCounter, - ) -> Result { - Ok(Self::from_asc_obj(asc_string, heap, gas)?) - } -} - impl> ToAscObj>> for [T] { fn to_asc_obj( &self, @@ -162,35 +109,17 @@ impl> FromAscObj>> for } } -impl> TryFromAscObj>> for Vec { - fn try_from_asc_obj( - array: Array>, - heap: &H, - gas: &GasCounter, - ) -> Result { - array - .to_vec(heap, gas)? - .into_iter() - .map(|x| try_asc_get(heap, x, gas)) - .collect() - } -} - -impl< - K: AscType + AscIndexId, - V: AscType + AscIndexId, - T: TryFromAscObj, - U: TryFromAscObj, - > TryFromAscObj> for (T, U) +impl, U: FromAscObj> + FromAscObj> for (T, U) { - fn try_from_asc_obj( + fn from_asc_obj( asc_entry: AscTypedMapEntry, heap: &H, gas: &GasCounter, ) -> Result { Ok(( - try_asc_get(heap, asc_entry.key, gas)?, - try_asc_get(heap, asc_entry.value, gas)?, + asc_get(heap, asc_entry.key, gas)?, + asc_get(heap, asc_entry.value, gas)?, )) } } @@ -213,19 +142,19 @@ impl, U: ToAscO impl< K: AscType + AscIndexId, V: AscType + AscIndexId, - T: TryFromAscObj + Hash + Eq, - U: TryFromAscObj, - > TryFromAscObj> for HashMap + T: FromAscObj + Hash + Eq, + U: FromAscObj, + > FromAscObj> for HashMap where Array>>: AscIndexId, AscTypedMapEntry: AscIndexId, { - fn try_from_asc_obj( + fn from_asc_obj( asc_map: AscTypedMap, heap: &H, gas: &GasCounter, ) -> Result { - let entries: Vec<(T, U)> = try_asc_get(heap, asc_map.entries, gas)?; + let entries: Vec<(T, U)> = asc_get(heap, asc_map.entries, gas)?; Ok(HashMap::from_iter(entries.into_iter())) } } diff --git a/store/postgres/Cargo.toml b/store/postgres/Cargo.toml index df158c99bf6..6e838f3b8b7 100644 --- a/store/postgres/Cargo.toml +++ b/store/postgres/Cargo.toml @@ -26,7 +26,7 @@ postgres-openssl = "0.5.0" rand = "0.8.4" serde = "1.0" uuid = { version = "1.1.2", features = ["v4"] } -stable-hash_legacy = { version = "0.3.2", package = "stable-hash" } +stable-hash_legacy = { version = "0.3.3", 
package = "stable-hash" } diesel_derives = "1.4.1" anyhow = "1.0.57" git-testament = "0.2.0" diff --git a/store/postgres/src/deployment_store.rs b/store/postgres/src/deployment_store.rs index 342beb52fc2..e54cf61820f 100644 --- a/store/postgres/src/deployment_store.rs +++ b/store/postgres/src/deployment_store.rs @@ -22,7 +22,7 @@ use std::sync::{atomic::AtomicUsize, Arc, Mutex}; use std::time::Instant; use graph::components::store::EntityCollection; -use graph::components::subgraph::ProofOfIndexingFinisher; +use graph::components::subgraph::{ProofOfIndexingFinisher, ProofOfIndexingVersion}; use graph::constraint_violation; use graph::data::subgraph::schema::{DeploymentCreate, SubgraphError, POI_OBJECT}; use graph::prelude::{ @@ -892,7 +892,13 @@ impl DeploymentStore { }) .collect::, anyhow::Error>>()?; - let mut finisher = ProofOfIndexingFinisher::new(&block2, &site3.deployment, &indexer); + // TODO: (Fast PoI) This should also support the Fast + // variant when indicated by the subgraph manifest. + // See also a0a79c0f-919f-4d97-a82a-439a1ff78230 + let version = ProofOfIndexingVersion::Legacy; + + let mut finisher = + ProofOfIndexingFinisher::new(&block2, &site3.deployment, &indexer, version); for (name, region) in by_causality_region.drain() { finisher.add_causality_region(&name, ®ion); }