Skip to content

Commit

Permalink
Compilation corrections after rebase.
Browse files Browse the repository at this point in the history
  • Loading branch information
MathieuDutSik committed Jan 22, 2025
1 parent 502808f commit 18cfadf
Show file tree
Hide file tree
Showing 8 changed files with 52 additions and 15 deletions.
6 changes: 3 additions & 3 deletions linera-indexer/lib/src/rocks_db.rs
Original file line number Diff line number Diff line change
Expand Up @@ -39,13 +39,13 @@ pub type RocksDbRunner = Runner<RocksDbStore, RocksDbConfig>;
impl RocksDbRunner {
pub async fn load() -> Result<Self, IndexerError> {
let config = IndexerConfig::<RocksDbConfig>::parse();
let storage_cache_policy =
read_storage_cache_policy(config.client.storage_cache_policy.clone());
let common_config = CommonStoreConfig {
max_concurrent_queries: config.client.max_concurrent_queries,
max_stream_queries: config.client.max_stream_queries,
cache_size: config.client.cache_size,
storage_cache_policy,
};
let storage_cache_policy =
read_storage_cache_policy(config.client.storage_cache_policy.clone());
let path_buf = config.client.storage.as_path().to_path_buf();
let path_with_guard = PathWithGuard::new(path_buf);
// The tests are run in single threaded mode, therefore we need
Expand Down
6 changes: 3 additions & 3 deletions linera-indexer/lib/src/scylla_db.rs
Original file line number Diff line number Diff line change
Expand Up @@ -36,13 +36,13 @@ pub type ScyllaDbRunner = Runner<ScyllaDbStore, ScyllaDbConfig>;
impl ScyllaDbRunner {
pub async fn load() -> Result<Self, IndexerError> {
let config = <IndexerConfig<ScyllaDbConfig> as clap::Parser>::parse();
let storage_cache_policy =
read_storage_cache_policy(config.client.storage_cache_policy.clone());
let common_config = CommonStoreConfig {
max_concurrent_queries: config.client.max_concurrent_queries,
max_stream_queries: config.client.max_stream_queries,
cache_size: config.client.cache_size,
storage_cache_policy,
};
let storage_cache_policy =
read_storage_cache_policy(config.client.storage_cache_policy.clone());
let namespace = config.client.table.clone();
let root_key = &[];
let store_config = ScyllaDbStoreConfig::new(config.client.uri.clone(), common_config);
Expand Down
2 changes: 1 addition & 1 deletion linera-views/src/backends/dynamo_db.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1168,7 +1168,7 @@ impl DynamoDbStoreConfig {
};
DynamoDbStoreConfig {
inner_config,
cache_size: common_config.cache_size,
storage_cache_policy: common_config.storage_cache_policy,
}
}
}
Expand Down
3 changes: 1 addition & 2 deletions linera-views/src/backends/lru_caching.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,6 @@ use std::{
};

use linked_hash_map::LinkedHashMap;
#[cfg(with_metrics)]
use {linera_base::prometheus_util::register_int_counter_vec, prometheus::IntCounterVec};

use crate::{
batch::{Batch, WriteOperation},
Expand All @@ -26,6 +24,7 @@ use crate::{
#[cfg(with_testing)]
use crate::{memory::MemoryStore, store::TestKeyValueStore};

#[cfg(with_metrics)]
mod metrics {
use std::sync::LazyLock;

Expand Down
2 changes: 1 addition & 1 deletion linera-views/src/backends/rocks_db.rs
Original file line number Diff line number Diff line change
Expand Up @@ -620,7 +620,7 @@ impl RocksDbStoreConfig {
};
RocksDbStoreConfig {
inner_config,
cache_size: common_config.cache_size,
storage_cache_policy: common_config.storage_cache_policy,
}
}
}
11 changes: 8 additions & 3 deletions linera-views/src/backends/scylla_db.rs
Original file line number Diff line number Diff line change
Expand Up @@ -872,8 +872,13 @@ impl TestKeyValueStore for JournalingKeyValueStore<ScyllaDbStoreInternal> {

/// The `ScyllaDbStore` composed type with metrics
#[cfg(with_metrics)]
pub type ScyllaDbStore =
MeteredStore<CachingStore<MeteredStore<ValueSplittingStore<MeteredStore<JournalingKeyValueStore<ScyllaDbStoreInternal>>>>>>;
pub type ScyllaDbStore = MeteredStore<
CachingStore<
MeteredStore<
ValueSplittingStore<MeteredStore<JournalingKeyValueStore<ScyllaDbStoreInternal>>>,
>,
>,
>;

/// The `ScyllaDbStore` composed type
#[cfg(not(with_metrics))]
Expand All @@ -892,7 +897,7 @@ impl ScyllaDbStoreConfig {
};
ScyllaDbStoreConfig {
inner_config,
cache_size: common_config.cache_size,
storage_cache_policy: common_config.storage_cache_policy,
}
}
}
Expand Down
32 changes: 32 additions & 0 deletions linera-views/src/test_utils/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -590,6 +590,38 @@ pub async fn run_lru_related_test2<S: LocalRestrictedKeyValueStore>(store: &S) {
}
}

/// Writing many entries and then fetching them all back through a single
/// `read_multi_values_bytes` call can expose backend limits on large reads;
/// this exercises that path end to end.
pub async fn big_read_multi_values<C: LocalKeyValueStore>(
    config: C::Config,
    value_size: usize,
    n_entries: usize,
) {
    let mut rng = make_deterministic_rng();
    let namespace = generate_test_namespace();
    let root_key = &[];
    // Start from a fresh namespace so earlier runs cannot interfere.
    let store = C::recreate_and_connect(&config, &namespace, root_key)
        .await
        .unwrap();
    let key_prefix = vec![42, 54];
    let mut batch = Batch::new();
    let mut expected_keys = Vec::new();
    let mut expected_values = Vec::new();
    for index in 0..n_entries {
        // Each key is the shared prefix followed by the BCS encoding of its index.
        let mut key = key_prefix.clone();
        bcs::serialize_into(&mut key, &index).unwrap();
        let value = get_random_byte_vector(&mut rng, &[], value_size);
        batch.put_key_value_bytes(key.clone(), value.clone());
        expected_keys.push(key);
        expected_values.push(Some(value));
    }
    store.write_batch(batch).await.unwrap();
    // Reconnect so the read below cannot be served from a cache warmed by the write.
    let store = C::connect(&config, &namespace, root_key).await.unwrap();
    let read_back = store.read_multi_values_bytes(expected_keys).await.unwrap();
    assert_eq!(expected_values, read_back);
}

/// This test is especially challenging for ScyllaDB.
/// In its default settings, Scylla has a limitation to 10000 tombstones.
/// A tombstone is an indication that the data has been deleted. That
Expand Down
5 changes: 3 additions & 2 deletions linera-views/tests/store_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,9 @@ use linera_views::{
random::make_deterministic_rng,
store::TestKeyValueStore as _,
test_utils::{
get_random_test_scenarios, run_big_write_read, run_lru_related_test1,
run_lru_related_test2, run_reads, run_writes_from_blank, run_writes_from_state,
big_read_multi_values, get_random_test_scenarios, run_big_write_read,
run_lru_related_test1, run_lru_related_test2, run_reads, run_writes_from_blank,
run_writes_from_state,
},
value_splitting::create_value_splitting_memory_store,
};
Expand Down

0 comments on commit 18cfadf

Please sign in to comment.