From c745cb25ce32f5452d56d2068b0bfe3b4d6d5249 Mon Sep 17 00:00:00 2001 From: MartinquaXD Date: Thu, 5 Dec 2024 08:29:08 +0000 Subject: [PATCH 01/34] Reduce dependencies of trace call detector for use in driver --- crates/autopilot/src/run.rs | 8 ++-- crates/orderbook/src/run.rs | 8 ++-- crates/shared/src/bad_token/trace_call.rs | 58 +++++++++++++++-------- 3 files changed, 45 insertions(+), 29 deletions(-) diff --git a/crates/autopilot/src/run.rs b/crates/autopilot/src/run.rs index 59abed8253..17669c209d 100644 --- a/crates/autopilot/src/run.rs +++ b/crates/autopilot/src/run.rs @@ -274,16 +274,16 @@ pub async fn run(args: Arguments) { let trace_call_detector = args.tracing_node_url.as_ref().map(|tracing_node_url| { CachingDetector::new( - Box::new(TraceCallDetector { - web3: shared::ethrpc::web3( + Box::new(TraceCallDetector::new( + shared::ethrpc::web3( &args.shared.ethrpc, &http_factory, tracing_node_url, "trace", ), + eth.contracts().settlement().address(), finder, - settlement_contract: eth.contracts().settlement().address(), - }), + )), args.shared.token_quality_cache_expiry, args.shared.token_quality_cache_prefetch_time, ) diff --git a/crates/orderbook/src/run.rs b/crates/orderbook/src/run.rs index 16c27567b0..6922a6e77f 100644 --- a/crates/orderbook/src/run.rs +++ b/crates/orderbook/src/run.rs @@ -211,16 +211,16 @@ pub async fn run(args: Arguments) { let trace_call_detector = args.tracing_node_url.as_ref().map(|tracing_node_url| { CachingDetector::new( - Box::new(TraceCallDetector { - web3: shared::ethrpc::web3( + Box::new(TraceCallDetector::new( + shared::ethrpc::web3( &args.shared.ethrpc, &http_factory, tracing_node_url, "trace", ), + settlement_contract.address(), finder, - settlement_contract: settlement_contract.address(), - }), + )), args.shared.token_quality_cache_expiry, args.shared.token_quality_cache_prefetch_time, ) diff --git a/crates/shared/src/bad_token/trace_call.rs b/crates/shared/src/bad_token/trace_call.rs index 00da8feb3c..2cb3662526 100644 --- a/crates/shared/src/bad_token/trace_call.rs +++ b/crates/shared/src/bad_token/trace_call.rs @@ -26,9 +26,8 @@ use { /// - transfer into the settlement contract or back out fails /// - a transfer loses total balance pub struct TraceCallDetector { - pub web3: Web3, - pub finder: Arc, - pub settlement_contract: H160, + inner: TraceCallDetectorRaw, + finder: Arc, } #[async_trait::async_trait] @@ -41,7 +40,14 @@ impl BadTokenDetecting for TraceCallDetector { } impl TraceCallDetector { - pub async fn detect_impl(&self, token: H160) -> Result { + pub fn new(web3: Web3, settlement: H160, finder: Arc) -> Self { + Self { + inner: TraceCallDetectorRaw::new(web3, settlement), + finder, + } + } + + async fn detect_impl(&self, token: H160) -> Result { // Arbitrary amount that is large enough that small relative fees should be // visible. const MIN_AMOUNT: u64 = 100_000; @@ -72,7 +78,29 @@ impl TraceCallDetector { ))) } }; + self.inner.test_transfer(take_from, token, amount).await + } +} + +pub struct TraceCallDetectorRaw { + pub web3: Web3, + pub settlement_contract: H160, +} + +impl TraceCallDetectorRaw { + pub fn new(web3: Web3, settlement: H160) -> Self { + Self { + web3, + settlement_contract: settlement, + } + } + pub async fn test_transfer( + &self, + take_from: H160, + token: H160, + amount: U256, + ) -> Result { // We transfer the full available amount of the token from the amm pool into the // settlement contract and then to an arbitrary address. 
// Note that gas use can depend on the recipient because for the standard @@ -465,14 +493,14 @@ mod tests { }, ]; - let result = TraceCallDetector::handle_response(traces, 1.into(), H160::zero()).unwrap(); + let result = TraceCallDetectorRaw::handle_response(traces, 1.into(), H160::zero()).unwrap(); let expected = TokenQuality::Good; assert_eq!(result, expected); } #[test] fn arbitrary_recipient_() { - println!("{:?}", TraceCallDetector::arbitrary_recipient()); + println!("{:?}", TraceCallDetectorRaw::arbitrary_recipient()); } // cargo test -p shared mainnet_tokens -- --nocapture --ignored @@ -706,11 +734,7 @@ mod tests { ), ], }); - let token_cache = TraceCallDetector { - web3, - finder, - settlement_contract: settlement.address(), - }; + let token_cache = TraceCallDetector::new(web3, settlement.address(), finder); println!("testing good tokens"); for &token in base_tokens { @@ -744,11 +768,7 @@ mod tests { settlement_contract: settlement.address(), proposers: vec![univ3], }); - let token_cache = super::TraceCallDetector { - web3, - finder, - settlement_contract: settlement.address(), - }; + let token_cache = TraceCallDetector::new(web3, settlement.address(), finder); let result = token_cache.detect(testlib::tokens::USDC).await; dbg!(&result); @@ -870,11 +890,7 @@ mod tests { proposers: vec![solver_token_finder], settlement_contract: settlement.address(), }); - let token_cache = TraceCallDetector { - web3, - finder, - settlement_contract: settlement.address(), - }; + let token_cache = TraceCallDetector::new(web3, settlement.address(), finder); for token in tokens { let result = token_cache.detect(token).await; From 4353d3aaa1c6da737fff989349ed87382ba2c60e Mon Sep 17 00:00:00 2001 From: MartinquaXD Date: Thu, 5 Dec 2024 08:47:38 +0000 Subject: [PATCH 02/34] Allow executing pre-interactions before hand --- .../driver/src/domain/competition/auction.rs | 6 ++++- crates/shared/src/bad_token/trace_call.rs | 22 +++++++++++++++---- 2 files changed, 23 insertions(+), 5 deletions(-) diff --git a/crates/driver/src/domain/competition/auction.rs b/crates/driver/src/domain/competition/auction.rs index 59cddff203..33eb64a393 100644 --- a/crates/driver/src/domain/competition/auction.rs +++ b/crates/driver/src/domain/competition/auction.rs @@ -14,7 +14,10 @@ use { futures::future::{join_all, BoxFuture, FutureExt, Shared}, itertools::Itertools, model::{order::OrderKind, signature::Signature}, - shared::signature_validator::{Contracts, SignatureValidating}, + shared::{ + bad_token::trace_call::TraceCallDetectorRaw, + signature_validator::{Contracts, SignatureValidating}, + }, std::{ collections::{HashMap, HashSet}, sync::{Arc, Mutex}, @@ -135,6 +138,7 @@ struct Inner { /// `order_priority_strategies` from the driver's config. 
order_sorting_strategies: Vec>, signature_validator: Arc, + bad_token_detector: Arc, } type BalanceGroup = (order::Trader, eth::TokenAddress, order::SellTokenBalance); diff --git a/crates/shared/src/bad_token/trace_call.rs b/crates/shared/src/bad_token/trace_call.rs index 2cb3662526..ce000a95e2 100644 --- a/crates/shared/src/bad_token/trace_call.rs +++ b/crates/shared/src/bad_token/trace_call.rs @@ -9,12 +9,13 @@ use { transaction::TransactionBuilder, PrivateKey, }, + model::interaction::InteractionData, primitive_types::{H160, U256}, std::{cmp, sync::Arc}, web3::{ error::TransportError, signing::keccak256, - types::{BlockTrace, CallRequest, Res}, + types::{BlockTrace, Bytes, CallRequest, Res}, }, }; @@ -78,7 +79,9 @@ impl TraceCallDetector { ))) } }; - self.inner.test_transfer(take_from, token, amount).await + self.inner + .test_transfer(take_from, token, amount, Vec::new()) + .await } } @@ -100,13 +103,23 @@ impl TraceCallDetectorRaw { take_from: H160, token: H160, amount: U256, + pre_interactions: Vec, ) -> Result { + let mut request: Vec<_> = pre_interactions + .iter() + .map(|i| CallRequest { + to: Some(i.target), + value: Some(i.value), + data: Some(Bytes(i.call_data.clone())), + ..Default::default() + }) + .collect(); // We transfer the full available amount of the token from the amm pool into the // settlement contract and then to an arbitrary address. // Note that gas use can depend on the recipient because for the standard // implementation sending to an address that does not have any balance // yet (implicitly 0) causes an allocation. - let request = self.create_trace_request(token, amount, take_from); + request.append(&mut self.create_trace_request(token, amount, take_from)); let traces = match trace_many::trace_many(request, &self.web3).await { Ok(result) => result, Err(e) => { @@ -132,7 +145,8 @@ impl TraceCallDetectorRaw { return Err(e).context("trace_many"); } }; - Self::handle_response(&traces, amount, take_from) + let relevant_traces = &traces[pre_interactions.len()..]; + Self::handle_response(&relevant_traces, amount, take_from) } // For the out transfer we use an arbitrary address without balance to detect From d506d2e1c5ab386860d72819b565972771b32c22 Mon Sep 17 00:00:00 2001 From: MartinquaXD Date: Fri, 6 Dec 2024 11:43:32 +0000 Subject: [PATCH 03/34] TBC --- Cargo.lock | 1 + Cargo.toml | 1 + crates/driver/Cargo.toml | 1 + .../driver/src/domain/competition/auction.rs | 21 +- .../src/domain/competition/bad_tokens.rs | 220 ++++++++++++++++++ crates/driver/src/domain/competition/mod.rs | 2 + crates/driver/src/infra/api/mod.rs | 3 + crates/shared/Cargo.toml | 2 +- crates/shared/src/bad_token/trace_call.rs | 7 +- 9 files changed, 250 insertions(+), 8 deletions(-) create mode 100644 crates/driver/src/domain/competition/bad_tokens.rs diff --git a/Cargo.lock b/Cargo.lock index 17ca562be0..9dd3a9ba1d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1800,6 +1800,7 @@ dependencies = [ "clap", "contracts", "cow-amm", + "dashmap", "derive_more 1.0.0", "ethabi", "ethcontract", diff --git a/Cargo.toml b/Cargo.toml index d25c30318f..f2ce7391f6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,6 +10,7 @@ bigdecimal = "0.3" cached = { version = "0.49.3", default-features = false } chrono = { version = "0.4.38", default-features = false } clap = { version = "4.5.6", features = ["derive", "env"] } +dashmap = "6.1.0" derivative = "2.2.0" derive_more = { version = "1.0.0", features = ["full"] } ethcontract = { version = "0.25.7", default-features = false, features = ["aws-kms"] } diff --git 
a/crates/driver/Cargo.toml b/crates/driver/Cargo.toml index 35dd748162..0675c6a046 100644 --- a/crates/driver/Cargo.toml +++ b/crates/driver/Cargo.toml @@ -24,6 +24,7 @@ axum = { workspace = true } bigdecimal = { workspace = true } chrono = { workspace = true, features = ["clock"], default-features = false } cow-amm = { path = "../cow-amm" } +dashmap = { workspace = true } derive_more = { workspace = true } ethabi = "18.0" ethereum-types = { workspace = true } diff --git a/crates/driver/src/domain/competition/auction.rs b/crates/driver/src/domain/competition/auction.rs index 33eb64a393..5c2165c930 100644 --- a/crates/driver/src/domain/competition/auction.rs +++ b/crates/driver/src/domain/competition/auction.rs @@ -138,7 +138,7 @@ struct Inner { /// `order_priority_strategies` from the driver's config. order_sorting_strategies: Vec>, signature_validator: Arc, - bad_token_detector: Arc, + bad_token_detector: TraceCallDetectorRaw, } type BalanceGroup = (order::Trader, eth::TokenAddress, order::SellTokenBalance); @@ -182,15 +182,18 @@ impl AuctionProcessor { let mut orders = auction.orders.clone(); let solver = *solver; let order_comparators = lock.order_sorting_strategies.clone(); + let bad_tokens = lock.bad_token_detector.clone(); // Use spawn_blocking() because a lot of CPU bound computations are happening // and we don't want to block the runtime for too long. let fut = tokio::task::spawn_blocking(move || { let start = std::time::Instant::now(); - orders.extend(rt.block_on(Self::cow_amm_orders(ð, &tokens, &cow_amms, signature_validator.as_ref()))); + let cow_amm_orders = rt.block_on(Self::cow_amm_orders(ð, &tokens, &cow_amms, signature_validator.as_ref())); + orders.extend(cow_amm_orders); sorting::sort_orders(&mut orders, &tokens, &solver, &order_comparators); - let mut balances = - rt.block_on(async { Self::fetch_balances(ð, &orders).await }); + // TODO add fn to filter bad tokens + let mut balances = rt.block_on(Self::fetch_balances(ð, &orders)); + rt.block_on(Self::filter_bad_tokens(&mut orders, &bad_tokens)); Self::filter_orders(&mut balances, &mut orders); tracing::debug!(auction_id = new_id.0, time =? start.elapsed(), "auction preprocessing done"); orders @@ -211,6 +214,12 @@ impl AuctionProcessor { fut } + async fn filter_bad_tokens(orders: &mut Vec, detector: &TraceCallDetectorRaw) { + // only run detection on sell tokens because we can't fake the balance + // for buy tokens? also this bad token detector needs to be + // shared acro + } + /// Removes orders that cannot be filled due to missing funds of the owner. 
fn filter_orders(balances: &mut Balances, orders: &mut Vec) { // The auction that we receive from the `autopilot` assumes that there @@ -483,6 +492,10 @@ impl AuctionProcessor { Self(Arc::new(Mutex::new(Inner { auction: Id(0), fut: futures::future::pending().boxed().shared(), + bad_token_detector: TraceCallDetectorRaw::new( + eth.web3().clone(), + eth.contracts().settlement().address(), + ), eth, order_sorting_strategies, signature_validator, diff --git a/crates/driver/src/domain/competition/bad_tokens.rs b/crates/driver/src/domain/competition/bad_tokens.rs new file mode 100644 index 0000000000..e6bf6e62ad --- /dev/null +++ b/crates/driver/src/domain/competition/bad_tokens.rs @@ -0,0 +1,220 @@ +use { + crate::domain::eth, + dashmap::{DashMap, Entry}, + model::interaction::InteractionData, + shared::bad_token::{trace_call::TraceCallDetectorRaw, TokenQuality}, + std::{ + collections::HashMap, + fmt, + sync::Arc, + time::{Duration, Instant}, + }, +}; + +// TODO better comments +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub enum Quality { + /// Solver is likely to produce working solutions when computing + /// routes for this token. + Supported, + /// Solver will likely produce failing solutions when computing + /// routes for this token. This can have many reasons: + /// * fees on transfer + /// * token enforces max transfer amount + /// * trader is deny listed + /// * bugs in the solidity compiler make it incompatible with the settlement + /// contract - see + /// * probably tons of other reasons + Unsupported, +} + +// TODO: better name (it only looks up stuff) +#[derive(Default)] +pub struct Detector { + /// manually configured list of supported and unsupported tokens. Only + /// tokens that get detected incorrectly by the automatic detectors get + /// listed here and therefore have a higher precedence. + hardcoded: HashMap, + /// cache which is shared and updated by multiple bad token detection + /// mechanisms + dynamic: Vec, +} + +impl Detector { + /// Returns which of the passed in tokens should be considered unsupported. + pub fn supported_tokens(&self, mut tokens: Vec) -> Vec { + let now = Instant::now(); + + tokens.retain(|token| { + if let Some(entry) = self.hardcoded.get(token) { + return *entry == Quality::Supported; + } + + for cache in &self.dynamic { + if let Some(quality) = cache.get_quality(*token, now) { + return quality == Quality::Supported; + } + } + + // token quality is unknown so we assume it's good + true + }); + + // now it only contains good tokens + tokens + } + + /// Creates a new [`Detector`] with a configured list of token + /// qualities. + pub fn new(config: HashMap) -> Self { + Self { + hardcoded: config, + dynamic: Default::default(), + } + } + + /// Registers an externally managed [`Cache`] to read the quality + /// of tokens from. + pub fn register_cache(&mut self, cache: Cache) -> &mut Self { + self.dynamic.push(cache); + self + } +} + +impl fmt::Debug for Detector { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("Detector") + .field("hardcoded", &self.hardcoded) + .field("dynamic", &format_args!("Vec")) + .finish() + } +} + +/// Cache keeping track of whether or not a token is considered supported or +/// not. Internally reference counted for cheap clones and easy sharing. +/// Stores a map instead of a set to not recompute the quality of good tokens +/// over and over. +/// Evicts cached value after a configurable period of time. 
+#[derive(Clone)] +struct Cache { + cache: Arc>, + /// entries older than this get ignored and evicted + max_age: Duration, + /// evicts entries when the cache grows beyond this size + max_size: usize, +} + +struct CacheEntry { + /// when the decision on the token quality was made + timestamp: std::time::Instant, + /// whether the token is supported or not + quality: Quality, +} + +impl Cache { + /// Creates a new instance which evicts cached values after a period of + /// time. + pub fn new(max_age: Duration, max_size: usize) -> Self { + Self { + max_age, + max_size, + cache: Default::default(), + } + } + + /// Updates whether or not a token should be considered supported. + pub fn update_tokens(&self, updates: impl IntoIterator) { + let now = Instant::now(); + for (token, quality) in updates { + self.cache.insert( + token, + CacheEntry { + quality, + timestamp: now, + }, + ); + } + + if self.cache.len() > self.max_size { + // this could still leave us with more than max_size entries but it at least + // guarantees that the cache does not grow beyond the actual working set which + // is enough for now + self.cache + .retain(|_, value| now.duration_since(value.timestamp) > self.max_age); + } + } + + /// Returns the quality of the token. If the cached value is older than the + /// `max_age` it gets ignored and the token evicted. + pub fn get_quality(&self, token: eth::Address, now: Instant) -> Option { + let Entry::Occupied(entry) = self.cache.entry(token) else { + return None; + }; + + let value = entry.get(); + if now.duration_since(value.timestamp) > self.max_age { + entry.remove(); + return None; + } + + Some(value.quality) + } +} + +/// Detects bad a token's quality with simulations using `trace_callMany`. +struct SimulationDetector { + cache: Cache, + detector: Arc, +} + +impl SimulationDetector { + pub fn new(detector: Arc) -> Self { + Self { + detector, + cache: Default::default(), + } + } + + pub async fn determine_token_quality( + &self, + token: eth::Address, + holder: eth::Address, + amount: eth::U256, + pre_interactions: &[InteractionData], + ) { + if self.cache.get_quality(token, Instant::now()).is_some() { + return; + } + + match self + .detector + .test_transfer(holder.0, token.0, amount, pre_interactions) + .await + { + Err(err) => { + tracing::debug!(?err, "failed to determine token quality"); + } + Ok(TokenQuality::Good) => self.cache.update_tokens([(token, Quality::Supported)]), + Ok(TokenQuality::Bad { reason }) => { + tracing::debug!(reason, "cache token as unsupported"); + self.cache.update_tokens([(token, Quality::Unsupported)]); + } + } + } +} + +/// Keeps track of how often tokens are associated with reverting solutions +/// to detect unsupported tokens based on heuristics. Tokens that are +/// often part of reverting solutions are likely to be unsupported. +struct MetricsDetector { + cache: Cache, + metrics: Arc, +} + +impl MetricsDetector { + /// Updates metrics on how often each token is associated with a failing + /// settlement. 
+ pub fn record_failed_settlement(&self, tokens: impl IntoIterator) {} +} + +struct Metrics {} diff --git a/crates/driver/src/domain/competition/mod.rs b/crates/driver/src/domain/competition/mod.rs index dd3aa06073..9db4f2e036 100644 --- a/crates/driver/src/domain/competition/mod.rs +++ b/crates/driver/src/domain/competition/mod.rs @@ -28,6 +28,7 @@ use { }; pub mod auction; +pub mod bad_tokens; pub mod order; pub mod solution; mod sorting; @@ -52,6 +53,7 @@ pub struct Competition { pub mempools: Mempools, /// Cached solutions with the most recent solutions at the front. pub settlements: Mutex>, + pub bad_tokens: bad_tokens::Detector, } impl Competition { diff --git a/crates/driver/src/infra/api/mod.rs b/crates/driver/src/infra/api/mod.rs index 9517b858d1..b9559fd384 100644 --- a/crates/driver/src/infra/api/mod.rs +++ b/crates/driver/src/infra/api/mod.rs @@ -69,6 +69,9 @@ impl Api { let router = routes::solve(router); let router = routes::reveal(router); let router = routes::settle(router); + + // each solver needs to get its own bad_tokens::Detector + let router = router.with_state(State(Arc::new(Inner { eth: self.eth.clone(), solver: solver.clone(), diff --git a/crates/shared/Cargo.toml b/crates/shared/Cargo.toml index f86b75cfd3..492b6bfb6e 100644 --- a/crates/shared/Cargo.toml +++ b/crates/shared/Cargo.toml @@ -19,7 +19,7 @@ chain = { path = "../chain" } chrono = { workspace = true, features = ["clock"] } clap = { workspace = true } contracts = { path = "../contracts" } -dashmap = "6.1.0" +dashmap = { workspace = true } database = { path = "../database" } derive_more = { workspace = true } ttl_cache = "0.5" diff --git a/crates/shared/src/bad_token/trace_call.rs b/crates/shared/src/bad_token/trace_call.rs index ce000a95e2..4d2e0a7530 100644 --- a/crates/shared/src/bad_token/trace_call.rs +++ b/crates/shared/src/bad_token/trace_call.rs @@ -80,11 +80,12 @@ impl TraceCallDetector { } }; self.inner - .test_transfer(take_from, token, amount, Vec::new()) + .test_transfer(take_from, token, amount, &[]) .await } } +#[derive(Debug, Clone)] pub struct TraceCallDetectorRaw { pub web3: Web3, pub settlement_contract: H160, @@ -103,7 +104,7 @@ impl TraceCallDetectorRaw { take_from: H160, token: H160, amount: U256, - pre_interactions: Vec, + pre_interactions: &[InteractionData], ) -> Result { let mut request: Vec<_> = pre_interactions .iter() @@ -146,7 +147,7 @@ impl TraceCallDetectorRaw { } }; let relevant_traces = &traces[pre_interactions.len()..]; - Self::handle_response(&relevant_traces, amount, take_from) + Self::handle_response(relevant_traces, amount, take_from) } // For the out transfer we use an arbitrary address without balance to detect From 5e5241a9397d2e7a643bcd22139f79f9c5810b02 Mon Sep 17 00:00:00 2001 From: MartinquaXD Date: Tue, 10 Dec 2024 14:19:23 +0000 Subject: [PATCH 04/34] TBC --- .../driver/src/domain/competition/auction.rs | 12 +- .../src/domain/competition/bad_tokens.rs | 239 ++++++++++-------- crates/driver/src/domain/competition/mod.rs | 5 + .../src/domain/competition/order/mod.rs | 2 +- crates/driver/src/infra/api/mod.rs | 8 +- 5 files changed, 147 insertions(+), 119 deletions(-) diff --git a/crates/driver/src/domain/competition/auction.rs b/crates/driver/src/domain/competition/auction.rs index 5c2165c930..9b83bf6e19 100644 --- a/crates/driver/src/domain/competition/auction.rs +++ b/crates/driver/src/domain/competition/auction.rs @@ -1,5 +1,5 @@ use { - super::{order, Order}, + super::{bad_tokens, order, Order}, crate::{ domain::{ competition::{self, auction, sorting}, @@ 
-138,7 +138,6 @@ struct Inner { /// `order_priority_strategies` from the driver's config. order_sorting_strategies: Vec>, signature_validator: Arc, - bad_token_detector: TraceCallDetectorRaw, } type BalanceGroup = (order::Trader, eth::TokenAddress, order::SellTokenBalance); @@ -182,7 +181,6 @@ impl AuctionProcessor { let mut orders = auction.orders.clone(); let solver = *solver; let order_comparators = lock.order_sorting_strategies.clone(); - let bad_tokens = lock.bad_token_detector.clone(); // Use spawn_blocking() because a lot of CPU bound computations are happening // and we don't want to block the runtime for too long. @@ -191,9 +189,7 @@ impl AuctionProcessor { let cow_amm_orders = rt.block_on(Self::cow_amm_orders(ð, &tokens, &cow_amms, signature_validator.as_ref())); orders.extend(cow_amm_orders); sorting::sort_orders(&mut orders, &tokens, &solver, &order_comparators); - // TODO add fn to filter bad tokens let mut balances = rt.block_on(Self::fetch_balances(ð, &orders)); - rt.block_on(Self::filter_bad_tokens(&mut orders, &bad_tokens)); Self::filter_orders(&mut balances, &mut orders); tracing::debug!(auction_id = new_id.0, time =? start.elapsed(), "auction preprocessing done"); orders @@ -214,12 +210,6 @@ impl AuctionProcessor { fut } - async fn filter_bad_tokens(orders: &mut Vec, detector: &TraceCallDetectorRaw) { - // only run detection on sell tokens because we can't fake the balance - // for buy tokens? also this bad token detector needs to be - // shared acro - } - /// Removes orders that cannot be filled due to missing funds of the owner. fn filter_orders(balances: &mut Balances, orders: &mut Vec) { // The auction that we receive from the `autopilot` assumes that there diff --git a/crates/driver/src/domain/competition/bad_tokens.rs b/crates/driver/src/domain/competition/bad_tokens.rs index e6bf6e62ad..04608ee12c 100644 --- a/crates/driver/src/domain/competition/bad_tokens.rs +++ b/crates/driver/src/domain/competition/bad_tokens.rs @@ -1,6 +1,11 @@ use { - crate::domain::eth, - dashmap::{DashMap, Entry}, + super::Order, + crate::{ + domain::{self, eth}, + infra, + }, + anyhow::Result, + dashmap::{DashMap, Entry, OccupiedEntry, VacantEntry}, model::interaction::InteractionData, shared::bad_token::{trace_call::TraceCallDetectorRaw, TokenQuality}, std::{ @@ -34,50 +39,110 @@ pub struct Detector { /// manually configured list of supported and unsupported tokens. Only /// tokens that get detected incorrectly by the automatic detectors get /// listed here and therefore have a higher precedence. - hardcoded: HashMap, + hardcoded: HashMap, /// cache which is shared and updated by multiple bad token detection /// mechanisms - dynamic: Vec, + cache: Cache, + simulation_detector: Option, + metrics: Option, } impl Detector { - /// Returns which of the passed in tokens should be considered unsupported. 
- pub fn supported_tokens(&self, mut tokens: Vec) -> Vec { - let now = Instant::now(); + pub fn with_config(mut self, config: HashMap) -> Self { + self.hardcoded = config; + self + } - tokens.retain(|token| { - if let Some(entry) = self.hardcoded.get(token) { - return *entry == Quality::Supported; - } + pub fn with_simulation_detector(mut self, eth: &infra::Ethereum) -> Self { + let detector = + TraceCallDetectorRaw::new(eth.web3().clone(), eth.contracts().settlement().address()); + self.simulation_detector = Some(detector); + self + } - for cache in &self.dynamic { - if let Some(quality) = cache.get_quality(*token, now) { - return quality == Quality::Supported; - } - } + pub fn with_heuristic_detector(mut self) -> Self { + self.metrics = Some(Default::default()); + self + } + + pub fn filter_unsupported_orders(&self, mut orders: Vec) -> Vec { + let now = Instant::now(); + + // group by sell tokens? + // future calling `determine_sell_token_quality()` for all of orders - // token quality is unknown so we assume it's good - true + orders.retain(|o| { + [o.sell.token, o.buy.token].iter().all(|token| { + self.get_token_quality(*token, now) + .is_none_or(|q| q == Quality::Supported) + }) }); - // now it only contains good tokens - tokens + self.cache.evict_outdated_entries(); + + orders } - /// Creates a new [`Detector`] with a configured list of token - /// qualities. - pub fn new(config: HashMap) -> Self { - Self { - hardcoded: config, - dynamic: Default::default(), + fn get_token_quality(&self, token: eth::TokenAddress, now: Instant) -> Option { + if let Some(quality) = self.hardcoded.get(&token) { + return Some(*quality); + } + + if let Some(quality) = self.cache.get_quality(token, now) { + return Some(quality); } + + if let Some(metrics) = &self.metrics { + return metrics.get_quality(token); + } + + None } - /// Registers an externally managed [`Cache`] to read the quality - /// of tokens from. - pub fn register_cache(&mut self, cache: Cache) -> &mut Self { - self.dynamic.push(cache); - self + pub async fn determine_sell_token_quality( + &self, + detector: &TraceCallDetectorRaw, + order: &Order, + now: Instant, + ) -> Option { + if let Some(quality) = self.cache.get_quality(order.sell.token, now) { + return Some(quality); + } + + let token = order.sell.token; + let pre_interactions: Vec<_> = order + .pre_interactions + .iter() + .map(|i| InteractionData { + target: i.target.0, + value: i.value.0, + call_data: i.call_data.0.clone(), + }) + .collect(); + + match detector + .test_transfer( + order.trader().0 .0, + token.0 .0, + order.sell.amount.0, + &pre_interactions, + ) + .await + { + Err(err) => { + tracing::debug!(?err, "failed to determine token quality"); + None + } + Ok(TokenQuality::Good) => { + self.cache.update_quality(token, Quality::Supported, now); + Some(Quality::Supported) + } + Ok(TokenQuality::Bad { reason }) => { + tracing::debug!(reason, "cache token as unsupported"); + self.cache.update_quality(token, Quality::Unsupported, now); + Some(Quality::Unsupported) + } + } } } @@ -95,9 +160,8 @@ impl fmt::Debug for Detector { /// Stores a map instead of a set to not recompute the quality of good tokens /// over and over. /// Evicts cached value after a configurable period of time. 
-#[derive(Clone)] -struct Cache { - cache: Arc>, +pub struct Cache { + cache: DashMap, /// entries older than this get ignored and evicted max_age: Duration, /// evicts entries when the cache grows beyond this size @@ -106,11 +170,17 @@ struct Cache { struct CacheEntry { /// when the decision on the token quality was made - timestamp: std::time::Instant, + timestamp: Instant, /// whether the token is supported or not quality: Quality, } +impl Default for Cache { + fn default() -> Self { + Self::new(Duration::from_secs(60 * 10), 1000) + } +} + impl Cache { /// Creates a new instance which evicts cached values after a period of /// time. @@ -123,30 +193,38 @@ impl Cache { } /// Updates whether or not a token should be considered supported. - pub fn update_tokens(&self, updates: impl IntoIterator) { - let now = Instant::now(); - for (token, quality) in updates { - self.cache.insert( - token, - CacheEntry { + pub fn update_quality(&self, token: eth::TokenAddress, quality: Quality, now: Instant) { + match self.cache.entry(token) { + Entry::Occupied(mut o) => { + let value = o.get_mut(); + if now.duration_since(value.timestamp) > self.max_age + || quality == Quality::Unsupported + { + // Only update the value if the cached value is outdated by now or + // if the new value is "Unsupported". This means on conflicting updates + // we err on the conservative side and assume a token is unsupported. + value.quality = quality; + } + value.timestamp = now; + } + Entry::Vacant(v) => { + v.insert(CacheEntry { quality, timestamp: now, - }, - ); + }); + } } + } - if self.cache.len() > self.max_size { - // this could still leave us with more than max_size entries but it at least - // guarantees that the cache does not grow beyond the actual working set which - // is enough for now - self.cache - .retain(|_, value| now.duration_since(value.timestamp) > self.max_age); - } + fn evict_outdated_entries(&self) { + let now = Instant::now(); + self.cache + .retain(|_, value| now.duration_since(value.timestamp) > self.max_age); } /// Returns the quality of the token. If the cached value is older than the /// `max_age` it gets ignored and the token evicted. - pub fn get_quality(&self, token: eth::Address, now: Instant) -> Option { + pub fn get_quality(&self, token: eth::TokenAddress, now: Instant) -> Option { let Entry::Occupied(entry) = self.cache.entry(token) else { return None; }; @@ -161,60 +239,11 @@ impl Cache { } } -/// Detects bad a token's quality with simulations using `trace_callMany`. 
-struct SimulationDetector { - cache: Cache, - detector: Arc, -} - -impl SimulationDetector { - pub fn new(detector: Arc) -> Self { - Self { - detector, - cache: Default::default(), - } - } - - pub async fn determine_token_quality( - &self, - token: eth::Address, - holder: eth::Address, - amount: eth::U256, - pre_interactions: &[InteractionData], - ) { - if self.cache.get_quality(token, Instant::now()).is_some() { - return; - } +#[derive(Default)] +struct Metrics {} - match self - .detector - .test_transfer(holder.0, token.0, amount, pre_interactions) - .await - { - Err(err) => { - tracing::debug!(?err, "failed to determine token quality"); - } - Ok(TokenQuality::Good) => self.cache.update_tokens([(token, Quality::Supported)]), - Ok(TokenQuality::Bad { reason }) => { - tracing::debug!(reason, "cache token as unsupported"); - self.cache.update_tokens([(token, Quality::Unsupported)]); - } - } +impl Metrics { + fn get_quality(&self, token: eth::TokenAddress) -> Option { + todo!() } } - -/// Keeps track of how often tokens are associated with reverting solutions -/// to detect unsupported tokens based on heuristics. Tokens that are -/// often part of reverting solutions are likely to be unsupported. -struct MetricsDetector { - cache: Cache, - metrics: Arc, -} - -impl MetricsDetector { - /// Updates metrics on how often each token is associated with a failing - /// settlement. - pub fn record_failed_settlement(&self, tokens: impl IntoIterator) {} -} - -struct Metrics {} diff --git a/crates/driver/src/domain/competition/mod.rs b/crates/driver/src/domain/competition/mod.rs index 9db4f2e036..ddebeff32b 100644 --- a/crates/driver/src/domain/competition/mod.rs +++ b/crates/driver/src/domain/competition/mod.rs @@ -53,12 +53,16 @@ pub struct Competition { pub mempools: Mempools, /// Cached solutions with the most recent solutions at the front. pub settlements: Mutex>, + // TODO: single type should have the feature set to simulate pub bad_tokens: bad_tokens::Detector, } impl Competition { /// Solve an auction as part of this competition. pub async fn solve(&self, auction: &Auction) -> Result, Error> { + // 1. simulate sell tokens + // 2. filter bad tokens + let liquidity = match self.solver.liquidity() { solver::Liquidity::Fetch => { self.liquidity @@ -135,6 +139,7 @@ impl Competition { // don't report on errors coming from solution merging Err(_err) if id.solutions().len() > 1 => None, Err(err) => { + // TODO update metrics of bad token detection observe::encoding_failed(self.solver.name(), &id, &err); notify::encoding_failed(&self.solver, auction.id(), &id, &err); None diff --git a/crates/driver/src/domain/competition/order/mod.rs b/crates/driver/src/domain/competition/order/mod.rs index 5b82b1d758..f833b48de4 100644 --- a/crates/driver/src/domain/competition/order/mod.rs +++ b/crates/driver/src/domain/competition/order/mod.rs @@ -372,7 +372,7 @@ impl From for BuyTokenBalance { /// The address which placed the order. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Into)] -pub struct Trader(eth::Address); +pub struct Trader(pub eth::Address); /// A just-in-time order. JIT orders are added at solving time by the solver to /// generate a more optimal solution for the auction. 
Very similar to a regular diff --git a/crates/driver/src/infra/api/mod.rs b/crates/driver/src/infra/api/mod.rs index b9559fd384..37375236ed 100644 --- a/crates/driver/src/infra/api/mod.rs +++ b/crates/driver/src/infra/api/mod.rs @@ -1,6 +1,6 @@ use { crate::{ - domain::{self, Mempools}, + domain::{self, competition::bad_tokens, Mempools}, infra::{ self, config::file::OrderPriorityStrategy, @@ -52,6 +52,8 @@ impl Api { let tokens = tokens::Fetcher::new(&self.eth); let pre_processor = domain::competition::AuctionProcessor::new(&self.eth, order_priority_strategies); + let trace_detector = bad_tokens::SimulationDetector::new(&self.eth); + let miep = bad_tokens::Detector::default().register_cache(trace_detector.cache().clone()); // Add the metrics and healthz endpoints. app = routes::metrics(app); @@ -70,7 +72,8 @@ impl Api { let router = routes::reveal(router); let router = routes::settle(router); - // each solver needs to get its own bad_tokens::Detector + let miep = + bad_tokens::Detector::default().register_cache(trace_detector.cache().clone()); let router = router.with_state(State(Arc::new(Inner { eth: self.eth.clone(), @@ -82,6 +85,7 @@ impl Api { simulator: self.simulator.clone(), mempools: self.mempools.clone(), settlements: Default::default(), + bad_tokens: miep, }, liquidity: self.liquidity.clone(), tokens: tokens.clone(), From d44598201f5e89177b089758c00e7a94bc1c9ac2 Mon Sep 17 00:00:00 2001 From: Mateo Date: Thu, 12 Dec 2024 15:05:11 +0100 Subject: [PATCH 05/34] wip --- .../driver/src/domain/competition/auction.rs | 39 +++++-- .../src/domain/competition/bad_tokens.rs | 105 +++++++++++++----- crates/driver/src/domain/competition/mod.rs | 19 +++- .../src/domain/competition/order/mod.rs | 4 +- crates/driver/src/infra/api/mod.rs | 46 ++++++-- .../driver/src/infra/api/routes/solve/mod.rs | 2 +- crates/driver/src/infra/config/file/load.rs | 5 + crates/driver/src/infra/config/file/mod.rs | 60 +++++++++- crates/driver/src/infra/config/mod.rs | 3 +- crates/driver/src/infra/solver/mod.rs | 24 +++- crates/driver/src/run.rs | 1 + 11 files changed, 249 insertions(+), 59 deletions(-) diff --git a/crates/driver/src/domain/competition/auction.rs b/crates/driver/src/domain/competition/auction.rs index 9b83bf6e19..3c957aed46 100644 --- a/crates/driver/src/domain/competition/auction.rs +++ b/crates/driver/src/domain/competition/auction.rs @@ -1,5 +1,5 @@ use { - super::{bad_tokens, order, Order}, + super::{order, Order}, crate::{ domain::{ competition::{self, auction, sorting}, @@ -14,12 +14,11 @@ use { futures::future::{join_all, BoxFuture, FutureExt, Shared}, itertools::Itertools, model::{order::OrderKind, signature::Signature}, - shared::{ - bad_token::trace_call::TraceCallDetectorRaw, - signature_validator::{Contracts, SignatureValidating}, - }, + shared::signature_validator::{Contracts, SignatureValidating}, std::{ collections::{HashMap, HashSet}, + future::Future, + pin::Pin, sync::{Arc, Mutex}, }, thiserror::Error, @@ -76,6 +75,32 @@ impl Auction { }) } + /// Filter the orders according to the funcion `filter_fn` provided. + /// The function `filter_fn` must return an `Option`, with `None` + /// indicating that the order has to be filtered. + /// This is needed due to the lack of `filter()` async closure support. + pub async fn filter_orders(&mut self, filter_fn: F) + where + F: Fn( + competition::Order, + ) -> Pin> + Send>> + + Send, + { + let futures = self + .orders + .drain(..) 
+ .map(|order| { + let filter_fn = &filter_fn; + async move { filter_fn(order).await } + }) + .collect::>(); + self.orders = futures::future::join_all(futures) + .await + .into_iter() + .flatten() + .collect(); + } + /// [`None`] if this auction applies to a quote. See /// [`crate::domain::quote`]. pub fn id(&self) -> Option { @@ -482,10 +507,6 @@ impl AuctionProcessor { Self(Arc::new(Mutex::new(Inner { auction: Id(0), fut: futures::future::pending().boxed().shared(), - bad_token_detector: TraceCallDetectorRaw::new( - eth.web3().clone(), - eth.contracts().settlement().address(), - ), eth, order_sorting_strategies, signature_validator, diff --git a/crates/driver/src/domain/competition/bad_tokens.rs b/crates/driver/src/domain/competition/bad_tokens.rs index 04608ee12c..34abea5d27 100644 --- a/crates/driver/src/domain/competition/bad_tokens.rs +++ b/crates/driver/src/domain/competition/bad_tokens.rs @@ -1,11 +1,11 @@ use { super::Order, crate::{ - domain::{self, eth}, - infra, + domain::{competition::Auction, eth}, + infra::{self, config::file::BadTokenDetectionCache}, }, - anyhow::Result, - dashmap::{DashMap, Entry, OccupiedEntry, VacantEntry}, + dashmap::{DashMap, Entry}, + futures::FutureExt, model::interaction::InteractionData, shared::bad_token::{trace_call::TraceCallDetectorRaw, TokenQuality}, std::{ @@ -42,7 +42,7 @@ pub struct Detector { hardcoded: HashMap, /// cache which is shared and updated by multiple bad token detection /// mechanisms - cache: Cache, + cache: Arc, simulation_detector: Option, metrics: Option, } @@ -65,22 +65,73 @@ impl Detector { self } - pub fn filter_unsupported_orders(&self, mut orders: Vec) -> Vec { + pub fn with_cache(mut self, cache: Arc) -> Self { + self.cache = cache; + self + } + + /// Filter all unsupported orders within an Auction + pub async fn filter_unsupported_orders_in_auction( + self: Arc, + mut auction: Auction, + ) -> Auction { let now = Instant::now(); - // group by sell tokens? - // future calling `determine_sell_token_quality()` for all of orders + let self_clone = self.clone(); + + auction + .filter_orders(move |order| { + { + let self_clone = self_clone.clone(); + async move { + // We first check the token quality: + // - If both tokens are supported, the order does is not filtered + // - If any of the order tokens is unsupported, the order is filtered + // - If the token quality cannot be determined: call + // `determine_sell_token_quality()` to execute the simulation + // All of these operations are done within the same `.map()` in order to + // avoid iterating twice over the orders vector + let tokens_quality = [order.sell.token, order.buy.token] + .iter() + .map(|token| self_clone.get_token_quality(*token, now)) + .collect::>(); + let both_tokens_supported = tokens_quality + .iter() + .all(|token_quality| *token_quality == Some(Quality::Supported)); + let any_token_unsupported = tokens_quality + .iter() + .any(|token_quality| *token_quality == Some(Quality::Unsupported)); + + // @TODO: remove the bad tokens from the tokens field? 
+ + // If both tokens are supported, the order does is not filtered + if both_tokens_supported { + return Some(order); + } - orders.retain(|o| { - [o.sell.token, o.buy.token].iter().all(|token| { - self.get_token_quality(*token, now) - .is_none_or(|q| q == Quality::Supported) + // If any of the order tokens is unsupported, the order is filtered + if any_token_unsupported { + return None; + } + + // If the token quality cannot be determined: call + // `determine_sell_token_quality()` to execute the simulation + if self_clone.determine_sell_token_quality(&order, now).await + == Some(Quality::Supported) + { + return Some(order); + } + + None + } + } + .boxed() }) - }); + .await; self.cache.evict_outdated_entries(); - orders + auction } fn get_token_quality(&self, token: eth::TokenAddress, now: Instant) -> Option { @@ -99,12 +150,11 @@ impl Detector { None } - pub async fn determine_sell_token_quality( - &self, - detector: &TraceCallDetectorRaw, - order: &Order, - now: Instant, - ) -> Option { + async fn determine_sell_token_quality(&self, order: &Order, now: Instant) -> Option { + let Some(detector) = self.simulation_detector.as_ref() else { + return None; + }; + if let Some(quality) = self.cache.get_quality(order.sell.token, now) { return Some(quality); } @@ -122,7 +172,7 @@ impl Detector { match detector .test_transfer( - order.trader().0 .0, + eth::Address::from(order.trader()).0, token.0 .0, order.sell.amount.0, &pre_interactions, @@ -164,8 +214,6 @@ pub struct Cache { cache: DashMap, /// entries older than this get ignored and evicted max_age: Duration, - /// evicts entries when the cache grows beyond this size - max_size: usize, } struct CacheEntry { @@ -177,18 +225,17 @@ struct CacheEntry { impl Default for Cache { fn default() -> Self { - Self::new(Duration::from_secs(60 * 10), 1000) + Self::new(&BadTokenDetectionCache::default()) } } impl Cache { /// Creates a new instance which evicts cached values after a period of /// time. - pub fn new(max_age: Duration, max_size: usize) -> Self { + pub fn new(bad_token_detection_cache: &BadTokenDetectionCache) -> Self { Self { - max_age, - max_size, - cache: Default::default(), + max_age: bad_token_detection_cache.max_age, + cache: DashMap::with_capacity(bad_token_detection_cache.max_size), } } @@ -243,7 +290,7 @@ impl Cache { struct Metrics {} impl Metrics { - fn get_quality(&self, token: eth::TokenAddress) -> Option { + fn get_quality(&self, _token: eth::TokenAddress) -> Option { todo!() } } diff --git a/crates/driver/src/domain/competition/mod.rs b/crates/driver/src/domain/competition/mod.rs index ddebeff32b..8b30fcd0ba 100644 --- a/crates/driver/src/domain/competition/mod.rs +++ b/crates/driver/src/domain/competition/mod.rs @@ -22,7 +22,7 @@ use { std::{ cmp::Reverse, collections::{HashMap, HashSet, VecDeque}, - sync::Mutex, + sync::{Arc, Mutex}, }, tap::TapFallible, }; @@ -53,15 +53,22 @@ pub struct Competition { pub mempools: Mempools, /// Cached solutions with the most recent solutions at the front. pub settlements: Mutex>, - // TODO: single type should have the feature set to simulate - pub bad_tokens: bad_tokens::Detector, + pub bad_tokens: Option>, } impl Competition { /// Solve an auction as part of this competition. - pub async fn solve(&self, auction: &Auction) -> Result, Error> { - // 1. simulate sell tokens - // 2. 
filter bad tokens + pub async fn solve(&self, mut auction: Auction) -> Result, Error> { + // filter orders in auction which contain a bad tokens if the bad token + // detection is configured + if let Some(bad_tokens) = self.bad_tokens.as_ref() { + auction = bad_tokens + .clone() + .filter_unsupported_orders_in_auction(auction) + .await; + } + // Enforces Auction not to be consumed by making it as a shared reference + let auction = &auction; let liquidity = match self.solver.liquidity() { solver::Liquidity::Fetch => { diff --git a/crates/driver/src/domain/competition/order/mod.rs b/crates/driver/src/domain/competition/order/mod.rs index f833b48de4..3d2857be84 100644 --- a/crates/driver/src/domain/competition/order/mod.rs +++ b/crates/driver/src/domain/competition/order/mod.rs @@ -371,8 +371,8 @@ impl From for BuyTokenBalance { } /// The address which placed the order. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Into)] -pub struct Trader(pub eth::Address); +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Into, From)] +pub struct Trader(eth::Address); /// A just-in-time order. JIT orders are added at solving time by the solver to /// generate a more optimal solution for the auction. Very similar to a regular diff --git a/crates/driver/src/infra/api/mod.rs b/crates/driver/src/infra/api/mod.rs index 37375236ed..11145c2f84 100644 --- a/crates/driver/src/infra/api/mod.rs +++ b/crates/driver/src/infra/api/mod.rs @@ -1,9 +1,17 @@ use { crate::{ - domain::{self, competition::bad_tokens, Mempools}, + domain::{ + self, + competition::{ + bad_tokens, + bad_tokens::{Cache, Quality}, + }, + eth, + Mempools, + }, infra::{ self, - config::file::OrderPriorityStrategy, + config::file::{BadTokenDetectionCache, OrderPriorityStrategy}, liquidity, solver::{Solver, Timeouts}, tokens, @@ -13,7 +21,7 @@ use { }, error::Error, futures::Future, - std::{net::SocketAddr, sync::Arc}, + std::{collections::HashMap, net::SocketAddr, sync::Arc}, tokio::sync::oneshot, }; @@ -32,6 +40,7 @@ pub struct Api { /// If this channel is specified, the bound address will be sent to it. This /// allows the driver to bind to 0.0.0.0:0 during testing. pub addr_sender: Option>, + pub bad_token_detection_cache: BadTokenDetectionCache, } impl Api { @@ -52,8 +61,9 @@ impl Api { let tokens = tokens::Fetcher::new(&self.eth); let pre_processor = domain::competition::AuctionProcessor::new(&self.eth, order_priority_strategies); - let trace_detector = bad_tokens::SimulationDetector::new(&self.eth); - let miep = bad_tokens::Detector::default().register_cache(trace_detector.cache().clone()); + + // TODO: create a struct wrapper to handle this under the hood + let trace_detector = Arc::new(Cache::new(&self.bad_token_detection_cache)); // Add the metrics and healthz endpoints. app = routes::metrics(app); @@ -72,8 +82,28 @@ impl Api { let router = routes::reveal(router); let router = routes::settle(router); - let miep = - bad_tokens::Detector::default().register_cache(trace_detector.cache().clone()); + let bad_tokens = solver.bad_token_detector().and_then(|bad_token_detector| { + // maybe make this as part of the bad token builder? + let config = bad_token_detector + .unsupported_tokens + .iter() + .map(|token| (eth::TokenAddress::from(*token), Quality::Unsupported)) + .chain( + bad_token_detector + .allowed_tokens + .iter() + .map(|token| (eth::TokenAddress::from(*token), Quality::Supported)), + ) + .collect::>(); + + Some(Arc::new( + // maybe do proper builder pattern here? 
+ bad_tokens::Detector::default() + .with_simulation_detector(&self.eth.clone()) + .with_config(config) + .with_cache(trace_detector.clone()), + )) + }); let router = router.with_state(State(Arc::new(Inner { eth: self.eth.clone(), @@ -85,7 +115,7 @@ impl Api { simulator: self.simulator.clone(), mempools: self.mempools.clone(), settlements: Default::default(), - bad_tokens: miep, + bad_tokens, }, liquidity: self.liquidity.clone(), tokens: tokens.clone(), diff --git a/crates/driver/src/infra/api/routes/solve/mod.rs b/crates/driver/src/infra/api/routes/solve/mod.rs index eccafb8ead..072de4bceb 100644 --- a/crates/driver/src/infra/api/routes/solve/mod.rs +++ b/crates/driver/src/infra/api/routes/solve/mod.rs @@ -36,7 +36,7 @@ async fn route( .pre_processor() .prioritize(auction, &competition.solver.account().address()) .await; - let result = competition.solve(&auction).await; + let result = competition.solve(auction).await; observe::solved(state.solver().name(), &result); Ok(axum::Json(dto::Solved::new(result?, &competition.solver))) }; diff --git a/crates/driver/src/infra/config/file/load.rs b/crates/driver/src/infra/config/file/load.rs index b235106448..ed22caa278 100644 --- a/crates/driver/src/infra/config/file/load.rs +++ b/crates/driver/src/infra/config/file/load.rs @@ -94,6 +94,10 @@ pub async fn load(chain: chain::Id, path: &Path) -> infra::Config { solver_native_token: config.manage_native_token.to_domain(), quote_tx_origin: config.quote_tx_origin.map(eth::Address), response_size_limit_max_bytes: config.response_size_limit_max_bytes, + bad_token_detector: config + .bad_token_detector + .filter(|bad_token_detector| bad_token_detector.enabled) + .map(Into::into), } })) .await, @@ -340,5 +344,6 @@ pub async fn load(chain: chain::Id, path: &Path) -> infra::Config { gas_estimator: config.gas_estimator, order_priority_strategies: config.order_priority_strategies, archive_node_url: config.archive_node_url, + bad_token_detection_cache: config.bad_token_detection_cache, } } diff --git a/crates/driver/src/infra/config/file/mod.rs b/crates/driver/src/infra/config/file/mod.rs index b461883636..eac55ce173 100644 --- a/crates/driver/src/infra/config/file/mod.rs +++ b/crates/driver/src/infra/config/file/mod.rs @@ -5,7 +5,10 @@ use { serde::{Deserialize, Serialize}, serde_with::serde_as, solver::solver::Arn, - std::{collections::HashMap, time::Duration}, + std::{ + collections::{HashMap, HashSet}, + time::Duration, + }, }; mod load; @@ -65,6 +68,9 @@ struct Config { /// Archive node URL used to index CoW AMM archive_node_url: Option, + + /// Cache configuration for the bad tokend detection + bad_token_detection_cache: BadTokenDetectionCache, } #[serde_as] @@ -260,6 +266,10 @@ struct SolverConfig { /// Maximum HTTP response size the driver will accept in bytes. 
#[serde(default = "default_response_size_limit_max_bytes")] response_size_limit_max_bytes: usize, + + /// Bad token detector configuration + #[serde(default)] + bad_token_detector: Option, } #[derive(Clone, Copy, Debug, Default, Deserialize, PartialEq, Serialize)] @@ -651,6 +661,54 @@ fn default_order_priority_strategies() -> Vec { ] } +/// Bad token detector configuration +#[derive(Clone, Debug, Deserialize)] +#[serde(rename_all = "kebab-case", deny_unknown_fields)] +pub struct BadTokenDetector { + /// Whether or not the bad token detector is enabled + #[serde(default = "bool::default")] + pub enabled: bool, + /// List of tokens which will be directly allowed, no detection will be run + /// on them + #[serde(default = "HashSet::new")] + pub allowed_tokens: HashSet, + /// List of tokens which will be directly unsupported + #[serde(default = "HashSet::new")] + pub unsupported_tokens_tokens: HashSet, +} + fn default_max_order_age() -> Option { Some(Duration::from_secs(300)) } + +/// Cache configuration for the bad token detection +#[derive(Clone, Debug, Deserialize)] +#[serde(rename_all = "kebab-case", deny_unknown_fields)] +pub struct BadTokenDetectionCache { + /// Entries older than `max_age` will get ignored and evicted + #[serde( + with = "humantime_serde", + default = "default_bad_token_detection_cache_max_age" + )] + pub max_age: Duration, + /// Maximum number of tokens the cache can have + #[serde(default = "default_bad_token_detection_cache_max_size")] + pub max_size: usize, +} + +impl Default for BadTokenDetectionCache { + fn default() -> Self { + Self { + max_age: default_bad_token_detection_cache_max_age(), + max_size: default_bad_token_detection_cache_max_size(), + } + } +} + +fn default_bad_token_detection_cache_max_age() -> Duration { + Duration::from_secs(600) +} + +fn default_bad_token_detection_cache_max_size() -> usize { + 1000 +} diff --git a/crates/driver/src/infra/config/mod.rs b/crates/driver/src/infra/config/mod.rs index 94ef8821a5..8fdbcfde95 100644 --- a/crates/driver/src/infra/config/mod.rs +++ b/crates/driver/src/infra/config/mod.rs @@ -3,7 +3,7 @@ use { domain::eth, infra::{ blockchain, - config::file::{GasEstimatorType, OrderPriorityStrategy}, + config::file::{BadTokenDetectionCache, GasEstimatorType, OrderPriorityStrategy}, liquidity, mempool, simulator, @@ -28,4 +28,5 @@ pub struct Config { pub contracts: blockchain::contracts::Addresses, pub order_priority_strategies: Vec, pub archive_node_url: Option, + pub bad_token_detection_cache: BadTokenDetectionCache, } diff --git a/crates/driver/src/infra/solver/mod.rs b/crates/driver/src/infra/solver/mod.rs index ba7332dd65..357ac82d68 100644 --- a/crates/driver/src/infra/solver/mod.rs +++ b/crates/driver/src/infra/solver/mod.rs @@ -12,7 +12,7 @@ use { }, infra::{ blockchain::Ethereum, - config::file::FeeHandler, + config::{self, file::FeeHandler}, persistence::{Persistence, S3}, }, util, @@ -21,7 +21,7 @@ use { derive_more::{From, Into}, num::BigRational, reqwest::header::HeaderName, - std::collections::HashMap, + std::collections::{HashMap, HashSet}, tap::TapFallible, thiserror::Error, tracing::Instrument, @@ -123,6 +123,7 @@ pub struct Config { /// Which `tx.origin` is required to make quote verification pass. 
pub quote_tx_origin: Option, pub response_size_limit_max_bytes: usize, + pub bad_token_detector: Option, } impl Solver { @@ -151,6 +152,10 @@ impl Solver { }) } + pub fn bad_token_detector(&self) -> Option<&BadTokenDetector> { + self.config.bad_token_detector.as_ref() + } + pub fn persistence(&self) -> Persistence { self.persistence.clone() } @@ -277,6 +282,21 @@ pub enum SolutionMerging { Forbidden, } +#[derive(Debug, Clone)] +pub struct BadTokenDetector { + pub allowed_tokens: HashSet, + pub unsupported_tokens: HashSet, +} + +impl From for BadTokenDetector { + fn from(value: config::file::BadTokenDetector) -> Self { + Self { + allowed_tokens: value.allowed_tokens, + unsupported_tokens: value.unsupported_tokens_tokens, + } + } +} + #[derive(Debug, Error)] pub enum Error { #[error("HTTP error: {0:?}")] diff --git a/crates/driver/src/run.rs b/crates/driver/src/run.rs index 68dd25b9d5..177cc9fe71 100644 --- a/crates/driver/src/run.rs +++ b/crates/driver/src/run.rs @@ -69,6 +69,7 @@ async fn run_with(args: cli::Args, addr_sender: Option Date: Thu, 12 Dec 2024 15:22:45 +0100 Subject: [PATCH 06/34] fix --- crates/driver/src/domain/competition/bad_tokens.rs | 4 +--- crates/driver/src/infra/api/mod.rs | 6 +++--- crates/driver/src/infra/config/file/mod.rs | 1 + 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/crates/driver/src/domain/competition/bad_tokens.rs b/crates/driver/src/domain/competition/bad_tokens.rs index 34abea5d27..95f8d36f7c 100644 --- a/crates/driver/src/domain/competition/bad_tokens.rs +++ b/crates/driver/src/domain/competition/bad_tokens.rs @@ -151,9 +151,7 @@ impl Detector { } async fn determine_sell_token_quality(&self, order: &Order, now: Instant) -> Option { - let Some(detector) = self.simulation_detector.as_ref() else { - return None; - }; + let detector = self.simulation_detector.as_ref()?; if let Some(quality) = self.cache.get_quality(order.sell.token, now) { return Some(quality); diff --git a/crates/driver/src/infra/api/mod.rs b/crates/driver/src/infra/api/mod.rs index 11145c2f84..aefa013cd5 100644 --- a/crates/driver/src/infra/api/mod.rs +++ b/crates/driver/src/infra/api/mod.rs @@ -82,7 +82,7 @@ impl Api { let router = routes::reveal(router); let router = routes::settle(router); - let bad_tokens = solver.bad_token_detector().and_then(|bad_token_detector| { + let bad_tokens = solver.bad_token_detector().map(|bad_token_detector| { // maybe make this as part of the bad token builder? let config = bad_token_detector .unsupported_tokens @@ -96,13 +96,13 @@ impl Api { ) .collect::>(); - Some(Arc::new( + Arc::new( // maybe do proper builder pattern here? 
bad_tokens::Detector::default() .with_simulation_detector(&self.eth.clone()) .with_config(config) .with_cache(trace_detector.clone()), - )) + ) }); let router = router.with_state(State(Arc::new(Inner { diff --git a/crates/driver/src/infra/config/file/mod.rs b/crates/driver/src/infra/config/file/mod.rs index eac55ce173..c3e5dd2942 100644 --- a/crates/driver/src/infra/config/file/mod.rs +++ b/crates/driver/src/infra/config/file/mod.rs @@ -70,6 +70,7 @@ struct Config { archive_node_url: Option, /// Cache configuration for the bad tokend detection + #[serde(default)] bad_token_detection_cache: BadTokenDetectionCache, } From 9dce0be621a98e4507305498761e18c366529198 Mon Sep 17 00:00:00 2001 From: MartinquaXD Date: Mon, 16 Dec 2024 10:51:35 +0000 Subject: [PATCH 07/34] Refactor filtering logic to avoid allocations --- .../src/domain/competition/bad_tokens.rs | 56 ++++++------------- 1 file changed, 17 insertions(+), 39 deletions(-) diff --git a/crates/driver/src/domain/competition/bad_tokens.rs b/crates/driver/src/domain/competition/bad_tokens.rs index 95f8d36f7c..c4b16a2632 100644 --- a/crates/driver/src/domain/competition/bad_tokens.rs +++ b/crates/driver/src/domain/competition/bad_tokens.rs @@ -76,7 +76,6 @@ impl Detector { mut auction: Auction, ) -> Auction { let now = Instant::now(); - let self_clone = self.clone(); auction @@ -84,45 +83,24 @@ impl Detector { { let self_clone = self_clone.clone(); async move { - // We first check the token quality: - // - If both tokens are supported, the order does is not filtered - // - If any of the order tokens is unsupported, the order is filtered - // - If the token quality cannot be determined: call - // `determine_sell_token_quality()` to execute the simulation - // All of these operations are done within the same `.map()` in order to - // avoid iterating twice over the orders vector - let tokens_quality = [order.sell.token, order.buy.token] - .iter() - .map(|token| self_clone.get_token_quality(*token, now)) - .collect::>(); - let both_tokens_supported = tokens_quality - .iter() - .all(|token_quality| *token_quality == Some(Quality::Supported)); - let any_token_unsupported = tokens_quality - .iter() - .any(|token_quality| *token_quality == Some(Quality::Unsupported)); - - // @TODO: remove the bad tokens from the tokens field? 
- - // If both tokens are supported, the order does is not filtered - if both_tokens_supported { - return Some(order); - } - - // If any of the order tokens is unsupported, the order is filtered - if any_token_unsupported { - return None; - } - - // If the token quality cannot be determined: call - // `determine_sell_token_quality()` to execute the simulation - if self_clone.determine_sell_token_quality(&order, now).await - == Some(Quality::Supported) - { - return Some(order); + let sell = self_clone.get_token_quality(order.sell.token, now); + let buy = self_clone.get_token_quality(order.sell.token, now); + match (sell, buy) { + // both tokens supported => keep order + (Some(Quality::Supported), Some(Quality::Supported)) => Some(order), + // at least 1 token unsupported => drop order + (Some(Quality::Unsupported), _) | (_, Some(Quality::Unsupported)) => { + None + } + // sell token quality is unknown => keep order if token is supported + (None, _) => { + let quality = self_clone.determine_sell_token_quality(&order, now).await; + (quality == Some(Quality::Supported)).then_some(order) + }, + // buy token quality is unknown => keep order (because we can't + // determine quality and assume it's good) + (_, None) => Some(order) } - - None } } .boxed() From b1b174d7892454020a172e57663f7e5c30182e75 Mon Sep 17 00:00:00 2001 From: MartinquaXD Date: Mon, 16 Dec 2024 11:20:19 +0000 Subject: [PATCH 08/34] Remove filter helper function --- .../driver/src/domain/competition/auction.rs | 30 +--------- .../src/domain/competition/bad_tokens.rs | 56 +++++++++---------- 2 files changed, 28 insertions(+), 58 deletions(-) diff --git a/crates/driver/src/domain/competition/auction.rs b/crates/driver/src/domain/competition/auction.rs index 3c957aed46..f3c49b8abb 100644 --- a/crates/driver/src/domain/competition/auction.rs +++ b/crates/driver/src/domain/competition/auction.rs @@ -17,8 +17,6 @@ use { shared::signature_validator::{Contracts, SignatureValidating}, std::{ collections::{HashMap, HashSet}, - future::Future, - pin::Pin, sync::{Arc, Mutex}, }, thiserror::Error, @@ -32,7 +30,7 @@ pub struct Auction { /// See the [`Self::id`] method. id: Option, /// See the [`Self::orders`] method. - orders: Vec, + pub(crate) orders: Vec, /// The tokens that are used in the orders of this auction. tokens: Tokens, gas_price: eth::GasPrice, @@ -75,32 +73,6 @@ impl Auction { }) } - /// Filter the orders according to the funcion `filter_fn` provided. - /// The function `filter_fn` must return an `Option`, with `None` - /// indicating that the order has to be filtered. - /// This is needed due to the lack of `filter()` async closure support. - pub async fn filter_orders(&mut self, filter_fn: F) - where - F: Fn( - competition::Order, - ) -> Pin> + Send>> - + Send, - { - let futures = self - .orders - .drain(..) - .map(|order| { - let filter_fn = &filter_fn; - async move { filter_fn(order).await } - }) - .collect::>(); - self.orders = futures::future::join_all(futures) - .await - .into_iter() - .flatten() - .collect(); - } - /// [`None`] if this auction applies to a quote. See /// [`crate::domain::quote`]. 
pub fn id(&self) -> Option { diff --git a/crates/driver/src/domain/competition/bad_tokens.rs b/crates/driver/src/domain/competition/bad_tokens.rs index c4b16a2632..0c8fbc1a14 100644 --- a/crates/driver/src/domain/competition/bad_tokens.rs +++ b/crates/driver/src/domain/competition/bad_tokens.rs @@ -5,7 +5,7 @@ use { infra::{self, config::file::BadTokenDetectionCache}, }, dashmap::{DashMap, Entry}, - futures::FutureExt, + futures::StreamExt, model::interaction::InteractionData, shared::bad_token::{trace_call::TraceCallDetectorRaw, TokenQuality}, std::{ @@ -76,36 +76,34 @@ impl Detector { mut auction: Auction, ) -> Auction { let now = Instant::now(); - let self_clone = self.clone(); - auction - .filter_orders(move |order| { - { - let self_clone = self_clone.clone(); - async move { - let sell = self_clone.get_token_quality(order.sell.token, now); - let buy = self_clone.get_token_quality(order.sell.token, now); - match (sell, buy) { - // both tokens supported => keep order - (Some(Quality::Supported), Some(Quality::Supported)) => Some(order), - // at least 1 token unsupported => drop order - (Some(Quality::Unsupported), _) | (_, Some(Quality::Unsupported)) => { - None - } - // sell token quality is unknown => keep order if token is supported - (None, _) => { - let quality = self_clone.determine_sell_token_quality(&order, now).await; - (quality == Some(Quality::Supported)).then_some(order) - }, - // buy token quality is unknown => keep order (because we can't - // determine quality and assume it's good) - (_, None) => Some(order) - } - } + let filtered_orders = futures::stream::iter( + auction + .orders + .into_iter() + .zip(std::iter::repeat(self.clone())), + ) + .filter_map(move |(order, _self)| async move { + let sell = _self.get_token_quality(order.sell.token, now); + let buy = _self.get_token_quality(order.sell.token, now); + match (sell, buy) { + // both tokens supported => keep order + (Some(Quality::Supported), Some(Quality::Supported)) => Some(order), + // at least 1 token unsupported => drop order + (Some(Quality::Unsupported), _) | (_, Some(Quality::Unsupported)) => None, + // sell token quality is unknown => keep order if token is supported + (None, _) => { + let quality = _self.determine_sell_token_quality(&order, now).await; + (quality == Some(Quality::Supported)).then_some(order) } - .boxed() - }) - .await; + // buy token quality is unknown => keep order (because we can't + // determine quality and assume it's good) + (_, None) => Some(order), + } + }) + .collect::>() + .await; + auction.orders = filtered_orders; self.cache.evict_outdated_entries(); From 144521ed4f89974305c6c0fe3d5b44e05d6c6285 Mon Sep 17 00:00:00 2001 From: MartinquaXD Date: Mon, 16 Dec 2024 11:25:51 +0000 Subject: [PATCH 09/34] reference-count `Cache` internally for simpler API --- .../src/domain/competition/bad_tokens.rs | 26 +++++++++++-------- crates/driver/src/infra/api/mod.rs | 2 +- 2 files changed, 16 insertions(+), 12 deletions(-) diff --git a/crates/driver/src/domain/competition/bad_tokens.rs b/crates/driver/src/domain/competition/bad_tokens.rs index 0c8fbc1a14..6dadca708c 100644 --- a/crates/driver/src/domain/competition/bad_tokens.rs +++ b/crates/driver/src/domain/competition/bad_tokens.rs @@ -42,7 +42,7 @@ pub struct Detector { hardcoded: HashMap, /// cache which is shared and updated by multiple bad token detection /// mechanisms - cache: Arc, + cache: Cache, simulation_detector: Option, metrics: Option, } @@ -65,7 +65,7 @@ impl Detector { self } - pub fn with_cache(mut self, cache: Arc) -> Self { + 
pub fn with_cache(mut self, cache: Cache) -> Self { self.cache = cache; self } @@ -184,7 +184,10 @@ impl fmt::Debug for Detector { /// Stores a map instead of a set to not recompute the quality of good tokens /// over and over. /// Evicts cached value after a configurable period of time. -pub struct Cache { +#[derive(Clone)] +pub struct Cache(Arc); + +struct Inner { cache: DashMap, /// entries older than this get ignored and evicted max_age: Duration, @@ -207,18 +210,18 @@ impl Cache { /// Creates a new instance which evicts cached values after a period of /// time. pub fn new(bad_token_detection_cache: &BadTokenDetectionCache) -> Self { - Self { + Self(Arc::new(Inner { max_age: bad_token_detection_cache.max_age, cache: DashMap::with_capacity(bad_token_detection_cache.max_size), - } + })) } /// Updates whether or not a token should be considered supported. pub fn update_quality(&self, token: eth::TokenAddress, quality: Quality, now: Instant) { - match self.cache.entry(token) { + match self.0.cache.entry(token) { Entry::Occupied(mut o) => { let value = o.get_mut(); - if now.duration_since(value.timestamp) > self.max_age + if now.duration_since(value.timestamp) > self.0.max_age || quality == Quality::Unsupported { // Only update the value if the cached value is outdated by now or @@ -239,19 +242,20 @@ impl Cache { fn evict_outdated_entries(&self) { let now = Instant::now(); - self.cache - .retain(|_, value| now.duration_since(value.timestamp) > self.max_age); + self.0 + .cache + .retain(|_, value| now.duration_since(value.timestamp) > self.0.max_age); } /// Returns the quality of the token. If the cached value is older than the /// `max_age` it gets ignored and the token evicted. pub fn get_quality(&self, token: eth::TokenAddress, now: Instant) -> Option { - let Entry::Occupied(entry) = self.cache.entry(token) else { + let Entry::Occupied(entry) = self.0.cache.entry(token) else { return None; }; let value = entry.get(); - if now.duration_since(value.timestamp) > self.max_age { + if now.duration_since(value.timestamp) > self.0.max_age { entry.remove(); return None; } diff --git a/crates/driver/src/infra/api/mod.rs b/crates/driver/src/infra/api/mod.rs index aefa013cd5..4565c6be50 100644 --- a/crates/driver/src/infra/api/mod.rs +++ b/crates/driver/src/infra/api/mod.rs @@ -63,7 +63,7 @@ impl Api { domain::competition::AuctionProcessor::new(&self.eth, order_priority_strategies); // TODO: create a struct wrapper to handle this under the hood - let trace_detector = Arc::new(Cache::new(&self.bad_token_detection_cache)); + let trace_detector = Cache::new(&self.bad_token_detection_cache); // Add the metrics and healthz endpoints. app = routes::metrics(app); From 41c41ed759d7f008e0efd9fa851d44fc3265a849 Mon Sep 17 00:00:00 2001 From: MartinquaXD Date: Mon, 16 Dec 2024 11:27:43 +0000 Subject: [PATCH 10/34] Make some functions private --- crates/driver/src/domain/competition/bad_tokens.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/driver/src/domain/competition/bad_tokens.rs b/crates/driver/src/domain/competition/bad_tokens.rs index 6dadca708c..b885d10438 100644 --- a/crates/driver/src/domain/competition/bad_tokens.rs +++ b/crates/driver/src/domain/competition/bad_tokens.rs @@ -217,7 +217,7 @@ impl Cache { } /// Updates whether or not a token should be considered supported. 
- pub fn update_quality(&self, token: eth::TokenAddress, quality: Quality, now: Instant) { + fn update_quality(&self, token: eth::TokenAddress, quality: Quality, now: Instant) { match self.0.cache.entry(token) { Entry::Occupied(mut o) => { let value = o.get_mut(); @@ -249,7 +249,7 @@ impl Cache { /// Returns the quality of the token. If the cached value is older than the /// `max_age` it gets ignored and the token evicted. - pub fn get_quality(&self, token: eth::TokenAddress, now: Instant) -> Option { + fn get_quality(&self, token: eth::TokenAddress, now: Instant) -> Option { let Entry::Occupied(entry) = self.0.cache.entry(token) else { return None; }; From bda4d1c566c89a49df7e0b25d503eb3805ee7930 Mon Sep 17 00:00:00 2001 From: MartinquaXD Date: Mon, 16 Dec 2024 12:22:30 +0000 Subject: [PATCH 11/34] Simplify config logic --- .../src/domain/competition/bad_tokens.rs | 52 ++++++++----------- crates/driver/src/domain/competition/mod.rs | 17 +++--- crates/driver/src/infra/api/mod.rs | 42 ++++----------- crates/driver/src/infra/config/file/load.rs | 19 +++++-- crates/driver/src/infra/config/file/mod.rs | 24 +-------- crates/driver/src/infra/solver/mod.rs | 27 +++------- 6 files changed, 60 insertions(+), 121 deletions(-) diff --git a/crates/driver/src/domain/competition/bad_tokens.rs b/crates/driver/src/domain/competition/bad_tokens.rs index b885d10438..5d2184b38c 100644 --- a/crates/driver/src/domain/competition/bad_tokens.rs +++ b/crates/driver/src/domain/competition/bad_tokens.rs @@ -70,39 +70,31 @@ impl Detector { self } - /// Filter all unsupported orders within an Auction - pub async fn filter_unsupported_orders_in_auction( - self: Arc, - mut auction: Auction, - ) -> Auction { + /// Removes all unsupported orders from the auction. + pub async fn filter_unsupported_orders_in_auction(&self, mut auction: Auction) -> Auction { let now = Instant::now(); - let filtered_orders = futures::stream::iter( - auction - .orders - .into_iter() - .zip(std::iter::repeat(self.clone())), - ) - .filter_map(move |(order, _self)| async move { - let sell = _self.get_token_quality(order.sell.token, now); - let buy = _self.get_token_quality(order.sell.token, now); - match (sell, buy) { - // both tokens supported => keep order - (Some(Quality::Supported), Some(Quality::Supported)) => Some(order), - // at least 1 token unsupported => drop order - (Some(Quality::Unsupported), _) | (_, Some(Quality::Unsupported)) => None, - // sell token quality is unknown => keep order if token is supported - (None, _) => { - let quality = _self.determine_sell_token_quality(&order, now).await; - (quality == Some(Quality::Supported)).then_some(order) + let filtered_orders = futures::stream::iter(auction.orders.into_iter()) + .filter_map(move |order| async move { + let sell = self.get_token_quality(order.sell.token, now); + let buy = self.get_token_quality(order.sell.token, now); + match (sell, buy) { + // both tokens supported => keep order + (Some(Quality::Supported), Some(Quality::Supported)) => Some(order), + // at least 1 token unsupported => drop order + (Some(Quality::Unsupported), _) | (_, Some(Quality::Unsupported)) => None, + // sell token quality is unknown => keep order if token is supported + (None, _) => { + let quality = self.determine_sell_token_quality(&order, now).await; + (quality == Some(Quality::Supported)).then_some(order) + } + // buy token quality is unknown => keep order (because we can't + // determine quality and assume it's good) + (_, None) => Some(order), } - // buy token quality is unknown => keep order 
(because we can't - // determine quality and assume it's good) - (_, None) => Some(order), - } - }) - .collect::>() - .await; + }) + .collect::>() + .await; auction.orders = filtered_orders; self.cache.evict_outdated_entries(); diff --git a/crates/driver/src/domain/competition/mod.rs b/crates/driver/src/domain/competition/mod.rs index 8b30fcd0ba..2e79c6a100 100644 --- a/crates/driver/src/domain/competition/mod.rs +++ b/crates/driver/src/domain/competition/mod.rs @@ -53,21 +53,16 @@ pub struct Competition { pub mempools: Mempools, /// Cached solutions with the most recent solutions at the front. pub settlements: Mutex>, - pub bad_tokens: Option>, + pub bad_tokens: Arc, } impl Competition { /// Solve an auction as part of this competition. - pub async fn solve(&self, mut auction: Auction) -> Result, Error> { - // filter orders in auction which contain a bad tokens if the bad token - // detection is configured - if let Some(bad_tokens) = self.bad_tokens.as_ref() { - auction = bad_tokens - .clone() - .filter_unsupported_orders_in_auction(auction) - .await; - } - // Enforces Auction not to be consumed by making it as a shared reference + pub async fn solve(&self, auction: Auction) -> Result, Error> { + let auction = self + .bad_tokens + .filter_unsupported_orders_in_auction(auction) + .await; let auction = &auction; let liquidity = match self.solver.liquidity() { diff --git a/crates/driver/src/infra/api/mod.rs b/crates/driver/src/infra/api/mod.rs index 4565c6be50..2f41ec9f14 100644 --- a/crates/driver/src/infra/api/mod.rs +++ b/crates/driver/src/infra/api/mod.rs @@ -1,14 +1,6 @@ use { crate::{ - domain::{ - self, - competition::{ - bad_tokens, - bad_tokens::{Cache, Quality}, - }, - eth, - Mempools, - }, + domain::{self, competition::bad_tokens, Mempools}, infra::{ self, config::file::{BadTokenDetectionCache, OrderPriorityStrategy}, @@ -21,7 +13,7 @@ use { }, error::Error, futures::Future, - std::{collections::HashMap, net::SocketAddr, sync::Arc}, + std::{net::SocketAddr, sync::Arc}, tokio::sync::oneshot, }; @@ -63,7 +55,7 @@ impl Api { domain::competition::AuctionProcessor::new(&self.eth, order_priority_strategies); // TODO: create a struct wrapper to handle this under the hood - let trace_detector = Cache::new(&self.bad_token_detection_cache); + let trace_detector = bad_tokens::Cache::new(&self.bad_token_detection_cache); // Add the metrics and healthz endpoints. app = routes::metrics(app); @@ -82,28 +74,12 @@ impl Api { let router = routes::reveal(router); let router = routes::settle(router); - let bad_tokens = solver.bad_token_detector().map(|bad_token_detector| { - // maybe make this as part of the bad token builder? - let config = bad_token_detector - .unsupported_tokens - .iter() - .map(|token| (eth::TokenAddress::from(*token), Quality::Unsupported)) - .chain( - bad_token_detector - .allowed_tokens - .iter() - .map(|token| (eth::TokenAddress::from(*token), Quality::Supported)), - ) - .collect::>(); - - Arc::new( - // maybe do proper builder pattern here? 
- bad_tokens::Detector::default() - .with_simulation_detector(&self.eth.clone()) - .with_config(config) - .with_cache(trace_detector.clone()), - ) - }); + let bad_tokens = Arc::new( + bad_tokens::Detector::default() + .with_simulation_detector(&self.eth.clone()) + .with_config(solver.tokens_supported().clone()) + .with_cache(trace_detector.clone()), + ); let router = router.with_state(State(Arc::new(Inner { eth: self.eth.clone(), diff --git a/crates/driver/src/infra/config/file/load.rs b/crates/driver/src/infra/config/file/load.rs index ed22caa278..fe4ac4a87a 100644 --- a/crates/driver/src/infra/config/file/load.rs +++ b/crates/driver/src/infra/config/file/load.rs @@ -1,6 +1,6 @@ use { crate::{ - domain::eth, + domain::{competition::bad_tokens, eth}, infra::{ self, blockchain, @@ -94,10 +94,19 @@ pub async fn load(chain: chain::Id, path: &Path) -> infra::Config { solver_native_token: config.manage_native_token.to_domain(), quote_tx_origin: config.quote_tx_origin.map(eth::Address), response_size_limit_max_bytes: config.response_size_limit_max_bytes, - bad_token_detector: config - .bad_token_detector - .filter(|bad_token_detector| bad_token_detector.enabled) - .map(Into::into), + tokens_supported: config + .token_supported + .iter() + .map(|(token, supported)| { + ( + eth::TokenAddress(eth::ContractAddress(*token)), + match supported { + true => bad_tokens::Quality::Supported, + false => bad_tokens::Quality::Unsupported, + }, + ) + }) + .collect(), } })) .await, diff --git a/crates/driver/src/infra/config/file/mod.rs b/crates/driver/src/infra/config/file/mod.rs index c3e5dd2942..0a7a5b351d 100644 --- a/crates/driver/src/infra/config/file/mod.rs +++ b/crates/driver/src/infra/config/file/mod.rs @@ -5,10 +5,7 @@ use { serde::{Deserialize, Serialize}, serde_with::serde_as, solver::solver::Arn, - std::{ - collections::{HashMap, HashSet}, - time::Duration, - }, + std::{collections::HashMap, time::Duration}, }; mod load; @@ -268,9 +265,8 @@ struct SolverConfig { #[serde(default = "default_response_size_limit_max_bytes")] response_size_limit_max_bytes: usize, - /// Bad token detector configuration #[serde(default)] - bad_token_detector: Option, + token_supported: HashMap, } #[derive(Clone, Copy, Debug, Default, Deserialize, PartialEq, Serialize)] @@ -662,22 +658,6 @@ fn default_order_priority_strategies() -> Vec { ] } -/// Bad token detector configuration -#[derive(Clone, Debug, Deserialize)] -#[serde(rename_all = "kebab-case", deny_unknown_fields)] -pub struct BadTokenDetector { - /// Whether or not the bad token detector is enabled - #[serde(default = "bool::default")] - pub enabled: bool, - /// List of tokens which will be directly allowed, no detection will be run - /// on them - #[serde(default = "HashSet::new")] - pub allowed_tokens: HashSet, - /// List of tokens which will be directly unsupported - #[serde(default = "HashSet::new")] - pub unsupported_tokens_tokens: HashSet, -} - fn default_max_order_age() -> Option { Some(Duration::from_secs(300)) } diff --git a/crates/driver/src/infra/solver/mod.rs b/crates/driver/src/infra/solver/mod.rs index 357ac82d68..8656d48956 100644 --- a/crates/driver/src/infra/solver/mod.rs +++ b/crates/driver/src/infra/solver/mod.rs @@ -4,6 +4,7 @@ use { domain::{ competition::{ auction::{self, Auction}, + bad_tokens, solution::{self, Solution}, }, eth, @@ -12,7 +13,7 @@ use { }, infra::{ blockchain::Ethereum, - config::{self, file::FeeHandler}, + config::file::FeeHandler, persistence::{Persistence, S3}, }, util, @@ -21,7 +22,7 @@ use { derive_more::{From, Into}, 
num::BigRational, reqwest::header::HeaderName, - std::collections::{HashMap, HashSet}, + std::collections::HashMap, tap::TapFallible, thiserror::Error, tracing::Instrument, @@ -123,7 +124,8 @@ pub struct Config { /// Which `tx.origin` is required to make quote verification pass. pub quote_tx_origin: Option, pub response_size_limit_max_bytes: usize, - pub bad_token_detector: Option, + /// Whether a token is explicitly allow- or deny-listed. + pub tokens_supported: HashMap, } impl Solver { @@ -152,8 +154,8 @@ impl Solver { }) } - pub fn bad_token_detector(&self) -> Option<&BadTokenDetector> { - self.config.bad_token_detector.as_ref() + pub fn tokens_supported(&self) -> &HashMap { + &self.config.tokens_supported } pub fn persistence(&self) -> Persistence { @@ -282,21 +284,6 @@ pub enum SolutionMerging { Forbidden, } -#[derive(Debug, Clone)] -pub struct BadTokenDetector { - pub allowed_tokens: HashSet, - pub unsupported_tokens: HashSet, -} - -impl From for BadTokenDetector { - fn from(value: config::file::BadTokenDetector) -> Self { - Self { - allowed_tokens: value.allowed_tokens, - unsupported_tokens: value.unsupported_tokens_tokens, - } - } -} - #[derive(Debug, Error)] pub enum Error { #[error("HTTP error: {0:?}")] From 1600736bda208e37e2dbe07735ee16d933c7a8dd Mon Sep 17 00:00:00 2001 From: MartinquaXD Date: Mon, 16 Dec 2024 12:26:10 +0000 Subject: [PATCH 12/34] fixup comment --- crates/driver/src/infra/solver/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/driver/src/infra/solver/mod.rs b/crates/driver/src/infra/solver/mod.rs index 8656d48956..0815630190 100644 --- a/crates/driver/src/infra/solver/mod.rs +++ b/crates/driver/src/infra/solver/mod.rs @@ -124,7 +124,7 @@ pub struct Config { /// Which `tx.origin` is required to make quote verification pass. pub quote_tx_origin: Option, pub response_size_limit_max_bytes: usize, - /// Whether a token is explicitly allow- or deny-listed. + /// Tokens that are explicitly allow- or deny-listed. pub tokens_supported: HashMap, } From 5c560dc5b4461ea7743787297d3efd72f15de06e Mon Sep 17 00:00:00 2001 From: MartinquaXD Date: Tue, 17 Dec 2024 08:12:50 +0000 Subject: [PATCH 13/34] Rename and simplify --- .../src/domain/competition/bad_tokens.rs | 10 ++++----- crates/driver/src/infra/api/mod.rs | 7 +++--- crates/driver/src/infra/config/file/load.rs | 2 +- crates/driver/src/infra/config/file/mod.rs | 22 +++++++++---------- crates/driver/src/infra/config/mod.rs | 4 ++-- crates/driver/src/run.rs | 2 +- 6 files changed, 22 insertions(+), 25 deletions(-) diff --git a/crates/driver/src/domain/competition/bad_tokens.rs b/crates/driver/src/domain/competition/bad_tokens.rs index 5d2184b38c..50c49bcbc9 100644 --- a/crates/driver/src/domain/competition/bad_tokens.rs +++ b/crates/driver/src/domain/competition/bad_tokens.rs @@ -2,7 +2,7 @@ use { super::Order, crate::{ domain::{competition::Auction, eth}, - infra::{self, config::file::BadTokenDetectionCache}, + infra::{self, config::file::BadTokenDetection}, }, dashmap::{DashMap, Entry}, futures::StreamExt, @@ -194,17 +194,17 @@ struct CacheEntry { impl Default for Cache { fn default() -> Self { - Self::new(&BadTokenDetectionCache::default()) + Self::new(BadTokenDetection::default().max_age) } } impl Cache { /// Creates a new instance which evicts cached values after a period of /// time. 
- pub fn new(bad_token_detection_cache: &BadTokenDetectionCache) -> Self { + pub fn new(max_age: Duration) -> Self { Self(Arc::new(Inner { - max_age: bad_token_detection_cache.max_age, - cache: DashMap::with_capacity(bad_token_detection_cache.max_size), + max_age, + cache: DashMap::default(), })) } diff --git a/crates/driver/src/infra/api/mod.rs b/crates/driver/src/infra/api/mod.rs index 2f41ec9f14..7bc4fab13e 100644 --- a/crates/driver/src/infra/api/mod.rs +++ b/crates/driver/src/infra/api/mod.rs @@ -3,7 +3,7 @@ use { domain::{self, competition::bad_tokens, Mempools}, infra::{ self, - config::file::{BadTokenDetectionCache, OrderPriorityStrategy}, + config::file::{BadTokenDetection, OrderPriorityStrategy}, liquidity, solver::{Solver, Timeouts}, tokens, @@ -32,7 +32,7 @@ pub struct Api { /// If this channel is specified, the bound address will be sent to it. This /// allows the driver to bind to 0.0.0.0:0 during testing. pub addr_sender: Option>, - pub bad_token_detection_cache: BadTokenDetectionCache, + pub bad_token_detection: BadTokenDetection, } impl Api { @@ -54,8 +54,7 @@ impl Api { let pre_processor = domain::competition::AuctionProcessor::new(&self.eth, order_priority_strategies); - // TODO: create a struct wrapper to handle this under the hood - let trace_detector = bad_tokens::Cache::new(&self.bad_token_detection_cache); + let trace_detector = bad_tokens::Cache::new(self.bad_token_detection.max_age); // Add the metrics and healthz endpoints. app = routes::metrics(app); diff --git a/crates/driver/src/infra/config/file/load.rs b/crates/driver/src/infra/config/file/load.rs index fe4ac4a87a..bb64173d1a 100644 --- a/crates/driver/src/infra/config/file/load.rs +++ b/crates/driver/src/infra/config/file/load.rs @@ -353,6 +353,6 @@ pub async fn load(chain: chain::Id, path: &Path) -> infra::Config { gas_estimator: config.gas_estimator, order_priority_strategies: config.order_priority_strategies, archive_node_url: config.archive_node_url, - bad_token_detection_cache: config.bad_token_detection_cache, + bad_token_detection: config.bad_token_detection, } } diff --git a/crates/driver/src/infra/config/file/mod.rs b/crates/driver/src/infra/config/file/mod.rs index 0a7a5b351d..efe9492d2e 100644 --- a/crates/driver/src/infra/config/file/mod.rs +++ b/crates/driver/src/infra/config/file/mod.rs @@ -66,9 +66,10 @@ struct Config { /// Archive node URL used to index CoW AMM archive_node_url: Option, - /// Cache configuration for the bad tokend detection + /// Configuration options for automatically detecting unsupported + /// tokens. #[serde(default)] - bad_token_detection_cache: BadTokenDetectionCache, + bad_token_detection: BadTokenDetection, } #[serde_as] @@ -665,23 +666,24 @@ fn default_max_order_age() -> Option { /// Cache configuration for the bad token detection #[derive(Clone, Debug, Deserialize)] #[serde(rename_all = "kebab-case", deny_unknown_fields)] -pub struct BadTokenDetectionCache { +pub struct BadTokenDetection { /// Entries older than `max_age` will get ignored and evicted #[serde( with = "humantime_serde", default = "default_bad_token_detection_cache_max_age" )] pub max_age: Duration, - /// Maximum number of tokens the cache can have - #[serde(default = "default_bad_token_detection_cache_max_size")] - pub max_size: usize, + + /// RPC URL to be used for detecting unsupported tokens via simulations. + /// Requires support for `trace_callMany`. 
+ pub simulation_detection_rpc: Option, } -impl Default for BadTokenDetectionCache { +impl Default for BadTokenDetection { fn default() -> Self { Self { max_age: default_bad_token_detection_cache_max_age(), - max_size: default_bad_token_detection_cache_max_size(), + simulation_detection_rpc: None, } } } @@ -689,7 +691,3 @@ impl Default for BadTokenDetectionCache { fn default_bad_token_detection_cache_max_age() -> Duration { Duration::from_secs(600) } - -fn default_bad_token_detection_cache_max_size() -> usize { - 1000 -} diff --git a/crates/driver/src/infra/config/mod.rs b/crates/driver/src/infra/config/mod.rs index 8fdbcfde95..983a1f1df4 100644 --- a/crates/driver/src/infra/config/mod.rs +++ b/crates/driver/src/infra/config/mod.rs @@ -3,7 +3,7 @@ use { domain::eth, infra::{ blockchain, - config::file::{BadTokenDetectionCache, GasEstimatorType, OrderPriorityStrategy}, + config::file::{BadTokenDetection, GasEstimatorType, OrderPriorityStrategy}, liquidity, mempool, simulator, @@ -28,5 +28,5 @@ pub struct Config { pub contracts: blockchain::contracts::Addresses, pub order_priority_strategies: Vec, pub archive_node_url: Option, - pub bad_token_detection_cache: BadTokenDetectionCache, + pub bad_token_detection: BadTokenDetection, } diff --git a/crates/driver/src/run.rs b/crates/driver/src/run.rs index 177cc9fe71..df936a09aa 100644 --- a/crates/driver/src/run.rs +++ b/crates/driver/src/run.rs @@ -69,7 +69,7 @@ async fn run_with(args: cli::Args, addr_sender: Option Date: Tue, 17 Dec 2024 10:01:55 +0000 Subject: [PATCH 14/34] Split logic into separate files --- .../src/domain/competition/bad_tokens.rs | 266 ------------------ .../domain/competition/bad_tokens/cache.rs | 87 ++++++ .../domain/competition/bad_tokens/metrics.rs | 10 + .../src/domain/competition/bad_tokens/mod.rs | 122 ++++++++ .../competition/bad_tokens/simulation.rs | 93 ++++++ crates/driver/src/infra/api/mod.rs | 21 +- crates/driver/src/infra/config/file/load.rs | 34 ++- crates/driver/src/infra/config/file/mod.rs | 44 +-- crates/driver/src/infra/config/mod.rs | 5 +- crates/driver/src/infra/mod.rs | 2 +- crates/driver/src/infra/solver/mod.rs | 14 +- crates/driver/src/run.rs | 7 +- 12 files changed, 374 insertions(+), 331 deletions(-) delete mode 100644 crates/driver/src/domain/competition/bad_tokens.rs create mode 100644 crates/driver/src/domain/competition/bad_tokens/cache.rs create mode 100644 crates/driver/src/domain/competition/bad_tokens/metrics.rs create mode 100644 crates/driver/src/domain/competition/bad_tokens/mod.rs create mode 100644 crates/driver/src/domain/competition/bad_tokens/simulation.rs diff --git a/crates/driver/src/domain/competition/bad_tokens.rs b/crates/driver/src/domain/competition/bad_tokens.rs deleted file mode 100644 index 50c49bcbc9..0000000000 --- a/crates/driver/src/domain/competition/bad_tokens.rs +++ /dev/null @@ -1,266 +0,0 @@ -use { - super::Order, - crate::{ - domain::{competition::Auction, eth}, - infra::{self, config::file::BadTokenDetection}, - }, - dashmap::{DashMap, Entry}, - futures::StreamExt, - model::interaction::InteractionData, - shared::bad_token::{trace_call::TraceCallDetectorRaw, TokenQuality}, - std::{ - collections::HashMap, - fmt, - sync::Arc, - time::{Duration, Instant}, - }, -}; - -// TODO better comments -#[derive(Debug, Copy, Clone, PartialEq, Eq)] -pub enum Quality { - /// Solver is likely to produce working solutions when computing - /// routes for this token. - Supported, - /// Solver will likely produce failing solutions when computing - /// routes for this token. 
This can have many reasons: - /// * fees on transfer - /// * token enforces max transfer amount - /// * trader is deny listed - /// * bugs in the solidity compiler make it incompatible with the settlement - /// contract - see - /// * probably tons of other reasons - Unsupported, -} - -// TODO: better name (it only looks up stuff) -#[derive(Default)] -pub struct Detector { - /// manually configured list of supported and unsupported tokens. Only - /// tokens that get detected incorrectly by the automatic detectors get - /// listed here and therefore have a higher precedence. - hardcoded: HashMap, - /// cache which is shared and updated by multiple bad token detection - /// mechanisms - cache: Cache, - simulation_detector: Option, - metrics: Option, -} - -impl Detector { - pub fn with_config(mut self, config: HashMap) -> Self { - self.hardcoded = config; - self - } - - pub fn with_simulation_detector(mut self, eth: &infra::Ethereum) -> Self { - let detector = - TraceCallDetectorRaw::new(eth.web3().clone(), eth.contracts().settlement().address()); - self.simulation_detector = Some(detector); - self - } - - pub fn with_heuristic_detector(mut self) -> Self { - self.metrics = Some(Default::default()); - self - } - - pub fn with_cache(mut self, cache: Cache) -> Self { - self.cache = cache; - self - } - - /// Removes all unsupported orders from the auction. - pub async fn filter_unsupported_orders_in_auction(&self, mut auction: Auction) -> Auction { - let now = Instant::now(); - - let filtered_orders = futures::stream::iter(auction.orders.into_iter()) - .filter_map(move |order| async move { - let sell = self.get_token_quality(order.sell.token, now); - let buy = self.get_token_quality(order.sell.token, now); - match (sell, buy) { - // both tokens supported => keep order - (Some(Quality::Supported), Some(Quality::Supported)) => Some(order), - // at least 1 token unsupported => drop order - (Some(Quality::Unsupported), _) | (_, Some(Quality::Unsupported)) => None, - // sell token quality is unknown => keep order if token is supported - (None, _) => { - let quality = self.determine_sell_token_quality(&order, now).await; - (quality == Some(Quality::Supported)).then_some(order) - } - // buy token quality is unknown => keep order (because we can't - // determine quality and assume it's good) - (_, None) => Some(order), - } - }) - .collect::>() - .await; - auction.orders = filtered_orders; - - self.cache.evict_outdated_entries(); - - auction - } - - fn get_token_quality(&self, token: eth::TokenAddress, now: Instant) -> Option { - if let Some(quality) = self.hardcoded.get(&token) { - return Some(*quality); - } - - if let Some(quality) = self.cache.get_quality(token, now) { - return Some(quality); - } - - if let Some(metrics) = &self.metrics { - return metrics.get_quality(token); - } - - None - } - - async fn determine_sell_token_quality(&self, order: &Order, now: Instant) -> Option { - let detector = self.simulation_detector.as_ref()?; - - if let Some(quality) = self.cache.get_quality(order.sell.token, now) { - return Some(quality); - } - - let token = order.sell.token; - let pre_interactions: Vec<_> = order - .pre_interactions - .iter() - .map(|i| InteractionData { - target: i.target.0, - value: i.value.0, - call_data: i.call_data.0.clone(), - }) - .collect(); - - match detector - .test_transfer( - eth::Address::from(order.trader()).0, - token.0 .0, - order.sell.amount.0, - &pre_interactions, - ) - .await - { - Err(err) => { - tracing::debug!(?err, "failed to determine token quality"); - None - } - 
Ok(TokenQuality::Good) => { - self.cache.update_quality(token, Quality::Supported, now); - Some(Quality::Supported) - } - Ok(TokenQuality::Bad { reason }) => { - tracing::debug!(reason, "cache token as unsupported"); - self.cache.update_quality(token, Quality::Unsupported, now); - Some(Quality::Unsupported) - } - } - } -} - -impl fmt::Debug for Detector { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Detector") - .field("hardcoded", &self.hardcoded) - .field("dynamic", &format_args!("Vec")) - .finish() - } -} - -/// Cache keeping track of whether or not a token is considered supported or -/// not. Internally reference counted for cheap clones and easy sharing. -/// Stores a map instead of a set to not recompute the quality of good tokens -/// over and over. -/// Evicts cached value after a configurable period of time. -#[derive(Clone)] -pub struct Cache(Arc); - -struct Inner { - cache: DashMap, - /// entries older than this get ignored and evicted - max_age: Duration, -} - -struct CacheEntry { - /// when the decision on the token quality was made - timestamp: Instant, - /// whether the token is supported or not - quality: Quality, -} - -impl Default for Cache { - fn default() -> Self { - Self::new(BadTokenDetection::default().max_age) - } -} - -impl Cache { - /// Creates a new instance which evicts cached values after a period of - /// time. - pub fn new(max_age: Duration) -> Self { - Self(Arc::new(Inner { - max_age, - cache: DashMap::default(), - })) - } - - /// Updates whether or not a token should be considered supported. - fn update_quality(&self, token: eth::TokenAddress, quality: Quality, now: Instant) { - match self.0.cache.entry(token) { - Entry::Occupied(mut o) => { - let value = o.get_mut(); - if now.duration_since(value.timestamp) > self.0.max_age - || quality == Quality::Unsupported - { - // Only update the value if the cached value is outdated by now or - // if the new value is "Unsupported". This means on conflicting updates - // we err on the conservative side and assume a token is unsupported. - value.quality = quality; - } - value.timestamp = now; - } - Entry::Vacant(v) => { - v.insert(CacheEntry { - quality, - timestamp: now, - }); - } - } - } - - fn evict_outdated_entries(&self) { - let now = Instant::now(); - self.0 - .cache - .retain(|_, value| now.duration_since(value.timestamp) > self.0.max_age); - } - - /// Returns the quality of the token. If the cached value is older than the - /// `max_age` it gets ignored and the token evicted. - fn get_quality(&self, token: eth::TokenAddress, now: Instant) -> Option { - let Entry::Occupied(entry) = self.0.cache.entry(token) else { - return None; - }; - - let value = entry.get(); - if now.duration_since(value.timestamp) > self.0.max_age { - entry.remove(); - return None; - } - - Some(value.quality) - } -} - -#[derive(Default)] -struct Metrics {} - -impl Metrics { - fn get_quality(&self, _token: eth::TokenAddress) -> Option { - todo!() - } -} diff --git a/crates/driver/src/domain/competition/bad_tokens/cache.rs b/crates/driver/src/domain/competition/bad_tokens/cache.rs new file mode 100644 index 0000000000..588c854d1f --- /dev/null +++ b/crates/driver/src/domain/competition/bad_tokens/cache.rs @@ -0,0 +1,87 @@ +use { + crate::domain::{competition::bad_tokens::Quality, eth}, + dashmap::{DashMap, Entry}, + std::{ + sync::Arc, + time::{Duration, Instant}, + }, +}; + +/// Cache keeping track of whether or not a token is considered supported or +/// not. 
Internally reference counted for cheap clones and easy sharing.
+/// Stores a map instead of a set to not recompute the quality of good tokens
+/// over and over.
+/// Evicts cached value after a configurable period of time.
+#[derive(Clone)]
+pub struct Cache(Arc<Inner>);
+
+struct Inner {
+    cache: DashMap<eth::TokenAddress, CacheEntry>,
+    /// entries older than this get ignored and evicted
+    max_age: Duration,
+}
+
+struct CacheEntry {
+    /// when the decision on the token quality was made
+    timestamp: Instant,
+    /// whether the token is supported or not
+    quality: Quality,
+}
+
+impl Cache {
+    /// Creates a new instance which evicts cached values after a period of
+    /// time.
+    pub fn new(max_age: Duration) -> Self {
+        Self(Arc::new(Inner {
+            max_age,
+            cache: DashMap::default(),
+        }))
+    }
+
+    /// Updates whether or not a token should be considered supported.
+    pub fn update_quality(&self, token: eth::TokenAddress, quality: Quality, now: Instant) {
+        match self.0.cache.entry(token) {
+            Entry::Occupied(mut o) => {
+                let value = o.get_mut();
+                if now.duration_since(value.timestamp) > self.0.max_age
+                    || quality == Quality::Unsupported
+                {
+                    // Only update the value if the cached value is outdated by now or
+                    // if the new value is "Unsupported". This means on conflicting updates
+                    // we err on the conservative side and assume a token is unsupported.
+                    value.quality = quality;
+                }
+                value.timestamp = now;
+            }
+            Entry::Vacant(v) => {
+                v.insert(CacheEntry {
+                    quality,
+                    timestamp: now,
+                });
+            }
+        }
+    }
+
+    pub fn evict_outdated_entries(&self) {
+        let now = Instant::now();
+        self.0
+            .cache
+            .retain(|_, value| now.duration_since(value.timestamp) <= self.0.max_age);
+    }
+
+    /// Returns the quality of the token. If the cached value is older than the
+    /// `max_age` it gets ignored and the token evicted.
+    pub fn get_quality(&self, token: eth::TokenAddress, now: Instant) -> Option<Quality> {
+        let Entry::Occupied(entry) = self.0.cache.entry(token) else {
+            return None;
+        };
+
+        let value = entry.get();
+        if now.duration_since(value.timestamp) > self.0.max_age {
+            entry.remove();
+            return None;
+        }
+
+        Some(value.quality)
+    }
+}
diff --git a/crates/driver/src/domain/competition/bad_tokens/metrics.rs b/crates/driver/src/domain/competition/bad_tokens/metrics.rs
new file mode 100644
index 0000000000..f6640605aa
--- /dev/null
+++ b/crates/driver/src/domain/competition/bad_tokens/metrics.rs
@@ -0,0 +1,10 @@
+use {super::Quality, crate::domain::eth};
+
+#[derive(Default)]
+pub struct Detector;
+
+impl Detector {
+    pub fn get_quality(&self, _token: eth::TokenAddress) -> Option<Quality> {
+        None
+    }
+}
diff --git a/crates/driver/src/domain/competition/bad_tokens/mod.rs b/crates/driver/src/domain/competition/bad_tokens/mod.rs
new file mode 100644
index 0000000000..f6785a4c8e
--- /dev/null
+++ b/crates/driver/src/domain/competition/bad_tokens/mod.rs
@@ -0,0 +1,122 @@
+use {
+    crate::domain::{competition::Auction, eth},
+    futures::StreamExt,
+    std::{collections::HashMap, fmt, time::Instant},
+};
+
+pub mod cache;
+pub mod metrics;
+pub mod simulation;
+
+// TODO better comments
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub enum Quality {
+    /// Solver is likely to produce working solutions when computing
+    /// routes for this token.
+    Supported,
+    /// Solver will likely produce failing solutions when computing
+    /// routes for this token. This can have many reasons:
+    /// * fees on transfer
+    /// * token enforces max transfer amount
+    /// * trader is deny listed
+    /// * bugs in the solidity compiler make it incompatible with the settlement
+    ///   contract - see
+    /// * probably tons of other reasons
+    Unsupported,
+}
+
+// TODO: better name (it only looks up stuff)
+#[derive(Default)]
+pub struct Detector {
+    /// manually configured list of supported and unsupported tokens. Only
+    /// tokens that get detected incorrectly by the automatic detectors get
+    /// listed here and therefore have a higher precedence.
+    hardcoded: HashMap<eth::TokenAddress, Quality>,
+    simulation_detector: Option<simulation::Detector>,
+    metrics: Option<metrics::Detector>,
+}
+
+impl Detector {
+    pub fn with_config(mut self, config: HashMap<eth::TokenAddress, Quality>) -> Self {
+        self.hardcoded = config;
+        self
+    }
+
+    /// Enables detection of unsupported tokens via simulation based detection
+    /// methods.
+    pub fn with_simulation_detector(&mut self, detector: simulation::Detector) -> &mut Self {
+        self.simulation_detector = Some(detector);
+        self
+    }
+
+    /// Enables detection of unsupported tokens based on heuristics.
+    pub fn with_heuristic_detector(mut self) -> Self {
+        self.metrics = Some(metrics::Detector);
+        self
+    }
+
+    /// Removes all unsupported orders from the auction.
+    pub async fn filter_unsupported_orders_in_auction(&self, mut auction: Auction) -> Auction {
+        let now = Instant::now();
+
+        let filtered_orders = futures::stream::iter(auction.orders.into_iter())
+            .filter_map(move |order| async move {
+                let sell = self.get_token_quality(order.sell.token, now);
+                let buy = self.get_token_quality(order.buy.token, now);
+                match (sell, buy) {
+                    // both tokens supported => keep order
+                    (Some(Quality::Supported), Some(Quality::Supported)) => Some(order),
+                    // at least 1 token unsupported => drop order
+                    (Some(Quality::Unsupported), _) | (_, Some(Quality::Unsupported)) => None,
+                    // sell token quality is unknown => keep order if token is supported
+                    (None, _) => {
+                        let Some(detector) = &self.simulation_detector else {
+                            // we can't determine quality => assume order is good
+                            return Some(order);
+                        };
+                        let quality = detector.determine_sell_token_quality(&order, now).await;
+                        (quality == Some(Quality::Supported)).then_some(order)
+                    }
+                    // buy token quality is unknown => keep order (because we can't
+                    // determine quality and assume it's good)
+                    (_, None) => Some(order),
+                }
+            })
+            .collect::<Vec<_>>()
+            .await;
+        auction.orders = filtered_orders;
+
+        if let Some(detector) = &self.simulation_detector {
+            detector.evict_outdated_entries();
+        }
+
+        auction
+    }
+
+    fn get_token_quality(&self, token: eth::TokenAddress, now: Instant) -> Option<Quality> {
+        if let Some(quality) = self.hardcoded.get(&token) {
+            return Some(*quality);
+        }
+
+        if let Some(detector) = &self.simulation_detector {
+            if let Some(quality) = detector.get_quality(token, now) {
+                return Some(quality);
+            }
+        }
+
+        if let Some(metrics) = &self.metrics {
+            return metrics.get_quality(token);
+        }
+
+        None
+    }
+}
+
+impl fmt::Debug for Detector {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Detector")
+            .field("hardcoded", &self.hardcoded)
+            .field("dynamic", &format_args!("Vec"))
+            .finish()
+    }
+}
diff --git a/crates/driver/src/domain/competition/bad_tokens/simulation.rs b/crates/driver/src/domain/competition/bad_tokens/simulation.rs
new file mode 100644
index 0000000000..b6c634d743
--- /dev/null
+++ b/crates/driver/src/domain/competition/bad_tokens/simulation.rs
@@ -0,0 +1,93 @@
+use {
+    crate::{
+        domain::{
+            competition::{
+                bad_tokens::{cache::Cache,
Quality}, + Order, + }, + eth, + }, + infra, + }, + model::interaction::InteractionData, + shared::bad_token::{trace_call::TraceCallDetectorRaw, TokenQuality}, + std::{ + sync::Arc, + time::{Duration, Instant}, + }, +}; + +#[derive(Clone)] +pub struct Detector(Arc); + +struct Inner { + cache: Cache, + detector: TraceCallDetectorRaw, +} + +impl Detector { + pub fn new(max_age: Duration, eth: &infra::Ethereum) -> Self { + let detector = + TraceCallDetectorRaw::new(eth.web3().clone(), eth.contracts().settlement().address()); + Self(Arc::new(Inner { + cache: Cache::new(max_age), + detector, + })) + } + + pub async fn determine_sell_token_quality( + &self, + order: &Order, + now: Instant, + ) -> Option { + let cache = &self.0.cache; + if let Some(quality) = cache.get_quality(order.sell.token, now) { + return Some(quality); + } + + let token = order.sell.token; + let pre_interactions: Vec<_> = order + .pre_interactions + .iter() + .map(|i| InteractionData { + target: i.target.0, + value: i.value.0, + call_data: i.call_data.0.clone(), + }) + .collect(); + + match self + .0 + .detector + .test_transfer( + eth::Address::from(order.trader()).0, + token.0 .0, + order.sell.amount.0, + &pre_interactions, + ) + .await + { + Err(err) => { + tracing::debug!(?err, "failed to determine token quality"); + None + } + Ok(TokenQuality::Good) => { + cache.update_quality(token, Quality::Supported, now); + Some(Quality::Supported) + } + Ok(TokenQuality::Bad { reason }) => { + tracing::debug!(reason, "cache token as unsupported"); + cache.update_quality(token, Quality::Unsupported, now); + Some(Quality::Unsupported) + } + } + } +} + +impl std::ops::Deref for Detector { + type Target = Cache; + + fn deref(&self) -> &Self::Target { + &self.0.cache + } +} diff --git a/crates/driver/src/infra/api/mod.rs b/crates/driver/src/infra/api/mod.rs index 7bc4fab13e..cb1f8c4bf0 100644 --- a/crates/driver/src/infra/api/mod.rs +++ b/crates/driver/src/infra/api/mod.rs @@ -3,7 +3,7 @@ use { domain::{self, competition::bad_tokens, Mempools}, infra::{ self, - config::file::{BadTokenDetection, OrderPriorityStrategy}, + config::file::OrderPriorityStrategy, liquidity, solver::{Solver, Timeouts}, tokens, @@ -29,10 +29,10 @@ pub struct Api { pub eth: Ethereum, pub mempools: Mempools, pub addr: SocketAddr, + pub bad_token_detector: bad_tokens::simulation::Detector, /// If this channel is specified, the bound address will be sent to it. This /// allows the driver to bind to 0.0.0.0:0 during testing. pub addr_sender: Option>, - pub bad_token_detection: BadTokenDetection, } impl Api { @@ -54,8 +54,6 @@ impl Api { let pre_processor = domain::competition::AuctionProcessor::new(&self.eth, order_priority_strategies); - let trace_detector = bad_tokens::Cache::new(self.bad_token_detection.max_age); - // Add the metrics and healthz endpoints. 
app = routes::metrics(app); app = routes::healthz(app); @@ -73,12 +71,13 @@ impl Api { let router = routes::reveal(router); let router = routes::settle(router); - let bad_tokens = Arc::new( - bad_tokens::Detector::default() - .with_simulation_detector(&self.eth.clone()) - .with_config(solver.tokens_supported().clone()) - .with_cache(trace_detector.clone()), - ); + let mut bad_tokens = bad_tokens::Detector::default(); + if solver + .bad_token_detection() + .enable_simulation_based_bad_token_detection + { + bad_tokens.with_simulation_detector(self.bad_token_detector.clone()); + } let router = router.with_state(State(Arc::new(Inner { eth: self.eth.clone(), @@ -90,7 +89,7 @@ impl Api { simulator: self.simulator.clone(), mempools: self.mempools.clone(), settlements: Default::default(), - bad_tokens, + bad_tokens: Arc::new(bad_tokens), }, liquidity: self.liquidity.clone(), tokens: tokens.clone(), diff --git a/crates/driver/src/infra/config/file/load.rs b/crates/driver/src/infra/config/file/load.rs index bb64173d1a..bb090d3d4e 100644 --- a/crates/driver/src/infra/config/file/load.rs +++ b/crates/driver/src/infra/config/file/load.rs @@ -8,7 +8,7 @@ use { liquidity, mempool, simulator, - solver::{self, SolutionMerging}, + solver::{self, BadTokenDetection, SolutionMerging}, }, }, futures::future::join_all, @@ -94,19 +94,23 @@ pub async fn load(chain: chain::Id, path: &Path) -> infra::Config { solver_native_token: config.manage_native_token.to_domain(), quote_tx_origin: config.quote_tx_origin.map(eth::Address), response_size_limit_max_bytes: config.response_size_limit_max_bytes, - tokens_supported: config - .token_supported - .iter() - .map(|(token, supported)| { - ( - eth::TokenAddress(eth::ContractAddress(*token)), - match supported { - true => bad_tokens::Quality::Supported, - false => bad_tokens::Quality::Unsupported, - }, - ) - }) - .collect(), + bad_token_detection: BadTokenDetection { + tokens_supported: config + .token_supported + .iter() + .map(|(token, supported)| { + ( + eth::TokenAddress(eth::ContractAddress(*token)), + match supported { + true => bad_tokens::Quality::Supported, + false => bad_tokens::Quality::Unsupported, + }, + ) + }) + .collect(), + enable_simulation_based_bad_token_detection: config + .enable_simulation_bad_token_detection, + }, } })) .await, @@ -353,6 +357,6 @@ pub async fn load(chain: chain::Id, path: &Path) -> infra::Config { gas_estimator: config.gas_estimator, order_priority_strategies: config.order_priority_strategies, archive_node_url: config.archive_node_url, - bad_token_detection: config.bad_token_detection, + simulation_bad_token_max_age: config.simulation_bad_token_max_age, } } diff --git a/crates/driver/src/infra/config/file/mod.rs b/crates/driver/src/infra/config/file/mod.rs index efe9492d2e..9f79c01e4f 100644 --- a/crates/driver/src/infra/config/file/mod.rs +++ b/crates/driver/src/infra/config/file/mod.rs @@ -66,10 +66,13 @@ struct Config { /// Archive node URL used to index CoW AMM archive_node_url: Option, - /// Configuration options for automatically detecting unsupported - /// tokens. - #[serde(default)] - bad_token_detection: BadTokenDetection, + /// How long should the token quality computed by the simulation + /// based logic be cached. 
+ #[serde( + with = "humantime_serde", + default = "default_simulation_bad_token_max_age" + )] + simulation_bad_token_max_age: Duration, } #[serde_as] @@ -266,8 +269,14 @@ struct SolverConfig { #[serde(default = "default_response_size_limit_max_bytes")] response_size_limit_max_bytes: usize, + /// Which tokens are explicitly supported or unsupported by the solver. #[serde(default)] token_supported: HashMap, + + /// Whether or not the solver opted into detecting unsupported + /// tokens with `trace_callMany` based simulation. + #[serde(default)] + enable_simulation_bad_token_detection: bool, } #[derive(Clone, Copy, Debug, Default, Deserialize, PartialEq, Serialize)] @@ -663,31 +672,6 @@ fn default_max_order_age() -> Option { Some(Duration::from_secs(300)) } -/// Cache configuration for the bad token detection -#[derive(Clone, Debug, Deserialize)] -#[serde(rename_all = "kebab-case", deny_unknown_fields)] -pub struct BadTokenDetection { - /// Entries older than `max_age` will get ignored and evicted - #[serde( - with = "humantime_serde", - default = "default_bad_token_detection_cache_max_age" - )] - pub max_age: Duration, - - /// RPC URL to be used for detecting unsupported tokens via simulations. - /// Requires support for `trace_callMany`. - pub simulation_detection_rpc: Option, -} - -impl Default for BadTokenDetection { - fn default() -> Self { - Self { - max_age: default_bad_token_detection_cache_max_age(), - simulation_detection_rpc: None, - } - } -} - -fn default_bad_token_detection_cache_max_age() -> Duration { +fn default_simulation_bad_token_max_age() -> Duration { Duration::from_secs(600) } diff --git a/crates/driver/src/infra/config/mod.rs b/crates/driver/src/infra/config/mod.rs index 983a1f1df4..d87d4500f2 100644 --- a/crates/driver/src/infra/config/mod.rs +++ b/crates/driver/src/infra/config/mod.rs @@ -3,13 +3,14 @@ use { domain::eth, infra::{ blockchain, - config::file::{BadTokenDetection, GasEstimatorType, OrderPriorityStrategy}, + config::file::{GasEstimatorType, OrderPriorityStrategy}, liquidity, mempool, simulator, solver, }, }, + std::time::Duration, url::Url, }; @@ -28,5 +29,5 @@ pub struct Config { pub contracts: blockchain::contracts::Addresses, pub order_priority_strategies: Vec, pub archive_node_url: Option, - pub bad_token_detection: BadTokenDetection, + pub simulation_bad_token_max_age: Duration, } diff --git a/crates/driver/src/infra/mod.rs b/crates/driver/src/infra/mod.rs index b33526f545..4476cd91bf 100644 --- a/crates/driver/src/infra/mod.rs +++ b/crates/driver/src/infra/mod.rs @@ -11,7 +11,7 @@ pub mod simulator; pub mod solver; pub mod time; pub mod tokens; - +/// TODO put bad tokens here? pub use { self::solver::Solver, api::Api, diff --git a/crates/driver/src/infra/solver/mod.rs b/crates/driver/src/infra/solver/mod.rs index 0815630190..76281b8f72 100644 --- a/crates/driver/src/infra/solver/mod.rs +++ b/crates/driver/src/infra/solver/mod.rs @@ -124,8 +124,7 @@ pub struct Config { /// Which `tx.origin` is required to make quote verification pass. pub quote_tx_origin: Option, pub response_size_limit_max_bytes: usize, - /// Tokens that are explicitly allow- or deny-listed. 
- pub tokens_supported: HashMap, + pub bad_token_detection: BadTokenDetection, } impl Solver { @@ -154,8 +153,8 @@ impl Solver { }) } - pub fn tokens_supported(&self) -> &HashMap { - &self.config.tokens_supported + pub fn bad_token_detection(&self) -> &BadTokenDetection { + &self.config.bad_token_detection } pub fn persistence(&self) -> Persistence { @@ -302,3 +301,10 @@ impl Error { } } } + +#[derive(Debug, Clone)] +pub struct BadTokenDetection { + /// Tokens that are explicitly allow- or deny-listed. + pub tokens_supported: HashMap, + pub enable_simulation_based_bad_token_detection: bool, +} diff --git a/crates/driver/src/run.rs b/crates/driver/src/run.rs index df936a09aa..5d52e0accf 100644 --- a/crates/driver/src/run.rs +++ b/crates/driver/src/run.rs @@ -1,6 +1,6 @@ use { crate::{ - domain::Mempools, + domain::{competition::bad_tokens, Mempools}, infra::{ self, blockchain::{self, Ethereum}, @@ -66,10 +66,13 @@ async fn run_with(args: cli::Args, addr_sender: Option Date: Tue, 17 Dec 2024 10:04:41 +0000 Subject: [PATCH 15/34] some cleanup --- crates/driver/src/domain/competition/bad_tokens/mod.rs | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/crates/driver/src/domain/competition/bad_tokens/mod.rs b/crates/driver/src/domain/competition/bad_tokens/mod.rs index f6785a4c8e..3719e83d08 100644 --- a/crates/driver/src/domain/competition/bad_tokens/mod.rs +++ b/crates/driver/src/domain/competition/bad_tokens/mod.rs @@ -25,7 +25,6 @@ pub enum Quality { Unsupported, } -// TODO: better name (it only looks up stuff) #[derive(Default)] pub struct Detector { /// manually configured list of supported and unsupported tokens. Only @@ -37,11 +36,6 @@ pub struct Detector { } impl Detector { - pub fn with_config(mut self, config: HashMap) -> Self { - self.hardcoded = config; - self - } - /// Enables detection of unsupported tokens via simulation based detection /// methods. pub fn with_simulation_detector(&mut self, detector: simulation::Detector) -> &mut Self { @@ -50,7 +44,7 @@ impl Detector { } /// Enables detection of unsupported tokens based on heuristics. 
- pub fn with_heuristic_detector(mut self) -> Self { + pub fn with_heuristic_detector(&mut self) -> &mut Self { self.metrics = Some(metrics::Detector); self } From 3fc408b8fbf6cc390dbef40d8a1c9f93b772104b Mon Sep 17 00:00:00 2001 From: MartinquaXD Date: Tue, 17 Dec 2024 10:46:25 +0000 Subject: [PATCH 16/34] Add request sharing to bad token detection --- .../competition/bad_tokens/simulation.rs | 92 ++++++++++++------- 1 file changed, 57 insertions(+), 35 deletions(-) diff --git a/crates/driver/src/domain/competition/bad_tokens/simulation.rs b/crates/driver/src/domain/competition/bad_tokens/simulation.rs index b6c634d743..f5410f173e 100644 --- a/crates/driver/src/domain/competition/bad_tokens/simulation.rs +++ b/crates/driver/src/domain/competition/bad_tokens/simulation.rs @@ -3,14 +3,19 @@ use { domain::{ competition::{ bad_tokens::{cache::Cache, Quality}, + order, Order, }, eth, }, infra, }, + futures::FutureExt, model::interaction::InteractionData, - shared::bad_token::{trace_call::TraceCallDetectorRaw, TokenQuality}, + shared::{ + bad_token::{trace_call::TraceCallDetectorRaw, TokenQuality}, + request_sharing::BoxRequestSharing, + }, std::{ sync::Arc, time::{Duration, Instant}, @@ -23,6 +28,7 @@ pub struct Detector(Arc); struct Inner { cache: Cache, detector: TraceCallDetectorRaw, + sharing: BoxRequestSharing>, } impl Detector { @@ -32,6 +38,7 @@ impl Detector { Self(Arc::new(Inner { cache: Cache::new(max_age), detector, + sharing: BoxRequestSharing::labelled("bad_tokens".into()), })) } @@ -45,42 +52,57 @@ impl Detector { return Some(quality); } - let token = order.sell.token; - let pre_interactions: Vec<_> = order - .pre_interactions - .iter() - .map(|i| InteractionData { - target: i.target.0, - value: i.value.0, - call_data: i.call_data.0.clone(), - }) - .collect(); + // The simulation detector gets used by multiple solvers at the same time + // and therefore will have to handle a lot of duplicate requests. To avoid + // doing duplicate work we use the `RequestSharing` component which checks + // if an equivalent request is already in-flight and awaits that instead of + // creating a new one. 
+ let uid = order.uid; + self.0 + .sharing + .shared_or_else(uid, move |_uid| { + let inner = self.0.clone(); + let sell_token = order.sell.token; + let pre_interactions: Vec<_> = order + .pre_interactions + .iter() + .map(|i| InteractionData { + target: i.target.0, + value: i.value.0, + call_data: i.call_data.0.clone(), + }) + .collect(); + let trader = eth::Address::from(order.trader()).0; + let sell_amount = order.sell.amount.0; - match self - .0 - .detector - .test_transfer( - eth::Address::from(order.trader()).0, - token.0 .0, - order.sell.amount.0, - &pre_interactions, - ) + async move { + let result = inner + .detector + .test_transfer(trader, sell_token.0 .0, sell_amount, &pre_interactions) + .await; + match result { + Err(err) => { + tracing::debug!(?err, "failed to determine token quality"); + None + } + Ok(TokenQuality::Good) => { + inner + .cache + .update_quality(sell_token, Quality::Supported, now); + Some(Quality::Supported) + } + Ok(TokenQuality::Bad { reason }) => { + tracing::debug!(reason, "cache token as unsupported"); + inner + .cache + .update_quality(sell_token, Quality::Unsupported, now); + Some(Quality::Unsupported) + } + } + } + .boxed() + }) .await - { - Err(err) => { - tracing::debug!(?err, "failed to determine token quality"); - None - } - Ok(TokenQuality::Good) => { - cache.update_quality(token, Quality::Supported, now); - Some(Quality::Supported) - } - Ok(TokenQuality::Bad { reason }) => { - tracing::debug!(reason, "cache token as unsupported"); - cache.update_quality(token, Quality::Unsupported, now); - Some(Quality::Unsupported) - } - } } } From 1e2ac01fe944a194c56b822cbc691b76a0cee777 Mon Sep 17 00:00:00 2001 From: MartinquaXD Date: Tue, 17 Dec 2024 10:47:37 +0000 Subject: [PATCH 17/34] fixup --- crates/driver/src/domain/competition/bad_tokens/simulation.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/driver/src/domain/competition/bad_tokens/simulation.rs b/crates/driver/src/domain/competition/bad_tokens/simulation.rs index f5410f173e..6ec18ee7ef 100644 --- a/crates/driver/src/domain/competition/bad_tokens/simulation.rs +++ b/crates/driver/src/domain/competition/bad_tokens/simulation.rs @@ -54,7 +54,7 @@ impl Detector { // The simulation detector gets used by multiple solvers at the same time // and therefore will have to handle a lot of duplicate requests. To avoid - // doing duplicate work we use the `RequestSharing` component which checks + // doing unnecessary work we use the `RequestSharing` component which checks // if an equivalent request is already in-flight and awaits that instead of // creating a new one. 
let uid = order.uid; From f3650a1c01b92faf8a3123a6dfd465c3b6982c87 Mon Sep 17 00:00:00 2001 From: MartinquaXD Date: Tue, 17 Dec 2024 11:08:23 +0000 Subject: [PATCH 18/34] enable driver bad token detection in e2e tests --- crates/e2e/src/setup/colocation.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/e2e/src/setup/colocation.rs b/crates/e2e/src/setup/colocation.rs index da59d6c8f2..5a4916e6be 100644 --- a/crates/e2e/src/setup/colocation.rs +++ b/crates/e2e/src/setup/colocation.rs @@ -124,6 +124,7 @@ relative-slippage = "0.1" account = "{account}" merge-solutions = {merge_solutions} quote-using-limit-orders = {quote_using_limit_orders} +enable-simulation-bad-token-detection = true "# ) }, From a8250990e54dc1408091d7be012d48445a35d208 Mon Sep 17 00:00:00 2001 From: MartinquaXD Date: Tue, 17 Dec 2024 12:04:37 +0000 Subject: [PATCH 19/34] Reduce diff --- crates/driver/src/domain/competition/auction.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/driver/src/domain/competition/auction.rs b/crates/driver/src/domain/competition/auction.rs index f3c49b8abb..541cb2a8e8 100644 --- a/crates/driver/src/domain/competition/auction.rs +++ b/crates/driver/src/domain/competition/auction.rs @@ -183,10 +183,10 @@ impl AuctionProcessor { // and we don't want to block the runtime for too long. let fut = tokio::task::spawn_blocking(move || { let start = std::time::Instant::now(); - let cow_amm_orders = rt.block_on(Self::cow_amm_orders(ð, &tokens, &cow_amms, signature_validator.as_ref())); - orders.extend(cow_amm_orders); + orders.extend(rt.block_on(Self::cow_amm_orders(ð, &tokens, &cow_amms, signature_validator.as_ref()))); sorting::sort_orders(&mut orders, &tokens, &solver, &order_comparators); - let mut balances = rt.block_on(Self::fetch_balances(ð, &orders)); + let mut balances = + rt.block_on(async { Self::fetch_balances(ð, &orders).await }); Self::filter_orders(&mut balances, &mut orders); tracing::debug!(auction_id = new_id.0, time =? 
start.elapsed(), "auction preprocessing done"); orders From b4d907bb5d4c78f07293de6cd8b5af374af4b412 Mon Sep 17 00:00:00 2001 From: MartinquaXD Date: Tue, 17 Dec 2024 12:16:18 +0000 Subject: [PATCH 20/34] fixup --- .../driver/src/domain/competition/bad_tokens/metrics.rs | 1 + crates/driver/src/domain/competition/bad_tokens/mod.rs | 8 +++++++- .../src/domain/competition/bad_tokens/simulation.rs | 7 +++++++ crates/driver/src/domain/competition/order/mod.rs | 2 +- crates/driver/src/infra/api/mod.rs | 3 ++- crates/driver/src/infra/mod.rs | 2 +- 6 files changed, 19 insertions(+), 4 deletions(-) diff --git a/crates/driver/src/domain/competition/bad_tokens/metrics.rs b/crates/driver/src/domain/competition/bad_tokens/metrics.rs index f6640605aa..8db0be0b82 100644 --- a/crates/driver/src/domain/competition/bad_tokens/metrics.rs +++ b/crates/driver/src/domain/competition/bad_tokens/metrics.rs @@ -5,6 +5,7 @@ pub struct Detector; impl Detector { pub fn get_quality(&self, _token: eth::TokenAddress) -> Option { + // TODO implement a reasonable heuristic None } } diff --git a/crates/driver/src/domain/competition/bad_tokens/mod.rs b/crates/driver/src/domain/competition/bad_tokens/mod.rs index 3719e83d08..ace09f2097 100644 --- a/crates/driver/src/domain/competition/bad_tokens/mod.rs +++ b/crates/driver/src/domain/competition/bad_tokens/mod.rs @@ -8,7 +8,6 @@ pub mod cache; pub mod metrics; pub mod simulation; -// TODO better comments #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum Quality { /// Solver is likely to produce working solutions when computing @@ -36,6 +35,13 @@ pub struct Detector { } impl Detector { + /// Hardcodes tokens as (un)supported based on the provided config. This has + /// the highest priority when looking up a token's quality. + pub fn with_config(mut self, config: HashMap) -> Self { + self.hardcoded = config; + self + } + /// Enables detection of unsupported tokens via simulation based detection /// methods. pub fn with_simulation_detector(&mut self, detector: simulation::Detector) -> &mut Self { diff --git a/crates/driver/src/domain/competition/bad_tokens/simulation.rs b/crates/driver/src/domain/competition/bad_tokens/simulation.rs index 6ec18ee7ef..9c831f751e 100644 --- a/crates/driver/src/domain/competition/bad_tokens/simulation.rs +++ b/crates/driver/src/domain/competition/bad_tokens/simulation.rs @@ -22,6 +22,10 @@ use { }, }; +/// Component to detect tokens which show unusual behavior during +/// transfers. These tokens are likely not supported by less advanced +/// solvers. Checks the behavior on transfer using a `trace_callMany` +/// based simulation. #[derive(Clone)] pub struct Detector(Arc); @@ -42,6 +46,9 @@ impl Detector { })) } + /// Simulates how the sell token behaves during transfers. Assumes that + /// the order owner has the required sell token balance and approvals + /// set. pub async fn determine_sell_token_quality( &self, order: &Order, diff --git a/crates/driver/src/domain/competition/order/mod.rs b/crates/driver/src/domain/competition/order/mod.rs index 3d2857be84..5b82b1d758 100644 --- a/crates/driver/src/domain/competition/order/mod.rs +++ b/crates/driver/src/domain/competition/order/mod.rs @@ -371,7 +371,7 @@ impl From for BuyTokenBalance { } /// The address which placed the order. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Into, From)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Into)] pub struct Trader(eth::Address); /// A just-in-time order. 
JIT orders are added at solving time by the solver to diff --git a/crates/driver/src/infra/api/mod.rs b/crates/driver/src/infra/api/mod.rs index cb1f8c4bf0..d76c6d825d 100644 --- a/crates/driver/src/infra/api/mod.rs +++ b/crates/driver/src/infra/api/mod.rs @@ -71,7 +71,8 @@ impl Api { let router = routes::reveal(router); let router = routes::settle(router); - let mut bad_tokens = bad_tokens::Detector::default(); + let mut bad_tokens = bad_tokens::Detector::default() + .with_config(solver.bad_token_detection().tokens_supported.clone()); if solver .bad_token_detection() .enable_simulation_based_bad_token_detection diff --git a/crates/driver/src/infra/mod.rs b/crates/driver/src/infra/mod.rs index 4476cd91bf..b33526f545 100644 --- a/crates/driver/src/infra/mod.rs +++ b/crates/driver/src/infra/mod.rs @@ -11,7 +11,7 @@ pub mod simulator; pub mod solver; pub mod time; pub mod tokens; -/// TODO put bad tokens here? + pub use { self::solver::Solver, api::Api, From 4cec1a29482c51987e43820760f7806ff66c9e30 Mon Sep 17 00:00:00 2001 From: MartinquaXD Date: Tue, 17 Dec 2024 12:34:43 +0000 Subject: [PATCH 21/34] fixup --- crates/driver/src/domain/competition/bad_tokens/mod.rs | 1 - crates/shared/src/bad_token/trace_call.rs | 2 ++ 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/driver/src/domain/competition/bad_tokens/mod.rs b/crates/driver/src/domain/competition/bad_tokens/mod.rs index ace09f2097..0572342421 100644 --- a/crates/driver/src/domain/competition/bad_tokens/mod.rs +++ b/crates/driver/src/domain/competition/bad_tokens/mod.rs @@ -116,7 +116,6 @@ impl fmt::Debug for Detector { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Detector") .field("hardcoded", &self.hardcoded) - .field("dynamic", &format_args!("Vec")) .finish() } } diff --git a/crates/shared/src/bad_token/trace_call.rs b/crates/shared/src/bad_token/trace_call.rs index 4d2e0a7530..521162807b 100644 --- a/crates/shared/src/bad_token/trace_call.rs +++ b/crates/shared/src/bad_token/trace_call.rs @@ -85,6 +85,8 @@ impl TraceCallDetector { } } +/// Detects whether a token is "bad" (works in unexpected ways that are +/// problematic for solving) by simulating several transfers of a token. #[derive(Debug, Clone)] pub struct TraceCallDetectorRaw { pub web3: Web3, From f1e32d720ba815a6b59daae13ae374072217539c Mon Sep 17 00:00:00 2001 From: MartinquaXD Date: Wed, 18 Dec 2024 10:11:11 +0000 Subject: [PATCH 22/34] Fix cache eviction logic --- crates/driver/src/domain/competition/bad_tokens/cache.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/driver/src/domain/competition/bad_tokens/cache.rs b/crates/driver/src/domain/competition/bad_tokens/cache.rs index 588c854d1f..0d97d8d0ec 100644 --- a/crates/driver/src/domain/competition/bad_tokens/cache.rs +++ b/crates/driver/src/domain/competition/bad_tokens/cache.rs @@ -66,7 +66,7 @@ impl Cache { let now = Instant::now(); self.0 .cache - .retain(|_, value| now.duration_since(value.timestamp) > self.0.max_age); + .retain(|_, value| now.duration_since(value.timestamp) < self.0.max_age); } /// Returns the quality of the token. 
If the cached value is older than the From 3f29b2e8c4eba1b2622e36a733e14df2e407e2d2 Mon Sep 17 00:00:00 2001 From: MartinquaXD Date: Wed, 18 Dec 2024 11:04:01 +0000 Subject: [PATCH 23/34] Replace `.with_config()` with `new()` --- crates/driver/src/domain/competition/bad_tokens/mod.rs | 8 +++++--- crates/driver/src/infra/api/mod.rs | 4 ++-- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/crates/driver/src/domain/competition/bad_tokens/mod.rs b/crates/driver/src/domain/competition/bad_tokens/mod.rs index 0572342421..180eea94db 100644 --- a/crates/driver/src/domain/competition/bad_tokens/mod.rs +++ b/crates/driver/src/domain/competition/bad_tokens/mod.rs @@ -37,9 +37,11 @@ pub struct Detector { impl Detector { /// Hardcodes tokens as (un)supported based on the provided config. This has /// the highest priority when looking up a token's quality. - pub fn with_config(mut self, config: HashMap) -> Self { - self.hardcoded = config; - self + pub fn new(config: HashMap) -> Self { + Self { + hardcoded: config, + ..Default::default() + } } /// Enables detection of unsupported tokens via simulation based detection diff --git a/crates/driver/src/infra/api/mod.rs b/crates/driver/src/infra/api/mod.rs index d76c6d825d..ea4544c4c3 100644 --- a/crates/driver/src/infra/api/mod.rs +++ b/crates/driver/src/infra/api/mod.rs @@ -71,8 +71,8 @@ impl Api { let router = routes::reveal(router); let router = routes::settle(router); - let mut bad_tokens = bad_tokens::Detector::default() - .with_config(solver.bad_token_detection().tokens_supported.clone()); + let mut bad_tokens = + bad_tokens::Detector::new(solver.bad_token_detection().tokens_supported.clone()); if solver .bad_token_detection() .enable_simulation_based_bad_token_detection From fb5794c41e6dd6c272c0325a006b53e3d970021a Mon Sep 17 00:00:00 2001 From: ilya Date: Wed, 18 Dec 2024 17:12:51 +0000 Subject: [PATCH 24/34] Implement metrics-based bad token detection strategy --- .../domain/competition/bad_tokens/metrics.rs | 41 ++++++++++++++++--- .../src/domain/competition/bad_tokens/mod.rs | 20 ++++++++- crates/driver/src/domain/competition/mod.rs | 17 +++++--- .../src/domain/competition/solution/mod.rs | 5 +++ crates/driver/src/infra/api/mod.rs | 7 ++++ crates/driver/src/infra/config/file/load.rs | 2 + crates/driver/src/infra/config/file/mod.rs | 5 +++ crates/driver/src/infra/solver/mod.rs | 1 + 8 files changed, 86 insertions(+), 12 deletions(-) diff --git a/crates/driver/src/domain/competition/bad_tokens/metrics.rs b/crates/driver/src/domain/competition/bad_tokens/metrics.rs index 8db0be0b82..741c71fad4 100644 --- a/crates/driver/src/domain/competition/bad_tokens/metrics.rs +++ b/crates/driver/src/domain/competition/bad_tokens/metrics.rs @@ -1,11 +1,42 @@ -use {super::Quality, crate::domain::eth}; +use { + super::Quality, + crate::domain::eth, + dashmap::DashMap, + std::{collections::HashSet, sync::Arc}, +}; #[derive(Default)] -pub struct Detector; +pub struct Detector(Arc); + +#[derive(Default)] +struct Inner { + counter: DashMap, +} impl Detector { - pub fn get_quality(&self, _token: eth::TokenAddress) -> Option { - // TODO implement a reasonable heuristic - None + const UNSUPPORTED_THRESHOLD: u64 = 10; + + pub fn get_quality(&self, token: eth::TokenAddress) -> Option { + self.0 + .counter + .get(&token) + .filter(|counter| **counter >= Self::UNSUPPORTED_THRESHOLD) + .map(|_| Quality::Unsupported) + } + + pub fn update_failing_tokens(&self, tokens: HashSet) { + for token in tokens { + self.0 + .counter + .entry(token) + .and_modify(|c| *c += 1) 
+ .or_insert(1); + } + } + + pub fn update_successful_tokens(&self, tokens: HashSet) { + for token in tokens { + self.0.counter.remove(&token); + } } } diff --git a/crates/driver/src/domain/competition/bad_tokens/mod.rs b/crates/driver/src/domain/competition/bad_tokens/mod.rs index 180eea94db..bb80d80871 100644 --- a/crates/driver/src/domain/competition/bad_tokens/mod.rs +++ b/crates/driver/src/domain/competition/bad_tokens/mod.rs @@ -1,7 +1,11 @@ use { crate::domain::{competition::Auction, eth}, futures::StreamExt, - std::{collections::HashMap, fmt, time::Instant}, + std::{ + collections::{HashMap, HashSet}, + fmt, + time::Instant, + }, }; pub mod cache; @@ -53,7 +57,7 @@ impl Detector { /// Enables detection of unsupported tokens based on heuristics. pub fn with_heuristic_detector(&mut self) -> &mut Self { - self.metrics = Some(metrics::Detector); + self.metrics = Some(metrics::Detector::default()); self } @@ -95,6 +99,18 @@ impl Detector { auction } + pub fn encoding_succeeded(&self, tokens: HashSet) { + if let Some(metrics) = &self.metrics { + metrics.update_successful_tokens(tokens); + } + } + + pub fn encoding_failed(&self, tokens: HashSet) { + if let Some(metrics) = &self.metrics { + metrics.update_failing_tokens(tokens); + } + } + fn get_token_quality(&self, token: eth::TokenAddress, now: Instant) -> Option { if let Some(quality) = self.hardcoded.get(&token) { return Some(*quality); diff --git a/crates/driver/src/domain/competition/mod.rs b/crates/driver/src/domain/competition/mod.rs index 2e79c6a100..63b89d9c14 100644 --- a/crates/driver/src/domain/competition/mod.rs +++ b/crates/driver/src/domain/competition/mod.rs @@ -123,6 +123,7 @@ impl Competition { .into_iter() .map(|solution| async move { let id = solution.id().clone(); + let tokens = solution.tokens(); observe::encoding(&id); let settlement = solution .encode( @@ -132,16 +133,22 @@ impl Competition { self.solver.solver_native_token(), ) .await; - (id, settlement) + (id, tokens, settlement) }) .collect::>() - .filter_map(|(id, result)| async move { + .filter_map(|(id, tokens, result)| async move { match result { - Ok(solution) => Some(solution), + Ok(solution) => { + self.bad_tokens.encoding_succeeded(tokens); + Some(solution) + } // don't report on errors coming from solution merging - Err(_err) if id.solutions().len() > 1 => None, + Err(_err) if id.solutions().len() > 1 => { + self.bad_tokens.encoding_failed(tokens); + None + } Err(err) => { - // TODO update metrics of bad token detection + self.bad_tokens.encoding_failed(tokens); observe::encoding_failed(self.solver.name(), &id, &err); notify::encoding_failed(&self.solver, auction.id(), &id, &err); None diff --git a/crates/driver/src/domain/competition/solution/mod.rs b/crates/driver/src/domain/competition/solution/mod.rs index 2f55562900..750b622bba 100644 --- a/crates/driver/src/domain/competition/solution/mod.rs +++ b/crates/driver/src/domain/competition/solution/mod.rs @@ -169,6 +169,11 @@ impl Solution { &self.trades } + /// Returns all the tokens involved in the solution. + pub fn tokens(&self) -> HashSet { + self.prices.keys().cloned().collect() + } + /// Interactions executed by this solution. 
pub fn interactions(&self) -> &[Interaction] { &self.interactions diff --git a/crates/driver/src/infra/api/mod.rs b/crates/driver/src/infra/api/mod.rs index ea4544c4c3..3c1e58ac72 100644 --- a/crates/driver/src/infra/api/mod.rs +++ b/crates/driver/src/infra/api/mod.rs @@ -80,6 +80,13 @@ impl Api { bad_tokens.with_simulation_detector(self.bad_token_detector.clone()); } + if solver + .bad_token_detection() + .enable_heuristic_based_bad_token_detection + { + bad_tokens.with_heuristic_detector(); + } + let router = router.with_state(State(Arc::new(Inner { eth: self.eth.clone(), solver: solver.clone(), diff --git a/crates/driver/src/infra/config/file/load.rs b/crates/driver/src/infra/config/file/load.rs index bb090d3d4e..6070445e8b 100644 --- a/crates/driver/src/infra/config/file/load.rs +++ b/crates/driver/src/infra/config/file/load.rs @@ -110,6 +110,8 @@ pub async fn load(chain: chain::Id, path: &Path) -> infra::Config { .collect(), enable_simulation_based_bad_token_detection: config .enable_simulation_bad_token_detection, + enable_heuristic_based_bad_token_detection: config + .enable_heuristic_based_bad_token_detection, }, } })) diff --git a/crates/driver/src/infra/config/file/mod.rs b/crates/driver/src/infra/config/file/mod.rs index 9f79c01e4f..c62620dda4 100644 --- a/crates/driver/src/infra/config/file/mod.rs +++ b/crates/driver/src/infra/config/file/mod.rs @@ -277,6 +277,11 @@ struct SolverConfig { /// tokens with `trace_callMany` based simulation. #[serde(default)] enable_simulation_bad_token_detection: bool, + + /// Whether or not the solver opted into detecting unsupported + /// tokens with heuristic based simulation. + #[serde(default)] + enable_heuristic_based_bad_token_detection: bool, } #[derive(Clone, Copy, Debug, Default, Deserialize, PartialEq, Serialize)] diff --git a/crates/driver/src/infra/solver/mod.rs b/crates/driver/src/infra/solver/mod.rs index 76281b8f72..196356651a 100644 --- a/crates/driver/src/infra/solver/mod.rs +++ b/crates/driver/src/infra/solver/mod.rs @@ -307,4 +307,5 @@ pub struct BadTokenDetection { /// Tokens that are explicitly allow- or deny-listed. pub tokens_supported: HashMap, pub enable_simulation_based_bad_token_detection: bool, + pub enable_heuristic_based_bad_token_detection: bool, } From e8f6962fe889dcecf3b48e93b0b083b0d914f304 Mon Sep 17 00:00:00 2001 From: ilya Date: Wed, 18 Dec 2024 17:27:34 +0000 Subject: [PATCH 25/34] Docs --- .../src/domain/competition/bad_tokens/metrics.rs | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/crates/driver/src/domain/competition/bad_tokens/metrics.rs b/crates/driver/src/domain/competition/bad_tokens/metrics.rs index 741c71fad4..e0f812970f 100644 --- a/crates/driver/src/domain/competition/bad_tokens/metrics.rs +++ b/crates/driver/src/domain/competition/bad_tokens/metrics.rs @@ -5,6 +5,11 @@ use { std::{collections::HashSet, sync::Arc}, }; +/// Monitors tokens to determine whether they are considered "unsupported" based +/// on the number of consecutive participation in failing settlement encoding. +/// Tokens that consistently participate in failures beyond a predefined +/// threshold are marked as unsupported. Once token participates in a successful +/// settlement encoding, it is removed from the cache. #[derive(Default)] pub struct Detector(Arc); @@ -14,7 +19,9 @@ struct Inner { } impl Detector { - const UNSUPPORTED_THRESHOLD: u64 = 10; + /// Defines the threshold for the number of consecutive unsupported + /// quality detections before a token is considered unsupported. 
+    const UNSUPPORTED_THRESHOLD: u64 = 5;
 
     pub fn get_quality(&self, token: eth::TokenAddress) -> Option<Quality> {
         self.0
@@ -24,6 +31,7 @@ impl Detector {
             .map(|_| Quality::Unsupported)
     }
 
+    /// Increments the counter of failures for each token.
     pub fn update_failing_tokens(&self, tokens: HashSet<eth::TokenAddress>) {
         for token in tokens {
             self.0
@@ -34,6 +42,8 @@ impl Detector {
         }
     }
 
+    /// Removes tokens from the cache since they all participated in a
+    /// successful settlement encoding.
     pub fn update_successful_tokens(&self, tokens: HashSet<eth::TokenAddress>) {
         for token in tokens {
             self.0.counter.remove(&token);

From cd7fa0ff95241971dc7156bb015bbafe2c27d989 Mon Sep 17 00:00:00 2001
From: ilya
Date: Wed, 18 Dec 2024 17:29:31 +0000
Subject: [PATCH 26/34] More docs

---
 crates/driver/src/domain/competition/bad_tokens/mod.rs | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/crates/driver/src/domain/competition/bad_tokens/mod.rs b/crates/driver/src/domain/competition/bad_tokens/mod.rs
index bb80d80871..9fafe0559d 100644
--- a/crates/driver/src/domain/competition/bad_tokens/mod.rs
+++ b/crates/driver/src/domain/competition/bad_tokens/mod.rs
@@ -99,12 +99,14 @@ impl Detector {
         auction
     }
 
+    /// Updates the tokens quality metric for successful operation.
     pub fn encoding_succeeded(&self, tokens: HashSet<eth::TokenAddress>) {
         if let Some(metrics) = &self.metrics {
             metrics.update_successful_tokens(tokens);
         }
     }
 
+    /// Updates the tokens quality metric for failures.
     pub fn encoding_failed(&self, tokens: HashSet<eth::TokenAddress>) {
         if let Some(metrics) = &self.metrics {
             metrics.update_failing_tokens(tokens);

From 838ae161d157afe5ea600ecf60c877bead1f3902 Mon Sep 17 00:00:00 2001
From: ilya
Date: Wed, 18 Dec 2024 20:14:04 +0000
Subject: [PATCH 27/34] Stop incrementing counter once threshold is reached

---
 crates/driver/src/domain/competition/bad_tokens/metrics.rs | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/crates/driver/src/domain/competition/bad_tokens/metrics.rs b/crates/driver/src/domain/competition/bad_tokens/metrics.rs
index e0f812970f..ae95a10987 100644
--- a/crates/driver/src/domain/competition/bad_tokens/metrics.rs
+++ b/crates/driver/src/domain/competition/bad_tokens/metrics.rs
@@ -15,13 +15,13 @@ pub struct Detector(Arc<Inner>);
 
 #[derive(Default)]
 struct Inner {
-    counter: DashMap<eth::TokenAddress, u64>,
+    counter: DashMap<eth::TokenAddress, u8>,
 }
 
 impl Detector {
     /// Defines the threshold for the number of consecutive unsupported
     /// quality detections before a token is considered unsupported.
-    const UNSUPPORTED_THRESHOLD: u64 = 5;
+    const UNSUPPORTED_THRESHOLD: u8 = 5;
 
     pub fn get_quality(&self, token: eth::TokenAddress) -> Option<Quality> {
         self.0
@@ -37,7 +37,7 @@ impl Detector {
             self.0
                 .counter
                 .entry(token)
-                .and_modify(|c| *c += 1)
+                .and_modify(|counter| *counter = Self::UNSUPPORTED_THRESHOLD.min(*counter + 1))
                 .or_insert(1);
         }
     }

From 8e6bc4628fd70163998bdb1f915bedf607fb4bbc Mon Sep 17 00:00:00 2001
From: ilya
Date: Thu, 19 Dec 2024 18:55:55 +0000
Subject: [PATCH 28/34] Reworked logic

---
 .../domain/competition/bad_tokens/metrics.rs  | 70 +++++++++++++------
 1 file changed, 48 insertions(+), 22 deletions(-)

diff --git a/crates/driver/src/domain/competition/bad_tokens/metrics.rs b/crates/driver/src/domain/competition/bad_tokens/metrics.rs
index ae95a10987..e2e31f2fee 100644
--- a/crates/driver/src/domain/competition/bad_tokens/metrics.rs
+++ b/crates/driver/src/domain/competition/bad_tokens/metrics.rs
@@ -6,47 +6,73 @@ use {
 };
 
 /// Monitors tokens to determine whether they are considered "unsupported" based
-/// on the number of consecutive participation in failing settlement encoding.
-/// Tokens that consistently participate in failures beyond a predefined
-/// threshold are marked as unsupported. Once token participates in a successful
-/// settlement encoding, it is removed from the cache.
+/// on the ratio of failing to total settlement encoding attempts. A token must
+/// have participated in at least `REQUIRED_MEASUREMENTS` attempts to be
+/// evaluated. If, at that point, the ratio of failures is greater than or equal
+/// to `FAILURE_RATIO`, the token is considered unsupported.
 #[derive(Default)]
 pub struct Detector(Arc<Inner>);
 
 #[derive(Default)]
 struct Inner {
-    counter: DashMap<eth::TokenAddress, u8>,
+    counter: DashMap<eth::TokenAddress, TokenStatistics>,
+}
+
+#[derive(Default)]
+struct TokenStatistics {
+    attempts: u32,
+    fails: u32,
 }
 
 impl Detector {
-    /// Defines the threshold for the number of consecutive unsupported
-    /// quality detections before a token is considered unsupported.
-    const UNSUPPORTED_THRESHOLD: u8 = 5;
+    /// The ratio of failures to attempts that qualifies a token as unsupported.
+    const FAILURE_RATIO: f64 = 0.9;
+    /// The minimum number of attempts required before evaluating a token’s
+    /// quality.
+    const REQUIRED_MEASUREMENTS: u32 = 20;
 
     pub fn get_quality(&self, token: eth::TokenAddress) -> Option<Quality> {
-        self.0
-            .counter
-            .get(&token)
-            .filter(|counter| **counter >= Self::UNSUPPORTED_THRESHOLD)
-            .map(|_| Quality::Unsupported)
+        let measurements = self.0.counter.get(&token)?;
+        if measurements.attempts >= Self::REQUIRED_MEASUREMENTS
+            && (measurements.fails as f64 / measurements.attempts as f64) >= Self::FAILURE_RATIO
+        {
+            Some(Quality::Unsupported)
+        } else {
+            None
+        }
     }
 
-    /// Increments the counter of failures for each token.
+    /// Updates the tokens that participated in failing settlements by
+    /// incrementing both their attempt count and their failure count.
+    /// If a token doesn't exist in the cache yet, it gets added.
     pub fn update_failing_tokens(&self, tokens: HashSet<eth::TokenAddress>) {
         for token in tokens {
-            self.0
-                .counter
-                .entry(token)
-                .and_modify(|counter| *counter = Self::UNSUPPORTED_THRESHOLD.min(*counter + 1))
-                .or_insert(1);
+            self.update_token(token, true);
         }
     }
 
-    /// Removes tokens from the cache since they all participated in a
-    /// successful settlement encoding.
+    /// Updates the tokens that participated in successful settlements by
+    /// incrementing their attempt count without increasing the failure
+    /// count.
     pub fn update_successful_tokens(&self, tokens: HashSet<eth::TokenAddress>) {
         for token in tokens {
-            self.0.counter.remove(&token);
+            self.update_token(token, false);
        }
     }
+
+    fn update_token(&self, token: eth::TokenAddress, failure: bool) {
+        self.0
+            .counter
+            .entry(token)
+            .and_modify(|counter| {
+                counter.attempts += 1;
+                if failure {
+                    counter.fails += 1;
+                }
+            })
+            .or_insert_with(|| TokenStatistics {
+                attempts: 1,
+                fails: failure as u32,
+            });
+    }
 }

From 95536d45484eee99fcfbd3acd65ee10813cae8a7 Mon Sep 17 00:00:00 2001
From: ilya
Date: Thu, 19 Dec 2024 19:33:53 +0000
Subject: [PATCH 29/34] Remove hashset

---
 .../domain/competition/bad_tokens/metrics.rs  | 63 ++++++++-----------
 .../src/domain/competition/bad_tokens/mod.rs  | 14 ++---
 crates/driver/src/domain/competition/mod.rs   | 15 ++---
 .../src/domain/competition/solution/mod.rs    | 18 +++++-
 4 files changed, 52 insertions(+), 58 deletions(-)

diff --git a/crates/driver/src/domain/competition/bad_tokens/metrics.rs b/crates/driver/src/domain/competition/bad_tokens/metrics.rs
index e2e31f2fee..b385ebe8f0 100644
--- a/crates/driver/src/domain/competition/bad_tokens/metrics.rs
+++ b/crates/driver/src/domain/competition/bad_tokens/metrics.rs
@@ -1,9 +1,4 @@
-use {
-    super::Quality,
-    crate::domain::eth,
-    dashmap::DashMap,
-    std::{collections::HashSet, sync::Arc},
-};
+use {super::Quality, crate::domain::eth, dashmap::DashMap, std::sync::Arc};
 
 /// Monitors tokens to determine whether they are considered "unsupported" based
 /// on the ratio of failing to total settlement encoding attempts. A token must
@@ -42,37 +37,31 @@ impl Detector {
         }
     }
 
-    /// Updates the tokens that participated in failing settlements by
-    /// incrementing both their attempt count and their failure count.
-    /// If a token doesn't exist in the cache yet, it gets added.
-    pub fn update_failing_tokens(&self, tokens: HashSet<eth::TokenAddress>) {
-        for token in tokens {
-            self.update_token(token, true);
-        }
-    }
-
-    /// Updates the tokens that participated in successful settlements by
-    /// incrementing their attempt count without increasing the failure
-    /// count.
-    pub fn update_successful_tokens(&self, tokens: HashSet<eth::TokenAddress>) {
-        for token in tokens {
-            self.update_token(token, false);
-        }
-    }
-
-    fn update_token(&self, token: eth::TokenAddress, failure: bool) {
-        self.0
-            .counter
-            .entry(token)
-            .and_modify(|counter| {
-                counter.attempts += 1;
-                if failure {
-                    counter.fails += 1;
-                }
-            })
-            .or_insert_with(|| TokenStatistics {
-                attempts: 1,
-                fails: failure as u32,
+    /// Updates the tokens that participated in settlements by
+    /// incrementing their attempt count.
+    /// `failure` indicates whether the settlement was successful or not.
+ pub fn update_tokens( + &self, + token_pairs: Vec<(eth::TokenAddress, eth::TokenAddress)>, + failure: bool, + ) { + token_pairs + .into_iter() + .flat_map(|(token_a, token_b)| vec![token_a, token_b]) + .for_each(|token| { + self.0 + .counter + .entry(token) + .and_modify(|counter| { + counter.attempts += 1; + if failure { + counter.fails += 1; + } + }) + .or_insert_with(|| TokenStatistics { + attempts: 1, + fails: failure as u32, + }); }); } } diff --git a/crates/driver/src/domain/competition/bad_tokens/mod.rs b/crates/driver/src/domain/competition/bad_tokens/mod.rs index 9fafe0559d..0a4e21ecdb 100644 --- a/crates/driver/src/domain/competition/bad_tokens/mod.rs +++ b/crates/driver/src/domain/competition/bad_tokens/mod.rs @@ -1,11 +1,7 @@ use { crate::domain::{competition::Auction, eth}, futures::StreamExt, - std::{ - collections::{HashMap, HashSet}, - fmt, - time::Instant, - }, + std::{collections::HashMap, fmt, time::Instant}, }; pub mod cache; @@ -100,16 +96,16 @@ impl Detector { } /// Updates the tokens quality metric for successful operation. - pub fn encoding_succeeded(&self, tokens: HashSet) { + pub fn encoding_succeeded(&self, token_pairs: Vec<(eth::TokenAddress, eth::TokenAddress)>) { if let Some(metrics) = &self.metrics { - metrics.update_successful_tokens(tokens); + metrics.update_tokens(token_pairs, false); } } /// Updates the tokens quality metric for failures. - pub fn encoding_failed(&self, tokens: HashSet) { + pub fn encoding_failed(&self, token_pairs: Vec<(eth::TokenAddress, eth::TokenAddress)>) { if let Some(metrics) = &self.metrics { - metrics.update_failing_tokens(tokens); + metrics.update_tokens(token_pairs, true); } } diff --git a/crates/driver/src/domain/competition/mod.rs b/crates/driver/src/domain/competition/mod.rs index 63b89d9c14..3ee1d88b3e 100644 --- a/crates/driver/src/domain/competition/mod.rs +++ b/crates/driver/src/domain/competition/mod.rs @@ -123,7 +123,7 @@ impl Competition { .into_iter() .map(|solution| async move { let id = solution.id().clone(); - let tokens = solution.tokens(); + let token_pairs = solution.token_pairs(); observe::encoding(&id); let settlement = solution .encode( @@ -133,22 +133,19 @@ impl Competition { self.solver.solver_native_token(), ) .await; - (id, tokens, settlement) + (id, token_pairs, settlement) }) .collect::>() - .filter_map(|(id, tokens, result)| async move { + .filter_map(|(id, token_pairs, result)| async move { match result { Ok(solution) => { - self.bad_tokens.encoding_succeeded(tokens); + self.bad_tokens.encoding_succeeded(token_pairs); Some(solution) } // don't report on errors coming from solution merging - Err(_err) if id.solutions().len() > 1 => { - self.bad_tokens.encoding_failed(tokens); - None - } + Err(_err) if id.solutions().len() > 1 => None, Err(err) => { - self.bad_tokens.encoding_failed(tokens); + self.bad_tokens.encoding_failed(token_pairs); observe::encoding_failed(self.solver.name(), &id, &err); notify::encoding_failed(&self.solver, auction.id(), &id, &err); None diff --git a/crates/driver/src/domain/competition/solution/mod.rs b/crates/driver/src/domain/competition/solution/mod.rs index 750b622bba..800e3fb892 100644 --- a/crates/driver/src/domain/competition/solution/mod.rs +++ b/crates/driver/src/domain/competition/solution/mod.rs @@ -169,9 +169,21 @@ impl Solution { &self.trades } - /// Returns all the tokens involved in the solution. - pub fn tokens(&self) -> HashSet { - self.prices.keys().cloned().collect() + /// Returns all the token pairs involved in the solution. 
+ pub fn token_pairs(&self) -> Vec<(TokenAddress, TokenAddress)> { + self.trades + .iter() + .map(|trade| match trade { + Trade::Fulfillment(fulfillment) => { + let order = fulfillment.order(); + (order.sell.token, order.buy.token) + } + Trade::Jit(jit) => { + let order = jit.order(); + (order.sell.token, order.buy.token) + } + }) + .collect() } /// Interactions executed by this solution. From ac3ad22108e42d9539481253507ccd887eb12834 Mon Sep 17 00:00:00 2001 From: ilya Date: Thu, 19 Dec 2024 19:34:38 +0000 Subject: [PATCH 30/34] Typo --- crates/driver/src/infra/config/file/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/driver/src/infra/config/file/mod.rs b/crates/driver/src/infra/config/file/mod.rs index c62620dda4..d9cb151632 100644 --- a/crates/driver/src/infra/config/file/mod.rs +++ b/crates/driver/src/infra/config/file/mod.rs @@ -279,7 +279,7 @@ struct SolverConfig { enable_simulation_bad_token_detection: bool, /// Whether or not the solver opted into detecting unsupported - /// tokens with heuristic based simulation. + /// tokens with heuristic based detection. #[serde(default)] enable_heuristic_based_bad_token_detection: bool, } From ec352220cc76fe9599fc6014b80fca16dc169041 Mon Sep 17 00:00:00 2001 From: ilya Date: Fri, 20 Dec 2024 09:46:19 +0000 Subject: [PATCH 31/34] Review comments --- .../domain/competition/bad_tokens/metrics.rs | 29 ++++++++----------- .../src/domain/competition/bad_tokens/mod.rs | 8 ++--- crates/driver/src/domain/competition/mod.rs | 4 +-- crates/driver/src/infra/api/mod.rs | 2 +- 4 files changed, 19 insertions(+), 24 deletions(-) diff --git a/crates/driver/src/domain/competition/bad_tokens/metrics.rs b/crates/driver/src/domain/competition/bad_tokens/metrics.rs index b385ebe8f0..662d464567 100644 --- a/crates/driver/src/domain/competition/bad_tokens/metrics.rs +++ b/crates/driver/src/domain/competition/bad_tokens/metrics.rs @@ -26,15 +26,12 @@ impl Detector { /// quality. const REQUIRED_MEASUREMENTS: u32 = 20; - pub fn get_quality(&self, token: eth::TokenAddress) -> Option { - let measurements = self.0.counter.get(&token)?; - if measurements.attempts >= Self::REQUIRED_MEASUREMENTS - && (measurements.fails as f64 / measurements.attempts as f64) >= Self::FAILURE_RATIO - { - Some(Quality::Unsupported) - } else { - None - } + pub fn get_quality(&self, token: ð::TokenAddress) -> Option { + let measurements = self.0.counter.get(token)?; + let is_unsupported = measurements.attempts >= Self::REQUIRED_MEASUREMENTS + && (measurements.fails as f64 / measurements.attempts as f64) >= Self::FAILURE_RATIO; + + is_unsupported.then_some(Quality::Unsupported) } /// Updates the tokens that participated in settlements by @@ -42,25 +39,23 @@ impl Detector { /// `failure` indicates whether the settlement was successful or not. 
pub fn update_tokens( &self, - token_pairs: Vec<(eth::TokenAddress, eth::TokenAddress)>, + token_pairs: &[(eth::TokenAddress, eth::TokenAddress)], failure: bool, ) { token_pairs - .into_iter() - .flat_map(|(token_a, token_b)| vec![token_a, token_b]) + .iter() + .flat_map(|(token_a, token_b)| [token_a, token_b]) .for_each(|token| { self.0 .counter - .entry(token) + .entry(*token) .and_modify(|counter| { counter.attempts += 1; - if failure { - counter.fails += 1; - } + counter.fails += u32::from(failure) }) .or_insert_with(|| TokenStatistics { attempts: 1, - fails: failure as u32, + fails: u32::from(failure), }); }); } diff --git a/crates/driver/src/domain/competition/bad_tokens/mod.rs b/crates/driver/src/domain/competition/bad_tokens/mod.rs index 0a4e21ecdb..4b27cad388 100644 --- a/crates/driver/src/domain/competition/bad_tokens/mod.rs +++ b/crates/driver/src/domain/competition/bad_tokens/mod.rs @@ -52,7 +52,7 @@ impl Detector { } /// Enables detection of unsupported tokens based on heuristics. - pub fn with_heuristic_detector(&mut self) -> &mut Self { + pub fn enable_heuristic_detector(&mut self) -> &mut Self { self.metrics = Some(metrics::Detector::default()); self } @@ -96,14 +96,14 @@ impl Detector { } /// Updates the tokens quality metric for successful operation. - pub fn encoding_succeeded(&self, token_pairs: Vec<(eth::TokenAddress, eth::TokenAddress)>) { + pub fn encoding_succeeded(&self, token_pairs: &[(eth::TokenAddress, eth::TokenAddress)]) { if let Some(metrics) = &self.metrics { metrics.update_tokens(token_pairs, false); } } /// Updates the tokens quality metric for failures. - pub fn encoding_failed(&self, token_pairs: Vec<(eth::TokenAddress, eth::TokenAddress)>) { + pub fn encoding_failed(&self, token_pairs: &[(eth::TokenAddress, eth::TokenAddress)]) { if let Some(metrics) = &self.metrics { metrics.update_tokens(token_pairs, true); } @@ -121,7 +121,7 @@ impl Detector { } if let Some(metrics) = &self.metrics { - return metrics.get_quality(token); + return metrics.get_quality(&token); } None diff --git a/crates/driver/src/domain/competition/mod.rs b/crates/driver/src/domain/competition/mod.rs index 3ee1d88b3e..afc2d96876 100644 --- a/crates/driver/src/domain/competition/mod.rs +++ b/crates/driver/src/domain/competition/mod.rs @@ -139,13 +139,13 @@ impl Competition { .filter_map(|(id, token_pairs, result)| async move { match result { Ok(solution) => { - self.bad_tokens.encoding_succeeded(token_pairs); + self.bad_tokens.encoding_succeeded(&token_pairs); Some(solution) } // don't report on errors coming from solution merging Err(_err) if id.solutions().len() > 1 => None, Err(err) => { - self.bad_tokens.encoding_failed(token_pairs); + self.bad_tokens.encoding_failed(&token_pairs); observe::encoding_failed(self.solver.name(), &id, &err); notify::encoding_failed(&self.solver, auction.id(), &id, &err); None diff --git a/crates/driver/src/infra/api/mod.rs b/crates/driver/src/infra/api/mod.rs index 3c1e58ac72..ea4506110f 100644 --- a/crates/driver/src/infra/api/mod.rs +++ b/crates/driver/src/infra/api/mod.rs @@ -84,7 +84,7 @@ impl Api { .bad_token_detection() .enable_heuristic_based_bad_token_detection { - bad_tokens.with_heuristic_detector(); + bad_tokens.enable_heuristic_detector(); } let router = router.with_state(State(Arc::new(Inner { From 4b2de81f685fbc941b028437616622fc4f2a6697 Mon Sep 17 00:00:00 2001 From: ilya Date: Fri, 20 Dec 2024 10:25:29 +0000 Subject: [PATCH 32/34] Shared detector --- crates/driver/src/domain/competition/bad_tokens/metrics.rs | 2 +- 
crates/driver/src/domain/competition/bad_tokens/mod.rs | 4 ++-- crates/driver/src/infra/api/mod.rs | 6 ++++-- crates/driver/src/infra/config/file/load.rs | 2 +- crates/driver/src/infra/solver/mod.rs | 2 +- 5 files changed, 9 insertions(+), 7 deletions(-) diff --git a/crates/driver/src/domain/competition/bad_tokens/metrics.rs b/crates/driver/src/domain/competition/bad_tokens/metrics.rs index 662d464567..10eb559ada 100644 --- a/crates/driver/src/domain/competition/bad_tokens/metrics.rs +++ b/crates/driver/src/domain/competition/bad_tokens/metrics.rs @@ -5,7 +5,7 @@ use {super::Quality, crate::domain::eth, dashmap::DashMap, std::sync::Arc}; /// have participated in at least `REQUIRED_MEASUREMENTS` attempts to be /// evaluated. If, at that point, the ratio of failures is greater than or equal /// to `FAILURE_RATIO`, the token is considered unsupported. -#[derive(Default)] +#[derive(Default, Clone)] pub struct Detector(Arc); #[derive(Default)] diff --git a/crates/driver/src/domain/competition/bad_tokens/mod.rs b/crates/driver/src/domain/competition/bad_tokens/mod.rs index 4b27cad388..52c9f0fb59 100644 --- a/crates/driver/src/domain/competition/bad_tokens/mod.rs +++ b/crates/driver/src/domain/competition/bad_tokens/mod.rs @@ -52,8 +52,8 @@ impl Detector { } /// Enables detection of unsupported tokens based on heuristics. - pub fn enable_heuristic_detector(&mut self) -> &mut Self { - self.metrics = Some(metrics::Detector::default()); + pub fn with_metrics_detector(&mut self, detector: metrics::Detector) -> &mut Self { + self.metrics = Some(detector); self } diff --git a/crates/driver/src/infra/api/mod.rs b/crates/driver/src/infra/api/mod.rs index ea4506110f..c6f57364d3 100644 --- a/crates/driver/src/infra/api/mod.rs +++ b/crates/driver/src/infra/api/mod.rs @@ -58,6 +58,8 @@ impl Api { app = routes::metrics(app); app = routes::healthz(app); + let metrics_bad_token_detector = bad_tokens::metrics::Detector::default(); + // Multiplex each solver as part of the API. Multiple solvers are multiplexed // on the same driver so only one liquidity collector collects the liquidity // for all of them. This is important because liquidity collection is @@ -82,9 +84,9 @@ impl Api { if solver .bad_token_detection() - .enable_heuristic_based_bad_token_detection + .enable_metrics_based_bad_token_detection { - bad_tokens.enable_heuristic_detector(); + bad_tokens.with_metrics_detector(metrics_bad_token_detector.clone()); } let router = router.with_state(State(Arc::new(Inner { diff --git a/crates/driver/src/infra/config/file/load.rs b/crates/driver/src/infra/config/file/load.rs index 6070445e8b..137d162042 100644 --- a/crates/driver/src/infra/config/file/load.rs +++ b/crates/driver/src/infra/config/file/load.rs @@ -110,7 +110,7 @@ pub async fn load(chain: chain::Id, path: &Path) -> infra::Config { .collect(), enable_simulation_based_bad_token_detection: config .enable_simulation_bad_token_detection, - enable_heuristic_based_bad_token_detection: config + enable_metrics_based_bad_token_detection: config .enable_heuristic_based_bad_token_detection, }, } diff --git a/crates/driver/src/infra/solver/mod.rs b/crates/driver/src/infra/solver/mod.rs index 196356651a..84adcb6d1b 100644 --- a/crates/driver/src/infra/solver/mod.rs +++ b/crates/driver/src/infra/solver/mod.rs @@ -307,5 +307,5 @@ pub struct BadTokenDetection { /// Tokens that are explicitly allow- or deny-listed. 
     pub tokens_supported: HashMap<eth::TokenAddress, bad_tokens::Quality>,
     pub enable_simulation_based_bad_token_detection: bool,
-    pub enable_heuristic_based_bad_token_detection: bool,
+    pub enable_metrics_based_bad_token_detection: bool,
 }

From 81348378a4c1585baa3bf79771db91f3dcb0aa2d Mon Sep 17 00:00:00 2001
From: ilya
Date: Fri, 20 Dec 2024 10:36:53 +0000
Subject: [PATCH 33/34] Naming

---
 crates/driver/src/infra/config/file/load.rs | 2 +-
 crates/driver/src/infra/config/file/mod.rs  | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/crates/driver/src/infra/config/file/load.rs b/crates/driver/src/infra/config/file/load.rs
index 137d162042..4fdbfc55bd 100644
--- a/crates/driver/src/infra/config/file/load.rs
+++ b/crates/driver/src/infra/config/file/load.rs
@@ -110,7 +110,7 @@ pub async fn load(chain: chain::Id, path: &Path) -> infra::Config {
                     .collect(),
                 enable_simulation_based_bad_token_detection: config
                     .enable_simulation_bad_token_detection,
                 enable_metrics_based_bad_token_detection: config
-                    .enable_heuristic_based_bad_token_detection,
+                    .enable_metrics_based_bad_token_detection,
             },
         }
     }))
diff --git a/crates/driver/src/infra/config/file/mod.rs b/crates/driver/src/infra/config/file/mod.rs
index d9cb151632..b3d307e291 100644
--- a/crates/driver/src/infra/config/file/mod.rs
+++ b/crates/driver/src/infra/config/file/mod.rs
@@ -279,9 +279,9 @@ struct SolverConfig {
     enable_simulation_bad_token_detection: bool,
 
     /// Whether or not the solver opted into detecting unsupported
-    /// tokens with heuristic based detection.
+    /// tokens with metrics-based detection.
     #[serde(default)]
-    enable_heuristic_based_bad_token_detection: bool,
+    enable_metrics_based_bad_token_detection: bool,
 }

From 444e92e61063039bfb461bad11d1bbbd6c588291 Mon Sep 17 00:00:00 2001
From: ilya
Date: Fri, 20 Dec 2024 10:40:12 +0000
Subject: [PATCH 34/34] Typo

---
 crates/driver/src/infra/config/file/load.rs | 2 +-
 crates/driver/src/infra/config/file/mod.rs  | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/crates/driver/src/infra/config/file/load.rs b/crates/driver/src/infra/config/file/load.rs
index 4fdbfc55bd..81908c1fc2 100644
--- a/crates/driver/src/infra/config/file/load.rs
+++ b/crates/driver/src/infra/config/file/load.rs
@@ -111,7 +111,7 @@ pub async fn load(chain: chain::Id, path: &Path) -> infra::Config {
                 enable_simulation_based_bad_token_detection: config
                     .enable_simulation_bad_token_detection,
                 enable_metrics_based_bad_token_detection: config
-                    .enable_metrics_based_bad_token_detection,
+                    .enable_metrics_bad_token_detection,
             },
         }
     }))
diff --git a/crates/driver/src/infra/config/file/mod.rs b/crates/driver/src/infra/config/file/mod.rs
index b3d307e291..2468361902 100644
--- a/crates/driver/src/infra/config/file/mod.rs
+++ b/crates/driver/src/infra/config/file/mod.rs
@@ -281,7 +281,7 @@ struct SolverConfig {
     /// Whether or not the solver opted into detecting unsupported
     /// tokens with metrics-based detection.
     #[serde(default)]
-    enable_metrics_based_bad_token_detection: bool,
+    enable_metrics_bad_token_detection: bool,
 }
 
 #[derive(Clone, Copy, Debug, Default, Deserialize, PartialEq, Serialize)]
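
As a closing note on the behavior the series converges on, the following is a minimal, self-contained sketch of the metrics-based failure-ratio heuristic. The constants FAILURE_RATIO = 0.9 and REQUIRED_MEASUREMENTS = 20 are taken from the patches above; MetricsSketch, the plain HashMap, and the string token names are illustrative stand-ins (not the driver's actual Detector, DashMap, or eth::TokenAddress types), so treat this as a sketch of the idea rather than the real implementation.

use std::collections::HashMap;

// Per-token statistics, mirroring the TokenStatistics struct introduced in the patches.
#[derive(Default)]
struct TokenStatistics {
    attempts: u32,
    fails: u32,
}

// Stand-in for the metrics-based detector: a token is flagged as unsupported
// once it has at least REQUIRED_MEASUREMENTS recorded attempts and the
// failure ratio reaches FAILURE_RATIO.
#[derive(Default)]
struct MetricsSketch {
    counter: HashMap<String, TokenStatistics>,
}

impl MetricsSketch {
    const FAILURE_RATIO: f64 = 0.9;
    const REQUIRED_MEASUREMENTS: u32 = 20;

    // Records one settlement encoding attempt for every listed token.
    fn record(&mut self, tokens: &[&str], failed: bool) {
        for token in tokens {
            let stats = self.counter.entry((*token).to_string()).or_default();
            stats.attempts += 1;
            stats.fails += u32::from(failed);
        }
    }

    // A token only counts as unsupported after enough measurements exist
    // and (almost) all of them failed.
    fn is_unsupported(&self, token: &str) -> bool {
        self.counter
            .get(token)
            .map(|s| {
                s.attempts >= Self::REQUIRED_MEASUREMENTS
                    && s.fails as f64 / s.attempts as f64 >= Self::FAILURE_RATIO
            })
            .unwrap_or(false)
    }
}

fn main() {
    let mut detector = MetricsSketch::default();
    // 20 consecutive failing encodings cross both thresholds.
    for _ in 0..20 {
        detector.record(&["FEE_ON_TRANSFER_TOKEN", "WETH"], true);
    }
    assert!(detector.is_unsupported("FEE_ON_TRANSFER_TOKEN"));
    // A token with too few measurements is never flagged, regardless of outcome.
    detector.record(&["USDC", "WETH"], false);
    assert!(!detector.is_unsupported("USDC"));
    println!("metrics heuristic sketch ok");
}

The ratio-plus-minimum-measurements design means a token is only flagged after sustained evidence, so a few early failures on an otherwise healthy token never permanently blacklist it.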