Skip to content

Commit

Permalink
feat(multichain): token search endpoint (#1164)
Browse files Browse the repository at this point in the history
* chore: add logs

* feat: add tokens for quick search

* feat: add paginated search for tokens

* fix: remove chain_id from import entities

* feat: migrate to api-client-framework
  • Loading branch information
lok52 authored Jan 9, 2025
1 parent 921dcf2 commit e1a864f
Show file tree
Hide file tree
Showing 26 changed files with 1,361 additions and 594 deletions.
1,256 changes: 814 additions & 442 deletions multichain-aggregator/Cargo.lock

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions multichain-aggregator/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ tracing-subscriber = "0.3.18"
# misc
alloy-primitives = "0.8"
anyhow = "1.0"
api-client-framework = { git = "https://github.com/blockscout/blockscout-rs", rev = "d60c1d8" }
async-std = { version = "1", features = ["attributes", "tokio1"] }
async-trait = "0.1"
blockscout-chains = { git = "https://github.com/blockscout/blockscout-rs", version = "0.2.0" }
Expand Down
2 changes: 2 additions & 0 deletions multichain-aggregator/multichain-aggregator-logic/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -7,13 +7,15 @@ edition = "2021"
multichain-aggregator-entity = { workspace = true }
multichain-aggregator-proto = { workspace = true }
anyhow = { workspace = true }
api-client-framework = { workspace = true }
blockscout-chains = { workspace = true }
tracing = { workspace = true }
sea-orm = { workspace = true }
alloy-primitives = { workspace = true }
regex = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
serde_with = { workspace = true }
thiserror = { workspace = true }
tonic = { workspace = true }
tokio = { workspace = true }
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
use api_client_framework::{
serialize_query, Endpoint, Error, HttpApiClient as Client, HttpApiClientConfig,
};
use reqwest::Method;
use serde::{Deserialize, Serialize};
use url::Url;

/// Construct an HTTP client for the dapp marketplace service rooted at
/// `url`, using the framework's default configuration (timeouts, middlewares).
///
/// Returns an `Error` if the client cannot be built from the given base URL.
pub fn new_client(url: Url) -> Result<Client, Error> {
    Client::new(url, HttpApiClientConfig::default())
}

/// Endpoint descriptor for the marketplace dapp search API
/// (`GET /api/v1/marketplace/dapps:search`). See the `Endpoint` impl below.
pub struct SearchDapps {
    // Query-string parameters serialized by `Endpoint::query`.
    pub params: SearchDappsParams,
}

/// Query parameters for [`SearchDapps`]; serialized into the URL query string.
#[derive(Serialize, Clone, Debug, Default, PartialEq)]
pub struct SearchDappsParams {
    // Free-text search query forwarded to the dapp marketplace service.
    pub query: String,
}

impl Endpoint for SearchDapps {
    // The marketplace returns a flat list of dapps annotated with chain ids.
    type Response = Vec<DappWithChainId>;

    // Search is a read-only operation.
    fn method(&self) -> Method {
        Method::GET
    }

    // Custom-method style path (`:search`) on the marketplace dapps resource.
    fn path(&self) -> String {
        String::from("/api/v1/marketplace/dapps:search")
    }

    // Serialize `params` into the request's query string.
    fn query(&self) -> Option<String> {
        serialize_query(&self.params)
    }
}

/// A marketplace dapp together with the chain it is deployed on,
/// as returned by the dapp search endpoint (camelCase JSON).
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DappWithChainId {
    pub dapp: Dapp,
    // Kept as a string here; parsed into a numeric ChainId downstream if needed.
    pub chain_id: String,
}

/// Core dapp metadata returned by the marketplace (camelCase JSON).
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Dapp {
    pub id: String,
    pub title: String,
    // NOTE(review): presumably a logo URL — confirm against the marketplace API.
    pub logo: String,
    pub short_description: String,
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
// HTTP clients for external services used by quick search.
pub mod dapp;
pub mod token_info;
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
use crate::ChainId;
use api_client_framework::{
serialize_query, Endpoint, Error, HttpApiClient as Client, HttpApiClientConfig,
};
use reqwest::Method;
use serde::{Deserialize, Serialize};
use url::Url;

/// Construct an HTTP client for the token-info service rooted at `url`,
/// using the framework's default configuration.
///
/// Returns an `Error` if the client cannot be built from the given base URL.
pub fn new_client(url: Url) -> Result<Client, Error> {
    Client::new(url, HttpApiClientConfig::default())
}

/// Endpoint descriptor for the token-info search API
/// (`GET /api/v1/token-infos:search`). See the `Endpoint` impl below.
pub struct SearchTokenInfos {
    // Query-string parameters serialized by `Endpoint::query`.
    pub params: SearchTokenInfosParams,
}

/// Query parameters for [`SearchTokenInfos`]. `None` fields are omitted
/// from the query string entirely (via `skip_serializing_none`).
#[serde_with::skip_serializing_none]
#[derive(Serialize, Clone, Debug, Default, PartialEq)]
pub struct SearchTokenInfosParams {
    // Free-text search query (address, name, or symbol — service-defined).
    pub query: String,
    // Optional filter restricting results to a single chain.
    pub chain_id: Option<ChainId>,
    // Optional page size; service default applies when `None`.
    pub page_size: Option<u32>,
    // Opaque continuation token from a previous response's pagination.
    pub page_token: Option<String>,
}

impl Endpoint for SearchTokenInfos {
    // Paginated search response: a page of token infos plus continuation params.
    type Response = TokenInfoSearchResponse;

    // Search is a read-only operation.
    fn method(&self) -> Method {
        Method::GET
    }

    // Custom-method style path (`:search`) on the token-infos resource.
    fn path(&self) -> String {
        String::from("/api/v1/token-infos:search")
    }

    // Serialize `params` into the request's query string.
    fn query(&self) -> Option<String> {
        serialize_query(&self.params)
    }
}

/// A single token record from the token-info service (camelCase JSON).
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct TokenInfo {
    pub token_address: String,
    // Kept as a string here; parsed into a numeric ChainId downstream if needed.
    pub chain_id: String,
    pub icon_url: String,
    // Name/symbol may be absent for tokens the service has not enriched.
    pub token_name: Option<String>,
    pub token_symbol: Option<String>,
}

/// One page of token search results.
///
/// NOTE(review): unlike `TokenInfo`, this struct has no
/// `#[serde(rename_all = "camelCase")]`, so it expects snake_case keys
/// (`token_infos`, `next_page_params`). Confirm this matches the service's
/// actual JSON — if the service emits camelCase, deserialization will fail.
#[derive(Debug, Deserialize)]
pub struct TokenInfoSearchResponse {
    pub token_infos: Vec<TokenInfo>,
    // `None` on the last page; otherwise carries the cursor for the next request.
    pub next_page_params: Option<Pagination>,
}

/// Continuation parameters for fetching the next page; feed these back into
/// `SearchTokenInfosParams.page_token` / `page_size`.
#[derive(Debug, Deserialize)]
pub struct Pagination {
    // Opaque cursor issued by the service.
    pub page_token: String,
    pub page_size: u32,
}

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,8 @@ pub enum ParseError {
ParseUuid(#[from] uuid::Error),
#[error("parse error: invalid slice")]
TryFromSlice(#[from] core::array::TryFromSliceError),
#[error("parse error: {0}")]
Custom(String),
}

impl From<ServiceError> for tonic::Status {
Expand Down
18 changes: 15 additions & 3 deletions multichain-aggregator/multichain-aggregator-logic/src/import.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,21 @@ pub async fn batch_import(
request: BatchImportRequest,
) -> Result<(), ServiceError> {
let tx = db.begin().await?;
repository::addresses::upsert_many(&tx, request.addresses).await?;
repository::block_ranges::upsert_many(&tx, request.block_ranges).await?;
repository::hashes::upsert_many(&tx, request.hashes).await?;
repository::addresses::upsert_many(&tx, request.addresses)
.await
.inspect_err(|e| {
tracing::error!(error = ?e, "failed to upsert addresses");
})?;
repository::block_ranges::upsert_many(&tx, request.block_ranges)
.await
.inspect_err(|e| {
tracing::error!(error = ?e, "failed to upsert block ranges");
})?;
repository::hashes::upsert_many(&tx, request.hashes)
.await
.inspect_err(|e| {
tracing::error!(error = ?e, "failed to upsert hashes");
})?;
tx.commit().await?;
Ok(())
}
5 changes: 3 additions & 2 deletions multichain-aggregator/multichain-aggregator-logic/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
pub mod api_key_manager;
pub mod dapp_client;
pub mod clients;
pub mod error;
mod import;
mod proto;
Expand All @@ -9,5 +9,6 @@ mod types;

pub use import::batch_import;
pub use types::{
api_keys::ApiKey, batch_import_request::BatchImportRequest, chains::Chain, ChainId,
api_keys::ApiKey, batch_import_request::BatchImportRequest, chains::Chain, token_info::Token,
ChainId,
};
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,8 @@ use alloy_primitives::Address as AddressAlloy;
use entity::addresses::{ActiveModel, Column, Entity, Model};
use regex::Regex;
use sea_orm::{
prelude::Expr, sea_query::OnConflict, ActiveValue::NotSet, ConnectionTrait, DbErr, EntityTrait,
IntoSimpleExpr, Iterable, QueryFilter, QueryOrder, QuerySelect,
prelude::Expr, sea_query::OnConflict, ActiveValue::NotSet, ColumnTrait, ConnectionTrait, DbErr,
EntityTrait, IntoSimpleExpr, Iterable, QueryFilter, QueryOrder, QuerySelect,
};
use std::sync::OnceLock;

Expand Down Expand Up @@ -54,14 +54,15 @@ pub async fn search_by_query<C>(db: &C, q: &str) -> Result<Vec<Address>, Service
where
C: ConnectionTrait,
{
search_by_query_paginated(db, q, None, 100)
search_by_query_paginated(db, q, None, None, 100)
.await
.map(|(addresses, _)| addresses)
}

pub async fn search_by_query_paginated<C>(
db: &C,
q: &str,
chain_id: Option<ChainId>,
page_token: Option<(AddressAlloy, ChainId)>,
limit: u64,
) -> Result<(Vec<Address>, Option<(AddressAlloy, ChainId)>), ServiceError>
Expand All @@ -84,6 +85,10 @@ where
.order_by_asc(Column::ChainId)
.limit(limit + 1);

if let Some(chain_id) = chain_id {
query = query.filter(Column::ChainId.eq(chain_id));
}

if hex_regex().is_match(q) {
query = query.filter(Expr::cust_with_expr(
"encode(hash, 'hex') LIKE $1",
Expand Down
46 changes: 42 additions & 4 deletions multichain-aggregator/multichain-aggregator-logic/src/search.rs
Original file line number Diff line number Diff line change
@@ -1,17 +1,23 @@
use crate::{
dapp_client::DappClient,
clients::{
dapp::{SearchDapps, SearchDappsParams},
token_info::{SearchTokenInfos, SearchTokenInfosParams},
},
error::ServiceError,
repository::{addresses, block_ranges, hashes},
types::{
chains::Chain,
dapp::MarketplaceDapp,
search_results::{ChainSearchResult, SearchResults},
token_info::Token,
ChainId,
},
};
use api_client_framework::HttpApiClient;
use sea_orm::DatabaseConnection;
use std::collections::BTreeMap;
use tokio::join;
use tracing::instrument;

macro_rules! populate_search_results {
($target:expr, $explorers:expr, $from:expr, $field:ident) => {
Expand All @@ -28,19 +34,37 @@ macro_rules! populate_search_results {
};
}

#[instrument(skip_all, level = "info", fields(query = query))]
pub async fn quick_search(
db: &DatabaseConnection,
dapp_client: &DappClient,
dapp_client: &HttpApiClient,
token_info_client: &HttpApiClient,
query: String,
chains: &[Chain],
) -> Result<SearchResults, ServiceError> {
let raw_query = query.trim();

let (hashes, block_numbers, addresses, dapps) = join!(
let dapp_search_endpoint = SearchDapps {
params: SearchDappsParams {
query: raw_query.to_string(),
},
};

let token_info_search_endpoint = SearchTokenInfos {
params: SearchTokenInfosParams {
query: raw_query.to_string(),
chain_id: None,
page_size: Some(100),
page_token: None,
},
};

let (hashes, block_numbers, addresses, dapps, token_infos) = join!(
hashes::search_by_query(db, raw_query),
block_ranges::search_by_query(db, raw_query),
addresses::search_by_query(db, raw_query),
dapp_client.search_dapps(raw_query),
dapp_client.request(&dapp_search_endpoint),
token_info_client.request(&token_info_search_endpoint),
);

let explorers: BTreeMap<ChainId, String> = chains
Expand Down Expand Up @@ -91,5 +115,19 @@ pub async fn quick_search(
}
}

match token_infos {
Ok(token_infos) => {
let tokens: Vec<Token> = token_infos
.token_infos
.into_iter()
.filter_map(|t| t.try_into().ok())
.collect();
populate_search_results!(results, explorers, tokens, tokens);
}
Err(err) => {
tracing::error!(error = ?err, "failed to search token infos");
}
}

Ok(results)
}
Loading

0 comments on commit e1a864f

Please sign in to comment.