Skip to content

Commit

Permalink
fix: allow multiple client tokens at startup (#188)
Browse files Browse the repository at this point in the history
  • Loading branch information
sighphyre authored May 18, 2023
1 parent 940bb5b commit 808db80
Show file tree
Hide file tree
Showing 3 changed files with 103 additions and 34 deletions.
2 changes: 1 addition & 1 deletion server/src/builder.rs
Original file line number Diff line number Diff line change
Expand Up @@ -173,7 +173,7 @@ async fn build_edge(args: &EdgeArgs) -> EdgeResult<EdgeInfo> {
.iter()
.filter(|candidate| candidate.value().token_type == Some(TokenType::Client))
{
let _ = feature_refresher
feature_refresher
.register_token_for_refresh(validated_token.clone(), None)
.await;
}
Expand Down
87 changes: 57 additions & 30 deletions server/src/http/feature_refresher.rs
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ impl FeatureRefresher {
}

async fn register_and_hydrate_token(&self, token: &EdgeToken) -> EdgeResult<ClientFeatures> {
self.register_token_for_refresh(token.clone(), None).await?;
self.register_token_for_refresh(token.clone(), None).await;
self.hydrate_new_tokens().await;
self.get_filtered_features(token)
.ok_or(EdgeError::ClientFeaturesFetchError(FeatureError::Retriable))
Expand Down Expand Up @@ -145,12 +145,8 @@ impl FeatureRefresher {
}

///
/// Registers a token for refresh, returns true if token did not exist and as such needs hydration before we can guarantee that we have data for it
pub async fn register_token_for_refresh(
&self,
token: EdgeToken,
etag: Option<EntityTag>,
) -> EdgeResult<()> {
/// Registers a token for refresh, the token will be discarded if it can be subsumed by another previously registered token
pub async fn register_token_for_refresh(&self, token: EdgeToken, etag: Option<EntityTag>) {
if !self.tokens_to_refresh.contains_key(&token.token) {
let _ = self
.unleash_client
Expand All @@ -173,9 +169,6 @@ impl FeatureRefresher {
.insert(refreshes.token.token.clone(), refreshes.clone());
}
self.tokens_to_refresh.retain(|key, _| keys.contains(key));
Ok(())
} else {
Ok(())
}
}

Expand Down Expand Up @@ -350,13 +343,47 @@ mod tests {
);
let token =
EdgeToken::try_from("*:development.abcdefghijklmnopqrstuvwxyz".to_string()).unwrap();
let _ = feature_refresher
feature_refresher
.register_token_for_refresh(token, None)
.await;

assert_eq!(feature_refresher.tokens_to_refresh.len(), 1);
}

#[tokio::test]
pub async fn registering_multiple_tokens_with_same_environment_reduces_tokens_to_valid_minimal_set(
) {
    // Two wildcard tokens for the same environment: the second is subsumed by
    // the first, so only a single refresh entry should survive registration.
    let unleash_client = UnleashClient::from_url(
        Url::parse("http://localhost:4242").unwrap(),
        false,
        None,
        None,
    );

    let feature_refresher = FeatureRefresher::new(
        Arc::new(unleash_client),
        Arc::new(DashMap::default()),
        Arc::new(DashMap::default()),
        Duration::seconds(5),
        None,
    );

    let first_token =
        EdgeToken::try_from("*:development.abcdefghijklmnopqrstuvwxyz".to_string()).unwrap();
    let duplicate_scope_token =
        EdgeToken::try_from("*:development.zyxwvutsrqponmlkjihgfedcba".to_string()).unwrap();

    feature_refresher
        .register_token_for_refresh(first_token, None)
        .await;
    feature_refresher
        .register_token_for_refresh(duplicate_scope_token, None)
        .await;

    assert_eq!(feature_refresher.tokens_to_refresh.len(), 1);
}

#[tokio::test]
pub async fn registering_multiple_non_overlapping_tokens_will_keep_all() {
let unleash_client = UnleashClient::from_url(
Expand Down Expand Up @@ -384,13 +411,13 @@ mod tests {
let project_c_token =
EdgeToken::try_from("projectc:development.abcdefghijklmnopqrstuvwxyz".to_string())
.unwrap();
let _ = feature_refresher
feature_refresher
.register_token_for_refresh(project_a_token, None)
.await;
let _ = feature_refresher
feature_refresher
.register_token_for_refresh(project_b_token, None)
.await;
let _ = feature_refresher
feature_refresher
.register_token_for_refresh(project_c_token, None)
.await;

Expand Down Expand Up @@ -427,16 +454,16 @@ mod tests {
let wildcard_token =
EdgeToken::try_from("*:development.abcdefghijklmnopqrstuvwxyz".to_string()).unwrap();

let _ = feature_refresher
feature_refresher
.register_token_for_refresh(project_a_token, None)
.await;
let _ = feature_refresher
feature_refresher
.register_token_for_refresh(project_b_token, None)
.await;
let _ = feature_refresher
feature_refresher
.register_token_for_refresh(project_c_token, None)
.await;
let _ = feature_refresher
feature_refresher
.register_token_for_refresh(wildcard_token, None)
.await;

Expand Down Expand Up @@ -477,16 +504,16 @@ mod tests {
EdgeToken::try_from("[]:development.abcdefghijklmnopqrstuvwxyz".to_string()).unwrap();
project_a_and_c_token.projects = vec!["projecta".into(), "projectc".into()];

let _ = feature_refresher
feature_refresher
.register_token_for_refresh(project_a_token, None)
.await;
let _ = feature_refresher
feature_refresher
.register_token_for_refresh(project_b_token, None)
.await;
let _ = feature_refresher
feature_refresher
.register_token_for_refresh(project_c_token, None)
.await;
let _ = feature_refresher
feature_refresher
.register_token_for_refresh(project_a_and_c_token, None)
.await;

Expand Down Expand Up @@ -524,10 +551,10 @@ mod tests {
EdgeToken::try_from("projecta:development.abcdefghijklmnopqrstuvwxyz".to_string())
.unwrap();

let _ = feature_refresher
feature_refresher
.register_token_for_refresh(star_token, None)
.await;
let _ = feature_refresher
feature_refresher
.register_token_for_refresh(project_a_token, None)
.await;

Expand Down Expand Up @@ -561,10 +588,10 @@ mod tests {
.unwrap();
let production_wildcard_token =
EdgeToken::try_from("*:production.abcdefghijklmnopqrstuvwxyz".to_string()).unwrap();
let _ = feature_refresher
feature_refresher
.register_token_for_refresh(project_a_token, None)
.await;
let _ = feature_refresher
feature_refresher
.register_token_for_refresh(production_wildcard_token, None)
.await;
assert_eq!(feature_refresher.tokens_to_refresh.len(), 2);
Expand Down Expand Up @@ -681,7 +708,7 @@ mod tests {
let mut token = EdgeToken::try_from("*:development.secret123".to_string()).unwrap();
token.status = Validated;
token.token_type = Some(TokenType::Client);
let _ = feature_refresher
feature_refresher
.register_token_for_refresh(token, None)
.await;
assert!(!feature_refresher.tokens_to_refresh.is_empty());
Expand Down Expand Up @@ -717,7 +744,7 @@ mod tests {
let unleash_client = UnleashClient::new(server.url("/").as_str(), None).unwrap();
let mut feature_refresher = FeatureRefresher::with_client(Arc::new(unleash_client));
feature_refresher.refresh_interval = Duration::seconds(0);
let _ = feature_refresher
feature_refresher
.register_token_for_refresh(valid_token.clone(), None)
.await;
assert!(!feature_refresher.tokens_to_refresh.is_empty());
Expand Down Expand Up @@ -762,10 +789,10 @@ mod tests {
let unleash_client = UnleashClient::new(server.url("/").as_str(), None).unwrap();
let mut feature_refresher = FeatureRefresher::with_client(Arc::new(unleash_client));
feature_refresher.refresh_interval = Duration::seconds(0);
let _ = feature_refresher
feature_refresher
.register_token_for_refresh(dx_token.clone(), None)
.await;
let _ = feature_refresher
feature_refresher
.register_token_for_refresh(eg_token.clone(), None)
.await;
assert_eq!(feature_refresher.tokens_to_refresh.len(), 2);
Expand Down
48 changes: 45 additions & 3 deletions server/src/tokens.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ use actix_web::http::header::HeaderValue;
use actix_web::web::Data;
use actix_web::FromRequest;
use actix_web::HttpRequest;
use std::collections::HashSet;
use std::future::{ready, Ready};
use std::str::FromStr;

Expand All @@ -13,11 +14,12 @@ use crate::types::EdgeToken;
use crate::types::TokenRefresh;
use crate::types::TokenValidationStatus;

pub(crate) fn simplify(tokens: &[TokenRefresh]) -> Vec<&TokenRefresh> {
tokens
pub(crate) fn simplify(tokens: &[TokenRefresh]) -> Vec<TokenRefresh> {
let uniques = filter_unique_tokens(tokens);
uniques
.iter()
.filter_map(|token| {
tokens.iter().fold(Some(token), |acc, current| {
uniques.iter().fold(Some(token), |acc, current| {
acc.and_then(|lead| {
if current.token.token != lead.token.token
&& current.token.subsumes(&lead.token)
Expand All @@ -29,9 +31,28 @@ pub(crate) fn simplify(tokens: &[TokenRefresh]) -> Vec<&TokenRefresh> {
})
})
})
.cloned()
.collect()
}

/// Keeps only the first token seen for each (projects, environment) pair,
/// preserving input order. Later duplicates are dropped so `simplify` never
/// has to arbitrate between two tokens covering an identical scope.
fn filter_unique_tokens(tokens: &[TokenRefresh]) -> Vec<TokenRefresh> {
    let mut seen_keys = HashSet::new();
    tokens
        .iter()
        .filter(|token| {
            // `HashSet::insert` returns false when the key already existed,
            // so one call both tests and records the key (no contains+insert
            // double lookup).
            seen_keys.insert((
                token.token.projects.clone(),
                token.token.environment.clone(),
            ))
        })
        .cloned()
        .collect()
}

pub(crate) fn cache_key(token: &EdgeToken) -> String {
token
.environment
Expand Down Expand Up @@ -259,4 +280,25 @@ mod tests {

assert_eq!(actual, expected);
}

#[test]
fn test_case_5_when_two_tokens_share_environments_and_products_we_return_only_the_first() {
    // Both tokens are `*`-project wildcards in `development`; simplify should
    // retain only the first one registered and discard the duplicate scope.
    let first = test_token(Some("abcdefghijklmnopqrst"), Some("development"), vec!["*"]);
    let duplicate = test_token(Some("tsrqponmlkjihgfedcba"), Some("development"), vec!["*"]);

    let refreshes: Vec<TokenRefresh> = [first.clone(), duplicate]
        .into_iter()
        .map(|token| TokenRefresh::new(token, None))
        .collect();

    let simplified: Vec<EdgeToken> = simplify(&refreshes)
        .iter()
        .map(|refresh| refresh.token.clone())
        .collect();

    assert_eq!(simplified, vec![first]);
}
}

0 comments on commit 808db80

Please sign in to comment.