diff --git a/src/backend/aqua.rs b/src/backend/aqua.rs
index 4df0ae3e26..9839730964 100644
--- a/src/backend/aqua.rs
+++ b/src/backend/aqua.rs
@@ -39,7 +39,7 @@ impl Backend for AquaBackend {
         Ok(vec!["cosign", "slsa-verifier"])
     }
 
-    fn _list_remote_versions(&self) -> eyre::Result<Vec<String>> {
+    fn _list_remote_versions(&self) -> Result<Vec<String>> {
         let pkg = AQUA_REGISTRY.package(&self.id)?;
         if !pkg.repo_owner.is_empty() && !pkg.repo_name.is_empty() {
             let versions = if let Some("github_tag") = pkg.version_source.as_deref() {
diff --git a/src/duration.rs b/src/duration.rs
index f80fa902c2..e237d8310d 100644
--- a/src/duration.rs
+++ b/src/duration.rs
@@ -2,4 +2,4 @@ pub use std::time::Duration;
 
 pub const HOURLY: Duration = Duration::from_secs(60 * 60);
 pub const DAILY: Duration = Duration::from_secs(60 * 60 * 24);
-//pub const WEEKLY: Duration = Duration::from_secs(60 * 60 * 24 * 7);
+// pub const WEEKLY: Duration = Duration::from_secs(60 * 60 * 24 * 7);
diff --git a/src/github.rs b/src/github.rs
index e660519d28..a63b41a349 100644
--- a/src/github.rs
+++ b/src/github.rs
@@ -1,9 +1,17 @@
-use crate::env;
+use crate::cache::{CacheManager, CacheManagerBuilder};
+use crate::{dirs, duration, env};
+use eyre::Result;
+use heck::ToKebabCase;
+use once_cell::sync::Lazy;
 use reqwest::header::HeaderMap;
-use serde_derive::Deserialize;
+use serde_derive::{Deserialize, Serialize};
+use std::collections::HashMap;
+use std::path::PathBuf;
+use std::sync::RwLock;
+use std::sync::RwLockReadGuard;
 use xx::regex;
 
-#[derive(Debug, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct GithubRelease {
     pub tag_name: String,
     // pub name: Option<String>,
@@ -14,20 +22,74 @@ pub struct GithubRelease {
     pub assets: Vec<GithubAsset>,
 }
 
-#[derive(Debug, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct GithubTag {
     pub name: String,
 }
 
-#[derive(Debug, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct GithubAsset {
     pub name: String,
     // pub size: u64,
     pub browser_download_url: String,
 }
 
-pub fn list_releases(repo: &str) -> eyre::Result<Vec<GithubRelease>> {
-    let url = format!("https://api.github.com/repos/{}/releases", repo);
+type CacheGroup<T> = HashMap<String, CacheManager<T>>;
+
+static RELEASES_CACHE: Lazy<RwLock<CacheGroup<Vec<GithubRelease>>>> = Lazy::new(Default::default);
+
+static RELEASE_CACHE: Lazy<RwLock<CacheGroup<GithubRelease>>> = Lazy::new(Default::default);
+
+static TAGS_CACHE: Lazy<RwLock<CacheGroup<Vec<String>>>> = Lazy::new(Default::default);
+
+fn get_tags_cache(key: &str) -> RwLockReadGuard<'_, CacheGroup<Vec<String>>> {
+    TAGS_CACHE
+        .write()
+        .unwrap()
+        .entry(key.to_string())
+        .or_insert_with(|| {
+            CacheManagerBuilder::new(cache_dir().join(format!("{key}-tags.msgpack.z")))
+                .with_fresh_duration(Some(duration::DAILY))
+                .build()
+        });
+    TAGS_CACHE.read().unwrap()
+}
+
+fn get_releases_cache(key: &str) -> RwLockReadGuard<'_, CacheGroup<Vec<GithubRelease>>> {
+    RELEASES_CACHE
+        .write()
+        .unwrap()
+        .entry(key.to_string())
+        .or_insert_with(|| {
+            CacheManagerBuilder::new(cache_dir().join(format!("{key}-releases.msgpack.z")))
+                .with_fresh_duration(Some(duration::DAILY))
+                .build()
+        });
+    RELEASES_CACHE.read().unwrap()
+}
+
+fn get_release_cache<'a>(key: &str) -> RwLockReadGuard<'a, CacheGroup<GithubRelease>> {
+    RELEASE_CACHE
+        .write()
+        .unwrap()
+        .entry(key.to_string())
+        .or_insert_with(|| {
+            CacheManagerBuilder::new(cache_dir().join(format!("{key}.msgpack.z")))
+                .with_fresh_duration(Some(duration::DAILY))
+                .build()
+        });
+    RELEASE_CACHE.read().unwrap()
+}
+
+pub fn list_releases(repo: &str) -> Result<Vec<GithubRelease>> {
+    let key = repo.to_kebab_case();
+    let cache = get_releases_cache(&key);
+    let cache = cache.get(&key).unwrap();
+    Ok(cache.get_or_try_init(|| list_releases_(repo))?.to_vec())
+}
+
+fn list_releases_(repo: &str) -> Result<Vec<GithubRelease>> {
+    let url = format!("https://api.github.com/repos/{repo}/releases");
     let (mut releases, mut headers) =
         crate::http::HTTP_FETCH.json_headers::<Vec<GithubRelease>, _>(url)?;
 
@@ -42,7 +104,14 @@ pub fn list_releases(repo: &str) -> eyre::Result<Vec<GithubRelease>> {
     Ok(releases)
 }
 
-pub fn list_tags(repo: &str) -> eyre::Result<Vec<String>> {
+pub fn list_tags(repo: &str) -> Result<Vec<String>> {
+    let key = repo.to_kebab_case();
+    let cache = get_tags_cache(&key);
+    let cache = cache.get(&key).unwrap();
+    Ok(cache.get_or_try_init(|| list_tags_(repo))?.to_vec())
+}
+
+fn list_tags_(repo: &str) -> Result<Vec<String>> {
     let url = format!("https://api.github.com/repos/{}/tags", repo);
     let (mut tags, mut headers) =
         crate::http::HTTP_FETCH.json_headers::<Vec<GithubTag>, _>(url)?;
@@ -57,11 +126,15 @@ pub fn list_tags(repo: &str) -> eyre::Result<Vec<String>> {
     Ok(tags.into_iter().map(|t| t.name).collect())
 }
 
-pub fn get_release(repo: &str, tag: &str) -> eyre::Result<GithubRelease> {
-    let url = format!(
-        "https://api.github.com/repos/{}/releases/tags/{}",
-        repo, tag
-    );
+pub fn get_release(repo: &str, tag: &str) -> Result<GithubRelease> {
+    let key = format!("{repo}-{tag}").to_kebab_case();
+    let cache = get_release_cache(&key);
+    let cache = cache.get(&key).unwrap();
+    Ok(cache.get_or_try_init(|| get_release_(repo, tag))?.clone())
+}
+
+fn get_release_(repo: &str, tag: &str) -> Result<GithubRelease> {
+    let url = format!("https://api.github.com/repos/{repo}/releases/tags/{tag}");
     crate::http::HTTP_FETCH.json(url)
 }
 
@@ -74,3 +147,7 @@ fn next_page(headers: &HeaderMap) -> Option<String> {
         .captures(&link)
         .map(|c| c.get(1).unwrap().as_str().to_string())
 }
+
+fn cache_dir() -> PathBuf {
+    dirs::CACHE.join("github")
+}
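For readers skimming the src/github.rs changes: the three new get_*_cache helpers all follow the same shape, a global RwLock<HashMap> keyed per repo, where the write lock is held only long enough to insert a missing CacheManager and the actual lookup goes through a read guard. Below is a minimal standalone sketch of that shape, assuming only a once_cell = "1" dependency; it swaps the disk-backed CacheManager for an in-memory OnceCell and uses a hypothetical fetch_tags in place of the paginated GitHub request, so it illustrates the locking pattern rather than the real implementation.

    // Standalone sketch of the per-key cache registry pattern from src/github.rs:
    // the write lock is taken only to insert a missing slot, all lookups use a
    // read guard, and each slot is filled at most once.
    use once_cell::sync::{Lazy, OnceCell};
    use std::collections::HashMap;
    use std::sync::{RwLock, RwLockReadGuard};

    type Slots = HashMap<String, OnceCell<Vec<String>>>;

    static TAGS: Lazy<RwLock<Slots>> = Lazy::new(Default::default);

    // Ensure a slot exists for `key`, then hand the caller a read guard over the map.
    fn tags_slot(key: &str) -> RwLockReadGuard<'static, Slots> {
        TAGS.write().unwrap().entry(key.to_string()).or_default();
        TAGS.read().unwrap()
    }

    // The first call per repo pays for the fetch; later calls clone the cached value.
    fn list_tags(repo: &str) -> Vec<String> {
        let slots = tags_slot(repo);
        slots.get(repo).unwrap().get_or_init(|| fetch_tags(repo)).clone()
    }

    // Hypothetical stand-in for the paginated GitHub tags request in the diff.
    fn fetch_tags(repo: &str) -> Vec<String> {
        println!("fetching tags for {repo}");
        vec!["v1.0.0".into(), "v1.1.0".into()]
    }

    fn main() {
        println!("{:?}", list_tags("jdx/mise"));
        println!("{:?}", list_tags("jdx/mise")); // served from the in-memory cache
    }

In the diff itself, CacheManagerBuilder layers a msgpack file path under cache_dir() and a duration::DAILY freshness window on top of this skeleton, so cached GitHub responses also survive across process invocations rather than living only in memory.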