Anki: Replace lazy_static with once_cell
Unify to once_cell, lazy_static's replacement. The latter is unmaintained.
twwn committed Sep 27, 2024
1 parent 1b7390f commit e9b7268
Showing 24 changed files with 175 additions and 184 deletions.
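
Every hunk below is the same mechanical rewrite: a lazy_static! block becomes a once_cell::sync::Lazy static initialized by a closure, and call sites are unchanged because both forms deref to the inner type. A minimal sketch with an illustrative name (not taken from the diff):

// Previously (lazy_static):
//     lazy_static! {
//         static ref GREETING: String = format!("hello, {}", "world");
//     }
// Now (once_cell):
use once_cell::sync::Lazy;

static GREETING: Lazy<String> = Lazy::new(|| format!("hello, {}", "world"));

fn main() {
    // Deref works the same way for both forms.
    assert_eq!(GREETING.len(), "hello, world".len());
    println!("{}", *GREETING);
}
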
7 changes: 3 additions & 4 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion build/ninja_gen/Cargo.toml
@@ -14,8 +14,8 @@ camino.workspace = true
dunce.workspace = true
globset.workspace = true
itertools.workspace = true
lazy_static.workspace = true
maplit.workspace = true
num_cpus.workspace = true
once_cell.workspace = true
walkdir.workspace = true
which.workspace = true
5 changes: 2 additions & 3 deletions build/ninja_gen/src/input.rs
@@ -5,6 +5,7 @@ use std::collections::HashMap;
use std::fmt::Display;

use camino::Utf8PathBuf;
use once_cell::sync::Lazy;

#[derive(Debug, Clone, Hash, Default)]
pub enum BuildInput {
@@ -118,9 +119,7 @@ pub struct Glob {
pub exclude: Option<String>,
}

lazy_static::lazy_static! {
static ref CACHED_FILES: Vec<Utf8PathBuf> = cache_files();
}
static CACHED_FILES: Lazy<Vec<Utf8PathBuf>> = Lazy::new(|| cache_files());

/// Walking the source tree once instead of for each glob yields ~4x speed
/// improvements.
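
The Lazy static above keeps the original behaviour: cache_files() does not run at startup, only on the first dereference, and the result is reused for every later glob. A small standalone sketch of that evaluation order (names are illustrative):

use once_cell::sync::Lazy;
use std::sync::atomic::{AtomicBool, Ordering};

static WALKED: AtomicBool = AtomicBool::new(false);

// The closure runs on first access to FILES, not at program start.
static FILES: Lazy<Vec<String>> = Lazy::new(|| {
    WALKED.store(true, Ordering::SeqCst);
    vec!["a.rs".to_string(), "b.rs".to_string()]
});

fn main() {
    assert!(!WALKED.load(Ordering::SeqCst)); // nothing walked yet
    assert_eq!(FILES.len(), 2);              // first deref triggers the walk
    assert!(WALKED.load(Ordering::SeqCst));  // and it happened exactly once
}
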
2 changes: 1 addition & 1 deletion ftl/Cargo.toml
@@ -16,7 +16,7 @@ camino.workspace = true
clap.workspace = true
fluent-syntax.workspace = true
itertools.workspace = true
lazy_static.workspace = true
once_cell.workspace = true
regex.workspace = true
serde_json.workspace = true
snafu.workspace = true
17 changes: 8 additions & 9 deletions ftl/src/garbage_collection.rs
@@ -14,7 +14,7 @@ use clap::Args;
use fluent_syntax::ast;
use fluent_syntax::ast::Resource;
use fluent_syntax::parser;
use lazy_static::lazy_static;
use once_cell::sync::Lazy;
use regex::Regex;
use walkdir::DirEntry;
use walkdir::WalkDir;
@@ -144,9 +144,7 @@ fn extract_nested_messages_and_terms(
ftl_roots: &[impl AsRef<str>],
used_ftls: &mut HashSet<String>,
) {
lazy_static! {
static ref REFERENCE: Regex = Regex::new(r"\{\s*-?([-0-9a-z]+)\s*\}").unwrap();
}
static REFERENCE: Lazy<Regex> = Lazy::new(|| Regex::new(r"\{\s*-?([-0-9a-z]+)\s*\}").unwrap());
for_files_with_ending(ftl_roots, ".ftl", |entry| {
let source = fs::read_to_string(entry.path()).expect("file not readable");
for caps in REFERENCE.captures_iter(&source) {
@@ -198,11 +196,12 @@ fn entry_use_check(used_ftls: &HashSet<String>) -> impl Fn(&ast::Entry<&str>) ->
}

fn extract_references_from_file(refs: &mut HashSet<String>, entry: &DirEntry) {
lazy_static! {
static ref SNAKECASE_TR: Regex = Regex::new(r"\Wtr\s*\.([0-9a-z_]+)\W").unwrap();
static ref CAMELCASE_TR: Regex = Regex::new(r"\Wtr2?\.([0-9A-Za-z_]+)\W").unwrap();
static ref DESIGNER_STYLE_TR: Regex = Regex::new(r"<string>([0-9a-z_]+)</string>").unwrap();
}
static SNAKECASE_TR: Lazy<Regex> =
Lazy::new(|| Regex::new(r"\Wtr\s*\.([0-9a-z_]+)\W").unwrap());
static CAMELCASE_TR: Lazy<Regex> =
Lazy::new(|| Regex::new(r"\Wtr2?\.([0-9A-Za-z_]+)\W").unwrap());
static DESIGNER_STYLE_TR: Lazy<Regex> =
Lazy::new(|| Regex::new(r"<string>([0-9a-z_]+)</string>").unwrap());

let file_name = entry.file_name().to_str().expect("non-unicode filename");

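
One idiom worth noting in this hunk: the statics now live inside the function bodies. A static declared inside a function is still a single program-wide value, so each Regex is compiled on first use and then shared across every call. A self-contained sketch of that behaviour (the regex and counter are illustrative):

use once_cell::sync::Lazy;
use regex::Regex;
use std::sync::atomic::{AtomicUsize, Ordering};

static COMPILATIONS: AtomicUsize = AtomicUsize::new(0);

fn first_word(text: &str) -> Option<&str> {
    // Compiled on the first call only; later calls reuse the same Regex.
    static WORD: Lazy<Regex> = Lazy::new(|| {
        COMPILATIONS.fetch_add(1, Ordering::SeqCst);
        Regex::new(r"\w+").unwrap()
    });
    WORD.find(text).map(|m| m.as_str())
}

fn main() {
    assert_eq!(first_word("hello world"), Some("hello"));
    assert_eq!(first_word("foo bar"), Some("foo"));
    assert_eq!(COMPILATIONS.load(Ordering::SeqCst), 1);
}
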
1 change: 0 additions & 1 deletion rslib/Cargo.toml
@@ -69,7 +69,6 @@ htmlescape.workspace = true
hyper.workspace = true
id_tree.workspace = true
itertools.workspace = true
lazy_static.workspace = true
nom.workspace = true
num_cpus.workspace = true
num_enum.workspace = true
2 changes: 1 addition & 1 deletion rslib/linkchecker/Cargo.toml
@@ -11,8 +11,8 @@ rust-version.workspace = true
anki.workspace = true
futures.workspace = true
itertools.workspace = true
lazy_static.workspace = true
linkcheck.workspace = true
once_cell.workspace = true
regex.workspace = true
reqwest.workspace = true
strum.workspace = true
9 changes: 5 additions & 4 deletions rslib/linkchecker/tests/links.rs
@@ -13,7 +13,7 @@ use anki::links::help_page_to_link;
use anki::links::HelpPage;
use futures::StreamExt;
use itertools::Itertools;
use lazy_static::lazy_static;
use once_cell::sync::Lazy;
use linkcheck::validation::check_web;
use linkcheck::validation::Context;
use linkcheck::validation::Reason;
@@ -70,9 +70,10 @@ impl From<&'static str> for CheckableUrl {
}

fn ts_help_pages() -> impl Iterator<Item = &'static str> {
lazy_static! {
static ref QUOTED_URL: Regex = Regex::new("\"(http.+)\"").unwrap();
}
static QUOTED_URL: Lazy<Regex> = Lazy::new(|| {
Regex::new("\"(http.+)\"").unwrap()
});

QUOTED_URL
.captures_iter(include_str!("../../../ts/lib/tslib/help-page.ts"))
.map(|caps| caps.get(1).unwrap().as_str())
15 changes: 6 additions & 9 deletions rslib/src/ankidroid/db.rs
@@ -16,7 +16,7 @@ use itertools::FoldWhile;
use itertools::FoldWhile::Continue;
use itertools::FoldWhile::Done;
use itertools::Itertools;
use lazy_static::lazy_static;
use once_cell::sync::Lazy;
use rusqlite::ToSql;
use serde::Deserialize;

@@ -110,10 +110,8 @@ fn select_slice_of_size<'a>(

type SequenceNumber = i32;

lazy_static! {
static ref HASHMAP: Mutex<HashMap<CollectionId, HashMap<SequenceNumber, DbResponse>>> =
Mutex::new(HashMap::new());
}
static HASHMAP: Lazy<Mutex<HashMap<CollectionId, HashMap<SequenceNumber, DbResponse>>>> =
Lazy::new(|| Mutex::new(HashMap::new()));

pub(crate) fn flush_single_result(col: &Collection, sequence_number: i32) {
HASHMAP
@@ -244,10 +242,9 @@ pub(crate) fn next_sequence_number() -> i32 {
SEQUENCE_NUMBER.fetch_add(1, Ordering::SeqCst)
}

lazy_static! {
// same as we get from io.requery.android.database.CursorWindow.sCursorWindowSize
static ref DB_COMMAND_PAGE_SIZE: Mutex<usize> = Mutex::new(1024 * 1024 * 2);
}
// same as we get from
// io.requery.android.database.CursorWindow.sCursorWindowSize
static DB_COMMAND_PAGE_SIZE: Lazy<Mutex<usize>> = Lazy::new(|| Mutex::new(1024 * 1024 * 2));

pub(crate) fn set_max_page_size(size: usize) {
let mut state = DB_COMMAND_PAGE_SIZE.lock().expect("Could not lock mutex");
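
The HASHMAP and DB_COMMAND_PAGE_SIZE statics above wrap the lazily created value in a Mutex, since Lazy only handles initialization, not later mutation. A minimal sketch of the same shape (types and keys are illustrative, not Anki's):

use once_cell::sync::Lazy;
use std::collections::HashMap;
use std::sync::Mutex;

// Created empty on first access; the Mutex guards all later reads and writes.
static RESPONSES: Lazy<Mutex<HashMap<i64, String>>> = Lazy::new(|| Mutex::new(HashMap::new()));

fn store(id: i64, payload: String) {
    RESPONSES.lock().expect("poisoned mutex").insert(id, payload);
}

fn take(id: i64) -> Option<String> {
    RESPONSES.lock().expect("poisoned mutex").remove(&id)
}

fn main() {
    store(1, "hello".into());
    assert_eq!(take(1).as_deref(), Some("hello"));
    assert_eq!(take(1), None);
}
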
10 changes: 4 additions & 6 deletions rslib/src/ankihub/login.rs
@@ -1,7 +1,7 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

use lazy_static::lazy_static;
use once_cell::sync::Lazy;
use regex::Regex;
use reqwest::Client;
use serde;
@@ -31,11 +31,9 @@ pub async fn ankihub_login<S: Into<String>>(
client: Client,
) -> Result<LoginResponse> {
let client = HttpAnkiHubClient::new("", client);
lazy_static! {
static ref EMAIL_RE: Regex =
Regex::new(r"^[a-zA-Z0-9.!#$%&’*+/=?^_`{|}~-]+@[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*$")
.unwrap();
}
static EMAIL_RE: Lazy<Regex> = Lazy::new(|| {
Regex::new(r"^[a-zA-Z0-9.!#$%&’*+/=?^_`{|}~-]+@[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*$").unwrap()
});
let mut request = LoginRequest {
username: None,
email: None,
12 changes: 6 additions & 6 deletions rslib/src/cloze.rs
@@ -9,12 +9,12 @@ use std::fmt::Write;
use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageOcclusion;
use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageOcclusionShape;
use htmlescape::encode_attribute;
use lazy_static::lazy_static;
use nom::branch::alt;
use nom::bytes::complete::tag;
use nom::bytes::complete::take_while;
use nom::combinator::map;
use nom::IResult;
use once_cell::sync::Lazy;
use regex::Captures;
use regex::Regex;

@@ -24,16 +24,16 @@ use crate::latex::contains_latex;
use crate::template::RenderContext;
use crate::text::strip_html_preserving_entities;

lazy_static! {
static ref MATHJAX: Regex = Regex::new(
static MATHJAX: Lazy<Regex> = Lazy::new(|| {
Regex::new(
r"(?xsi)
(\\[(\[]) # 1 = mathjax opening tag
(.*?) # 2 = inner content
(\\[])]) # 3 = mathjax closing tag
"
",
)
.unwrap();
}
.unwrap()
});

mod mathjax_caps {
pub const OPENING_TAG: usize = 1;
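
The MATHJAX pattern above relies on regex's extended mode: the (?xsi) prefix enables verbose whitespace and # comments (x), dot-matches-newline (s), and case-insensitivity (i), which is why the raw string can carry the numbered inline comments. A small sketch in the same style with an illustrative pattern:

use once_cell::sync::Lazy;
use regex::Regex;

static TAGGED: Lazy<Regex> = Lazy::new(|| {
    Regex::new(
        r"(?xsi)
        \[b\]      # literal opening tag
        (.*?)      # 1 = inner content, may span newlines because of (?s)
        \[/b\]     # literal closing tag
        ",
    )
    .unwrap()
});

fn main() {
    let caps = TAGGED.captures("before [B]bold\ntext[/B] after").unwrap();
    assert_eq!(caps.get(1).unwrap().as_str(), "bold\ntext");
}
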
16 changes: 7 additions & 9 deletions rslib/src/import_export/text/csv/export.rs
@@ -9,7 +9,7 @@ use std::sync::Arc;

use anki_proto::import_export::ExportNoteCsvRequest;
use itertools::Itertools;
use lazy_static::lazy_static;
use once_cell::sync::Lazy;
use regex::Regex;

use super::metadata::Delimiter;
@@ -156,23 +156,21 @@ fn field_to_record_field(field: &str, with_html: bool) -> Cow<str> {
}

fn strip_redundant_sections(text: &str) -> Cow<str> {
lazy_static! {
static ref RE: Regex = Regex::new(
static RE: Lazy<Regex> = Lazy::new(|| {
Regex::new(
r"(?isx)
<style>.*?</style> # style elements
|
\[\[type:[^]]+\]\] # type replacements
"
",
)
.unwrap();
}
.unwrap()
});
RE.replace_all(text.as_ref(), "")
}

fn strip_answer_side_question(text: &str) -> Cow<str> {
lazy_static! {
static ref RE: Regex = Regex::new(r"(?is)^.*<hr id=answer>\n*").unwrap();
}
static RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?is)^.*<hr id=answer>\n*").unwrap());
RE.replace_all(text.as_ref(), "")
}

20 changes: 11 additions & 9 deletions rslib/src/latex.rs
@@ -3,34 +3,36 @@

use std::borrow::Cow;

use lazy_static::lazy_static;
use once_cell::sync::Lazy;
use regex::Captures;
use regex::Regex;

use crate::cloze::expand_clozes_to_reveal_latex;
use crate::media::files::sha1_of_data;
use crate::text::strip_html;

lazy_static! {
pub(crate) static ref LATEX: Regex = Regex::new(
pub(crate) static LATEX: Lazy<Regex> = Lazy::new(|| {
Regex::new(
r"(?xsi)
\[latex\](.+?)\[/latex\] # 1 - standard latex
|
\[\$\](.+?)\[/\$\] # 2 - inline math
|
\[\$\$\](.+?)\[/\$\$\] # 3 - math environment
"
",
)
.unwrap();
static ref LATEX_NEWLINES: Regex = Regex::new(
.unwrap()
});
static LATEX_NEWLINES: Lazy<Regex> = Lazy::new(|| {
Regex::new(
r#"(?xi)
<br( /)?>
|
<div>
"#
"#,
)
.unwrap();
}
.unwrap()
});

pub(crate) fn contains_latex(text: &str) -> bool {
LATEX.is_match(text)
6 changes: 2 additions & 4 deletions rslib/src/lib.rs
@@ -53,8 +53,6 @@ pub mod version;

use std::env;

use lazy_static::lazy_static;
use once_cell::sync::Lazy;

lazy_static! {
pub(crate) static ref PYTHON_UNIT_TESTS: bool = env::var("ANKI_TEST_MODE").is_ok();
}
pub(crate) static PYTHON_UNIT_TESTS: Lazy<bool> = Lazy::new(|| env::var("ANKI_TEST_MODE").is_ok());
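
Callers of PYTHON_UNIT_TESTS need no changes: a lazy_static ref and a Lazy both rely on Deref, so existing *PYTHON_UNIT_TESTS reads keep working. A hedged sketch of the access pattern (the helper function is illustrative):

use once_cell::sync::Lazy;
use std::env;

static TEST_MODE: Lazy<bool> = Lazy::new(|| env::var("ANKI_TEST_MODE").is_ok());

fn log_noise(msg: &str) {
    // Deref coercion makes the Lazy<bool> read like a plain bool.
    if *TEST_MODE {
        return;
    }
    println!("{msg}");
}

fn main() {
    log_noise("starting up");
}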