diff --git a/Cargo.lock b/Cargo.lock index 4711ad5e7a3..b0a42b53a6f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -248,7 +248,6 @@ dependencies = [ "camino", "inflections", "itertools 0.13.0", - "once_cell", "prost-reflect", "prost-types", "regex", @@ -1890,7 +1889,6 @@ dependencies = [ "clap", "fluent-syntax", "itertools 0.13.0", - "once_cell", "regex", "serde_json", "snafu", @@ -3167,7 +3165,6 @@ dependencies = [ "futures", "itertools 0.13.0", "linkcheck", - "once_cell", "regex", "reqwest 0.12.7", "strum 0.26.3", @@ -3444,7 +3441,6 @@ dependencies = [ "anki_process", "anyhow", "camino", - "once_cell", "walkdir", "which", ] @@ -3610,7 +3606,6 @@ dependencies = [ "itertools 0.13.0", "maplit", "num_cpus", - "once_cell", "walkdir", "which", ] diff --git a/build/ninja_gen/Cargo.toml b/build/ninja_gen/Cargo.toml index ea658697527..7757116c62e 100644 --- a/build/ninja_gen/Cargo.toml +++ b/build/ninja_gen/Cargo.toml @@ -16,6 +16,5 @@ globset.workspace = true itertools.workspace = true maplit.workspace = true num_cpus.workspace = true -once_cell.workspace = true walkdir.workspace = true which.workspace = true diff --git a/build/ninja_gen/src/input.rs b/build/ninja_gen/src/input.rs index 9e3d4ebfc4b..8b77ce5c092 100644 --- a/build/ninja_gen/src/input.rs +++ b/build/ninja_gen/src/input.rs @@ -3,9 +3,9 @@ use std::collections::HashMap; use std::fmt::Display; +use std::sync::LazyLock; use camino::Utf8PathBuf; -use once_cell::sync::Lazy; #[derive(Debug, Clone, Hash, Default)] pub enum BuildInput { @@ -119,7 +119,7 @@ pub struct Glob { pub exclude: Option, } -static CACHED_FILES: Lazy> = Lazy::new(|| cache_files()); +static CACHED_FILES: LazyLock> = LazyLock::new(cache_files); /// Walking the source tree once instead of for each glob yields ~4x speed /// improvements. 
diff --git a/ftl/Cargo.toml b/ftl/Cargo.toml index 677360f0ccb..55252e60dd6 100644 --- a/ftl/Cargo.toml +++ b/ftl/Cargo.toml @@ -16,7 +16,6 @@ camino.workspace = true clap.workspace = true fluent-syntax.workspace = true itertools.workspace = true -once_cell.workspace = true regex.workspace = true serde_json.workspace = true snafu.workspace = true diff --git a/ftl/src/garbage_collection.rs b/ftl/src/garbage_collection.rs index a6aee4cf1a2..17d47e430ec 100644 --- a/ftl/src/garbage_collection.rs +++ b/ftl/src/garbage_collection.rs @@ -6,6 +6,7 @@ use std::fs; use std::io::BufReader; use std::iter::FromIterator; use std::path::PathBuf; +use std::sync::LazyLock; use anki_io::create_file; use anyhow::Context; @@ -14,7 +15,6 @@ use clap::Args; use fluent_syntax::ast; use fluent_syntax::ast::Resource; use fluent_syntax::parser; -use once_cell::sync::Lazy; use regex::Regex; use walkdir::DirEntry; use walkdir::WalkDir; @@ -144,7 +144,8 @@ fn extract_nested_messages_and_terms( ftl_roots: &[impl AsRef], used_ftls: &mut HashSet, ) { - static REFERENCE: Lazy = Lazy::new(|| Regex::new(r"\{\s*-?([-0-9a-z]+)\s*\}").unwrap()); + static REFERENCE: LazyLock = + LazyLock::new(|| Regex::new(r"\{\s*-?([-0-9a-z]+)\s*\}").unwrap()); for_files_with_ending(ftl_roots, ".ftl", |entry| { let source = fs::read_to_string(entry.path()).expect("file not readable"); for caps in REFERENCE.captures_iter(&source) { @@ -196,12 +197,12 @@ fn entry_use_check(used_ftls: &HashSet) -> impl Fn(&ast::Entry<&str>) -> } fn extract_references_from_file(refs: &mut HashSet, entry: &DirEntry) { - static SNAKECASE_TR: Lazy = - Lazy::new(|| Regex::new(r"\Wtr\s*\.([0-9a-z_]+)\W").unwrap()); - static CAMELCASE_TR: Lazy = - Lazy::new(|| Regex::new(r"\Wtr2?\.([0-9A-Za-z_]+)\W").unwrap()); - static DESIGNER_STYLE_TR: Lazy = - Lazy::new(|| Regex::new(r"([0-9a-z_]+)").unwrap()); + static SNAKECASE_TR: LazyLock = + LazyLock::new(|| Regex::new(r"\Wtr\s*\.([0-9a-z_]+)\W").unwrap()); + static CAMELCASE_TR: LazyLock = + 
LazyLock::new(|| Regex::new(r"\Wtr2?\.([0-9A-Za-z_]+)\W").unwrap()); + static DESIGNER_STYLE_TR: LazyLock = + LazyLock::new(|| Regex::new(r"([0-9a-z_]+)").unwrap()); let file_name = entry.file_name().to_str().expect("non-unicode filename"); diff --git a/rslib/linkchecker/Cargo.toml b/rslib/linkchecker/Cargo.toml index 16923e0972f..70ff2204329 100644 --- a/rslib/linkchecker/Cargo.toml +++ b/rslib/linkchecker/Cargo.toml @@ -12,7 +12,6 @@ anki.workspace = true futures.workspace = true itertools.workspace = true linkcheck.workspace = true -once_cell.workspace = true regex.workspace = true reqwest.workspace = true strum.workspace = true diff --git a/rslib/linkchecker/tests/links.rs b/rslib/linkchecker/tests/links.rs index 28ab46ae7c6..48656ace440 100644 --- a/rslib/linkchecker/tests/links.rs +++ b/rslib/linkchecker/tests/links.rs @@ -6,6 +6,7 @@ use std::borrow::Cow; use std::env; use std::iter; +use std::sync::LazyLock; use std::time::Duration; use anki::links::help_page_link_suffix; @@ -13,7 +14,6 @@ use anki::links::help_page_to_link; use anki::links::HelpPage; use futures::StreamExt; use itertools::Itertools; -use once_cell::sync::Lazy; use linkcheck::validation::check_web; use linkcheck::validation::Context; use linkcheck::validation::Reason; @@ -70,9 +70,7 @@ impl From<&'static str> for CheckableUrl { } fn ts_help_pages() -> impl Iterator { - static QUOTED_URL: Lazy = Lazy::new(|| { - Regex::new("\"(http.+)\"").unwrap() - }); + static QUOTED_URL: LazyLock = LazyLock::new(|| Regex::new("\"(http.+)\"").unwrap()); QUOTED_URL .captures_iter(include_str!("../../../ts/lib/tslib/help-page.ts")) diff --git a/rslib/proto_gen/Cargo.toml b/rslib/proto_gen/Cargo.toml index 552de7e1401..5f1ee981774 100644 --- a/rslib/proto_gen/Cargo.toml +++ b/rslib/proto_gen/Cargo.toml @@ -15,7 +15,6 @@ anyhow.workspace = true camino.workspace = true inflections.workspace = true itertools.workspace = true -once_cell.workspace = true prost-reflect.workspace = true prost-types.workspace = true 
regex.workspace = true diff --git a/rslib/proto_gen/src/lib.rs b/rslib/proto_gen/src/lib.rs index f3250523640..83bbc5f10a4 100644 --- a/rslib/proto_gen/src/lib.rs +++ b/rslib/proto_gen/src/lib.rs @@ -7,6 +7,7 @@ use std::collections::HashMap; use std::env; use std::path::PathBuf; +use std::sync::LazyLock; use anki_io::read_to_string; use anki_io::write_file_if_changed; @@ -16,7 +17,6 @@ use camino::Utf8Path; use inflections::Inflect; use itertools::Either; use itertools::Itertools; -use once_cell::sync::Lazy; use prost_reflect::DescriptorPool; use prost_reflect::MessageDescriptor; use prost_reflect::MethodDescriptor; @@ -238,8 +238,8 @@ pub fn add_must_use_annotations_to_file(path: &Utf8Path, is_empty: E) -> Resu where E: Fn(&Utf8Path, &str) -> bool, { - static MESSAGE_OR_ENUM_RE: Lazy = - Lazy::new(|| Regex::new(r"pub (struct|enum) ([[:alnum:]]+?)\s").unwrap()); + static MESSAGE_OR_ENUM_RE: LazyLock = + LazyLock::new(|| Regex::new(r"pub (struct|enum) ([[:alnum:]]+?)\s").unwrap()); let contents = read_to_string(path)?; let contents = MESSAGE_OR_ENUM_RE.replace_all(&contents, |caps: &Captures| { let is_enum = caps.get(1).unwrap().as_str() == "enum"; diff --git a/rslib/src/ankidroid/db.rs b/rslib/src/ankidroid/db.rs index 3cc5fe7e4b8..e143550740d 100644 --- a/rslib/src/ankidroid/db.rs +++ b/rslib/src/ankidroid/db.rs @@ -5,6 +5,7 @@ use std::collections::HashMap; use std::mem::size_of; use std::sync::atomic::AtomicI32; use std::sync::atomic::Ordering; +use std::sync::LazyLock; use std::sync::Mutex; use anki_proto::ankidroid::sql_value::Data; @@ -16,7 +17,6 @@ use itertools::FoldWhile; use itertools::FoldWhile::Continue; use itertools::FoldWhile::Done; use itertools::Itertools; -use once_cell::sync::Lazy; use rusqlite::ToSql; use serde::Deserialize; @@ -110,8 +110,8 @@ fn select_slice_of_size<'a>( type SequenceNumber = i32; -static HASHMAP: Lazy>>> = - Lazy::new(|| Mutex::new(HashMap::new())); +static HASHMAP: LazyLock>>> = + LazyLock::new(|| 
Mutex::new(HashMap::new())); pub(crate) fn flush_single_result(col: &Collection, sequence_number: i32) { HASHMAP @@ -244,7 +244,7 @@ pub(crate) fn next_sequence_number() -> i32 { // same as we get from // io.requery.android.database.CursorWindow.sCursorWindowSize -static DB_COMMAND_PAGE_SIZE: Lazy> = Lazy::new(|| Mutex::new(1024 * 1024 * 2)); +static DB_COMMAND_PAGE_SIZE: LazyLock> = LazyLock::new(|| Mutex::new(1024 * 1024 * 2)); pub(crate) fn set_max_page_size(size: usize) { let mut state = DB_COMMAND_PAGE_SIZE.lock().expect("Could not lock mutex"); diff --git a/rslib/src/ankihub/login.rs b/rslib/src/ankihub/login.rs index a77e17a95cb..972d00f09a7 100644 --- a/rslib/src/ankihub/login.rs +++ b/rslib/src/ankihub/login.rs @@ -1,7 +1,8 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -use once_cell::sync::Lazy; +use std::sync::LazyLock; + use regex::Regex; use reqwest::Client; use serde; @@ -31,7 +32,7 @@ pub async fn ankihub_login>( client: Client, ) -> Result { let client = HttpAnkiHubClient::new("", client); - static EMAIL_RE: Lazy = Lazy::new(|| { + static EMAIL_RE: LazyLock = LazyLock::new(|| { Regex::new(r"^[a-zA-Z0-9.!#$%&’*+/=?^_`{|}~-]+@[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*$").unwrap() }); let mut request = LoginRequest { diff --git a/rslib/src/backend/mod.rs b/rslib/src/backend/mod.rs index cff86d7e49a..42260b1500b 100644 --- a/rslib/src/backend/mod.rs +++ b/rslib/src/backend/mod.rs @@ -19,10 +19,10 @@ use std::ops::Deref; use std::result; use std::sync::Arc; use std::sync::Mutex; +use std::sync::OnceLock; use std::thread::JoinHandle; use futures::future::AbortHandle; -use once_cell::sync::OnceCell; use prost::Message; use reqwest::Client; use tokio::runtime; @@ -53,7 +53,7 @@ pub struct BackendInner { server: bool, sync_abort: Mutex>, progress_state: Arc>, - runtime: OnceCell, + runtime: OnceLock, state: Mutex, backup_task: Mutex>>>, media_sync_task: Mutex>>>, @@ -88,7 +88,7 @@ 
impl Backend { want_abort: false, last_progress: None, })), - runtime: OnceCell::new(), + runtime: OnceLock::new(), state: Mutex::new(BackendState::default()), backup_task: Mutex::new(None), media_sync_task: Mutex::new(None), diff --git a/rslib/src/cloze.rs b/rslib/src/cloze.rs index f82e0f82332..8bcf5765fcc 100644 --- a/rslib/src/cloze.rs +++ b/rslib/src/cloze.rs @@ -5,6 +5,7 @@ use std::borrow::Cow; use std::collections::HashMap; use std::collections::HashSet; use std::fmt::Write; +use std::sync::LazyLock; use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageOcclusion; use anki_proto::image_occlusion::get_image_occlusion_note_response::ImageOcclusionShape; @@ -14,7 +15,6 @@ use nom::bytes::complete::tag; use nom::bytes::complete::take_while; use nom::combinator::map; use nom::IResult; -use once_cell::sync::Lazy; use regex::Captures; use regex::Regex; @@ -24,7 +24,7 @@ use crate::latex::contains_latex; use crate::template::RenderContext; use crate::text::strip_html_preserving_entities; -static MATHJAX: Lazy = Lazy::new(|| { +static MATHJAX: LazyLock = LazyLock::new(|| { Regex::new( r"(?xsi) (\\[(\[]) # 1 = mathjax opening tag diff --git a/rslib/src/import_export/text/csv/export.rs b/rslib/src/import_export/text/csv/export.rs index 6fa14ab9461..edf3d3ca103 100644 --- a/rslib/src/import_export/text/csv/export.rs +++ b/rslib/src/import_export/text/csv/export.rs @@ -6,10 +6,10 @@ use std::collections::HashMap; use std::fs::File; use std::io::Write; use std::sync::Arc; +use std::sync::LazyLock; use anki_proto::import_export::ExportNoteCsvRequest; use itertools::Itertools; -use once_cell::sync::Lazy; use regex::Regex; use super::metadata::Delimiter; @@ -156,7 +156,7 @@ fn field_to_record_field(field: &str, with_html: bool) -> Cow { } fn strip_redundant_sections(text: &str) -> Cow { - static RE: Lazy = Lazy::new(|| { + static RE: LazyLock = LazyLock::new(|| { Regex::new( r"(?isx) # style elements @@ -170,7 +170,8 @@ fn strip_redundant_sections(text: 
&str) -> Cow { } fn strip_answer_side_question(text: &str) -> Cow { - static RE: Lazy = Lazy::new(|| Regex::new(r"(?is)^.*<hr id=answer>
\n*").unwrap()); + static RE: LazyLock = + LazyLock::new(|| Regex::new(r"(?is)^.*<hr id=answer>
\n*").unwrap()); RE.replace_all(text.as_ref(), "") } diff --git a/rslib/src/latex.rs b/rslib/src/latex.rs index d164b3cfd3d..3ebeebf8ac0 100644 --- a/rslib/src/latex.rs +++ b/rslib/src/latex.rs @@ -2,8 +2,8 @@ // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html use std::borrow::Cow; +use std::sync::LazyLock; -use once_cell::sync::Lazy; use regex::Captures; use regex::Regex; @@ -11,7 +11,7 @@ use crate::cloze::expand_clozes_to_reveal_latex; use crate::media::files::sha1_of_data; use crate::text::strip_html; -pub(crate) static LATEX: Lazy = Lazy::new(|| { +pub(crate) static LATEX: LazyLock = LazyLock::new(|| { Regex::new( r"(?xsi) \[latex\](.+?)\[/latex\] # 1 - standard latex @@ -23,7 +23,7 @@ pub(crate) static LATEX: Lazy = Lazy::new(|| { ) .unwrap() }); -static LATEX_NEWLINES: Lazy = Lazy::new(|| { +static LATEX_NEWLINES: LazyLock = LazyLock::new(|| { Regex::new( r#"(?xi) diff --git a/rslib/src/lib.rs b/rslib/src/lib.rs index 8b73877064d..2258c359280 100644 --- a/rslib/src/lib.rs +++ b/rslib/src/lib.rs @@ -52,7 +52,7 @@ pub mod undo; pub mod version; use std::env; +use std::sync::LazyLock; -use once_cell::sync::Lazy; - -pub(crate) static PYTHON_UNIT_TESTS: Lazy = Lazy::new(|| env::var("ANKI_TEST_MODE").is_ok()); +pub(crate) static PYTHON_UNIT_TESTS: LazyLock = + LazyLock::new(|| env::var("ANKI_TEST_MODE").is_ok()); diff --git a/rslib/src/media/check.rs b/rslib/src/media/check.rs index 0c23918fed3..c87fadb91a1 100644 --- a/rslib/src/media/check.rs +++ b/rslib/src/media/check.rs @@ -6,11 +6,11 @@ use std::collections::HashMap; use std::collections::HashSet; use std::fs; use std::io; +use std::sync::LazyLock; use anki_i18n::without_unicode_isolation; use anki_io::write_file; use data_encoding::BASE64; -use once_cell::sync::Lazy; use regex::Regex; use tracing::debug; use tracing::info; @@ -459,7 +459,7 @@ impl MediaChecker<'_> { } fn maybe_extract_inline_image<'a>(&mut self, fname_decoded: &'a str) -> Result> { - static BASE64_IMG: Lazy = 
Lazy::new(|| { + static BASE64_IMG: LazyLock = LazyLock::new(|| { Regex::new("(?i)^data:image/(jpg|jpeg|png|gif|webp|avif);base64,(.+)$").unwrap() }); diff --git a/rslib/src/media/files.rs b/rslib/src/media/files.rs index 6f0a6b2fb6d..2cbef1a8944 100644 --- a/rslib/src/media/files.rs +++ b/rslib/src/media/files.rs @@ -7,6 +7,7 @@ use std::io; use std::io::Read; use std::path::Path; use std::path::PathBuf; +use std::sync::LazyLock; use std::time; use anki_io::create_dir; @@ -15,7 +16,6 @@ use anki_io::write_file; use anki_io::FileIoError; use anki_io::FileIoSnafu; use anki_io::FileOp; -use once_cell::sync::Lazy; use regex::Regex; use sha1::Digest; use sha1::Sha1; @@ -27,7 +27,7 @@ use unicode_normalization::UnicodeNormalization; use crate::prelude::*; use crate::sync::media::MAX_MEDIA_FILENAME_LENGTH; -static WINDOWS_DEVICE_NAME: Lazy = Lazy::new(|| { +static WINDOWS_DEVICE_NAME: LazyLock = LazyLock::new(|| { Regex::new( r"(?xi) # starting with one of the following names @@ -43,7 +43,7 @@ static WINDOWS_DEVICE_NAME: Lazy = Lazy::new(|| { ) .unwrap() }); -static WINDOWS_TRAILING_CHAR: Lazy = Lazy::new(|| { +static WINDOWS_TRAILING_CHAR: LazyLock = LazyLock::new(|| { Regex::new( r"(?x) # filenames can't end with a space or period @@ -55,7 +55,7 @@ static WINDOWS_TRAILING_CHAR: Lazy = Lazy::new(|| { ) .unwrap() }); -pub(crate) static NONSYNCABLE_FILENAME: Lazy = Lazy::new(|| { +pub(crate) static NONSYNCABLE_FILENAME: LazyLock = LazyLock::new(|| { Regex::new( r#"(?xi) ^ diff --git a/rslib/src/notetype/checks.rs b/rslib/src/notetype/checks.rs index 6ecd1d73a29..fd3f20d093d 100644 --- a/rslib/src/notetype/checks.rs +++ b/rslib/src/notetype/checks.rs @@ -4,9 +4,9 @@ use std::borrow::Cow; use std::fmt::Write; use std::ops::Deref; +use std::sync::LazyLock; use anki_i18n::without_unicode_isolation; -use once_cell::sync::Lazy; use regex::Captures; use regex::Match; use regex::Regex; @@ -24,7 +24,7 @@ struct Template<'a> { front: bool, } -static FIELD_REPLACEMENT: Lazy = 
Lazy::new(|| Regex::new(r"\{\{.+\}\}").unwrap()); +static FIELD_REPLACEMENT: LazyLock = LazyLock::new(|| Regex::new(r"\{\{.+\}\}").unwrap()); impl Collection { pub fn report_media_field_referencing_templates(&mut self, buf: &mut String) -> Result<()> { diff --git a/rslib/src/notetype/mod.rs b/rslib/src/notetype/mod.rs index d69a139152e..fc713c79863 100644 --- a/rslib/src/notetype/mod.rs +++ b/rslib/src/notetype/mod.rs @@ -20,6 +20,7 @@ use std::collections::HashMap; use std::collections::HashSet; use std::iter::FromIterator; use std::sync::Arc; +use std::sync::LazyLock; pub use anki_proto::notetypes::notetype::config::card_requirement::Kind as CardRequirementKind; pub use anki_proto::notetypes::notetype::config::CardRequirement; @@ -35,7 +36,6 @@ pub(crate) use cardgen::CardGenContext; pub use fields::NoteField; pub use notetypechange::ChangeNotetypeInput; pub use notetypechange::NotetypeChangeInfo; -use once_cell::sync::Lazy; use regex::Regex; pub(crate) use render::RenderCardOutput; pub use schema11::CardTemplateSchema11; @@ -67,7 +67,8 @@ pub(crate) const DEFAULT_CSS: &str = include_str!("styling.css"); pub(crate) const DEFAULT_CLOZE_CSS: &str = include_str!("cloze_styling.css"); pub(crate) const DEFAULT_LATEX_HEADER: &str = include_str!("header.tex"); pub(crate) const DEFAULT_LATEX_FOOTER: &str = r"\end{document}"; -static SPECIAL_FIELDS: Lazy> = Lazy::new(|| { +/// New entries must be handled in render.rs/add_special_fields(). 
+static SPECIAL_FIELDS: LazyLock> = LazyLock::new(|| { HashSet::from_iter(vec![ "FrontSide", "Card", @@ -364,7 +365,8 @@ impl Notetype { } fn ensure_template_fronts_unique(&self) -> Result<(), CardTypeError> { - static CARD_TAG: Lazy = Lazy::new(|| Regex::new(r"\{\{\s*Card\s*\}\}").unwrap()); + static CARD_TAG: LazyLock = + LazyLock::new(|| Regex::new(r"\{\{\s*Card\s*\}\}").unwrap()); let mut map = HashMap::new(); for (index, card) in self.templates.iter().enumerate() { diff --git a/rslib/src/scheduler/reviews.rs b/rslib/src/scheduler/reviews.rs index 2bfd9ce06fb..ea2f3cc4390 100644 --- a/rslib/src/scheduler/reviews.rs +++ b/rslib/src/scheduler/reviews.rs @@ -2,8 +2,8 @@ // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html use std::collections::HashMap; +use std::sync::LazyLock; -use once_cell::sync::Lazy; use rand::distributions::Distribution; use rand::distributions::Uniform; use regex::Regex; @@ -65,7 +65,7 @@ pub struct DueDateSpecifier { } pub fn parse_due_date_str(s: &str) -> Result { - static RE: Lazy = Lazy::new(|| { + static RE: LazyLock = LazyLock::new(|| { Regex::new( r"(?x)^ # a number diff --git a/rslib/src/search/parser.rs b/rslib/src/search/parser.rs index 019e1a892a3..492c88d937a 100644 --- a/rslib/src/search/parser.rs +++ b/rslib/src/search/parser.rs @@ -1,6 +1,8 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use std::sync::LazyLock; + use nom::branch::alt; use nom::bytes::complete::escaped; use nom::bytes::complete::is_not; @@ -17,7 +19,6 @@ use nom::error::ErrorKind as NomErrorKind; use nom::multi::many0; use nom::sequence::preceded; use nom::sequence::separated_pair; -use once_cell::sync::Lazy; use regex::Captures; use regex::Regex; @@ -621,7 +622,7 @@ fn parse_mid(s: &str) -> ParseResult { /// ensure a list of ids contains only numbers and commas, returning unchanged /// if true used by nid: and cid: fn check_id_list<'a>(s: &'a str, 
context: &str) -> ParseResult<'a, &'a str> { - static RE: Lazy = Lazy::new(|| Regex::new(r"^(\d+,)*\d+$").unwrap()); + static RE: LazyLock = LazyLock::new(|| Regex::new(r"^(\d+,)*\d+$").unwrap()); if RE.is_match(s) { Ok(s) } else { @@ -698,7 +699,7 @@ fn unescape(txt: &str) -> ParseResult { )) } else { Ok(if is_parser_escape(txt) { - static RE: Lazy = Lazy::new(|| Regex::new(r#"\\[\\":()-]"#).unwrap()); + static RE: LazyLock = LazyLock::new(|| Regex::new(r#"\\[\\":()-]"#).unwrap()); RE.replace_all(txt, |caps: &Captures| match &caps[0] { r"\\" => r"\\", "\\\"" => "\"", @@ -718,7 +719,7 @@ fn unescape(txt: &str) -> ParseResult { /// Return invalid escape sequence if any. fn invalid_escape_sequence(txt: &str) -> Option { // odd number of \s not followed by an escapable character - static RE: Lazy = Lazy::new(|| { + static RE: LazyLock = LazyLock::new(|| { Regex::new( r#"(?x) (?:^|[^\\]) # not a backslash @@ -737,7 +738,7 @@ fn invalid_escape_sequence(txt: &str) -> Option { /// Check string for escape sequences handled by the parser: ":()- fn is_parser_escape(txt: &str) -> bool { // odd number of \s followed by a char with special meaning to the parser - static RE: Lazy = Lazy::new(|| { + static RE: LazyLock = LazyLock::new(|| { Regex::new( r#"(?x) (?:^|[^\\]) # not a backslash diff --git a/rslib/src/search/writer.rs b/rslib/src/search/writer.rs index 5567545b95e..600a18fd67a 100644 --- a/rslib/src/search/writer.rs +++ b/rslib/src/search/writer.rs @@ -2,8 +2,8 @@ // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html use std::mem; +use std::sync::LazyLock; -use once_cell::sync::Lazy; use regex::Regex; use crate::notetype::NotetypeId as NotetypeIdType; @@ -109,8 +109,8 @@ fn maybe_quote(txt: &str) -> String { /// Checks for the reserved keywords "and" and "or", a prepended hyphen, /// whitespace and brackets. 
fn needs_quotation(txt: &str) -> bool { - static RE: Lazy = - Lazy::new(|| Regex::new("(?i)^and$|^or$|^-.| |\u{3000}|\\(|\\)").unwrap()); + static RE: LazyLock = + LazyLock::new(|| Regex::new("(?i)^and$|^or$|^-.| |\u{3000}|\\(|\\)").unwrap()); RE.is_match(txt) } diff --git a/rslib/src/sync/collection/tests.rs b/rslib/src/sync/collection/tests.rs index 991b895ac80..abf82262fd2 100644 --- a/rslib/src/sync/collection/tests.rs +++ b/rslib/src/sync/collection/tests.rs @@ -4,9 +4,9 @@ #![cfg(test)] use std::future::Future; +use std::sync::LazyLock; use axum::http::StatusCode; -use once_cell::sync::Lazy; use reqwest::Client; use reqwest::Url; use serde_json::json; @@ -57,7 +57,7 @@ struct TestAuth { host_key: String, } -static AUTH: Lazy = Lazy::new(|| { +static AUTH: LazyLock = LazyLock::new(|| { if let Ok(auth) = std::env::var("TEST_AUTH") { let mut auth = auth.split(':'); TestAuth { @@ -93,7 +93,7 @@ where .unwrap(); tokio::spawn(server_fut.instrument(Span::current())); // when not using ephemeral servers, tests need to be serialized - static LOCK: Lazy> = Lazy::new(|| Mutex::new(())); + static LOCK: LazyLock> = LazyLock::new(|| Mutex::new(())); let _lock: MutexGuard<()>; // setup client to connect to it let endpoint = if let Ok(endpoint) = std::env::var("TEST_ENDPOINT") { diff --git a/rslib/src/sync/request/mod.rs b/rslib/src/sync/request/mod.rs index 3e876ab5b2f..4678cef9bba 100644 --- a/rslib/src/sync/request/mod.rs +++ b/rslib/src/sync/request/mod.rs @@ -8,6 +8,7 @@ use std::any::Any; use std::env; use std::marker::PhantomData; use std::net::IpAddr; +use std::sync::LazyLock; use async_trait::async_trait; use axum::body::Body; @@ -19,7 +20,6 @@ use axum::RequestPartsExt; use axum_client_ip::SecureClientIp; use axum_extra::TypedHeader; use header_and_stream::SyncHeader; -use once_cell::sync::Lazy; use serde::de::DeserializeOwned; use serde::Serialize; use serde_json::Error; @@ -179,7 +179,7 @@ where } } -pub static MAXIMUM_SYNC_PAYLOAD_BYTES: Lazy = Lazy::new(|| { 
+pub static MAXIMUM_SYNC_PAYLOAD_BYTES: LazyLock = LazyLock::new(|| { env::var("MAX_SYNC_PAYLOAD_MEGS") .map(|v| v.parse().expect("invalid upload limit")) .unwrap_or(100) @@ -189,5 +189,5 @@ pub static MAXIMUM_SYNC_PAYLOAD_BYTES: Lazy = Lazy::new(|| { /// Client ignores this when a non-AnkiWeb endpoint is configured. Controls the /// maximum size of a payload after decompression, which effectively limits the /// how large a collection file can be uploaded. -pub static MAXIMUM_SYNC_PAYLOAD_BYTES_UNCOMPRESSED: Lazy = - Lazy::new(|| (*MAXIMUM_SYNC_PAYLOAD_BYTES * 3) as u64); +pub static MAXIMUM_SYNC_PAYLOAD_BYTES_UNCOMPRESSED: LazyLock = + LazyLock::new(|| (*MAXIMUM_SYNC_PAYLOAD_BYTES * 3) as u64); diff --git a/rslib/src/template.rs b/rslib/src/template.rs index 13b70616b36..4956b09b95e 100644 --- a/rslib/src/template.rs +++ b/rslib/src/template.rs @@ -6,6 +6,7 @@ use std::collections::HashMap; use std::collections::HashSet; use std::fmt::Write; use std::iter; +use std::sync::LazyLock; use anki_i18n::I18n; use nom::branch::alt; @@ -15,7 +16,6 @@ use nom::combinator::map; use nom::combinator::rest; use nom::combinator::verify; use nom::sequence::delimited; -use once_cell::sync::Lazy; use regex::Regex; use crate::cloze::add_cloze_numbers_in_string; @@ -546,7 +546,7 @@ fn append_str_to_nodes(nodes: &mut Vec, text: &str) { /// True if provided text contains only whitespace and/or empty BR/DIV tags. 
pub(crate) fn field_is_empty(text: &str) -> bool { - static RE: Lazy = Lazy::new(|| { + static RE: LazyLock = LazyLock::new(|| { Regex::new( r"(?xsi) ^(?: diff --git a/rslib/src/template_filters.rs b/rslib/src/template_filters.rs index f1246f7a64d..369a9ecf4ae 100644 --- a/rslib/src/template_filters.rs +++ b/rslib/src/template_filters.rs @@ -2,9 +2,9 @@ // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html use std::borrow::Cow; +use std::sync::LazyLock; use blake3::Hasher; -use once_cell::sync::Lazy; use regex::Captures; use regex::Regex; @@ -107,7 +107,7 @@ fn apply_filter( // Ruby filters //---------------------------------------- -static FURIGANA: Lazy = Lazy::new(|| Regex::new(r" ?([^ >]+?)\[(.+?)\]").unwrap()); +static FURIGANA: LazyLock = LazyLock::new(|| Regex::new(r" ?([^ >]+?)\[(.+?)\]").unwrap()); /// Did furigana regex match a sound tag? fn captured_sound(caps: &Captures) -> bool { diff --git a/rslib/src/text.rs b/rslib/src/text.rs index c2f6103d783..432544c929a 100644 --- a/rslib/src/text.rs +++ b/rslib/src/text.rs @@ -2,8 +2,8 @@ // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html use std::borrow::Cow; +use std::sync::LazyLock; -use once_cell::sync::Lazy; use percent_encoding_iri::percent_decode_str; use percent_encoding_iri::utf8_percent_encode; use percent_encoding_iri::AsciiSet; @@ -78,7 +78,7 @@ pub enum AvTag { }, } -static HTML: Lazy = Lazy::new(|| { +static HTML: LazyLock = LazyLock::new(|| { Regex::new(concat!( "(?si)", // wrapped text @@ -88,7 +88,7 @@ static HTML: Lazy = Lazy::new(|| { )) .unwrap() }); -static HTML_LINEBREAK_TAGS: Lazy = Lazy::new(|| { +static HTML_LINEBREAK_TAGS: LazyLock = LazyLock::new(|| { Regex::new( r#"(?xsi) = Lazy::new(|| { .unwrap() }); -pub static HTML_MEDIA_TAGS: Lazy = Lazy::new(|| { +pub static HTML_MEDIA_TAGS: LazyLock = LazyLock::new(|| { Regex::new( r#"(?xsi) # the start of the image, audio, or object tag @@ -150,7 +150,7 @@ pub static HTML_MEDIA_TAGS: Lazy 
= Lazy::new(|| { }); // videos are also in sound tags -static AV_TAGS: Lazy = Lazy::new(|| { +static AV_TAGS: LazyLock = LazyLock::new(|| { Regex::new( r"(?xs) \[sound:(.+?)\] # 1 - the filename in a sound tag @@ -164,14 +164,15 @@ static AV_TAGS: Lazy = Lazy::new(|| { .unwrap() }); -static PERSISTENT_HTML_SPACERS: Lazy = - Lazy::new(|| Regex::new(r"(?i)<br\s*/?>|<div>
|\n").unwrap()); +static PERSISTENT_HTML_SPACERS: LazyLock = + LazyLock::new(|| Regex::new(r"(?i)<br\s*/?>|<div>
|\n").unwrap()); -static TYPE_TAG: Lazy = Lazy::new(|| Regex::new(r"\[\[type:[^]]+\]\]").unwrap()); -pub(crate) static SOUND_TAG: Lazy = Lazy::new(|| Regex::new(r"\[sound:([^]]+)\]").unwrap()); +static TYPE_TAG: LazyLock = LazyLock::new(|| Regex::new(r"\[\[type:[^]]+\]\]").unwrap()); +pub(crate) static SOUND_TAG: LazyLock = + LazyLock::new(|| Regex::new(r"\[sound:([^]]+)\]").unwrap()); /// Files included in CSS with a leading underscore. -static UNDERSCORED_CSS_IMPORTS: Lazy = Lazy::new(|| { +static UNDERSCORED_CSS_IMPORTS: LazyLock = LazyLock::new(|| { Regex::new( r#"(?xi) (?:@import\s+ # import statement with a bare @@ -193,7 +194,7 @@ static UNDERSCORED_CSS_IMPORTS: Lazy = Lazy::new(|| { }); /// Strings, src and data attributes with a leading underscore. -static UNDERSCORED_REFERENCES: Lazy = Lazy::new(|| { +static UNDERSCORED_REFERENCES: LazyLock = LazyLock::new(|| { Regex::new( r#"(?x) \[sound:(_[^]]+)\] # a filename in an Anki sound tag @@ -457,7 +458,7 @@ pub(crate) fn without_combining(s: &str) -> Cow { /// Check if string contains an unescaped wildcard. pub(crate) fn is_glob(txt: &str) -> bool { // even number of \s followed by a wildcard - static RE: Lazy = Lazy::new(|| { + static RE: LazyLock = LazyLock::new(|| { Regex::new( r"(?x) (?:^|[^\\]) # not a backslash @@ -478,7 +479,7 @@ pub(crate) fn to_re(txt: &str) -> Cow { /// Convert Anki style to RegEx using the provided wildcard. pub(crate) fn to_custom_re<'a>(txt: &'a str, wildcard: &str) -> Cow<'a, str> { - static RE: Lazy = Lazy::new(|| Regex::new(r"\\?.").unwrap()); + static RE: LazyLock = LazyLock::new(|| Regex::new(r"\\?.").unwrap()); RE.replace_all(txt, |caps: &Captures| { let s = &caps[0]; match s { @@ -494,7 +495,7 @@ pub(crate) fn to_custom_re<'a>(txt: &'a str, wildcard: &str) -> Cow<'a, str> { /// Convert to SQL respecting Anki wildcards. 
pub(crate) fn to_sql(txt: &str) -> Cow { // escape sequences and unescaped special characters which need conversion - static RE: Lazy = Lazy::new(|| Regex::new(r"\\[\\*]|[*%]").unwrap()); + static RE: LazyLock = LazyLock::new(|| Regex::new(r"\\[\\*]|[*%]").unwrap()); RE.replace_all(txt, |caps: &Captures| { let s = &caps[0]; match s { @@ -509,13 +510,13 @@ pub(crate) fn to_sql(txt: &str) -> Cow { /// Unescape everything. pub(crate) fn to_text(txt: &str) -> Cow { - static RE: Lazy = Lazy::new(|| Regex::new(r"\\(.)").unwrap()); + static RE: LazyLock = LazyLock::new(|| Regex::new(r"\\(.)").unwrap()); RE.replace_all(txt, "$1") } /// Escape Anki wildcards and the backslash for escaping them: \*_ pub(crate) fn escape_anki_wildcards(txt: &str) -> String { - static RE: Lazy = Lazy::new(|| Regex::new(r"[\\*_]").unwrap()); + static RE: LazyLock = LazyLock::new(|| Regex::new(r"[\\*_]").unwrap()); RE.replace_all(txt, r"\$0").into() } @@ -548,8 +549,8 @@ pub(crate) fn glob_matcher(search: &str) -> impl Fn(&str) -> bool + '_ { } } -pub(crate) static REMOTE_FILENAME: Lazy = - Lazy::new(|| Regex::new("(?i)^https?://").unwrap()); +pub(crate) static REMOTE_FILENAME: LazyLock = + LazyLock::new(|| Regex::new("(?i)^https?://").unwrap()); /// https://url.spec.whatwg.org/#fragment-percent-encode-set const FRAGMENT_QUERY_UNION: &AsciiSet = &CONTROLS diff --git a/rslib/src/typeanswer.rs b/rslib/src/typeanswer.rs index b8d8d4b9a3d..3ff8de3f960 100644 --- a/rslib/src/typeanswer.rs +++ b/rslib/src/typeanswer.rs @@ -2,9 +2,9 @@ // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html use std::borrow::Cow; +use std::sync::LazyLock; use difflib::sequencematcher::SequenceMatcher; -use once_cell::sync::Lazy; use regex::Regex; use unic_ucd_category::GeneralCategory; @@ -12,7 +12,7 @@ use crate::card_rendering::strip_av_tags; use crate::text::normalize_to_nfc; use crate::text::strip_html; -static LINEBREAKS: Lazy = Lazy::new(|| { +static LINEBREAKS: LazyLock = LazyLock::new(|| 
{ Regex::new( r"(?six) ( diff --git a/rslib/src/version.rs b/rslib/src/version.rs index ca05b95964a..29f8403040a 100644 --- a/rslib/src/version.rs +++ b/rslib/src/version.rs @@ -2,8 +2,7 @@ // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html use std::env; - -use once_cell::sync::Lazy; +use std::sync::LazyLock; pub fn version() -> &'static str { include_str!("../../.version").trim() @@ -14,7 +13,7 @@ pub fn buildhash() -> &'static str { } pub(crate) fn sync_client_version() -> &'static str { - static VER: Lazy = Lazy::new(|| { + static VER: LazyLock = LazyLock::new(|| { format!( "anki,{version} ({buildhash}),{platform}", version = version(), @@ -26,7 +25,7 @@ pub(crate) fn sync_client_version() -> &'static str { } pub(crate) fn sync_client_version_short() -> &'static str { - static VER: Lazy = Lazy::new(|| { + static VER: LazyLock = LazyLock::new(|| { format!( "{version},{buildhash},{platform}", version = version(), diff --git a/tools/minilints/Cargo.toml b/tools/minilints/Cargo.toml index 3c284ada59d..bb6629f3892 100644 --- a/tools/minilints/Cargo.toml +++ b/tools/minilints/Cargo.toml @@ -12,6 +12,5 @@ anki_io.workspace = true anki_process.workspace = true anyhow.workspace = true camino.workspace = true -once_cell.workspace = true walkdir.workspace = true which.workspace = true diff --git a/tools/minilints/src/main.rs b/tools/minilints/src/main.rs index d3b384256fa..811f37b2b8a 100644 --- a/tools/minilints/src/main.rs +++ b/tools/minilints/src/main.rs @@ -1,6 +1,7 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html +use std::cell::LazyCell; use std::collections::HashSet; use std::env; use std::fs; @@ -16,7 +17,6 @@ use anki_process::CommandExt; use anyhow::Context; use anyhow::Result; use camino::Utf8Path; -use once_cell::unsync::Lazy; use walkdir::WalkDir; const NONSTANDARD_HEADER: &[&str] = &[ @@ -63,7 +63,7 @@ fn main() -> Result<()> { struct LintContext { 
want_fix: bool, - unstaged_changes: Lazy<()>, + unstaged_changes: LazyCell<()>, found_problems: bool, nonstandard_headers: HashSet<&'static Utf8Path>, } @@ -72,7 +72,7 @@ impl LintContext { pub fn new(want_fix: bool) -> Self { Self { want_fix, - unstaged_changes: Lazy::new(check_for_unstaged_changes), + unstaged_changes: LazyCell::new(check_for_unstaged_changes), found_problems: false, nonstandard_headers: NONSTANDARD_HEADER.iter().map(Utf8Path::new).collect(), } @@ -113,7 +113,7 @@ impl LintContext { let missing = !head.contains("Ankitects Pty Ltd and contributors"); if missing { if self.want_fix { - Lazy::force(&self.unstaged_changes); + LazyCell::force(&self.unstaged_changes); fix_copyright(path)?; } else { println!("missing standard copyright header: {:?}", path);