diff --git a/core/src/cell.rs b/core/src/cell.rs
index ac026de95da12..6cdc1e00f4790 100644
--- a/core/src/cell.rs
+++ b/core/src/cell.rs
@@ -82,6 +82,20 @@
 //!
 //! The corresponding [`Sync`] version of `OnceCell` is [`OnceLock`].
 //!
+//! ## `LazyCell`
+//!
+//! A common pattern with OnceCell is, for a given OnceCell, to use the same function on every
+//! call to [`OnceCell::get_or_init`] with that cell. This is what is offered by [`LazyCell`],
+//! which pairs cells of `T` with functions of `F`, and always calls `F` before it yields `&T`.
+//! This happens implicitly by simply attempting to dereference the LazyCell to get its contents,
+//! so using it is much more transparent: it reads like a place that was initialized by a constant.
+//!
+//! More complicated patterns that don't fit this description can be built on `OnceCell` instead.
+//!
+//! `LazyCell` works by providing an implementation of `impl Deref` that calls the function,
+//! so you can just use it by dereference (e.g. `*lazy_cell` or `lazy_cell.deref()`).
+//!
+//! The corresponding [`Sync`] version of `LazyCell` is [`LazyLock`].
 //!
 //! # When to choose interior mutability
 //!
@@ -230,6 +244,7 @@
 //! [`RwLock`]: ../../std/sync/struct.RwLock.html
 //! [`Mutex`]: ../../std/sync/struct.Mutex.html
 //! [`OnceLock`]: ../../std/sync/struct.OnceLock.html
+//! [`LazyLock`]: ../../std/sync/struct.LazyLock.html
 //! [`Sync`]: ../../std/marker/trait.Sync.html
 //! [`atomic`]: crate::sync::atomic

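A minimal sketch of the deref-driven initialization the new `cell.rs` section describes, assuming a toolchain where `LazyCell` is stable (1.80+); the `Vec` payload and the `println!` are invented for illustration and are not taken from the patch:

```rust
use std::cell::LazyCell;

fn main() {
    // The closure is stored next to the (still empty) cell; nothing runs yet.
    let lazy: LazyCell<Vec<i32>> = LazyCell::new(|| {
        println!("initializing");
        (1..=3).collect()
    });

    // The first dereference (here via method-call auto-deref) runs the closure
    // and caches the result...
    assert_eq!(lazy.len(), 3);
    // ...and later dereferences hand back the cached value, so "initializing"
    // is printed only once.
    assert_eq!(*lazy, vec![1, 2, 3]);
}
```

The same shape works for `LazyLock` in statics, as the `lazy_lock.rs` example below shows.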
diff --git a/std/src/sync/lazy_lock.rs b/std/src/sync/lazy_lock.rs
index d3bb3bfdff92a..7a2eed93dd4f8 100644
--- a/std/src/sync/lazy_lock.rs
+++ b/std/src/sync/lazy_lock.rs
@@ -29,34 +29,26 @@ union Data<T, F> {
 /// # Examples
 ///
 /// Initialize static variables with `LazyLock`.
-///
 /// ```
-/// use std::collections::HashMap;
-///
 /// use std::sync::LazyLock;
 ///
-/// static HASHMAP: LazyLock<HashMap<i32, String>> = LazyLock::new(|| {
-///     println!("initializing");
-///     let mut m = HashMap::new();
-///     m.insert(13, "Spica".to_string());
-///     m.insert(74, "Hoyten".to_string());
-///     m
+/// // n.b. static items do not call [`Drop`] on program termination, so this won't be deallocated.
+/// // this is fine, as the OS can deallocate the terminated program faster than we can free memory
+/// // but tools like valgrind might report "memory leaks" as it isn't obvious this is intentional.
+/// static DEEP_THOUGHT: LazyLock<String> = LazyLock::new(|| {
+/// # mod another_crate {
+/// #     pub fn great_question() -> String { "42".to_string() }
+/// # }
+///     // M3 Ultra takes about 16 million years in --release config
+///     another_crate::great_question()
 /// });
 ///
-/// fn main() {
-///     println!("ready");
-///     std::thread::spawn(|| {
-///         println!("{:?}", HASHMAP.get(&13));
-///     }).join().unwrap();
-///     println!("{:?}", HASHMAP.get(&74));
-///
-///     // Prints:
-///     //   ready
-///     //   initializing
-///     //   Some("Spica")
-///     //   Some("Hoyten")
-/// }
+/// // The `String` is built, stored in the `LazyLock`, and returned as `&String`.
+/// let _ = &*DEEP_THOUGHT;
+/// // The `String` is retrieved from the `LazyLock` and returned as `&String`.
+/// let _ = &*DEEP_THOUGHT;
 /// ```
+///
 /// Initialize fields with `LazyLock`.
 /// ```
 /// use std::sync::LazyLock;
diff --git a/std/src/sync/mod.rs b/std/src/sync/mod.rs
index fb7d601b09478..70e8f5f90c61e 100644
--- a/std/src/sync/mod.rs
+++ b/std/src/sync/mod.rs
@@ -136,7 +136,10 @@
 //! - [`Once`]: Used for a thread-safe, one-time global initialization routine
 //!
 //! - [`OnceLock`]: Used for thread-safe, one-time initialization of a
-//!   global variable.
+//!   variable, with potentially different initializers based on the caller.
+//!
+//! - [`LazyLock`]: Used for thread-safe, one-time initialization of a
+//!   variable, using one nullary initializer function provided at creation.
 //!
 //! - [`RwLock`]: Provides a mutual exclusion mechanism which allows
 //!   multiple readers at the same time, while allowing only one
diff --git a/std/src/sync/once_lock.rs b/std/src/sync/once_lock.rs
index 6b9f70da85470..f52b9e52c54de 100644
--- a/std/src/sync/once_lock.rs
+++ b/std/src/sync/once_lock.rs
@@ -8,47 +8,17 @@ use crate::sync::Once;
 /// A synchronization primitive which can nominally be written to only once.
 ///
 /// This type is a thread-safe [`OnceCell`], and can be used in statics.
+/// In many simple cases, you can use [`LazyLock`] instead to get the benefits of this type
+/// with less effort: `LazyLock` "looks like" `&T` because it initializes with `F` on deref!
+/// Where OnceLock shines is when LazyLock is too simple to support a given case, as LazyLock
+/// doesn't allow additional inputs to its function after you call [`LazyLock::new(|| ...)`].
 ///
 /// [`OnceCell`]: crate::cell::OnceCell
+/// [`LazyLock`]: crate::sync::LazyLock
+/// [`LazyLock::new(|| ...)`]: crate::sync::LazyLock::new
 ///
 /// # Examples
 ///
-/// Using `OnceLock` to store a function’s previously computed value (a.k.a.
-/// ‘lazy static’ or ‘memoizing’):
-///
-/// ```
-/// use std::sync::OnceLock;
-///
-/// struct DeepThought {
-///     answer: String,
-/// }
-///
-/// impl DeepThought {
-///     # fn great_question() -> String {
-///     #     "42".to_string()
-///     # }
-///     #
-///     fn new() -> Self {
-///         Self {
-///             // M3 Ultra takes about 16 million years in --release config
-///             answer: Self::great_question(),
-///         }
-///     }
-/// }
-///
-/// fn computation() -> &'static DeepThought {
-///     // n.b. static items do not call [`Drop`] on program termination, so if
-///     // [`DeepThought`] impls Drop, that will not be used for this instance.
-///     static COMPUTATION: OnceLock<DeepThought> = OnceLock::new();
-///     COMPUTATION.get_or_init(|| DeepThought::new())
-/// }
-///
-/// // The `DeepThought` is built, stored in the `OnceLock`, and returned.
-/// let _ = computation().answer;
-/// // The `DeepThought` is retrieved from the `OnceLock` and returned.
-/// let _ = computation().answer;
-/// ```
-///
 /// Writing to a `OnceLock` from a separate thread:
 ///
 /// ```
@@ -73,6 +43,55 @@ use crate::sync::Once;
 ///     Some(&12345),
 /// );
 /// ```
+///
+/// You can use `OnceLock` to implement a type that requires "append-only" logic:
+///
+/// ```
+/// use std::sync::{OnceLock, atomic::{AtomicU32, Ordering}};
+/// use std::thread;
+///
+/// struct OnceList<T> {
+///     data: OnceLock<T>,
+///     next: OnceLock<Box<OnceList<T>>>,
+/// }
+/// impl<T> OnceList<T> {
+///     const fn new() -> OnceList<T> {
+///         OnceList { data: OnceLock::new(), next: OnceLock::new() }
+///     }
+///     fn push(&self, value: T) {
+///         // FIXME: this impl is concise, but is also slow for long lists or many threads.
+///         // as an exercise, consider how you might improve on it while preserving the behavior
+///         if let Err(value) = self.data.set(value) {
+///             let next = self.next.get_or_init(|| Box::new(OnceList::new()));
+///             next.push(value)
+///         };
+///     }
+///     fn contains(&self, example: &T) -> bool
+///     where
+///         T: PartialEq,
+///     {
+///         self.data.get().map(|item| item == example).filter(|v| *v).unwrap_or_else(|| {
+///             self.next.get().map(|next| next.contains(example)).unwrap_or(false)
+///         })
+///     }
+/// }
+///
+/// // Let's exercise this new Sync append-only list by doing a little counting
+/// static LIST: OnceList<u32> = OnceList::new();
+/// static COUNTER: AtomicU32 = AtomicU32::new(0);
+///
+/// let vec = (0..thread::available_parallelism().unwrap().get()).map(|_| thread::spawn(|| {
+///     while let i @ 0..=1000 = COUNTER.fetch_add(1, Ordering::Relaxed) {
+///         LIST.push(i);
+///     }
+/// })).collect::<Vec<thread::JoinHandle<()>>>();
+/// vec.into_iter().for_each(|handle| handle.join().unwrap());
+///
+/// for i in 0..=1000 {
+///     assert!(LIST.contains(&i));
+/// }
+///
+/// ```
 #[stable(feature = "once_cell", since = "1.70.0")]
 pub struct OnceLock<T> {
     once: Once,
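To make the distinction drawn in the `mod.rs` bullets and the `once_lock.rs` intro concrete, here is a small sketch; the `CONFIG_DIR` and `cli_override` names and the paths are invented for illustration. The `LazyLock` initializer is nullary and fixed at construction, while `OnceLock::get_or_init` lets each call site pass in whatever inputs it has, with the first initializer to run populating the cell:

```rust
use std::sync::{LazyLock, OnceLock};

// LazyLock: the initializer takes no arguments and is fixed when the static is defined.
static GREETING: LazyLock<String> = LazyLock::new(|| "hello".to_uppercase());

// OnceLock: the cell starts empty; each caller may bring its own initializer.
static CONFIG_DIR: OnceLock<String> = OnceLock::new();

fn config_dir(cli_override: Option<&str>) -> &'static str {
    CONFIG_DIR.get_or_init(|| {
        // The initializer can use inputs that are only known at the call site.
        cli_override.unwrap_or("/etc/example").to_string()
    })
}

fn main() {
    assert_eq!(&*GREETING, "HELLO");

    // The first initializer to run populates the cell...
    assert_eq!(config_dir(Some("/tmp/example")), "/tmp/example");
    // ...and later calls just read the stored value.
    assert_eq!(config_dir(None), "/tmp/example");
}
```

Later calls simply read the stored value, which is why `config_dir(None)` still returns the path chosen by the first call.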