diff --git a/Cargo.toml b/Cargo.toml
index d27857ca..db6d2e30 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -21,11 +21,15 @@ maintenance = { status = "experimental" }
 
 [features]
 sanitize = ['crossbeam-epoch/sanitize']
-std = ["crossbeam-epoch/std", "num_cpus"]
+std = ["crossbeam-epoch/std", "num_cpus", "parking_lot"]
 default = ["std"]
 
 [dependencies]
-parking_lot = "0.10"
+lock_api = "0.3.3"
+
+[dependencies.parking_lot]
+version = "0.10"
+optional = true
 
 [dependencies.num_cpus]
 version = "1.12.0"
diff --git a/src/iter/iter.rs b/src/iter/iter.rs
index 90364188..b83fba7f 100644
--- a/src/iter/iter.rs
+++ b/src/iter/iter.rs
@@ -6,12 +6,18 @@ use crossbeam_epoch::Guard;
 ///
 /// See [`HashMap::iter`](crate::HashMap::iter) for details.
 #[derive(Debug)]
-pub struct Iter<'g, K, V> {
-    pub(crate) node_iter: NodeIter<'g, K, V>,
+pub struct Iter<'g, K, V, L>
+where
+    L: lock_api::RawMutex,
+{
+    pub(crate) node_iter: NodeIter<'g, K, V, L>,
     pub(crate) guard: &'g Guard,
 }
 
-impl<'g, K, V> Iterator for Iter<'g, K, V> {
+impl<'g, K, V, L> Iterator for Iter<'g, K, V, L>
+where
+    L: lock_api::RawMutex,
+{
     type Item = (&'g K, &'g V);
     fn next(&mut self) -> Option<Self::Item> {
         let node = self.node_iter.next()?;
@@ -26,11 +32,17 @@ impl<'g, K, V> Iterator for Iter<'g, K, V> {
 ///
 /// See [`HashMap::keys`](crate::HashMap::keys) for details.
 #[derive(Debug)]
-pub struct Keys<'g, K, V> {
-    pub(crate) node_iter: NodeIter<'g, K, V>,
+pub struct Keys<'g, K, V, L>
+where
+    L: lock_api::RawMutex,
+{
+    pub(crate) node_iter: NodeIter<'g, K, V, L>,
 }
 
-impl<'g, K, V> Iterator for Keys<'g, K, V> {
+impl<'g, K, V, L> Iterator for Keys<'g, K, V, L>
+where
+    L: lock_api::RawMutex,
+{
     type Item = &'g K;
     fn next(&mut self) -> Option<Self::Item> {
         let node = self.node_iter.next()?;
@@ -42,12 +54,18 @@ impl<'g, K, V> Iterator for Keys<'g, K, V> {
 ///
 /// See [`HashMap::values`](crate::HashMap::values) for details.
 #[derive(Debug)]
-pub struct Values<'g, K, V> {
-    pub(crate) node_iter: NodeIter<'g, K, V>,
+pub struct Values<'g, K, V, L>
+where
+    L: lock_api::RawMutex,
+{
+    pub(crate) node_iter: NodeIter<'g, K, V, L>,
     pub(crate) guard: &'g Guard,
 }
 
-impl<'g, K, V> Iterator for Values<'g, K, V> {
+impl<'g, K, V, L> Iterator for Values<'g, K, V, L>
+where
+    L: lock_api::RawMutex,
+{
     type Item = &'g V;
     fn next(&mut self) -> Option<Self::Item> {
         let node = self.node_iter.next()?;
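The `L: lock_api::RawMutex` bound threaded through these iterators is what lets `no_std` users bring their own lock. For reference, a minimal spinlock satisfying that trait; this is an illustrative sketch, not part of the diff (`RawSpinlock` is our name for it), written against `lock_api` 0.3 as pinned above, where `unlock` is still a safe method:

```rust
use core::sync::atomic::{spin_loop_hint, AtomicBool, Ordering};

/// A minimal test-and-set spinlock; any such type can now stand in for `L`.
pub struct RawSpinlock(AtomicBool);

unsafe impl lock_api::RawMutex for RawSpinlock {
    // Initial (unlocked) state.
    const INIT: Self = RawSpinlock(AtomicBool::new(false));
    // The guard may be sent to, and released on, another thread.
    type GuardMarker = lock_api::GuardSend;

    fn lock(&self) {
        // Spin until we win the false -> true transition.
        while !self.try_lock() {
            spin_loop_hint();
        }
    }

    fn try_lock(&self) -> bool {
        self.0
            .compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed)
            .is_ok()
    }

    fn unlock(&self) {
        self.0.store(false, Ordering::Release);
    }
}
```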
diff --git a/src/iter/traverser.rs b/src/iter/traverser.rs
index d309b746..43cddb6d 100644
--- a/src/iter/traverser.rs
+++ b/src/iter/traverser.rs
@@ -9,15 +9,18 @@ use core::sync::atomic::Ordering;
 use crossbeam_epoch::{Guard, Shared};
 
 #[derive(Debug)]
-pub(crate) struct NodeIter<'g, K, V> {
+pub(crate) struct NodeIter<'g, K, V, L>
+where
+    L: lock_api::RawMutex,
+{
     /// Current table; update if resized
-    table: Option<&'g Table<K, V>>,
+    table: Option<&'g Table<K, V, L>>,
 
-    stack: Option<Box<TableStack<'g, K, V>>>,
-    spare: Option<Box<TableStack<'g, K, V>>>,
+    stack: Option<Box<TableStack<'g, K, V, L>>>,
+    spare: Option<Box<TableStack<'g, K, V, L>>>,
 
     /// The last bin entry iterated over
-    prev: Option<&'g Node<K, V>>,
+    prev: Option<&'g Node<K, V, L>>,
 
     /// Index of bin to use next
     index: usize,
@@ -34,8 +37,11 @@ pub(crate) struct NodeIter<'g, K, V> {
     guard: &'g Guard,
 }
 
-impl<'g, K, V> NodeIter<'g, K, V> {
-    pub(crate) fn new(table: Shared<'g, Table<K, V>>, guard: &'g Guard) -> Self {
+impl<'g, K, V, L> NodeIter<'g, K, V, L>
+where
+    L: lock_api::RawMutex,
+{
+    pub(crate) fn new(table: Shared<'g, Table<K, V, L>>, guard: &'g Guard) -> Self {
         let (table, len) = if table.is_null() {
             (None, 0)
         } else {
@@ -58,7 +64,7 @@ impl<'g, K, V> NodeIter<'g, K, V> {
         }
     }
 
-    fn push_state(&mut self, t: &'g Table<K, V>, i: usize, n: usize) {
+    fn push_state(&mut self, t: &'g Table<K, V, L>, i: usize, n: usize) {
         let mut s = self.spare.take();
         if let Some(ref mut s) = s {
             self.spare = s.next.take();
@@ -114,8 +120,11 @@ impl<'g, K, V> NodeIter<'g, K, V> {
     }
 }
 
-impl<'g, K, V> Iterator for NodeIter<'g, K, V> {
-    type Item = &'g Node<K, V>;
+impl<'g, K, V, L> Iterator for NodeIter<'g, K, V, L>
+where
+    L: lock_api::RawMutex,
+{
+    type Item = &'g Node<K, V, L>;
     fn next(&mut self) -> Option<Self::Item> {
         let mut e = None;
         if let Some(prev) = self.prev {
@@ -182,11 +191,14 @@ impl<'g, K, V> Iterator for NodeIter<'g, K, V> {
 }
 
 #[derive(Debug)]
-struct TableStack<'g, K, V> {
+struct TableStack<'g, K, V, L>
+where
+    L: lock_api::RawMutex,
+{
     length: usize,
     index: usize,
-    table: &'g Table<K, V>,
-    next: Option<Box<TableStack<'g, K, V>>>,
+    table: &'g Table<K, V, L>,
+    next: Option<Box<TableStack<'g, K, V, L>>>,
 }
 
 #[cfg(test)]
@@ -194,7 +206,7 @@ mod tests {
     use super::*;
     use crate::raw::Table;
     use crossbeam_epoch::{self as epoch, Atomic, Owned};
-    use parking_lot::Mutex;
+    use lock_api::Mutex;
 
     #[test]
     fn iter_new() {
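For orientation, this is how the `Guard`-threaded iterators above are consumed. Under the `std` feature the new `L` parameter defaults away, so existing callers compile unchanged. A sketch against the crate's existing public API (assuming its `epoch` re-export):

```rust
fn main() {
    let map = flurry::HashMap::new();

    // All map operations happen under an epoch guard; the guard also bounds
    // the lifetime 'g of everything Iter/Keys/Values yields.
    let guard = flurry::epoch::pin();
    map.insert(1, "a", &guard);
    map.insert(2, "b", &guard);

    for (k, v) in map.iter(&guard) {
        println!("{} -> {}", k, v);
    }
}
```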
diff --git a/src/map.rs b/src/map.rs
index eec7ef4d..55ea8889 100644
--- a/src/map.rs
+++ b/src/map.rs
@@ -64,13 +64,19 @@ macro_rules! load_factor {
 /// See [`ahash`](https://github.com/tkaitchuck/ahash) for more information.
 ///
 /// See the [crate-level documentation](index.html) for details.
-pub struct HashMap<K, V, S = crate::DefaultHashBuilder> {
+#[cfg(feature = "std")]
+pub struct HashMap<K, V, L = parking_lot::RawMutex, S = crate::DefaultHashBuilder>
+where
+    L: lock_api::RawMutex,
+    S: BuildHasher,
+{
+    // NOTE: if you change any field here, you must _also_ change it in the copy below
     /// The array of bins. Lazily initialized upon first insertion.
     /// Size is always a power of two. Accessed directly by iterators.
-    table: Atomic<Table<K, V>>,
+    table: Atomic<Table<K, V, L>>,
 
     /// The next table to use; non-null only while resizing.
-    next_table: Atomic<Table<K, V>>,
+    next_table: Atomic<Table<K, V, L>>,
 
     /// The next table index (plus one) to split while resizing.
     transfer_index: AtomicIsize,
@@ -88,10 +94,36 @@ pub struct HashMap<K, V, S = crate::DefaultHashBuilder> {
     build_hasher: S,
 }
 
-impl<K, V, S> Default for HashMap<K, V, S>
+/// A concurrent hash table.
+///
+/// Note that `ahash::RandomState`, the default value of `S`, is not
+/// cryptographically secure. Therefore it is strongly recommended that you do
+/// not use this hash for cryptographic purposes.
+/// See [`ahash`](https://github.com/tkaitchuck/ahash) for more information.
+///
+/// See the [crate-level documentation](index.html) for details.
+#[cfg(not(feature = "std"))]
+pub struct HashMap<K, V, L, S = crate::DefaultHashBuilder>
+where
+    L: lock_api::RawMutex,
+    S: BuildHasher,
+{
+    // NOTE: this is, and must be, an exact copy of the `HashMap` definition above, with just the
+    // default type for `L` unset. This is because in no_std environments, there is no sensible
+    // default lock type for us to use.
+    table: Atomic<Table<K, V, L>>,
+    next_table: Atomic<Table<K, V, L>>,
+    transfer_index: AtomicIsize,
+    count: AtomicUsize,
+    size_ctl: AtomicIsize,
+    build_hasher: S,
+}
+
+impl<K, V, L, S> Default for HashMap<K, V, L, S>
 where
     K: Sync + Send + Clone + Hash + Eq,
     V: Sync + Send,
+    L: lock_api::RawMutex,
     S: BuildHasher + Default,
 {
     fn default() -> Self {
@@ -99,10 +131,11 @@ where
     }
 }
 
-impl<K, V, S> HashMap<K, V, S>
+impl<K, V, L, S> HashMap<K, V, L, S>
 where
     K: Sync + Send + Clone + Hash + Eq,
     V: Sync + Send,
+    L: lock_api::RawMutex,
     S: BuildHasher + Default,
 {
     /// Creates a new, empty map with the default initial table size (16).
@@ -117,10 +150,11 @@ where
     }
 }
 
-impl<K, V, S> HashMap<K, V, S>
+impl<K, V, L, S> HashMap<K, V, L, S>
 where
     K: Sync + Send + Clone + Hash + Eq,
     V: Sync + Send,
+    L: lock_api::RawMutex,
     S: BuildHasher,
 {
     /// Creates an empty map which will use `hash_builder` to hash keys.
@@ -165,10 +199,11 @@ where
     }
 }
 
-impl<K, V, S> HashMap<K, V, S>
+impl<K, V, L, S> HashMap<K, V, L, S>
 where
     K: Sync + Send + Clone + Hash + Eq,
     V: Sync + Send,
+    L: lock_api::RawMutex,
     S: BuildHasher,
 {
     fn hash<Q: ?Sized + Hash>(&self, key: &Q) -> u64 {
@@ -190,7 +225,7 @@ where
         self.get(key, &guard).is_some()
     }
 
-    fn get_node<'g, Q>(&'g self, key: &Q, guard: &'g Guard) -> Option<&'g Node<K, V>>
+    fn get_node<'g, Q>(&'g self, key: &Q, guard: &'g Guard) -> Option<&'g Node<K, V, L>>
     where
         K: Borrow<Q>,
         Q: ?Sized + Hash + Eq,
@@ -304,7 +339,7 @@ where
         unsafe { v.as_ref() }.map(|v| (&node.key, v))
     }
 
-    fn init_table<'g>(&'g self, guard: &'g Guard) -> Shared<'g, Table<K, V>> {
+    fn init_table<'g>(&'g self, guard: &'g Guard) -> Shared<'g, Table<K, V, L>> {
         loop {
             let table = self.table.load(Ordering::SeqCst, guard);
             // safety: we loaded the table while epoch was pinned. table won't be deallocated until
@@ -376,7 +411,7 @@ where
             value: Atomic::new(value),
             hash: h,
             next: Atomic::null(),
-            lock: parking_lot::Mutex::new(()),
+            lock: lock_api::Mutex::new(()),
         }));
 
         loop {
@@ -558,10 +593,10 @@ where
 
     fn help_transfer<'g>(
         &'g self,
-        table: Shared<'g, Table<K, V>>,
-        next_table: *const Table<K, V>,
+        table: Shared<'g, Table<K, V, L>>,
+        next_table: *const Table<K, V, L>,
         guard: &'g Guard,
-    ) -> Shared<'g, Table<K, V>> {
+    ) -> Shared<'g, Table<K, V, L>> {
         if table.is_null() || next_table.is_null() {
             return table;
         }
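Because the `no_std` copy of the struct above leaves `L` without a default, such builds must name the lock type explicitly. A hypothetical sketch of what instantiation might look like (`RawSpinlock` is the illustrative lock from earlier; we assume the crate's `DefaultHashBuilder` alias and the `with_hasher` constructor shown above remain available without `std`):

```rust
// Hypothetical no_std instantiation: the lock parameter spelled out in full.
type SpinMap<K, V> = flurry::HashMap<K, V, RawSpinlock, flurry::DefaultHashBuilder>;

fn build() -> SpinMap<u32, &'static str> {
    // `with_hasher` sits in the impl block that only requires `S: BuildHasher`,
    // so it should not depend on `std`; `L` is picked up from the alias above.
    SpinMap::with_hasher(flurry::DefaultHashBuilder::default())
}
```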
@@ -674,8 +709,8 @@ where
 
     fn transfer<'g>(
         &'g self,
-        table: Shared<'g, Table<K, V>>,
-        mut next_table: Shared<'g, Table<K, V>>,
+        table: Shared<'g, Table<K, V, L>>,
+        mut next_table: Shared<'g, Table<K, V, L>>,
         guard: &'g Guard,
     ) {
         // safety: table was read while `guard` was held. the code that drops table only drops it
@@ -911,7 +946,7 @@ where
                     *link = Owned::new(BinEntry::Node(Node {
                         hash: node.hash,
                         key: node.key.clone(),
-                        lock: parking_lot::Mutex::new(()),
+                        lock: lock_api::Mutex::new(()),
                         value: node.value.clone(),
                         next: Atomic::from(*link),
                     }))
@@ -1171,7 +1206,7 @@ where
 
         // TODO: tree nodes
         let mut e = bin;
-        let mut pred: Shared<'_, BinEntry<K, V>> = Shared::null();
+        let mut pred: Shared<'_, BinEntry<K, V, L>> = Shared::null();
         loop {
             // safety: either e is bin, in which case it is valid due to the above,
             // or e was obtained from a next pointer. Any next pointer obtained from
@@ -1304,7 +1339,7 @@ where
     /// The iterator element type is `(&'g K, &'g V)`.
     ///
     /// To obtain a `Guard`, use [`epoch::pin`].
-    pub fn iter<'g>(&'g self, guard: &'g Guard) -> Iter<'g, K, V> {
+    pub fn iter<'g>(&'g self, guard: &'g Guard) -> Iter<'g, K, V, L> {
         let table = self.table.load(Ordering::SeqCst, guard);
         let node_iter = NodeIter::new(table, guard);
         Iter { node_iter, guard }
@@ -1314,7 +1349,7 @@ where
     /// The iterator element type is `&'g K`.
     ///
     /// To obtain a `Guard`, use [`epoch::pin`].
-    pub fn keys<'g>(&'g self, guard: &'g Guard) -> Keys<'g, K, V> {
+    pub fn keys<'g>(&'g self, guard: &'g Guard) -> Keys<'g, K, V, L> {
         let table = self.table.load(Ordering::SeqCst, guard);
         let node_iter = NodeIter::new(table, guard);
         Keys { node_iter }
@@ -1324,7 +1359,7 @@ where
     /// The iterator element type is `&'g V`.
     ///
     /// To obtain a `Guard`, use [`epoch::pin`].
-    pub fn values<'g>(&'g self, guard: &'g Guard) -> Values<'g, K, V> {
+    pub fn values<'g>(&'g self, guard: &'g Guard) -> Values<'g, K, V, L> {
         let table = self.table.load(Ordering::SeqCst, guard);
         let node_iter = NodeIter::new(table, guard);
         Values { node_iter, guard }
@@ -1359,11 +1394,12 @@ where
 }
 
 #[cfg(feature = "std")]
-impl<K, V, S> PartialEq for HashMap<K, V, S>
+impl<K, V, L, S> PartialEq for HashMap<K, V, L, S>
 where
     K: Sync + Send + Clone + Eq + Hash,
     V: Sync + Send + PartialEq,
     S: BuildHasher,
+    L: lock_api::RawMutex,
 {
     fn eq(&self, other: &Self) -> bool {
         if self.len() != other.len() {
@@ -1377,20 +1413,22 @@ where
 }
 
 #[cfg(feature = "std")]
-impl<K, V, S> Eq for HashMap<K, V, S>
+impl<K, V, L, S> Eq for HashMap<K, V, L, S>
 where
     K: Sync + Send + Clone + Eq + Hash,
     V: Sync + Send + Eq,
     S: BuildHasher,
+    L: lock_api::RawMutex,
 {
 }
 
 #[cfg(feature = "std")]
-impl<K, V, S> fmt::Debug for HashMap<K, V, S>
+impl<K, V, L, S> fmt::Debug for HashMap<K, V, L, S>
 where
     K: Sync + Send + Clone + Debug + Eq + Hash,
     V: Sync + Send + Debug,
     S: BuildHasher,
+    L: lock_api::RawMutex,
 {
     fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
         let guard = epoch::pin();
@@ -1398,7 +1436,11 @@ where
     }
 }
 
-impl<K, V, S> Drop for HashMap<K, V, S> {
+impl<K, V, L, S> Drop for HashMap<K, V, L, S>
+where
+    L: lock_api::RawMutex,
+    S: BuildHasher,
+{
     fn drop(&mut self) {
         // safety: we have &mut self _and_ all references we have returned are bound to the
         // lifetime of their borrow of self, so there cannot be any outstanding references to
@@ -1423,11 +1465,12 @@ impl<K, V, S> Drop for HashMap<K, V, S> {
 }
 
 #[cfg(feature = "std")]
-impl<K, V, S> Extend<(K, V)> for &HashMap<K, V, S>
+impl<K, V, L, S> Extend<(K, V)> for &HashMap<K, V, L, S>
 where
     K: Sync + Send + Clone + Hash + Eq,
     V: Sync + Send,
     S: BuildHasher,
+    L: lock_api::RawMutex,
 {
     #[inline]
     fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
@@ -1451,11 +1494,12 @@ where
 }
 
 #[cfg(feature = "std")]
-impl<'a, K, V, S> Extend<(&'a K, &'a V)> for &HashMap<K, V, S>
+impl<'a, K, V, L, S> Extend<(&'a K, &'a V)> for &HashMap<K, V, L, S>
 where
     K: Sync + Send + Copy + Hash + Eq,
     V: Sync + Send + Copy,
     S: BuildHasher,
+    L: lock_api::RawMutex,
 {
     #[inline]
     fn extend<T: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: T) {
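The `Extend` implementations above are deliberately on `&HashMap` rather than `HashMap`: insertion only needs a shared reference, so a map can be extended while others read it. A usage sketch (std build, default `L`):

```rust
fn main() {
    let map = flurry::HashMap::new();

    // `Extend` is implemented for `&HashMap`; the binding is `mut` only
    // because `extend` takes `&mut self` on that shared reference.
    let mut handle = &map;
    handle.extend(vec![(1, "a"), (2, "b")]);

    let guard = flurry::epoch::pin();
    assert_eq!(map.get(&1, &guard), Some(&"a"));
}
```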
@@ -1463,11 +1507,12 @@ where
     }
 }
 
-impl<K, V, S> FromIterator<(K, V)> for HashMap<K, V, S>
+impl<K, V, L, S> FromIterator<(K, V)> for HashMap<K, V, L, S>
 where
     K: Sync + Send + Clone + Hash + Eq,
     V: Sync + Send,
     S: BuildHasher + Default,
+    L: lock_api::RawMutex,
 {
     fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
         let mut iter = iter.into_iter();
@@ -1489,11 +1534,12 @@ where
     }
 }
 
-impl<'a, K, V, S> FromIterator<(&'a K, &'a V)> for HashMap<K, V, S>
+impl<'a, K, V, L, S> FromIterator<(&'a K, &'a V)> for HashMap<K, V, L, S>
 where
     K: Sync + Send + Copy + Hash + Eq,
     V: Sync + Send + Copy,
     S: BuildHasher + Default,
+    L: lock_api::RawMutex,
 {
     #[inline]
     fn from_iter<T: IntoIterator<Item = (&'a K, &'a V)>>(iter: T) -> Self {
@@ -1501,11 +1547,12 @@ where
     }
 }
 
-impl<'a, K, V, S> FromIterator<&'a (K, V)> for HashMap<K, V, S>
+impl<'a, K, V, L, S> FromIterator<&'a (K, V)> for HashMap<K, V, L, S>
 where
     K: Sync + Send + Copy + Hash + Eq,
     V: Sync + Send + Copy,
     S: BuildHasher + Default,
+    L: lock_api::RawMutex,
 {
     #[inline]
     fn from_iter<T: IntoIterator<Item = &'a (K, V)>>(iter: T) -> Self {
@@ -1514,13 +1561,14 @@ where
 }
 
 #[cfg(feature = "std")]
-impl<K, V, S> Clone for HashMap<K, V, S>
+impl<K, V, L, S> Clone for HashMap<K, V, L, S>
 where
     K: Sync + Send + Clone + Hash + Eq,
     V: Sync + Send + Clone,
     S: BuildHasher + Clone,
+    L: lock_api::RawMutex,
 {
-    fn clone(&self) -> HashMap<K, V, S> {
+    fn clone(&self) -> HashMap<K, V, L, S> {
         let cloned_map = Self::with_capacity_and_hasher(self.build_hasher.clone(), self.len());
         {
             let guard = epoch::pin();
diff --git a/src/node.rs b/src/node.rs
index eef5bb62..84cacfdb 100644
--- a/src/node.rs
+++ b/src/node.rs
@@ -2,14 +2,17 @@ use crate::raw::Table;
 use core::borrow::Borrow;
 use core::sync::atomic::Ordering;
 use crossbeam_epoch::{Atomic, Guard, Shared};
-use parking_lot::Mutex;
+use lock_api::Mutex;
 
 /// Entry in a bin.
 ///
 /// Will _generally_ be `Node`. Any entry that is not first in the bin, will be a `Node`.
 #[derive(Debug)]
-pub(crate) enum BinEntry<K, V> {
-    Node(Node<K, V>),
+pub(crate) enum BinEntry<K, V, L>
+where
+    L: lock_api::RawMutex,
+{
+    Node(Node<K, V, L>),
     // safety: the pointer t to the next table inside Moved(t) is a valid pointer if the Moved(t)
     // entry was read after loading `map::HashMap.table` while the guard used to load that table is
     // still alive:
@@ -40,29 +43,36 @@ pub(crate) enum BinEntry<K, V> {
     //
     // Since finishing a resize is the only time a table is `defer_destroy`ed, the above covers
     // all cases.
-    Moved(*const Table<K, V>),
+    Moved(*const Table<K, V, L>),
 }
 
-unsafe impl<K, V> Send for BinEntry<K, V>
+unsafe impl<K, V, L> Send for BinEntry<K, V, L>
 where
     K: Send,
     V: Send,
-    Node<K, V>: Send,
-    Table<K, V>: Send,
+    L: Send,
+    Node<K, V, L>: Send,
+    Table<K, V, L>: Send,
+    L: lock_api::RawMutex,
 {
 }
 
-unsafe impl<K, V> Sync for BinEntry<K, V>
+unsafe impl<K, V, L> Sync for BinEntry<K, V, L>
 where
     K: Sync,
     V: Sync,
-    Node<K, V>: Sync,
-    Table<K, V>: Sync,
+    L: Sync,
+    Node<K, V, L>: Sync,
+    Table<K, V, L>: Sync,
+    L: lock_api::RawMutex,
 {
 }
 
-impl<K, V> BinEntry<K, V> {
-    pub(crate) fn as_node(&self) -> Option<&Node<K, V>> {
+impl<K, V, L> BinEntry<K, V, L>
+where
+    L: lock_api::RawMutex,
+{
+    pub(crate) fn as_node(&self) -> Option<&Node<K, V, L>> {
         if let BinEntry::Node(ref n) = *self {
             Some(n)
         } else {
@@ -71,13 +81,16 @@ impl<K, V> BinEntry<K, V> {
     }
 }
 
-impl<K, V> BinEntry<K, V> {
+impl<K, V, L> BinEntry<K, V, L>
+where
+    L: lock_api::RawMutex,
+{
     pub(crate) fn find<'g, Q>(
         &'g self,
         hash: u64,
         key: &Q,
         guard: &'g Guard,
-    ) -> Shared<'g, BinEntry<K, V>>
+    ) -> Shared<'g, BinEntry<K, V, L>>
     where
         K: Borrow<Q>,
         Q: ?Sized + Eq,
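The bare `use lock_api::Mutex` swap works because `lock_api::Mutex` carries the raw lock as its first type parameter: `parking_lot::Mutex<T>` is an alias for `lock_api::Mutex<parking_lot::RawMutex, T>`, so the std build ends up with the same concrete type as before this change. A sketch of the equivalence:

```rust
// With the std feature, the per-node lock below is
// lock_api::Mutex<parking_lot::RawMutex, ()>, i.e. exactly what
// parking_lot::Mutex<()> expanded to before this diff.
type StdNodeLock = lock_api::Mutex<parking_lot::RawMutex, ()>;

fn demo() {
    let lock: StdNodeLock = lock_api::Mutex::new(());
    let _held = lock.lock(); // RAII guard; released on drop
}
```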
@@ -139,10 +152,13 @@ impl<K, V> BinEntry<K, V> {
 
 /// Key-value entry.
 #[derive(Debug)]
-pub(crate) struct Node<K, V> {
+pub(crate) struct Node<K, V, L>
+where
+    L: lock_api::RawMutex,
+{
     pub(crate) hash: u64,
     pub(crate) key: K,
     pub(crate) value: Atomic<V>,
-    pub(crate) next: Atomic<BinEntry<K, V>>,
-    pub(crate) lock: Mutex<()>,
+    pub(crate) next: Atomic<BinEntry<K, V, L>>,
+    pub(crate) lock: Mutex<L, ()>,
 }
diff --git a/src/raw/mod.rs b/src/raw/mod.rs
index 99880c14..8e04df49 100644
--- a/src/raw/mod.rs
+++ b/src/raw/mod.rs
@@ -8,19 +8,28 @@ use core::sync::atomic::Ordering;
 use crossbeam_epoch::{Atomic, Guard, Owned, Shared};
 
 #[derive(Debug)]
-pub(crate) struct Table<K, V> {
-    bins: Box<[Atomic<BinEntry<K, V>>]>,
+pub(crate) struct Table<K, V, L>
+where
+    L: lock_api::RawMutex,
+{
+    bins: Box<[Atomic<BinEntry<K, V, L>>]>,
 }
 
-impl<K, V> From<Vec<Atomic<BinEntry<K, V>>>> for Table<K, V> {
-    fn from(bins: Vec<Atomic<BinEntry<K, V>>>) -> Self {
+impl<K, V, L> From<Vec<Atomic<BinEntry<K, V, L>>>> for Table<K, V, L>
+where
+    L: lock_api::RawMutex,
+{
+    fn from(bins: Vec<Atomic<BinEntry<K, V, L>>>) -> Self {
         Self {
             bins: bins.into_boxed_slice(),
         }
     }
 }
 
-impl<K, V> Table<K, V> {
+impl<K, V, L> Table<K, V, L>
+where
+    L: lock_api::RawMutex,
+{
     pub(crate) fn new(bins: usize) -> Self {
         Self::from(vec![Atomic::null(); bins])
     }
@@ -81,7 +90,10 @@ impl<K, V> Table<K, V> {
     }
 }
 
-impl<K, V> Drop for Table<K, V> {
+impl<K, V, L> Drop for Table<K, V, L>
+where
+    L: lock_api::RawMutex,
+{
     fn drop(&mut self) {
         // we need to drop any forwarding nodes (since they are heap allocated).
 
@@ -106,7 +118,10 @@ impl<K, V> Drop for Table<K, V> {
     }
 }
 
-impl<K, V> Table<K, V> {
+impl<K, V, L> Table<K, V, L>
+where
+    L: lock_api::RawMutex,
+{
     #[inline]
     pub(crate) fn bini(&self, hash: u64) -> usize {
         let mask = self.bins.len() as u64 - 1;
@@ -114,7 +129,7 @@ impl<K, V> Table<K, V> {
     }
 
     #[inline]
-    pub(crate) fn bin<'g>(&'g self, i: usize, guard: &'g Guard) -> Shared<'g, BinEntry<K, V>> {
+    pub(crate) fn bin<'g>(&'g self, i: usize, guard: &'g Guard) -> Shared<'g, BinEntry<K, V, L>> {
         self.bins[i].load(Ordering::Acquire, guard)
     }
 
@@ -123,18 +138,22 @@ impl<K, V> Table<K, V> {
     pub(crate) fn cas_bin<'g>(
         &'g self,
         i: usize,
-        current: Shared<'_, BinEntry<K, V>>,
-        new: Owned<BinEntry<K, V>>,
+        current: Shared<'_, BinEntry<K, V, L>>,
+        new: Owned<BinEntry<K, V, L>>,
         guard: &'g Guard,
     ) -> Result<
-        Shared<'g, BinEntry<K, V>>,
-        crossbeam_epoch::CompareAndSetError<'g, BinEntry<K, V>, Owned<BinEntry<K, V>>>,
+        Shared<'g, BinEntry<K, V, L>>,
+        crossbeam_epoch::CompareAndSetError<'g, BinEntry<K, V, L>, Owned<BinEntry<K, V, L>>>,
     > {
         self.bins[i].compare_and_set(current, new, Ordering::AcqRel, guard)
     }
 
     #[inline]
-    pub(crate) fn store_bin<P: Pointer<BinEntry<K, V>>>(&self, i: usize, new: P) {
+    pub(crate) fn store_bin<P: Pointer<BinEntry<K, V, L>>>(
+        &self,
+        i: usize,
+        new: P,
+    ) {
        self.bins[i].store(new, Ordering::Release)
    }
 }
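Finally, a worked sketch of the index math `bini` relies on: because a `Table`'s bin count is always a power of two, masking with `len - 1` is equivalent to the modulo a hash-to-bin mapping needs. This is a standalone rewrite for illustration, not code from the diff:

```rust
// Standalone version of the bin-index calculation from `Table::bini`.
fn bini(bins: usize, hash: u64) -> usize {
    // Only correct because `bins` is a power of two.
    let mask = bins as u64 - 1;
    (hash & mask) as usize
}

fn main() {
    // With 16 bins the mask is 0b1111, so only the low 4 bits of the hash matter.
    assert_eq!(bini(16, 42), 42 % 16); // == 10
    assert_eq!(bini(16, 0xFF), 15);
}
```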