Skip to content

Commit 9dc2226

Browse files
committed
util/epoch: Add preliminary support for loom
This patch only adds support to parts of `utils` and to `epoch`. Some parts of `utils` had to be left out, since they rely on `AtomicUsize::new` being `const` (which it is not in `loom`). Other parts had to be left out due to the lack of `thread::Thread` in `loom`. All the parts needed for `epoch` were successfully moved to loom. For this initial patch, there are two loom tests, both in `epoch`. One is a simple test of defer_destroy while a pin is held, and the other is the Treiber stack example. They both pass loom with `LOOM_MAX_PREEMPTIONS=3` and `LOOM_MAX_PREEMPTIONS=2`. The latter tests fewer possible interleavings, but completes in 13 minutes on my laptop rather than ~2 hours. I have added loom testing of `epoch` to CI as well. The minimum version bump to 1.30 is a little awkward, but is needed to allow us to use paths for macro imports. Now, technically we already rely on MSRV above 1.30 for a bunch of features (like `alloc`), but this is a change that affects all of `crossbeam-epoch`. Note that the uses of `UnsafeCell` in `utils` have not been moved to `loom::cell::UnsafeCell`, as loom's `UnsafeCell` does not support `T: ?Sized`, which `AtomicCell` depends on. Fixes crossbeam-rs#486.
1 parent de59f9b commit 9dc2226

22 files changed

+381
-47
lines changed

Diff for: .github/workflows/ci.yml

+13-2
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ jobs:
2525
- crossbeam-skiplist
2626
- crossbeam-utils
2727
rust:
28-
- 1.28.0
28+
- 1.30.0
2929
- nightly
3030
steps:
3131
- uses: actions/checkout@master
@@ -38,7 +38,7 @@ jobs:
3838
rustup target add thumbv6m-none-eabi
3939
# cfg-if 0.1.10 requires Rust 1.31+ so downgrade it.
4040
- name: Downgrade dependencies
41-
if: matrix.rust == '1.28.0'
41+
if: matrix.rust == '1.30.0'
4242
run: |
4343
cargo generate-lockfile
4444
cargo update -p cfg-if --precise 0.1.9
@@ -66,3 +66,14 @@ jobs:
6666
run: rustup update stable && rustup default stable
6767
- name: rustfmt
6868
run: ./ci/rustfmt.sh
69+
70+
# Run loom tests.
71+
loom:
72+
name: loom
73+
runs-on: ubuntu-latest
74+
steps:
75+
- uses: actions/checkout@master
76+
- name: Install Rust
77+
run: rustup update stable && rustup default stable
78+
- name: loom
79+
run: ./ci/crossbeam-epoch-loom.sh

Diff for: ci/crossbeam-epoch-loom.sh

+8
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
#!/bin/bash
2+
3+
cd "$(dirname "$0")"/../crossbeam-epoch
4+
set -ex
5+
6+
export RUSTFLAGS="-D warnings --cfg=loom"
7+
8+
env LOOM_MAX_PREEMPTIONS=2 cargo test --test loom --features sanitize --release -- --nocapture

Diff for: crossbeam-epoch/Cargo.toml

+3
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,9 @@ cfg-if = "0.1.2"
2727
maybe-uninit = "2.0.0"
2828
memoffset = "0.5"
2929

30+
[target.'cfg(loom)'.dependencies]
31+
loom = "0.3.2"
32+
3033
[dependencies.crossbeam-utils]
3134
version = "0.7"
3235
path = "../crossbeam-utils"

Diff for: crossbeam-epoch/src/atomic.rs

+12-5
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,12 @@
11
use alloc::boxed::Box;
2+
use concurrency::sync::atomic::{AtomicUsize, Ordering};
23
use core::borrow::{Borrow, BorrowMut};
34
use core::cmp;
45
use core::fmt;
56
use core::marker::PhantomData;
67
use core::mem;
78
use core::ops::{Deref, DerefMut};
89
use core::ptr;
9-
use core::sync::atomic::{AtomicUsize, Ordering};
1010

1111
use crossbeam_utils::atomic::AtomicConsume;
1212
use guard::Guard;
@@ -150,7 +150,7 @@ impl<T> Atomic<T> {
150150
///
151151
/// let a = Atomic::<i32>::null();
152152
/// ```
153-
#[cfg(not(has_min_const_fn))]
153+
#[cfg(any(loom, not(has_min_const_fn)))]
154154
pub fn null() -> Atomic<T> {
155155
Self {
156156
data: AtomicUsize::new(0),
@@ -167,7 +167,7 @@ impl<T> Atomic<T> {
167167
///
168168
/// let a = Atomic::<i32>::null();
169169
/// ```
170-
#[cfg(has_min_const_fn)]
170+
#[cfg(all(not(loom), has_min_const_fn))]
171171
pub const fn null() -> Atomic<T> {
172172
Self {
173173
data: AtomicUsize::new(0),
@@ -506,7 +506,14 @@ impl<T> Atomic<T> {
506506
/// }
507507
/// ```
508508
pub unsafe fn into_owned(self) -> Owned<T> {
509-
Owned::from_usize(self.data.into_inner())
509+
#[cfg(loom)]
510+
{
511+
Owned::from_usize(self.data.unsync_load())
512+
}
513+
#[cfg(not(loom))]
514+
{
515+
Owned::from_usize(self.data.into_inner())
516+
}
510517
}
511518
}
512519

@@ -1185,7 +1192,7 @@ impl<'g, T> Default for Shared<'g, T> {
11851192
}
11861193
}
11871194

1188-
#[cfg(test)]
1195+
#[cfg(all(test, not(loom)))]
11891196
mod tests {
11901197
use super::Shared;
11911198

Diff for: crossbeam-epoch/src/collector.rs

+5-5
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
///
1313
/// handle.pin().flush();
1414
/// ```
15-
use alloc::sync::Arc;
15+
use concurrency::sync::Arc;
1616
use core::fmt;
1717

1818
use guard::Guard;
@@ -103,7 +103,7 @@ impl fmt::Debug for LocalHandle {
103103
}
104104
}
105105

106-
#[cfg(test)]
106+
#[cfg(all(test, not(loom)))]
107107
mod tests {
108108
use std::mem;
109109
use std::sync::atomic::{AtomicUsize, Ordering};
@@ -145,9 +145,9 @@ mod tests {
145145
let a = Owned::new(7).into_shared(guard);
146146
guard.defer_destroy(a);
147147

148-
assert!(!(*(*guard.local).bag.get()).is_empty());
148+
assert!(!(*guard.local).bag.with(|b| (*b).is_empty()));
149149

150-
while !(*(*guard.local).bag.get()).is_empty() {
150+
while !(*guard.local).bag.with(|b| (*b).is_empty()) {
151151
guard.flush();
152152
}
153153
}
@@ -166,7 +166,7 @@ mod tests {
166166
let a = Owned::new(7).into_shared(guard);
167167
guard.defer_destroy(a);
168168
}
169-
assert!(!(*(*guard.local).bag.get()).is_empty());
169+
assert!(!(*guard.local).bag.with(|b| (*b).is_empty()));
170170
}
171171
}
172172

Diff for: crossbeam-epoch/src/default.rs

+2-1
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55
//! destructed on thread exit, which in turn unregisters the thread.
66
77
use collector::{Collector, LocalHandle};
8+
use concurrency::{lazy_static, thread_local};
89
use guard::Guard;
910

1011
lazy_static! {
@@ -44,7 +45,7 @@ where
4445
.unwrap_or_else(|_| f(&COLLECTOR.register()))
4546
}
4647

47-
#[cfg(test)]
48+
#[cfg(all(test, not(loom)))]
4849
mod tests {
4950
use crossbeam_utils::thread;
5051

Diff for: crossbeam-epoch/src/deferred.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -78,7 +78,7 @@ impl Deferred {
7878
}
7979
}
8080

81-
#[cfg(test)]
81+
#[cfg(all(test, not(loom)))]
8282
mod tests {
8383
use super::Deferred;
8484
use std::cell::Cell;

Diff for: crossbeam-epoch/src/epoch.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77
//! If an object became garbage in some epoch, then we can be sure that after two advancements no
88
//! participant will hold a reference to it. That is the crux of safe memory reclamation.
99
10-
use core::sync::atomic::{AtomicUsize, Ordering};
10+
use concurrency::sync::atomic::{AtomicUsize, Ordering};
1111

1212
/// An epoch that can be marked as pinned or unpinned.
1313
///

Diff for: crossbeam-epoch/src/internal.rs

+11-9
Original file line numberDiff line numberDiff line change
@@ -35,11 +35,12 @@
3535
//! Ideally each instance of concurrent data structure may have its own queue that gets fully
3636
//! destroyed as soon as the data structure gets dropped.
3737
38-
use core::cell::{Cell, UnsafeCell};
38+
use concurrency::cell::UnsafeCell;
39+
use concurrency::sync::atomic;
40+
use concurrency::sync::atomic::Ordering;
41+
use core::cell::Cell;
3942
use core::mem::{self, ManuallyDrop};
4043
use core::num::Wrapping;
41-
use core::sync::atomic;
42-
use core::sync::atomic::Ordering;
4344
use core::{fmt, ptr};
4445

4546
use crossbeam_utils::CachePadded;
@@ -411,7 +412,7 @@ impl Local {
411412
/// Returns a reference to the `Collector` in which this `Local` resides.
412413
#[inline]
413414
pub fn collector(&self) -> &Collector {
414-
unsafe { &**self.collector.get() }
415+
self.collector.with(|c| unsafe { &**c })
415416
}
416417

417418
/// Returns `true` if the current participant is pinned.
@@ -426,7 +427,7 @@ impl Local {
426427
///
427428
/// It should be safe for another thread to execute the given function.
428429
pub unsafe fn defer(&self, mut deferred: Deferred, guard: &Guard) {
429-
let bag = &mut *self.bag.get();
430+
let bag = self.bag.with_mut(|b| &mut *b);
430431

431432
while let Err(d) = bag.try_push(deferred) {
432433
self.global().push_bag(bag, guard);
@@ -435,7 +436,7 @@ impl Local {
435436
}
436437

437438
pub fn flush(&self, guard: &Guard) {
438-
let bag = unsafe { &mut *self.bag.get() };
439+
let bag = self.bag.with_mut(|b| unsafe { &mut *b });
439440

440441
if !bag.is_empty() {
441442
self.global().push_bag(bag, guard);
@@ -573,7 +574,8 @@ impl Local {
573574
// Pin and move the local bag into the global queue. It's important that `push_bag`
574575
// doesn't defer destruction on any new garbage.
575576
let guard = &self.pin();
576-
self.global().push_bag(&mut *self.bag.get(), guard);
577+
self.global()
578+
.push_bag(self.bag.with_mut(|b| &mut *b), guard);
577579
}
578580
// Revert the handle count back to zero.
579581
self.handle_count.set(0);
@@ -582,7 +584,7 @@ impl Local {
582584
// Take the reference to the `Global` out of this `Local`. Since we're not protected
583585
// by a guard at this time, it's crucial that the reference is read before marking the
584586
// `Local` as deleted.
585-
let collector: Collector = ptr::read(&*(*self.collector.get()));
587+
let collector: Collector = ptr::read(self.collector.with(|c| &*(*c)));
586588

587589
// Mark this node in the linked list as deleted.
588590
self.entry.delete(&unprotected());
@@ -613,7 +615,7 @@ impl IsElement<Local> for Local {
613615
}
614616
}
615617

616-
#[cfg(test)]
618+
#[cfg(all(test, not(loom)))]
617619
mod tests {
618620
use std::sync::atomic::{AtomicUsize, Ordering};
619621

Diff for: crossbeam-epoch/src/lib.rs

+81-1
Original file line numberDiff line numberDiff line change
@@ -66,6 +66,87 @@ extern crate core;
6666

6767
extern crate maybe_uninit;
6868

69+
#[cfg(loom)]
70+
extern crate loom;
71+
72+
#[cfg(loom)]
73+
#[allow(unused_imports, dead_code)]
74+
pub(crate) mod concurrency {
75+
pub(crate) mod cell {
76+
pub(crate) use loom::cell::UnsafeCell;
77+
}
78+
pub(crate) mod sync {
79+
pub(crate) mod atomic {
80+
pub(crate) use loom::sync::atomic::{AtomicUsize, Ordering};
81+
pub(crate) fn fence(ord: Ordering) {
82+
if let Ordering::Acquire = ord {
83+
} else {
84+
// FIXME: loom only supports acquire fences at the moment.
85+
// https://github.com/tokio-rs/loom/issues/117
86+
// let's at least not panic...
87+
// this may generate some false positives (`SeqCst` is stronger than `Acquire`
88+
// for example), and some false negatives (`Relaxed` is weaker than `Acquire`),
89+
// but it's the best we can do for the time being.
90+
}
91+
loom::sync::atomic::fence(Ordering::Acquire)
92+
}
93+
94+
// FIXME: loom does not support compiler_fence at the moment.
95+
// https://github.com/tokio-rs/loom/issues/117
96+
// we use fence as a stand-in for compiler_fence for the time being.
97+
// this may miss some races since fence is stronger than compiler_fence,
98+
// but it's the best we can do for the time being.
99+
pub(crate) use self::fence as compiler_fence;
100+
}
101+
pub(crate) use loom::sync::Arc;
102+
}
103+
pub(crate) use loom::lazy_static;
104+
pub(crate) use loom::thread_local;
105+
}
106+
#[cfg(not(loom))]
107+
#[allow(unused_imports, dead_code)]
108+
pub(crate) mod concurrency {
109+
#[cfg(any(feature = "alloc", feature = "std"))]
110+
pub(crate) mod cell {
111+
#[derive(Debug)]
112+
#[repr(transparent)]
113+
pub(crate) struct UnsafeCell<T>(::core::cell::UnsafeCell<T>);
114+
115+
impl<T> UnsafeCell<T> {
116+
#[inline]
117+
pub(crate) fn new(data: T) -> UnsafeCell<T> {
118+
UnsafeCell(::core::cell::UnsafeCell::new(data))
119+
}
120+
121+
#[inline]
122+
pub(crate) fn with<R>(&self, f: impl FnOnce(*const T) -> R) -> R {
123+
f(self.0.get())
124+
}
125+
126+
#[inline]
127+
pub(crate) fn with_mut<R>(&self, f: impl FnOnce(*mut T) -> R) -> R {
128+
f(self.0.get())
129+
}
130+
}
131+
}
132+
#[cfg(any(feature = "alloc", feature = "std"))]
133+
pub(crate) mod sync {
134+
pub(crate) mod atomic {
135+
pub(crate) use core::sync::atomic::compiler_fence;
136+
pub(crate) use core::sync::atomic::fence;
137+
pub(crate) use core::sync::atomic::{AtomicUsize, Ordering};
138+
}
139+
#[cfg_attr(feature = "nightly", cfg(target_has_atomic = "ptr"))]
140+
pub(crate) use alloc::sync::Arc;
141+
}
142+
143+
#[cfg(feature = "std")]
144+
pub(crate) use std::thread_local;
145+
146+
#[cfg(feature = "std")]
147+
pub(crate) use lazy_static::lazy_static;
148+
}
149+
69150
cfg_if! {
70151
if #[cfg(feature = "alloc")] {
71152
extern crate alloc;
@@ -99,7 +180,6 @@ cfg_if! {
99180

100181
cfg_if! {
101182
if #[cfg(feature = "std")] {
102-
#[macro_use]
103183
extern crate lazy_static;
104184

105185
mod default;

Diff for: crossbeam-epoch/src/sync/list.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,8 @@
33
//! Ideas from Michael. High Performance Dynamic Lock-Free Hash Tables and List-Based Sets. SPAA
44
//! 2002. http://dl.acm.org/citation.cfm?id=564870.564881
55
6+
use concurrency::sync::atomic::Ordering::{Acquire, Relaxed, Release};
67
use core::marker::PhantomData;
7-
use core::sync::atomic::Ordering::{Acquire, Relaxed, Release};
88

99
use {unprotected, Atomic, Guard, Shared};
1010

@@ -295,7 +295,7 @@ impl<'g, T: 'g, C: IsElement<T>> Iterator for Iter<'g, T, C> {
295295
}
296296
}
297297

298-
#[cfg(test)]
298+
#[cfg(all(test, not(loom)))]
299299
mod tests {
300300
use super::*;
301301
use crossbeam_utils::thread;

Diff for: crossbeam-epoch/src/sync/queue.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88
//! Simon Doherty, Lindsay Groves, Victor Luchangco, and Mark Moir. 2004b. Formal Verification of a
99
//! Practical Lock-Free Queue Algorithm. https://doi.org/10.1007/978-3-540-30232-2_7
1010
11-
use core::sync::atomic::Ordering::{Acquire, Relaxed, Release};
11+
use concurrency::sync::atomic::Ordering::{Acquire, Relaxed, Release};
1212

1313
use crossbeam_utils::CachePadded;
1414

@@ -203,7 +203,7 @@ impl<T> Drop for Queue<T> {
203203
}
204204
}
205205

206-
#[cfg(test)]
206+
#[cfg(all(test, not(loom)))]
207207
mod test {
208208
use super::*;
209209
use crossbeam_utils::thread;

0 commit comments

Comments
 (0)