From 464dee8edf786117f9806ed88d7b238cd4b8abf5 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Tue, 27 Nov 2018 09:23:22 +0100 Subject: [PATCH 01/12] std::ptr no longer needs whitelisting --- src/lib.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/lib.rs b/src/lib.rs index 10a1405b2a..47f00658e5 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -318,7 +318,6 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { // We walk up the stack a few frames to also cover their callees. const WHITELIST: &[(&str, &str)] = &[ // Uses mem::uninitialized - ("std::ptr::read", ""), ("std::sys::windows::mutex::Mutex::", ""), ]; for frame in ecx.stack().iter() From e9370d2b74d38720fed7887fc708b8fa67fe7adb Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Thu, 15 Nov 2018 13:29:55 +0100 Subject: [PATCH 02/12] adjust for memory_allocated hook, make RangeMap preallocated with a fixed size --- src/lib.rs | 23 ++++++--- src/range_map.rs | 108 +++++++++++------------------------------ src/stacked_borrows.rs | 16 ++++-- 3 files changed, 56 insertions(+), 91 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index d2ead2493c..591cf57662 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -301,6 +301,7 @@ type MiriEvalContext<'a, 'mir, 'tcx> = EvalContext<'a, 'mir, 'tcx, Evaluator<'tc impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { type MemoryKinds = MiriMemoryKind; + type MemoryExtra = (); type AllocExtra = stacked_borrows::Stacks; type PointerTag = Borrow; @@ -405,8 +406,9 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { } fn find_foreign_static( - tcx: TyCtxtAt<'a, 'tcx, 'tcx>, def_id: DefId, + tcx: TyCtxtAt<'a, 'tcx, 'tcx>, + memory_extra: &Self::MemoryExtra, ) -> EvalResult<'tcx, Cow<'tcx, Allocation>> { let attrs = tcx.get_attrs(def_id); let link_name = match attr::first_attr_value_str_by_name(&attrs, "link_name") { @@ -417,8 +419,10 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { let alloc = match &link_name[..] 
{ "__cxa_thread_atexit_impl" => { // This should be all-zero, pointer-sized - let data = vec![0; tcx.data_layout.pointer_size.bytes() as usize]; - Allocation::from_bytes(&data[..], tcx.data_layout.pointer_align.abi) + let size = tcx.data_layout.pointer_size; + let data = vec![0; size.bytes() as usize]; + let extra = AllocationExtra::memory_allocated(size, memory_extra); + Allocation::from_bytes(&data[..], tcx.data_layout.pointer_align.abi, extra) } _ => return err!(Unimplemented( format!("can't access foreign static: {}", link_name), @@ -434,9 +438,14 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { Ok(()) } - fn adjust_static_allocation( - alloc: &'_ Allocation - ) -> Cow<'_, Allocation> { + fn adjust_static_allocation<'b>( + alloc: &'b Allocation, + memory_extra: &Self::MemoryExtra, + ) -> Cow<'b, Allocation> { + let extra = AllocationExtra::memory_allocated( + Size::from_bytes(alloc.bytes.len() as u64), + memory_extra, + ); let alloc: Allocation = Allocation { bytes: alloc.bytes.clone(), relocations: Relocations::from_presorted( @@ -447,7 +456,7 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { undef_mask: alloc.undef_mask.clone(), align: alloc.align, mutability: alloc.mutability, - extra: Self::AllocExtra::default(), + extra, }; Cow::Owned(alloc) } diff --git a/src/range_map.rs b/src/range_map.rs index 74a49bf83b..762b17b1ae 100644 --- a/src/range_map.rs +++ b/src/range_map.rs @@ -16,13 +16,6 @@ pub struct RangeMap { map: BTreeMap, } -impl Default for RangeMap { - #[inline(always)] - fn default() -> Self { - RangeMap::new() - } -} - // The derived `Ord` impl sorts first by the first field, then, if the fields are the same, // by the second field. // This is exactly what we need for our purposes, since a range query on a BTReeSet/BTreeMap will give us all @@ -73,9 +66,15 @@ impl Range { } impl RangeMap { + /// Create a new RangeMap for the given size, and with the given initial value used for + /// the entire range. #[inline(always)] - pub fn new() -> RangeMap { - RangeMap { map: BTreeMap::new() } + pub fn new(size: Size, init: T) -> RangeMap { + let mut map = RangeMap { map: BTreeMap::new() }; + if size.bytes() > 0 { + map.map.insert(Range { start: 0, end: size.bytes() }, init); + } + map } fn iter_with_range<'a>( @@ -95,6 +94,9 @@ impl RangeMap { ) } + /// Provide read-only iteration over everything in the given range. This does + /// *not* split items if they overlap with the edges. Do not use this to mutate + /// through interior mutability. pub fn iter<'a>(&'a self, offset: Size, len: Size) -> impl Iterator + 'a { self.iter_with_range(offset.bytes(), len.bytes()).map(|(_, data)| data) } @@ -140,8 +142,7 @@ impl RangeMap { /// Provide mutable iteration over everything in the given range. As a side-effect, /// this will split entries in the map that are only partially hit by the given range, /// to make sure that when they are mutated, the effect is constrained to the given range. - /// If there are gaps, leave them be. - pub fn iter_mut_with_gaps<'a>( + pub fn iter_mut<'a>( &'a mut self, offset: Size, len: Size, @@ -174,64 +175,6 @@ impl RangeMap { }, ) } - - /// Provide a mutable iterator over everything in the given range, with the same side-effects as - /// iter_mut_with_gaps. Furthermore, if there are gaps between ranges, fill them with the given default - /// before yielding them in the iterator. - /// This is also how you insert. 
- pub fn iter_mut<'a>(&'a mut self, offset: Size, len: Size) -> impl Iterator + 'a - where - T: Clone + Default, - { - if len.bytes() > 0 { - let offset = offset.bytes(); - let len = len.bytes(); - - // Do a first iteration to collect the gaps - let mut gaps = Vec::new(); - let mut last_end = offset; - for (range, _) in self.iter_with_range(offset, len) { - if last_end < range.start { - gaps.push(Range { - start: last_end, - end: range.start, - }); - } - last_end = range.end; - } - if last_end < offset + len { - gaps.push(Range { - start: last_end, - end: offset + len, - }); - } - - // Add default for all gaps - for gap in gaps { - let old = self.map.insert(gap, Default::default()); - assert!(old.is_none()); - } - } - - // Now provide mutable iteration - self.iter_mut_with_gaps(offset, len) - } - - pub fn retain(&mut self, mut f: F) - where - F: FnMut(&T) -> bool, - { - let mut remove = Vec::new(); - for (range, data) in &self.map { - if !f(data) { - remove.push(*range); - } - } - - for range in remove { - self.map.remove(&range); - } - } } #[cfg(test)] @@ -239,14 +182,13 @@ mod tests { use super::*; /// Query the map at every offset in the range and collect the results. - fn to_vec(map: &RangeMap, offset: u64, len: u64, default: Option) -> Vec { + fn to_vec(map: &RangeMap, offset: u64, len: u64) -> Vec { (offset..offset + len) .into_iter() .map(|i| map .iter(Size::from_bytes(i), Size::from_bytes(1)) .next() .map(|&t| t) - .or(default) .unwrap() ) .collect() @@ -254,13 +196,13 @@ mod tests { #[test] fn basic_insert() { - let mut map = RangeMap::::new(); + let mut map = RangeMap::::new(Size::from_bytes(20), -1); // Insert for x in map.iter_mut(Size::from_bytes(10), Size::from_bytes(1)) { *x = 42; } // Check - assert_eq!(to_vec(&map, 10, 1, None), vec![42]); + assert_eq!(to_vec(&map, 10, 1), vec![42]); // Insert with size 0 for x in map.iter_mut(Size::from_bytes(10), Size::from_bytes(0)) { @@ -269,12 +211,12 @@ mod tests { for x in map.iter_mut(Size::from_bytes(11), Size::from_bytes(0)) { *x = 19; } - assert_eq!(to_vec(&map, 10, 2, Some(-1)), vec![42, -1]); + assert_eq!(to_vec(&map, 10, 2), vec![42, -1]); } #[test] fn gaps() { - let mut map = RangeMap::::new(); + let mut map = RangeMap::::new(Size::from_bytes(20), -1); for x in map.iter_mut(Size::from_bytes(11), Size::from_bytes(1)) { *x = 42; } @@ -282,11 +224,10 @@ mod tests { *x = 43; } assert_eq!( - to_vec(&map, 10, 10, Some(-1)), + to_vec(&map, 10, 10), vec![-1, 42, -1, -1, -1, 43, -1, -1, -1, -1] ); - // Now request a range that needs three gaps filled for x in map.iter_mut(Size::from_bytes(10), Size::from_bytes(10)) { if *x < 42 { *x = 23; @@ -294,9 +235,18 @@ mod tests { } assert_eq!( - to_vec(&map, 10, 10, None), + to_vec(&map, 10, 10), vec![23, 42, 23, 23, 23, 43, 23, 23, 23, 23] ); - assert_eq!(to_vec(&map, 13, 5, None), vec![23, 23, 43, 23, 23]); + assert_eq!(to_vec(&map, 13, 5), vec![23, 23, 43, 23, 23]); + + // Now request a range that goes beyond the initial size + for x in map.iter_mut(Size::from_bytes(15), Size::from_bytes(10)) { + *x = 19; + } + assert_eq!(map.iter(Size::from_bytes(19), Size::from_bytes(1)) + .map(|&t| t).collect::>(), vec![19]); + assert_eq!(map.iter(Size::from_bytes(20), Size::from_bytes(1)) + .map(|&t| t).collect::>(), vec![]); } } diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index f292d08363..22ec6ffe6f 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -126,7 +126,7 @@ impl State { } /// Extra per-allocation state -#[derive(Clone, Debug, Default)] +#[derive(Clone, 
Debug)] pub struct Stacks { // Even reading memory can have effects on the stack, so we need a `RefCell` here. stacks: RefCell>, @@ -289,9 +289,8 @@ impl<'tcx> Stacks { ) -> EvalResult<'tcx> { trace!("deref for tag {:?} as {:?}: {:?}, size {}", ptr.tag, kind, ptr, size.bytes()); - let mut stacks = self.stacks.borrow_mut(); - // We need `iter_mut` because `iter` would skip gaps! - for stack in stacks.iter_mut(ptr.offset, size) { + let stacks = self.stacks.borrow(); + for stack in stacks.iter(ptr.offset, size) { stack.deref(ptr.tag, kind).map_err(EvalErrorKind::MachineError)?; } Ok(()) @@ -359,7 +358,14 @@ impl<'tcx> Stacks { } /// Hooks and glue -impl AllocationExtra for Stacks { +impl AllocationExtra for Stacks { + #[inline(always)] + fn memory_allocated<'tcx>(size: Size, _extra: &()) -> Self { + Stacks { + stacks: RefCell::new(RangeMap::new(size, Stack::default())) + } + } + #[inline(always)] fn memory_read<'tcx>( alloc: &Allocation, From 215ec38624818c4df9889c702fecb10c33b646ee Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Thu, 15 Nov 2018 18:15:05 +0100 Subject: [PATCH 03/12] track call IDs --- src/lib.rs | 20 ++++++++++++-- src/stacked_borrows.rs | 61 ++++++++++++++++++++++++++++++++---------- 2 files changed, 65 insertions(+), 16 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index 591cf57662..3211f9c5bd 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -289,7 +289,7 @@ impl<'tcx> Evaluator<'tcx> { env_vars: HashMap::default(), tls: TlsData::default(), validate, - stacked_borrows: stacked_borrows::State::new(), + stacked_borrows: stacked_borrows::State::default(), } } } @@ -301,7 +301,8 @@ type MiriEvalContext<'a, 'mir, 'tcx> = EvalContext<'a, 'mir, 'tcx, Evaluator<'tc impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { type MemoryKinds = MiriMemoryKind; - type MemoryExtra = (); + type FrameExtra = stacked_borrows::CallId; + type MemoryExtra = stacked_borrows::MemoryState; type AllocExtra = stacked_borrows::Stacks; type PointerTag = Borrow; @@ -538,4 +539,19 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { ecx.retag(fn_entry, place) } } + + #[inline(always)] + fn stack_push( + ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>, + ) -> EvalResult<'tcx, stacked_borrows::CallId> { + Ok(ecx.memory().extra.borrow_mut().new_call()) + } + + #[inline(always)] + fn stack_pop( + ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>, + extra: stacked_borrows::CallId, + ) -> EvalResult<'tcx> { + Ok(ecx.memory().extra.borrow_mut().end_call(extra)) + } } diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index 22ec6ffe6f..067e3bb844 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -1,4 +1,6 @@ use std::cell::RefCell; +use std::collections::HashSet; +use std::rc::Rc; use rustc::ty::{self, layout::Size}; use rustc::hir::{Mutability, MutMutable, MutImmutable}; @@ -10,6 +12,7 @@ use crate::{ }; pub type Timestamp = u64; +pub type CallId = u64; /// Information about which kind of borrow was used to create the reference this is tagged /// with. 
@@ -80,15 +83,6 @@ pub struct Stack { frozen_since: Option, // virtual frozen "item" on top of the stack } -impl Default for Stack { - fn default() -> Self { - Stack { - borrows: vec![BorStackItem::Shr], - frozen_since: None, - } - } -} - impl Stack { #[inline(always)] pub fn is_frozen(&self) -> bool { @@ -107,17 +101,50 @@ pub enum RefKind { Raw, } +/// Extra global state in the memory, available to the memory access hooks +#[derive(Debug)] +pub struct BarrierTracking { + next_id: CallId, + active_calls: HashSet, +} +pub type MemoryState = Rc>; + +impl Default for BarrierTracking { + fn default() -> Self { + BarrierTracking { + next_id: 0, + active_calls: HashSet::default(), + } + } +} + +impl BarrierTracking { + pub fn new_call(&mut self) -> CallId { + let id = self.next_id; + trace!("new_call: Assigning ID {}", id); + self.active_calls.insert(id); + self.next_id += 1; + id + } + + pub fn end_call(&mut self, id: CallId) { + assert!(self.active_calls.remove(&id)); + } +} + /// Extra global machine state #[derive(Clone, Debug)] pub struct State { clock: Timestamp } -impl State { - pub fn new() -> State { +impl Default for State { + fn default() -> Self { State { clock: 0 } } +} +impl State { fn increment_clock(&mut self) -> Timestamp { let val = self.clock; self.clock = val + 1; @@ -130,6 +157,7 @@ impl State { pub struct Stacks { // Even reading memory can have effects on the stack, so we need a `RefCell` here. stacks: RefCell>, + barrier_tracking: MemoryState, } /// Core per-location operations: deref, access, create. @@ -358,11 +386,16 @@ impl<'tcx> Stacks { } /// Hooks and glue -impl AllocationExtra for Stacks { +impl AllocationExtra for Stacks { #[inline(always)] - fn memory_allocated<'tcx>(size: Size, _extra: &()) -> Self { + fn memory_allocated<'tcx>(size: Size, extra: &MemoryState) -> Self { + let stack = Stack { + borrows: vec![BorStackItem::Shr], + frozen_since: None, + }; Stacks { - stacks: RefCell::new(RangeMap::new(size, Stack::default())) + stacks: RefCell::new(RangeMap::new(size, stack)), + barrier_tracking: Rc::clone(extra), } } From dd94930ee3d928c4029b4d118befdd620eaf0919 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Thu, 15 Nov 2018 19:49:00 +0100 Subject: [PATCH 04/12] implement function barriers --- src/stacked_borrows.rs | 142 +++++++++++------- .../stacked_borrows/aliasing_mut1.rs | 19 ++- .../stacked_borrows/aliasing_mut2.rs | 19 ++- .../stacked_borrows/aliasing_mut3.rs | 19 ++- .../stacked_borrows/aliasing_mut4.rs | 22 +-- .../box_exclusive_violation1.rs | 4 +- .../stacked_borrows/illegal_write1.rs | 15 +- .../invalidate_against_barrier1.rs | 13 ++ .../invalidate_against_barrier2.rs | 13 ++ .../mut_exclusive_violation1.rs | 2 +- 10 files changed, 174 insertions(+), 94 deletions(-) create mode 100644 tests/compile-fail-fullmir/stacked_borrows/invalidate_against_barrier1.rs create mode 100644 tests/compile-fail-fullmir/stacked_borrows/invalidate_against_barrier2.rs diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index 067e3bb844..6b851cd65b 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -62,18 +62,7 @@ pub enum BorStackItem { /// when there is no `UnsafeCell`. 
Shr, /// A barrier, tracking the function it belongs to by its index on the call stack - #[allow(dead_code)] // for future use - FnBarrier(usize) -} - -impl BorStackItem { - #[inline(always)] - pub fn is_fn_barrier(self) -> bool { - match self { - BorStackItem::FnBarrier(_) => true, - _ => false, - } - } + FnBarrier(CallId) } /// Extra per-location state @@ -130,6 +119,10 @@ impl BarrierTracking { pub fn end_call(&mut self, id: CallId) { assert!(self.active_calls.remove(&id)); } + + fn is_active(&self, id: CallId) -> bool { + self.active_calls.contains(&id) + } } /// Extra global machine state @@ -178,7 +171,11 @@ impl<'tcx> Stack { /// going to read or write. /// Returns the index of the item we matched, `None` if it was the frozen one. /// `kind` indicates which kind of reference is being dereferenced. - fn deref(&self, bor: Borrow, kind: RefKind) -> Result, String> { + fn deref( + &self, + bor: Borrow, + kind: RefKind, + ) -> Result, String> { // Exclude unique ref with frozen tag. if let (RefKind::Unique, Borrow::Shr(Some(_))) = (kind, bor) { return Err(format!("Encountered mutable reference with frozen tag ({:?})", bor)); @@ -200,7 +197,6 @@ impl<'tcx> Stack { // If we got here, we have to look for our item in the stack. for (idx, &itm) in self.borrows.iter().enumerate().rev() { match (itm, bor) { - (BorStackItem::FnBarrier(_), _) => break, (BorStackItem::Uniq(itm_t), Borrow::Uniq(bor_t)) if itm_t == bor_t => { // Found matching unique item. This satisfies U3. return Ok(Some(idx)) @@ -209,21 +205,25 @@ impl<'tcx> Stack { // Found matching shared/raw item. return Ok(Some(idx)) } - // Go on looking. + // Go on looking. We ignore barriers! When an `&mut` and an `&` alias, + // dereferencing the `&` is still possible (to reborrow), but doing + // an access is not. _ => {} } } // If we got here, we did not find our item. We have to error to satisfy U3. - Err(format!( - "Borrow being dereferenced ({:?}) does not exist on the stack, or is guarded by a barrier", - bor - )) + Err(format!("Borrow being dereferenced ({:?}) does not exist on the stack", bor)) } /// Perform an actual memory access using `bor`. We do not know any types here /// or whether things should be frozen, but we *do* know if this is reading /// or writing. - fn access(&mut self, bor: Borrow, is_write: bool) -> EvalResult<'tcx> { + fn access( + &mut self, + bor: Borrow, + is_write: bool, + barrier_tracking: &BarrierTracking, + ) -> EvalResult<'tcx> { // Check if we can match the frozen "item". // Not possible on writes! if self.is_frozen() { @@ -240,7 +240,12 @@ impl<'tcx> Stack { // Pop the stack until we have something matching. while let Some(&itm) = self.borrows.last() { match (itm, bor) { - (BorStackItem::FnBarrier(_), _) => break, + (BorStackItem::FnBarrier(call), _) if barrier_tracking.is_active(call) => { + return err!(MachineError(format!( + "Stopping looking for borrow being accessed ({:?}) because of barrier ({})", + bor, call + ))) + } (BorStackItem::Uniq(itm_t), Borrow::Uniq(bor_t)) if itm_t == bor_t => { // Found matching unique item. return Ok(()) @@ -265,7 +270,7 @@ impl<'tcx> Stack { } // If we got here, we did not find our item. err!(MachineError(format!( - "Borrow being accessed ({:?}) does not exist on the stack, or is guarded by a barrier", + "Borrow being accessed ({:?}) does not exist on the stack", bor ))) } @@ -275,18 +280,21 @@ impl<'tcx> Stack { /// is met: We cannot push `Uniq` onto frozen stacks. /// `kind` indicates which kind of reference is being created. 
fn create(&mut self, bor: Borrow, kind: RefKind) { - // First, push the item. We do this even if we will later freeze, because we - // will allow mutation of shared data at the expense of unfreezing. if self.frozen_since.is_some() { - // A frozen location, this should be impossible! - bug!("We should never try pushing to a frozen stack"); + // A frozen location? Possible if we create a barrier, then push again. + assert!(bor.is_shared(), "We should never try creating a unique borrow for a frozen stack"); + trace!("create: Not doing anything on frozen location"); + return; } - // First, push. + // First, push. We do this even if we will later freeze, because we + // will allow mutation of shared data at the expense of unfreezing. let itm = match bor { Borrow::Uniq(t) => BorStackItem::Uniq(t), Borrow::Shr(_) => BorStackItem::Shr, }; if *self.borrows.last().unwrap() == itm { + // This is just an optimization, no functional change: Avoid stacking + // multiple `Shr` on top of each other. assert!(bor.is_shared()); trace!("create: Sharing a shared location is a NOP"); } else { @@ -304,6 +312,21 @@ impl<'tcx> Stack { self.frozen_since = Some(bor_t); } } + + /// Add a barrier + fn barrier(&mut self, call: CallId) { + let itm = BorStackItem::FnBarrier(call); + if *self.borrows.last().unwrap() == itm { + // This is just an optimization, no functional change: Avoid stacking + // multiple identical barriers on top of each other. + // This can happen when a function receives several shared references + // that overlap. + trace!("barrier: Avoiding redundant extra barrier"); + } else { + trace!("barrier: Pushing barrier for call {}", call); + self.borrows.push(itm); + } + } } /// Higher-level per-location operations: deref, access, reborrow. @@ -330,6 +353,7 @@ impl<'tcx> Stacks { ptr: Pointer, size: Size, is_write: bool, + barrier_tracking: &BarrierTracking, ) -> EvalResult<'tcx> { trace!("{} access of tag {:?}: {:?}, size {}", if is_write { "read" } else { "write" }, @@ -339,7 +363,7 @@ impl<'tcx> Stacks { // are no accesses through other references, not even reads. let mut stacks = self.stacks.borrow_mut(); for stack in stacks.iter_mut(ptr.offset, size) { - stack.access(ptr.tag, is_write)?; + stack.access(ptr.tag, is_write, barrier_tracking)?; } Ok(()) } @@ -350,12 +374,19 @@ impl<'tcx> Stacks { &self, ptr: Pointer, size: Size, + mut barrier: Option, new_bor: Borrow, new_kind: RefKind, + barrier_tracking: &BarrierTracking, ) -> EvalResult<'tcx> { assert_eq!(new_bor.is_unique(), new_kind == RefKind::Unique); trace!("reborrow for tag {:?} to {:?} as {:?}: {:?}, size {}", ptr.tag, new_bor, new_kind, ptr, size.bytes()); + if new_kind == RefKind::Raw { + // No barrier for raw, including `&UnsafeCell`. They can rightfully + // alias with `&mut`. + barrier = None; + } let mut stacks = self.stacks.borrow_mut(); for stack in stacks.iter_mut(ptr.offset, size) { // Access source `ptr`, create new ref. @@ -364,21 +395,25 @@ impl<'tcx> Stacks { // the stack than the one we come from, just use that. // IOW, we check if `new_bor` *already* is "derived from" `ptr.tag`. // This also checks frozenness, if required. - let bor_redundant = match (ptr_idx, stack.deref(new_bor, new_kind)) { - // If the new borrow works with the frozen item, or else if it lives - // above the old one in the stack, our job here is done. - (_, Ok(None)) => true, - (Some(ptr_idx), Ok(Some(new_idx))) if new_idx >= ptr_idx => true, - // Otherwise we need to create a new borrow. 
- _ => false, - }; + let bor_redundant = barrier.is_none() && + match (ptr_idx, stack.deref(new_bor, new_kind)) { + // If the new borrow works with the frozen item, or else if it lives + // above the old one in the stack, our job here is done. + (_, Ok(None)) => true, + (Some(ptr_idx), Ok(Some(new_idx))) if new_idx >= ptr_idx => true, + // Otherwise we need to create a new borrow. + _ => false, + }; if bor_redundant { assert!(new_bor.is_shared(), "A unique reborrow can never be redundant"); trace!("reborrow is redundant"); continue; } // We need to do some actual work. - stack.access(ptr.tag, new_kind == RefKind::Unique)?; + stack.access(ptr.tag, new_kind == RefKind::Unique, barrier_tracking)?; + if let Some(call) = barrier { + stack.barrier(call); + } stack.create(new_bor, new_kind); } Ok(()) @@ -405,7 +440,7 @@ impl AllocationExtra for Stacks { ptr: Pointer, size: Size, ) -> EvalResult<'tcx> { - alloc.extra.access(ptr, size, /*is_write*/false) + alloc.extra.access(ptr, size, /*is_write*/false, &*alloc.extra.barrier_tracking.borrow()) } #[inline(always)] @@ -414,7 +449,7 @@ impl AllocationExtra for Stacks { ptr: Pointer, size: Size, ) -> EvalResult<'tcx> { - alloc.extra.access(ptr, size, /*is_write*/true) + alloc.extra.access(ptr, size, /*is_write*/true, &*alloc.extra.barrier_tracking.borrow()) } #[inline(always)] @@ -424,19 +459,18 @@ impl AllocationExtra for Stacks { size: Size, ) -> EvalResult<'tcx> { // This is like mutating - alloc.extra.access(ptr, size, /*is_write*/true) + alloc.extra.access(ptr, size, /*is_write*/true, &*alloc.extra.barrier_tracking.borrow()) // FIXME: Error out of there are any barriers? } } impl<'tcx> Stacks { /// Pushes the first item to the stacks. - pub fn first_item( + pub(crate) fn first_item( &mut self, itm: BorStackItem, size: Size ) { - assert!(!itm.is_fn_barrier()); for stack in self.stacks.get_mut().iter_mut(Size::ZERO, size) { assert!(stack.borrows.len() == 1); assert_eq!(stack.borrows.pop().unwrap(), BorStackItem::Shr); @@ -466,6 +500,7 @@ pub trait EvalContextExt<'tcx> { &mut self, place: MPlaceTy<'tcx, Borrow>, size: Size, + fn_barrier: bool, new_bor: Borrow ) -> EvalResult<'tcx, Pointer>; @@ -474,6 +509,7 @@ pub trait EvalContextExt<'tcx> { &mut self, ptr: ImmTy<'tcx, Borrow>, mutbl: Mutability, + fn_barrier: bool, ) -> EvalResult<'tcx, Immediate>; fn retag( @@ -566,7 +602,7 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { place: MPlaceTy<'tcx, Borrow>, size: Size, ) -> EvalResult<'tcx> { - self.reborrow(place, size, Borrow::default())?; + self.reborrow(place, size, /*fn_barrier*/ false, Borrow::default())?; Ok(()) } @@ -574,10 +610,12 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { &mut self, place: MPlaceTy<'tcx, Borrow>, size: Size, + fn_barrier: bool, new_bor: Borrow ) -> EvalResult<'tcx, Pointer> { let ptr = place.ptr.to_ptr()?; let new_ptr = Pointer::new_with_tag(ptr.alloc_id, ptr.offset, new_bor); + let barrier = if fn_barrier { Some(self.frame().extra) } else { None }; trace!("reborrow: Creating new reference for {:?} (pointee {}): {:?}", ptr, place.layout.ty, new_bor); @@ -589,12 +627,12 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { // Reference that cares about freezing. We need a frozen-sensitive reborrow. 
self.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| { let kind = if frozen { RefKind::Frozen } else { RefKind::Raw }; - alloc.extra.reborrow(cur_ptr, size, new_bor, kind) + alloc.extra.reborrow(cur_ptr, size, barrier, new_bor, kind, &*self.memory().extra.borrow()) })?; } else { // Just treat this as one big chunk. let kind = if new_bor.is_unique() { RefKind::Unique } else { RefKind::Raw }; - alloc.extra.reborrow(ptr, size, new_bor, kind)?; + alloc.extra.reborrow(ptr, size, barrier, new_bor, kind, &*self.memory().extra.borrow())?; } Ok(new_ptr) } @@ -603,6 +641,7 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { &mut self, val: ImmTy<'tcx, Borrow>, mutbl: Mutability, + fn_barrier: bool, ) -> EvalResult<'tcx, Immediate> { // We want a place for where the ptr *points to*, so we get one. let place = self.ref_to_mplace(val)?; @@ -622,7 +661,7 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { }; // Reborrow. - let new_ptr = self.reborrow(place, size, new_bor)?; + let new_ptr = self.reborrow(place, size, fn_barrier, new_bor)?; // Return new ptr let new_place = MemPlace { ptr: Scalar::Ptr(new_ptr), ..*place }; @@ -631,11 +670,9 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { fn retag( &mut self, - _fn_entry: bool, + fn_entry: bool, place: PlaceTy<'tcx, Borrow> ) -> EvalResult<'tcx> { - // TODO: Honor `fn_entry`. - // We need a visitor to visit all references. However, that requires // a `MemPlace`, so we have a fast path for reference types that // avoids allocating. @@ -648,18 +685,19 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { } { // fast path let val = self.read_immediate(self.place_to_op(place)?)?; - let val = self.retag_reference(val, mutbl)?; + let val = self.retag_reference(val, mutbl, fn_entry)?; self.write_immediate(val, place)?; return Ok(()); } let place = self.force_allocation(place)?; - let mut visitor = RetagVisitor { ecx: self }; + let mut visitor = RetagVisitor { ecx: self, fn_entry }; visitor.visit_value(place)?; // The actual visitor struct RetagVisitor<'ecx, 'a, 'mir, 'tcx> { ecx: &'ecx mut MiriEvalContext<'a, 'mir, 'tcx>, + fn_entry: bool, } impl<'ecx, 'a, 'mir, 'tcx> MutValueVisitor<'a, 'mir, 'tcx, Evaluator<'tcx>> @@ -684,7 +722,7 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { _ => return Ok(()), // nothing to do }; let val = self.ecx.read_immediate(place.into())?; - let val = self.ecx.retag_reference(val, mutbl)?; + let val = self.ecx.retag_reference(val, mutbl, self.fn_entry)?; self.ecx.write_immediate(val, place.into())?; Ok(()) } diff --git a/tests/compile-fail-fullmir/stacked_borrows/aliasing_mut1.rs b/tests/compile-fail-fullmir/stacked_borrows/aliasing_mut1.rs index e812e13e70..b82901985b 100644 --- a/tests/compile-fail-fullmir/stacked_borrows/aliasing_mut1.rs +++ b/tests/compile-fail-fullmir/stacked_borrows/aliasing_mut1.rs @@ -1,12 +1,17 @@ -// ignore-test validation_op is disabled - #![allow(unused_variables)] -mod safe { - pub fn safe(x: &mut i32, y: &mut i32) {} //~ ERROR: in conflict with lock WriteLock -} +use std::mem; + +pub fn safe(x: &mut i32, y: &mut i32) {} //~ ERROR barrier fn main() { - let x = &mut 0 as *mut _; - unsafe { safe::safe(&mut *x, &mut *x) }; + let mut x = 0; + let xraw: *mut i32 = unsafe { mem::transmute(&mut x) }; + // We need to apply some tricky to be able to call `safe` with two mutable references + // with the same tag: We transmute both the fn ptr (to 
take raw ptrs) and the argument + // (to be raw, but still have the unique tag). + let safe_raw: fn(x: *mut i32, y: *mut i32) = unsafe { + mem::transmute::(safe) + }; + safe_raw(xraw, xraw); } diff --git a/tests/compile-fail-fullmir/stacked_borrows/aliasing_mut2.rs b/tests/compile-fail-fullmir/stacked_borrows/aliasing_mut2.rs index 36ebcc2b4a..69caddfa8c 100644 --- a/tests/compile-fail-fullmir/stacked_borrows/aliasing_mut2.rs +++ b/tests/compile-fail-fullmir/stacked_borrows/aliasing_mut2.rs @@ -1,12 +1,17 @@ -// ignore-test validation_op is disabled - #![allow(unused_variables)] -mod safe { - pub fn safe(x: &i32, y: &mut i32) {} //~ ERROR: in conflict with lock ReadLock -} +use std::mem; + +pub fn safe(x: &i32, y: &mut i32) {} //~ ERROR barrier fn main() { - let x = &mut 0 as *mut _; - unsafe { safe::safe(&*x, &mut *x) }; + let mut x = 0; + let xref = &mut x; + let xraw: *mut i32 = unsafe { mem::transmute_copy(&xref) }; + let xshr = &*xref; + // transmute fn ptr around so that we can avoid retagging + let safe_raw: fn(x: *const i32, y: *mut i32) = unsafe { + mem::transmute::(safe) + }; + safe_raw(xshr, xraw); } diff --git a/tests/compile-fail-fullmir/stacked_borrows/aliasing_mut3.rs b/tests/compile-fail-fullmir/stacked_borrows/aliasing_mut3.rs index ad50fbd61b..d37f9e63f6 100644 --- a/tests/compile-fail-fullmir/stacked_borrows/aliasing_mut3.rs +++ b/tests/compile-fail-fullmir/stacked_borrows/aliasing_mut3.rs @@ -1,12 +1,17 @@ -// ignore-test validation_op is disabled - #![allow(unused_variables)] -mod safe { - pub fn safe(x: &mut i32, y: &i32) {} //~ ERROR: in conflict with lock WriteLock -} +use std::mem; + +pub fn safe(x: &mut i32, y: &i32) {} //~ ERROR does not exist on the stack fn main() { - let x = &mut 0 as *mut _; - unsafe { safe::safe(&mut *x, &*x) }; + let mut x = 0; + let xref = &mut x; + let xraw: *mut i32 = unsafe { mem::transmute_copy(&xref) }; + let xshr = &*xref; + // transmute fn ptr around so that we can avoid retagging + let safe_raw: fn(x: *mut i32, y: *const i32) = unsafe { + mem::transmute::(safe) + }; + safe_raw(xraw, xshr); } diff --git a/tests/compile-fail-fullmir/stacked_borrows/aliasing_mut4.rs b/tests/compile-fail-fullmir/stacked_borrows/aliasing_mut4.rs index a0f0a3cf97..bf65d6e230 100644 --- a/tests/compile-fail-fullmir/stacked_borrows/aliasing_mut4.rs +++ b/tests/compile-fail-fullmir/stacked_borrows/aliasing_mut4.rs @@ -1,15 +1,19 @@ -// ignore-test validation_op is disabled - #![allow(unused_variables)] -mod safe { - use std::cell::Cell; +use std::mem; +use std::cell::Cell; - // Make sure &mut UnsafeCell also has a lock to it - pub fn safe(x: &mut Cell, y: &i32) {} //~ ERROR: in conflict with lock WriteLock -} +// Make sure &mut UnsafeCell also is exclusive +pub fn safe(x: &i32, y: &mut Cell) {} //~ ERROR barrier fn main() { - let x = &mut 0 as *mut _; - unsafe { safe::safe(&mut *(x as *mut _), &*x) }; + let mut x = 0; + let xref = &mut x; + let xraw: *mut i32 = unsafe { mem::transmute_copy(&xref) }; + let xshr = &*xref; + // transmute fn ptr around so that we can avoid retagging + let safe_raw: fn(x: *const i32, y: *mut Cell) = unsafe { + mem::transmute::), _>(safe) + }; + safe_raw(xshr, xraw as *mut _); } diff --git a/tests/compile-fail-fullmir/stacked_borrows/box_exclusive_violation1.rs b/tests/compile-fail-fullmir/stacked_borrows/box_exclusive_violation1.rs index bd0fec859d..e8a182779a 100644 --- a/tests/compile-fail-fullmir/stacked_borrows/box_exclusive_violation1.rs +++ b/tests/compile-fail-fullmir/stacked_borrows/box_exclusive_violation1.rs @@ -8,7 
+8,7 @@ fn demo_mut_advanced_unique(mut our: Box) -> i32 { unknown_code_2(); // We know this will return 5 - *our //~ ERROR does not exist on the stack + *our } // Now comes the evil context @@ -21,7 +21,7 @@ fn unknown_code_1(x: &i32) { unsafe { } } fn unknown_code_2() { unsafe { - *LEAK = 7; + *LEAK = 7; //~ ERROR barrier } } fn main() { diff --git a/tests/compile-fail-fullmir/stacked_borrows/illegal_write1.rs b/tests/compile-fail-fullmir/stacked_borrows/illegal_write1.rs index b106cc8dc4..d0a23cb444 100644 --- a/tests/compile-fail-fullmir/stacked_borrows/illegal_write1.rs +++ b/tests/compile-fail-fullmir/stacked_borrows/illegal_write1.rs @@ -1,12 +1,9 @@ -fn evil(x: &u32) { - // mutating shared ref without `UnsafeCell` - let x : *mut u32 = x as *const _ as *mut _; - unsafe { *x = 42; } -} - fn main() { let target = Box::new(42); // has an implicit raw - let ref_ = &*target; - evil(ref_); // invalidates shared ref, activates raw - let _x = *ref_; //~ ERROR is not frozen + let xref = &*target; + { + let x : *mut u32 = xref as *const _ as *mut _; + unsafe { *x = 42; } // invalidates shared ref, activates raw + } + let _x = *xref; //~ ERROR is not frozen } diff --git a/tests/compile-fail-fullmir/stacked_borrows/invalidate_against_barrier1.rs b/tests/compile-fail-fullmir/stacked_borrows/invalidate_against_barrier1.rs new file mode 100644 index 0000000000..fc0dbb9e13 --- /dev/null +++ b/tests/compile-fail-fullmir/stacked_borrows/invalidate_against_barrier1.rs @@ -0,0 +1,13 @@ +fn inner(x: *mut i32, _y: &mut i32) { + // If `x` and `y` alias, retagging is fine with this... but we really + // shouldn't be allowed to use `x` at all because `y` was assumed to be + // unique for the duration of this call. + let _val = unsafe { *x }; //~ ERROR barrier +} + +fn main() { + let mut x = 0; + let xraw = &mut x as *mut _; + let xref = unsafe { &mut *xraw }; + inner(xraw, xref); +} diff --git a/tests/compile-fail-fullmir/stacked_borrows/invalidate_against_barrier2.rs b/tests/compile-fail-fullmir/stacked_borrows/invalidate_against_barrier2.rs new file mode 100644 index 0000000000..a080c0958e --- /dev/null +++ b/tests/compile-fail-fullmir/stacked_borrows/invalidate_against_barrier2.rs @@ -0,0 +1,13 @@ +fn inner(x: *mut i32, _y: &i32) { + // If `x` and `y` alias, retagging is fine with this... but we really + // shouldn't be allowed to write to `x` at all because `y` was assumed to be + // immutable for the duration of this call. 
+ unsafe { *x = 0 }; //~ ERROR barrier +} + +fn main() { + let mut x = 0; + let xraw = &mut x as *mut _; + let xref = unsafe { &*xraw }; + inner(xraw, xref); +} diff --git a/tests/compile-fail-fullmir/stacked_borrows/mut_exclusive_violation1.rs b/tests/compile-fail-fullmir/stacked_borrows/mut_exclusive_violation1.rs index fec699e35b..3fe6b65674 100644 --- a/tests/compile-fail-fullmir/stacked_borrows/mut_exclusive_violation1.rs +++ b/tests/compile-fail-fullmir/stacked_borrows/mut_exclusive_violation1.rs @@ -21,7 +21,7 @@ fn unknown_code_1(x: &i32) { unsafe { } } fn unknown_code_2() { unsafe { - *LEAK = 7; //~ ERROR does not exist on the stack + *LEAK = 7; //~ ERROR barrier } } fn main() { From 58309956c10914d55171666abd1782063e921d18 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 21 Nov 2018 13:43:55 +0100 Subject: [PATCH 05/12] for now, we allow Undef in raw pointers as we do in integers --- tests/compile-fail/validity/undef.rs | 12 ------------ 1 file changed, 12 deletions(-) delete mode 100644 tests/compile-fail/validity/undef.rs diff --git a/tests/compile-fail/validity/undef.rs b/tests/compile-fail/validity/undef.rs deleted file mode 100644 index f86fef9454..0000000000 --- a/tests/compile-fail/validity/undef.rs +++ /dev/null @@ -1,12 +0,0 @@ -#![allow(unused_variables)] -// error-pattern: encountered undefined address in pointer - -use std::mem; - -fn make_raw() -> *const f32 { - unsafe { mem::uninitialized() } -} - -fn main() { - let _x = make_raw(); -} From 194710e112ef5cc7bd9f8a7f5924c4574eca0406 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 21 Nov 2018 15:21:41 +0100 Subject: [PATCH 06/12] no barriers for boxes --- src/stacked_borrows.rs | 37 +++++++++++-------- .../box_exclusive_violation1.rs | 4 +- 2 files changed, 23 insertions(+), 18 deletions(-) diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index 6b851cd65b..a31fd462e5 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -673,19 +673,27 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { fn_entry: bool, place: PlaceTy<'tcx, Borrow> ) -> EvalResult<'tcx> { + // Determine mutability and whether to add a barrier. + // Cannot use `builtin_deref` because that reports *immutable* for `Box`, + // making it useless. + fn qualify(ty: ty::Ty<'_>, fn_entry: bool) -> Option<(Mutability, bool)> { + match ty.sty { + // References are simple + ty::Ref(_, _, mutbl) => Some((mutbl, fn_entry)), + // Boxes do not get a barrier: Barriers reflect that references outlive the call + // they were passed in to; that's just not the case for boxes. + ty::Adt(..) if ty.is_box() => Some((MutMutable, false)), + _ => None, + } + } + // We need a visitor to visit all references. However, that requires // a `MemPlace`, so we have a fast path for reference types that // avoids allocating. - // Cannot use `builtin_deref` because that reports *immutable* for `Box`, - // making it useless. - if let Some(mutbl) = match place.layout.ty.sty { - ty::Ref(_, _, mutbl) => Some(mutbl), - ty::Adt(..) 
if place.layout.ty.is_box() => Some(MutMutable), - _ => None, // handled with the general case below - } { + if let Some((mutbl, barrier)) = qualify(place.layout.ty, fn_entry) { // fast path let val = self.read_immediate(self.place_to_op(place)?)?; - let val = self.retag_reference(val, mutbl, fn_entry)?; + let val = self.retag_reference(val, mutbl, barrier)?; self.write_immediate(val, place)?; return Ok(()); } @@ -716,14 +724,11 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { { // Cannot use `builtin_deref` because that reports *immutable* for `Box`, // making it useless. - let mutbl = match place.layout.ty.sty { - ty::Ref(_, _, mutbl) => mutbl, - ty::Adt(..) if place.layout.ty.is_box() => MutMutable, - _ => return Ok(()), // nothing to do - }; - let val = self.ecx.read_immediate(place.into())?; - let val = self.ecx.retag_reference(val, mutbl, self.fn_entry)?; - self.ecx.write_immediate(val, place.into())?; + if let Some((mutbl, barrier)) = qualify(place.layout.ty, self.fn_entry) { + let val = self.ecx.read_immediate(place.into())?; + let val = self.ecx.retag_reference(val, mutbl, barrier)?; + self.ecx.write_immediate(val, place.into())?; + } Ok(()) } } diff --git a/tests/compile-fail-fullmir/stacked_borrows/box_exclusive_violation1.rs b/tests/compile-fail-fullmir/stacked_borrows/box_exclusive_violation1.rs index e8a182779a..bd0fec859d 100644 --- a/tests/compile-fail-fullmir/stacked_borrows/box_exclusive_violation1.rs +++ b/tests/compile-fail-fullmir/stacked_borrows/box_exclusive_violation1.rs @@ -8,7 +8,7 @@ fn demo_mut_advanced_unique(mut our: Box) -> i32 { unknown_code_2(); // We know this will return 5 - *our + *our //~ ERROR does not exist on the stack } // Now comes the evil context @@ -21,7 +21,7 @@ fn unknown_code_1(x: &i32) { unsafe { } } fn unknown_code_2() { unsafe { - *LEAK = 7; //~ ERROR barrier + *LEAK = 7; } } fn main() { From 97e010f5b97f767f4eceadd3d4ea9556052e8393 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 21 Nov 2018 15:25:47 +0100 Subject: [PATCH 07/12] barriers prevent deallocation --- src/stacked_borrows.rs | 82 +++++++++++++------ .../deallocate_against_barrier.rs | 13 +++ 2 files changed, 70 insertions(+), 25 deletions(-) create mode 100644 tests/compile-fail-fullmir/stacked_borrows/deallocate_against_barrier.rs diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index a31fd462e5..e06943f2ba 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -90,6 +90,14 @@ pub enum RefKind { Raw, } +/// What kind of access is being performed? +#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +pub enum AccessKind { + Read, + Write, + Dealloc, +} + /// Extra global state in the memory, available to the memory access hooks #[derive(Debug)] pub struct BarrierTracking { @@ -221,13 +229,13 @@ impl<'tcx> Stack { fn access( &mut self, bor: Borrow, - is_write: bool, + kind: AccessKind, barrier_tracking: &BarrierTracking, ) -> EvalResult<'tcx> { // Check if we can match the frozen "item". // Not possible on writes! if self.is_frozen() { - if !is_write { + if kind == AccessKind::Read { // When we are frozen, we just accept all reads. No harm in this. // The deref already checked that `Uniq` items are in the stack, and that // the location is frozen if it should be. @@ -247,26 +255,41 @@ impl<'tcx> Stack { ))) } (BorStackItem::Uniq(itm_t), Borrow::Uniq(bor_t)) if itm_t == bor_t => { - // Found matching unique item. - return Ok(()) + // Found matching unique item. Continue after the match. 
} - (BorStackItem::Shr, _) if !is_write => { + (BorStackItem::Shr, _) if kind == AccessKind::Read => { // When reading, everything can use a shared item! // We do not want to do this when writing: Writing to an `&mut` // should reaffirm its exclusivity (i.e., make sure it is - // on top of the stack). - return Ok(()) + // on top of the stack). Continue after the match. } (BorStackItem::Shr, Borrow::Shr(_)) => { - // Found matching shared item. - return Ok(()) + // Found matching shared item. Continue after the match. } _ => { - // Pop this. This ensures U2. + // Pop this, go on. This ensures U2. let itm = self.borrows.pop().unwrap(); trace!("access: Popping {:?}", itm); + continue + } + } + // If we got here, we found a matching item. Congratulations! + // However, we are not done yet: If this access is deallocating, we must make sure + // there are no active barriers remaining on the stack. + if kind == AccessKind::Dealloc { + for &itm in self.borrows.iter().rev() { + match itm { + BorStackItem::FnBarrier(call) if barrier_tracking.is_active(call) => { + return err!(MachineError(format!( + "Deallocating with active barrier ({})", call + ))) + } + _ => {}, + } } } + // NOW we are done. + return Ok(()) } // If we got here, we did not find our item. err!(MachineError(format!( @@ -352,18 +375,16 @@ impl<'tcx> Stacks { &self, ptr: Pointer, size: Size, - is_write: bool, - barrier_tracking: &BarrierTracking, + kind: AccessKind, ) -> EvalResult<'tcx> { - trace!("{} access of tag {:?}: {:?}, size {}", - if is_write { "read" } else { "write" }, - ptr.tag, ptr, size.bytes()); + trace!("{:?} access of tag {:?}: {:?}, size {}", kind, ptr.tag, ptr, size.bytes()); // Even reads can have a side-effect, by invalidating other references. // This is fundamentally necessary since `&mut` asserts that there // are no accesses through other references, not even reads. + let barrier_tracking = self.barrier_tracking.borrow(); let mut stacks = self.stacks.borrow_mut(); for stack in stacks.iter_mut(ptr.offset, size) { - stack.access(ptr.tag, is_write, barrier_tracking)?; + stack.access(ptr.tag, kind, &*barrier_tracking)?; } Ok(()) } @@ -377,7 +398,6 @@ impl<'tcx> Stacks { mut barrier: Option, new_bor: Borrow, new_kind: RefKind, - barrier_tracking: &BarrierTracking, ) -> EvalResult<'tcx> { assert_eq!(new_bor.is_unique(), new_kind == RefKind::Unique); trace!("reborrow for tag {:?} to {:?} as {:?}: {:?}, size {}", @@ -385,8 +405,17 @@ impl<'tcx> Stacks { if new_kind == RefKind::Raw { // No barrier for raw, including `&UnsafeCell`. They can rightfully // alias with `&mut`. + // FIXME: This means that the `dereferencable` attribute on non-frozen shared + // references is incorrect! They are dereferencable when the function is + // called, but might become non-dereferencable during the coruse of execution. + // Also see [1], [2]. + // + // [1]: , + // [2]: barrier = None; } + let barrier_tracking = self.barrier_tracking.borrow(); let mut stacks = self.stacks.borrow_mut(); for stack in stacks.iter_mut(ptr.offset, size) { // Access source `ptr`, create new ref. @@ -410,7 +439,12 @@ impl<'tcx> Stacks { continue; } // We need to do some actual work. 
- stack.access(ptr.tag, new_kind == RefKind::Unique, barrier_tracking)?; + let access_kind = if new_kind == RefKind::Unique { + AccessKind::Write + } else { + AccessKind::Read + }; + stack.access(ptr.tag, access_kind, &*barrier_tracking)?; if let Some(call) = barrier { stack.barrier(call); } @@ -440,7 +474,7 @@ impl AllocationExtra for Stacks { ptr: Pointer, size: Size, ) -> EvalResult<'tcx> { - alloc.extra.access(ptr, size, /*is_write*/false, &*alloc.extra.barrier_tracking.borrow()) + alloc.extra.access(ptr, size, AccessKind::Read) } #[inline(always)] @@ -449,7 +483,7 @@ impl AllocationExtra for Stacks { ptr: Pointer, size: Size, ) -> EvalResult<'tcx> { - alloc.extra.access(ptr, size, /*is_write*/true, &*alloc.extra.barrier_tracking.borrow()) + alloc.extra.access(ptr, size, AccessKind::Write) } #[inline(always)] @@ -458,9 +492,7 @@ impl AllocationExtra for Stacks { ptr: Pointer, size: Size, ) -> EvalResult<'tcx> { - // This is like mutating - alloc.extra.access(ptr, size, /*is_write*/true, &*alloc.extra.barrier_tracking.borrow()) - // FIXME: Error out of there are any barriers? + alloc.extra.access(ptr, size, AccessKind::Dealloc) } } @@ -627,12 +659,12 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for MiriEvalContext<'a, 'mir, 'tcx> { // Reference that cares about freezing. We need a frozen-sensitive reborrow. self.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| { let kind = if frozen { RefKind::Frozen } else { RefKind::Raw }; - alloc.extra.reborrow(cur_ptr, size, barrier, new_bor, kind, &*self.memory().extra.borrow()) + alloc.extra.reborrow(cur_ptr, size, barrier, new_bor, kind) })?; } else { // Just treat this as one big chunk. let kind = if new_bor.is_unique() { RefKind::Unique } else { RefKind::Raw }; - alloc.extra.reborrow(ptr, size, barrier, new_bor, kind, &*self.memory().extra.borrow())?; + alloc.extra.reborrow(ptr, size, barrier, new_bor, kind)?; } Ok(new_ptr) } diff --git a/tests/compile-fail-fullmir/stacked_borrows/deallocate_against_barrier.rs b/tests/compile-fail-fullmir/stacked_borrows/deallocate_against_barrier.rs new file mode 100644 index 0000000000..eb988a5899 --- /dev/null +++ b/tests/compile-fail-fullmir/stacked_borrows/deallocate_against_barrier.rs @@ -0,0 +1,13 @@ +// error-pattern: Deallocating with active barrier + +fn inner(x: &mut i32, f: fn(&mut i32)) { + // `f` may mutate, but it may not deallocate! + f(x) +} + +fn main() { + inner(Box::leak(Box::new(0)), |x| { + let raw = x as *mut _; + drop(unsafe { Box::from_raw(raw) }); + }); +} From 26fe778c55ef562e88bb59fd4d43adc1b7885e24 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Oliver=20S=CC=B6c=CC=B6h=CC=B6n=CC=B6e=CC=B6i=CC=B6d=CC=B6?= =?UTF-8?q?e=CC=B6r=20Scherer?= Date: Wed, 28 Nov 2018 09:58:23 +0100 Subject: [PATCH 08/12] Typo Co-Authored-By: RalfJung --- src/stacked_borrows.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index e06943f2ba..31f80fe2f6 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -407,7 +407,7 @@ impl<'tcx> Stacks { // alias with `&mut`. // FIXME: This means that the `dereferencable` attribute on non-frozen shared // references is incorrect! They are dereferencable when the function is - // called, but might become non-dereferencable during the coruse of execution. + // called, but might become non-dereferencable during the course of execution. // Also see [1], [2]. 
// // [1]: Date: Wed, 28 Nov 2018 16:14:41 +0100 Subject: [PATCH 09/12] bump Rust version --- rust-version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rust-version b/rust-version index 0268b07ac4..826965c66d 100644 --- a/rust-version +++ b/rust-version @@ -1 +1 @@ -nightly-2018-11-26 +nightly-2018-11-28 From fb72348e5f4bd396d91c808b1e404f233908fb05 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 28 Nov 2018 19:02:56 +0100 Subject: [PATCH 10/12] disable async-fn, for now --- tests/run-pass-fullmir/async-fn.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/run-pass-fullmir/async-fn.rs b/tests/run-pass-fullmir/async-fn.rs index 7c32b026df..56e6031ceb 100644 --- a/tests/run-pass-fullmir/async-fn.rs +++ b/tests/run-pass-fullmir/async-fn.rs @@ -1,3 +1,6 @@ +// FIXME: investigate why this fails since barriers have been added +// compile-flags: -Zmiri-disable-validation + #![feature( async_await, await_macro, From 42e73b5536ff01c4ba846197fc18b5e051ba8515 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Thu, 29 Nov 2018 17:29:00 +0100 Subject: [PATCH 11/12] async fn got fixed --- tests/run-pass-fullmir/async-fn.rs | 3 --- 1 file changed, 3 deletions(-) diff --git a/tests/run-pass-fullmir/async-fn.rs b/tests/run-pass-fullmir/async-fn.rs index 56e6031ceb..7c32b026df 100644 --- a/tests/run-pass-fullmir/async-fn.rs +++ b/tests/run-pass-fullmir/async-fn.rs @@ -1,6 +1,3 @@ -// FIXME: investigate why this fails since barriers have been added -// compile-flags: -Zmiri-disable-validation - #![feature( async_await, await_macro, From 3999db1159f06672c25ba661b3a5e4de26f5b58e Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Fri, 30 Nov 2018 07:26:20 +0100 Subject: [PATCH 12/12] new nightly, new luck --- rust-version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rust-version b/rust-version index 826965c66d..6fd720c5c7 100644 --- a/rust-version +++ b/rust-version @@ -1 +1 @@ -nightly-2018-11-28 +nightly-2018-11-30
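
A note for readers skimming the series: the piece of bookkeeping that is easiest to lose in the diffs is the call-ID tracking. PATCH 03 hands out a fresh `CallId` in `Machine::stack_push` and retires it in `Machine::stack_pop`, and PATCHES 04-07 push `FnBarrier(call)` items that only have teeth while that call is still active. The sketch below restates that bookkeeping as a standalone, runnable Rust program. It is an illustrative model only, not code from the patches: it reuses the patch's names (`BarrierTracking`, `new_call`, `end_call`, `is_active`) but drops the `Rc<RefCell<...>>` sharing and the per-location borrow stacks.

use std::collections::HashSet;

type CallId = u64;

#[derive(Default)]
struct BarrierTracking {
    next_id: CallId,
    active_calls: HashSet<CallId>,
}

impl BarrierTracking {
    // Corresponds to `Machine::stack_push` in PATCH 03: a frame is entered, a fresh ID is issued.
    fn new_call(&mut self) -> CallId {
        let id = self.next_id;
        self.active_calls.insert(id);
        self.next_id += 1;
        id
    }

    // Corresponds to `Machine::stack_pop`: the frame returns and its ID is retired.
    fn end_call(&mut self, id: CallId) {
        assert!(self.active_calls.remove(&id));
    }

    // A `FnBarrier(id)` on a borrow stack only blocks accesses while `id` is still active.
    fn is_active(&self, id: CallId) -> bool {
        self.active_calls.contains(&id)
    }
}

fn main() {
    let mut tracking = BarrierTracking::default();

    let call = tracking.new_call();
    assert!(tracking.is_active(call));   // during the call: an access hitting FnBarrier(call) errors

    tracking.end_call(call);
    assert!(!tracking.is_active(call));  // after return: the stale barrier no longer blocks popping
}

This lifecycle is why the new compile-fail tests (invalidate_against_barrier1/2, deallocate_against_barrier) only report errors while the protected call is still on the stack, and why a barrier left behind by a function that has already returned is harmless.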