diff --git a/README.md b/README.md
index 07c096046640..6ab11e7e2be6 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,4 @@
-
-
-
+# The Rust Programming Language
This is the main source code repository for [Rust]. It contains the compiler,
standard library, and documentation.
diff --git a/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs b/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs
index be5b247bb9f0..8946ac43bc65 100644
--- a/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs
+++ b/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs
@@ -824,7 +824,7 @@ pub(crate) fn codegen_intrinsic_call<'tcx>(
}
ty => unreachable!("bswap {}", ty),
}
- };
+ }
let res = CValue::by_val(swap(&mut fx.bcx, arg), fx.layout_of(T));
ret.write_cvalue(fx, res);
};
diff --git a/compiler/rustc_error_codes/src/error_codes/E0373.md b/compiler/rustc_error_codes/src/error_codes/E0373.md
index fd9698779311..effa597aad91 100644
--- a/compiler/rustc_error_codes/src/error_codes/E0373.md
+++ b/compiler/rustc_error_codes/src/error_codes/E0373.md
@@ -50,3 +50,24 @@ fn foo() -> Box<dyn Fn() -> u32> {
Now that the closure has its own copy of the data, there's no need to worry
about safety.
+
+This error may also be encountered while using `async` blocks:
+
+```compile_fail,E0373,edition2018
+use std::future::Future;
+
+async fn f() {
+ let v = vec![1, 2, 3i32];
+ spawn(async { //~ ERROR E0373
+ println!("{:?}", v)
+ });
+}
+
+fn spawn<F: Future + Send + 'static>(future: F) {
+ unimplemented!()
+}
+```
+
+Similarly to closures, `async` blocks are not executed immediately and may
+capture closed-over data by reference. For more information, see
+https://rust-lang.github.io/async-book/03_async_await/01_chapter.html.
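As with the closure case earlier in E0373, the usual fix is to move ownership of the captured data into the block with `async move`. A minimal sketch of that fix (not part of the patch), reusing the `spawn` stand-in from the example above:

```rust
use std::future::Future;

async fn f() {
    let v = vec![1, 2, 3i32];
    // `async move` transfers ownership of `v` into the future, so nothing
    // borrowed from `f` escapes and E0373 no longer applies.
    spawn(async move {
        println!("{:?}", v);
    });
}

fn spawn<F: Future + Send + 'static>(_future: F) {
    unimplemented!()
}
```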
diff --git a/compiler/rustc_mir/src/borrow_check/diagnostics/conflict_errors.rs b/compiler/rustc_mir/src/borrow_check/diagnostics/conflict_errors.rs
index db02ee67910b..a5fb8a1cbe8b 100644
--- a/compiler/rustc_mir/src/borrow_check/diagnostics/conflict_errors.rs
+++ b/compiler/rustc_mir/src/borrow_check/diagnostics/conflict_errors.rs
@@ -141,6 +141,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
self.add_moved_or_invoked_closure_note(location, used_place, &mut err);
let mut is_loop_move = false;
+ let mut in_pattern = false;
for move_site in &move_site_vec {
let move_out = self.move_data.moves[(*move_site).moi];
@@ -256,6 +257,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
"ref ".to_string(),
Applicability::MachineApplicable,
);
+ in_pattern = true;
}
if let Some(DesugaringKind::ForLoop(_)) = move_span.desugaring_kind() {
@@ -302,7 +304,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
let place = &self.move_data.move_paths[mpi].place;
let ty = place.ty(self.body, self.infcx.tcx).ty;
- if is_loop_move {
+ // If we're in pattern, we do nothing in favor of the previous suggestion (#80913).
+ if is_loop_move & !in_pattern {
if let ty::Ref(_, _, hir::Mutability::Mut) = ty.kind() {
// We have a `&mut` ref, we need to reborrow on each iteration (#62112).
err.span_suggestion_verbose(
@@ -1318,21 +1321,30 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
Applicability::MachineApplicable,
);
- let msg = match category {
+ match category {
ConstraintCategory::Return(_) | ConstraintCategory::OpaqueType => {
- format!("{} is returned here", kind)
+ let msg = format!("{} is returned here", kind);
+ err.span_note(constraint_span, &msg);
}
ConstraintCategory::CallArgument => {
fr_name.highlight_region_name(&mut err);
- format!("function requires argument type to outlive `{}`", fr_name)
+ if matches!(use_span.generator_kind(), Some(GeneratorKind::Async(_))) {
+ err.note(
+ "async blocks are not executed immediately and must either take a \
+ reference or ownership of outside variables they use",
+ );
+ } else {
+ let msg = format!("function requires argument type to outlive `{}`", fr_name);
+ err.span_note(constraint_span, &msg);
+ }
}
_ => bug!(
"report_escaping_closure_capture called with unexpected constraint \
category: `{:?}`",
category
),
- };
- err.span_note(constraint_span, &msg);
+ }
+
err
}
diff --git a/library/alloc/src/collections/binary_heap.rs b/library/alloc/src/collections/binary_heap.rs
index 4dfdbe0a5b45..3c515af71f5f 100644
--- a/library/alloc/src/collections/binary_heap.rs
+++ b/library/alloc/src/collections/binary_heap.rs
@@ -161,7 +161,10 @@ use super::SpecExtend;
/// It is a logic error for an item to be modified in such a way that the
/// item's ordering relative to any other item, as determined by the `Ord`
/// trait, changes while it is in the heap. This is normally only possible
-/// through `Cell`, `RefCell`, global state, I/O, or unsafe code.
+/// through `Cell`, `RefCell`, global state, I/O, or unsafe code. The
+/// behavior resulting from such a logic error is not specified, but will
+/// not result in undefined behavior. This could include panics, incorrect
+/// results, aborts, memory leaks, and non-termination.
///
/// # Examples
///
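The paragraph added above leaves the consequences abstract. A minimal sketch of such a logic error (an illustration using `Cell` for interior mutability, not code from this patch): changing an item's key while it sits in the heap may reorder later `pop`s, but it is never undefined behavior.

```rust
use std::cell::Cell;
use std::cmp::Ordering;
use std::collections::BinaryHeap;

// An item whose ordering key can change after insertion.
#[derive(PartialEq, Eq)]
struct Item(Cell<i32>);

impl Ord for Item {
    fn cmp(&self, other: &Self) -> Ordering {
        self.0.get().cmp(&other.0.get())
    }
}
impl PartialOrd for Item {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

fn main() {
    let mut heap = BinaryHeap::new();
    heap.push(Item(Cell::new(1)));
    heap.push(Item(Cell::new(5)));

    // Logic error: the largest item's key is changed while it is in the heap.
    // Subsequent pops may come out in the "wrong" order, the documented
    // unspecified-but-not-undefined behavior.
    heap.peek().unwrap().0.set(-10);
    let first = heap.pop().unwrap().0.get();
    println!("popped key: {}", first); // may well print -10 here
}
```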
diff --git a/library/alloc/src/collections/btree/map.rs b/library/alloc/src/collections/btree/map.rs
index 944e0e65cf7c..5e63a303d22c 100644
--- a/library/alloc/src/collections/btree/map.rs
+++ b/library/alloc/src/collections/btree/map.rs
@@ -51,6 +51,9 @@ pub(super) const MIN_LEN: usize = node::MIN_LEN_AFTER_SPLIT;
/// It is a logic error for a key to be modified in such a way that the key's ordering relative to
/// any other key, as determined by the [`Ord`] trait, changes while it is in the map. This is
/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
+/// The behavior resulting from such a logic error is not specified, but will not result in
+/// undefined behavior. This could include panics, incorrect results, aborts, memory leaks, and
+/// non-termination.
///
/// [`Cell`]: core::cell::Cell
/// [`RefCell`]: core::cell::RefCell
diff --git a/library/alloc/src/collections/btree/set.rs b/library/alloc/src/collections/btree/set.rs
index c72e305a1f94..c2a96dd8ef47 100644
--- a/library/alloc/src/collections/btree/set.rs
+++ b/library/alloc/src/collections/btree/set.rs
@@ -22,6 +22,9 @@ use super::Recover;
/// It is a logic error for an item to be modified in such a way that the item's ordering relative
/// to any other item, as determined by the [`Ord`] trait, changes while it is in the set. This is
/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
+/// The behavior resulting from such a logic error is not specified, but will not result in
+/// undefined behavior. This could include panics, incorrect results, aborts, memory leaks, and
+/// non-termination.
///
/// [`Ord`]: core::cmp::Ord
/// [`Cell`]: core::cell::Cell
diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs
index cfad111aa546..d0bfa038aa13 100644
--- a/library/alloc/src/lib.rs
+++ b/library/alloc/src/lib.rs
@@ -120,6 +120,7 @@
#![feature(receiver_trait)]
#![cfg_attr(bootstrap, feature(min_const_generics))]
#![feature(min_specialization)]
+#![feature(set_ptr_value)]
#![feature(slice_ptr_get)]
#![feature(slice_ptr_len)]
#![feature(staged_api)]
diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index 0973a6e362bc..ee03f15eece3 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -353,6 +353,26 @@ impl<T> Rc<T> {
/// to upgrade the weak reference before this function returns will result
/// in a `None` value. However, the weak reference may be cloned freely and
/// stored for use at a later time.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(arc_new_cyclic)]
+ /// #![allow(dead_code)]
+ /// use std::rc::{Rc, Weak};
+ ///
+ /// struct Gadget {
+ /// self_weak: Weak<Self>,
+ /// // ... more fields
+ /// }
+ /// impl Gadget {
+ /// pub fn new() -> Rc<Self> {
+ /// Rc::new_cyclic(|self_weak| {
+ /// Gadget { self_weak: self_weak.clone(), /* ... */ }
+ /// })
+ /// }
+ /// }
+ /// ```
#[unstable(feature = "arc_new_cyclic", issue = "75861")]
pub fn new_cyclic(data_fn: impl FnOnce(&Weak<T>) -> T) -> Rc<T> {
// Construct the inner in the "uninitialized" state with a single
@@ -829,8 +849,8 @@ impl<T: ?Sized> Rc<T> {
let offset = unsafe { data_offset(ptr) };
// Reverse the offset to find the original RcBox.
- let fake_ptr = ptr as *mut RcBox<T>;
- let rc_ptr = unsafe { set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset)) };
+ let rc_ptr =
+ unsafe { (ptr as *mut RcBox<T>).set_ptr_value((ptr as *mut u8).offset(-offset)) };
unsafe { Self::from_ptr(rc_ptr) }
}
@@ -850,7 +870,7 @@ impl<T: ?Sized> Rc<T> {
pub fn downgrade(this: &Self) -> Weak<T> {
this.inner().inc_weak();
// Make sure we do not create a dangling Weak
- debug_assert!(!is_dangling(this.ptr));
+ debug_assert!(!is_dangling(this.ptr.as_ptr()));
Weak { ptr: this.ptr }
}
@@ -1164,7 +1184,7 @@ impl<T: ?Sized> Rc<T> {
Self::allocate_for_layout(
Layout::for_value(&*ptr),
|layout| Global.allocate(layout),
- |mem| set_data_ptr(ptr as *mut T, mem) as *mut RcBox<T>,
+ |mem| (ptr as *mut RcBox<T>).set_ptr_value(mem),
)
}
}
@@ -1203,20 +1223,7 @@ impl<T> Rc<[T]> {
)
}
}
-}
-
-/// Sets the data pointer of a `?Sized` raw pointer.
-///
-/// For a slice/trait object, this sets the `data` field and leaves the rest
-/// unchanged. For a sized raw pointer, this simply sets the pointer.
-unsafe fn set_data_ptr<T: ?Sized, U>(mut ptr: *mut T, data: *mut U) -> *mut T {
- unsafe {
- ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8);
- }
- ptr
-}
-impl<T> Rc<[T]> {
/// Copy elements from slice into newly allocated Rc<\[T\]>
///
/// Unsafe because the caller must either take ownership or bind `T: Copy`
@@ -1860,8 +1867,8 @@ impl<T> Weak<T> {
}
}
-pub(crate) fn is_dangling<T: ?Sized>(ptr: NonNull<T>) -> bool {
- let address = ptr.as_ptr() as *mut () as usize;
+pub(crate) fn is_dangling<T: ?Sized>(ptr: *mut T) -> bool {
+ let address = ptr as *mut () as usize;
address == usize::MAX
}
@@ -1872,7 +1879,7 @@ struct WeakInner<'a> {
strong: &'a Cell<usize>,
}
-impl<T> Weak<T> {
+impl<T: ?Sized> Weak<T> {
/// Returns a raw pointer to the object `T` pointed to by this `Weak`.
///
/// The pointer is valid only if there are some strong references. The pointer may be dangling,
@@ -1902,15 +1909,15 @@ impl<T> Weak<T> {
pub fn as_ptr(&self) -> *const T {
let ptr: *mut RcBox<T> = NonNull::as_ptr(self.ptr);
- // SAFETY: we must offset the pointer manually, and said pointer may be
- // a dangling weak (usize::MAX) if T is sized. data_offset is safe to call,
- // because we know that a pointer to unsized T was derived from a real
- // unsized T, as dangling weaks are only created for sized T. wrapping_offset
- // is used so that we can use the same code path for the non-dangling
- // unsized case and the potentially dangling sized case.
- unsafe {
- let offset = data_offset(ptr as *mut T);
- set_data_ptr(ptr as *mut T, (ptr as *mut u8).wrapping_offset(offset))
+ if is_dangling(ptr) {
+ // If the pointer is dangling, we return the sentinel directly. This cannot be
+ // a valid payload address, as the payload is at least as aligned as RcBox (usize).
+ ptr as *const T
+ } else {
+ // SAFETY: if is_dangling returns false, then the pointer is dereferencable.
+ // The payload may be dropped at this point, and we have to maintain provenance,
+ // so use raw pointer manipulation.
+ unsafe { &raw const (*ptr).value }
}
}
@@ -1992,22 +1999,24 @@ impl<T> Weak<T> {
/// [`new`]: Weak::new
#[stable(feature = "weak_into_raw", since = "1.45.0")]
pub unsafe fn from_raw(ptr: *const T) -> Self {
- // SAFETY: data_offset is safe to call, because this pointer originates from a Weak.
// See Weak::as_ptr for context on how the input pointer is derived.
- let offset = unsafe { data_offset(ptr) };
- // Reverse the offset to find the original RcBox.
- // SAFETY: we use wrapping_offset here because the pointer may be dangling (but only if T: Sized).
- let ptr = unsafe {
- set_data_ptr(ptr as *mut RcBox, (ptr as *mut u8).wrapping_offset(-offset))
+ let ptr = if is_dangling(ptr as *mut T) {
+ // This is a dangling Weak.
+ ptr as *mut RcBox<T>
+ } else {
+ // Otherwise, we're guaranteed the pointer came from a nondangling Weak.
+ // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T.
+ let offset = unsafe { data_offset(ptr) };
+ // Thus, we reverse the offset to get the whole RcBox.
+ // SAFETY: the pointer originated from a Weak, so this offset is safe.
+ unsafe { (ptr as *mut RcBox<T>).set_ptr_value((ptr as *mut u8).offset(-offset)) }
};
// SAFETY: we now have recovered the original Weak pointer, so can create the Weak.
Weak { ptr: unsafe { NonNull::new_unchecked(ptr) } }
}
-}
-impl<T: ?Sized> Weak<T> {
/// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying
/// dropping of the inner value if successful.
///
@@ -2070,7 +2079,7 @@ impl<T: ?Sized> Weak<T> {
/// (i.e., when this `Weak` was created by `Weak::new`).
#[inline]
fn inner(&self) -> Option<WeakInner<'_>> {
- if is_dangling(self.ptr) {
+ if is_dangling(self.ptr.as_ptr()) {
None
} else {
// We are careful to *not* create a reference covering the "data" field, as
@@ -2325,21 +2334,19 @@ impl<T: ?Sized> AsRef<T> for Rc<T> {
#[stable(feature = "pin", since = "1.33.0")]
impl<T: ?Sized> Unpin for Rc<T> {}
-/// Get the offset within an `RcBox` for
-/// a payload of type described by a pointer.
+/// Get the offset within an `RcBox` for the payload behind a pointer.
///
/// # Safety
///
-/// This has the same safety requirements as `align_of_val_raw`. In effect:
-///
-/// - This function is safe for any argument if `T` is sized, and
-/// - if `T` is unsized, the pointer must have appropriate pointer metadata
-/// acquired from the real instance that you are getting this offset for.
+/// The pointer must point to (and have valid metadata for) a previously
+/// valid instance of T, but the T is allowed to be dropped.
unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
- // Align the unsized value to the end of the `RcBox`.
- // Because it is ?Sized, it will always be the last field in memory.
- // Note: This is a detail of the current implementation of the compiler,
- // and is not a guaranteed language detail. Do not rely on it outside of std.
+ // Align the unsized value to the end of the RcBox.
+ // Because RcBox is repr(C), it will always be the last field in memory.
+ // SAFETY: since the only unsized types possible are slices, trait objects,
+ // and extern types, the input safety requirement is currently enough to
+ // satisfy the requirements of align_of_val_raw; this is an implementation
+ // detail of the language that may not be relied upon outside of std.
unsafe { data_offset_align(align_of_val_raw(ptr)) }
}
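The `rc.rs` and `sync.rs` hunks above replace the private `set_data_ptr` helper with the unstable `set_ptr_value` pointer method (enabled in `lib.rs` via `#![feature(set_ptr_value)]`). A minimal sketch of the behavior these hunks rely on, assuming the nightly signature used here (a `*const u8`/`*mut u8` address argument): the address of a possibly-fat pointer is replaced while its metadata (slice length or vtable) is kept.

```rust
#![feature(set_ptr_value)]

fn main() {
    let a = [1u8, 2, 3];
    let b = [4u8, 5, 6];

    // A fat pointer: address of `a` plus a length of 3.
    let p: *const [u8] = &a[..];

    // Swap in `b`'s address but keep the length metadata, which is what the
    // removed `set_data_ptr` helper used to do with a raw `ptr::write`.
    let q: *const [u8] = p.set_ptr_value(b.as_ptr());

    // SAFETY: `q` now points at `b`, which is live and 3 bytes long.
    assert_eq!(unsafe { &*q }, &[4u8, 5, 6]);
}
```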
diff --git a/library/alloc/src/rc/tests.rs b/library/alloc/src/rc/tests.rs
index 2d183a8c88c6..843a9b07fa93 100644
--- a/library/alloc/src/rc/tests.rs
+++ b/library/alloc/src/rc/tests.rs
@@ -208,6 +208,30 @@ fn into_from_weak_raw() {
}
}
+#[test]
+fn test_into_from_weak_raw_unsized() {
+ use std::fmt::Display;
+ use std::string::ToString;
+
+ let arc: Rc = Rc::from("foo");
+ let weak: Weak = Rc::downgrade(&arc);
+
+ let ptr = Weak::into_raw(weak.clone());
+ let weak2 = unsafe { Weak::from_raw(ptr) };
+
+ assert_eq!(unsafe { &*ptr }, "foo");
+ assert!(weak.ptr_eq(&weak2));
+
+ let arc: Rc<dyn Display> = Rc::new(123);
+ let weak: Weak<dyn Display> = Rc::downgrade(&arc);
+
+ let ptr = Weak::into_raw(weak.clone());
+ let weak2 = unsafe { Weak::from_raw(ptr) };
+
+ assert_eq!(unsafe { &*ptr }.to_string(), "123");
+ assert!(weak.ptr_eq(&weak2));
+}
+
#[test]
fn get_mut() {
let mut x = Rc::new(3);
@@ -294,6 +318,23 @@ fn test_unsized() {
assert_eq!(foo, foo.clone());
}
+#[test]
+fn test_maybe_thin_unsized() {
+ // If/when custom thin DSTs exist, this test should be updated to use one
+ use std::ffi::{CStr, CString};
+
+ let x: Rc<CStr> = Rc::from(CString::new("swordfish").unwrap().into_boxed_c_str());
+ assert_eq!(format!("{:?}", x), "\"swordfish\"");
+ let y: Weak<CStr> = Rc::downgrade(&x);
+ drop(x);
+
+ // At this point, the weak points to a dropped DST
+ assert!(y.upgrade().is_none());
+ // But we still need to be able to get the alloc layout to drop.
+ // CStr has no drop glue, but custom DSTs might, and need to work.
+ drop(y);
+}
+
#[test]
fn test_from_owned() {
let foo = 123;
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index 05bfeccbda13..c0d684fbb457 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -846,8 +846,7 @@ impl<T: ?Sized> Arc<T> {
let offset = data_offset(ptr);
// Reverse the offset to find the original ArcInner.
- let fake_ptr = ptr as *mut ArcInner<T>;
- let arc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
+ let arc_ptr = (ptr as *mut ArcInner<T>).set_ptr_value((ptr as *mut u8).offset(-offset));
Self::from_ptr(arc_ptr)
}
@@ -888,7 +887,7 @@ impl<T: ?Sized> Arc<T> {
match this.inner().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) {
Ok(_) => {
// Make sure we do not create a dangling Weak
- debug_assert!(!is_dangling(this.ptr));
+ debug_assert!(!is_dangling(this.ptr.as_ptr()));
return Weak { ptr: this.ptr };
}
Err(old) => cur = old,
@@ -1131,7 +1130,7 @@ impl<T: ?Sized> Arc<T> {
Self::allocate_for_layout(
Layout::for_value(&*ptr),
|layout| Global.allocate(layout),
- |mem| set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>,
+ |mem| (ptr as *mut ArcInner<T>).set_ptr_value(mem) as *mut ArcInner<T>,
)
}
}
@@ -1170,20 +1169,7 @@ impl<T> Arc<[T]> {
)
}
}
-}
-
-/// Sets the data pointer of a `?Sized` raw pointer.
-///
-/// For a slice/trait object, this sets the `data` field and leaves the rest
-/// unchanged. For a sized raw pointer, this simply sets the pointer.
-unsafe fn set_data_ptr<T: ?Sized, U>(mut ptr: *mut T, data: *mut U) -> *mut T {
- unsafe {
- ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8);
- }
- ptr
-}
-impl<T> Arc<[T]> {
/// Copy elements from slice into newly allocated Arc<\[T\]>
///
/// Unsafe because the caller must either take ownership or bind `T: Copy`.
@@ -1653,7 +1639,7 @@ struct WeakInner<'a> {
strong: &'a atomic::AtomicUsize,
}
-impl<T> Weak<T> {
+impl<T: ?Sized> Weak<T> {
/// Returns a raw pointer to the object `T` pointed to by this `Weak`.
///
/// The pointer is valid only if there are some strong references. The pointer may be dangling,
@@ -1683,15 +1669,15 @@ impl<T> Weak<T> {
pub fn as_ptr(&self) -> *const T {
let ptr: *mut ArcInner<T> = NonNull::as_ptr(self.ptr);
- // SAFETY: we must offset the pointer manually, and said pointer may be
- // a dangling weak (usize::MAX) if T is sized. data_offset is safe to call,
- // because we know that a pointer to unsized T was derived from a real
- // unsized T, as dangling weaks are only created for sized T. wrapping_offset
- // is used so that we can use the same code path for the non-dangling
- // unsized case and the potentially dangling sized case.
- unsafe {
- let offset = data_offset(ptr as *mut T);
- set_data_ptr(ptr as *mut T, (ptr as *mut u8).wrapping_offset(offset))
+ if is_dangling(ptr) {
+ // If the pointer is dangling, we return the sentinel directly. This cannot be
+ // a valid payload address, as the payload is at least as aligned as ArcInner (usize).
+ ptr as *const T
+ } else {
+ // SAFETY: if is_dangling returns false, then the pointer is dereferencable.
+ // The payload may be dropped at this point, and we have to maintain provenance,
+ // so use raw pointer manipulation.
+ unsafe { &raw mut (*ptr).data }
}
}
@@ -1773,18 +1759,22 @@ impl<T> Weak<T> {
/// [`forget`]: std::mem::forget
#[stable(feature = "weak_into_raw", since = "1.45.0")]
pub unsafe fn from_raw(ptr: *const T) -> Self {
- // SAFETY: data_offset is safe to call, because this pointer originates from a Weak.
// See Weak::as_ptr for context on how the input pointer is derived.
- let offset = unsafe { data_offset(ptr) };
- // Reverse the offset to find the original ArcInner.
- // SAFETY: we use wrapping_offset here because the pointer may be dangling (but only if T: Sized)
- let ptr = unsafe {
- set_data_ptr(ptr as *mut ArcInner, (ptr as *mut u8).wrapping_offset(-offset))
+ let ptr = if is_dangling(ptr as *mut T) {
+ // This is a dangling Weak.
+ ptr as *mut ArcInner<T>
+ } else {
+ // Otherwise, we're guaranteed the pointer came from a nondangling Weak.
+ // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T.
+ let offset = unsafe { data_offset(ptr) };
+ // Thus, we reverse the offset to get the whole ArcInner.
+ // SAFETY: the pointer originated from a Weak, so this offset is safe.
+ unsafe { (ptr as *mut ArcInner<T>).set_ptr_value((ptr as *mut u8).offset(-offset)) }
};
// SAFETY: we now have recovered the original Weak pointer, so can create the Weak.
- unsafe { Weak { ptr: NonNull::new_unchecked(ptr) } }
+ Weak { ptr: unsafe { NonNull::new_unchecked(ptr) } }
}
}
@@ -1889,7 +1879,7 @@ impl<T: ?Sized> Weak<T> {
/// (i.e., when this `Weak` was created by `Weak::new`).
#[inline]
fn inner(&self) -> Option<WeakInner<'_>> {
- if is_dangling(self.ptr) {
+ if is_dangling(self.ptr.as_ptr()) {
None
} else {
// We are careful to *not* create a reference covering the "data" field, as
@@ -2469,21 +2459,19 @@ impl<T: ?Sized> AsRef<T> for Arc<T> {
#[stable(feature = "pin", since = "1.33.0")]
impl<T: ?Sized> Unpin for Arc<T> {}
-/// Get the offset within an `ArcInner` for
-/// a payload of type described by a pointer.
+/// Get the offset within an `ArcInner` for the payload behind a pointer.
///
/// # Safety
///
-/// This has the same safety requirements as `align_of_val_raw`. In effect:
-///
-/// - This function is safe for any argument if `T` is sized, and
-/// - if `T` is unsized, the pointer must have appropriate pointer metadata
-/// acquired from the real instance that you are getting this offset for.
+/// The pointer must point to (and have valid metadata for) a previously
+/// valid instance of T, but the T is allowed to be dropped.
unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
- // Align the unsized value to the end of the `ArcInner`.
- // Because it is `?Sized`, it will always be the last field in memory.
- // Note: This is a detail of the current implementation of the compiler,
- // and is not a guaranteed language detail. Do not rely on it outside of std.
+ // Align the unsized value to the end of the ArcInner.
+ // Because ArcInner is repr(C), it will always be the last field in memory.
+ // SAFETY: since the only unsized types possible are slices, trait objects,
+ // and extern types, the input safety requirement is currently enough to
+ // satisfy the requirements of align_of_val_raw; this is an implementation
+ // detail of the language that may not be relied upon outside of std.
unsafe { data_offset_align(align_of_val_raw(ptr)) }
}
diff --git a/library/alloc/src/sync/tests.rs b/library/alloc/src/sync/tests.rs
index 5067af1d4ff6..4ccb32fbbf63 100644
--- a/library/alloc/src/sync/tests.rs
+++ b/library/alloc/src/sync/tests.rs
@@ -158,6 +158,30 @@ fn into_from_weak_raw() {
}
}
+#[test]
+fn test_into_from_weak_raw_unsized() {
+ use std::fmt::Display;
+ use std::string::ToString;
+
+ let arc: Arc = Arc::from("foo");
+ let weak: Weak = Arc::downgrade(&arc);
+
+ let ptr = Weak::into_raw(weak.clone());
+ let weak2 = unsafe { Weak::from_raw(ptr) };
+
+ assert_eq!(unsafe { &*ptr }, "foo");
+ assert!(weak.ptr_eq(&weak2));
+
+ let arc: Arc<dyn Display> = Arc::new(123);
+ let weak: Weak<dyn Display> = Arc::downgrade(&arc);
+
+ let ptr = Weak::into_raw(weak.clone());
+ let weak2 = unsafe { Weak::from_raw(ptr) };
+
+ assert_eq!(unsafe { &*ptr }.to_string(), "123");
+ assert!(weak.ptr_eq(&weak2));
+}
+
#[test]
fn test_cowarc_clone_make_mut() {
let mut cow0 = Arc::new(75);
@@ -329,6 +353,23 @@ fn test_unsized() {
assert!(y.upgrade().is_none());
}
+#[test]
+fn test_maybe_thin_unsized() {
+ // If/when custom thin DSTs exist, this test should be updated to use one
+ use std::ffi::{CStr, CString};
+
+ let x: Arc<CStr> = Arc::from(CString::new("swordfish").unwrap().into_boxed_c_str());
+ assert_eq!(format!("{:?}", x), "\"swordfish\"");
+ let y: Weak<CStr> = Arc::downgrade(&x);
+ drop(x);
+
+ // At this point, the weak points to a dropped DST
+ assert!(y.upgrade().is_none());
+ // But we still need to be able to get the alloc layout to drop.
+ // CStr has no drop glue, but custom DSTs might, and need to work.
+ drop(y);
+}
+
#[test]
fn test_from_owned() {
let foo = 123;
diff --git a/library/core/src/cell.rs b/library/core/src/cell.rs
index c5ab7a39ff0c..fa0fbaa35c95 100644
--- a/library/core/src/cell.rs
+++ b/library/core/src/cell.rs
@@ -1261,6 +1261,40 @@ impl<'b, T: ?Sized> Ref<'b, T> {
Ref { value: f(orig.value), borrow: orig.borrow }
}
+ /// Makes a new `Ref` for an optional component of the borrowed data. The
+ /// original guard is returned as an `Err(..)` if the closure returns
+ /// `None`.
+ ///
+ /// The `RefCell` is already immutably borrowed, so this cannot fail.
+ ///
+ /// This is an associated function that needs to be used as
+ /// `Ref::filter_map(...)`. A method would interfere with methods of the same
+ /// name on the contents of a `RefCell` used through `Deref`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(cell_filter_map)]
+ ///
+ /// use std::cell::{RefCell, Ref};
+ ///
+ /// let c = RefCell::new(vec![1, 2, 3]);
+ /// let b1: Ref<Vec<u32>> = c.borrow();
+ /// let b2: Result<Ref<u32>, _> = Ref::filter_map(b1, |v| v.get(1));
+ /// assert_eq!(*b2.unwrap(), 2);
+ /// ```
+ #[unstable(feature = "cell_filter_map", reason = "recently added", issue = "81061")]
+ #[inline]
+ pub fn filter_map<U: ?Sized, F>(orig: Ref<'b, T>, f: F) -> Result<Ref<'b, U>, Self>
+ where
+ F: FnOnce(&T) -> Option<&U>,
+ {
+ match f(orig.value) {
+ Some(value) => Ok(Ref { value, borrow: orig.borrow }),
+ None => Err(orig),
+ }
+ }
+
/// Splits a `Ref` into multiple `Ref`s for different components of the
/// borrowed data.
///
@@ -1372,6 +1406,58 @@ impl<'b, T: ?Sized> RefMut<'b, T> {
RefMut { value: f(value), borrow }
}
+ /// Makes a new `RefMut` for an optional component of the borrowed data. The
+ /// original guard is returned as an `Err(..)` if the closure returns
+ /// `None`.
+ ///
+ /// The `RefCell` is already mutably borrowed, so this cannot fail.
+ ///
+ /// This is an associated function that needs to be used as
+ /// `RefMut::filter_map(...)`. A method would interfere with methods of the
+ /// same name on the contents of a `RefCell` used through `Deref`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(cell_filter_map)]
+ ///
+ /// use std::cell::{RefCell, RefMut};
+ ///
+ /// let c = RefCell::new(vec![1, 2, 3]);
+ ///
+ /// {
+ /// let b1: RefMut<Vec<u32>> = c.borrow_mut();
+ /// let mut b2: Result<RefMut<u32>, _> = RefMut::filter_map(b1, |v| v.get_mut(1));
+ ///
+ /// if let Ok(mut b2) = b2 {
+ /// *b2 += 2;
+ /// }
+ /// }
+ ///
+ /// assert_eq!(*c.borrow(), vec![1, 4, 3]);
+ /// ```
+ #[unstable(feature = "cell_filter_map", reason = "recently added", issue = "81061")]
+ #[inline]
+ pub fn filter_map<U: ?Sized, F>(orig: RefMut<'b, T>, f: F) -> Result<RefMut<'b, U>, Self>
+ where
+ F: FnOnce(&mut T) -> Option<&mut U>,
+ {
+ // FIXME(nll-rfc#40): fix borrow-check
+ let RefMut { value, borrow } = orig;
+ let value = value as *mut T;
+ // SAFETY: function holds onto an exclusive reference for the duration
+ // of its call through `orig`, and the pointer is only de-referenced
+ // inside of the function call never allowing the exclusive reference to
+ // escape.
+ match f(unsafe { &mut *value }) {
+ Some(value) => Ok(RefMut { value, borrow }),
+ None => {
+ // SAFETY: same as above.
+ Err(RefMut { value: unsafe { &mut *value }, borrow })
+ }
+ }
+ }
+
/// Splits a `RefMut` into multiple `RefMut`s for different components of the
/// borrowed data.
///
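A short usage sketch for the `Err(..)` path of the new API (requires the unstable `cell_filter_map` feature, as in the doc examples above): when the closure returns `None`, the original guard comes back and the borrow is not lost.

```rust
#![feature(cell_filter_map)]

use std::cell::{Ref, RefCell};

fn main() {
    let c = RefCell::new(vec![1, 2, 3]);
    let b1: Ref<Vec<i32>> = c.borrow();

    // Out-of-bounds lookup: the closure returns None, so filter_map returns
    // the original guard in the Err variant instead of dropping it.
    match Ref::filter_map(b1, |v| v.get(10)) {
        Ok(_) => unreachable!("index 10 is out of bounds"),
        Err(original) => assert_eq!(original.len(), 3),
    }
}
```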
diff --git a/library/core/src/iter/adapters/zip.rs b/library/core/src/iter/adapters/zip.rs
index 5766fd3c8874..98b8dca96140 100644
--- a/library/core/src/iter/adapters/zip.rs
+++ b/library/core/src/iter/adapters/zip.rs
@@ -286,6 +286,7 @@ where
#[inline]
unsafe fn get_unchecked(&mut self, idx: usize) -> <Self as Iterator>::Item {
+ let idx = self.index + idx;
// SAFETY: the caller must uphold the contract for
// `Iterator::__iterator_get_unchecked`.
unsafe { (self.a.__iterator_get_unchecked(idx), self.b.__iterator_get_unchecked(idx)) }
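This one-line fix offsets the requested index by the number of pairs the zip has already yielded (`self.index`). A small sketch of the observable invariant it preserves; treating a zip nested inside another zip as a case that exercises this unchecked path is an assumption about the internals, but the expected output below holds either way.

```rust
fn main() {
    let a = [1, 2, 3, 4];
    let b = [10, 20, 30, 40];
    let c = [100, 200, 300];

    let mut ab = a.iter().zip(b.iter());
    ab.next(); // the pair (1, 10) has already been consumed

    // Indexed access into `ab` must now be relative to the *remaining* pairs,
    // i.e. shifted by `self.index`; otherwise the already-consumed prefix
    // would be read again.
    let v: Vec<_> = ab.zip(c.iter()).collect();
    assert_eq!(v, vec![((&2, &20), &100), ((&3, &30), &200), ((&4, &40), &300)]);
}
```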
diff --git a/library/core/src/task/poll.rs b/library/core/src/task/poll.rs
index 6851f3fcd2fc..42c9d9f0cc03 100644
--- a/library/core/src/task/poll.rs
+++ b/library/core/src/task/poll.rs
@@ -84,7 +84,7 @@ impl<T, E> Poll<Result<T, E>> {
impl Poll