diff --git a/library/alloc/src/string.rs b/library/alloc/src/string.rs index 37614a7ca4571..7c71594c43035 100644 --- a/library/alloc/src/string.rs +++ b/library/alloc/src/string.rs @@ -1489,10 +1489,11 @@ impl String { Some(ch) } - /// Removes a [`char`] from this `String` at a byte position and returns it. + /// Removes a [`char`] from this `String` at byte position `idx` and returns it. /// - /// This is an *O*(*n*) operation, as it requires copying every element in the - /// buffer. + /// Copies all bytes after the removed char to new positions. + /// + /// Note that calling this in a loop can result in quadratic behavior. /// /// # Panics /// @@ -1678,10 +1679,13 @@ impl String { drop(guard); } - /// Inserts a character into this `String` at a byte position. + /// Inserts a character into this `String` at byte position `idx`. + /// + /// Reallocates if `self.capacity()` is insufficient, which may involve copying all + /// `self.capacity()` bytes. Makes space for the insertion by copying all bytes of + /// `&self[idx..]` to new positions. /// - /// This is an *O*(*n*) operation as it requires copying every element in the - /// buffer. + /// Note that calling this in a loop can result in quadratic behavior. /// /// # Panics /// @@ -1733,10 +1737,13 @@ impl String { } } - /// Inserts a string slice into this `String` at a byte position. + /// Inserts a string slice into this `String` at byte position `idx`. + /// + /// Reallocates if `self.capacity()` is insufficient, which may involve copying all + /// `self.capacity()` bytes. Makes space for the insertion by copying all bytes of + /// `&self[idx..]` to new positions. /// - /// This is an *O*(*n*) operation as it requires copying every element in the - /// buffer. + /// Note that calling this in a loop can result in quadratic behavior. /// /// # Panics /// diff --git a/library/core/src/cell.rs b/library/core/src/cell.rs index a4b6efe35fc14..a2c1ba835f366 100644 --- a/library/core/src/cell.rs +++ b/library/core/src/cell.rs @@ -719,7 +719,7 @@ impl Cell<[T; N]> { #[rustc_diagnostic_item = "RefCell"] #[stable(feature = "rust1", since = "1.0.0")] pub struct RefCell { - borrow: Cell, + borrow: Cell, // Stores the location of the earliest currently active borrow. // This gets updated whenever we go from having zero borrows // to having a single borrow. When a borrow occurs, this gets included @@ -732,54 +732,48 @@ pub struct RefCell { /// An error returned by [`RefCell::try_borrow`]. #[stable(feature = "try_borrow", since = "1.13.0")] #[non_exhaustive] +#[derive(Debug)] pub struct BorrowError { #[cfg(feature = "debug_refcell")] location: &'static crate::panic::Location<'static>, } #[stable(feature = "try_borrow", since = "1.13.0")] -impl Debug for BorrowError { +impl Display for BorrowError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let mut builder = f.debug_struct("BorrowError"); - #[cfg(feature = "debug_refcell")] - builder.field("location", self.location); + let res = write!( + f, + "RefCell already mutably borrowed; a previous borrow was at {}", + self.location + ); - builder.finish() - } -} + #[cfg(not(feature = "debug_refcell"))] + let res = Display::fmt("RefCell already mutably borrowed", f); -#[stable(feature = "try_borrow", since = "1.13.0")] -impl Display for BorrowError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - Display::fmt("already mutably borrowed", f) + res } } /// An error returned by [`RefCell::try_borrow_mut`]. #[stable(feature = "try_borrow", since = "1.13.0")] #[non_exhaustive] +#[derive(Debug)] pub struct BorrowMutError { #[cfg(feature = "debug_refcell")] location: &'static crate::panic::Location<'static>, } #[stable(feature = "try_borrow", since = "1.13.0")] -impl Debug for BorrowMutError { +impl Display for BorrowMutError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let mut builder = f.debug_struct("BorrowMutError"); - #[cfg(feature = "debug_refcell")] - builder.field("location", self.location); + let res = write!(f, "RefCell already borrowed; a previous borrow was at {}", self.location); - builder.finish() - } -} + #[cfg(not(feature = "debug_refcell"))] + let res = Display::fmt("RefCell already borrowed", f); -#[stable(feature = "try_borrow", since = "1.13.0")] -impl Display for BorrowMutError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - Display::fmt("already borrowed", f) + res } } @@ -788,7 +782,7 @@ impl Display for BorrowMutError { #[track_caller] #[cold] fn panic_already_borrowed(err: BorrowMutError) -> ! { - panic!("already borrowed: {:?}", err) + panic!("{err}") } // This ensures the panicking code is outlined from `borrow` for `RefCell`. @@ -796,7 +790,7 @@ fn panic_already_borrowed(err: BorrowMutError) -> ! { #[track_caller] #[cold] fn panic_already_mutably_borrowed(err: BorrowError) -> ! { - panic!("already mutably borrowed: {:?}", err) + panic!("{err}") } // Positive values represent the number of `Ref` active. Negative values @@ -806,22 +800,22 @@ fn panic_already_mutably_borrowed(err: BorrowError) -> ! { // // `Ref` and `RefMut` are both two words in size, and so there will likely never // be enough `Ref`s or `RefMut`s in existence to overflow half of the `usize` -// range. Thus, a `BorrowFlag` will probably never overflow or underflow. +// range. Thus, a `BorrowCounter` will probably never overflow or underflow. // However, this is not a guarantee, as a pathological program could repeatedly // create and then mem::forget `Ref`s or `RefMut`s. Thus, all code must // explicitly check for overflow and underflow in order to avoid unsafety, or at // least behave correctly in the event that overflow or underflow happens (e.g., // see BorrowRef::new). -type BorrowFlag = isize; -const UNUSED: BorrowFlag = 0; +type BorrowCounter = isize; +const UNUSED: BorrowCounter = 0; #[inline(always)] -fn is_writing(x: BorrowFlag) -> bool { +fn is_writing(x: BorrowCounter) -> bool { x < UNUSED } #[inline(always)] -fn is_reading(x: BorrowFlag) -> bool { +fn is_reading(x: BorrowCounter) -> bool { x > UNUSED } @@ -1401,12 +1395,12 @@ impl From for RefCell { impl, U> CoerceUnsized> for RefCell {} struct BorrowRef<'b> { - borrow: &'b Cell, + borrow: &'b Cell, } impl<'b> BorrowRef<'b> { #[inline] - fn new(borrow: &'b Cell) -> Option> { + fn new(borrow: &'b Cell) -> Option> { let b = borrow.get().wrapping_add(1); if !is_reading(b) { // Incrementing borrow can result in a non-reading value (<= 0) in these cases: @@ -1447,7 +1441,7 @@ impl Clone for BorrowRef<'_> { debug_assert!(is_reading(borrow)); // Prevent the borrow counter from overflowing into // a writing borrow. - assert!(borrow != BorrowFlag::MAX); + assert!(borrow != BorrowCounter::MAX); self.borrow.set(borrow + 1); BorrowRef { borrow: self.borrow } } @@ -1795,7 +1789,7 @@ impl<'b, T: ?Sized> RefMut<'b, T> { } struct BorrowRefMut<'b> { - borrow: &'b Cell, + borrow: &'b Cell, } impl Drop for BorrowRefMut<'_> { @@ -1809,7 +1803,7 @@ impl Drop for BorrowRefMut<'_> { impl<'b> BorrowRefMut<'b> { #[inline] - fn new(borrow: &'b Cell) -> Option> { + fn new(borrow: &'b Cell) -> Option> { // NOTE: Unlike BorrowRefMut::clone, new is called to create the initial // mutable reference, and so there must currently be no existing // references. Thus, while clone increments the mutable refcount, here @@ -1833,7 +1827,7 @@ impl<'b> BorrowRefMut<'b> { let borrow = self.borrow.get(); debug_assert!(is_writing(borrow)); // Prevent the borrow counter from underflowing. - assert!(borrow != BorrowFlag::MIN); + assert!(borrow != BorrowCounter::MIN); self.borrow.set(borrow - 1); BorrowRefMut { borrow: self.borrow } } diff --git a/library/core/src/clone.rs b/library/core/src/clone.rs index 2c0662c96290a..57de507a73e82 100644 --- a/library/core/src/clone.rs +++ b/library/core/src/clone.rs @@ -36,6 +36,8 @@ #![stable(feature = "rust1", since = "1.0.0")] +use crate::marker::PointeeSized; + mod uninit; /// A common trait that allows explicit creation of a duplicate value. @@ -283,7 +285,7 @@ impl_use_cloned! { reason = "deriving hack, should not be public", issue = "none" )] -pub struct AssertParamIsClone { +pub struct AssertParamIsClone { _field: crate::marker::PhantomData, } #[doc(hidden)] @@ -293,7 +295,7 @@ pub struct AssertParamIsClone { reason = "deriving hack, should not be public", issue = "none" )] -pub struct AssertParamIsCopy { +pub struct AssertParamIsCopy { _field: crate::marker::PhantomData, } @@ -530,6 +532,8 @@ unsafe impl CloneToUninit for crate::bstr::ByteStr { /// are implemented in `traits::SelectionContext::copy_clone_conditions()` /// in `rustc_trait_selection`. mod impls { + use crate::marker::PointeeSized; + macro_rules! impl_clone { ($($t:ty)*) => { $( @@ -560,7 +564,7 @@ mod impls { } #[stable(feature = "rust1", since = "1.0.0")] - impl Clone for *const T { + impl Clone for *const T { #[inline(always)] fn clone(&self) -> Self { *self @@ -568,7 +572,7 @@ mod impls { } #[stable(feature = "rust1", since = "1.0.0")] - impl Clone for *mut T { + impl Clone for *mut T { #[inline(always)] fn clone(&self) -> Self { *self @@ -577,7 +581,7 @@ mod impls { /// Shared references can be cloned, but mutable references *cannot*! #[stable(feature = "rust1", since = "1.0.0")] - impl Clone for &T { + impl Clone for &T { #[inline(always)] #[rustc_diagnostic_item = "noop_method_clone"] fn clone(&self) -> Self { @@ -587,5 +591,5 @@ mod impls { /// Shared references can be cloned, but mutable references *cannot*! #[stable(feature = "rust1", since = "1.0.0")] - impl !Clone for &mut T {} + impl !Clone for &mut T {} } diff --git a/library/core/src/cmp.rs b/library/core/src/cmp.rs index c315131f4136c..5cb1a14847702 100644 --- a/library/core/src/cmp.rs +++ b/library/core/src/cmp.rs @@ -29,6 +29,7 @@ mod bytewise; pub(crate) use bytewise::BytewiseEq; use self::Ordering::*; +use crate::marker::PointeeSized; use crate::ops::ControlFlow; /// Trait for comparisons using the equality operator. @@ -246,7 +247,7 @@ use crate::ops::ControlFlow; append_const_msg )] #[rustc_diagnostic_item = "PartialEq"] -pub trait PartialEq { +pub trait PartialEq: PointeeSized { /// Tests for `self` and `other` values to be equal, and is used by `==`. #[must_use] #[stable(feature = "rust1", since = "1.0.0")] @@ -332,7 +333,7 @@ pub macro PartialEq($item:item) { #[doc(alias = "!=")] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_diagnostic_item = "Eq"] -pub trait Eq: PartialEq { +pub trait Eq: PartialEq + PointeeSized { // this method is used solely by `impl Eq or #[derive(Eq)]` to assert that every component of a // type implements `Eq` itself. The current deriving infrastructure means doing this assertion // without using a method on this trait is nearly impossible. @@ -361,7 +362,7 @@ pub macro Eq($item:item) { #[doc(hidden)] #[allow(missing_debug_implementations)] #[unstable(feature = "derive_eq", reason = "deriving hack, should not be public", issue = "none")] -pub struct AssertParamIsEq { +pub struct AssertParamIsEq { _field: crate::marker::PhantomData, } @@ -954,7 +955,7 @@ impl Clone for Reverse { #[doc(alias = ">=")] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_diagnostic_item = "Ord"] -pub trait Ord: Eq + PartialOrd { +pub trait Ord: Eq + PartialOrd + PointeeSized { /// This method returns an [`Ordering`] between `self` and `other`. /// /// By convention, `self.cmp(&other)` returns the ordering matching the expression @@ -1337,7 +1338,8 @@ pub macro Ord($item:item) { append_const_msg )] #[rustc_diagnostic_item = "PartialOrd"] -pub trait PartialOrd: PartialEq { +#[allow(multiple_supertrait_upcastable)] // FIXME(sized_hierarchy): remove this +pub trait PartialOrd: PartialEq + PointeeSized { /// This method returns an ordering between `self` and `other` values if one exists. /// /// # Examples @@ -1481,7 +1483,7 @@ pub trait PartialOrd: PartialEq { } } -fn default_chaining_impl( +fn default_chaining_impl( lhs: &T, rhs: &U, p: impl FnOnce(Ordering) -> bool, @@ -1803,6 +1805,7 @@ where mod impls { use crate::cmp::Ordering::{self, Equal, Greater, Less}; use crate::hint::unreachable_unchecked; + use crate::marker::PointeeSized; use crate::ops::ControlFlow::{self, Break, Continue}; macro_rules! partial_eq_impl { @@ -2015,7 +2018,7 @@ mod impls { // & pointers #[stable(feature = "rust1", since = "1.0.0")] - impl PartialEq<&B> for &A + impl PartialEq<&B> for &A where A: PartialEq, { @@ -2029,7 +2032,7 @@ mod impls { } } #[stable(feature = "rust1", since = "1.0.0")] - impl PartialOrd<&B> for &A + impl PartialOrd<&B> for &A where A: PartialOrd, { @@ -2071,7 +2074,7 @@ mod impls { } } #[stable(feature = "rust1", since = "1.0.0")] - impl Ord for &A + impl Ord for &A where A: Ord, { @@ -2081,12 +2084,12 @@ mod impls { } } #[stable(feature = "rust1", since = "1.0.0")] - impl Eq for &A where A: Eq {} + impl Eq for &A where A: Eq {} // &mut pointers #[stable(feature = "rust1", since = "1.0.0")] - impl PartialEq<&mut B> for &mut A + impl PartialEq<&mut B> for &mut A where A: PartialEq, { @@ -2100,7 +2103,7 @@ mod impls { } } #[stable(feature = "rust1", since = "1.0.0")] - impl PartialOrd<&mut B> for &mut A + impl PartialOrd<&mut B> for &mut A where A: PartialOrd, { @@ -2142,7 +2145,7 @@ mod impls { } } #[stable(feature = "rust1", since = "1.0.0")] - impl Ord for &mut A + impl Ord for &mut A where A: Ord, { @@ -2152,10 +2155,10 @@ mod impls { } } #[stable(feature = "rust1", since = "1.0.0")] - impl Eq for &mut A where A: Eq {} + impl Eq for &mut A where A: Eq {} #[stable(feature = "rust1", since = "1.0.0")] - impl PartialEq<&mut B> for &A + impl PartialEq<&mut B> for &A where A: PartialEq, { @@ -2170,7 +2173,7 @@ mod impls { } #[stable(feature = "rust1", since = "1.0.0")] - impl PartialEq<&B> for &mut A + impl PartialEq<&B> for &mut A where A: PartialEq, { diff --git a/library/core/src/convert/mod.rs b/library/core/src/convert/mod.rs index d86dc24fb5792..7132e712ec52f 100644 --- a/library/core/src/convert/mod.rs +++ b/library/core/src/convert/mod.rs @@ -38,6 +38,7 @@ use crate::error::Error; use crate::fmt; use crate::hash::{Hash, Hasher}; +use crate::marker::PointeeSized; mod num; @@ -215,7 +216,7 @@ pub const fn identity(x: T) -> T { /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[rustc_diagnostic_item = "AsRef"] -pub trait AsRef { +pub trait AsRef: PointeeSized { /// Converts this type into a shared reference of the (usually inferred) input type. #[stable(feature = "rust1", since = "1.0.0")] fn as_ref(&self) -> &T; @@ -366,7 +367,7 @@ pub trait AsRef { /// `&mut Vec`, for example, is the better choice (callers need to pass the correct type then). #[stable(feature = "rust1", since = "1.0.0")] #[rustc_diagnostic_item = "AsMut"] -pub trait AsMut { +pub trait AsMut: PointeeSized { /// Converts this type into a mutable reference of the (usually inferred) input type. #[stable(feature = "rust1", since = "1.0.0")] fn as_mut(&mut self) -> &mut T; @@ -701,7 +702,7 @@ pub trait TryFrom: Sized { // As lifts over & #[stable(feature = "rust1", since = "1.0.0")] -impl AsRef for &T +impl AsRef for &T where T: AsRef, { @@ -713,7 +714,7 @@ where // As lifts over &mut #[stable(feature = "rust1", since = "1.0.0")] -impl AsRef for &mut T +impl AsRef for &mut T where T: AsRef, { @@ -733,7 +734,7 @@ where // AsMut lifts over &mut #[stable(feature = "rust1", since = "1.0.0")] -impl AsMut for &mut T +impl AsMut for &mut T where T: AsMut, { diff --git a/library/core/src/fmt/mod.rs b/library/core/src/fmt/mod.rs index 145e581d1fb51..c20b3d4817f93 100644 --- a/library/core/src/fmt/mod.rs +++ b/library/core/src/fmt/mod.rs @@ -4,7 +4,7 @@ use crate::cell::{Cell, Ref, RefCell, RefMut, SyncUnsafeCell, UnsafeCell}; use crate::char::{EscapeDebugExtArgs, MAX_LEN_UTF8}; -use crate::marker::PhantomData; +use crate::marker::{PhantomData, PointeeSized}; use crate::num::fmt as numfmt; use crate::ops::Deref; use crate::{iter, result, str}; @@ -864,7 +864,7 @@ impl Display for Arguments<'_> { #[doc(alias = "{:?}")] #[rustc_diagnostic_item = "Debug"] #[rustc_trivial_field_reads] -pub trait Debug { +pub trait Debug: PointeeSized { #[doc = include_str!("fmt_trait_method_doc.md")] /// /// # Examples @@ -995,7 +995,7 @@ pub use macros::Debug; #[doc(alias = "{}")] #[rustc_diagnostic_item = "Display"] #[stable(feature = "rust1", since = "1.0.0")] -pub trait Display { +pub trait Display: PointeeSized { #[doc = include_str!("fmt_trait_method_doc.md")] /// /// # Examples @@ -1071,7 +1071,7 @@ pub trait Display { /// assert_eq!(format!("l as octal is: {l:#06o}"), "l as octal is: 0o0011"); /// ``` #[stable(feature = "rust1", since = "1.0.0")] -pub trait Octal { +pub trait Octal: PointeeSized { #[doc = include_str!("fmt_trait_method_doc.md")] #[stable(feature = "rust1", since = "1.0.0")] fn fmt(&self, f: &mut Formatter<'_>) -> Result; @@ -1130,7 +1130,7 @@ pub trait Octal { /// ); /// ``` #[stable(feature = "rust1", since = "1.0.0")] -pub trait Binary { +pub trait Binary: PointeeSized { #[doc = include_str!("fmt_trait_method_doc.md")] #[stable(feature = "rust1", since = "1.0.0")] fn fmt(&self, f: &mut Formatter<'_>) -> Result; @@ -1185,7 +1185,7 @@ pub trait Binary { /// assert_eq!(format!("l as hex is: {l:#010x}"), "l as hex is: 0x00000009"); /// ``` #[stable(feature = "rust1", since = "1.0.0")] -pub trait LowerHex { +pub trait LowerHex: PointeeSized { #[doc = include_str!("fmt_trait_method_doc.md")] #[stable(feature = "rust1", since = "1.0.0")] fn fmt(&self, f: &mut Formatter<'_>) -> Result; @@ -1240,7 +1240,7 @@ pub trait LowerHex { /// assert_eq!(format!("l as hex is: {l:#010X}"), "l as hex is: 0x7FFFFFFF"); /// ``` #[stable(feature = "rust1", since = "1.0.0")] -pub trait UpperHex { +pub trait UpperHex: PointeeSized { #[doc = include_str!("fmt_trait_method_doc.md")] #[stable(feature = "rust1", since = "1.0.0")] fn fmt(&self, f: &mut Formatter<'_>) -> Result; @@ -1299,7 +1299,7 @@ pub trait UpperHex { /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[rustc_diagnostic_item = "Pointer"] -pub trait Pointer { +pub trait Pointer: PointeeSized { #[doc = include_str!("fmt_trait_method_doc.md")] #[stable(feature = "rust1", since = "1.0.0")] fn fmt(&self, f: &mut Formatter<'_>) -> Result; @@ -1350,7 +1350,7 @@ pub trait Pointer { /// ); /// ``` #[stable(feature = "rust1", since = "1.0.0")] -pub trait LowerExp { +pub trait LowerExp: PointeeSized { #[doc = include_str!("fmt_trait_method_doc.md")] #[stable(feature = "rust1", since = "1.0.0")] fn fmt(&self, f: &mut Formatter<'_>) -> Result; @@ -1401,7 +1401,7 @@ pub trait LowerExp { /// ); /// ``` #[stable(feature = "rust1", since = "1.0.0")] -pub trait UpperExp { +pub trait UpperExp: PointeeSized { #[doc = include_str!("fmt_trait_method_doc.md")] #[stable(feature = "rust1", since = "1.0.0")] fn fmt(&self, f: &mut Formatter<'_>) -> Result; @@ -2646,11 +2646,11 @@ macro_rules! fmt_refs { ($($tr:ident),*) => { $( #[stable(feature = "rust1", since = "1.0.0")] - impl $tr for &T { + impl $tr for &T { fn fmt(&self, f: &mut Formatter<'_>) -> Result { $tr::fmt(&**self, f) } } #[stable(feature = "rust1", since = "1.0.0")] - impl $tr for &mut T { + impl $tr for &mut T { fn fmt(&self, f: &mut Formatter<'_>) -> Result { $tr::fmt(&**self, f) } } )* @@ -2772,7 +2772,7 @@ impl Display for char { } #[stable(feature = "rust1", since = "1.0.0")] -impl Pointer for *const T { +impl Pointer for *const T { fn fmt(&self, f: &mut Formatter<'_>) -> Result { if <::Metadata as core::unit::IsUnit>::is_unit() { pointer_fmt_inner(self.expose_provenance(), f) @@ -2817,21 +2817,21 @@ pub(crate) fn pointer_fmt_inner(ptr_addr: usize, f: &mut Formatter<'_>) -> Resul } #[stable(feature = "rust1", since = "1.0.0")] -impl Pointer for *mut T { +impl Pointer for *mut T { fn fmt(&self, f: &mut Formatter<'_>) -> Result { Pointer::fmt(&(*self as *const T), f) } } #[stable(feature = "rust1", since = "1.0.0")] -impl Pointer for &T { +impl Pointer for &T { fn fmt(&self, f: &mut Formatter<'_>) -> Result { Pointer::fmt(&(*self as *const T), f) } } #[stable(feature = "rust1", since = "1.0.0")] -impl Pointer for &mut T { +impl Pointer for &mut T { fn fmt(&self, f: &mut Formatter<'_>) -> Result { Pointer::fmt(&(&**self as *const T), f) } @@ -2840,13 +2840,13 @@ impl Pointer for &mut T { // Implementation of Display/Debug for various core types #[stable(feature = "rust1", since = "1.0.0")] -impl Debug for *const T { +impl Debug for *const T { fn fmt(&self, f: &mut Formatter<'_>) -> Result { Pointer::fmt(self, f) } } #[stable(feature = "rust1", since = "1.0.0")] -impl Debug for *mut T { +impl Debug for *mut T { fn fmt(&self, f: &mut Formatter<'_>) -> Result { Pointer::fmt(self, f) } diff --git a/library/core/src/hash/mod.rs b/library/core/src/hash/mod.rs index f7b874b26bb74..efda64791d403 100644 --- a/library/core/src/hash/mod.rs +++ b/library/core/src/hash/mod.rs @@ -183,7 +183,7 @@ mod sip; /// [impl]: ../../std/primitive.str.html#impl-Hash-for-str #[stable(feature = "rust1", since = "1.0.0")] #[rustc_diagnostic_item = "Hash"] -pub trait Hash { +pub trait Hash: marker::PointeeSized { /// Feeds this value into the given [`Hasher`]. /// /// # Examples @@ -941,7 +941,7 @@ mod impls { } #[stable(feature = "rust1", since = "1.0.0")] - impl Hash for &T { + impl Hash for &T { #[inline] fn hash(&self, state: &mut H) { (**self).hash(state); @@ -949,7 +949,7 @@ mod impls { } #[stable(feature = "rust1", since = "1.0.0")] - impl Hash for &mut T { + impl Hash for &mut T { #[inline] fn hash(&self, state: &mut H) { (**self).hash(state); @@ -957,7 +957,7 @@ mod impls { } #[stable(feature = "rust1", since = "1.0.0")] - impl Hash for *const T { + impl Hash for *const T { #[inline] fn hash(&self, state: &mut H) { let (address, metadata) = self.to_raw_parts(); @@ -967,7 +967,7 @@ mod impls { } #[stable(feature = "rust1", since = "1.0.0")] - impl Hash for *mut T { + impl Hash for *mut T { #[inline] fn hash(&self, state: &mut H) { let (address, metadata) = self.to_raw_parts(); diff --git a/library/core/src/intrinsics/bounds.rs b/library/core/src/intrinsics/bounds.rs index 046e191212cc0..353908598d40b 100644 --- a/library/core/src/intrinsics/bounds.rs +++ b/library/core/src/intrinsics/bounds.rs @@ -1,39 +1,41 @@ //! Various traits used to restrict intrinsics to not-completely-wrong types. +use crate::marker::PointeeSized; + /// Types with a built-in dereference operator in runtime MIR, /// aka references and raw pointers. /// /// # Safety /// Must actually *be* such a type. pub unsafe trait BuiltinDeref: Sized { - type Pointee: ?Sized; + type Pointee: PointeeSized; } -unsafe impl BuiltinDeref for &mut T { +unsafe impl BuiltinDeref for &mut T { type Pointee = T; } -unsafe impl BuiltinDeref for &T { +unsafe impl BuiltinDeref for &T { type Pointee = T; } -unsafe impl BuiltinDeref for *mut T { +unsafe impl BuiltinDeref for *mut T { type Pointee = T; } -unsafe impl BuiltinDeref for *const T { +unsafe impl BuiltinDeref for *const T { type Pointee = T; } -pub trait ChangePointee: BuiltinDeref { +pub trait ChangePointee: BuiltinDeref { type Output; } -impl<'a, T: ?Sized + 'a, U: ?Sized + 'a> ChangePointee for &'a mut T { +impl<'a, T: PointeeSized + 'a, U: PointeeSized + 'a> ChangePointee for &'a mut T { type Output = &'a mut U; } -impl<'a, T: ?Sized + 'a, U: ?Sized + 'a> ChangePointee for &'a T { +impl<'a, T: PointeeSized + 'a, U: PointeeSized + 'a> ChangePointee for &'a T { type Output = &'a U; } -impl ChangePointee for *mut T { +impl ChangePointee for *mut T { type Output = *mut U; } -impl ChangePointee for *const T { +impl ChangePointee for *const T { type Output = *const U; } diff --git a/library/core/src/intrinsics/mod.rs b/library/core/src/intrinsics/mod.rs index 0e87ea2c41a20..c0570d504d9a7 100644 --- a/library/core/src/intrinsics/mod.rs +++ b/library/core/src/intrinsics/mod.rs @@ -58,7 +58,7 @@ use safety::{ensures, requires}; #[cfg(kani)] use crate::kani; -use crate::marker::{ConstParamTy, DiscriminantKind, Tuple}; +use crate::marker::{ConstParamTy, DiscriminantKind, PointeeSized, Tuple}; use crate::ptr; #[cfg(kani)] use crate::ub_checks; @@ -2765,7 +2765,7 @@ where #[unstable(feature = "core_intrinsics", issue = "none")] #[rustc_intrinsic_const_stable_indirect] #[rustc_intrinsic] -pub const fn ptr_metadata + ?Sized, M>(ptr: *const P) -> M; +pub const fn ptr_metadata + PointeeSized, M>(ptr: *const P) -> M; /// This is an accidentally-stable alias to [`ptr::copy_nonoverlapping`]; use that instead. // Note (intentionally not in the doc comment): `ptr::copy_nonoverlapping` adds some extra diff --git a/library/core/src/marker.rs b/library/core/src/marker.rs index 9991b76cd0a3c..0cc5640941a32 100644 --- a/library/core/src/marker.rs +++ b/library/core/src/marker.rs @@ -93,15 +93,15 @@ pub unsafe auto trait Send { } #[stable(feature = "rust1", since = "1.0.0")] -impl !Send for *const T {} +impl !Send for *const T {} #[stable(feature = "rust1", since = "1.0.0")] -impl !Send for *mut T {} +impl !Send for *mut T {} // Most instances arise automatically, but this instance is needed to link up `T: Sync` with // `&T: Send` (and it also removes the unsound default instance `T Send` -> `&T: Send` that would // otherwise exist). #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl Send for &T {} +unsafe impl Send for &T {} /// Types with a constant size known at compile time. /// @@ -151,11 +151,48 @@ unsafe impl Send for &T {} #[rustc_specialization_trait] #[rustc_deny_explicit_impl] #[rustc_do_not_implement_via_object] +// `Sized` being coinductive, despite having supertraits, is okay as there are no user-written impls, +// and we know that the supertraits are always implemented if the subtrait is just by looking at +// the builtin impls. #[rustc_coinductive] -pub trait Sized { +pub trait Sized: MetaSized { // Empty. } +/// Types with a size that can be determined from pointer metadata. +#[unstable(feature = "sized_hierarchy", issue = "none")] +#[lang = "meta_sized"] +#[diagnostic::on_unimplemented( + message = "the size for values of type `{Self}` cannot be known", + label = "doesn't have a known size" +)] +#[fundamental] +#[rustc_specialization_trait] +#[rustc_deny_explicit_impl] +#[rustc_do_not_implement_via_object] +// `MetaSized` being coinductive, despite having supertraits, is okay for the same reasons as +// `Sized` above. +#[rustc_coinductive] +pub trait MetaSized: PointeeSized { + // Empty +} + +/// Types that may or may not have a size. +#[unstable(feature = "sized_hierarchy", issue = "none")] +#[lang = "pointee_sized"] +#[diagnostic::on_unimplemented( + message = "values of type `{Self}` may or may not have a size", + label = "may or may not have a known size" +)] +#[fundamental] +#[rustc_specialization_trait] +#[rustc_deny_explicit_impl] +#[rustc_do_not_implement_via_object] +#[rustc_coinductive] +pub trait PointeeSized { + // Empty +} + /// Types that can be "unsized" to a dynamically-sized type. /// /// For example, the sized array type `[i8; 2]` implements `Unsize<[i8]>` and @@ -192,7 +229,7 @@ pub trait Sized { #[lang = "unsize"] #[rustc_deny_explicit_impl] #[rustc_do_not_implement_via_object] -pub trait Unsize { +pub trait Unsize: PointeeSized { // Empty. } @@ -229,7 +266,7 @@ marker_impls! { (), {T, const N: usize} [T; N], {T} [T], - {T: ?Sized} &T, + {T: PointeeSized} &T, } /// Types whose values can be duplicated simply by copying bits. @@ -442,8 +479,8 @@ marker_impls! { isize, i8, i16, i32, i64, i128, f16, f32, f64, f128, bool, char, - {T: ?Sized} *const T, - {T: ?Sized} *mut T, + {T: PointeeSized} *const T, + {T: PointeeSized} *mut T, } @@ -452,7 +489,7 @@ impl Copy for ! {} /// Shared references can be copied, but mutable references *cannot*! #[stable(feature = "rust1", since = "1.0.0")] -impl Copy for &T {} +impl Copy for &T {} /// Marker trait for the types that are allowed in union fields and unsafe /// binder types. @@ -636,9 +673,9 @@ pub unsafe auto trait Sync { } #[stable(feature = "rust1", since = "1.0.0")] -impl !Sync for *const T {} +impl !Sync for *const T {} #[stable(feature = "rust1", since = "1.0.0")] -impl !Sync for *mut T {} +impl !Sync for *mut T {} /// Zero-sized type used to mark things that "act like" they own a `T`. /// @@ -775,57 +812,57 @@ impl !Sync for *mut T {} /// [drop check]: Drop#drop-check #[lang = "phantom_data"] #[stable(feature = "rust1", since = "1.0.0")] -pub struct PhantomData; +pub struct PhantomData; #[stable(feature = "rust1", since = "1.0.0")] -impl Hash for PhantomData { +impl Hash for PhantomData { #[inline] fn hash(&self, _: &mut H) {} } #[stable(feature = "rust1", since = "1.0.0")] -impl cmp::PartialEq for PhantomData { +impl cmp::PartialEq for PhantomData { fn eq(&self, _other: &PhantomData) -> bool { true } } #[stable(feature = "rust1", since = "1.0.0")] -impl cmp::Eq for PhantomData {} +impl cmp::Eq for PhantomData {} #[stable(feature = "rust1", since = "1.0.0")] -impl cmp::PartialOrd for PhantomData { +impl cmp::PartialOrd for PhantomData { fn partial_cmp(&self, _other: &PhantomData) -> Option { Option::Some(cmp::Ordering::Equal) } } #[stable(feature = "rust1", since = "1.0.0")] -impl cmp::Ord for PhantomData { +impl cmp::Ord for PhantomData { fn cmp(&self, _other: &PhantomData) -> cmp::Ordering { cmp::Ordering::Equal } } #[stable(feature = "rust1", since = "1.0.0")] -impl Copy for PhantomData {} +impl Copy for PhantomData {} #[stable(feature = "rust1", since = "1.0.0")] -impl Clone for PhantomData { +impl Clone for PhantomData { fn clone(&self) -> Self { Self } } #[stable(feature = "rust1", since = "1.0.0")] -impl Default for PhantomData { +impl Default for PhantomData { fn default() -> Self { Self } } #[unstable(feature = "structural_match", issue = "31434")] -impl StructuralPartialEq for PhantomData {} +impl StructuralPartialEq for PhantomData {} /// Compiler-internal trait used to indicate the type of enum discriminants. /// @@ -868,15 +905,15 @@ pub trait DiscriminantKind { pub unsafe auto trait Freeze {} #[unstable(feature = "freeze", issue = "121675")] -impl !Freeze for UnsafeCell {} +impl !Freeze for UnsafeCell {} marker_impls! { #[unstable(feature = "freeze", issue = "121675")] unsafe Freeze for - {T: ?Sized} PhantomData, - {T: ?Sized} *const T, - {T: ?Sized} *mut T, - {T: ?Sized} &T, - {T: ?Sized} &mut T, + {T: PointeeSized} PhantomData, + {T: PointeeSized} *const T, + {T: PointeeSized} *mut T, + {T: PointeeSized} &T, + {T: PointeeSized} &mut T, } /// Used to determine whether a type contains any `UnsafePinned` (or `PhantomPinned`) internally, @@ -991,15 +1028,15 @@ impl !UnsafeUnpin for PhantomPinned {} marker_impls! { #[stable(feature = "pin", since = "1.33.0")] Unpin for - {T: ?Sized} &T, - {T: ?Sized} &mut T, + {T: PointeeSized} &T, + {T: PointeeSized} &mut T, } marker_impls! { #[stable(feature = "pin_raw", since = "1.38.0")] Unpin for - {T: ?Sized} *const T, - {T: ?Sized} *mut T, + {T: PointeeSized} *const T, + {T: PointeeSized} *mut T, } /// A marker for types that can be dropped. diff --git a/library/core/src/ops/deref.rs b/library/core/src/ops/deref.rs index e74f5443ac2d8..9d9d18095bc64 100644 --- a/library/core/src/ops/deref.rs +++ b/library/core/src/ops/deref.rs @@ -1,3 +1,5 @@ +use crate::marker::PointeeSized; + /// Used for immutable dereferencing operations, like `*v`. /// /// In addition to being used for explicit dereferencing operations with the @@ -135,7 +137,7 @@ #[rustc_diagnostic_item = "Deref"] #[const_trait] #[rustc_const_unstable(feature = "const_deref", issue = "88955")] -pub trait Deref { +pub trait Deref: PointeeSized { /// The resulting type after dereferencing. #[stable(feature = "rust1", since = "1.0.0")] #[rustc_diagnostic_item = "deref_target"] @@ -267,7 +269,7 @@ impl const Deref for &mut T { #[stable(feature = "rust1", since = "1.0.0")] #[const_trait] #[rustc_const_unstable(feature = "const_deref", issue = "88955")] -pub trait DerefMut: ~const Deref { +pub trait DerefMut: ~const Deref + PointeeSized { /// Mutably dereferences the value. #[stable(feature = "rust1", since = "1.0.0")] #[rustc_diagnostic_item = "deref_mut_method"] @@ -293,7 +295,7 @@ impl const DerefMut for &mut T { /// unchanged. #[unstable(feature = "deref_pure_trait", issue = "87121")] #[lang = "deref_pure"] -pub unsafe trait DerefPure {} +pub unsafe trait DerefPure: PointeeSized {} #[unstable(feature = "deref_pure_trait", issue = "87121")] unsafe impl DerefPure for &T {} @@ -366,7 +368,7 @@ unsafe impl DerefPure for &mut T {} /// ``` #[lang = "receiver"] #[unstable(feature = "arbitrary_self_types", issue = "44874")] -pub trait Receiver { +pub trait Receiver: PointeeSized { /// The target type on which the method may be called. #[rustc_diagnostic_item = "receiver_target"] #[lang = "receiver_target"] @@ -393,12 +395,12 @@ where #[lang = "legacy_receiver"] #[unstable(feature = "legacy_receiver_trait", issue = "none")] #[doc(hidden)] -pub trait LegacyReceiver { +pub trait LegacyReceiver: PointeeSized { // Empty. } #[unstable(feature = "legacy_receiver_trait", issue = "none")] -impl LegacyReceiver for &T {} +impl LegacyReceiver for &T {} #[unstable(feature = "legacy_receiver_trait", issue = "none")] -impl LegacyReceiver for &mut T {} +impl LegacyReceiver for &mut T {} diff --git a/library/core/src/ops/unsize.rs b/library/core/src/ops/unsize.rs index d2a07197f6f6a..f0781ee01fd53 100644 --- a/library/core/src/ops/unsize.rs +++ b/library/core/src/ops/unsize.rs @@ -1,4 +1,4 @@ -use crate::marker::Unsize; +use crate::marker::{PointeeSized, Unsize}; /// Trait that indicates that this is a pointer or a wrapper for one, /// where unsizing can be performed on the pointee. @@ -33,40 +33,40 @@ use crate::marker::Unsize; /// [nomicon-coerce]: ../../nomicon/coercions.html #[unstable(feature = "coerce_unsized", issue = "18598")] #[lang = "coerce_unsized"] -pub trait CoerceUnsized { +pub trait CoerceUnsized { // Empty. } // &mut T -> &mut U #[unstable(feature = "coerce_unsized", issue = "18598")] -impl<'a, T: ?Sized + Unsize, U: ?Sized> CoerceUnsized<&'a mut U> for &'a mut T {} +impl<'a, T: PointeeSized + Unsize, U: PointeeSized> CoerceUnsized<&'a mut U> for &'a mut T {} // &mut T -> &U #[unstable(feature = "coerce_unsized", issue = "18598")] -impl<'a, 'b: 'a, T: ?Sized + Unsize, U: ?Sized> CoerceUnsized<&'a U> for &'b mut T {} +impl<'a, 'b: 'a, T: PointeeSized + Unsize, U: PointeeSized> CoerceUnsized<&'a U> for &'b mut T {} // &mut T -> *mut U #[unstable(feature = "coerce_unsized", issue = "18598")] -impl<'a, T: ?Sized + Unsize, U: ?Sized> CoerceUnsized<*mut U> for &'a mut T {} +impl<'a, T: PointeeSized + Unsize, U: PointeeSized> CoerceUnsized<*mut U> for &'a mut T {} // &mut T -> *const U #[unstable(feature = "coerce_unsized", issue = "18598")] -impl<'a, T: ?Sized + Unsize, U: ?Sized> CoerceUnsized<*const U> for &'a mut T {} +impl<'a, T: PointeeSized + Unsize, U: PointeeSized> CoerceUnsized<*const U> for &'a mut T {} // &T -> &U #[unstable(feature = "coerce_unsized", issue = "18598")] -impl<'a, 'b: 'a, T: ?Sized + Unsize, U: ?Sized> CoerceUnsized<&'a U> for &'b T {} +impl<'a, 'b: 'a, T: PointeeSized + Unsize, U: PointeeSized> CoerceUnsized<&'a U> for &'b T {} // &T -> *const U #[unstable(feature = "coerce_unsized", issue = "18598")] -impl<'a, T: ?Sized + Unsize, U: ?Sized> CoerceUnsized<*const U> for &'a T {} +impl<'a, T: PointeeSized + Unsize, U: PointeeSized> CoerceUnsized<*const U> for &'a T {} // *mut T -> *mut U #[unstable(feature = "coerce_unsized", issue = "18598")] -impl, U: ?Sized> CoerceUnsized<*mut U> for *mut T {} +impl, U: PointeeSized> CoerceUnsized<*mut U> for *mut T {} // *mut T -> *const U #[unstable(feature = "coerce_unsized", issue = "18598")] -impl, U: ?Sized> CoerceUnsized<*const U> for *mut T {} +impl, U: PointeeSized> CoerceUnsized<*const U> for *mut T {} // *const T -> *const U #[unstable(feature = "coerce_unsized", issue = "18598")] -impl, U: ?Sized> CoerceUnsized<*const U> for *const T {} +impl, U: PointeeSized> CoerceUnsized<*const U> for *const T {} /// `DispatchFromDyn` is used in the implementation of dyn-compatibility[^1] checks (specifically /// allowing arbitrary self types), to guarantee that a method's receiver type can be dispatched on. @@ -122,13 +122,13 @@ pub trait DispatchFromDyn { // &T -> &U #[unstable(feature = "dispatch_from_dyn", issue = "none")] -impl<'a, T: ?Sized + Unsize, U: ?Sized> DispatchFromDyn<&'a U> for &'a T {} +impl<'a, T: PointeeSized + Unsize, U: PointeeSized> DispatchFromDyn<&'a U> for &'a T {} // &mut T -> &mut U #[unstable(feature = "dispatch_from_dyn", issue = "none")] -impl<'a, T: ?Sized + Unsize, U: ?Sized> DispatchFromDyn<&'a mut U> for &'a mut T {} +impl<'a, T: PointeeSized + Unsize, U: PointeeSized> DispatchFromDyn<&'a mut U> for &'a mut T {} // *const T -> *const U #[unstable(feature = "dispatch_from_dyn", issue = "none")] -impl, U: ?Sized> DispatchFromDyn<*const U> for *const T {} +impl, U: PointeeSized> DispatchFromDyn<*const U> for *const T {} // *mut T -> *mut U #[unstable(feature = "dispatch_from_dyn", issue = "none")] -impl, U: ?Sized> DispatchFromDyn<*mut U> for *mut T {} +impl, U: PointeeSized> DispatchFromDyn<*mut U> for *mut T {} diff --git a/library/core/src/ptr/const_ptr.rs b/library/core/src/ptr/const_ptr.rs index e17e85fa9dee5..690aadc06125a 100644 --- a/library/core/src/ptr/const_ptr.rs +++ b/library/core/src/ptr/const_ptr.rs @@ -8,7 +8,7 @@ use crate::kani; use crate::mem::{self, SizedTypeProperties}; use crate::slice::{self, SliceIndex}; -impl *const T { +impl *const T { #[doc = include_str!("docs/is_null.md")] /// /// # Examples @@ -133,7 +133,7 @@ impl *const T { #[inline] pub const fn with_metadata_of(self, meta: *const U) -> *const U where - U: ?Sized, + U: PointeeSized, { from_raw_parts::(self as *const (), metadata(meta)) } @@ -469,15 +469,15 @@ impl *const T { #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[track_caller] - #[requires( - count == 0 || - ( - (core::mem::size_of_val_raw(self) > 0) && - (self.addr() as isize).checked_add(count).is_some()) && - (core::ub_checks::same_allocation(self, self.wrapping_byte_offset(count)) - ) - )] - #[ensures(|result| core::mem::size_of_val_raw(self) == 0 || core::ub_checks::same_allocation(self, *result))] + // #[requires( + // count == 0 || + // ( + // (core::mem::size_of_val_raw(self) > 0) && + // (self.addr() as isize).checked_add(count).is_some()) && + // (core::ub_checks::same_allocation(self, self.wrapping_byte_offset(count)) + // ) + // )] + // #[ensures(|result| core::mem::size_of_val_raw(self) == 0 || core::ub_checks::same_allocation(self, *result))] pub const unsafe fn byte_offset(self, count: isize) -> Self { // SAFETY: the caller must uphold the safety contract for `offset`. unsafe { self.cast::().offset(count).with_metadata_of(self) } @@ -721,16 +721,16 @@ impl *const T { #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces - #[requires( - (mem::size_of_val_raw(self) != 0) && - // Ensures subtracting `origin` from `self` doesn't overflow - (self.addr() as isize).checked_sub(origin.addr() as isize).is_some() && - // Ensure both pointers are in the same allocation or are pointing to the same address - (self.addr() == origin.addr() || - core::ub_checks::same_allocation(self as *const u8, origin as *const u8)) - )] - // The result should equal the distance in terms of bytes - #[ensures(|result| *result == (self.addr() as isize - origin.addr() as isize))] + // #[requires( + // (mem::size_of_val_raw(self) != 0) && + // // Ensures subtracting `origin` from `self` doesn't overflow + // (self.addr() as isize).checked_sub(origin.addr() as isize).is_some() && + // // Ensure both pointers are in the same allocation or are pointing to the same address + // (self.addr() == origin.addr() || + // core::ub_checks::same_allocation(self as *const u8, origin as *const u8)) + // )] + // // The result should equal the distance in terms of bytes + // #[ensures(|result| *result == (self.addr() as isize - origin.addr() as isize))] pub const unsafe fn byte_offset_from(self, origin: *const U) -> isize { // SAFETY: the caller must uphold the safety contract for `offset_from`. unsafe { self.cast::().offset_from(origin.cast::()) } @@ -994,20 +994,21 @@ impl *const T { #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[track_caller] - #[requires( - // If count is zero, any pointer is valid including null pointer. - (count == 0) || - // Else if count is not zero, then ensure that adding `count` doesn't cause - // overflow and that both pointers `self` and the result are in the same - // allocation - ( - (count <= isize::MAX as usize) && - (core::mem::size_of_val_raw(self) > 0) && - ((self.addr() as isize).checked_add(count as isize).is_some()) && - (core::ub_checks::same_allocation(self, self.wrapping_byte_add(count))) - ) - )] - #[ensures(|result| core::mem::size_of_val_raw(self) == 0 || core::ub_checks::same_allocation(self, *result))] + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // #[requires( + // // If count is zero, any pointer is valid including null pointer. + // (count == 0) || + // // Else if count is not zero, then ensure that adding `count` doesn't cause + // // overflow and that both pointers `self` and the result are in the same + // // allocation + // ( + // (count <= isize::MAX as usize) && + // (core::mem::size_of_val_raw(self) > 0) && + // ((self.addr() as isize).checked_add(count as isize).is_some()) && + // (core::ub_checks::same_allocation(self, self.wrapping_byte_add(count))) + // ) + // )] + // #[ensures(|result| core::mem::size_of_val_raw(self) == 0 || core::ub_checks::same_allocation(self, *result))] pub const unsafe fn byte_add(self, count: usize) -> Self { // SAFETY: the caller must uphold the safety contract for `add`. unsafe { self.cast::().add(count).with_metadata_of(self) } @@ -1137,20 +1138,21 @@ impl *const T { #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[track_caller] - #[requires( - // If count is zero, any pointer is valid including null pointer. - (count == 0) || - // Else if count is not zero, then ensure that subtracting `count` doesn't - // cause overflow and that both pointers `self` and the result are in the - // same allocation. - ( - (count <= isize::MAX as usize) && - (core::mem::size_of_val_raw(self) > 0) && - ((self.addr() as isize).checked_sub(count as isize).is_some()) && - (core::ub_checks::same_allocation(self, self.wrapping_byte_sub(count))) - ) - )] - #[ensures(|result| core::mem::size_of_val_raw(self) == 0 || core::ub_checks::same_allocation(self, *result))] + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // #[requires( + // // If count is zero, any pointer is valid including null pointer. + // (count == 0) || + // // Else if count is not zero, then ensure that subtracting `count` doesn't + // // cause overflow and that both pointers `self` and the result are in the + // // same allocation. + // ( + // (count <= isize::MAX as usize) && + // (core::mem::size_of_val_raw(self) > 0) && + // ((self.addr() as isize).checked_sub(count as isize).is_some()) && + // (core::ub_checks::same_allocation(self, self.wrapping_byte_sub(count))) + // ) + // )] + // #[ensures(|result| core::mem::size_of_val_raw(self) == 0 || core::ub_checks::same_allocation(self, *result))] pub const unsafe fn byte_sub(self, count: usize) -> Self { // SAFETY: the caller must uphold the safety contract for `sub`. unsafe { self.cast::().sub(count).with_metadata_of(self) } @@ -1695,7 +1697,7 @@ impl *const [T; N] { /// Pointer equality is by address, as produced by the [`<*const T>::addr`](pointer::addr) method. #[stable(feature = "rust1", since = "1.0.0")] -impl PartialEq for *const T { +impl PartialEq for *const T { #[inline] #[allow(ambiguous_wide_pointer_comparisons)] fn eq(&self, other: &*const T) -> bool { @@ -1705,11 +1707,11 @@ impl PartialEq for *const T { /// Pointer equality is an equivalence relation. #[stable(feature = "rust1", since = "1.0.0")] -impl Eq for *const T {} +impl Eq for *const T {} /// Pointer comparison is by address, as produced by the `[`<*const T>::addr`](pointer::addr)` method. #[stable(feature = "rust1", since = "1.0.0")] -impl Ord for *const T { +impl Ord for *const T { #[inline] #[allow(ambiguous_wide_pointer_comparisons)] fn cmp(&self, other: &*const T) -> Ordering { @@ -1725,7 +1727,7 @@ impl Ord for *const T { /// Pointer comparison is by address, as produced by the `[`<*const T>::addr`](pointer::addr)` method. #[stable(feature = "rust1", since = "1.0.0")] -impl PartialOrd for *const T { +impl PartialOrd for *const T { #[inline] #[allow(ambiguous_wide_pointer_comparisons)] fn partial_cmp(&self, other: &*const T) -> Option { @@ -2211,26 +2213,28 @@ mod verify { check_const_offset_from_tuple_4_arr ); - #[kani::proof_for_contract(<*const ()>::byte_offset)] - pub fn check_const_byte_offset_unit_invalid_count() { - let val = (); - let ptr: *const () = &val; - let count: isize = kani::any_where(|&x| x != (mem::size_of::<()>() as isize)); - unsafe { - ptr.byte_offset(count); - } - } - - #[kani::proof_for_contract(<*const ()>::byte_offset)] - pub fn check_const_byte_offset_cast_unit() { - let mut generator = PointerGenerator::::new(); - let ptr: *const u8 = generator.any_in_bounds().ptr; - let ptr1: *const () = ptr as *const (); - let count: isize = kani::any(); - unsafe { - ptr1.byte_offset(count); - } - } + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // #[kani::proof_for_contract(<*const ()>::byte_offset)] + // pub fn check_const_byte_offset_unit_invalid_count() { + // let val = (); + // let ptr: *const () = &val; + // let count: isize = kani::any_where(|&x| x != (mem::size_of::<()>() as isize)); + // unsafe { + // ptr.byte_offset(count); + // } + // } + + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // #[kani::proof_for_contract(<*const ()>::byte_offset)] + // pub fn check_const_byte_offset_cast_unit() { + // let mut generator = PointerGenerator::::new(); + // let ptr: *const u8 = generator.any_in_bounds().ptr; + // let ptr1: *const () = ptr as *const (); + // let count: isize = kani::any(); + // unsafe { + // ptr1.byte_offset(count); + // } + // } // generate proof for contracts of byte_add, byte_sub and byte_offset to verify // unit pointee type @@ -2263,9 +2267,10 @@ mod verify { }; } - gen_const_byte_arith_harness_for_unit!(byte_add, check_const_byte_add_unit); - gen_const_byte_arith_harness_for_unit!(byte_sub, check_const_byte_sub_unit); - gen_const_byte_arith_harness_for_unit!(byte_offset, check_const_byte_offset_unit); + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // gen_const_byte_arith_harness_for_unit!(byte_add, check_const_byte_add_unit); + // gen_const_byte_arith_harness_for_unit!(byte_sub, check_const_byte_sub_unit); + // gen_const_byte_arith_harness_for_unit!(byte_offset, check_const_byte_offset_unit); // generate proof for contracts for byte_add, byte_sub and byte_offset // - `$type`: pointee type @@ -2320,68 +2325,71 @@ mod verify { }; } - gen_const_byte_arith_harness!(i8, byte_add, check_const_byte_add_i8); - gen_const_byte_arith_harness!(i16, byte_add, check_const_byte_add_i16); - gen_const_byte_arith_harness!(i32, byte_add, check_const_byte_add_i32); - gen_const_byte_arith_harness!(i64, byte_add, check_const_byte_add_i64); - gen_const_byte_arith_harness!(i128, byte_add, check_const_byte_add_i128); - gen_const_byte_arith_harness!(isize, byte_add, check_const_byte_add_isize); - gen_const_byte_arith_harness!(u8, byte_add, check_const_byte_add_u8); - gen_const_byte_arith_harness!(u16, byte_add, check_const_byte_add_u16); - gen_const_byte_arith_harness!(u32, byte_add, check_const_byte_add_u32); - gen_const_byte_arith_harness!(u64, byte_add, check_const_byte_add_u64); - gen_const_byte_arith_harness!(u128, byte_add, check_const_byte_add_u128); - gen_const_byte_arith_harness!(usize, byte_add, check_const_byte_add_usize); - gen_const_byte_arith_harness!((i8, i8), byte_add, check_const_byte_add_tuple_1); - gen_const_byte_arith_harness!((f64, bool), byte_add, check_const_byte_add_tuple_2); - gen_const_byte_arith_harness!((i32, f64, bool), byte_add, check_const_byte_add_tuple_3); - gen_const_byte_arith_harness!( - (i8, u16, i32, u64, isize), - byte_add, - check_const_byte_add_tuple_4 - ); - - gen_const_byte_arith_harness!(i8, byte_sub, check_const_byte_sub_i8); - gen_const_byte_arith_harness!(i16, byte_sub, check_const_byte_sub_i16); - gen_const_byte_arith_harness!(i32, byte_sub, check_const_byte_sub_i32); - gen_const_byte_arith_harness!(i64, byte_sub, check_const_byte_sub_i64); - gen_const_byte_arith_harness!(i128, byte_sub, check_const_byte_sub_i128); - gen_const_byte_arith_harness!(isize, byte_sub, check_const_byte_sub_isize); - gen_const_byte_arith_harness!(u8, byte_sub, check_const_byte_sub_u8); - gen_const_byte_arith_harness!(u16, byte_sub, check_const_byte_sub_u16); - gen_const_byte_arith_harness!(u32, byte_sub, check_const_byte_sub_u32); - gen_const_byte_arith_harness!(u64, byte_sub, check_const_byte_sub_u64); - gen_const_byte_arith_harness!(u128, byte_sub, check_const_byte_sub_u128); - gen_const_byte_arith_harness!(usize, byte_sub, check_const_byte_sub_usize); - gen_const_byte_arith_harness!((i8, i8), byte_sub, check_const_byte_sub_tuple_1); - gen_const_byte_arith_harness!((f64, bool), byte_sub, check_const_byte_sub_tuple_2); - gen_const_byte_arith_harness!((i32, f64, bool), byte_sub, check_const_byte_sub_tuple_3); - gen_const_byte_arith_harness!( - (i8, u16, i32, u64, isize), - byte_sub, - check_const_byte_sub_tuple_4 - ); - - gen_const_byte_arith_harness!(i8, byte_offset, check_const_byte_offset_i8); - gen_const_byte_arith_harness!(i16, byte_offset, check_const_byte_offset_i16); - gen_const_byte_arith_harness!(i32, byte_offset, check_const_byte_offset_i32); - gen_const_byte_arith_harness!(i64, byte_offset, check_const_byte_offset_i64); - gen_const_byte_arith_harness!(i128, byte_offset, check_const_byte_offset_i128); - gen_const_byte_arith_harness!(isize, byte_offset, check_const_byte_offset_isize); - gen_const_byte_arith_harness!(u8, byte_offset, check_const_byte_offset_u8); - gen_const_byte_arith_harness!(u16, byte_offset, check_const_byte_offset_u16); - gen_const_byte_arith_harness!(u32, byte_offset, check_const_byte_offset_u32); - gen_const_byte_arith_harness!(u64, byte_offset, check_const_byte_offset_u64); - gen_const_byte_arith_harness!(u128, byte_offset, check_const_byte_offset_u128); - gen_const_byte_arith_harness!(usize, byte_offset, check_const_byte_offset_usize); - gen_const_byte_arith_harness!((i8, i8), byte_offset, check_const_byte_offset_tuple_1); - gen_const_byte_arith_harness!((f64, bool), byte_offset, check_const_byte_offset_tuple_2); - gen_const_byte_arith_harness!((i32, f64, bool), byte_offset, check_const_byte_offset_tuple_3); - gen_const_byte_arith_harness!( - (i8, u16, i32, u64, isize), - byte_offset, - check_const_byte_offset_tuple_4 - ); + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // gen_const_byte_arith_harness!(i8, byte_add, check_const_byte_add_i8); + // gen_const_byte_arith_harness!(i16, byte_add, check_const_byte_add_i16); + // gen_const_byte_arith_harness!(i32, byte_add, check_const_byte_add_i32); + // gen_const_byte_arith_harness!(i64, byte_add, check_const_byte_add_i64); + // gen_const_byte_arith_harness!(i128, byte_add, check_const_byte_add_i128); + // gen_const_byte_arith_harness!(isize, byte_add, check_const_byte_add_isize); + // gen_const_byte_arith_harness!(u8, byte_add, check_const_byte_add_u8); + // gen_const_byte_arith_harness!(u16, byte_add, check_const_byte_add_u16); + // gen_const_byte_arith_harness!(u32, byte_add, check_const_byte_add_u32); + // gen_const_byte_arith_harness!(u64, byte_add, check_const_byte_add_u64); + // gen_const_byte_arith_harness!(u128, byte_add, check_const_byte_add_u128); + // gen_const_byte_arith_harness!(usize, byte_add, check_const_byte_add_usize); + // gen_const_byte_arith_harness!((i8, i8), byte_add, check_const_byte_add_tuple_1); + // gen_const_byte_arith_harness!((f64, bool), byte_add, check_const_byte_add_tuple_2); + // gen_const_byte_arith_harness!((i32, f64, bool), byte_add, check_const_byte_add_tuple_3); + // gen_const_byte_arith_harness!( + // (i8, u16, i32, u64, isize), + // byte_add, + // check_const_byte_add_tuple_4 + // ); + + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // gen_const_byte_arith_harness!(i8, byte_sub, check_const_byte_sub_i8); + // gen_const_byte_arith_harness!(i16, byte_sub, check_const_byte_sub_i16); + // gen_const_byte_arith_harness!(i32, byte_sub, check_const_byte_sub_i32); + // gen_const_byte_arith_harness!(i64, byte_sub, check_const_byte_sub_i64); + // gen_const_byte_arith_harness!(i128, byte_sub, check_const_byte_sub_i128); + // gen_const_byte_arith_harness!(isize, byte_sub, check_const_byte_sub_isize); + // gen_const_byte_arith_harness!(u8, byte_sub, check_const_byte_sub_u8); + // gen_const_byte_arith_harness!(u16, byte_sub, check_const_byte_sub_u16); + // gen_const_byte_arith_harness!(u32, byte_sub, check_const_byte_sub_u32); + // gen_const_byte_arith_harness!(u64, byte_sub, check_const_byte_sub_u64); + // gen_const_byte_arith_harness!(u128, byte_sub, check_const_byte_sub_u128); + // gen_const_byte_arith_harness!(usize, byte_sub, check_const_byte_sub_usize); + // gen_const_byte_arith_harness!((i8, i8), byte_sub, check_const_byte_sub_tuple_1); + // gen_const_byte_arith_harness!((f64, bool), byte_sub, check_const_byte_sub_tuple_2); + // gen_const_byte_arith_harness!((i32, f64, bool), byte_sub, check_const_byte_sub_tuple_3); + // gen_const_byte_arith_harness!( + // (i8, u16, i32, u64, isize), + // byte_sub, + // check_const_byte_sub_tuple_4 + // ); + + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // gen_const_byte_arith_harness!(i8, byte_offset, check_const_byte_offset_i8); + // gen_const_byte_arith_harness!(i16, byte_offset, check_const_byte_offset_i16); + // gen_const_byte_arith_harness!(i32, byte_offset, check_const_byte_offset_i32); + // gen_const_byte_arith_harness!(i64, byte_offset, check_const_byte_offset_i64); + // gen_const_byte_arith_harness!(i128, byte_offset, check_const_byte_offset_i128); + // gen_const_byte_arith_harness!(isize, byte_offset, check_const_byte_offset_isize); + // gen_const_byte_arith_harness!(u8, byte_offset, check_const_byte_offset_u8); + // gen_const_byte_arith_harness!(u16, byte_offset, check_const_byte_offset_u16); + // gen_const_byte_arith_harness!(u32, byte_offset, check_const_byte_offset_u32); + // gen_const_byte_arith_harness!(u64, byte_offset, check_const_byte_offset_u64); + // gen_const_byte_arith_harness!(u128, byte_offset, check_const_byte_offset_u128); + // gen_const_byte_arith_harness!(usize, byte_offset, check_const_byte_offset_usize); + // gen_const_byte_arith_harness!((i8, i8), byte_offset, check_const_byte_offset_tuple_1); + // gen_const_byte_arith_harness!((f64, bool), byte_offset, check_const_byte_offset_tuple_2); + // gen_const_byte_arith_harness!((i32, f64, bool), byte_offset, check_const_byte_offset_tuple_3); + // gen_const_byte_arith_harness!( + // (i8, u16, i32, u64, isize), + // byte_offset, + // check_const_byte_offset_tuple_4 + // ); macro_rules! gen_const_byte_arith_harness_for_slice { ($type:ty, byte_offset, $proof_name:ident) => { @@ -2416,52 +2424,55 @@ mod verify { }; } - gen_const_byte_arith_harness_for_slice!(i8, byte_add, check_const_byte_add_i8_slice); - gen_const_byte_arith_harness_for_slice!(i16, byte_add, check_const_byte_add_i16_slice); - gen_const_byte_arith_harness_for_slice!(i32, byte_add, check_const_byte_add_i32_slice); - gen_const_byte_arith_harness_for_slice!(i64, byte_add, check_const_byte_add_i64_slice); - gen_const_byte_arith_harness_for_slice!(i128, byte_add, check_const_byte_add_i128_slice); - gen_const_byte_arith_harness_for_slice!(isize, byte_add, check_const_byte_add_isize_slice); - gen_const_byte_arith_harness_for_slice!(u8, byte_add, check_const_byte_add_u8_slice); - gen_const_byte_arith_harness_for_slice!(u16, byte_add, check_const_byte_add_u16_slice); - gen_const_byte_arith_harness_for_slice!(u32, byte_add, check_const_byte_add_u32_slice); - gen_const_byte_arith_harness_for_slice!(u64, byte_add, check_const_byte_add_u64_slice); - gen_const_byte_arith_harness_for_slice!(u128, byte_add, check_const_byte_add_u128_slice); - gen_const_byte_arith_harness_for_slice!(usize, byte_add, check_const_byte_add_usize_slice); - - gen_const_byte_arith_harness_for_slice!(i8, byte_sub, check_const_byte_sub_i8_slice); - gen_const_byte_arith_harness_for_slice!(i16, byte_sub, check_const_byte_sub_i16_slice); - gen_const_byte_arith_harness_for_slice!(i32, byte_sub, check_const_byte_sub_i32_slice); - gen_const_byte_arith_harness_for_slice!(i64, byte_sub, check_const_byte_sub_i64_slice); - gen_const_byte_arith_harness_for_slice!(i128, byte_sub, check_const_byte_sub_i128_slice); - gen_const_byte_arith_harness_for_slice!(isize, byte_sub, check_const_byte_sub_isize_slice); - gen_const_byte_arith_harness_for_slice!(u8, byte_sub, check_const_byte_sub_u8_slice); - gen_const_byte_arith_harness_for_slice!(u16, byte_sub, check_const_byte_sub_u16_slice); - gen_const_byte_arith_harness_for_slice!(u32, byte_sub, check_const_byte_sub_u32_slice); - gen_const_byte_arith_harness_for_slice!(u64, byte_sub, check_const_byte_sub_u64_slice); - gen_const_byte_arith_harness_for_slice!(u128, byte_sub, check_const_byte_sub_u128_slice); - gen_const_byte_arith_harness_for_slice!(usize, byte_sub, check_const_byte_sub_usize_slice); - - gen_const_byte_arith_harness_for_slice!(i8, byte_offset, check_const_byte_offset_i8_slice); - gen_const_byte_arith_harness_for_slice!(i16, byte_offset, check_const_byte_offset_i16_slice); - gen_const_byte_arith_harness_for_slice!(i32, byte_offset, check_const_byte_offset_i32_slice); - gen_const_byte_arith_harness_for_slice!(i64, byte_offset, check_const_byte_offset_i64_slice); - gen_const_byte_arith_harness_for_slice!(i128, byte_offset, check_const_byte_offset_i128_slice); - gen_const_byte_arith_harness_for_slice!( - isize, - byte_offset, - check_const_byte_offset_isize_slice - ); - gen_const_byte_arith_harness_for_slice!(u8, byte_offset, check_const_byte_offset_u8_slice); - gen_const_byte_arith_harness_for_slice!(u16, byte_offset, check_const_byte_offset_u16_slice); - gen_const_byte_arith_harness_for_slice!(u32, byte_offset, check_const_byte_offset_u32_slice); - gen_const_byte_arith_harness_for_slice!(u64, byte_offset, check_const_byte_offset_u64_slice); - gen_const_byte_arith_harness_for_slice!(u128, byte_offset, check_const_byte_offset_u128_slice); - gen_const_byte_arith_harness_for_slice!( - usize, - byte_offset, - check_const_byte_offset_usize_slice - ); + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // gen_const_byte_arith_harness_for_slice!(i8, byte_add, check_const_byte_add_i8_slice); + // gen_const_byte_arith_harness_for_slice!(i16, byte_add, check_const_byte_add_i16_slice); + // gen_const_byte_arith_harness_for_slice!(i32, byte_add, check_const_byte_add_i32_slice); + // gen_const_byte_arith_harness_for_slice!(i64, byte_add, check_const_byte_add_i64_slice); + // gen_const_byte_arith_harness_for_slice!(i128, byte_add, check_const_byte_add_i128_slice); + // gen_const_byte_arith_harness_for_slice!(isize, byte_add, check_const_byte_add_isize_slice); + // gen_const_byte_arith_harness_for_slice!(u8, byte_add, check_const_byte_add_u8_slice); + // gen_const_byte_arith_harness_for_slice!(u16, byte_add, check_const_byte_add_u16_slice); + // gen_const_byte_arith_harness_for_slice!(u32, byte_add, check_const_byte_add_u32_slice); + // gen_const_byte_arith_harness_for_slice!(u64, byte_add, check_const_byte_add_u64_slice); + // gen_const_byte_arith_harness_for_slice!(u128, byte_add, check_const_byte_add_u128_slice); + // gen_const_byte_arith_harness_for_slice!(usize, byte_add, check_const_byte_add_usize_slice); + + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // gen_const_byte_arith_harness_for_slice!(i8, byte_sub, check_const_byte_sub_i8_slice); + // gen_const_byte_arith_harness_for_slice!(i16, byte_sub, check_const_byte_sub_i16_slice); + // gen_const_byte_arith_harness_for_slice!(i32, byte_sub, check_const_byte_sub_i32_slice); + // gen_const_byte_arith_harness_for_slice!(i64, byte_sub, check_const_byte_sub_i64_slice); + // gen_const_byte_arith_harness_for_slice!(i128, byte_sub, check_const_byte_sub_i128_slice); + // gen_const_byte_arith_harness_for_slice!(isize, byte_sub, check_const_byte_sub_isize_slice); + // gen_const_byte_arith_harness_for_slice!(u8, byte_sub, check_const_byte_sub_u8_slice); + // gen_const_byte_arith_harness_for_slice!(u16, byte_sub, check_const_byte_sub_u16_slice); + // gen_const_byte_arith_harness_for_slice!(u32, byte_sub, check_const_byte_sub_u32_slice); + // gen_const_byte_arith_harness_for_slice!(u64, byte_sub, check_const_byte_sub_u64_slice); + // gen_const_byte_arith_harness_for_slice!(u128, byte_sub, check_const_byte_sub_u128_slice); + // gen_const_byte_arith_harness_for_slice!(usize, byte_sub, check_const_byte_sub_usize_slice); + + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // gen_const_byte_arith_harness_for_slice!(i8, byte_offset, check_const_byte_offset_i8_slice); + // gen_const_byte_arith_harness_for_slice!(i16, byte_offset, check_const_byte_offset_i16_slice); + // gen_const_byte_arith_harness_for_slice!(i32, byte_offset, check_const_byte_offset_i32_slice); + // gen_const_byte_arith_harness_for_slice!(i64, byte_offset, check_const_byte_offset_i64_slice); + // gen_const_byte_arith_harness_for_slice!(i128, byte_offset, check_const_byte_offset_i128_slice); + // gen_const_byte_arith_harness_for_slice!( + // isize, + // byte_offset, + // check_const_byte_offset_isize_slice + // ); + // gen_const_byte_arith_harness_for_slice!(u8, byte_offset, check_const_byte_offset_u8_slice); + // gen_const_byte_arith_harness_for_slice!(u16, byte_offset, check_const_byte_offset_u16_slice); + // gen_const_byte_arith_harness_for_slice!(u32, byte_offset, check_const_byte_offset_u32_slice); + // gen_const_byte_arith_harness_for_slice!(u64, byte_offset, check_const_byte_offset_u64_slice); + // gen_const_byte_arith_harness_for_slice!(u128, byte_offset, check_const_byte_offset_u128_slice); + // gen_const_byte_arith_harness_for_slice!( + // usize, + // byte_offset, + // check_const_byte_offset_usize_slice + // ); // Trait used exclusively for implementing proofs for contracts for `dyn Trait` type. trait TestTrait {} @@ -2518,20 +2529,22 @@ mod verify { }; } - gen_const_byte_arith_harness_for_dyn!(byte_add, check_const_byte_add_dyn); - gen_const_byte_arith_harness_for_dyn!(byte_sub, check_const_byte_sub_dyn); - gen_const_byte_arith_harness_for_dyn!(byte_offset, check_const_byte_offset_dyn); + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // gen_const_byte_arith_harness_for_dyn!(byte_add, check_const_byte_add_dyn); + // gen_const_byte_arith_harness_for_dyn!(byte_sub, check_const_byte_sub_dyn); + // gen_const_byte_arith_harness_for_dyn!(byte_offset, check_const_byte_offset_dyn); + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy // Proof for contract of byte_offset_from to verify unit type - #[kani::proof_for_contract(<*const ()>::byte_offset_from)] - pub fn check_const_byte_offset_from_unit() { - let val: () = (); - let src_ptr: *const () = &val; - let dest_ptr: *const () = &val; - unsafe { - dest_ptr.byte_offset_from(src_ptr); - } - } + // #[kani::proof_for_contract(<*const ()>::byte_offset_from)] + // pub fn check_const_byte_offset_from_unit() { + // let val: () = (); + // let src_ptr: *const () = &val; + // let dest_ptr: *const () = &val; + // unsafe { + // dest_ptr.byte_offset_from(src_ptr); + // } + // } // generate proofs for contracts for byte_offset_from to verify int and composite // types @@ -2578,88 +2591,91 @@ mod verify { }; } - generate_const_byte_offset_from_harness!( - u8, - check_const_byte_offset_from_u8, - check_const_byte_offset_from_u8_arr - ); - generate_const_byte_offset_from_harness!( - u16, - check_const_byte_offset_from_u16, - check_const_byte_offset_from_u16_arr - ); - generate_const_byte_offset_from_harness!( - u32, - check_const_byte_offset_from_u32, - check_const_byte_offset_from_u32_arr - ); - generate_const_byte_offset_from_harness!( - u64, - check_const_byte_offset_from_u64, - check_const_byte_offset_from_u64_arr - ); - generate_const_byte_offset_from_harness!( - u128, - check_const_byte_offset_from_u128, - check_const_byte_offset_from_u128_arr - ); - generate_const_byte_offset_from_harness!( - usize, - check_const_byte_offset_from_usize, - check_const_byte_offset_from_usize_arr - ); - - generate_const_byte_offset_from_harness!( - i8, - check_const_byte_offset_from_i8, - check_const_byte_offset_from_i8_arr - ); - generate_const_byte_offset_from_harness!( - i16, - check_const_byte_offset_from_i16, - check_const_byte_offset_from_i16_arr - ); - generate_const_byte_offset_from_harness!( - i32, - check_const_byte_offset_from_i32, - check_const_byte_offset_from_i32_arr - ); - generate_const_byte_offset_from_harness!( - i64, - check_const_byte_offset_from_i64, - check_const_byte_offset_from_i64_arr - ); - generate_const_byte_offset_from_harness!( - i128, - check_const_byte_offset_from_i128, - check_const_byte_offset_from_i128_arr - ); - generate_const_byte_offset_from_harness!( - isize, - check_const_byte_offset_from_isize, - check_const_byte_offset_from_isize_arr - ); - - generate_const_byte_offset_from_harness!( - (i8, i8), - check_const_byte_offset_from_tuple_1, - check_const_byte_offset_from_tuple_1_arr - ); - generate_const_byte_offset_from_harness!( - (f64, bool), - check_const_byte_offset_from_tuple_2, - check_const_byte_offset_from_tuple_2_arr - ); - generate_const_byte_offset_from_harness!( - (u32, i16, f32), - check_const_byte_offset_from_tuple_3, - check_const_byte_offset_from_tuple_3_arr - ); - generate_const_byte_offset_from_harness!( - ((), bool, u8, u16, i32, f64, i128, usize), - check_const_byte_offset_from_tuple_4, - check_const_byte_offset_from_tuple_4_arr - ); + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // generate_const_byte_offset_from_harness!( + // u8, + // check_const_byte_offset_from_u8, + // check_const_byte_offset_from_u8_arr + // ); + // generate_const_byte_offset_from_harness!( + // u16, + // check_const_byte_offset_from_u16, + // check_const_byte_offset_from_u16_arr + // ); + // generate_const_byte_offset_from_harness!( + // u32, + // check_const_byte_offset_from_u32, + // check_const_byte_offset_from_u32_arr + // ); + // generate_const_byte_offset_from_harness!( + // u64, + // check_const_byte_offset_from_u64, + // check_const_byte_offset_from_u64_arr + // ); + // generate_const_byte_offset_from_harness!( + // u128, + // check_const_byte_offset_from_u128, + // check_const_byte_offset_from_u128_arr + // ); + // generate_const_byte_offset_from_harness!( + // usize, + // check_const_byte_offset_from_usize, + // check_const_byte_offset_from_usize_arr + // ); + + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // generate_const_byte_offset_from_harness!( + // i8, + // check_const_byte_offset_from_i8, + // check_const_byte_offset_from_i8_arr + // ); + // generate_const_byte_offset_from_harness!( + // i16, + // check_const_byte_offset_from_i16, + // check_const_byte_offset_from_i16_arr + // ); + // generate_const_byte_offset_from_harness!( + // i32, + // check_const_byte_offset_from_i32, + // check_const_byte_offset_from_i32_arr + // ); + // generate_const_byte_offset_from_harness!( + // i64, + // check_const_byte_offset_from_i64, + // check_const_byte_offset_from_i64_arr + // ); + // generate_const_byte_offset_from_harness!( + // i128, + // check_const_byte_offset_from_i128, + // check_const_byte_offset_from_i128_arr + // ); + // generate_const_byte_offset_from_harness!( + // isize, + // check_const_byte_offset_from_isize, + // check_const_byte_offset_from_isize_arr + // ); + + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // generate_const_byte_offset_from_harness!( + // (i8, i8), + // check_const_byte_offset_from_tuple_1, + // check_const_byte_offset_from_tuple_1_arr + // ); + // generate_const_byte_offset_from_harness!( + // (f64, bool), + // check_const_byte_offset_from_tuple_2, + // check_const_byte_offset_from_tuple_2_arr + // ); + // generate_const_byte_offset_from_harness!( + // (u32, i16, f32), + // check_const_byte_offset_from_tuple_3, + // check_const_byte_offset_from_tuple_3_arr + // ); + // generate_const_byte_offset_from_harness!( + // ((), bool, u8, u16, i32, f64, i128, usize), + // check_const_byte_offset_from_tuple_4, + // check_const_byte_offset_from_tuple_4_arr + // ); // Length of the slice generated from PointerGenerator. const SLICE_LEN: usize = 10; @@ -2689,42 +2705,44 @@ mod verify { }; } - generate_const_byte_offset_from_slice_harness!(u8, check_const_byte_offset_from_u8_slice); - generate_const_byte_offset_from_slice_harness!(u16, check_const_byte_offset_from_u16_slice); - generate_const_byte_offset_from_slice_harness!(u32, check_const_byte_offset_from_u32_slice); - generate_const_byte_offset_from_slice_harness!(u64, check_const_byte_offset_from_u64_slice); - generate_const_byte_offset_from_slice_harness!(u128, check_const_byte_offset_from_u128_slice); - generate_const_byte_offset_from_slice_harness!(usize, check_const_byte_offset_from_usize_slice); - generate_const_byte_offset_from_slice_harness!(i8, check_const_byte_offset_from_i8_slice); - generate_const_byte_offset_from_slice_harness!(i16, check_const_byte_offset_from_i16_slice); - generate_const_byte_offset_from_slice_harness!(i32, check_const_byte_offset_from_i32_slice); - generate_const_byte_offset_from_slice_harness!(i64, check_const_byte_offset_from_i64_slice); - generate_const_byte_offset_from_slice_harness!(i128, check_const_byte_offset_from_i128_slice); - generate_const_byte_offset_from_slice_harness!(isize, check_const_byte_offset_from_isize_slice); + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // generate_const_byte_offset_from_slice_harness!(u8, check_const_byte_offset_from_u8_slice); + // generate_const_byte_offset_from_slice_harness!(u16, check_const_byte_offset_from_u16_slice); + // generate_const_byte_offset_from_slice_harness!(u32, check_const_byte_offset_from_u32_slice); + // generate_const_byte_offset_from_slice_harness!(u64, check_const_byte_offset_from_u64_slice); + // generate_const_byte_offset_from_slice_harness!(u128, check_const_byte_offset_from_u128_slice); + // generate_const_byte_offset_from_slice_harness!(usize, check_const_byte_offset_from_usize_slice); + // generate_const_byte_offset_from_slice_harness!(i8, check_const_byte_offset_from_i8_slice); + // generate_const_byte_offset_from_slice_harness!(i16, check_const_byte_offset_from_i16_slice); + // generate_const_byte_offset_from_slice_harness!(i32, check_const_byte_offset_from_i32_slice); + // generate_const_byte_offset_from_slice_harness!(i64, check_const_byte_offset_from_i64_slice); + // generate_const_byte_offset_from_slice_harness!(i128, check_const_byte_offset_from_i128_slice); + // generate_const_byte_offset_from_slice_harness!(isize, check_const_byte_offset_from_isize_slice); // tracking issue: https://github.com/model-checking/kani/issues/3763 // Workaround: Directly verifying the method `<*const dyn TestTrait>::byte_offset_from` // causes a compilation error. As a workaround, the proof is annotated with the // underlying struct type instead. - #[kani::proof_for_contract(<*const TestStruct>::byte_offset_from)] - pub fn check_const_byte_offset_from_dyn() { - const gen_size: usize = mem::size_of::(); - // Since the pointer generator cannot directly create pointers to `dyn Trait`, - // we first generate a pointer to the underlying struct and then cast it to a `dyn Trait` pointer. - let mut generator_caller = PointerGenerator::::new(); - let mut generator_input = PointerGenerator::::new(); - let ptr_caller: *const TestStruct = generator_caller.any_in_bounds().ptr; - let ptr_input: *const TestStruct = if kani::any() { - generator_caller.any_alloc_status().ptr - } else { - generator_input.any_alloc_status().ptr - }; - - let ptr_caller = ptr_caller as *const dyn TestTrait; - let ptr_input = ptr_input as *const dyn TestTrait; - - unsafe { - ptr_caller.byte_offset_from(ptr_input); - } - } + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // #[kani::proof_for_contract(<*const TestStruct>::byte_offset_from)] + // pub fn check_const_byte_offset_from_dyn() { + // const gen_size: usize = mem::size_of::(); + // // Since the pointer generator cannot directly create pointers to `dyn Trait`, + // // we first generate a pointer to the underlying struct and then cast it to a `dyn Trait` pointer. + // let mut generator_caller = PointerGenerator::::new(); + // let mut generator_input = PointerGenerator::::new(); + // let ptr_caller: *const TestStruct = generator_caller.any_in_bounds().ptr; + // let ptr_input: *const TestStruct = if kani::any() { + // generator_caller.any_alloc_status().ptr + // } else { + // generator_input.any_alloc_status().ptr + // }; + // + // let ptr_caller = ptr_caller as *const dyn TestTrait; + // let ptr_input = ptr_input as *const dyn TestTrait; + // + // unsafe { + // ptr_caller.byte_offset_from(ptr_input); + // } + // } } diff --git a/library/core/src/ptr/metadata.rs b/library/core/src/ptr/metadata.rs index 9c5da306e27a7..0deac3621e84c 100644 --- a/library/core/src/ptr/metadata.rs +++ b/library/core/src/ptr/metadata.rs @@ -3,7 +3,7 @@ use crate::fmt; use crate::hash::{Hash, Hasher}; use crate::intrinsics::{aggregate_raw_ptr, ptr_metadata}; -use crate::marker::Freeze; +use crate::marker::{Freeze, PointeeSized}; use crate::ptr::NonNull; /// Provides the pointer metadata type of any pointed-to type. @@ -55,7 +55,7 @@ use crate::ptr::NonNull; #[lang = "pointee_trait"] #[rustc_deny_explicit_impl] #[rustc_do_not_implement_via_object] -pub trait Pointee { +pub trait Pointee: PointeeSized { /// The type for metadata in pointers and references to `Self`. #[lang = "metadata_type"] // NOTE: Keep trait bounds in `static_assert_expected_bounds_for_metadata` @@ -81,7 +81,7 @@ pub trait Pointee { /// ``` #[unstable(feature = "ptr_metadata", issue = "81513")] // NOTE: don’t stabilize this before trait aliases are stable in the language? -pub trait Thin = Pointee; +pub trait Thin = Pointee + PointeeSized; /// Extracts the metadata component of a pointer. /// @@ -96,7 +96,7 @@ pub trait Thin = Pointee; /// assert_eq!(std::ptr::metadata("foo"), 3_usize); /// ``` #[inline] -pub const fn metadata(ptr: *const T) -> ::Metadata { +pub const fn metadata(ptr: *const T) -> ::Metadata { ptr_metadata(ptr) } @@ -109,7 +109,7 @@ pub const fn metadata(ptr: *const T) -> ::Metadata { /// [`slice::from_raw_parts`]: crate::slice::from_raw_parts #[unstable(feature = "ptr_metadata", issue = "81513")] #[inline] -pub const fn from_raw_parts( +pub const fn from_raw_parts( data_pointer: *const impl Thin, metadata: ::Metadata, ) -> *const T { @@ -122,7 +122,7 @@ pub const fn from_raw_parts( /// See the documentation of [`from_raw_parts`] for more details. #[unstable(feature = "ptr_metadata", issue = "81513")] #[inline] -pub const fn from_raw_parts_mut( +pub const fn from_raw_parts_mut( data_pointer: *mut impl Thin, metadata: ::Metadata, ) -> *mut T { @@ -152,7 +152,7 @@ pub const fn from_raw_parts_mut( /// duplicated in multiple codegen units), and pointers to vtables of *different* types/traits can /// compare equal (since identical vtables can be deduplicated within a codegen unit). #[lang = "dyn_metadata"] -pub struct DynMetadata { +pub struct DynMetadata { _vtable_ptr: NonNull, _phantom: crate::marker::PhantomData, } @@ -165,7 +165,7 @@ unsafe extern "C" { type VTable; } -impl DynMetadata { +impl DynMetadata { /// When `DynMetadata` appears as the metadata field of a wide pointer, the rustc_middle layout /// computation does magic and the resulting layout is *not* a `FieldsShape::Aggregate`, instead /// it is a `FieldsShape::Primitive`. This means that the same type can have different layout @@ -206,10 +206,10 @@ impl DynMetadata { } } -unsafe impl Send for DynMetadata {} -unsafe impl Sync for DynMetadata {} +unsafe impl Send for DynMetadata {} +unsafe impl Sync for DynMetadata {} -impl fmt::Debug for DynMetadata { +impl fmt::Debug for DynMetadata { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("DynMetadata").field(&self.vtable_ptr()).finish() } @@ -217,27 +217,27 @@ impl fmt::Debug for DynMetadata { // Manual impls needed to avoid `Dyn: $Trait` bounds. -impl Unpin for DynMetadata {} +impl Unpin for DynMetadata {} -impl Copy for DynMetadata {} +impl Copy for DynMetadata {} -impl Clone for DynMetadata { +impl Clone for DynMetadata { #[inline] fn clone(&self) -> Self { *self } } -impl Eq for DynMetadata {} +impl Eq for DynMetadata {} -impl PartialEq for DynMetadata { +impl PartialEq for DynMetadata { #[inline] fn eq(&self, other: &Self) -> bool { crate::ptr::eq::(self.vtable_ptr(), other.vtable_ptr()) } } -impl Ord for DynMetadata { +impl Ord for DynMetadata { #[inline] #[allow(ambiguous_wide_pointer_comparisons)] fn cmp(&self, other: &Self) -> crate::cmp::Ordering { @@ -245,14 +245,14 @@ impl Ord for DynMetadata { } } -impl PartialOrd for DynMetadata { +impl PartialOrd for DynMetadata { #[inline] fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) } } -impl Hash for DynMetadata { +impl Hash for DynMetadata { #[inline] fn hash(&self, hasher: &mut H) { crate::ptr::hash::(self.vtable_ptr(), hasher) diff --git a/library/core/src/ptr/mod.rs b/library/core/src/ptr/mod.rs index 1682e977ae19d..0a77caa98eda1 100644 --- a/library/core/src/ptr/mod.rs +++ b/library/core/src/ptr/mod.rs @@ -400,7 +400,7 @@ use crate::cmp::Ordering; use crate::intrinsics::const_eval_select; #[cfg(kani)] use crate::kani; -use crate::marker::FnPtr; +use crate::marker::{FnPtr, PointeeSized}; use crate::mem::{self, MaybeUninit, SizedTypeProperties}; use crate::num::NonZero; use crate::{fmt, hash, intrinsics, ub_checks}; @@ -798,7 +798,7 @@ pub const unsafe fn write_bytes(dst: *mut T, val: u8, count: usize) { #[lang = "drop_in_place"] #[allow(unconditional_recursion)] #[rustc_diagnostic_item = "ptr_drop_in_place"] -pub unsafe fn drop_in_place(to_drop: *mut T) { +pub unsafe fn drop_in_place(to_drop: *mut T) { // Code here does not matter - this is replaced by the // real drop glue by the compiler. @@ -827,7 +827,7 @@ pub unsafe fn drop_in_place(to_drop: *mut T) { #[rustc_promotable] #[rustc_const_stable(feature = "const_ptr_null", since = "1.24.0")] #[rustc_diagnostic_item = "ptr_null"] -pub const fn null() -> *const T { +pub const fn null() -> *const T { from_raw_parts(without_provenance::<()>(0), ()) } @@ -852,7 +852,7 @@ pub const fn null() -> *const T { #[rustc_promotable] #[rustc_const_stable(feature = "const_ptr_null", since = "1.24.0")] #[rustc_diagnostic_item = "ptr_null_mut"] -pub const fn null_mut() -> *mut T { +pub const fn null_mut() -> *mut T { from_raw_parts_mut(without_provenance_mut::<()>(0), ()) } @@ -1070,7 +1070,7 @@ pub fn with_exposed_provenance_mut(addr: usize) -> *mut T { #[rustc_const_stable(feature = "ptr_from_ref", since = "1.76.0")] #[rustc_never_returns_null_ptr] #[rustc_diagnostic_item = "ptr_from_ref"] -pub const fn from_ref(r: &T) -> *const T { +pub const fn from_ref(r: &T) -> *const T { r } @@ -1120,7 +1120,7 @@ pub const fn from_ref(r: &T) -> *const T { #[stable(feature = "ptr_from_ref", since = "1.76.0")] #[rustc_const_stable(feature = "ptr_from_ref", since = "1.76.0")] #[rustc_never_returns_null_ptr] -pub const fn from_mut(r: &mut T) -> *mut T { +pub const fn from_mut(r: &mut T) -> *mut T { r } @@ -2464,7 +2464,7 @@ pub(crate) unsafe fn align_offset(p: *const T, a: usize) -> usize { #[must_use = "pointer comparison produces a value"] #[rustc_diagnostic_item = "ptr_eq"] #[allow(ambiguous_wide_pointer_comparisons)] // it's actually clear here -pub fn eq(a: *const T, b: *const T) -> bool { +pub fn eq(a: *const T, b: *const T) -> bool { a == b } @@ -2488,7 +2488,7 @@ pub fn eq(a: *const T, b: *const T) -> bool { #[stable(feature = "ptr_addr_eq", since = "1.76.0")] #[inline(always)] #[must_use = "pointer comparison produces a value"] -pub fn addr_eq(p: *const T, q: *const U) -> bool { +pub fn addr_eq(p: *const T, q: *const U) -> bool { (p as *const ()) == (q as *const ()) } @@ -2571,7 +2571,7 @@ pub fn fn_addr_eq(f: T, g: U) -> bool { /// assert_eq!(actual, expected); /// ``` #[stable(feature = "ptr_hash", since = "1.35.0")] -pub fn hash(hashee: *const T, into: &mut S) { +pub fn hash(hashee: *const T, into: &mut S) { use crate::hash::Hash; hashee.hash(into); } diff --git a/library/core/src/ptr/mut_ptr.rs b/library/core/src/ptr/mut_ptr.rs index 4af197ff2afa6..2792432146adc 100644 --- a/library/core/src/ptr/mut_ptr.rs +++ b/library/core/src/ptr/mut_ptr.rs @@ -5,10 +5,11 @@ use crate::cmp::Ordering::{Equal, Greater, Less}; use crate::intrinsics::const_eval_select; #[cfg(kani)] use crate::kani; +use crate::marker::PointeeSized; use crate::mem::{self, SizedTypeProperties}; use crate::slice::{self, SliceIndex}; -impl *mut T { +impl *mut T { #[doc = include_str!("docs/is_null.md")] /// /// # Examples @@ -114,7 +115,7 @@ impl *mut T { #[inline] pub const fn with_metadata_of(self, meta: *const U) -> *mut U where - U: ?Sized, + U: PointeeSized, { from_raw_parts_mut::(self as *mut (), metadata(meta)) } @@ -471,15 +472,16 @@ impl *mut T { #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[track_caller] - #[requires( - count == 0 || - ( - (core::mem::size_of_val_raw(self) > 0) && - (self.addr() as isize).checked_add(count).is_some()) && - (core::ub_checks::same_allocation(self, self.wrapping_byte_offset(count)) - ) - )] - #[ensures(|result| core::mem::size_of_val_raw(self) == 0 || core::ub_checks::same_allocation(self, *result))] + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // #[requires( + // count == 0 || + // ( + // (core::mem::size_of_val_raw(self) > 0) && + // (self.addr() as isize).checked_add(count).is_some()) && + // (core::ub_checks::same_allocation(self, self.wrapping_byte_offset(count)) + // ) + // )] + // #[ensures(|result| core::mem::size_of_val_raw(self) == 0 || core::ub_checks::same_allocation(self, *result))] pub const unsafe fn byte_offset(self, count: isize) -> Self { // SAFETY: the caller must uphold the safety contract for `offset`. unsafe { self.cast::().offset(count).with_metadata_of(self) } @@ -896,16 +898,17 @@ impl *mut T { #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces - #[requires( - (mem::size_of_val_raw(self) != 0) && - // Ensures subtracting `origin` from `self` doesn't overflow - (self.addr() as isize).checked_sub(origin.addr() as isize).is_some() && - // Ensure both pointers are in the same allocation or are pointing to the same address - (self.addr() == origin.addr() || - core::ub_checks::same_allocation(self as *const u8, origin as *const u8)) - )] - // The result should equal the distance in terms of bytes - #[ensures(|result| *result == (self.addr() as isize - origin.addr() as isize))] + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // #[requires( + // (mem::size_of_val_raw(self) != 0) && + // // Ensures subtracting `origin` from `self` doesn't overflow + // (self.addr() as isize).checked_sub(origin.addr() as isize).is_some() && + // // Ensure both pointers are in the same allocation or are pointing to the same address + // (self.addr() == origin.addr() || + // core::ub_checks::same_allocation(self as *const u8, origin as *const u8)) + // )] + // // The result should equal the distance in terms of bytes + // #[ensures(|result| *result == (self.addr() as isize - origin.addr() as isize))] pub const unsafe fn byte_offset_from(self, origin: *const U) -> isize { // SAFETY: the caller must uphold the safety contract for `offset_from`. unsafe { self.cast::().offset_from(origin.cast::()) } @@ -1090,20 +1093,21 @@ impl *mut T { #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[track_caller] - #[requires( - // If count is zero, any pointer is valid including null pointer. - (count == 0) || - // Else if count is not zero, then ensure that adding `count` doesn't cause - // overflow and that both pointers `self` and the result are in the same - // allocation - ( - (count <= isize::MAX as usize) && - (core::mem::size_of_val_raw(self) > 0) && - ((self.addr() as isize).checked_add(count as isize).is_some()) && - (core::ub_checks::same_allocation(self, self.wrapping_byte_add(count))) - ) - )] - #[ensures(|result| core::mem::size_of_val_raw(self) == 0 || core::ub_checks::same_allocation(self, *result))] + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // #[requires( + // // If count is zero, any pointer is valid including null pointer. + // (count == 0) || + // // Else if count is not zero, then ensure that adding `count` doesn't cause + // // overflow and that both pointers `self` and the result are in the same + // // allocation + // ( + // (count <= isize::MAX as usize) && + // (core::mem::size_of_val_raw(self) > 0) && + // ((self.addr() as isize).checked_add(count as isize).is_some()) && + // (core::ub_checks::same_allocation(self, self.wrapping_byte_add(count))) + // ) + // )] + // #[ensures(|result| core::mem::size_of_val_raw(self) == 0 || core::ub_checks::same_allocation(self, *result))] pub const unsafe fn byte_add(self, count: usize) -> Self { // SAFETY: the caller must uphold the safety contract for `add`. unsafe { self.cast::().add(count).with_metadata_of(self) } @@ -1235,20 +1239,21 @@ impl *mut T { #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[track_caller] - #[requires( - // If count is zero, any pointer is valid including null pointer. - (count == 0) || - // Else if count is not zero, then ensure that subtracting `count` doesn't - // cause overflow and that both pointers `self` and the result are in the - // same allocation. - ( - (count <= isize::MAX as usize) && - (core::mem::size_of_val_raw(self) > 0) && - ((self.addr() as isize).checked_sub(count as isize).is_some()) && - (core::ub_checks::same_allocation(self, self.wrapping_byte_sub(count))) - ) - )] - #[ensures(|result| core::mem::size_of_val_raw(self) == 0 || core::ub_checks::same_allocation(self, *result))] + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // #[requires( + // // If count is zero, any pointer is valid including null pointer. + // (count == 0) || + // // Else if count is not zero, then ensure that subtracting `count` doesn't + // // cause overflow and that both pointers `self` and the result are in the + // // same allocation. + // ( + // (count <= isize::MAX as usize) && + // (core::mem::size_of_val_raw(self) > 0) && + // ((self.addr() as isize).checked_sub(count as isize).is_some()) && + // (core::ub_checks::same_allocation(self, self.wrapping_byte_sub(count))) + // ) + // )] + // #[ensures(|result| core::mem::size_of_val_raw(self) == 0 || core::ub_checks::same_allocation(self, *result))] pub const unsafe fn byte_sub(self, count: usize) -> Self { // SAFETY: the caller must uphold the safety contract for `sub`. unsafe { self.cast::().sub(count).with_metadata_of(self) } @@ -2121,7 +2126,7 @@ impl *mut [T; N] { /// Pointer equality is by address, as produced by the [`<*mut T>::addr`](pointer::addr) method. #[stable(feature = "rust1", since = "1.0.0")] -impl PartialEq for *mut T { +impl PartialEq for *mut T { #[inline(always)] #[allow(ambiguous_wide_pointer_comparisons)] fn eq(&self, other: &*mut T) -> bool { @@ -2131,11 +2136,11 @@ impl PartialEq for *mut T { /// Pointer equality is an equivalence relation. #[stable(feature = "rust1", since = "1.0.0")] -impl Eq for *mut T {} +impl Eq for *mut T {} /// Pointer comparison is by address, as produced by the [`<*mut T>::addr`](pointer::addr) method. #[stable(feature = "rust1", since = "1.0.0")] -impl Ord for *mut T { +impl Ord for *mut T { #[inline] #[allow(ambiguous_wide_pointer_comparisons)] fn cmp(&self, other: &*mut T) -> Ordering { @@ -2151,7 +2156,7 @@ impl Ord for *mut T { /// Pointer comparison is by address, as produced by the [`<*mut T>::addr`](pointer::addr) method. #[stable(feature = "rust1", since = "1.0.0")] -impl PartialOrd for *mut T { +impl PartialOrd for *mut T { #[inline(always)] #[allow(ambiguous_wide_pointer_comparisons)] fn partial_cmp(&self, other: &*mut T) -> Option { @@ -2573,26 +2578,28 @@ mod verify { check_mut_offset_from_tuple_4_array ); - #[kani::proof_for_contract(<*mut ()>::byte_offset)] - pub fn check_mut_byte_offset_unit_invalid_count() { - let mut val = (); - let ptr: *mut () = &mut val; - let count: isize = kani::any_where(|&x| x > (mem::size_of::<()>() as isize)); - unsafe { - ptr.byte_offset(count); - } - } - - #[kani::proof_for_contract(<*mut ()>::byte_offset)] - pub fn check_mut_byte_offset_cast_unit() { - let mut generator = PointerGenerator::::new(); - let ptr: *mut u8 = generator.any_in_bounds().ptr; - let ptr1: *mut () = ptr as *mut (); - let count: isize = kani::any(); - unsafe { - ptr1.byte_offset(count); - } - } + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // #[kani::proof_for_contract(<*mut ()>::byte_offset)] + // pub fn check_mut_byte_offset_unit_invalid_count() { + // let mut val = (); + // let ptr: *mut () = &mut val; + // let count: isize = kani::any_where(|&x| x > (mem::size_of::<()>() as isize)); + // unsafe { + // ptr.byte_offset(count); + // } + // } + + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // #[kani::proof_for_contract(<*mut ()>::byte_offset)] + // pub fn check_mut_byte_offset_cast_unit() { + // let mut generator = PointerGenerator::::new(); + // let ptr: *mut u8 = generator.any_in_bounds().ptr; + // let ptr1: *mut () = ptr as *mut (); + // let count: isize = kani::any(); + // unsafe { + // ptr1.byte_offset(count); + // } + // } // generate proof for contracts of byte_add, byte_sub and byte_offset to verify // unit pointee type. @@ -2625,9 +2632,10 @@ mod verify { }; } - gen_mut_byte_arith_harness_for_unit!(byte_add, check_mut_byte_add_unit); - gen_mut_byte_arith_harness_for_unit!(byte_sub, check_mut_byte_sub_unit); - gen_mut_byte_arith_harness_for_unit!(byte_offset, check_mut_byte_offset_unit); + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // gen_mut_byte_arith_harness_for_unit!(byte_add, check_mut_byte_add_unit); + // gen_mut_byte_arith_harness_for_unit!(byte_sub, check_mut_byte_sub_unit); + // gen_mut_byte_arith_harness_for_unit!(byte_offset, check_mut_byte_offset_unit); // generate proof for contracts for byte_add, byte_sub and byte_offset // - `$type`: pointee type @@ -2681,60 +2689,63 @@ mod verify { }; } - gen_mut_byte_arith_harness!(i8, byte_add, check_mut_byte_add_i8); - gen_mut_byte_arith_harness!(i16, byte_add, check_mut_byte_add_i16); - gen_mut_byte_arith_harness!(i32, byte_add, check_mut_byte_add_i32); - gen_mut_byte_arith_harness!(i64, byte_add, check_mut_byte_add_i64); - gen_mut_byte_arith_harness!(i128, byte_add, check_mut_byte_add_i128); - gen_mut_byte_arith_harness!(isize, byte_add, check_mut_byte_add_isize); - gen_mut_byte_arith_harness!(u8, byte_add, check_mut_byte_add_u8); - gen_mut_byte_arith_harness!(u16, byte_add, check_mut_byte_add_u16); - gen_mut_byte_arith_harness!(u32, byte_add, check_mut_byte_add_u32); - gen_mut_byte_arith_harness!(u64, byte_add, check_mut_byte_add_u64); - gen_mut_byte_arith_harness!(u128, byte_add, check_mut_byte_add_u128); - gen_mut_byte_arith_harness!(usize, byte_add, check_mut_byte_add_usize); - gen_mut_byte_arith_harness!((i8, i8), byte_add, check_mut_byte_add_tuple_1); - gen_mut_byte_arith_harness!((f64, bool), byte_add, check_mut_byte_add_tuple_2); - gen_mut_byte_arith_harness!((i32, f64, bool), byte_add, check_mut_byte_add_tuple_3); - gen_mut_byte_arith_harness!((i8, u16, i32, u64, isize), byte_add, check_mut_byte_add_tuple_4); - - gen_mut_byte_arith_harness!(i8, byte_sub, check_mut_byte_sub_i8); - gen_mut_byte_arith_harness!(i16, byte_sub, check_mut_byte_sub_i16); - gen_mut_byte_arith_harness!(i32, byte_sub, check_mut_byte_sub_i32); - gen_mut_byte_arith_harness!(i64, byte_sub, check_mut_byte_sub_i64); - gen_mut_byte_arith_harness!(i128, byte_sub, check_mut_byte_sub_i128); - gen_mut_byte_arith_harness!(isize, byte_sub, check_mut_byte_sub_isize); - gen_mut_byte_arith_harness!(u8, byte_sub, check_mut_byte_sub_u8); - gen_mut_byte_arith_harness!(u16, byte_sub, check_mut_byte_sub_u16); - gen_mut_byte_arith_harness!(u32, byte_sub, check_mut_byte_sub_u32); - gen_mut_byte_arith_harness!(u64, byte_sub, check_mut_byte_sub_u64); - gen_mut_byte_arith_harness!(u128, byte_sub, check_mut_byte_sub_u128); - gen_mut_byte_arith_harness!(usize, byte_sub, check_mut_byte_sub_usize); - gen_mut_byte_arith_harness!((i8, i8), byte_sub, check_mut_byte_sub_tuple_1); - gen_mut_byte_arith_harness!((f64, bool), byte_sub, check_mut_byte_sub_tuple_2); - gen_mut_byte_arith_harness!((i32, f64, bool), byte_sub, check_mut_byte_sub_tuple_3); - gen_mut_byte_arith_harness!((i8, u16, i32, u64, isize), byte_sub, check_mut_byte_sub_tuple_4); - - gen_mut_byte_arith_harness!(i8, byte_offset, check_mut_byte_offset_i8); - gen_mut_byte_arith_harness!(i16, byte_offset, check_mut_byte_offset_i16); - gen_mut_byte_arith_harness!(i32, byte_offset, check_mut_byte_offset_i32); - gen_mut_byte_arith_harness!(i64, byte_offset, check_mut_byte_offset_i64); - gen_mut_byte_arith_harness!(i128, byte_offset, check_mut_byte_offset_i128); - gen_mut_byte_arith_harness!(isize, byte_offset, check_mut_byte_offset_isize); - gen_mut_byte_arith_harness!(u8, byte_offset, check_mut_byte_offset_u8); - gen_mut_byte_arith_harness!(u16, byte_offset, check_mut_byte_offset_u16); - gen_mut_byte_arith_harness!(u32, byte_offset, check_mut_byte_offset_u32); - gen_mut_byte_arith_harness!(u64, byte_offset, check_mut_byte_offset_u64); - gen_mut_byte_arith_harness!(u128, byte_offset, check_mut_byte_offset_u128); - gen_mut_byte_arith_harness!(usize, byte_offset, check_mut_byte_offset_usize); - gen_mut_byte_arith_harness!((i8, i8), byte_offset, check_mut_byte_offset_tuple_1); - gen_mut_byte_arith_harness!((f64, bool), byte_offset, check_mut_byte_offset_tuple_2); - gen_mut_byte_arith_harness!((i32, f64, bool), byte_offset, check_mut_byte_offset_tuple_3); - gen_mut_byte_arith_harness!( - (i8, u16, i32, u64, isize), - byte_offset, - check_mut_byte_offset_tuple_4 - ); + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // gen_mut_byte_arith_harness!(i8, byte_add, check_mut_byte_add_i8); + // gen_mut_byte_arith_harness!(i16, byte_add, check_mut_byte_add_i16); + // gen_mut_byte_arith_harness!(i32, byte_add, check_mut_byte_add_i32); + // gen_mut_byte_arith_harness!(i64, byte_add, check_mut_byte_add_i64); + // gen_mut_byte_arith_harness!(i128, byte_add, check_mut_byte_add_i128); + // gen_mut_byte_arith_harness!(isize, byte_add, check_mut_byte_add_isize); + // gen_mut_byte_arith_harness!(u8, byte_add, check_mut_byte_add_u8); + // gen_mut_byte_arith_harness!(u16, byte_add, check_mut_byte_add_u16); + // gen_mut_byte_arith_harness!(u32, byte_add, check_mut_byte_add_u32); + // gen_mut_byte_arith_harness!(u64, byte_add, check_mut_byte_add_u64); + // gen_mut_byte_arith_harness!(u128, byte_add, check_mut_byte_add_u128); + // gen_mut_byte_arith_harness!(usize, byte_add, check_mut_byte_add_usize); + // gen_mut_byte_arith_harness!((i8, i8), byte_add, check_mut_byte_add_tuple_1); + // gen_mut_byte_arith_harness!((f64, bool), byte_add, check_mut_byte_add_tuple_2); + // gen_mut_byte_arith_harness!((i32, f64, bool), byte_add, check_mut_byte_add_tuple_3); + // gen_mut_byte_arith_harness!((i8, u16, i32, u64, isize), byte_add, check_mut_byte_add_tuple_4); + + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // gen_mut_byte_arith_harness!(i8, byte_sub, check_mut_byte_sub_i8); + // gen_mut_byte_arith_harness!(i16, byte_sub, check_mut_byte_sub_i16); + // gen_mut_byte_arith_harness!(i32, byte_sub, check_mut_byte_sub_i32); + // gen_mut_byte_arith_harness!(i64, byte_sub, check_mut_byte_sub_i64); + // gen_mut_byte_arith_harness!(i128, byte_sub, check_mut_byte_sub_i128); + // gen_mut_byte_arith_harness!(isize, byte_sub, check_mut_byte_sub_isize); + // gen_mut_byte_arith_harness!(u8, byte_sub, check_mut_byte_sub_u8); + // gen_mut_byte_arith_harness!(u16, byte_sub, check_mut_byte_sub_u16); + // gen_mut_byte_arith_harness!(u32, byte_sub, check_mut_byte_sub_u32); + // gen_mut_byte_arith_harness!(u64, byte_sub, check_mut_byte_sub_u64); + // gen_mut_byte_arith_harness!(u128, byte_sub, check_mut_byte_sub_u128); + // gen_mut_byte_arith_harness!(usize, byte_sub, check_mut_byte_sub_usize); + // gen_mut_byte_arith_harness!((i8, i8), byte_sub, check_mut_byte_sub_tuple_1); + // gen_mut_byte_arith_harness!((f64, bool), byte_sub, check_mut_byte_sub_tuple_2); + // gen_mut_byte_arith_harness!((i32, f64, bool), byte_sub, check_mut_byte_sub_tuple_3); + // gen_mut_byte_arith_harness!((i8, u16, i32, u64, isize), byte_sub, check_mut_byte_sub_tuple_4); + + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // gen_mut_byte_arith_harness!(i8, byte_offset, check_mut_byte_offset_i8); + // gen_mut_byte_arith_harness!(i16, byte_offset, check_mut_byte_offset_i16); + // gen_mut_byte_arith_harness!(i32, byte_offset, check_mut_byte_offset_i32); + // gen_mut_byte_arith_harness!(i64, byte_offset, check_mut_byte_offset_i64); + // gen_mut_byte_arith_harness!(i128, byte_offset, check_mut_byte_offset_i128); + // gen_mut_byte_arith_harness!(isize, byte_offset, check_mut_byte_offset_isize); + // gen_mut_byte_arith_harness!(u8, byte_offset, check_mut_byte_offset_u8); + // gen_mut_byte_arith_harness!(u16, byte_offset, check_mut_byte_offset_u16); + // gen_mut_byte_arith_harness!(u32, byte_offset, check_mut_byte_offset_u32); + // gen_mut_byte_arith_harness!(u64, byte_offset, check_mut_byte_offset_u64); + // gen_mut_byte_arith_harness!(u128, byte_offset, check_mut_byte_offset_u128); + // gen_mut_byte_arith_harness!(usize, byte_offset, check_mut_byte_offset_usize); + // gen_mut_byte_arith_harness!((i8, i8), byte_offset, check_mut_byte_offset_tuple_1); + // gen_mut_byte_arith_harness!((f64, bool), byte_offset, check_mut_byte_offset_tuple_2); + // gen_mut_byte_arith_harness!((i32, f64, bool), byte_offset, check_mut_byte_offset_tuple_3); + // gen_mut_byte_arith_harness!( + // (i8, u16, i32, u64, isize), + // byte_offset, + // check_mut_byte_offset_tuple_4 + // ); macro_rules! gen_mut_byte_arith_harness_for_slice { ($type:ty, byte_offset, $proof_name:ident) => { @@ -2769,44 +2780,47 @@ mod verify { }; } - gen_mut_byte_arith_harness_for_slice!(i8, byte_add, check_mut_byte_add_i8_slice); - gen_mut_byte_arith_harness_for_slice!(i16, byte_add, check_mut_byte_add_i16_slice); - gen_mut_byte_arith_harness_for_slice!(i32, byte_add, check_mut_byte_add_i32_slice); - gen_mut_byte_arith_harness_for_slice!(i64, byte_add, check_mut_byte_add_i64_slice); - gen_mut_byte_arith_harness_for_slice!(i128, byte_add, check_mut_byte_add_i128_slice); - gen_mut_byte_arith_harness_for_slice!(isize, byte_add, check_mut_byte_add_isize_slice); - gen_mut_byte_arith_harness_for_slice!(u8, byte_add, check_mut_byte_add_u8_slice); - gen_mut_byte_arith_harness_for_slice!(u16, byte_add, check_mut_byte_add_u16_slice); - gen_mut_byte_arith_harness_for_slice!(u32, byte_add, check_mut_byte_add_u32_slice); - gen_mut_byte_arith_harness_for_slice!(u64, byte_add, check_mut_byte_add_u64_slice); - gen_mut_byte_arith_harness_for_slice!(u128, byte_add, check_mut_byte_add_u128_slice); - gen_mut_byte_arith_harness_for_slice!(usize, byte_add, check_mut_byte_add_usize_slice); - - gen_mut_byte_arith_harness_for_slice!(i8, byte_sub, check_mut_byte_sub_i8_slice); - gen_mut_byte_arith_harness_for_slice!(i16, byte_sub, check_mut_byte_sub_i16_slice); - gen_mut_byte_arith_harness_for_slice!(i32, byte_sub, check_mut_byte_sub_i32_slice); - gen_mut_byte_arith_harness_for_slice!(i64, byte_sub, check_mut_byte_sub_i64_slice); - gen_mut_byte_arith_harness_for_slice!(i128, byte_sub, check_mut_byte_sub_i128_slice); - gen_mut_byte_arith_harness_for_slice!(isize, byte_sub, check_mut_byte_sub_isize_slice); - gen_mut_byte_arith_harness_for_slice!(u8, byte_sub, check_mut_byte_sub_u8_slice); - gen_mut_byte_arith_harness_for_slice!(u16, byte_sub, check_mut_byte_sub_u16_slice); - gen_mut_byte_arith_harness_for_slice!(u32, byte_sub, check_mut_byte_sub_u32_slice); - gen_mut_byte_arith_harness_for_slice!(u64, byte_sub, check_mut_byte_sub_u64_slice); - gen_mut_byte_arith_harness_for_slice!(u128, byte_sub, check_mut_byte_sub_u128_slice); - gen_mut_byte_arith_harness_for_slice!(usize, byte_sub, check_mut_byte_sub_usize_slice); - - gen_mut_byte_arith_harness_for_slice!(i8, byte_offset, check_mut_byte_offset_i8_slice); - gen_mut_byte_arith_harness_for_slice!(i16, byte_offset, check_mut_byte_offset_i16_slice); - gen_mut_byte_arith_harness_for_slice!(i32, byte_offset, check_mut_byte_offset_i32_slice); - gen_mut_byte_arith_harness_for_slice!(i64, byte_offset, check_mut_byte_offset_i64_slice); - gen_mut_byte_arith_harness_for_slice!(i128, byte_offset, check_mut_byte_offset_i128_slice); - gen_mut_byte_arith_harness_for_slice!(isize, byte_offset, check_mut_byte_offset_isize_slice); - gen_mut_byte_arith_harness_for_slice!(u8, byte_offset, check_mut_byte_offset_u8_slice); - gen_mut_byte_arith_harness_for_slice!(u16, byte_offset, check_mut_byte_offset_u16_slice); - gen_mut_byte_arith_harness_for_slice!(u32, byte_offset, check_mut_byte_offset_u32_slice); - gen_mut_byte_arith_harness_for_slice!(u64, byte_offset, check_mut_byte_offset_u64_slice); - gen_mut_byte_arith_harness_for_slice!(u128, byte_offset, check_mut_byte_offset_u128_slice); - gen_mut_byte_arith_harness_for_slice!(usize, byte_offset, check_mut_byte_offset_usize_slice); + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // gen_mut_byte_arith_harness_for_slice!(i8, byte_add, check_mut_byte_add_i8_slice); + // gen_mut_byte_arith_harness_for_slice!(i16, byte_add, check_mut_byte_add_i16_slice); + // gen_mut_byte_arith_harness_for_slice!(i32, byte_add, check_mut_byte_add_i32_slice); + // gen_mut_byte_arith_harness_for_slice!(i64, byte_add, check_mut_byte_add_i64_slice); + // gen_mut_byte_arith_harness_for_slice!(i128, byte_add, check_mut_byte_add_i128_slice); + // gen_mut_byte_arith_harness_for_slice!(isize, byte_add, check_mut_byte_add_isize_slice); + // gen_mut_byte_arith_harness_for_slice!(u8, byte_add, check_mut_byte_add_u8_slice); + // gen_mut_byte_arith_harness_for_slice!(u16, byte_add, check_mut_byte_add_u16_slice); + // gen_mut_byte_arith_harness_for_slice!(u32, byte_add, check_mut_byte_add_u32_slice); + // gen_mut_byte_arith_harness_for_slice!(u64, byte_add, check_mut_byte_add_u64_slice); + // gen_mut_byte_arith_harness_for_slice!(u128, byte_add, check_mut_byte_add_u128_slice); + // gen_mut_byte_arith_harness_for_slice!(usize, byte_add, check_mut_byte_add_usize_slice); + + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // gen_mut_byte_arith_harness_for_slice!(i8, byte_sub, check_mut_byte_sub_i8_slice); + // gen_mut_byte_arith_harness_for_slice!(i16, byte_sub, check_mut_byte_sub_i16_slice); + // gen_mut_byte_arith_harness_for_slice!(i32, byte_sub, check_mut_byte_sub_i32_slice); + // gen_mut_byte_arith_harness_for_slice!(i64, byte_sub, check_mut_byte_sub_i64_slice); + // gen_mut_byte_arith_harness_for_slice!(i128, byte_sub, check_mut_byte_sub_i128_slice); + // gen_mut_byte_arith_harness_for_slice!(isize, byte_sub, check_mut_byte_sub_isize_slice); + // gen_mut_byte_arith_harness_for_slice!(u8, byte_sub, check_mut_byte_sub_u8_slice); + // gen_mut_byte_arith_harness_for_slice!(u16, byte_sub, check_mut_byte_sub_u16_slice); + // gen_mut_byte_arith_harness_for_slice!(u32, byte_sub, check_mut_byte_sub_u32_slice); + // gen_mut_byte_arith_harness_for_slice!(u64, byte_sub, check_mut_byte_sub_u64_slice); + // gen_mut_byte_arith_harness_for_slice!(u128, byte_sub, check_mut_byte_sub_u128_slice); + // gen_mut_byte_arith_harness_for_slice!(usize, byte_sub, check_mut_byte_sub_usize_slice); + + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // gen_mut_byte_arith_harness_for_slice!(i8, byte_offset, check_mut_byte_offset_i8_slice); + // gen_mut_byte_arith_harness_for_slice!(i16, byte_offset, check_mut_byte_offset_i16_slice); + // gen_mut_byte_arith_harness_for_slice!(i32, byte_offset, check_mut_byte_offset_i32_slice); + // gen_mut_byte_arith_harness_for_slice!(i64, byte_offset, check_mut_byte_offset_i64_slice); + // gen_mut_byte_arith_harness_for_slice!(i128, byte_offset, check_mut_byte_offset_i128_slice); + // gen_mut_byte_arith_harness_for_slice!(isize, byte_offset, check_mut_byte_offset_isize_slice); + // gen_mut_byte_arith_harness_for_slice!(u8, byte_offset, check_mut_byte_offset_u8_slice); + // gen_mut_byte_arith_harness_for_slice!(u16, byte_offset, check_mut_byte_offset_u16_slice); + // gen_mut_byte_arith_harness_for_slice!(u32, byte_offset, check_mut_byte_offset_u32_slice); + // gen_mut_byte_arith_harness_for_slice!(u64, byte_offset, check_mut_byte_offset_u64_slice); + // gen_mut_byte_arith_harness_for_slice!(u128, byte_offset, check_mut_byte_offset_u128_slice); + // gen_mut_byte_arith_harness_for_slice!(usize, byte_offset, check_mut_byte_offset_usize_slice); // Trait used exclusively for implementing proofs for contracts for `dyn Trait` type. trait TestTrait {} @@ -2864,9 +2878,10 @@ mod verify { } // fn <*mut T>::add(), <*mut T>::sub() and <*mut T>::offset() dyn Trait verification - gen_mut_byte_arith_harness_for_dyn!(byte_add, check_mut_byte_add_dyn); - gen_mut_byte_arith_harness_for_dyn!(byte_sub, check_mut_byte_sub_dyn); - gen_mut_byte_arith_harness_for_dyn!(byte_offset, check_mut_byte_offset_dyn); + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // gen_mut_byte_arith_harness_for_dyn!(byte_add, check_mut_byte_add_dyn); + // gen_mut_byte_arith_harness_for_dyn!(byte_sub, check_mut_byte_sub_dyn); + // gen_mut_byte_arith_harness_for_dyn!(byte_offset, check_mut_byte_offset_dyn); #[kani::proof] pub fn check_mut_byte_offset_from_fixed_offset() { @@ -2880,15 +2895,16 @@ mod verify { } // Proof for unit size - #[kani::proof_for_contract(<*mut ()>::byte_offset_from)] - pub fn check_mut_byte_offset_from_unit() { - let mut val: () = (); - let src_ptr: *mut () = &mut val; - let dest_ptr: *mut () = &mut val; - unsafe { - dest_ptr.byte_offset_from(src_ptr); - } - } + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // #[kani::proof_for_contract(<*mut ()>::byte_offset_from)] + // pub fn check_mut_byte_offset_from_unit() { + // let mut val: () = (); + // let src_ptr: *mut () = &mut val; + // let dest_ptr: *mut () = &mut val; + // unsafe { + // dest_ptr.byte_offset_from(src_ptr); + // } + // } // Generate proofs for contracts for byte_offset_from to verify pointer to int // and composite types. @@ -2935,88 +2951,91 @@ mod verify { }; } - generate_mut_byte_offset_from_harness!( - u8, - check_mut_byte_offset_from_u8, - check_mut_byte_offset_from_u8_arr - ); - generate_mut_byte_offset_from_harness!( - u16, - check_mut_byte_offset_from_u16, - check_mut_byte_offset_from_u16_arr - ); - generate_mut_byte_offset_from_harness!( - u32, - check_mut_byte_offset_from_u32, - check_mut_byte_offset_from_u32_arr - ); - generate_mut_byte_offset_from_harness!( - u64, - check_mut_byte_offset_from_u64, - check_mut_byte_offset_from_u64_arr - ); - generate_mut_byte_offset_from_harness!( - u128, - check_mut_byte_offset_from_u128, - check_mut_byte_offset_from_u128_arr - ); - generate_mut_byte_offset_from_harness!( - usize, - check_mut_byte_offset_from_usize, - check_mut_byte_offset_from_usize_arr - ); - - generate_mut_byte_offset_from_harness!( - i8, - check_mut_byte_offset_from_i8, - check_mut_byte_offset_from_i8_arr - ); - generate_mut_byte_offset_from_harness!( - i16, - check_mut_byte_offset_from_i16, - check_mut_byte_offset_from_i16_arr - ); - generate_mut_byte_offset_from_harness!( - i32, - check_mut_byte_offset_from_i32, - check_mut_byte_offset_from_i32_arr - ); - generate_mut_byte_offset_from_harness!( - i64, - check_mut_byte_offset_from_i64, - check_mut_byte_offset_from_i64_arr - ); - generate_mut_byte_offset_from_harness!( - i128, - check_mut_byte_offset_from_i128, - check_mut_byte_offset_from_i128_arr - ); - generate_mut_byte_offset_from_harness!( - isize, - check_mut_byte_offset_from_isize, - check_mut_byte_offset_from_isize_arr - ); - - generate_mut_byte_offset_from_harness!( - (i8, i8), - check_mut_byte_offset_from_tuple_1, - check_mut_byte_offset_from_tuple_1_arr - ); - generate_mut_byte_offset_from_harness!( - (f64, bool), - check_mut_byte_offset_from_tuple_2, - check_mut_byte_offset_from_tuple_2_arr - ); - generate_mut_byte_offset_from_harness!( - (u32, i16, f32), - check_mut_byte_offset_from_tuple_3, - check_mut_byte_offset_from_tuple_3_arr - ); - generate_mut_byte_offset_from_harness!( - ((), bool, u8, u16, i32, f64, i128, usize), - check_mut_byte_offset_from_tuple_4, - check_mut_byte_offset_from_tuple_4_arr - ); + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // generate_mut_byte_offset_from_harness!( + // u8, + // check_mut_byte_offset_from_u8, + // check_mut_byte_offset_from_u8_arr + // ); + // generate_mut_byte_offset_from_harness!( + // u16, + // check_mut_byte_offset_from_u16, + // check_mut_byte_offset_from_u16_arr + // ); + // generate_mut_byte_offset_from_harness!( + // u32, + // check_mut_byte_offset_from_u32, + // check_mut_byte_offset_from_u32_arr + // ); + // generate_mut_byte_offset_from_harness!( + // u64, + // check_mut_byte_offset_from_u64, + // check_mut_byte_offset_from_u64_arr + // ); + // generate_mut_byte_offset_from_harness!( + // u128, + // check_mut_byte_offset_from_u128, + // check_mut_byte_offset_from_u128_arr + // ); + // generate_mut_byte_offset_from_harness!( + // usize, + // check_mut_byte_offset_from_usize, + // check_mut_byte_offset_from_usize_arr + // ); + + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // generate_mut_byte_offset_from_harness!( + // i8, + // check_mut_byte_offset_from_i8, + // check_mut_byte_offset_from_i8_arr + // ); + // generate_mut_byte_offset_from_harness!( + // i16, + // check_mut_byte_offset_from_i16, + // check_mut_byte_offset_from_i16_arr + // ); + // generate_mut_byte_offset_from_harness!( + // i32, + // check_mut_byte_offset_from_i32, + // check_mut_byte_offset_from_i32_arr + // ); + // generate_mut_byte_offset_from_harness!( + // i64, + // check_mut_byte_offset_from_i64, + // check_mut_byte_offset_from_i64_arr + // ); + // generate_mut_byte_offset_from_harness!( + // i128, + // check_mut_byte_offset_from_i128, + // check_mut_byte_offset_from_i128_arr + // ); + // generate_mut_byte_offset_from_harness!( + // isize, + // check_mut_byte_offset_from_isize, + // check_mut_byte_offset_from_isize_arr + // ); + + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // generate_mut_byte_offset_from_harness!( + // (i8, i8), + // check_mut_byte_offset_from_tuple_1, + // check_mut_byte_offset_from_tuple_1_arr + // ); + // generate_mut_byte_offset_from_harness!( + // (f64, bool), + // check_mut_byte_offset_from_tuple_2, + // check_mut_byte_offset_from_tuple_2_arr + // ); + // generate_mut_byte_offset_from_harness!( + // (u32, i16, f32), + // check_mut_byte_offset_from_tuple_3, + // check_mut_byte_offset_from_tuple_3_arr + // ); + // generate_mut_byte_offset_from_harness!( + // ((), bool, u8, u16, i32, f64, i128, usize), + // check_mut_byte_offset_from_tuple_4, + // check_mut_byte_offset_from_tuple_4_arr + // ); // The length of a slice is set to an arbitrary value, which defines its size. // In this case, implementing a slice with a dynamic size set using kani::any() @@ -3048,42 +3067,44 @@ mod verify { }; } - generate_mut_byte_offset_from_slice_harness!(u8, check_mut_byte_offset_from_u8_slice); - generate_mut_byte_offset_from_slice_harness!(u16, check_mut_byte_offset_from_u16_slice); - generate_mut_byte_offset_from_slice_harness!(u32, check_mut_byte_offset_from_u32_slice); - generate_mut_byte_offset_from_slice_harness!(u64, check_mut_byte_offset_from_u64_slice); - generate_mut_byte_offset_from_slice_harness!(u128, check_mut_byte_offset_from_u128_slice); - generate_mut_byte_offset_from_slice_harness!(usize, check_mut_byte_offset_from_usize_slice); - generate_mut_byte_offset_from_slice_harness!(i8, check_mut_byte_offset_from_i8_slice); - generate_mut_byte_offset_from_slice_harness!(i16, check_mut_byte_offset_from_i16_slice); - generate_mut_byte_offset_from_slice_harness!(i32, check_mut_byte_offset_from_i32_slice); - generate_mut_byte_offset_from_slice_harness!(i64, check_mut_byte_offset_from_i64_slice); - generate_mut_byte_offset_from_slice_harness!(i128, check_mut_byte_offset_from_i128_slice); - generate_mut_byte_offset_from_slice_harness!(isize, check_mut_byte_offset_from_isize_slice); + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // generate_mut_byte_offset_from_slice_harness!(u8, check_mut_byte_offset_from_u8_slice); + // generate_mut_byte_offset_from_slice_harness!(u16, check_mut_byte_offset_from_u16_slice); + // generate_mut_byte_offset_from_slice_harness!(u32, check_mut_byte_offset_from_u32_slice); + // generate_mut_byte_offset_from_slice_harness!(u64, check_mut_byte_offset_from_u64_slice); + // generate_mut_byte_offset_from_slice_harness!(u128, check_mut_byte_offset_from_u128_slice); + // generate_mut_byte_offset_from_slice_harness!(usize, check_mut_byte_offset_from_usize_slice); + // generate_mut_byte_offset_from_slice_harness!(i8, check_mut_byte_offset_from_i8_slice); + // generate_mut_byte_offset_from_slice_harness!(i16, check_mut_byte_offset_from_i16_slice); + // generate_mut_byte_offset_from_slice_harness!(i32, check_mut_byte_offset_from_i32_slice); + // generate_mut_byte_offset_from_slice_harness!(i64, check_mut_byte_offset_from_i64_slice); + // generate_mut_byte_offset_from_slice_harness!(i128, check_mut_byte_offset_from_i128_slice); + // generate_mut_byte_offset_from_slice_harness!(isize, check_mut_byte_offset_from_isize_slice); // tracking issue: https://github.com/model-checking/kani/issues/3763 // Workaround: Directly verifying the method `<*mut dyn TestTrait>::byte_offset_from` // causes a compilation error. As a workaround, the proof is annotated with the // underlying struct type instead. - #[kani::proof_for_contract(<*mut TestStruct>::byte_offset_from)] - pub fn check_mut_byte_offset_from_dyn() { - const gen_size: usize = mem::size_of::(); - // Since the pointer generator cannot directly create pointers to `dyn Trait`, - // we first generate a pointer to the underlying struct and then cast it to a `dyn Trait` pointer. - let mut generator_caller = PointerGenerator::::new(); - let mut generator_input = PointerGenerator::::new(); - let ptr_caller: *mut TestStruct = generator_caller.any_in_bounds().ptr; - let ptr_input: *mut TestStruct = if kani::any() { - generator_caller.any_alloc_status().ptr - } else { - generator_input.any_alloc_status().ptr - }; - - let ptr_caller = ptr_caller as *mut dyn TestTrait; - let ptr_input = ptr_input as *mut dyn TestTrait; - - unsafe { - ptr_caller.byte_offset_from(ptr_input); - } - } + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // #[kani::proof_for_contract(<*mut TestStruct>::byte_offset_from)] + // pub fn check_mut_byte_offset_from_dyn() { + // const gen_size: usize = mem::size_of::(); + // // Since the pointer generator cannot directly create pointers to `dyn Trait`, + // // we first generate a pointer to the underlying struct and then cast it to a `dyn Trait` pointer. + // let mut generator_caller = PointerGenerator::::new(); + // let mut generator_input = PointerGenerator::::new(); + // let ptr_caller: *mut TestStruct = generator_caller.any_in_bounds().ptr; + // let ptr_input: *mut TestStruct = if kani::any() { + // generator_caller.any_alloc_status().ptr + // } else { + // generator_input.any_alloc_status().ptr + // }; + // + // let ptr_caller = ptr_caller as *mut dyn TestTrait; + // let ptr_input = ptr_input as *mut dyn TestTrait; + // + // unsafe { + // ptr_caller.byte_offset_from(ptr_input); + // } + // } } diff --git a/library/core/src/ptr/non_null.rs b/library/core/src/ptr/non_null.rs index cba6d73b34266..dffe7ac0aeae7 100644 --- a/library/core/src/ptr/non_null.rs +++ b/library/core/src/ptr/non_null.rs @@ -3,7 +3,7 @@ use safety::{ensures, requires}; use crate::cmp::Ordering; #[cfg(kani)] use crate::kani; -use crate::marker::Unsize; +use crate::marker::{PointeeSized, Unsize}; use crate::mem::{MaybeUninit, SizedTypeProperties}; use crate::num::NonZero; use crate::ops::{CoerceUnsized, DispatchFromDyn}; @@ -26,19 +26,24 @@ use crate::{fmt, hash, intrinsics, mem, ptr}; /// as a discriminant -- `Option>` has the same size as `*mut T`. /// However the pointer may still dangle if it isn't dereferenced. /// -/// Unlike `*mut T`, `NonNull` was chosen to be covariant over `T`. This makes it -/// possible to use `NonNull` when building covariant types, but introduces the -/// risk of unsoundness if used in a type that shouldn't actually be covariant. -/// (The opposite choice was made for `*mut T` even though technically the unsoundness -/// could only be caused by calling unsafe functions.) +/// Unlike `*mut T`, `NonNull` is covariant over `T`. This is usually the correct +/// choice for most data structures and safe abstractions, such as `Box`, `Rc`, `Arc`, `Vec`, +/// and `LinkedList`. /// -/// Covariance is correct for most safe abstractions, such as `Box`, `Rc`, `Arc`, `Vec`, -/// and `LinkedList`. This is the case because they provide a public API that follows the -/// normal shared XOR mutable rules of Rust. +/// In rare cases, if your type exposes a way to mutate the value of `T` through a `NonNull`, +/// and you need to prevent unsoundness from variance (for example, if `T` could be a reference +/// with a shorter lifetime), you should add a field to make your type invariant, such as +/// `PhantomData>` or `PhantomData<&'a mut T>`. /// -/// If your type cannot safely be covariant, you must ensure it contains some -/// additional field to provide invariance. Often this field will be a [`PhantomData`] -/// type like `PhantomData>` or `PhantomData<&'a mut T>`. +/// Example of a type that must be invariant: +/// ```rust +/// use std::cell::Cell; +/// use std::marker::PhantomData; +/// struct Invariant { +/// ptr: std::ptr::NonNull, +/// _invariant: PhantomData>, +/// } +/// ``` /// /// Notice that `NonNull` has a `From` instance for `&T`. However, this does /// not change the fact that mutating through a (pointer derived from a) shared @@ -73,7 +78,7 @@ use crate::{fmt, hash, intrinsics, mem, ptr}; #[rustc_layout_scalar_valid_range_start(1)] #[rustc_nonnull_optimization_guaranteed] #[rustc_diagnostic_item = "NonNull"] -pub struct NonNull { +pub struct NonNull { // Remember to use `.as_ptr()` instead of `.pointer`, as field projecting to // this is banned by . pointer: *const T, @@ -82,12 +87,12 @@ pub struct NonNull { /// `NonNull` pointers are not `Send` because the data they reference may be aliased. // N.B., this impl is unnecessary, but should provide better error messages. #[stable(feature = "nonnull", since = "1.25.0")] -impl !Send for NonNull {} +impl !Send for NonNull {} /// `NonNull` pointers are not `Sync` because the data they reference may be aliased. // N.B., this impl is unnecessary, but should provide better error messages. #[stable(feature = "nonnull", since = "1.25.0")] -impl !Sync for NonNull {} +impl !Sync for NonNull {} impl NonNull { /// Creates a pointer with the given address and no [provenance][crate::ptr#provenance]. @@ -201,7 +206,7 @@ impl NonNull { } } -impl NonNull { +impl NonNull { /// Creates a new `NonNull`. /// /// # Safety @@ -622,11 +627,12 @@ impl NonNull { #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[stable(feature = "non_null_convenience", since = "1.80.0")] #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")] - #[requires( - (self.as_ptr().addr() as isize).checked_add(count).is_some() && - core::ub_checks::same_allocation(self.as_ptr(), self.as_ptr().wrapping_byte_offset(count)) - )] - #[ensures(|result: &Self| result.as_ptr() == self.as_ptr().wrapping_byte_offset(count))] + // TODO: requires https://github.com/model-checking/kani/pull/4193 + // #[requires( + // (self.as_ptr().addr() as isize).checked_add(count).is_some() && + // core::ub_checks::same_allocation(self.as_ptr(), self.as_ptr().wrapping_byte_offset(count)) + // )] + // #[ensures(|result: &Self| result.as_ptr() == self.as_ptr().wrapping_byte_offset(count))] pub const unsafe fn byte_offset(self, count: isize) -> Self { // SAFETY: the caller must uphold the safety contract for `offset` and `byte_offset` has // the same safety contract. @@ -708,14 +714,15 @@ impl NonNull { #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[stable(feature = "non_null_convenience", since = "1.80.0")] #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")] - #[requires( - count == 0 || ( - (core::mem::size_of_val_raw(self.as_ptr() as * const _) > 0) && - (count <= (isize::MAX as usize)) && - (self.as_ptr().addr().checked_add(count).is_some()) && - (core::ub_checks::same_allocation(self.as_ptr(), self.as_ptr().wrapping_byte_add(count))) - ) - )] + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // #[requires( + // count == 0 || ( + // (core::mem::size_of_val_raw(self.as_ptr() as * const _) > 0) && + // (count <= (isize::MAX as usize)) && + // (self.as_ptr().addr().checked_add(count).is_some()) && + // (core::ub_checks::same_allocation(self.as_ptr(), self.as_ptr().wrapping_byte_add(count))) + // ) + // )] pub const unsafe fn byte_add(self, count: usize) -> Self { // SAFETY: the caller must uphold the safety contract for `add` and `byte_add` has the same // safety contract. @@ -804,14 +811,15 @@ impl NonNull { #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[stable(feature = "non_null_convenience", since = "1.80.0")] #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")] - #[requires( - count == 0 || ( - (core::mem::size_of_val_raw(self.as_ptr() as * const _) > 0) && - (count <= (isize::MAX as usize)) && - (self.as_ptr().addr().checked_sub(count).is_some()) && - (core::ub_checks::same_allocation(self.as_ptr(), self.as_ptr().wrapping_byte_sub(count))) - ) - )] + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // #[requires( + // count == 0 || ( + // (core::mem::size_of_val_raw(self.as_ptr() as * const _) > 0) && + // (count <= (isize::MAX as usize)) && + // (self.as_ptr().addr().checked_sub(count).is_some()) && + // (core::ub_checks::same_allocation(self.as_ptr(), self.as_ptr().wrapping_byte_sub(count))) + // ) + // )] pub const unsafe fn byte_sub(self, count: usize) -> Self { // SAFETY: the caller must uphold the safety contract for `sub` and `byte_sub` has the same // safety contract. @@ -1802,7 +1810,7 @@ impl NonNull<[T]> { } #[stable(feature = "nonnull", since = "1.25.0")] -impl Clone for NonNull { +impl Clone for NonNull { #[inline(always)] fn clone(&self) -> Self { *self @@ -1810,39 +1818,39 @@ impl Clone for NonNull { } #[stable(feature = "nonnull", since = "1.25.0")] -impl Copy for NonNull {} +impl Copy for NonNull {} #[unstable(feature = "coerce_unsized", issue = "18598")] -impl CoerceUnsized> for NonNull where T: Unsize {} +impl CoerceUnsized> for NonNull where T: Unsize {} #[unstable(feature = "dispatch_from_dyn", issue = "none")] -impl DispatchFromDyn> for NonNull where T: Unsize {} +impl DispatchFromDyn> for NonNull where T: Unsize {} #[stable(feature = "pin", since = "1.33.0")] -unsafe impl PinCoerceUnsized for NonNull {} +unsafe impl PinCoerceUnsized for NonNull {} #[unstable(feature = "pointer_like_trait", issue = "none")] impl core::marker::PointerLike for NonNull {} #[stable(feature = "nonnull", since = "1.25.0")] -impl fmt::Debug for NonNull { +impl fmt::Debug for NonNull { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Pointer::fmt(&self.as_ptr(), f) } } #[stable(feature = "nonnull", since = "1.25.0")] -impl fmt::Pointer for NonNull { +impl fmt::Pointer for NonNull { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Pointer::fmt(&self.as_ptr(), f) } } #[stable(feature = "nonnull", since = "1.25.0")] -impl Eq for NonNull {} +impl Eq for NonNull {} #[stable(feature = "nonnull", since = "1.25.0")] -impl PartialEq for NonNull { +impl PartialEq for NonNull { #[inline] #[allow(ambiguous_wide_pointer_comparisons)] fn eq(&self, other: &Self) -> bool { @@ -1851,7 +1859,7 @@ impl PartialEq for NonNull { } #[stable(feature = "nonnull", since = "1.25.0")] -impl Ord for NonNull { +impl Ord for NonNull { #[inline] #[allow(ambiguous_wide_pointer_comparisons)] fn cmp(&self, other: &Self) -> Ordering { @@ -1860,7 +1868,7 @@ impl Ord for NonNull { } #[stable(feature = "nonnull", since = "1.25.0")] -impl PartialOrd for NonNull { +impl PartialOrd for NonNull { #[inline] #[allow(ambiguous_wide_pointer_comparisons)] fn partial_cmp(&self, other: &Self) -> Option { @@ -1869,7 +1877,7 @@ impl PartialOrd for NonNull { } #[stable(feature = "nonnull", since = "1.25.0")] -impl hash::Hash for NonNull { +impl hash::Hash for NonNull { #[inline] fn hash(&self, state: &mut H) { self.as_ptr().hash(state) @@ -1877,7 +1885,7 @@ impl hash::Hash for NonNull { } #[unstable(feature = "ptr_internals", issue = "none")] -impl From> for NonNull { +impl From> for NonNull { #[inline] fn from(unique: Unique) -> Self { unique.as_non_null_ptr() @@ -1885,7 +1893,7 @@ impl From> for NonNull { } #[stable(feature = "nonnull", since = "1.25.0")] -impl From<&mut T> for NonNull { +impl From<&mut T> for NonNull { /// Converts a `&mut T` to a `NonNull`. /// /// This conversion is safe and infallible since references cannot be null. @@ -1896,7 +1904,7 @@ impl From<&mut T> for NonNull { } #[stable(feature = "nonnull", since = "1.25.0")] -impl From<&T> for NonNull { +impl From<&T> for NonNull { /// Converts a `&T` to a `NonNull`. /// /// This conversion is safe and infallible since references cannot be null. @@ -2460,17 +2468,18 @@ mod verify { let result = non_null_ptr.is_aligned_to(align); } - #[kani::proof_for_contract(NonNull::byte_sub)] - pub fn non_null_check_byte_sub() { - const SIZE: usize = mem::size_of::() * 10000; - let mut generator = PointerGenerator::::new(); - let count: usize = kani::any(); - let raw_ptr: *mut i32 = generator.any_in_bounds().ptr as *mut i32; - let ptr = NonNull::new(raw_ptr).unwrap(); - unsafe { - let result = ptr.byte_sub(count); - } - } + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // #[kani::proof_for_contract(NonNull::byte_sub)] + // pub fn non_null_check_byte_sub() { + // const SIZE: usize = mem::size_of::() * 10000; + // let mut generator = PointerGenerator::::new(); + // let count: usize = kani::any(); + // let raw_ptr: *mut i32 = generator.any_in_bounds().ptr as *mut i32; + // let ptr = NonNull::new(raw_ptr).unwrap(); + // unsafe { + // let result = ptr.byte_sub(count); + // } + // } #[kani::proof_for_contract(NonNull::offset)] pub fn non_null_check_offset() { @@ -2841,43 +2850,46 @@ mod verify { generate_write_bytes_harness!(u128, non_null_check_write_bytes_u128); generate_write_bytes_harness!(usize, non_null_check_write_bytes_usize); - #[kani::proof_for_contract(NonNull::byte_add)] - pub fn non_null_byte_add_proof() { - // Make size as 1000 to ensure the array is large enough to cover various senarios - // while maintaining a reasonable proof runtime - const ARR_SIZE: usize = mem::size_of::() * 1000; - let mut generator = PointerGenerator::::new(); - - let count: usize = kani::any(); - let raw_ptr: *mut i32 = generator.any_in_bounds().ptr as *mut i32; - - unsafe { - let ptr = NonNull::new(raw_ptr).unwrap(); - let result = ptr.byte_add(count); - } - } - - #[kani::proof_for_contract(NonNull::byte_add)] - pub fn non_null_byte_add_dangling_proof() { - let ptr = NonNull::::dangling(); - unsafe { - let _ = ptr.byte_add(0); - } - } - - #[kani::proof_for_contract(NonNull::byte_offset)] - pub fn non_null_byte_offset_proof() { - const ARR_SIZE: usize = mem::size_of::() * 1000; - let mut generator = PointerGenerator::::new(); + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // #[kani::proof_for_contract(NonNull::byte_add)] + // pub fn non_null_byte_add_proof() { + // // Make size as 1000 to ensure the array is large enough to cover various senarios + // // while maintaining a reasonable proof runtime + // const ARR_SIZE: usize = mem::size_of::() * 1000; + // let mut generator = PointerGenerator::::new(); + // + // let count: usize = kani::any(); + // let raw_ptr: *mut i32 = generator.any_in_bounds().ptr as *mut i32; + // + // unsafe { + // let ptr = NonNull::new(raw_ptr).unwrap(); + // let result = ptr.byte_add(count); + // } + // } - let count: isize = kani::any(); - let raw_ptr: *mut i32 = generator.any_in_bounds().ptr as *mut i32; + // TODO: we can no longer use size_of_val_raw with the Sized hierarchy + // #[kani::proof_for_contract(NonNull::byte_add)] + // pub fn non_null_byte_add_dangling_proof() { + // let ptr = NonNull::::dangling(); + // unsafe { + // let _ = ptr.byte_add(0); + // } + // } - unsafe { - let ptr = NonNull::new(raw_ptr).unwrap(); - let result = ptr.byte_offset(count); - } - } + // TODO: requires https://github.com/model-checking/kani/pull/4193 + // #[kani::proof_for_contract(NonNull::byte_offset)] + // pub fn non_null_byte_offset_proof() { + // const ARR_SIZE: usize = mem::size_of::() * 1000; + // let mut generator = PointerGenerator::::new(); + // + // let count: isize = kani::any(); + // let raw_ptr: *mut i32 = generator.any_in_bounds().ptr as *mut i32; + // + // unsafe { + // let ptr = NonNull::new(raw_ptr).unwrap(); + // let result = ptr.byte_offset(count); + // } + // } #[kani::proof_for_contract(NonNull::byte_offset_from)] pub fn non_null_byte_offset_from_proof() { diff --git a/library/core/src/ptr/unique.rs b/library/core/src/ptr/unique.rs index 9c67975ce5835..006b7e62ae95d 100644 --- a/library/core/src/ptr/unique.rs +++ b/library/core/src/ptr/unique.rs @@ -3,7 +3,7 @@ use safety::{ensures, requires}; use crate::fmt; #[cfg(kani)] use crate::kani; -use crate::marker::{PhantomData, Unsize}; +use crate::marker::{PhantomData, PointeeSized, Unsize}; use crate::ops::{CoerceUnsized, DispatchFromDyn}; use crate::pin::PinCoerceUnsized; use crate::ptr::NonNull; @@ -38,7 +38,7 @@ use crate::ptr::NonNull; #[repr(transparent)] // Lang item used experimentally by Miri to define the semantics of `Unique`. #[lang = "ptr_unique"] -pub struct Unique { +pub struct Unique { pointer: NonNull, // NOTE: this marker has no consequences for variance, but is necessary // for dropck to understand that we logically own a `T`. @@ -53,14 +53,14 @@ pub struct Unique { /// unenforced by the type system; the abstraction using the /// `Unique` must enforce it. #[unstable(feature = "ptr_internals", issue = "none")] -unsafe impl Send for Unique {} +unsafe impl Send for Unique {} /// `Unique` pointers are `Sync` if `T` is `Sync` because the data they /// reference is unaliased. Note that this aliasing invariant is /// unenforced by the type system; the abstraction using the /// `Unique` must enforce it. #[unstable(feature = "ptr_internals", issue = "none")] -unsafe impl Sync for Unique {} +unsafe impl Sync for Unique {} #[unstable(feature = "ptr_internals", issue = "none")] impl Unique { @@ -82,7 +82,7 @@ impl Unique { } #[unstable(feature = "ptr_internals", issue = "none")] -impl Unique { +impl Unique { /// Creates a new `Unique`. /// /// # Safety @@ -167,7 +167,7 @@ impl Unique { } #[unstable(feature = "ptr_internals", issue = "none")] -impl Clone for Unique { +impl Clone for Unique { #[inline] fn clone(&self) -> Self { *self @@ -175,33 +175,33 @@ impl Clone for Unique { } #[unstable(feature = "ptr_internals", issue = "none")] -impl Copy for Unique {} +impl Copy for Unique {} #[unstable(feature = "ptr_internals", issue = "none")] -impl CoerceUnsized> for Unique where T: Unsize {} +impl CoerceUnsized> for Unique where T: Unsize {} #[unstable(feature = "ptr_internals", issue = "none")] -impl DispatchFromDyn> for Unique where T: Unsize {} +impl DispatchFromDyn> for Unique where T: Unsize {} #[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")] -unsafe impl PinCoerceUnsized for Unique {} +unsafe impl PinCoerceUnsized for Unique {} #[unstable(feature = "ptr_internals", issue = "none")] -impl fmt::Debug for Unique { +impl fmt::Debug for Unique { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Pointer::fmt(&self.as_ptr(), f) } } #[unstable(feature = "ptr_internals", issue = "none")] -impl fmt::Pointer for Unique { +impl fmt::Pointer for Unique { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Pointer::fmt(&self.as_ptr(), f) } } #[unstable(feature = "ptr_internals", issue = "none")] -impl From<&mut T> for Unique { +impl From<&mut T> for Unique { /// Converts a `&mut T` to a `Unique`. /// /// This conversion is infallible since references cannot be null. @@ -212,7 +212,7 @@ impl From<&mut T> for Unique { } #[unstable(feature = "ptr_internals", issue = "none")] -impl From> for Unique { +impl From> for Unique { /// Converts a `NonNull` to a `Unique`. /// /// This conversion is infallible since `NonNull` cannot be null. diff --git a/library/core/src/tuple.rs b/library/core/src/tuple.rs index 02eb805ece121..3ff55792431b8 100644 --- a/library/core/src/tuple.rs +++ b/library/core/src/tuple.rs @@ -1,7 +1,7 @@ // See core/src/primitive_docs.rs for documentation. use crate::cmp::Ordering::{self, *}; -use crate::marker::{ConstParamTy_, StructuralPartialEq, UnsizedConstParamTy}; +use crate::marker::{ConstParamTy_, PointeeSized, StructuralPartialEq, UnsizedConstParamTy}; use crate::ops::ControlFlow::{self, Break, Continue}; // Recursive macro for implementing n-ary tuple functions and operations @@ -25,7 +25,7 @@ macro_rules! tuple_impls { #[stable(feature = "rust1", since = "1.0.0")] impl<$($T: PartialEq),+> PartialEq for ($($T,)+) where - last_type!($($T,)+): ?Sized + last_type!($($T,)+): PointeeSized { #[inline] fn eq(&self, other: &($($T,)+)) -> bool { @@ -43,7 +43,7 @@ macro_rules! tuple_impls { #[stable(feature = "rust1", since = "1.0.0")] impl<$($T: Eq),+> Eq for ($($T,)+) where - last_type!($($T,)+): ?Sized + last_type!($($T,)+): PointeeSized {} } @@ -73,7 +73,7 @@ macro_rules! tuple_impls { #[stable(feature = "rust1", since = "1.0.0")] impl<$($T: PartialOrd),+> PartialOrd for ($($T,)+) where - last_type!($($T,)+): ?Sized + last_type!($($T,)+): PointeeSized { #[inline] fn partial_cmp(&self, other: &($($T,)+)) -> Option { @@ -119,7 +119,7 @@ macro_rules! tuple_impls { #[stable(feature = "rust1", since = "1.0.0")] impl<$($T: Ord),+> Ord for ($($T,)+) where - last_type!($($T,)+): ?Sized + last_type!($($T,)+): PointeeSized { #[inline] fn cmp(&self, other: &($($T,)+)) -> Ordering { diff --git a/library/rtstartup/rsbegin.rs b/library/rtstartup/rsbegin.rs index 67b09599d9d2b..0e915b92697f1 100644 --- a/library/rtstartup/rsbegin.rs +++ b/library/rtstartup/rsbegin.rs @@ -21,8 +21,21 @@ #![allow(internal_features)] #![warn(unreachable_pub)] +#[cfg(not(bootstrap))] +#[lang = "pointee_sized"] +pub trait PointeeSized {} + +#[cfg(not(bootstrap))] +#[lang = "meta_sized"] +pub trait MetaSized: PointeeSized {} + +#[cfg(bootstrap)] +#[lang = "sized"] +pub trait Sized {} +#[cfg(not(bootstrap))] #[lang = "sized"] -trait Sized {} +pub trait Sized: MetaSized {} + #[lang = "sync"] auto trait Sync {} #[lang = "copy"] @@ -30,14 +43,25 @@ trait Copy {} #[lang = "freeze"] auto trait Freeze {} +#[cfg(bootstrap)] impl Copy for *mut T {} +#[cfg(not(bootstrap))] +impl Copy for *mut T {} +#[cfg(bootstrap)] #[lang = "drop_in_place"] #[inline] #[allow(unconditional_recursion)] pub unsafe fn drop_in_place(to_drop: *mut T) { drop_in_place(to_drop); } +#[cfg(not(bootstrap))] +#[lang = "drop_in_place"] +#[inline] +#[allow(unconditional_recursion)] +pub unsafe fn drop_in_place(to_drop: *mut T) { + drop_in_place(to_drop); +} // Frame unwind info registration // diff --git a/library/rtstartup/rsend.rs b/library/rtstartup/rsend.rs index a6f7d103356bf..75f9212695d1f 100644 --- a/library/rtstartup/rsend.rs +++ b/library/rtstartup/rsend.rs @@ -8,8 +8,21 @@ #![allow(internal_features)] #![warn(unreachable_pub)] +#[cfg(not(bootstrap))] +#[lang = "pointee_sized"] +pub trait PointeeSized {} + +#[cfg(not(bootstrap))] +#[lang = "meta_sized"] +pub trait MetaSized: PointeeSized {} + +#[cfg(bootstrap)] +#[lang = "sized"] +pub trait Sized {} +#[cfg(not(bootstrap))] #[lang = "sized"] -trait Sized {} +pub trait Sized: MetaSized {} + #[lang = "sync"] trait Sync {} impl Sync for T {} @@ -18,14 +31,25 @@ trait Copy {} #[lang = "freeze"] auto trait Freeze {} +#[cfg(bootstrap)] impl Copy for *mut T {} +#[cfg(not(bootstrap))] +impl Copy for *mut T {} +#[cfg(bootstrap)] #[lang = "drop_in_place"] #[inline] #[allow(unconditional_recursion)] pub unsafe fn drop_in_place(to_drop: *mut T) { drop_in_place(to_drop); } +#[cfg(not(bootstrap))] +#[lang = "drop_in_place"] +#[inline] +#[allow(unconditional_recursion)] +pub unsafe fn drop_in_place(to_drop: *mut T) { + drop_in_place(to_drop); +} #[cfg(all(target_os = "windows", target_arch = "x86", target_env = "gnu"))] pub mod eh_frames { diff --git a/rust-toolchain.toml b/rust-toolchain.toml index ea21ca3741881..4de3f345b8329 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -2,5 +2,5 @@ # standard library we currently track. [toolchain] -channel = "nightly-2025-06-17" +channel = "nightly-2025-06-18" components = ["llvm-tools-preview", "rustc-dev", "rust-src", "rustfmt"] diff --git a/tool_config/kani-version.toml b/tool_config/kani-version.toml index 73e0f3eda317c..05d03a4d93d80 100644 --- a/tool_config/kani-version.toml +++ b/tool_config/kani-version.toml @@ -2,4 +2,4 @@ # incompatible with the verify-std repo. [kani] -commit = "b64e59de669cd77b625cc8c0b9a94f29117a0ff7" +commit = "0024103719917fa097b0beaab089671eb83572e8"