From 919de701b02297ff6b782bcf592b39964ec80bcb Mon Sep 17 00:00:00 2001
From: Ralf Jung <post@ralfj.de>
Date: Sun, 3 Nov 2024 19:58:57 +0100
Subject: [PATCH] add const_eval_select macro to reduce redundancy

also move internal const_panic helpers to a better location
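
This lets a call site spell out both arms inline instead of defining separate
compile-time and run-time helper functions and passing them to the
const_eval_select intrinsic by hand. As a sketch of the before/after shape
(the `ub_checks_enabled` wrapper is made up for illustration; the real call
sites are in the diff below):

    // Before: two named helpers plus an explicit intrinsic call.
    const fn ub_checks_enabled() -> bool {
        const fn compiletime() -> bool { false }
        #[inline]
        fn runtime() -> bool { !cfg!(miri) }
        const_eval_select((), compiletime, runtime)
    }

    // After: both arms written inline in a single macro invocation.
    const fn ub_checks_enabled() -> bool {
        const_eval_select!(
            @capture { } -> bool:
            if const { false } else { !cfg!(miri) }
        )
    }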
---
 core/src/char/methods.rs    |   2 +-
 core/src/ffi/c_str.rs       |  99 ++++++++++++-----------
 core/src/intrinsics.rs      |  83 +++++++++++++++++---
 core/src/macros/mod.rs      |  61 ---------------
 core/src/num/f128.rs        |   2 +-
 core/src/num/f16.rs         |   2 +-
 core/src/num/f32.rs         |   2 +-
 core/src/num/f64.rs         |   2 +-
 core/src/num/mod.rs         |   2 +-
 core/src/panic.rs           |  56 +++++++++++++
 core/src/panicking.rs       |  62 +++++++--------
 core/src/ptr/const_ptr.rs   | 128 ++++++++++++++----------------
 core/src/ptr/mut_ptr.rs     |  81 +++++++++----------
 core/src/slice/ascii.rs     | 152 ++++++++++++++++++------------------
 core/src/slice/index.rs     |   2 +-
 core/src/slice/memchr.rs    |  98 ++++++++++++-----------
 core/src/str/validations.rs |  17 ++--
 core/src/ub_checks.rs       |  83 +++++++++-----------
 18 files changed, 478 insertions(+), 456 deletions(-)

diff --git a/core/src/char/methods.rs b/core/src/char/methods.rs
index 701e34b135e23..3915afb49d641 100644
--- a/core/src/char/methods.rs
+++ b/core/src/char/methods.rs
@@ -1,7 +1,7 @@
 //! impl char {}
 
 use super::*;
-use crate::macros::const_panic;
+use crate::panic::const_panic;
 use crate::slice;
 use crate::str::from_utf8_unchecked_mut;
 use crate::unicode::printable::is_printable;
diff --git a/core/src/ffi/c_str.rs b/core/src/ffi/c_str.rs
index 4ea5cbf862645..85571222b5cd2 100644
--- a/core/src/ffi/c_str.rs
+++ b/core/src/ffi/c_str.rs
@@ -3,11 +3,12 @@
 use crate::cmp::Ordering;
 use crate::error::Error;
 use crate::ffi::c_char;
+use crate::intrinsics::const_eval_select;
 use crate::iter::FusedIterator;
 use crate::marker::PhantomData;
 use crate::ptr::NonNull;
 use crate::slice::memchr;
-use crate::{fmt, intrinsics, ops, slice, str};
+use crate::{fmt, ops, slice, str};
 
 // FIXME: because this is doc(inline)d, we *have* to use intra-doc links because the actual link
 //   depends on where the item is being documented. however, since this is libcore, we can't
@@ -411,37 +412,35 @@ impl CStr {
     #[rustc_const_stable(feature = "const_cstr_unchecked", since = "1.59.0")]
     #[rustc_allow_const_fn_unstable(const_eval_select)]
     pub const unsafe fn from_bytes_with_nul_unchecked(bytes: &[u8]) -> &CStr {
-        #[inline]
-        fn rt_impl(bytes: &[u8]) -> &CStr {
-            // Chance at catching some UB at runtime with debug builds.
-            debug_assert!(!bytes.is_empty() && bytes[bytes.len() - 1] == 0);
-
-            // SAFETY: Casting to CStr is safe because its internal representation
-            // is a [u8] too (safe only inside std).
-            // Dereferencing the obtained pointer is safe because it comes from a
-            // reference. Making a reference is then safe because its lifetime
-            // is bound by the lifetime of the given `bytes`.
-            unsafe { &*(bytes as *const [u8] as *const CStr) }
-        }
-
-        const fn const_impl(bytes: &[u8]) -> &CStr {
-            // Saturating so that an empty slice panics in the assert with a good
-            // message, not here due to underflow.
-            let mut i = bytes.len().saturating_sub(1);
-            assert!(!bytes.is_empty() && bytes[i] == 0, "input was not nul-terminated");
-
-            // Ending nul byte exists, skip to the rest.
-            while i != 0 {
-                i -= 1;
-                let byte = bytes[i];
-                assert!(byte != 0, "input contained interior nul");
+        const_eval_select!(
+            @capture { bytes: &[u8] } -> &CStr:
+            if const {
+                // Saturating so that an empty slice panics in the assert with a good
+                // message, not here due to underflow.
+                let mut i = bytes.len().saturating_sub(1);
+                assert!(!bytes.is_empty() && bytes[i] == 0, "input was not nul-terminated");
+
+                // Ending nul byte exists, skip to the rest.
+                while i != 0 {
+                    i -= 1;
+                    let byte = bytes[i];
+                    assert!(byte != 0, "input contained interior nul");
+                }
+
+                // SAFETY: See runtime cast comment below.
+                unsafe { &*(bytes as *const [u8] as *const CStr) }
+            } else {
+                // Chance at catching some UB at runtime with debug builds.
+                debug_assert!(!bytes.is_empty() && bytes[bytes.len() - 1] == 0);
+
+                // SAFETY: Casting to CStr is safe because its internal representation
+                // is a [u8] too (safe only inside std).
+                // Dereferencing the obtained pointer is safe because it comes from a
+                // reference. Making a reference is then safe because its lifetime
+                // is bound by the lifetime of the given `bytes`.
+                unsafe { &*(bytes as *const [u8] as *const CStr) }
             }
-
-            // SAFETY: See `rt_impl` cast.
-            unsafe { &*(bytes as *const [u8] as *const CStr) }
-        }
-
-        intrinsics::const_eval_select((bytes,), const_impl, rt_impl)
+        )
     }
 
     /// Returns the inner pointer to this C string.
@@ -735,29 +734,27 @@ impl AsRef<CStr> for CStr {
 #[cfg_attr(bootstrap, rustc_const_stable(feature = "const_cstr_from_ptr", since = "1.81.0"))]
 #[rustc_allow_const_fn_unstable(const_eval_select)]
 const unsafe fn strlen(ptr: *const c_char) -> usize {
-    const fn strlen_ct(s: *const c_char) -> usize {
-        let mut len = 0;
-
-        // SAFETY: Outer caller has provided a pointer to a valid C string.
-        while unsafe { *s.add(len) } != 0 {
-            len += 1;
-        }
+    const_eval_select!(
+        @capture { s: *const c_char = ptr } -> usize:
+        if const {
+            let mut len = 0;
+
+            // SAFETY: Outer caller has provided a pointer to a valid C string.
+            while unsafe { *s.add(len) } != 0 {
+                len += 1;
+            }
 
-        len
-    }
+            len
+        } else {
+            extern "C" {
+                /// Provided by libc or compiler_builtins.
+                fn strlen(s: *const c_char) -> usize;
+            }
 
-    #[inline]
-    fn strlen_rt(s: *const c_char) -> usize {
-        extern "C" {
-            /// Provided by libc or compiler_builtins.
-            fn strlen(s: *const c_char) -> usize;
+            // SAFETY: Outer caller has provided a pointer to a valid C string.
+            unsafe { strlen(s) }
         }
-
-        // SAFETY: Outer caller has provided a pointer to a valid C string.
-        unsafe { strlen(s) }
-    }
-
-    intrinsics::const_eval_select((ptr,), strlen_ct, strlen_rt)
+    )
 }
 
 /// An iterator over the bytes of a [`CStr`], without the nul terminator.
diff --git a/core/src/intrinsics.rs b/core/src/intrinsics.rs
index fc09da7bcbc65..3e5c085923070 100644
--- a/core/src/intrinsics.rs
+++ b/core/src/intrinsics.rs
@@ -2788,6 +2788,68 @@ where
     unreachable!()
 }
 
+/// A macro to make it easier to invoke [`const_eval_select()`]. Use as follows:
+/// ```rust,ignore (just a macro example)
+/// const_eval_select!(
+///     @capture { arg1: i32 = some_expr, arg2: T = other_expr } -> U:
+///     if const #[attributes_for_const_arm] {
+///         // Compile-time code goes here.
+///     } else #[attributes_for_runtime_arm] {
+///         // Run-time code goes here.
+///     }
+/// )
+/// ```
+/// The `@capture` block declares which surrounding variables / expressions can be
+/// used inside the `if const`.
+/// Note that the two arms of this `if` each become their own function, which is why the
+/// macro supports setting attributes for those functions. The runtime function is always
+/// marked as `#[inline]`.
+///
+/// See [`const_eval_select()`] for the rules and requirements around that intrinsic.
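+///
+/// When every captured value is just the surrounding variable of the same name, the
+/// `= expr` parts can be omitted entirely. A hypothetical call (not one of the call
+/// sites updated in this patch) would look like:
+/// ```rust,ignore (just a macro example)
+/// const_eval_select!(
+///     @capture { ptr: *const u8, len: usize } -> bool:
+///     if const {
+///         // Conservative compile-time answer.
+///         true
+///     } else {
+///         // Cheap run-time check.
+///         len == 0 || !ptr.is_null()
+///     }
+/// )
+/// ```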
+pub(crate) macro const_eval_select {
+    (
+        @capture { $($arg:ident : $ty:ty = $val:expr),* $(,)? } $( -> $ret:ty )? :
+        if const
+            $(#[$compiletime_attr:meta])* $compiletime:block
+        else
+            $(#[$runtime_attr:meta])* $runtime:block
+    ) => {{
+        #[inline] // avoid the overhead of an extra fn call
+        $(#[$runtime_attr])*
+        fn runtime($($arg: $ty),*) $( -> $ret )? {
+            $runtime
+        }
+
+        #[inline] // prevent codegen on this function
+        $(#[$compiletime_attr])*
+        const fn compiletime($($arg: $ty),*) $( -> $ret )? {
+            // Don't warn if one of the arguments is unused.
+            $(let _ = $arg;)*
+
+            $compiletime
+        }
+
+        const_eval_select(($($val,)*), compiletime, runtime)
+    }},
+    // We support omitting the `val` expressions for *all* arguments
+    // (but not just for *some* of them; that would be too tricky).
+    (
+        @capture { $($arg:ident : $ty:ty),* $(,)? } $( -> $ret:ty )? :
+        if const
+            $(#[$compiletime_attr:meta])* $compiletime:block
+        else
+            $(#[$runtime_attr:meta])* $runtime:block
+    ) => {
+        $crate::intrinsics::const_eval_select!(
+            @capture { $($arg : $ty = $arg),* } $(-> $ret)? :
+            if const
+                $(#[$compiletime_attr])* $compiletime
+            else
+                $(#[$runtime_attr])* $runtime
+        )
+    },
+}
+
 /// Returns whether the argument's value is statically known at
 /// compile-time.
 ///
@@ -2830,7 +2892,7 @@ where
 /// # Stability concerns
 ///
 /// While it is safe to call, this intrinsic may behave differently in
-/// a `const` context than otherwise. See the [`const_eval_select`]
+/// a `const` context than otherwise. See the [`const_eval_select()`]
 /// documentation for an explanation of the issues this can cause. Unlike
 /// `const_eval_select`, this intrinsic isn't guaranteed to behave
 /// deterministically even in a `const` context.
@@ -3734,14 +3796,15 @@ pub(crate) const fn miri_promise_symbolic_alignment(ptr: *const (), align: usize
         fn miri_promise_symbolic_alignment(ptr: *const (), align: usize);
     }
 
-    fn runtime(ptr: *const (), align: usize) {
-        // SAFETY: this call is always safe.
-        unsafe {
-            miri_promise_symbolic_alignment(ptr, align);
+    const_eval_select!(
+        @capture { ptr: *const (), align: usize }:
+        if const {
+            // Do nothing.
+        } else {
+            // SAFETY: this call is always safe.
+            unsafe {
+                miri_promise_symbolic_alignment(ptr, align);
+            }
         }
-    }
-
-    const fn compiletime(_ptr: *const (), _align: usize) {}
-
-    const_eval_select((ptr, align), compiletime, runtime);
+    )
 }
diff --git a/core/src/macros/mod.rs b/core/src/macros/mod.rs
index 9a91ff82acd7c..771c2d31b60e0 100644
--- a/core/src/macros/mod.rs
+++ b/core/src/macros/mod.rs
@@ -12,54 +12,6 @@ macro_rules! panic {
     };
 }
 
-/// Helper macro for panicking in a `const fn`.
-/// Invoke as:
-/// ```rust,ignore (just an example)
-/// core::macros::const_panic!("boring message", "flavored message {a} {b:?}", a: u32 = foo.len(), b: Something = bar);
-/// ```
-/// where the first message will be printed in const-eval,
-/// and the second message will be printed at runtime.
-// All uses of this macro are FIXME(const-hack).
-#[unstable(feature = "panic_internals", issue = "none")]
-#[doc(hidden)]
-pub macro const_panic {
-    ($const_msg:literal, $runtime_msg:literal, $($arg:ident : $ty:ty = $val:expr),* $(,)?) => {{
-        #[inline]
-        #[track_caller]
-        fn runtime($($arg: $ty),*) -> ! {
-            $crate::panic!($runtime_msg);
-        }
-
-        #[inline]
-        #[track_caller]
-        const fn compiletime($(_: $ty),*) -> ! {
-            $crate::panic!($const_msg);
-        }
-
-        // Wrap call to `const_eval_select` in a function so that we can
-        // add the `rustc_allow_const_fn_unstable`. This is okay to do
-        // because both variants will panic, just with different messages.
-        #[rustc_allow_const_fn_unstable(const_eval_select)]
-        #[inline(always)]
-        #[track_caller]
-        #[cfg_attr(bootstrap, rustc_const_stable(feature = "const_panic", since = "CURRENT_RUSTC_VERSION"))]
-        const fn do_panic($($arg: $ty),*) -> ! {
-            $crate::intrinsics::const_eval_select(($($arg),* ,), compiletime, runtime)
-        }
-
-        do_panic($($val),*)
-    }},
-    // We support leaving away the `val` expressions for *all* arguments
-    // (but not for *some* arguments, that's too tricky).
-    ($const_msg:literal, $runtime_msg:literal, $($arg:ident : $ty:ty),* $(,)?) => {
-        $crate::macros::const_panic!(
-            $const_msg,
-            $runtime_msg,
-            $($arg: $ty = $arg),*
-        )
-    },
-}
-
 /// Asserts that two expressions are equal to each other (using [`PartialEq`]).
 ///
 /// Assertions are always checked in both debug and release builds, and cannot
@@ -244,19 +196,6 @@ pub macro assert_matches {
     },
 }
 
-/// A version of `assert` that prints a non-formatting message in const contexts.
-///
-/// See [`const_panic!`].
-#[unstable(feature = "panic_internals", issue = "none")]
-#[doc(hidden)]
-pub macro const_assert {
-    ($condition: expr, $const_msg:literal, $runtime_msg:literal, $($arg:tt)*) => {{
-        if !$crate::intrinsics::likely($condition) {
-            $crate::macros::const_panic!($const_msg, $runtime_msg, $($arg)*)
-        }
-    }}
-}
-
 /// A macro for defining `#[cfg]` match-like statements.
 ///
 /// It is similar to the `if/elif` C preprocessor macro by allowing definition of a cascade of
diff --git a/core/src/num/f128.rs b/core/src/num/f128.rs
index 7709e7de01b0b..0484611958d87 100644
--- a/core/src/num/f128.rs
+++ b/core/src/num/f128.rs
@@ -14,9 +14,9 @@
 use crate::convert::FloatToInt;
 #[cfg(not(test))]
 use crate::intrinsics;
-use crate::macros::const_assert;
 use crate::mem;
 use crate::num::FpCategory;
+use crate::panic::const_assert;
 
 /// Basic mathematical constants.
 #[unstable(feature = "f128", issue = "116909")]
diff --git a/core/src/num/f16.rs b/core/src/num/f16.rs
index eb0225c58b837..898caf835bfd3 100644
--- a/core/src/num/f16.rs
+++ b/core/src/num/f16.rs
@@ -14,9 +14,9 @@
 use crate::convert::FloatToInt;
 #[cfg(not(test))]
 use crate::intrinsics;
-use crate::macros::const_assert;
 use crate::mem;
 use crate::num::FpCategory;
+use crate::panic::const_assert;
 
 /// Basic mathematical constants.
 #[unstable(feature = "f16", issue = "116909")]
diff --git a/core/src/num/f32.rs b/core/src/num/f32.rs
index 686a6c5092792..20ece883da60b 100644
--- a/core/src/num/f32.rs
+++ b/core/src/num/f32.rs
@@ -14,9 +14,9 @@
 use crate::convert::FloatToInt;
 #[cfg(not(test))]
 use crate::intrinsics;
-use crate::macros::const_assert;
 use crate::mem;
 use crate::num::FpCategory;
+use crate::panic::const_assert;
 
 /// The radix or base of the internal representation of `f32`.
 /// Use [`f32::RADIX`] instead.
diff --git a/core/src/num/f64.rs b/core/src/num/f64.rs
index 798cb4b1b5ccd..5640e71788b85 100644
--- a/core/src/num/f64.rs
+++ b/core/src/num/f64.rs
@@ -14,9 +14,9 @@
 use crate::convert::FloatToInt;
 #[cfg(not(test))]
 use crate::intrinsics;
-use crate::macros::const_assert;
 use crate::mem;
 use crate::num::FpCategory;
+use crate::panic::const_assert;
 
 /// The radix or base of the internal representation of `f64`.
 /// Use [`f64::RADIX`] instead.
diff --git a/core/src/num/mod.rs b/core/src/num/mod.rs
index f4930ca5c7dbd..5a69dc0c7242b 100644
--- a/core/src/num/mod.rs
+++ b/core/src/num/mod.rs
@@ -2,7 +2,7 @@
 
 #![stable(feature = "rust1", since = "1.0.0")]
 
-use crate::macros::const_panic;
+use crate::panic::const_panic;
 use crate::str::FromStr;
 use crate::ub_checks::assert_unsafe_precondition;
 use crate::{ascii, intrinsics, mem};
diff --git a/core/src/panic.rs b/core/src/panic.rs
index c95a000561c35..f8f3962ce55ac 100644
--- a/core/src/panic.rs
+++ b/core/src/panic.rs
@@ -189,3 +189,59 @@ pub unsafe trait PanicPayload: crate::fmt::Display {
         None
     }
 }
+
+/// Helper macro for panicking in a `const fn`.
+/// Invoke as:
+/// ```rust,ignore (just an example)
+/// core::panic::const_panic!("boring message", "flavored message {a} {b:?}", a: u32 = foo.len(), b: Something = bar);
+/// ```
+/// where the first message will be printed in const-eval,
+/// and the second message will be printed at runtime.
+// All uses of this macro are FIXME(const-hack).
+#[unstable(feature = "panic_internals", issue = "none")]
+#[doc(hidden)]
+pub macro const_panic {
+    ($const_msg:literal, $runtime_msg:literal, $($arg:ident : $ty:ty = $val:expr),* $(,)?) => {{
+        // Wrap call to `const_eval_select` in a function so that we can
+        // add the `rustc_allow_const_fn_unstable`. This is okay to do
+        // because both variants will panic, just with different messages.
+        #[rustc_allow_const_fn_unstable(const_eval_select)]
+        #[inline(always)]
+        #[track_caller]
+        #[cfg_attr(bootstrap, rustc_const_stable(feature = "const_panic", since = "CURRENT_RUSTC_VERSION"))]
+        const fn do_panic($($arg: $ty),*) -> ! {
+            $crate::intrinsics::const_eval_select!(
+                @capture { $($arg: $ty),* } -> !:
+                if const #[track_caller] {
+                    $crate::panic!($const_msg)
+                } else #[track_caller] {
+                    $crate::panic!($runtime_msg)
+                }
+            )
+        }
+
+        do_panic($($val),*)
+    }},
+    // We support omitting the `val` expressions for *all* arguments
+    // (but not just for *some* of them; that would be too tricky).
+    ($const_msg:literal, $runtime_msg:literal, $($arg:ident : $ty:ty),* $(,)?) => {
+        $crate::panic::const_panic!(
+            $const_msg,
+            $runtime_msg,
+            $($arg: $ty = $arg),*
+        )
+    },
+}
+
+/// A version of `assert` that prints a non-formatting message in const contexts.
+///
+/// See [`const_panic!`].
+#[unstable(feature = "panic_internals", issue = "none")]
+#[doc(hidden)]
+pub macro const_assert {
+    ($condition: expr, $const_msg:literal, $runtime_msg:literal, $($arg:tt)*) => {{
+        if !$crate::intrinsics::likely($condition) {
+            $crate::panic::const_panic!($const_msg, $runtime_msg, $($arg)*)
+        }
+    }}
+}
diff --git a/core/src/panicking.rs b/core/src/panicking.rs
index 9071d6719a30e..f603eb2971f6d 100644
--- a/core/src/panicking.rs
+++ b/core/src/panicking.rs
@@ -29,6 +29,7 @@
 )]
 
 use crate::fmt;
+use crate::intrinsics::const_eval_select;
 use crate::panic::{Location, PanicInfo};
 
 #[cfg(feature = "panic_immediate_abort")]
@@ -89,40 +90,35 @@ pub const fn panic_fmt(fmt: fmt::Arguments<'_>) -> ! {
 #[cfg_attr(not(bootstrap), rustc_const_stable_indirect)] // must follow stable const rules since it is exposed to stable
 #[rustc_allow_const_fn_unstable(const_eval_select)]
 pub const fn panic_nounwind_fmt(fmt: fmt::Arguments<'_>, force_no_backtrace: bool) -> ! {
-    #[inline] // this should always be inlined into `panic_nounwind_fmt`
-    #[track_caller]
-    fn runtime(fmt: fmt::Arguments<'_>, force_no_backtrace: bool) -> ! {
-        if cfg!(feature = "panic_immediate_abort") {
-            super::intrinsics::abort()
+    const_eval_select!(
+        @capture { fmt: fmt::Arguments<'_>, force_no_backtrace: bool } -> !:
+        if const #[track_caller] {
+            // We don't unwind anyway at compile-time so we can call the regular `panic_fmt`.
+            panic_fmt(fmt)
+        } else #[track_caller] {
+            if cfg!(feature = "panic_immediate_abort") {
+                super::intrinsics::abort()
+            }
+
+            // NOTE This function never crosses the FFI boundary; it's a Rust-to-Rust call
+            // that gets resolved to the `#[panic_handler]` function.
+            extern "Rust" {
+                #[lang = "panic_impl"]
+                fn panic_impl(pi: &PanicInfo<'_>) -> !;
+            }
+
+            // PanicInfo with the `can_unwind` flag set to false forces an abort.
+            let pi = PanicInfo::new(
+                &fmt,
+                Location::caller(),
+                /* can_unwind */ false,
+                force_no_backtrace,
+            );
+
+            // SAFETY: `panic_impl` is defined in safe Rust code and thus is safe to call.
+            unsafe { panic_impl(&pi) }
         }
-
-        // NOTE This function never crosses the FFI boundary; it's a Rust-to-Rust call
-        // that gets resolved to the `#[panic_handler]` function.
-        extern "Rust" {
-            #[lang = "panic_impl"]
-            fn panic_impl(pi: &PanicInfo<'_>) -> !;
-        }
-
-        // PanicInfo with the `can_unwind` flag set to false forces an abort.
-        let pi = PanicInfo::new(
-            &fmt,
-            Location::caller(),
-            /* can_unwind */ false,
-            force_no_backtrace,
-        );
-
-        // SAFETY: `panic_impl` is defined in safe Rust code and thus is safe to call.
-        unsafe { panic_impl(&pi) }
-    }
-
-    #[inline]
-    #[track_caller]
-    const fn comptime(fmt: fmt::Arguments<'_>, _force_no_backtrace: bool) -> ! {
-        // We don't unwind anyway at compile-time so we can call the regular `panic_fmt`.
-        panic_fmt(fmt);
-    }
-
-    super::intrinsics::const_eval_select((fmt, force_no_backtrace), comptime, runtime);
+    )
 }
 
 // Next we define a bunch of higher-level wrappers that all bottom out in the two core functions
diff --git a/core/src/ptr/const_ptr.rs b/core/src/ptr/const_ptr.rs
index a4e8e373e041e..2d7507e2d53ee 100644
--- a/core/src/ptr/const_ptr.rs
+++ b/core/src/ptr/const_ptr.rs
@@ -33,26 +33,23 @@ impl<T: ?Sized> *const T {
     #[rustc_diagnostic_item = "ptr_const_is_null"]
     #[inline]
     pub const fn is_null(self) -> bool {
-        #[inline]
-        fn runtime_impl(ptr: *const u8) -> bool {
-            ptr.addr() == 0
-        }
-
-        #[inline]
-        #[rustc_const_unstable(feature = "const_ptr_is_null", issue = "74939")]
-        const fn const_impl(ptr: *const u8) -> bool {
-            match (ptr).guaranteed_eq(null_mut()) {
-                Some(res) => res,
-                // To remain maximally convervative, we stop execution when we don't
-                // know whether the pointer is null or not.
-                // We can *not* return `false` here, that would be unsound in `NonNull::new`!
-                None => panic!("null-ness of this pointer cannot be determined in const context"),
-            }
-        }
-
         // Compare via a cast to a thin pointer, so fat pointers are only
         // considering their "data" part for null-ness.
-        const_eval_select((self as *const u8,), const_impl, runtime_impl)
+        let ptr = self as *const u8;
+        const_eval_select!(
+            @capture { ptr: *const u8 } -> bool:
+            if const #[rustc_const_unstable(feature = "const_ptr_is_null", issue = "74939")] {
+                match (ptr).guaranteed_eq(null_mut()) {
+                    Some(res) => res,
+                    // To remain maximally conservative, we stop execution when we don't
+                    // know whether the pointer is null or not.
+                    // We can *not* return `false` here, that would be unsound in `NonNull::new`!
+                    None => panic!("null-ness of this pointer cannot be determined in const context"),
+                }
+            } else {
+                ptr.addr() == 0
+            }
+        )
     }
 
     /// Casts to a pointer of another type.
@@ -410,22 +407,21 @@ impl<T: ?Sized> *const T {
         #[inline]
         #[rustc_allow_const_fn_unstable(const_eval_select)]
         const fn runtime_offset_nowrap(this: *const (), count: isize, size: usize) -> bool {
-            #[inline]
-            fn runtime(this: *const (), count: isize, size: usize) -> bool {
-                // We know `size <= isize::MAX` so the `as` cast here is not lossy.
-                let Some(byte_offset) = count.checked_mul(size as isize) else {
-                    return false;
-                };
-                let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
-                !overflow
-            }
-
-            const fn comptime(_: *const (), _: isize, _: usize) -> bool {
-                true
-            }
-
             // We can use const_eval_select here because this is only for UB checks.
-            intrinsics::const_eval_select((this, count, size), comptime, runtime)
+            const_eval_select!(
+                @capture { this: *const (), count: isize, size: usize } -> bool:
+                if const {
+                    true
+                } else {
+                    // `size` is the size of a Rust type, so we know that
+                    // `size <= isize::MAX` and thus the `as` cast here is not lossy.
+                    let Some(byte_offset) = count.checked_mul(size as isize) else {
+                        return false;
+                    };
+                    let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
+                    !overflow
+                }
+            )
         }
 
         ub_checks::assert_unsafe_precondition!(
@@ -763,14 +759,14 @@ impl<T: ?Sized> *const T {
     {
         #[rustc_allow_const_fn_unstable(const_eval_select)]
         const fn runtime_ptr_ge(this: *const (), origin: *const ()) -> bool {
-            fn runtime(this: *const (), origin: *const ()) -> bool {
-                this >= origin
-            }
-            const fn comptime(_: *const (), _: *const ()) -> bool {
-                true
-            }
-
-            intrinsics::const_eval_select((this, origin), comptime, runtime)
+            const_eval_select!(
+                @capture { this: *const (), origin: *const () } -> bool:
+                if const {
+                    true
+                } else {
+                    this >= origin
+                }
+            )
         }
 
         ub_checks::assert_unsafe_precondition!(
@@ -924,20 +920,18 @@ impl<T: ?Sized> *const T {
         #[inline]
         #[rustc_allow_const_fn_unstable(const_eval_select)]
         const fn runtime_add_nowrap(this: *const (), count: usize, size: usize) -> bool {
-            #[inline]
-            fn runtime(this: *const (), count: usize, size: usize) -> bool {
-                let Some(byte_offset) = count.checked_mul(size) else {
-                    return false;
-                };
-                let (_, overflow) = this.addr().overflowing_add(byte_offset);
-                byte_offset <= (isize::MAX as usize) && !overflow
-            }
-
-            const fn comptime(_: *const (), _: usize, _: usize) -> bool {
-                true
-            }
-
-            intrinsics::const_eval_select((this, count, size), comptime, runtime)
+            const_eval_select!(
+                @capture { this: *const (), count: usize, size: usize } -> bool:
+                if const {
+                    true
+                } else {
+                    let Some(byte_offset) = count.checked_mul(size) else {
+                        return false;
+                    };
+                    let (_, overflow) = this.addr().overflowing_add(byte_offset);
+                    byte_offset <= (isize::MAX as usize) && !overflow
+                }
+            )
         }
 
         #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
@@ -1033,19 +1027,17 @@ impl<T: ?Sized> *const T {
         #[inline]
         #[rustc_allow_const_fn_unstable(const_eval_select)]
         const fn runtime_sub_nowrap(this: *const (), count: usize, size: usize) -> bool {
-            #[inline]
-            fn runtime(this: *const (), count: usize, size: usize) -> bool {
-                let Some(byte_offset) = count.checked_mul(size) else {
-                    return false;
-                };
-                byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
-            }
-
-            const fn comptime(_: *const (), _: usize, _: usize) -> bool {
-                true
-            }
-
-            intrinsics::const_eval_select((this, count, size), comptime, runtime)
+            const_eval_select!(
+                @capture { this: *const (), count: usize, size: usize } -> bool:
+                if const {
+                    true
+                } else {
+                    let Some(byte_offset) = count.checked_mul(size) else {
+                        return false;
+                    };
+                    byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
+                }
+            )
         }
 
         #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
diff --git a/core/src/ptr/mut_ptr.rs b/core/src/ptr/mut_ptr.rs
index 0d94a7f491c21..344ba46a50e20 100644
--- a/core/src/ptr/mut_ptr.rs
+++ b/core/src/ptr/mut_ptr.rs
@@ -1,5 +1,6 @@
 use super::*;
 use crate::cmp::Ordering::{Equal, Greater, Less};
+use crate::intrinsics::const_eval_select;
 use crate::mem::SizedTypeProperties;
 use crate::slice::{self, SliceIndex};
 
@@ -404,23 +405,21 @@ impl<T: ?Sized> *mut T {
         #[inline]
         #[rustc_allow_const_fn_unstable(const_eval_select)]
         const fn runtime_offset_nowrap(this: *const (), count: isize, size: usize) -> bool {
-            #[inline]
-            fn runtime(this: *const (), count: isize, size: usize) -> bool {
-                // `size` is the size of a Rust type, so we know that
-                // `size <= isize::MAX` and thus `as` cast here is not lossy.
-                let Some(byte_offset) = count.checked_mul(size as isize) else {
-                    return false;
-                };
-                let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
-                !overflow
-            }
-
-            const fn comptime(_: *const (), _: isize, _: usize) -> bool {
-                true
-            }
-
             // We can use const_eval_select here because this is only for UB checks.
-            intrinsics::const_eval_select((this, count, size), comptime, runtime)
+            const_eval_select!(
+                @capture { this: *const (), count: isize, size: usize } -> bool:
+                if const {
+                    true
+                } else {
+                    // `size` is the size of a Rust type, so we know that
+                    // `size <= isize::MAX` and thus the `as` cast here is not lossy.
+                    let Some(byte_offset) = count.checked_mul(size as isize) else {
+                        return false;
+                    };
+                    let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
+                    !overflow
+                }
+            )
         }
 
         ub_checks::assert_unsafe_precondition!(
@@ -1002,20 +1001,18 @@ impl<T: ?Sized> *mut T {
         #[inline]
         #[rustc_allow_const_fn_unstable(const_eval_select)]
         const fn runtime_add_nowrap(this: *const (), count: usize, size: usize) -> bool {
-            #[inline]
-            fn runtime(this: *const (), count: usize, size: usize) -> bool {
-                let Some(byte_offset) = count.checked_mul(size) else {
-                    return false;
-                };
-                let (_, overflow) = this.addr().overflowing_add(byte_offset);
-                byte_offset <= (isize::MAX as usize) && !overflow
-            }
-
-            const fn comptime(_: *const (), _: usize, _: usize) -> bool {
-                true
-            }
-
-            intrinsics::const_eval_select((this, count, size), comptime, runtime)
+            const_eval_select!(
+                @capture { this: *const (), count: usize, size: usize } -> bool:
+                if const {
+                    true
+                } else {
+                    let Some(byte_offset) = count.checked_mul(size) else {
+                        return false;
+                    };
+                    let (_, overflow) = this.addr().overflowing_add(byte_offset);
+                    byte_offset <= (isize::MAX as usize) && !overflow
+                }
+            )
         }
 
         #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
@@ -1111,19 +1108,17 @@ impl<T: ?Sized> *mut T {
         #[inline]
         #[rustc_allow_const_fn_unstable(const_eval_select)]
         const fn runtime_sub_nowrap(this: *const (), count: usize, size: usize) -> bool {
-            #[inline]
-            fn runtime(this: *const (), count: usize, size: usize) -> bool {
-                let Some(byte_offset) = count.checked_mul(size) else {
-                    return false;
-                };
-                byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
-            }
-
-            const fn comptime(_: *const (), _: usize, _: usize) -> bool {
-                true
-            }
-
-            intrinsics::const_eval_select((this, count, size), comptime, runtime)
+            const_eval_select!(
+                @capture { this: *const (), count: usize, size: usize } -> bool:
+                if const {
+                    true
+                } else {
+                    let Some(byte_offset) = count.checked_mul(size) else {
+                        return false;
+                    };
+                    byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
+                }
+            )
         }
 
         #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
diff --git a/core/src/slice/ascii.rs b/core/src/slice/ascii.rs
index 8dcd34929e18d..58ba3a1573a81 100644
--- a/core/src/slice/ascii.rs
+++ b/core/src/slice/ascii.rs
@@ -351,89 +351,87 @@ pub const fn is_ascii_simple(mut bytes: &[u8]) -> bool {
 const fn is_ascii(s: &[u8]) -> bool {
     // The runtime version behaves the same as the compiletime version, it's
     // just more optimized.
-    return const_eval_select((s,), compiletime, runtime);
-
-    const fn compiletime(s: &[u8]) -> bool {
-        is_ascii_simple(s)
-    }
-
-    #[inline]
-    fn runtime(s: &[u8]) -> bool {
-        const USIZE_SIZE: usize = mem::size_of::<usize>();
-
-        let len = s.len();
-        let align_offset = s.as_ptr().align_offset(USIZE_SIZE);
-
-        // If we wouldn't gain anything from the word-at-a-time implementation, fall
-        // back to a scalar loop.
-        //
-        // We also do this for architectures where `size_of::<usize>()` isn't
-        // sufficient alignment for `usize`, because it's a weird edge case.
-        if len < USIZE_SIZE || len < align_offset || USIZE_SIZE < mem::align_of::<usize>() {
-            return is_ascii_simple(s);
-        }
+    const_eval_select!(
+        @capture { s: &[u8] } -> bool:
+        if const {
+            is_ascii_simple(s)
+        } else {
+            const USIZE_SIZE: usize = mem::size_of::<usize>();
+
+            let len = s.len();
+            let align_offset = s.as_ptr().align_offset(USIZE_SIZE);
+
+            // If we wouldn't gain anything from the word-at-a-time implementation, fall
+            // back to a scalar loop.
+            //
+            // We also do this for architectures where `size_of::<usize>()` isn't
+            // sufficient alignment for `usize`, because it's a weird edge case.
+            if len < USIZE_SIZE || len < align_offset || USIZE_SIZE < mem::align_of::<usize>() {
+                return is_ascii_simple(s);
+            }
 
-        // We always read the first word unaligned, which means `align_offset` is
-        // 0, we'd read the same value again for the aligned read.
-        let offset_to_aligned = if align_offset == 0 { USIZE_SIZE } else { align_offset };
+            // We always read the first word unaligned, which means `align_offset` is
+            // 0, we'd read the same value again for the aligned read.
+            let offset_to_aligned = if align_offset == 0 { USIZE_SIZE } else { align_offset };
 
-        let start = s.as_ptr();
-        // SAFETY: We verify `len < USIZE_SIZE` above.
-        let first_word = unsafe { (start as *const usize).read_unaligned() };
+            let start = s.as_ptr();
+            // SAFETY: We verify `len < USIZE_SIZE` above.
+            let first_word = unsafe { (start as *const usize).read_unaligned() };
 
-        if contains_nonascii(first_word) {
-            return false;
-        }
-        // We checked this above, somewhat implicitly. Note that `offset_to_aligned`
-        // is either `align_offset` or `USIZE_SIZE`, both of are explicitly checked
-        // above.
-        debug_assert!(offset_to_aligned <= len);
-
-        // SAFETY: word_ptr is the (properly aligned) usize ptr we use to read the
-        // middle chunk of the slice.
-        let mut word_ptr = unsafe { start.add(offset_to_aligned) as *const usize };
-
-        // `byte_pos` is the byte index of `word_ptr`, used for loop end checks.
-        let mut byte_pos = offset_to_aligned;
-
-        // Paranoia check about alignment, since we're about to do a bunch of
-        // unaligned loads. In practice this should be impossible barring a bug in
-        // `align_offset` though.
-        // While this method is allowed to spuriously fail in CTFE, if it doesn't
-        // have alignment information it should have given a `usize::MAX` for
-        // `align_offset` earlier, sending things through the scalar path instead of
-        // this one, so this check should pass if it's reachable.
-        debug_assert!(word_ptr.is_aligned_to(mem::align_of::<usize>()));
-
-        // Read subsequent words until the last aligned word, excluding the last
-        // aligned word by itself to be done in tail check later, to ensure that
-        // tail is always one `usize` at most to extra branch `byte_pos == len`.
-        while byte_pos < len - USIZE_SIZE {
-            // Sanity check that the read is in bounds
-            debug_assert!(byte_pos + USIZE_SIZE <= len);
-            // And that our assumptions about `byte_pos` hold.
-            debug_assert!(word_ptr.cast::<u8>() == start.wrapping_add(byte_pos));
-
-            // SAFETY: We know `word_ptr` is properly aligned (because of
-            // `align_offset`), and we know that we have enough bytes between `word_ptr` and the end
-            let word = unsafe { word_ptr.read() };
-            if contains_nonascii(word) {
+            if contains_nonascii(first_word) {
                 return false;
             }
+            // We checked this above, somewhat implicitly. Note that `offset_to_aligned`
+            // is either `align_offset` or `USIZE_SIZE`, both of which are explicitly checked
+            // above.
+            debug_assert!(offset_to_aligned <= len);
+
+            // SAFETY: word_ptr is the (properly aligned) usize ptr we use to read the
+            // middle chunk of the slice.
+            let mut word_ptr = unsafe { start.add(offset_to_aligned) as *const usize };
+
+            // `byte_pos` is the byte index of `word_ptr`, used for loop end checks.
+            let mut byte_pos = offset_to_aligned;
+
+            // Paranoia check about alignment, since we're about to do a bunch of
+            // unaligned loads. In practice this should be impossible barring a bug in
+            // `align_offset` though.
+            // While this method is allowed to spuriously fail in CTFE, if it doesn't
+            // have alignment information it should have given a `usize::MAX` for
+            // `align_offset` earlier, sending things through the scalar path instead of
+            // this one, so this check should pass if it's reachable.
+            debug_assert!(word_ptr.is_aligned_to(mem::align_of::<usize>()));
+
+            // Read subsequent words until the last aligned word, excluding the last
+            // aligned word by itself to be handled in the tail check later, ensuring that
+            // the tail is at most one `usize` and avoiding an extra `byte_pos == len` branch.
+            while byte_pos < len - USIZE_SIZE {
+                // Sanity check that the read is in bounds
+                debug_assert!(byte_pos + USIZE_SIZE <= len);
+                // And that our assumptions about `byte_pos` hold.
+                debug_assert!(word_ptr.cast::<u8>() == start.wrapping_add(byte_pos));
+
+                // SAFETY: We know `word_ptr` is properly aligned (because of
+                // `align_offset`), and we know that we have enough bytes between `word_ptr` and the end
+                let word = unsafe { word_ptr.read() };
+                if contains_nonascii(word) {
+                    return false;
+                }
+
+                byte_pos += USIZE_SIZE;
+                // SAFETY: We know that `byte_pos <= len - USIZE_SIZE`, which means that
+                // after this `add`, `word_ptr` will be at most one-past-the-end.
+                word_ptr = unsafe { word_ptr.add(1) };
+            }
 
-            byte_pos += USIZE_SIZE;
-            // SAFETY: We know that `byte_pos <= len - USIZE_SIZE`, which means that
-            // after this `add`, `word_ptr` will be at most one-past-the-end.
-            word_ptr = unsafe { word_ptr.add(1) };
-        }
-
-        // Sanity check to ensure there really is only one `usize` left. This should
-        // be guaranteed by our loop condition.
-        debug_assert!(byte_pos <= len && len - byte_pos <= USIZE_SIZE);
+            // Sanity check to ensure there really is only one `usize` left. This should
+            // be guaranteed by our loop condition.
+            debug_assert!(byte_pos <= len && len - byte_pos <= USIZE_SIZE);
 
-        // SAFETY: This relies on `len >= USIZE_SIZE`, which we check at the start.
-        let last_word = unsafe { (start.add(len - USIZE_SIZE) as *const usize).read_unaligned() };
+            // SAFETY: This relies on `len >= USIZE_SIZE`, which we check at the start.
+            let last_word = unsafe { (start.add(len - USIZE_SIZE) as *const usize).read_unaligned() };
 
-        !contains_nonascii(last_word)
-    }
+            !contains_nonascii(last_word)
+        }
+    )
 }
diff --git a/core/src/slice/index.rs b/core/src/slice/index.rs
index ebb4bdb144909..aafa19c0dd3d3 100644
--- a/core/src/slice/index.rs
+++ b/core/src/slice/index.rs
@@ -1,6 +1,6 @@
 //! Indexing implementations for `[T]`.
 
-use crate::macros::const_panic;
+use crate::panic::const_panic;
 use crate::ub_checks::assert_unsafe_precondition;
 use crate::{ops, range};
 
diff --git a/core/src/slice/memchr.rs b/core/src/slice/memchr.rs
index b7c4a1f6f08b1..339adad1b17bf 100644
--- a/core/src/slice/memchr.rs
+++ b/core/src/slice/memchr.rs
@@ -56,61 +56,59 @@ const fn memchr_naive(x: u8, text: &[u8]) -> Option<usize> {
 const fn memchr_aligned(x: u8, text: &[u8]) -> Option<usize> {
     // The runtime version behaves the same as the compiletime version, it's
     // just more optimized.
-    return const_eval_select((x, text), compiletime, runtime);
-
-    const fn compiletime(x: u8, text: &[u8]) -> Option<usize> {
-        memchr_naive(x, text)
-    }
-
-    #[inline]
-    fn runtime(x: u8, text: &[u8]) -> Option<usize> {
-        // Scan for a single byte value by reading two `usize` words at a time.
-        //
-        // Split `text` in three parts
-        // - unaligned initial part, before the first word aligned address in text
-        // - body, scan by 2 words at a time
-        // - the last remaining part, < 2 word size
-
-        // search up to an aligned boundary
-        let len = text.len();
-        let ptr = text.as_ptr();
-        let mut offset = ptr.align_offset(USIZE_BYTES);
-
-        if offset > 0 {
-            offset = offset.min(len);
-            let slice = &text[..offset];
-            if let Some(index) = memchr_naive(x, slice) {
-                return Some(index);
+    const_eval_select!(
+        @capture { x: u8, text: &[u8] } -> Option<usize>:
+        if const {
+            memchr_naive(x, text)
+        } else {
+            // Scan for a single byte value by reading two `usize` words at a time.
+            //
+            // Split `text` in three parts
+            // - unaligned initial part, before the first word aligned address in text
+            // - body, scan by 2 words at a time
+            // - the last remaining part, < 2 word size
+
+            // search up to an aligned boundary
+            let len = text.len();
+            let ptr = text.as_ptr();
+            let mut offset = ptr.align_offset(USIZE_BYTES);
+
+            if offset > 0 {
+                offset = offset.min(len);
+                let slice = &text[..offset];
+                if let Some(index) = memchr_naive(x, slice) {
+                    return Some(index);
+                }
             }
-        }
 
-        // search the body of the text
-        let repeated_x = usize::repeat_u8(x);
-        while offset <= len - 2 * USIZE_BYTES {
-            // SAFETY: the while's predicate guarantees a distance of at least 2 * usize_bytes
-            // between the offset and the end of the slice.
-            unsafe {
-                let u = *(ptr.add(offset) as *const usize);
-                let v = *(ptr.add(offset + USIZE_BYTES) as *const usize);
-
-                // break if there is a matching byte
-                let zu = contains_zero_byte(u ^ repeated_x);
-                let zv = contains_zero_byte(v ^ repeated_x);
-                if zu || zv {
-                    break;
+            // search the body of the text
+            let repeated_x = usize::repeat_u8(x);
+            while offset <= len - 2 * USIZE_BYTES {
+                // SAFETY: the while's predicate guarantees a distance of at least 2 * usize_bytes
+                // between the offset and the end of the slice.
+                unsafe {
+                    let u = *(ptr.add(offset) as *const usize);
+                    let v = *(ptr.add(offset + USIZE_BYTES) as *const usize);
+
+                    // break if there is a matching byte
+                    let zu = contains_zero_byte(u ^ repeated_x);
+                    let zv = contains_zero_byte(v ^ repeated_x);
+                    if zu || zv {
+                        break;
+                    }
                 }
+                offset += USIZE_BYTES * 2;
             }
-            offset += USIZE_BYTES * 2;
-        }
 
-        // Find the byte after the point the body loop stopped.
-        // FIXME(const-hack): Use `?` instead.
-        // FIXME(const-hack, fee1-dead): use range slicing
-        let slice =
-        // SAFETY: offset is within bounds
-            unsafe { super::from_raw_parts(text.as_ptr().add(offset), text.len() - offset) };
-        if let Some(i) = memchr_naive(x, slice) { Some(offset + i) } else { None }
-    }
+            // Find the byte after the point the body loop stopped.
+            // FIXME(const-hack): Use `?` instead.
+            // FIXME(const-hack, fee1-dead): use range slicing
+            let slice =
+            // SAFETY: offset is within bounds
+                unsafe { super::from_raw_parts(text.as_ptr().add(offset), text.len() - offset) };
+            if let Some(i) = memchr_naive(x, slice) { Some(offset + i) } else { None }
+        }
+    )
 }
 
 /// Returns the last index matching the byte `x` in `text`.
diff --git a/core/src/str/validations.rs b/core/src/str/validations.rs
index 6095b589e18c3..0f724dd961329 100644
--- a/core/src/str/validations.rs
+++ b/core/src/str/validations.rs
@@ -132,19 +132,16 @@ pub(super) const fn run_utf8_validation(v: &[u8]) -> Result<(), Utf8Error> {
 
     let ascii_block_size = 2 * USIZE_BYTES;
     let blocks_end = if len >= ascii_block_size { len - ascii_block_size + 1 } else { 0 };
-    let align = {
-        const fn compiletime(_v: &[u8]) -> usize {
+    // Below, we safely fall back to a slower codepath if the offset is `usize::MAX`,
+    // so the end-to-end behavior is the same at compiletime and runtime.
+    let align = const_eval_select!(
+        @capture { v: &[u8] } -> usize:
+        if const {
             usize::MAX
-        }
-
-        fn runtime(v: &[u8]) -> usize {
+        } else {
             v.as_ptr().align_offset(USIZE_BYTES)
         }
-
-        // Below, we safely fall back to a slower codepath if the offset is `usize::MAX`,
-        // so the end-to-end behavior is the same at compiletime and runtime.
-        const_eval_select((v,), compiletime, runtime)
-    };
+    );
 
     while index < len {
         let old_offset = index;
diff --git a/core/src/ub_checks.rs b/core/src/ub_checks.rs
index dd1454f401eaf..8fcbda141dab7 100644
--- a/core/src/ub_checks.rs
+++ b/core/src/ub_checks.rs
@@ -95,20 +95,18 @@ pub use intrinsics::ub_checks as check_library_ub;
 #[inline]
 #[rustc_allow_const_fn_unstable(const_eval_select)]
 pub(crate) const fn check_language_ub() -> bool {
-    #[inline]
-    fn runtime() -> bool {
-        // Disable UB checks in Miri.
-        !cfg!(miri)
-    }
-
-    #[inline]
-    const fn comptime() -> bool {
-        // Always disable UB checks.
-        false
-    }
-
     // Only used for UB checks so we may const_eval_select.
-    intrinsics::ub_checks() && const_eval_select((), comptime, runtime)
+    intrinsics::ub_checks()
+        && const_eval_select!(
+            @capture { } -> bool:
+            if const {
+                // Always disable UB checks.
+                false
+            } else {
+                // Disable UB checks in Miri.
+                !cfg!(miri)
+            }
+        )
 }
 
 /// Checks whether `ptr` is properly aligned with respect to the given alignment, and
@@ -120,19 +118,15 @@ pub(crate) const fn check_language_ub() -> bool {
 #[inline]
 #[rustc_const_unstable(feature = "const_ub_checks", issue = "none")]
 pub(crate) const fn is_aligned_and_not_null(ptr: *const (), align: usize, is_zst: bool) -> bool {
-    #[inline]
-    fn runtime(ptr: *const (), align: usize, is_zst: bool) -> bool {
-        ptr.is_aligned_to(align) && (is_zst || !ptr.is_null())
-    }
-
-    #[inline]
-    #[rustc_const_unstable(feature = "const_ub_checks", issue = "none")]
-    const fn comptime(ptr: *const (), _align: usize, is_zst: bool) -> bool {
-        is_zst || !ptr.is_null()
-    }
-
     // This is just for safety checks so we can const_eval_select.
-    const_eval_select((ptr, align, is_zst), comptime, runtime)
+    const_eval_select!(
+        @capture { ptr: *const (), align: usize, is_zst: bool } -> bool:
+        if const #[rustc_const_unstable(feature = "const_ub_checks", issue = "none")] {
+            is_zst || !ptr.is_null()
+        } else {
+            ptr.is_aligned_to(align) && (is_zst || !ptr.is_null())
+        }
+    )
 }
 
 #[inline]
@@ -154,26 +148,23 @@ pub(crate) const fn is_nonoverlapping(
     size: usize,
     count: usize,
 ) -> bool {
-    #[inline]
-    fn runtime(src: *const (), dst: *const (), size: usize, count: usize) -> bool {
-        let src_usize = src.addr();
-        let dst_usize = dst.addr();
-        let Some(size) = size.checked_mul(count) else {
-            crate::panicking::panic_nounwind(
-                "is_nonoverlapping: `size_of::<T>() * count` overflows a usize",
-            )
-        };
-        let diff = src_usize.abs_diff(dst_usize);
-        // If the absolute distance between the ptrs is at least as big as the size of the buffer,
-        // they do not overlap.
-        diff >= size
-    }
-
-    #[inline]
-    const fn comptime(_: *const (), _: *const (), _: usize, _: usize) -> bool {
-        true
-    }
-
     // This is just for safety checks so we can const_eval_select.
-    const_eval_select((src, dst, size, count), comptime, runtime)
+    const_eval_select!(
+        @capture { src: *const (), dst: *const (), size: usize, count: usize } -> bool:
+        if const {
+            true
+        } else {
+            let src_usize = src.addr();
+            let dst_usize = dst.addr();
+            let Some(size) = size.checked_mul(count) else {
+                crate::panicking::panic_nounwind(
+                    "is_nonoverlapping: `size_of::<T>() * count` overflows a usize",
+                )
+            };
+            let diff = src_usize.abs_diff(dst_usize);
+            // If the absolute distance between the ptrs is at least as big as the size of the buffer,
+            // they do not overlap.
+            diff >= size
+        }
+    )
 }