From 8a9f40043fb6177a8a6d720103bac4e8889e0732 Mon Sep 17 00:00:00 2001
From: Lukas Markeffsky <@>
Date: Sun, 27 Oct 2024 17:47:00 +0100
Subject: [PATCH 1/2] add test for panicking drop in Box/Rc/Arc

---
 library/alloc/tests/arc.rs   | 40 ++++++++++++++++++++++++++++++++++--
 library/alloc/tests/boxed.rs | 38 ++++++++++++++++++++++++++++++++++
 library/alloc/tests/rc.rs    | 38 +++++++++++++++++++++++++++++++++-
 3 files changed, 113 insertions(+), 3 deletions(-)

diff --git a/library/alloc/tests/arc.rs b/library/alloc/tests/arc.rs
index dc27c578b57ef..a259c0131ecdf 100644
--- a/library/alloc/tests/arc.rs
+++ b/library/alloc/tests/arc.rs
@@ -1,5 +1,5 @@
 use std::any::Any;
-use std::cell::RefCell;
+use std::cell::{Cell, RefCell};
 use std::iter::TrustedLen;
 use std::mem;
 use std::sync::{Arc, Weak};
@@ -89,7 +89,7 @@ fn eq() {
 
 // The test code below is identical to that in `rc.rs`.
 // For better maintainability we therefore define this type alias.
-type Rc<T> = Arc<T>;
+type Rc<T, A = std::alloc::Global> = Arc<T, A>;
 
 const SHARED_ITER_MAX: u16 = 100;
 
@@ -210,6 +210,42 @@ fn weak_may_dangle() {
     // borrow might be used here, when `val` is dropped and runs the `Drop` code for type `std::sync::Weak`
 }
 
+/// Test that a panic from a destructor does not leak the allocation.
+#[test]
+#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
+fn panic_no_leak() {
+    use std::alloc::{AllocError, Allocator, Global, Layout};
+    use std::panic::{AssertUnwindSafe, catch_unwind};
+    use std::ptr::NonNull;
+
+    struct AllocCount(Cell<i32>);
+    unsafe impl Allocator for AllocCount {
+        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+            self.0.set(self.0.get() + 1);
+            Global.allocate(layout)
+        }
+        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+            self.0.set(self.0.get() - 1);
+            unsafe { Global.deallocate(ptr, layout) }
+        }
+    }
+
+    struct PanicOnDrop;
+    impl Drop for PanicOnDrop {
+        fn drop(&mut self) {
+            panic!("PanicOnDrop");
+        }
+    }
+
+    let alloc = AllocCount(Cell::new(0));
+    let rc = Rc::new_in(PanicOnDrop, &alloc);
+    assert_eq!(alloc.0.get(), 1);
+
+    let panic_message = catch_unwind(AssertUnwindSafe(|| drop(rc))).unwrap_err();
+    assert_eq!(*panic_message.downcast_ref::<&'static str>().unwrap(), "PanicOnDrop");
+    assert_eq!(alloc.0.get(), 0);
+}
+
 /// This is similar to the doc-test for `Arc::make_mut()`, but on an unsized type (slice).
 #[test]
 fn make_mut_unsized() {
diff --git a/library/alloc/tests/boxed.rs b/library/alloc/tests/boxed.rs
index bfc31a626fadd..2d172fc213799 100644
--- a/library/alloc/tests/boxed.rs
+++ b/library/alloc/tests/boxed.rs
@@ -59,6 +59,44 @@ fn box_deref_lval() {
     assert_eq!(x.get(), 1000);
 }
 
+/// Test that a panic from a destructor does not leak the allocation.
+#[test]
+#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
+fn panic_no_leak() {
+    use std::alloc::{AllocError, Allocator, Global, Layout};
+    use std::panic::{AssertUnwindSafe, catch_unwind};
+    use std::ptr::NonNull;
+
+    struct AllocCount(Cell<i32>);
+    unsafe impl Allocator for AllocCount {
+        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+            self.0.set(self.0.get() + 1);
+            Global.allocate(layout)
+        }
+        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+            self.0.set(self.0.get() - 1);
+            unsafe { Global.deallocate(ptr, layout) }
+        }
+    }
+
+    struct PanicOnDrop {
+        _data: u8,
+    }
+    impl Drop for PanicOnDrop {
+        fn drop(&mut self) {
+            panic!("PanicOnDrop");
+        }
+    }
+
+    let alloc = AllocCount(Cell::new(0));
+    let b = Box::new_in(PanicOnDrop { _data: 42 }, &alloc);
+    assert_eq!(alloc.0.get(), 1);
+
+    let panic_message = catch_unwind(AssertUnwindSafe(|| drop(b))).unwrap_err();
+    assert_eq!(*panic_message.downcast_ref::<&'static str>().unwrap(), "PanicOnDrop");
+    assert_eq!(alloc.0.get(), 0);
+}
+
 #[allow(unused)]
 pub struct ConstAllocator;
 
diff --git a/library/alloc/tests/rc.rs b/library/alloc/tests/rc.rs
index 29dbdcf225eb5..451765d724283 100644
--- a/library/alloc/tests/rc.rs
+++ b/library/alloc/tests/rc.rs
@@ -1,5 +1,5 @@
 use std::any::Any;
-use std::cell::RefCell;
+use std::cell::{Cell, RefCell};
 use std::iter::TrustedLen;
 use std::mem;
 use std::rc::{Rc, Weak};
@@ -206,6 +206,42 @@ fn weak_may_dangle() {
     // borrow might be used here, when `val` is dropped and runs the `Drop` code for type `std::rc::Weak`
 }
 
+/// Test that a panic from a destructor does not leak the allocation.
+#[test]
+#[cfg_attr(not(panic = "unwind"), ignore = "test requires unwinding support")]
+fn panic_no_leak() {
+    use std::alloc::{AllocError, Allocator, Global, Layout};
+    use std::panic::{AssertUnwindSafe, catch_unwind};
+    use std::ptr::NonNull;
+
+    struct AllocCount(Cell<i32>);
+    unsafe impl Allocator for AllocCount {
+        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+            self.0.set(self.0.get() + 1);
+            Global.allocate(layout)
+        }
+        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+            self.0.set(self.0.get() - 1);
+            unsafe { Global.deallocate(ptr, layout) }
+        }
+    }
+
+    struct PanicOnDrop;
+    impl Drop for PanicOnDrop {
+        fn drop(&mut self) {
+            panic!("PanicOnDrop");
+        }
+    }
+
+    let alloc = AllocCount(Cell::new(0));
+    let rc = Rc::new_in(PanicOnDrop, &alloc);
+    assert_eq!(alloc.0.get(), 1);
+
+    let panic_message = catch_unwind(AssertUnwindSafe(|| drop(rc))).unwrap_err();
+    assert_eq!(*panic_message.downcast_ref::<&'static str>().unwrap(), "PanicOnDrop");
+    assert_eq!(alloc.0.get(), 0);
+}
+
 #[allow(unused)]
 mod pin_coerce_unsized {
     use alloc::rc::{Rc, UniqueRc};

From 4a71a592819b6fe87377dd5f857218ed37dbf195 Mon Sep 17 00:00:00 2001
From: Lukas Markeffsky <@>
Date: Sun, 27 Oct 2024 17:48:15 +0100
Subject: [PATCH 2/2] Rc/Arc: don't leak the allocation if drop panics

---
 library/alloc/src/rc.rs   | 19 ++++++++-----------
 library/alloc/src/sync.rs | 16 +++++++++-------
 2 files changed, 17 insertions(+), 18 deletions(-)

diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index 9fdd51ce331fb..52ec8237d834a 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -2256,17 +2256,14 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Rc<T, A> {
         unsafe {
             self.inner().dec_strong();
             if self.inner().strong() == 0 {
-                // destroy the contained object
-                ptr::drop_in_place(Self::get_mut_unchecked(self));
-
-                // remove the implicit "strong weak" pointer now that we've
-                // destroyed the contents.
-                self.inner().dec_weak();
-
-                if self.inner().weak() == 0 {
-                    self.alloc
-                        .deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
-                }
+                // Reconstruct the "strong weak" pointer and drop it when this
+                // variable goes out of scope. This ensures that the memory is
+                // deallocated even if the destructor of `T` panics.
+                let _weak = Weak { ptr: self.ptr, alloc: &self.alloc };
+
+                // Destroy the contained object.
+                // We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed.
+                ptr::drop_in_place(&mut (*self.ptr.as_ptr()).value);
             }
         }
     }
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index 15a1b0f283449..98a2fe242570f 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -1872,15 +1872,17 @@ impl<T: ?Sized, A: Allocator> Arc<T, A> {
     // Non-inlined part of `drop`.
     #[inline(never)]
     unsafe fn drop_slow(&mut self) {
+        // Drop the weak ref collectively held by all strong references when this
+        // variable goes out of scope. This ensures that the memory is deallocated
+        // even if the destructor of `T` panics.
+        // Take a reference to `self.alloc` instead of cloning because 1. it'll last long
+        // enough, and 2. you should be able to drop `Arc`s with unclonable allocators
+        let _weak = Weak { ptr: self.ptr, alloc: &self.alloc };
+
         // Destroy the data at this time, even though we must not free the box
         // allocation itself (there might still be weak pointers lying around).
-        unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)) };
-
-        // Drop the weak ref collectively held by all strong references
-        // Take a reference to `self.alloc` instead of cloning because 1. it'll
-        // last long enough, and 2. you should be able to drop `Arc`s with
-        // unclonable allocators
-        drop(Weak { ptr: self.ptr, alloc: &self.alloc });
+        // We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed.
+        unsafe { ptr::drop_in_place(&mut (*self.ptr.as_ptr()).data) };
     }
 
     /// Returns `true` if the two `Arc`s point to the same allocation in a vein similar to
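
For illustration only, here is a minimal, self-contained sketch of the RAII pattern both fixes rely on. The names Guard, Payload, and drop_slow_model are hypothetical stand-ins and do not appear in the patch: Guard plays the role of the reconstructed Weak, whose destructor still runs during unwinding, so the allocation is released even when dropping the contained value panics.

use std::panic::{AssertUnwindSafe, catch_unwind};

// Stand-in for the reconstructed `Weak { ptr, alloc }`: its destructor runs
// during unwinding, which is where the real code releases the implicit
// "strong weak" count and frees the allocation once the weak count reaches zero.
struct Guard;
impl Drop for Guard {
    fn drop(&mut self) {
        println!("guard dropped: the allocation would be freed here");
    }
}

// Stand-in for a user type whose destructor panics.
struct Payload;
impl Drop for Payload {
    fn drop(&mut self) {
        panic!("destructor of the contained value panicked");
    }
}

fn drop_slow_model(payload: Payload) {
    // Bind the guard *before* dropping the payload, mirroring how the patch
    // constructs `_weak` before calling `ptr::drop_in_place`.
    let _guard = Guard;
    drop(payload); // panics, but `_guard` is still dropped during unwinding
}

fn main() {
    let result = catch_unwind(AssertUnwindSafe(|| drop_slow_model(Payload)));
    assert!(result.is_err());
    // "guard dropped: ..." was printed even though `Payload::drop` panicked,
    // which is why the patched `drop` paths no longer leak the allocation.
}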