From 3045b75c6d5fb5011fd9cc3a4146bb984a037ca4 Mon Sep 17 00:00:00 2001
From: Stein Somers
Date: Fri, 22 Jan 2021 17:03:52 +0100
Subject: [PATCH] BTreeMap: disentangle Drop implementation from IntoIter

---
 library/alloc/src/collections/btree/map.rs    |  60 ++++++----
 .../alloc/src/collections/btree/navigate.rs   | 111 ++++++++++++------
 2 files changed, 106 insertions(+), 65 deletions(-)

diff --git a/library/alloc/src/collections/btree/map.rs b/library/alloc/src/collections/btree/map.rs
index dc1098757268a..bea83a37d531b 100644
--- a/library/alloc/src/collections/btree/map.rs
+++ b/library/alloc/src/collections/btree/map.rs
@@ -145,8 +145,8 @@ pub struct BTreeMap<K, V> {
 #[stable(feature = "btree_drop", since = "1.7.0")]
 unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for BTreeMap<K, V> {
     fn drop(&mut self) {
-        unsafe {
-            drop(ptr::read(self).into_iter());
+        if let Some(root) = self.root.take() {
+            Dropper { front: root.into_dying().first_leaf_edge(), remaining_length: self.length };
         }
     }
 }
@@ -332,6 +332,14 @@ impl<K: Debug, V: Debug> fmt::Debug for IntoIter<K, V> {
     }
 }
 
+/// A simplified version of `IntoIter` that is not double-ended and has only one
+/// purpose: to drop the remainder of an `IntoIter`. Therefore it also serves to
+/// drop an entire tree without the need to first look up a `back` leaf edge.
+struct Dropper<K, V> {
+    front: Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge>,
+    remaining_length: usize,
+}
+
 /// An iterator over the keys of a `BTreeMap`.
 ///
 /// This `struct` is created by the [`keys`] method on [`BTreeMap`]. See its
@@ -1410,42 +1418,42 @@ impl<K, V> IntoIterator for BTreeMap<K, V> {
     }
 }
 
-#[stable(feature = "btree_drop", since = "1.7.0")]
-impl<K, V> Drop for IntoIter<K, V> {
+impl<K, V> Drop for Dropper<K, V> {
     fn drop(&mut self) {
-        struct DropGuard<'a, K, V>(&'a mut IntoIter<K, V>);
+        // Similar to advancing a non-fusing iterator.
+        fn next_or_end<K, V>(this: &mut Dropper<K, V>) -> Option<(K, V)> {
+            if this.remaining_length == 0 {
+                unsafe { ptr::read(&this.front).deallocating_end() }
+                None
+            } else {
+                this.remaining_length -= 1;
+                Some(unsafe { this.front.deallocating_next_unchecked() })
+            }
+        }
+
+        struct DropGuard<'a, K, V>(&'a mut Dropper<K, V>);
 
         impl<'a, K, V> Drop for DropGuard<'a, K, V> {
             fn drop(&mut self) {
                 // Continue the same loop we perform below. This only runs when unwinding, so we
                 // don't have to care about panics this time (they'll abort).
-                while let Some(_) = self.0.next() {}
-
-                unsafe {
-                    let mut node =
-                        ptr::read(&self.0.front).unwrap_unchecked().into_node().forget_type();
-                    while let Some(parent) = node.deallocate_and_ascend() {
-                        node = parent.into_node().forget_type();
-                    }
-                }
+                while let Some(_pair) = next_or_end(&mut self.0) {}
             }
         }
 
-        while let Some(pair) = self.next() {
+        while let Some(pair) = next_or_end(self) {
             let guard = DropGuard(self);
             drop(pair);
             mem::forget(guard);
         }
+    }
+}
 
-        unsafe {
-            if let Some(front) = ptr::read(&self.front) {
-                let mut node = front.into_node().forget_type();
-                // Most of the nodes have been deallocated while traversing
-                // but one pile from a leaf up to the root is left standing.
-                while let Some(parent) = node.deallocate_and_ascend() {
-                    node = parent.into_node().forget_type();
-                }
-            }
+#[stable(feature = "btree_drop", since = "1.7.0")]
+impl<K, V> Drop for IntoIter<K, V> {
+    fn drop(&mut self) {
+        if let Some(front) = self.front.take() {
+            Dropper { front, remaining_length: self.length };
         }
     }
 }
@@ -1459,7 +1467,7 @@ impl<K, V> Iterator for IntoIter<K, V> {
             None
         } else {
             self.length -= 1;
-            Some(unsafe { self.front.as_mut().unwrap().next_unchecked() })
+            Some(unsafe { self.front.as_mut().unwrap().deallocating_next_unchecked() })
         }
     }
 
@@ -1475,7 +1483,7 @@ impl<K, V> DoubleEndedIterator for IntoIter<K, V> {
             None
         } else {
            self.length -= 1;
-            Some(unsafe { self.back.as_mut().unwrap().next_back_unchecked() })
+            Some(unsafe { self.back.as_mut().unwrap().deallocating_next_back_unchecked() })
        }
    }
 }
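The new `Dropper::drop` above keeps the drop-guard trick of the old `IntoIter::drop`: each key-value pair is dropped while a freshly armed `DropGuard` is live, so if one of those drops panics, the guard's own `Drop` impl keeps consuming the remaining pairs during unwinding (a second panic there aborts, so no further protection is needed). The following is a minimal, self-contained sketch of that pattern, not the library code itself; the names (`Consumer`, a plain `Vec` standing in for the tree) are made up for illustration, and only `next_or_end` is borrowed from the patch:

    // A standalone model of the drop-guard pattern, not the library code itself.
    use core::mem;

    struct Consumer<T> {
        items: Vec<T>, // stands in for the not-yet-dropped part of the tree
    }

    impl<T> Consumer<T> {
        // Stand-in for `next_or_end`: hand out the next remaining element, if any.
        fn next_or_end(&mut self) -> Option<T> {
            self.items.pop()
        }
    }

    impl<T> Drop for Consumer<T> {
        fn drop(&mut self) {
            struct DropGuard<'a, T>(&'a mut Consumer<T>);

            impl<'a, T> Drop for DropGuard<'a, T> {
                fn drop(&mut self) {
                    // Runs only while unwinding from a panicking element drop:
                    // keep consuming; a second panic here would abort.
                    while let Some(_item) = self.0.next_or_end() {}
                }
            }

            while let Some(item) = self.next_or_end() {
                let guard = DropGuard(self); // arm the guard
                drop(item);                  // this drop may panic
                mem::forget(guard);          // disarm on success
            }
        }
    }

    fn main() {
        // All three strings are dropped exactly once, even if one drop were to panic.
        let _c = Consumer { items: vec!["a".to_string(), "b".to_string(), "c".to_string()] };
    }

The `mem::forget(guard)` on the happy path is what disarms the guard after each successful drop; only an unwind leaves it armed.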
diff --git a/library/alloc/src/collections/btree/navigate.rs b/library/alloc/src/collections/btree/navigate.rs
index 1ef2a572ddd91..43838578ca21f 100644
--- a/library/alloc/src/collections/btree/navigate.rs
+++ b/library/alloc/src/collections/btree/navigate.rs
@@ -289,37 +289,76 @@ impl<BorrowType: marker::BorrowType, K, V>
     }
 }
 
-macro_rules! def_next_kv_uncheched_dealloc {
-    { unsafe fn $name:ident : $adjacent_kv:ident } => {
-        /// Given a leaf edge handle into an owned tree, returns a handle to the next KV,
-        /// while deallocating any node left behind yet leaving the corresponding edge
-        /// in its parent node dangling.
-        ///
-        /// # Safety
-        /// - The leaf edge must not be the last one in the direction travelled.
-        /// - The node carrying the next KV returned must not have been deallocated by a
-        ///   previous call on any handle obtained for this tree.
-        unsafe fn $name <K, V>(
-            leaf_edge: Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge>,
-        ) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
-            let mut edge = leaf_edge.forget_node_type();
-            loop {
-                edge = match edge.$adjacent_kv() {
-                    Ok(internal_kv) => return internal_kv,
-                    Err(last_edge) => {
-                        unsafe {
-                            let parent_edge = last_edge.into_node().deallocate_and_ascend();
-                            parent_edge.unwrap_unchecked().forget_node_type()
-                        }
-                    }
+impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
+    /// Given a leaf edge handle into a dying tree, returns the next leaf edge
+    /// on the right side, and the key-value pair in between, which is either
+    /// in the same leaf node, in an ancestor node, or non-existent.
+    ///
+    /// This method also deallocates any node(s) it reaches the end of. This
+    /// implies that if no more key-value pair exists, the entire remainder of
+    /// the tree will have been deallocated and there is nothing left to return.
+    ///
+    /// # Safety
+    /// The given edge must not have been previously returned by counterpart
+    /// `deallocating_next_back`.
+    unsafe fn deallocating_next(self) -> Option<(Self, (K, V))> {
+        let mut edge = self.forget_node_type();
+        loop {
+            edge = match edge.right_kv() {
+                Ok(kv) => {
+                    let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
+                    let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
+                    return Some((kv.next_leaf_edge(), (k, v)));
                 }
+                Err(last_edge) => match unsafe { last_edge.into_node().deallocate_and_ascend() } {
+                    Some(parent_edge) => parent_edge.forget_node_type(),
+                    None => return None,
+                },
             }
         }
-    };
-}
+    }
 
-def_next_kv_uncheched_dealloc! {unsafe fn next_kv_unchecked_dealloc: right_kv}
-def_next_kv_uncheched_dealloc! {unsafe fn next_back_kv_unchecked_dealloc: left_kv}
+    /// Given a leaf edge handle into a dying tree, returns the next leaf edge
+    /// on the left side, and the key-value pair in between, which is either
+    /// in the same leaf node, in an ancestor node, or non-existent.
+    ///
+    /// This method also deallocates any node(s) it reaches the end of. This
+    /// implies that if no more key-value pair exists, the entire remainder of
+    /// the tree will have been deallocated and there is nothing left to return.
+    ///
+    /// # Safety
+    /// The given edge must not have been previously returned by counterpart
+    /// `deallocating_next`.
+    unsafe fn deallocating_next_back(self) -> Option<(Self, (K, V))> {
+        let mut edge = self.forget_node_type();
+        loop {
+            edge = match edge.left_kv() {
+                Ok(kv) => {
+                    let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
+                    let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
+                    return Some((kv.next_back_leaf_edge(), (k, v)));
+                }
+                Err(last_edge) => match unsafe { last_edge.into_node().deallocate_and_ascend() } {
+                    Some(parent_edge) => parent_edge.forget_node_type(),
+                    None => return None,
+                },
+            }
+        }
+    }
+
+    /// Deallocates a pile of nodes from the leaf up to the root.
+    /// This is the only way to deallocate the remainder of a tree after
+    /// `deallocating_next` and `deallocating_next_back` have been nibbling at
+    /// both sides of the tree, and have hit the same edge. As it is intended
+    /// only to be called when all keys and values have been returned,
+    /// no cleanup is done on any of the keys or values.
+    pub fn deallocating_end(self) {
+        let mut edge = self.forget_node_type();
+        while let Some(parent_edge) = unsafe { edge.into_node().deallocate_and_ascend() } {
+            edge = parent_edge.forget_node_type();
+        }
+    }
+}
 
 impl<'a, K, V> Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge> {
     /// Moves the leaf edge handle to the next leaf edge and returns references to the
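The hunk above replaces the `def_next_kv_uncheched_dealloc` macro with inherent methods that move a leaf edge forward or backward while deallocating every node they step out of via `deallocate_and_ascend`. As a rough intuition only, here is a toy model in safe Rust; the `DyingList` and `Node` types are invented for this sketch (a singly linked list, so there are no parents to ascend through) and are not the real B-tree handles. Handing out the next value frees the node that held it, so a traversal that reaches the end leaves nothing behind to clean up:

    // A toy model with an invented `DyingList`; the real code walks B-tree nodes
    // and ascends through parents instead of following a single `next` pointer.
    struct Node<T> {
        value: T,
        next: Option<Box<Node<T>>>,
    }

    struct DyingList<T> {
        head: Option<Box<Node<T>>>,
    }

    impl<T> DyingList<T> {
        // Loose analogue of `deallocating_next`: return the next value and free the
        // node that held it. Once it returns `None`, nothing is left to deallocate.
        fn deallocating_next(&mut self) -> Option<T> {
            self.head.take().map(|boxed| {
                let node = *boxed; // moving the contents out frees the heap allocation
                self.head = node.next;
                node.value
            })
        }
    }

    fn main() {
        let mut list = DyingList {
            head: Some(Box::new(Node {
                value: "front",
                next: Some(Box::new(Node { value: "back", next: None })),
            })),
        };
        while let Some(value) = list.deallocating_next() {
            println!("{value}");
        }
        // Every node has already been freed; dropping `list` has nothing left to do.
    }

The real methods additionally have to climb into parent nodes when a leaf runs out of key-value pairs, which is why `deallocating_end` still has a pile of ancestors to free once the forward and backward cursors meet.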
@@ -394,12 +433,9 @@ impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
     /// The only safe way to proceed with the updated handle is to compare it, drop it,
     /// call this method again subject to its safety conditions, or call counterpart
     /// `next_back_unchecked` subject to its safety conditions.
-    pub unsafe fn next_unchecked(&mut self) -> (K, V) {
-        super::mem::replace(self, |leaf_edge| {
-            let kv = unsafe { next_kv_unchecked_dealloc(leaf_edge) };
-            let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
-            let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
-            (kv.next_leaf_edge(), (k, v))
+    pub unsafe fn deallocating_next_unchecked(&mut self) -> (K, V) {
+        super::mem::replace(self, |leaf_edge| unsafe {
+            leaf_edge.deallocating_next().unwrap_unchecked()
         })
     }
 
@@ -415,12 +451,9 @@ impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
     /// The only safe way to proceed with the updated handle is to compare it, drop it,
     /// call this method again subject to its safety conditions, or call counterpart
     /// `next_unchecked` subject to its safety conditions.
-    pub unsafe fn next_back_unchecked(&mut self) -> (K, V) {
-        super::mem::replace(self, |leaf_edge| {
-            let kv = unsafe { next_back_kv_unchecked_dealloc(leaf_edge) };
-            let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
-            let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
-            (kv.next_back_leaf_edge(), (k, v))
+    pub unsafe fn deallocating_next_back_unchecked(&mut self) -> (K, V) {
+        super::mem::replace(self, |leaf_edge| unsafe {
+            leaf_edge.deallocating_next_back().unwrap_unchecked()
         })
     }
 }
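Behaviourally the rewrite is meant to be invisible: both drop paths still release every remaining pair exactly once, whether the whole map is dropped or an `IntoIter` is dropped halfway through. A quick illustration of that contract using only the public `BTreeMap` API (this snippet is not part of the patch; `Counted` and `DROPS` are invented here) counts drops of a value type:

    // Illustrative only; exercises the public API, not the patched internals.
    use std::collections::BTreeMap;
    use std::sync::atomic::{AtomicUsize, Ordering};

    static DROPS: AtomicUsize = AtomicUsize::new(0);

    struct Counted;

    impl Drop for Counted {
        fn drop(&mut self) {
            DROPS.fetch_add(1, Ordering::Relaxed);
        }
    }

    fn main() {
        // Path 1: dropping a whole map goes through `BTreeMap::drop`.
        let map: BTreeMap<u32, Counted> = (0..100).map(|i| (i, Counted)).collect();
        drop(map);
        assert_eq!(DROPS.swap(0, Ordering::Relaxed), 100);

        // Path 2: dropping a partly consumed iterator goes through `IntoIter::drop`.
        let map: BTreeMap<u32, Counted> = (0..100).map(|i| (i, Counted)).collect();
        let mut iter = map.into_iter();
        for _ in 0..30 {
            iter.next(); // 30 pairs are dropped here, one by one
        }
        drop(iter); // the remaining 70 values are dropped here
        assert_eq!(DROPS.swap(0, Ordering::Relaxed), 100);
    }

The first assertion goes through the new `BTreeMap::drop`, which builds a `Dropper` straight from the root; the second goes through `IntoIter::drop`, where the 70 unconsumed values are released by the same `Dropper` loop.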