@@ -93,7 +93,7 @@ impl<T> ArenaChunk<T> {
     #[inline]
     fn end(&mut self) -> *mut T {
         unsafe {
-            if mem::size_of::<T>() == 0 {
+            if size_of::<T>() == 0 {
                 // A pointer as large as possible for zero-sized elements.
                 ptr::without_provenance_mut(!0)
             } else {
@@ -151,7 +151,7 @@ impl<T> TypedArena<T> {
         }

         unsafe {
-            if mem::size_of::<T>() == 0 {
+            if size_of::<T>() == 0 {
                 self.ptr.set(self.ptr.get().wrapping_byte_add(1));
                 let ptr = ptr::NonNull::<T>::dangling().as_ptr();
                 // Don't drop the object. This `write` is equivalent to `forget`.
@@ -173,13 +173,13 @@ impl<T> TypedArena<T> {
         // FIXME: this should *likely* use `offset_from`, but more
         // investigation is needed (including running tests in miri).
         let available_bytes = self.end.get().addr() - self.ptr.get().addr();
-        let additional_bytes = additional.checked_mul(mem::size_of::<T>()).unwrap();
+        let additional_bytes = additional.checked_mul(size_of::<T>()).unwrap();
         available_bytes >= additional_bytes
     }

     #[inline]
     fn alloc_raw_slice(&self, len: usize) -> *mut T {
-        assert!(mem::size_of::<T>() != 0);
+        assert!(size_of::<T>() != 0);
         assert!(len != 0);

         // Ensure the current chunk can fit `len` objects.
@@ -213,7 +213,7 @@ impl<T> TypedArena<T> {
         // So we collect all the elements beforehand, which takes care of reentrancy and panic
         // safety. This function is much less hot than `DroplessArena::alloc_from_iter`, so it
         // doesn't need to be hyper-optimized.
-        assert!(mem::size_of::<T>() != 0);
+        assert!(size_of::<T>() != 0);

         let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect();
         if vec.is_empty() {
@@ -236,7 +236,7 @@ impl<T> TypedArena<T> {
         unsafe {
             // We need the element size to convert chunk sizes (ranging from
             // PAGE to HUGE_PAGE bytes) to element counts.
-            let elem_size = cmp::max(1, mem::size_of::<T>());
+            let elem_size = cmp::max(1, size_of::<T>());
             let mut chunks = self.chunks.borrow_mut();
             let mut new_cap;
             if let Some(last_chunk) = chunks.last_mut() {
@@ -246,7 +246,7 @@ impl<T> TypedArena<T> {
                     // FIXME: this should *likely* use `offset_from`, but more
                     // investigation is needed (including running tests in miri).
                     let used_bytes = self.ptr.get().addr() - last_chunk.start().addr();
-                    last_chunk.entries = used_bytes / mem::size_of::<T>();
+                    last_chunk.entries = used_bytes / size_of::<T>();
                 }

                 // If the previous chunk's len is less than HUGE_PAGE
@@ -276,15 +276,15 @@ impl<T> TypedArena<T> {
         let end = self.ptr.get().addr();
         // We then calculate the number of elements to be dropped in the last chunk,
         // which is the filled area's length.
-        let diff = if mem::size_of::<T>() == 0 {
+        let diff = if size_of::<T>() == 0 {
             // `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get
             // the number of zero-sized values in the last and only chunk, just out of caution.
             // Recall that `end` was incremented for each allocated value.
             end - start
         } else {
             // FIXME: this should *likely* use `offset_from`, but more
             // investigation is needed (including running tests in miri).
-            (end - start) / mem::size_of::<T>()
+            (end - start) / size_of::<T>()
         };
         // Pass that to the `destroy` method.
         unsafe {
@@ -329,7 +329,7 @@ fn align_up(val: usize, align: usize) -> usize {

 // Pointer alignment is common in compiler types, so keep `DroplessArena` aligned to them
 // to optimize away alignment code.
-const DROPLESS_ALIGNMENT: usize = mem::align_of::<usize>();
+const DROPLESS_ALIGNMENT: usize = align_of::<usize>();

 /// An arena that can hold objects of multiple different types that impl `Copy`
 /// and/or satisfy `!mem::needs_drop`.
@@ -447,7 +447,7 @@ impl DroplessArena {
     #[inline]
     pub fn alloc<T>(&self, object: T) -> &mut T {
         assert!(!mem::needs_drop::<T>());
-        assert!(mem::size_of::<T>() != 0);
+        assert!(size_of::<T>() != 0);

         let mem = self.alloc_raw(Layout::new::<T>()) as *mut T;

@@ -471,7 +471,7 @@ impl DroplessArena {
         T: Copy,
     {
         assert!(!mem::needs_drop::<T>());
-        assert!(mem::size_of::<T>() != 0);
+        assert!(size_of::<T>() != 0);
         assert!(!slice.is_empty());

         let mem = self.alloc_raw(Layout::for_value::<[T]>(slice)) as *mut T;
@@ -546,7 +546,7 @@ impl DroplessArena {
         // Warning: this function is reentrant: `iter` could hold a reference to `&self` and
         // allocate additional elements while we're iterating.
         let iter = iter.into_iter();
-        assert!(mem::size_of::<T>() != 0);
+        assert!(size_of::<T>() != 0);
         assert!(!mem::needs_drop::<T>());

         let size_hint = iter.size_hint();
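
The only change throughout the patch is dropping the `mem::` path prefix: `size_of`, `size_of_val`, `align_of`, and `align_of_val` are re-exported through the standard prelude since Rust 1.80, so the unqualified calls resolve to the same `core::mem` functions. A minimal standalone sketch of that prelude behavior (the `Chunk` type here is a made-up illustration, not the crate's `ArenaChunk`):

```rust
// Minimal sketch, independent of rustc_arena: no `use std::mem;` is needed,
// because `size_of` and `align_of` are free functions in the prelude (Rust 1.80+).
#[repr(C)]
struct Chunk {
    start: usize,
    entries: usize,
}

fn main() {
    // Unqualified calls resolve to `core::mem::size_of` / `core::mem::align_of`.
    assert_eq!(size_of::<Chunk>(), 2 * size_of::<usize>());
    assert_eq!(align_of::<Chunk>(), align_of::<usize>());

    // Zero-sized types report a size of 0, which is why the arena code keeps
    // its `size_of::<T>() == 0` special cases for ZST allocation.
    assert_eq!(size_of::<()>(), 0);

    println!("Chunk: size={}, align={}", size_of::<Chunk>(), align_of::<Chunk>());
}
```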