1919
2020use crate :: RngCore ;
2121use core:: cmp:: min;
22- use zerocopy:: { Immutable , IntoBytes } ;
2322
2423/// Implement `next_u64` via `next_u32`, little-endian order.
2524pub fn next_u64_via_u32 < R : RngCore + ?Sized > ( rng : & mut R ) -> u64 {
@@ -53,17 +52,22 @@ pub fn fill_bytes_via_next<R: RngCore + ?Sized>(rng: &mut R, dest: &mut [u8]) {
5352 }
5453}
5554
/// Helper for implementing `fill_bytes` from a word-producing RNG: a word
/// type that can be decomposed into its little-endian byte representation.
///
/// Serializing each word explicitly to LE bytes (instead of byte-swapping the
/// source buffer in place and reinterpreting it) keeps RNG output portable
/// across endianness without unsafe casts or external dependencies.
trait Observable: Copy {
    /// The little-endian byte-array form of this word
    /// (e.g. `[u8; 4]` for `u32`).
    type Bytes: Sized + IntoIterator<Item = u8>;

    /// Return this value's bytes in little-endian order.
    fn to_le_bytes(self) -> Self::Bytes;
}

impl Observable for u32 {
    type Bytes = [u8; 4];

    fn to_le_bytes(self) -> Self::Bytes {
        // Resolves to the *inherent* `u32::to_le_bytes` — inherent associated
        // functions take precedence over trait methods — so this is a plain
        // delegation, not infinite recursion.
        Self::to_le_bytes(self)
    }
}

impl Observable for u64 {
    type Bytes = [u8; 8];

    fn to_le_bytes(self) -> Self::Bytes {
        // Inherent `u64::to_le_bytes`, not a recursive trait call (see u32).
        Self::to_le_bytes(self)
    }
}
6973
@@ -77,16 +81,10 @@ fn fill_via_chunks<T: Observable>(src: &mut [T], dest: &mut [u8]) -> (usize, usi
7781 let byte_len = min ( core:: mem:: size_of_val ( src) , dest. len ( ) ) ;
7882 let num_chunks = ( byte_len + size - 1 ) / size;
7983
80- // Byte-swap for portability of results. This must happen before copying
81- // since the size of dest is not guaranteed to be a multiple of T or to be
82- // sufficiently aligned.
83- if cfg ! ( target_endian = "big" ) {
84- for x in & mut src[ ..num_chunks] {
85- * x = x. to_le ( ) ;
86- }
87- }
88-
89- dest[ ..byte_len] . copy_from_slice ( & <[ T ] >:: as_bytes ( & src[ ..num_chunks] ) [ ..byte_len] ) ;
84+ // Always using little endian for portability of results.
85+ dest. into_iter ( )
86+ . zip ( src. iter ( ) . map ( |t| t. to_le_bytes ( ) ) . flatten ( ) )
87+ . for_each ( |( dest, src) | * dest = src) ;
9088
9189 ( num_chunks, byte_len)
9290}
0 commit comments