@@ -204,14 +204,30 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
         let alloc_align = alloc.inner().align;
         assert!(alloc_align >= layout.align.abi);
 
+        // Returns `None` when the value is partially undefined or any byte of it has provenance.
+        // Otherwise returns the value or (if the entire value is undef) returns an undef.
         let read_scalar = |start, size, s: abi::Scalar, ty| {
+            let range = alloc_range(start, size);
             match alloc.0.read_scalar(
                 bx,
-                alloc_range(start, size),
+                range,
                 /*read_provenance*/ matches!(s.primitive(), abi::Primitive::Pointer(_)),
             ) {
-                Ok(val) => bx.scalar_to_backend(val, s, ty),
-                Err(_) => bx.const_poison(ty),
+                Ok(val) => Some(bx.scalar_to_backend(val, s, ty)),
+                Err(_) => {
+                    // We may have failed due to partial provenance or unexpected provenance,
+                    // continue down the normal code path if so.
+                    if alloc.0.provenance().range_empty(range, &bx.tcx())
+                        // Since `read_scalar` failed, but there were no relocations involved, the
+                        // bytes must be partially or fully uninitialized. Thus we can now unwrap the
+                        // information about the range of uninit bytes and check if it's the full range.
+                        && alloc.0.init_mask().is_range_initialized(range).unwrap_err() == range
+                    {
+                        Some(bx.const_undef(ty))
+                    } else {
+                        None
+                    }
+                }
             }
         };
 
@@ -222,16 +238,14 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
         // check that walks over the type of `mplace` to make sure it is truly correct to treat this
         // like a `Scalar` (or `ScalarPair`).
         match layout.backend_repr {
-            BackendRepr::Scalar(s @ abi::Scalar::Initialized { .. }) => {
+            BackendRepr::Scalar(s) => {
                 let size = s.size(bx);
                 assert_eq!(size, layout.size, "abi::Scalar size does not match layout size");
-                let val = read_scalar(offset, size, s, bx.immediate_backend_type(layout));
-                OperandRef { val: OperandValue::Immediate(val), layout }
+                if let Some(val) = read_scalar(offset, size, s, bx.immediate_backend_type(layout)) {
+                    return OperandRef { val: OperandValue::Immediate(val), layout };
+                }
             }
-            BackendRepr::ScalarPair(
-                a @ abi::Scalar::Initialized { .. },
-                b @ abi::Scalar::Initialized { .. },
-            ) => {
+            BackendRepr::ScalarPair(a, b) => {
                 let (a_size, b_size) = (a.size(bx), b.size(bx));
                 let b_offset = (offset + a_size).align_to(b.align(bx).abi);
                 assert!(b_offset.bytes() > 0);
@@ -247,20 +261,21 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
                     b,
                     bx.scalar_pair_element_backend_type(layout, 1, true),
                 );
-                OperandRef { val: OperandValue::Pair(a_val, b_val), layout }
-            }
-            _ if layout.is_zst() => OperandRef::zero_sized(layout),
-            _ => {
-                // Neither a scalar nor scalar pair. Load from a place
-                // FIXME: should we cache `const_data_from_alloc` to avoid repeating this for the
-                // same `ConstAllocation`?
-                let init = bx.const_data_from_alloc(alloc);
-                let base_addr = bx.static_addr_of(init, alloc_align, None);
-
-                let llval = bx.const_ptr_byte_offset(base_addr, offset);
-                bx.load_operand(PlaceRef::new_sized(llval, layout))
+                if let (Some(a_val), Some(b_val)) = (a_val, b_val) {
+                    return OperandRef { val: OperandValue::Pair(a_val, b_val), layout };
+                }
             }
+            _ if layout.is_zst() => return OperandRef::zero_sized(layout),
+            _ => {}
         }
+        // Neither a scalar nor scalar pair. Load from a place
+        // FIXME: should we cache `const_data_from_alloc` to avoid repeating this for the
+        // same `ConstAllocation`?
+        let init = bx.const_data_from_alloc(alloc);
+        let base_addr = bx.static_addr_of(init, alloc_align, None);
+
+        let llval = bx.const_ptr_byte_offset(base_addr, offset);
+        bx.load_operand(PlaceRef::new_sized(llval, layout))
     }
 
     /// Asserts that this operand refers to a scalar and returns
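For readers skimming the commit: the change turns the `read_scalar` closure into an `Option`-returning helper and restructures the match so that `None` falls through to the existing load-from-place path. The sketch below models that control flow in isolation. It is illustrative only: `MiniAlloc`, `try_read_scalar`, `lower_const`, and `Backend` are assumed names for this sketch, not rustc's `Allocation` or builder API, and provenance is reduced to a per-byte flag.

// Minimal, self-contained sketch (assumed names, not rustc API) of the
// fallback logic introduced above: read a scalar straight from a constant
// allocation when possible, emit undef when the range is entirely
// uninitialized, and otherwise signal the caller to load from memory.

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Backend {
    Imm(u128), // stands in for a backend immediate from `scalar_to_backend`
    Undef,     // stands in for `const_undef`
    Loaded,    // stands in for the load-from-place fallback
}

struct MiniAlloc {
    bytes: Vec<u8>,
    init: Vec<bool>,       // per-byte init mask (simplified `init_mask()`)
    provenance: Vec<bool>, // per-byte provenance flag (simplified `provenance()`)
}

impl MiniAlloc {
    // Mirrors the shape of the new `read_scalar` closure, ignoring the
    // pointer/read_provenance case: `Some(value)` for a fully initialized,
    // provenance-free range, `Some(Undef)` when the whole range is
    // uninitialized, and `None` otherwise.
    fn try_read_scalar(&self, start: usize, size: usize) -> Option<Backend> {
        let range = start..start + size;
        if self.provenance[range.clone()].iter().any(|&p| p) {
            return None; // any provenance: take the normal memory path
        }
        let init = &self.init[range.clone()];
        if init.iter().all(|&i| i) {
            // Fully initialized: assemble a little-endian immediate.
            let mut val = 0u128;
            for &b in self.bytes[range].iter().rev() {
                val = (val << 8) | b as u128;
            }
            Some(Backend::Imm(val))
        } else if init.iter().all(|&i| !i) {
            Some(Backend::Undef) // entirely uninit: an undef immediate is fine
        } else {
            None // partially uninit: fall back to the memory path
        }
    }
}

// Same control flow as the rewritten match: try the immediate path first,
// otherwise fall through to the shared load-from-place code.
fn lower_const(alloc: &MiniAlloc, start: usize, size: usize) -> Backend {
    alloc.try_read_scalar(start, size).unwrap_or(Backend::Loaded)
}

fn main() {
    let full = MiniAlloc { bytes: vec![1, 0], init: vec![true; 2], provenance: vec![false; 2] };
    let undef = MiniAlloc { bytes: vec![0, 0], init: vec![false; 2], provenance: vec![false; 2] };
    let partial = MiniAlloc { bytes: vec![1, 0], init: vec![true, false], provenance: vec![false; 2] };
    assert_eq!(lower_const(&full, 0, 2), Backend::Imm(1));
    assert_eq!(lower_const(&undef, 0, 2), Backend::Undef);
    assert_eq!(lower_const(&partial, 0, 2), Backend::Loaded);
}

In the real change, a `None` from `read_scalar` makes the `Scalar`/`ScalarPair` arms fall through so the shared `const_data_from_alloc` + `load_operand` code at the end of the function handles partially initialized or provenance-carrying constants.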