1+ use std:: iter;
2+
13use log:: trace;
24
5+ use rand:: { seq:: IteratorRandom , Rng } ;
6+ use rustc_apfloat:: { Float , FloatConvert } ;
37use rustc_middle:: mir;
48use rustc_target:: abi:: Size ;
59
610use crate :: * ;
711
8- pub trait EvalContextExt < ' tcx > {
9- fn binary_ptr_op (
10- & self ,
11- bin_op : mir:: BinOp ,
12- left : & ImmTy < ' tcx , Provenance > ,
13- right : & ImmTy < ' tcx , Provenance > ,
14- ) -> InterpResult < ' tcx , ( ImmTy < ' tcx , Provenance > , bool ) > ;
15- }
16-
17- impl < ' mir , ' tcx > EvalContextExt < ' tcx > for super :: MiriInterpCx < ' mir , ' tcx > {
12+ impl < ' mir , ' tcx : ' mir > EvalContextExt < ' mir , ' tcx > for crate :: MiriInterpCx < ' mir , ' tcx > { }
13+ pub trait EvalContextExt < ' mir , ' tcx : ' mir > : crate :: MiriInterpCxExt < ' mir , ' tcx > {
1814 fn binary_ptr_op (
1915 & self ,
2016 bin_op : mir:: BinOp ,
@@ -23,12 +19,13 @@ impl<'mir, 'tcx> EvalContextExt<'tcx> for super::MiriInterpCx<'mir, 'tcx> {
2319 ) -> InterpResult < ' tcx , ( ImmTy < ' tcx , Provenance > , bool ) > {
2420 use rustc_middle:: mir:: BinOp :: * ;
2521
22+ let this = self . eval_context_ref ( ) ;
2623 trace ! ( "ptr_op: {:?} {:?} {:?}" , * left, bin_op, * right) ;
2724
2825 Ok ( match bin_op {
2926 Eq | Ne | Lt | Le | Gt | Ge => {
3027 assert_eq ! ( left. layout. abi, right. layout. abi) ; // types can differ, e.g. fn ptrs with different `for`
31- let size = self . pointer_size ( ) ;
28+ let size = this . pointer_size ( ) ;
3229 // Just compare the bits. ScalarPairs are compared lexicographically.
3330 // We thus always compare pairs and simply fill scalars up with 0.
3431 let left = match * * left {
@@ -50,34 +47,75 @@ impl<'mir, 'tcx> EvalContextExt<'tcx> for super::MiriInterpCx<'mir, 'tcx> {
5047 Ge => left >= right,
5148 _ => bug ! ( ) ,
5249 } ;
53- ( ImmTy :: from_bool ( res, * self . tcx ) , false )
50+ ( ImmTy :: from_bool ( res, * this . tcx ) , false )
5451 }
5552
5653 // Some more operations are possible with atomics.
5754 // The return value always has the provenance of the *left* operand.
5855 Add | Sub | BitOr | BitAnd | BitXor => {
5956 assert ! ( left. layout. ty. is_unsafe_ptr( ) ) ;
6057 assert ! ( right. layout. ty. is_unsafe_ptr( ) ) ;
61- let ptr = left. to_scalar ( ) . to_pointer ( self ) ?;
58+ let ptr = left. to_scalar ( ) . to_pointer ( this ) ?;
6259 // We do the actual operation with usize-typed scalars.
63- let left = ImmTy :: from_uint ( ptr. addr ( ) . bytes ( ) , self . machine . layouts . usize ) ;
60+ let left = ImmTy :: from_uint ( ptr. addr ( ) . bytes ( ) , this . machine . layouts . usize ) ;
6461 let right = ImmTy :: from_uint (
65- right. to_scalar ( ) . to_target_usize ( self ) ?,
66- self . machine . layouts . usize ,
62+ right. to_scalar ( ) . to_target_usize ( this ) ?,
63+ this . machine . layouts . usize ,
6764 ) ;
68- let ( result, overflowing) = self . overflowing_binary_op ( bin_op, & left, & right) ?;
65+ let ( result, overflowing) = this . overflowing_binary_op ( bin_op, & left, & right) ?;
6966 // Construct a new pointer with the provenance of `ptr` (the LHS).
7067 let result_ptr = Pointer :: new (
7168 ptr. provenance ,
72- Size :: from_bytes ( result. to_scalar ( ) . to_target_usize ( self ) ?) ,
69+ Size :: from_bytes ( result. to_scalar ( ) . to_target_usize ( this ) ?) ,
7370 ) ;
7471 (
75- ImmTy :: from_scalar ( Scalar :: from_maybe_pointer ( result_ptr, self ) , left. layout ) ,
72+ ImmTy :: from_scalar ( Scalar :: from_maybe_pointer ( result_ptr, this ) , left. layout ) ,
7673 overflowing,
7774 )
7875 }
7976
80- _ => span_bug ! ( self . cur_span( ) , "Invalid operator on pointers: {:?}" , bin_op) ,
77+ _ => span_bug ! ( this . cur_span( ) , "Invalid operator on pointers: {:?}" , bin_op) ,
8178 } )
8279 }
80+
81+ fn generate_nan < F1 : Float + FloatConvert < F2 > , F2 : Float > ( & self , inputs : & [ F1 ] ) -> F2 {
82+ /// Make the given NaN a signaling NaN.
83+ /// Returns `None` if this would not result in a NaN.
84+ fn make_signaling < F : Float > ( f : F ) -> Option < F > {
85+ // The quiet/signaling bit is the leftmost bit in the mantissa.
86+ // That's position `PRECISION-1`, since `PRECISION` includes the fixed leading 1 bit,
87+ // and then we subtract 1 more since this is 0-indexed.
88+ let quiet_bit_mask = 1 << ( F :: PRECISION - 2 ) ;
89+ // Unset the bit. Double-check that this wasn't the last bit set in the payload.
90+ // (which would turn the NaN into an infinity).
91+ let f = F :: from_bits ( f. to_bits ( ) & !quiet_bit_mask) ;
92+ if f. is_nan ( ) { Some ( f) } else { None }
93+ }
94+
95+ let this = self . eval_context_ref ( ) ;
96+ let mut rand = this. machine . rng . borrow_mut ( ) ;
97+ // Assemble an iterator of possible NaNs: preferred, quieting propagation, unchanged propagation.
98+ // On some targets there are more possibilities; for now we just generate those options that
99+ // are possible everywhere.
100+ let preferred_nan = F2 :: qnan ( Some ( 0 ) ) ;
101+ let nans = iter:: once ( preferred_nan)
102+ . chain ( inputs. iter ( ) . filter ( |f| f. is_nan ( ) ) . map ( |& f| {
103+ // Regular apfloat cast is quieting.
104+ f. convert ( & mut false ) . value
105+ } ) )
106+ . chain ( inputs. iter ( ) . filter ( |f| f. is_signaling ( ) ) . filter_map ( |& f| {
107+ let f: F2 = f. convert ( & mut false ) . value ;
108+ // We have to de-quiet this again for unchanged propagation.
109+ make_signaling ( f)
110+ } ) ) ;
111+ // Pick one of the NaNs.
112+ let nan = nans. choose ( & mut * rand) . unwrap ( ) ;
113+ // Non-deterministically flip the sign.
114+ if rand. gen ( ) {
115+ // This will properly flip even for NaN.
116+ -nan
117+ } else {
118+ nan
119+ }
120+ }
83121}
0 commit comments