@@ -15,11 +15,10 @@ use rustc_middle::ty::{self, GenericArgsRef, Ty};
 use rustc_middle::{bug, span_bug};
 use rustc_span::{Span, Symbol, sym};
 use rustc_symbol_mangling::mangle_internal_symbol;
-use rustc_target::callconv::{FnAbi, PassMode};
 use rustc_target::spec::{HasTargetSpec, PanicStrategy};
 use tracing::debug;

-use crate::abi::{FnAbiLlvmExt, LlvmType};
+use crate::abi::FnAbiLlvmExt;
 use crate::builder::Builder;
 use crate::context::CodegenCx;
 use crate::llvm::{self, Metadata};
@@ -165,7 +164,6 @@ impl<'ll, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
     fn codegen_intrinsic_call(
         &mut self,
         instance: ty::Instance<'tcx>,
-        fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
         args: &[OperandRef<'tcx, &'ll Value>],
         result: PlaceRef<'tcx, &'ll Value>,
         span: Span,
@@ -263,7 +261,7 @@ impl<'ll, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
                 self.call_intrinsic("llvm.va_copy", &[args[0].immediate(), args[1].immediate()])
             }
             sym::va_arg => {
-                match fn_abi.ret.layout.backend_repr {
+                match result.layout.backend_repr {
                     BackendRepr::Scalar(scalar) => {
                         match scalar.primitive() {
                             Primitive::Int(..) => {
@@ -298,18 +296,12 @@ impl<'ll, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
             }

             sym::volatile_load | sym::unaligned_volatile_load => {
-                let tp_ty = fn_args.type_at(0);
                 let ptr = args[0].immediate();
-                let load = if let PassMode::Cast { cast: ty, pad_i32: _ } = &fn_abi.ret.mode {
-                    let llty = ty.llvm_type(self);
-                    self.volatile_load(llty, ptr)
-                } else {
-                    self.volatile_load(self.layout_of(tp_ty).llvm_type(self), ptr)
-                };
+                let load = self.volatile_load(result.layout.llvm_type(self), ptr);
                 let align = if name == sym::unaligned_volatile_load {
                     1
                 } else {
-                    self.align_of(tp_ty).bytes() as u32
+                    result.layout.align.abi.bytes() as u32
                 };
                 unsafe {
                     llvm::LLVMSetAlignment(load, align);
@@ -628,14 +620,12 @@ impl<'ll, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
             }
         };

-        if !fn_abi.ret.is_ignore() {
-            if let PassMode::Cast { .. } = &fn_abi.ret.mode {
-                self.store(llval, result.val.llval, result.val.align);
-            } else {
-                OperandRef::from_immediate_or_packed_pair(self, llval, result.layout)
-                    .val
-                    .store(self, result);
-            }
+        if result.layout.ty.is_bool() {
+            OperandRef::from_immediate_or_packed_pair(self, llval, result.layout)
+                .val
+                .store(self, result);
+        } else if !result.layout.ty.is_unit() {
+            self.store_to_place(llval, result.val);
         }
         Ok(())
     }
0 commit comments