@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use llvm::{self, BasicBlockRef, ValueRef, OperandBundleDef};
+use llvm::{self, ValueRef, OperandBundleDef};
 use rustc::ty;
 use rustc::mir::repr as mir;
 use abi::{Abi, FnType, ArgType};
@@ -34,22 +34,38 @@ use super::operand::OperandValue::{self, FatPtr, Immediate, Ref};
 
 impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
     pub fn trans_block(&mut self, bb: mir::BasicBlock) {
-        debug!("trans_block({:?})", bb);
-
         let mut bcx = self.bcx(bb);
         let mir = self.mir.clone();
         let data = mir.basic_block_data(bb);
 
+        debug!("trans_block({:?}={:?})", bb, data);
+
         // MSVC SEH bits
         let (cleanup_pad, cleanup_bundle) = if let Some((cp, cb)) = self.make_cleanup_pad(bb) {
             (Some(cp), Some(cb))
         } else {
             (None, None)
         };
-        let funclet_br = |bcx: BlockAndBuilder, llbb: BasicBlockRef| if let Some(cp) = cleanup_pad {
-            bcx.cleanup_ret(cp, Some(llbb));
-        } else {
-            bcx.br(llbb);
+        let funclet_br = |this: &Self, bcx: BlockAndBuilder, bb: mir::BasicBlock| {
+            if let Some(cp) = cleanup_pad {
+                bcx.cleanup_ret(cp, Some(this.blocks[bb.index()].llbb));
+            } else {
+                bcx.br(this.blocks[bb.index()].llbb);
+            }
+        };
+        let llblock = |this: &mut Self, target: mir::BasicBlock| {
+            let lltarget = this.blocks[target.index()].llbb;
+
+            if let Some(cp) = cleanup_pad {
+                debug!("llblock: creating cleanup trampoline for {:?}", target);
+                let name = &format!("{:?}_cleanup_trampoline_{:?}", bb, target);
+                let block = this.fcx.new_block(name, None);
+                let bcx = block.build();
+                bcx.cleanup_ret(cp, Some(lltarget));
+                block.llbb
+            } else {
+                lltarget
+            }
         };
 
         for statement in &data.statements {
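Note on the hunk above: with MSVC-style SEH, control may only leave a cleanup funclet via `cleanupret`, so a plain `br` from funclet code straight to an ordinary block is invalid IR. The new `llblock` closure handles this by synthesizing a trampoline block whose sole job is to `cleanupret` to the real target. A minimal self-contained model of that redirection logic, with plain data structures standing in for LLVM blocks (the `Exit`/`Graph` names are illustrative, not rustc API):

    // Illustrative model only: usize indices stand in for LLVM basic blocks.
    #[derive(Debug, Clone, Copy, PartialEq)]
    enum Exit {
        Br(usize),                // ordinary branch
        CleanupRet { to: usize }, // cleanupret %pad unwind label %to
    }

    struct Graph {
        blocks: Vec<Exit>,
    }

    impl Graph {
        fn new_block(&mut self, exit: Exit) -> usize {
            self.blocks.push(exit);
            self.blocks.len() - 1
        }

        // Mirrors the `llblock` closure: inside a cleanup pad, route the
        // edge through a fresh trampoline that ends in `cleanupret`.
        fn edge_to(&mut self, target: usize, in_cleanup_pad: bool) -> usize {
            if in_cleanup_pad {
                self.new_block(Exit::CleanupRet { to: target })
            } else {
                target
            }
        }
    }

    fn main() {
        let mut g = Graph { blocks: vec![Exit::Br(0)] };
        let direct = g.edge_to(0, false);
        let via_trampoline = g.edge_to(0, true);
        assert_eq!(direct, 0);
        assert_eq!(g.blocks[via_trampoline], Exit::CleanupRet { to: 0 });
        println!("direct={} trampoline={}", direct, via_trampoline);
    }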
@@ -78,13 +94,14 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
             }
 
             mir::TerminatorKind::Goto { target } => {
-                funclet_br(bcx, self.llblock(target));
+                funclet_br(self, bcx, target);
             }
 
             mir::TerminatorKind::If { ref cond, targets: (true_bb, false_bb) } => {
                 let cond = self.trans_operand(&bcx, cond);
-                let lltrue = self.llblock(true_bb);
-                let llfalse = self.llblock(false_bb);
+
+                let lltrue = llblock(self, true_bb);
+                let llfalse = llblock(self, false_bb);
                 bcx.cond_br(cond.immediate(), lltrue, llfalse);
             }
 
@@ -106,18 +123,18 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
                     // code. This is especially helpful in cases like an if-let on a huge enum.
                     // Note: This optimization is only valid for exhaustive matches.
                     Some((&&bb, &c)) if c > targets.len() / 2 => {
-                        (Some(bb), self.blocks[bb.index()])
+                        (Some(bb), llblock(self, bb))
                     }
                     // We're generating an exhaustive switch, so the else branch
                     // can't be hit. Branching to an unreachable instruction
                     // lets LLVM know this
-                    _ => (None, self.unreachable_block())
+                    _ => (None, self.unreachable_block().llbb)
                 };
-                let switch = bcx.switch(discr, default_blk.llbb, targets.len());
+                let switch = bcx.switch(discr, default_blk, targets.len());
                 assert_eq!(adt_def.variants.len(), targets.len());
                 for (adt_variant, &target) in adt_def.variants.iter().zip(targets) {
                     if default_bb != Some(target) {
-                        let llbb = self.llblock(target);
+                        let llbb = llblock(self, target);
                         let llval = bcx.with_block(|bcx| adt::trans_case(
                                 bcx, &repr, Disr::from(adt_variant.disr_val)));
                         build::AddCase(switch, llval, llbb)
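The comments in the hunk above explain the trick: for an exhaustive match, a target that covers more than half the variants is hoisted into the switch's default arm (saving comparisons on the hot path, e.g. an if-let on a huge enum), and otherwise the default is an unreachable block. A rough sketch of that counting heuristic, assuming a plain slice of targets (`pick_default` is a hypothetical name, not the rustc internals):

    use std::collections::HashMap;

    /// Pick a default arm for an exhaustive switch: the target that appears
    /// for more than half the variants, if any (mirroring `c > targets.len() / 2`).
    fn pick_default(targets: &[usize]) -> Option<usize> {
        let mut counts: HashMap<usize, usize> = HashMap::new();
        for &t in targets {
            *counts.entry(t).or_insert(0) += 1;
        }
        counts
            .into_iter()
            .max_by_key(|&(_, c)| c)
            .and_then(|(t, c)| if c > targets.len() / 2 { Some(t) } else { None })
    }

    fn main() {
        // Most variants branch to the same block, so it becomes the default:
        assert_eq!(pick_default(&[7, 7, 7, 7, 2]), Some(7));
        // No dominant target: fall back to an unreachable default block.
        assert_eq!(pick_default(&[1, 2, 3]), None);
    }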
@@ -129,10 +146,10 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
                 let (otherwise, targets) = targets.split_last().unwrap();
                 let discr = bcx.load(self.trans_lvalue(&bcx, discr).llval);
                 let discr = bcx.with_block(|bcx| base::to_immediate(bcx, discr, switch_ty));
-                let switch = bcx.switch(discr, self.llblock(*otherwise), values.len());
+                let switch = bcx.switch(discr, llblock(self, *otherwise), values.len());
                 for (value, target) in values.iter().zip(targets) {
                     let val = Const::from_constval(bcx.ccx(), value.clone(), switch_ty);
-                    let llbb = self.llblock(*target);
+                    let llbb = llblock(self, *target);
                     build::AddCase(switch, val.llval, llbb)
                 }
             }
@@ -148,7 +165,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
                 let ty = lvalue.ty.to_ty(bcx.tcx());
                 // Double check for necessity to drop
                 if !glue::type_needs_drop(bcx.tcx(), ty) {
-                    funclet_br(bcx, self.llblock(target));
+                    funclet_br(self, bcx, target);
                     return;
                 }
                 let drop_fn = glue::get_drop_glue(bcx.ccx(), ty);
@@ -163,15 +180,12 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
                     let unwind = self.make_landing_pad(uwbcx);
                     bcx.invoke(drop_fn,
                                &[llvalue],
-                               self.llblock(target),
+                               self.blocks[target.index()].llbb,
                                unwind.llbb(),
                                cleanup_bundle.as_ref());
-                    self.bcx(target).at_start(|bcx| {
-                        debug_loc.apply_to_bcx(bcx);
-                    });
                 } else {
                     bcx.call(drop_fn, &[llvalue], cleanup_bundle.as_ref());
-                    funclet_br(bcx, self.llblock(target));
+                    funclet_br(self, bcx, target);
                 }
             }
 
@@ -213,7 +227,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
                         let llptr = self.trans_operand(&bcx, &args[0]).immediate();
                         let val = self.trans_operand(&bcx, &args[1]);
                         self.store_operand(&bcx, llptr, val);
-                        funclet_br(bcx, self.llblock(target));
+                        funclet_br(self, bcx, target);
                         return;
                     }
 
@@ -223,7 +237,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
                             this.trans_transmute(&bcx, &args[0], dest);
                         });
 
-                        funclet_br(bcx, self.llblock(target));
+                        funclet_br(self, bcx, target);
                         return;
                     }
 
@@ -328,7 +342,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
                     }
 
                     if let Some((_, target)) = *destination {
-                        funclet_br(bcx, self.llblock(target));
+                        funclet_br(self, bcx, target);
                     } else {
                         // trans_intrinsic_call already used Unreachable.
                         // bcx.unreachable();
@@ -376,9 +390,8 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
                         ty: sig.output.unwrap()
                     };
                     self.store_return(&bcx, ret_dest, fn_ty.ret, op);
-                    funclet_br(bcx, self.llblock(target));
+                    funclet_br(self, bcx, target);
                 } else {
-                    // no need to drop args, because the call never returns
                     bcx.unreachable();
                 }
             }
@@ -581,10 +594,6 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
         self.blocks[bb.index()].build()
     }
 
-    pub fn llblock(&self, bb: mir::BasicBlock) -> BasicBlockRef {
-        self.blocks[bb.index()].llbb
-    }
-
     fn make_return_dest(&mut self, bcx: &BlockAndBuilder<'bcx, 'tcx>,
                         dest: &mir::Lvalue<'tcx>, fn_ret_ty: &ArgType,
                         llargs: &mut Vec<ValueRef>, is_intrinsic: bool) -> ReturnDest {
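One note on the shape of the new closures: `funclet_br` and `llblock` take `this: &Self` / `this: &mut Self` as an explicit argument instead of capturing `self`, presumably because a closure that borrows `self` for its whole lifetime would conflict with the `&mut self` calls (`trans_operand`, `trans_lvalue`, and so on) made between invocations. A small self-contained sketch of that borrow-checker workaround, with hypothetical names:

    struct Ctx {
        blocks: Vec<u32>,
    }

    impl Ctx {
        fn mutate(&mut self) {
            self.blocks.push(0);
        }

        fn demo(&mut self) {
            // Capturing `self` here would hold a borrow across `self.mutate()`.
            // Passing `this` as an argument keeps the closure borrow-free.
            let lookup = |this: &Self, i: usize| this.blocks[i];

            self.mutate(); // fine: the closure borrows nothing
            let b = lookup(self, 0);
            println!("block {}", b);
        }
    }

    fn main() {
        Ctx { blocks: vec![] }.demo();
    }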