@@ -1,15 +1,13 @@
 //! Inlining pass for MIR functions

-use rustc_hir::def_id::DefId;
-
 use rustc_index::bit_set::BitSet;
 use rustc_index::vec::{Idx, IndexVec};

 use rustc::middle::codegen_fn_attrs::CodegenFnAttrFlags;
 use rustc::mir::visit::*;
 use rustc::mir::*;
 use rustc::session::config::Sanitizer;
-use rustc::ty::subst::{InternalSubsts, Subst, SubstsRef};
+use rustc::ty::subst::{InternalSubsts, Subst};
 use rustc::ty::{self, Instance, InstanceDef, ParamEnv, Ty, TyCtxt, TypeFoldable};

 use super::simplify::{remove_dead_blocks, CfgSimplifier};
@@ -32,8 +30,7 @@ pub struct Inline;

 #[derive(Copy, Clone, Debug)]
 struct CallSite<'tcx> {
-    callee: DefId,
-    substs: SubstsRef<'tcx>,
+    callee: Instance<'tcx>,
     bb: BasicBlock,
     source_info: SourceInfo,
 }
@@ -97,13 +94,19 @@ impl Inliner<'tcx> {
             local_change = false;
             while let Some(callsite) = callsites.pop_front() {
                 debug!("checking whether to inline callsite {:?}", callsite);
-                if !self.tcx.is_mir_available(callsite.callee) {
-                    debug!("checking whether to inline callsite {:?} - MIR unavailable", callsite);
-                    continue;
+
+                if let InstanceDef::Item(callee_def_id) = callsite.callee.def {
+                    if !self.tcx.is_mir_available(callee_def_id) {
+                        debug!(
+                            "checking whether to inline callsite {:?} - MIR unavailable",
+                            callsite,
+                        );
+                        continue;
+                    }
                 }

                 let self_node_id = self.tcx.hir().as_local_node_id(self.source.def_id()).unwrap();
-                let callee_node_id = self.tcx.hir().as_local_node_id(callsite.callee);
+                let callee_node_id = self.tcx.hir().as_local_node_id(callsite.callee.def_id());

                 let callee_body = if let Some(callee_node_id) = callee_node_id {
                     // Avoid a cycle here by using `optimized_mir` only if we have
@@ -113,19 +116,21 @@ impl Inliner<'tcx> {
                     if !self.tcx.dep_graph.is_fully_enabled()
                         && self_node_id.as_u32() < callee_node_id.as_u32()
                     {
-                        self.tcx.optimized_mir(callsite.callee)
+                        self.tcx.instance_mir(callsite.callee.def)
                     } else {
                         continue;
                     }
                 } else {
                     // This cannot result in a cycle since the callee MIR is from another crate
                     // and is already optimized.
-                    self.tcx.optimized_mir(callsite.callee)
+                    self.tcx.instance_mir(callsite.callee.def)
                 };

+                let callee_body: &Body<'tcx> = &*callee_body;
+
                 let callee_body = if self.consider_optimizing(callsite, callee_body) {
                     self.tcx.subst_and_normalize_erasing_regions(
-                        &callsite.substs,
+                        &callsite.callee.substs,
                         param_env,
                         callee_body,
                     )
@@ -186,18 +191,13 @@ impl Inliner<'tcx> {
         let terminator = bb_data.terminator();
         if let TerminatorKind::Call { func: ref op, .. } = terminator.kind {
             if let ty::FnDef(callee_def_id, substs) = op.ty(caller_body, self.tcx).kind {
-                let instance = Instance::resolve(self.tcx, param_env, callee_def_id, substs)?;
+                let callee = Instance::resolve(self.tcx, param_env, callee_def_id, substs)?;

-                if let InstanceDef::Virtual(..) = instance.def {
+                if let InstanceDef::Virtual(..) | InstanceDef::Intrinsic(_) = callee.def {
                     return None;
                 }

-                return Some(CallSite {
-                    callee: instance.def_id(),
-                    substs: instance.substs,
-                    bb,
-                    source_info: terminator.source_info,
-                });
+                return Some(CallSite { callee, bb, source_info: terminator.source_info });
             }
         }

@@ -222,7 +222,7 @@ impl Inliner<'tcx> {
             return false;
         }

-        let codegen_fn_attrs = tcx.codegen_fn_attrs(callsite.callee);
+        let codegen_fn_attrs = tcx.codegen_fn_attrs(callsite.callee.def_id());

         if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::TRACK_CALLER) {
             debug!("`#[track_caller]` present - not inlining");
@@ -267,8 +267,8 @@ impl Inliner<'tcx> {
         // Only inline local functions if they would be eligible for cross-crate
         // inlining. This is to ensure that the final crate doesn't have MIR that
         // references unexported symbols
-        if callsite.callee.is_local() {
-            if callsite.substs.non_erasable_generics().count() == 0 && !hinted {
+        if callsite.callee.def_id().is_local() {
+            if callsite.callee.substs.non_erasable_generics().count() == 0 && !hinted {
                 debug!("    callee is an exported function - not inlining");
                 return false;
             }
@@ -324,7 +324,7 @@ impl Inliner<'tcx> {
                     work_list.push(target);
                     // If the location doesn't actually need dropping, treat it like
                     // a regular goto.
-                    let ty = location.ty(callee_body, tcx).subst(tcx, callsite.substs).ty;
+                    let ty = location.ty(callee_body, tcx).subst(tcx, callsite.callee.substs).ty;
                     if ty.needs_drop(tcx, param_env) {
                         cost += CALL_PENALTY;
                         if let Some(unwind) = unwind {
@@ -374,7 +374,7 @@ impl Inliner<'tcx> {

         for v in callee_body.vars_and_temps_iter() {
             let v = &callee_body.local_decls[v];
-            let ty = v.ty.subst(tcx, callsite.substs);
+            let ty = v.ty.subst(tcx, callsite.callee.substs);
             // Cost of the var is the size in machine-words, if we know
             // it.
             if let Some(size) = type_size_of(tcx, param_env, ty) {
@@ -402,7 +402,7 @@ impl Inliner<'tcx> {
         &self,
         callsite: CallSite<'tcx>,
         caller_body: &mut BodyAndCache<'tcx>,
-        mut callee_body: BodyAndCache<'tcx>,
+        mut callee_body: Body<'tcx>,
     ) -> bool {
         let terminator = caller_body[callsite.bb].terminator.take().unwrap();
         match terminator.kind {
@@ -504,6 +504,13 @@ impl Inliner<'tcx> {
                     caller_body.var_debug_info.push(var_debug_info);
                 }

+                // HACK(eddyb) work around the `basic_blocks` field of `mir::Body`
+                // being private, due to `BodyAndCache` implementing `DerefMut`
+                // to `mir::Body` (which would allow bypassing `basic_blocks_mut`).
+                // The only way to make `basic_blocks` public again would be to
+                // remove that `DerefMut` impl and add more `*_mut` accessors.
+                let mut callee_body = BodyAndCache::new(callee_body);
+
                 for (bb, mut block) in callee_body.basic_blocks_mut().drain_enumerated(..) {
                     integrator.visit_basic_block_data(bb, &mut block);
                     caller_body.basic_blocks_mut().push(block);
@@ -557,7 +564,9 @@ impl Inliner<'tcx> {
         //     tmp2 = tuple_tmp.2
         //
         // and the vector is `[closure_ref, tmp0, tmp1, tmp2]`.
-        if tcx.is_closure(callsite.callee) {
+        // FIXME(eddyb) make this check for `"rust-call"` ABI combined with
+        // `callee_body.spread_arg == None`, instead of special-casing closures.
+        if tcx.is_closure(callsite.callee.def_id()) {
             let mut args = args.into_iter();
             let self_ = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_body);
             let tuple = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_body);