// compiler/rustc_mir/src/transform/inline/cycle.rs
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_middle::mir::TerminatorKind;
use rustc_middle::ty::TypeFoldable;
use rustc_middle::ty::{self, subst::SubstsRef, InstanceDef, TyCtxt};

// FIXME: check whether it is cheaper to precompute the entire call graph instead of invoking
// this query ridiculously often.
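/// Returns `true` if `target` is reachable (transitively, through calls) from `root` in the MIR
/// call graph. The inliner uses this to detect call cycles that would otherwise lead to query
/// cycles. The asserts below spell out the cases callers are expected to rule out themselves:
/// immediate self recursion, shims, and enum/struct constructor functions.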
#[instrument(skip(tcx, root, target))]
crate fn mir_callgraph_reachable(
    tcx: TyCtxt<'tcx>,
    (root, target): (ty::Instance<'tcx>, LocalDefId),
) -> bool {
    trace!(%root, target = %tcx.def_path_str(target.to_def_id()));
    let param_env = tcx.param_env_reveal_all_normalized(target);
    assert_ne!(
        root.def_id().expect_local(),
        target,
        "you should not call `mir_callgraph_reachable` on immediate self recursion"
    );
    assert!(
        matches!(root.def, InstanceDef::Item(_)),
        "you should not call `mir_callgraph_reachable` on shims"
    );
    assert!(
        !tcx.is_constructor(root.def_id()),
        "you should not call `mir_callgraph_reachable` on enum/struct constructor functions"
    );
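    // Helper: depth-first walk of the call graph starting at `caller`, returning `true` as soon
    // as a path to `target` is found, or when the recursion limit forces us to pessimistically
    // assume a cycle.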
    #[instrument(skip(tcx, param_env, target, stack, seen, recursion_limiter, caller))]
    fn process(
        tcx: TyCtxt<'tcx>,
        param_env: ty::ParamEnv<'tcx>,
        caller: ty::Instance<'tcx>,
        target: LocalDefId,
        stack: &mut Vec<ty::Instance<'tcx>>,
        seen: &mut FxHashSet<ty::Instance<'tcx>>,
        recursion_limiter: &mut FxHashMap<DefId, usize>,
    ) -> bool {
        trace!(%caller);
        for &(callee, substs) in tcx.mir_inliner_callees(caller.def) {
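            // Substitute the caller's generic arguments into the recorded callee and try to
            // resolve it to a concrete instance; callees that cannot be resolved yet (e.g.
            // still-generic trait methods) are skipped below.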
            let substs = caller.subst_mir_and_normalize_erasing_regions(tcx, param_env, substs);
            let callee = match ty::Instance::resolve(tcx, param_env, callee, substs).unwrap() {
                Some(callee) => callee,
                None => {
                    trace!(?callee, "cannot resolve, skipping");
                    continue;
                }
            };

            // Found a path.
            if callee.def_id() == target.to_def_id() {
                return true;
            }

            if tcx.is_constructor(callee.def_id()) {
                trace!("constructors always have MIR");
                // Constructor functions cannot cause a query cycle.
                continue;
            }

            match callee.def {
                InstanceDef::Item(_) => {
                    // If there is no MIR available (either because it was not serialized into
                    // metadata or because it is an extern function without MIR), then the
                    // inliner won't cause cycles on this.
                    if !tcx.is_mir_available(callee.def_id()) {
                        trace!(?callee, "no mir available, skipping");
                        continue;
                    }
                }
                // These have no callable MIR of their own.
                InstanceDef::Intrinsic(_) | InstanceDef::Virtual(..) => continue,
                // These have MIR, and if that MIR is inlined, substituted, and then inlining is
                // run again, a function item can end up getting inlined. Thus they can cause a
                // cycle that way.
                InstanceDef::VtableShim(_)
                | InstanceDef::ReifyShim(_)
                | InstanceDef::FnPtrShim(..)
                | InstanceDef::ClosureOnceShim { .. }
                | InstanceDef::CloneShim(..) => {}
                InstanceDef::DropGlue(..) => {
                    // FIXME: A drop shim that is not fully substituted can cause ICEs if one
                    // attempts to have its MIR built. Likely oli-obk just screwed up the
                    // `ParamEnv`s, so this needs some more analysis.
                    if callee.needs_subst() {
                        continue;
                    }
                }
            }

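            // Descend into each instance only once; the per-`DefId` counter cuts off mutually
            // recursive call chains once the recursion limit is hit.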
            if seen.insert(callee) {
                let recursion = recursion_limiter.entry(callee.def_id()).or_default();
                trace!(?callee, recursion = *recursion);
                if tcx.sess.recursion_limit().value_within_limit(*recursion) {
                    *recursion += 1;
                    stack.push(callee);
                    let found_recursion = ensure_sufficient_stack(|| {
                        process(tcx, param_env, callee, target, stack, seen, recursion_limiter)
                    });
                    if found_recursion {
                        return true;
                    }
                    stack.pop();
                } else {
                    // Pessimistically assume that there could be recursion.
                    return true;
                }
            }
        }
        false
    }
    process(
        tcx,
        param_env,
        root,
        target,
        &mut Vec::new(),
        &mut FxHashSet::default(),
        &mut FxHashMap::default(),
    )
}

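/// Returns the deduplicated list of `(DefId, SubstsRef)` pairs for every direct call that
/// appears in the MIR body of `instance`, so that `mir_callgraph_reachable` can walk the call
/// graph without rescanning bodies.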
crate fn mir_inliner_callees<'tcx>(
    tcx: TyCtxt<'tcx>,
    instance: ty::InstanceDef<'tcx>,
) -> &'tcx [(DefId, SubstsRef<'tcx>)] {
    let steal;
    let guard;
    let body = match (instance, instance.def_id().as_local()) {
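        // Local items: inspect the (unoptimized) `mir_promoted` body; requesting optimized MIR
        // here could itself re-enter the MIR pipeline and create a query cycle.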
        (InstanceDef::Item(_), Some(def_id)) => {
            let def = ty::WithOptConstParam::unknown(def_id);
            steal = tcx.mir_promoted(def).0;
            guard = steal.borrow();
            &*guard
        }
        // Functions from other crates and MIR shims
        _ => tcx.instance_mir(instance),
    };
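    // Record each distinct `FnDef` callee found in a block terminator; calls through function
    // pointers have no statically known target and are skipped.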
    let mut calls = Vec::new();
    for bb_data in body.basic_blocks() {
        let terminator = bb_data.terminator();
        if let TerminatorKind::Call { func, .. } = &terminator.kind {
            let ty = func.ty(&body.local_decls, tcx);
            let call = match ty.kind() {
                ty::FnDef(def_id, substs) => (*def_id, *substs),
                _ => continue,
            };
            // We've seen this before
            if calls.contains(&call) {
                continue;
            }
            calls.push(call);
        }
    }
    tcx.arena.alloc_slice(&calls)
}