rustc_mir_transform/inline/cycle.rs

use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet};
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_data_structures::unord::UnordSet;
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_middle::mir::TerminatorKind;
use rustc_middle::ty::{self, GenericArgsRef, InstanceKind, TyCtxt, TypeVisitableExt};
use rustc_session::Limit;
use rustc_span::sym;
use tracing::{instrument, trace};

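/// Returns whether the cycle walk should descend into `callee`: the callee must
/// have MIR that could be inlined, and an inlining pass must actually run for it.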
#[instrument(level = "debug", skip(tcx), ret)]
fn should_recurse<'tcx>(tcx: TyCtxt<'tcx>, callee: ty::Instance<'tcx>) -> bool {
    match callee.def {
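        // An item whose MIR is not available cannot be inlined, so it cannot
        // take part in an inlining cycle.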
        InstanceKind::Item(_) => {
            if !tcx.is_mir_available(callee.def_id()) {
                return false;
            }
        }

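        // Intrinsics and virtual calls are never MIR-inlined, so there is no
        // cycle to find through them.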
        InstanceKind::Intrinsic(_) | InstanceKind::Virtual(..) => return false,

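        // These shims have MIR of their own; once that MIR is inlined and
        // instantiated, a later inlining run can reach ordinary items through
        // them, so they must be walked.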
        InstanceKind::VTableShim(_)
        | InstanceKind::ReifyShim(..)
        | InstanceKind::FnPtrShim(..)
        | InstanceKind::ClosureOnceShim { .. }
        | InstanceKind::ConstructCoroutineInClosureShim { .. }
        | InstanceKind::ThreadLocalShim { .. }
        | InstanceKind::CloneShim(..) => {}

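        // This shim calls no other functions, so it cannot be part of a cycle.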
        InstanceKind::FnPtrAddrShim(..) => return false,

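        // Drop glue is only walked once it is fully instantiated; building MIR
        // for a drop shim that still has generic parameters can fail.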
        InstanceKind::DropGlue(..)
        | InstanceKind::FutureDropPollShim(..)
        | InstanceKind::AsyncDropGlue(..)
        | InstanceKind::AsyncDropGlueCtorShim(..) => {
            if callee.has_param() {
                return false;
            }
        }
    }

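    // Even if the callee has usable MIR, only recurse when an inlining pass
    // will actually run for it: the regular `Inline` pass with optimizations
    // enabled, or forced inlining of this particular callee.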
    crate::pm::should_run_pass(tcx, &crate::inline::Inline, crate::pm::Optimizations::Allowed)
        || crate::inline::ForceInline::should_run_pass_for_callee(tcx, callee.def.def_id())
}

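/// Walks the call graph reachable from `caller`, recording in `involved` every
/// local function on a path that leads back to `target`. Returns `true` if such
/// a path was found; hitting the recursion limit is conservatively treated as
/// having found one.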
#[instrument(
    level = "debug",
    skip(tcx, typing_env, seen, involved, recursion_limiter, recursion_limit),
    ret
)]
fn process<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    caller: ty::Instance<'tcx>,
    target: LocalDefId,
    seen: &mut FxHashSet<ty::Instance<'tcx>>,
    involved: &mut FxHashSet<LocalDefId>,
    recursion_limiter: &mut FxHashMap<DefId, usize>,
    recursion_limit: Limit,
) -> bool {
    trace!(%caller);
    let mut cycle_found = false;

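    // Resolve every call site recorded for the caller's MIR; skip anything that
    // cannot be normalized or resolved to a concrete instance yet.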
    for &(callee, args) in tcx.mir_inliner_callees(caller.def) {
        let Ok(args) = caller.try_instantiate_mir_and_normalize_erasing_regions(
            tcx,
            typing_env,
            ty::EarlyBinder::bind(args),
        ) else {
            trace!(?caller, ?typing_env, ?args, "cannot normalize, skipping");
            continue;
        };
        let Ok(Some(callee)) = ty::Instance::try_resolve(tcx, typing_env, callee, args) else {
            trace!(?callee, "cannot resolve, skipping");
            continue;
        };

        if callee.def_id() == target.to_def_id() {
            cycle_found = true;
        }

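        // Constructor functions always have MIR and contain no calls of their
        // own, so there is nothing to walk into.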
        if tcx.is_constructor(callee.def_id()) {
            trace!("constructors always have MIR");
            continue;
        }

        if !should_recurse(tcx, callee) {
            continue;
        }

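        // Walk each instance only once, and cap how many times any single
        // `DefId` is descended into so the traversal always terminates.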
        if seen.insert(callee) {
            let recursion = recursion_limiter.entry(callee.def_id()).or_default();
            trace!(?callee, recursion = *recursion);
            let found_recursion = if recursion_limit.value_within_limit(*recursion) {
                *recursion += 1;
                ensure_sufficient_stack(|| {
                    process(
                        tcx,
                        typing_env,
                        callee,
                        target,
                        seen,
                        involved,
                        recursion_limiter,
                        recursion_limit,
                    )
                })
            } else {
                // Pessimistically assume a cycle once the recursion limit is hit.
                true
            };
            if found_recursion {
                if let Some(callee) = callee.def_id().as_local() {
                    // The result only tracks local functions; MIR from other
                    // crates is already built and cannot cause a query cycle here.
                    involved.insert(callee);
                }
                cycle_found = true;
            }
        }
    }

    cycle_found
}

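/// Computes the set of local functions that are involved in a call-graph cycle
/// through `root`, so the inliner can avoid inlining in a way that would cause
/// query cycles.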
#[instrument(level = "debug", skip(tcx), ret)]
pub(crate) fn mir_callgraph_cyclic<'tcx>(
    tcx: TyCtxt<'tcx>,
    root: LocalDefId,
) -> UnordSet<LocalDefId> {
    assert!(
        !tcx.is_constructor(root.to_def_id()),
        "you should not call `mir_callgraph_reachable` on enum/struct constructor functions"
    );

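    // Bound the walk by only half the crate's recursion limit, so this analysis
    // gives up well before other recursion limits (e.g. in the trait solver)
    // are exhausted.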
    let recursion_limit = tcx.recursion_limit() / 2;
    let mut involved = FxHashSet::default();
    let typing_env = ty::TypingEnv::post_analysis(tcx, root);
    let Ok(Some(root_instance)) = ty::Instance::try_resolve(
        tcx,
        typing_env,
        root.to_def_id(),
        ty::GenericArgs::identity_for_item(tcx, root.to_def_id()),
    ) else {
        trace!("cannot resolve, skipping");
        return involved.into();
    };
    if !should_recurse(tcx, root_instance) {
        trace!("cannot walk, skipping");
        return involved.into();
    }
    process(
        tcx,
        typing_env,
        root_instance,
        root,
        &mut FxHashSet::default(),
        &mut involved,
        &mut FxHashMap::default(),
        recursion_limit,
    );
    involved.into()
}

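/// Returns the deduplicated `(callee, generic args)` pairs for every `Call`
/// terminator in the MIR of `instance`. Local items are inspected through their
/// (not yet optimized) `mir_promoted` body; everything else goes through
/// `instance_mir`.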
pub(crate) fn mir_inliner_callees<'tcx>(
    tcx: TyCtxt<'tcx>,
    instance: ty::InstanceKind<'tcx>,
) -> &'tcx [(DefId, GenericArgsRef<'tcx>)] {
    let steal;
    let guard;
    let body = match (instance, instance.def_id().as_local()) {
        (InstanceKind::Item(_), Some(def_id)) => {
            steal = tcx.mir_promoted(def_id).0;
            guard = steal.borrow();
            &*guard
        }
        // Functions from other crates and MIR shims.
        _ => tcx.instance_mir(instance),
    };
    let mut calls = FxIndexSet::default();
    for bb_data in body.basic_blocks.iter() {
        let terminator = bb_data.terminator();
        if let TerminatorKind::Call { func, args: call_args, .. } = &terminator.kind {
            let ty = func.ty(&body.local_decls, tcx);
            let ty::FnDef(def_id, generic_args) = ty.kind() else {
                continue;
            };
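            // `const_eval_select` receives the function to call at runtime as
            // its third argument; treat that function as the real callee.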
            let call = if tcx.is_intrinsic(*def_id, sym::const_eval_select) {
                let func = &call_args[2].node;
                let ty = func.ty(&body.local_decls, tcx);
                let ty::FnDef(def_id, generic_args) = ty.kind() else {
                    continue;
                };
                (*def_id, *generic_args)
            } else {
                (*def_id, *generic_args)
            };
            calls.insert(call);
        }
    }
    tcx.arena.alloc_from_iter(calls.iter().copied())
}