1 //! Mono Item Collection
2 //! ====================
3 //!
4 //! This module is responsible for discovering all items that will contribute
5 //! to code generation of the crate. The important part here is that it not only
//! needs to find syntax-level items (functions, structs, etc.) but also all
7 //! their monomorphized instantiations. Every non-generic, non-const function
8 //! maps to one LLVM artifact. Every generic function can produce
9 //! from zero to N artifacts, depending on the sets of type arguments it
10 //! is instantiated with.
11 //! This also applies to generic items from other crates: A generic definition
12 //! in crate X might produce monomorphizations that are compiled into crate Y.
13 //! We also have to collect these here.
14 //!
15 //! The following kinds of "mono items" are handled here:
16 //!
17 //! - Functions
18 //! - Methods
19 //! - Closures
20 //! - Statics
21 //! - Drop glue
22 //!
23 //! The following things also result in LLVM artifacts, but are not collected
24 //! here, since we instantiate them locally on demand when needed in a given
25 //! codegen unit:
26 //!
27 //! - Constants
28 //! - Vtables
29 //! - Object Shims
30 //!
31 //!
32 //! General Algorithm
33 //! -----------------
34 //! Let's define some terms first:
35 //!
36 //! - A "mono item" is something that results in a function or global in
37 //! the LLVM IR of a codegen unit. Mono items do not stand on their
38 //! own, they can reference other mono items. For example, if function
39 //! `foo()` calls function `bar()` then the mono item for `foo()`
40 //! references the mono item for function `bar()`. In general, the
41 //! definition for mono item A referencing a mono item B is that
42 //! the LLVM artifact produced for A references the LLVM artifact produced
43 //! for B.
44 //!
45 //! - Mono items and the references between them form a directed graph,
46 //! where the mono items are the nodes and references form the edges.
47 //! Let's call this graph the "mono item graph".
48 //!
49 //! - The mono item graph for a program contains all mono items
50 //! that are needed in order to produce the complete LLVM IR of the program.
51 //!
52 //! The purpose of the algorithm implemented in this module is to build the
53 //! mono item graph for the current crate. It runs in two phases:
54 //!
55 //! 1. Discover the roots of the graph by traversing the HIR of the crate.
56 //! 2. Starting from the roots, find neighboring nodes by inspecting the MIR
57 //! representation of the item corresponding to a given node, until no more
58 //! new nodes are found.
59 //!
60 //! ### Discovering roots
61 //!
62 //! The roots of the mono item graph correspond to the public non-generic
63 //! syntactic items in the source code. We find them by walking the HIR of the
64 //! crate, and whenever we hit upon a public function, method, or static item,
//! we create a mono item consisting of the item's `DefId` and, since we only
//! consider non-generic items, an empty type-substitution set. (In eager
//! collection mode, during incremental compilation, all non-generic functions
//! are considered roots; the same holds when the `-Clink-dead-code` option is
//! specified. Functions marked `#[no_mangle]` and functions called by inlinable
//! functions also always act as roots.)
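//!
//! For illustration, here is a small hypothetical example (the names are made
//! up and not part of this module):
//!
//! ```
//! pub fn root() {            // non-generic and reachable: collected as a root
//!     helper::<u8>(0);
//! }
//!
//! pub fn helper<T>(_x: T) {} // generic: not a root; `helper::<u8>` is found
//!                            // later as a neighbor of `root`
//! # fn main() {}
//! ```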
71 //!
72 //! ### Finding neighbor nodes
73 //! Given a mono item node, we can discover neighbors by inspecting its
74 //! MIR. We walk the MIR and any time we hit upon something that signifies a
75 //! reference to another mono item, we have found a neighbor. Since the
76 //! mono item we are currently at is always monomorphic, we also know the
77 //! concrete type arguments of its neighbors, and so all neighbors again will be
78 //! monomorphic. The specific forms a reference to a neighboring node can take
79 //! in MIR are quite diverse. Here is an overview:
80 //!
81 //! #### Calling Functions/Methods
82 //! The most obvious form of one mono item referencing another is a
83 //! function or method call (represented by a CALL terminator in MIR). But
84 //! calls are not the only thing that might introduce a reference between two
85 //! function mono items, and as we will see below, they are just a
86 //! specialization of the form described next, and consequently will not get any
87 //! special treatment in the algorithm.
88 //!
89 //! #### Taking a reference to a function or method
90 //! A function does not need to actually be called in order to be a neighbor of
91 //! another function. It suffices to just take a reference in order to introduce
92 //! an edge. Consider the following example:
93 //!
94 //! ```
95 //! # use core::fmt::Display;
96 //! fn print_val<T: Display>(x: T) {
97 //! println!("{}", x);
98 //! }
99 //!
100 //! fn call_fn(f: &dyn Fn(i32), x: i32) {
101 //! f(x);
102 //! }
103 //!
104 //! fn main() {
105 //! let print_i32 = print_val::<i32>;
106 //! call_fn(&print_i32, 0);
107 //! }
108 //! ```
//! None of these functions' MIR will contain an explicit call to
//! `print_val::<i32>`. Nonetheless, in order to monomorphize this program, we
//! need an instance of this function. Thus, whenever we encounter a function or
112 //! method in operand position, we treat it as a neighbor of the current
113 //! mono item. Calls are just a special case of that.
114 //!
115 //! #### Closures
116 //! In a way, closures are a simple case. Since every closure object needs to be
117 //! constructed somewhere, we can reliably discover them by observing
118 //! `RValue::Aggregate` expressions with `AggregateKind::Closure`. This is also
119 //! true for closures inlined from other crates.
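//!
//! For example (an illustrative sketch):
//!
//! ```
//! fn apply<F: Fn()>(f: F) {
//!     f()
//! }
//!
//! fn main() {
//!     let x = 1;
//!     // Constructing the closure here is what makes it discoverable: it shows
//!     // up as an `Rvalue::Aggregate` with `AggregateKind::Closure` in MIR.
//!     apply(|| {
//!         let _ = x;
//!     });
//! }
//! ```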
120 //!
121 //! #### Drop glue
122 //! Drop glue mono items are introduced by MIR drop-statements. The
123 //! generated mono item will again have drop-glue item neighbors if the
124 //! type to be dropped contains nested values that also need to be dropped. It
125 //! might also have a function item neighbor for the explicit `Drop::drop`
126 //! implementation of its type.
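//!
//! A small illustrative example:
//!
//! ```
//! struct Inner;
//! impl Drop for Inner {
//!     fn drop(&mut self) {}
//! }
//!
//! struct Outer {
//!     inner: Inner,
//! }
//!
//! fn main() {
//!     // Dropping `_x` needs drop glue for `Outer`, which in turn references
//!     // the drop glue for `Inner` and the explicit `Drop::drop` impl above.
//!     let _x = Outer { inner: Inner };
//! }
//! ```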
127 //!
128 //! #### Unsizing Casts
129 //! A subtle way of introducing neighbor edges is by casting to a trait object.
130 //! Since the resulting fat-pointer contains a reference to a vtable, we need to
//! instantiate all object-safe methods of the trait, as we need to store
132 //! pointers to these functions even if they never get called anywhere. This can
133 //! be seen as a special case of taking a function reference.
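//!
//! For illustration:
//!
//! ```
//! trait Greet {
//!     fn hello(&self);
//!     fn bye(&self);
//! }
//!
//! struct Person;
//!
//! impl Greet for Person {
//!     fn hello(&self) {}
//!     fn bye(&self) {}
//! }
//!
//! fn main() {
//!     // The unsizing cast to `&dyn Greet` requires a vtable for `Person`, so
//!     // both `<Person as Greet>::hello` and `<Person as Greet>::bye` are
//!     // collected, even though only `hello` is ever called.
//!     let p: &dyn Greet = &Person;
//!     p.hello();
//! }
//! ```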
134 //!
135 //! #### Boxes
//! Since `Box` expressions have special compiler support, no explicit calls to
137 //! `exchange_malloc()` and `box_free()` may show up in MIR, even if the
138 //! compiler will generate them. We have to observe `Rvalue::Box` expressions
139 //! and Box-typed drop-statements for that purpose.
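//!
//! For example:
//!
//! ```
//! fn main() {
//!     // Allocating the box introduces an edge to `exchange_malloc()`, and
//!     // dropping it introduces the corresponding box-free/drop-glue items,
//!     // even though neither call appears in the source.
//!     let _b = Box::new([0u8; 16]);
//! }
//! ```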
140 //!
141 //!
142 //! Interaction with Cross-Crate Inlining
143 //! -------------------------------------
144 //! The binary of a crate will not only contain machine code for the items
145 //! defined in the source code of that crate. It will also contain monomorphic
146 //! instantiations of any extern generic functions and of functions marked with
147 //! `#[inline]`.
//! The collection algorithm handles this more or less transparently. If it is
149 //! about to create a mono item for something with an external `DefId`,
150 //! it will take a look if the MIR for that item is available, and if so just
151 //! proceed normally. If the MIR is not available, it assumes that the item is
152 //! just linked to and no node is created; which is exactly what we want, since
153 //! no machine code should be generated in the current crate for such an item.
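//!
//! A hypothetical sketch (the dependency crate `upstream` is made up):
//!
//! ```ignore (illustrative-only)
//! // In the dependency crate `upstream`:
//! #[inline]
//! pub fn twice<T: Clone>(x: T) -> (T, T) {
//!     (x.clone(), x)
//! }
//!
//! // In the current crate: the MIR of `twice` is available, so a mono item
//! // for `twice::<String>` is created and codegened here, not in `upstream`.
//! fn local() -> (String, String) {
//!     upstream::twice(String::from("hi"))
//! }
//! ```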
154 //!
155 //! Eager and Lazy Collection Mode
156 //! ------------------------------
157 //! Mono item collection can be performed in one of two modes:
158 //!
159 //! - Lazy mode means that items will only be instantiated when actually
160 //! referenced. The goal is to produce the least amount of machine code
161 //! possible.
162 //!
163 //! - Eager mode is meant to be used in conjunction with incremental compilation
164 //! where a stable set of mono items is more important than a minimal
165 //! one. Thus, eager mode will instantiate drop-glue for every drop-able type
166 //! in the crate, even if no drop call for that type exists (yet). It will
167 //! also instantiate default implementations of trait methods, something that
168 //! otherwise is only done on demand.
169 //!
170 //!
171 //! Open Issues
172 //! -----------
173 //! Some things are not yet fully implemented in the current version of this
174 //! module.
175 //!
176 //! ### Const Fns
177 //! Ideally, no mono item should be generated for const fns unless there
178 //! is a call to them that cannot be evaluated at compile time. At the moment
179 //! this is not implemented however: a mono item will be produced
180 //! regardless of whether it is actually needed or not.
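//!
//! For illustration (a hypothetical example of the limitation above):
//!
//! ```
//! const fn square(x: u32) -> u32 { x * x }
//!
//! const N: u32 = square(4); // fully evaluated at compile time, yet a mono
//!                           // item for `square` may still be produced
//! # fn main() { let _ = N; }
//! ```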
181
182 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
183 use rustc_data_structures::sync::{par_iter, MTLock, MTRef, ParallelIterator};
184 use rustc_hir as hir;
185 use rustc_hir::def::DefKind;
186 use rustc_hir::def_id::{DefId, DefIdMap, LocalDefId};
187 use rustc_hir::lang_items::LangItem;
188 use rustc_index::bit_set::GrowableBitSet;
189 use rustc_middle::mir::interpret::{AllocId, ConstValue};
190 use rustc_middle::mir::interpret::{ErrorHandled, GlobalAlloc, Scalar};
191 use rustc_middle::mir::mono::{InstantiationMode, MonoItem};
192 use rustc_middle::mir::visit::Visitor as MirVisitor;
193 use rustc_middle::mir::{self, Local, Location};
194 use rustc_middle::ty::adjustment::{CustomCoerceUnsized, PointerCast};
195 use rustc_middle::ty::print::with_no_trimmed_paths;
196 use rustc_middle::ty::subst::{GenericArgKind, InternalSubsts};
197 use rustc_middle::ty::{self, GenericParamDefKind, Instance, Ty, TyCtxt, TypeFoldable, VtblEntry};
198 use rustc_middle::{middle::codegen_fn_attrs::CodegenFnAttrFlags, mir::visit::TyContext};
199 use rustc_session::config::EntryFnType;
200 use rustc_session::lint::builtin::LARGE_ASSIGNMENTS;
201 use rustc_session::Limit;
202 use rustc_span::source_map::{dummy_spanned, respan, Span, Spanned, DUMMY_SP};
203 use rustc_target::abi::Size;
204 use std::iter;
205 use std::ops::Range;
206 use std::path::PathBuf;
207
208 #[derive(PartialEq)]
209 pub enum MonoItemCollectionMode {
210 Eager,
211 Lazy,
212 }
213
214 /// Maps every mono item to all mono items it references in its
215 /// body.
216 pub struct InliningMap<'tcx> {
217 // Maps a source mono item to the range of mono items
218 // accessed by it.
219 // The range selects elements within the `targets` vecs.
220 index: FxHashMap<MonoItem<'tcx>, Range<usize>>,
221 targets: Vec<MonoItem<'tcx>>,
222
223 // Contains one bit per mono item in the `targets` field. That bit
224 // is true if that mono item needs to be inlined into every CGU.
225 inlines: GrowableBitSet<usize>,
226 }
227
/// Struct to store the mono items found during each collection step, along
/// with whether they should be inlined. We call `instantiation_mode` to get
/// their inlining status when inserting new elements, which avoids calling it
/// while `inlining_map.lock_mut()` is held. See the `collect_items_rec`
/// implementation below.
233 struct MonoItems<'tcx> {
234 // If this is false, we do not need to compute whether items
235 // will need to be inlined.
236 compute_inlining: bool,
237
// The `TyCtxt` used to determine whether an item should
// be inlined.
240 tcx: TyCtxt<'tcx>,
241
242 // The collected mono items. The bool field in each element
243 // indicates whether this element should be inlined.
244 items: Vec<(Spanned<MonoItem<'tcx>>, bool /*inlined*/)>,
245 }
246
247 impl<'tcx> MonoItems<'tcx> {
248 #[inline]
249 fn push(&mut self, item: Spanned<MonoItem<'tcx>>) {
250 self.extend([item]);
251 }
252
253 #[inline]
254 fn extend<T: IntoIterator<Item = Spanned<MonoItem<'tcx>>>>(&mut self, iter: T) {
255 self.items.extend(iter.into_iter().map(|mono_item| {
256 let inlined = if !self.compute_inlining {
257 false
258 } else {
259 mono_item.node.instantiation_mode(self.tcx) == InstantiationMode::LocalCopy
260 };
261 (mono_item, inlined)
262 }))
263 }
264 }
265
266 impl<'tcx> InliningMap<'tcx> {
267 fn new() -> InliningMap<'tcx> {
268 InliningMap {
269 index: FxHashMap::default(),
270 targets: Vec::new(),
271 inlines: GrowableBitSet::with_capacity(1024),
272 }
273 }
274
275 fn record_accesses<'a>(
276 &mut self,
277 source: MonoItem<'tcx>,
278 new_targets: &'a [(Spanned<MonoItem<'tcx>>, bool)],
279 ) where
280 'tcx: 'a,
281 {
282 let start_index = self.targets.len();
283 let new_items_count = new_targets.len();
284 let new_items_count_total = new_items_count + self.targets.len();
285
286 self.targets.reserve(new_items_count);
287 self.inlines.ensure(new_items_count_total);
288
289 for (i, (Spanned { node: mono_item, .. }, inlined)) in new_targets.into_iter().enumerate() {
290 self.targets.push(*mono_item);
291 if *inlined {
292 self.inlines.insert(i + start_index);
293 }
294 }
295
296 let end_index = self.targets.len();
297 assert!(self.index.insert(source, start_index..end_index).is_none());
298 }
299
300 // Internally iterate over all items referenced by `source` which will be
301 // made available for inlining.
302 pub fn with_inlining_candidates<F>(&self, source: MonoItem<'tcx>, mut f: F)
303 where
304 F: FnMut(MonoItem<'tcx>),
305 {
306 if let Some(range) = self.index.get(&source) {
307 for (i, candidate) in self.targets[range.clone()].iter().enumerate() {
308 if self.inlines.contains(range.start + i) {
309 f(*candidate);
310 }
311 }
312 }
313 }
314
315 // Internally iterate over all items and the things each accesses.
316 pub fn iter_accesses<F>(&self, mut f: F)
317 where
318 F: FnMut(MonoItem<'tcx>, &[MonoItem<'tcx>]),
319 {
320 for (&accessor, range) in &self.index {
321 f(accessor, &self.targets[range.clone()])
322 }
323 }
324 }
325
326 #[instrument(skip(tcx, mode), level = "debug")]
327 pub fn collect_crate_mono_items(
328 tcx: TyCtxt<'_>,
329 mode: MonoItemCollectionMode,
330 ) -> (FxHashSet<MonoItem<'_>>, InliningMap<'_>) {
331 let _prof_timer = tcx.prof.generic_activity("monomorphization_collector");
332
333 let roots =
334 tcx.sess.time("monomorphization_collector_root_collections", || collect_roots(tcx, mode));
335
336 debug!("building mono item graph, beginning at roots");
337
338 let mut visited = MTLock::new(FxHashSet::default());
339 let mut inlining_map = MTLock::new(InliningMap::new());
340 let recursion_limit = tcx.recursion_limit();
341
342 {
343 let visited: MTRef<'_, _> = &mut visited;
344 let inlining_map: MTRef<'_, _> = &mut inlining_map;
345
346 tcx.sess.time("monomorphization_collector_graph_walk", || {
347 par_iter(roots).for_each(|root| {
348 let mut recursion_depths = DefIdMap::default();
349 collect_items_rec(
350 tcx,
351 dummy_spanned(root),
352 visited,
353 &mut recursion_depths,
354 recursion_limit,
355 inlining_map,
356 );
357 });
358 });
359 }
360
361 (visited.into_inner(), inlining_map.into_inner())
362 }
363
364 // Find all non-generic items by walking the HIR. These items serve as roots to
365 // start monomorphizing from.
366 #[instrument(skip(tcx, mode), level = "debug")]
367 fn collect_roots(tcx: TyCtxt<'_>, mode: MonoItemCollectionMode) -> Vec<MonoItem<'_>> {
368 debug!("collecting roots");
369 let mut roots = MonoItems { compute_inlining: false, tcx, items: Vec::new() };
370
371 {
372 let entry_fn = tcx.entry_fn(());
373
374 debug!("collect_roots: entry_fn = {:?}", entry_fn);
375
376 let mut collector = RootCollector { tcx, mode, entry_fn, output: &mut roots };
377
378 let crate_items = tcx.hir_crate_items(());
379
380 for id in crate_items.items() {
381 collector.process_item(id);
382 }
383
384 for id in crate_items.impl_items() {
385 collector.process_impl_item(id);
386 }
387
388 collector.push_extra_entry_roots();
389 }
390
391 // We can only codegen items that are instantiable - items all of
392 // whose predicates hold. Luckily, items that aren't instantiable
393 // can't actually be used, so we can just skip codegenning them.
394 roots
395 .items
396 .into_iter()
397 .filter_map(|(Spanned { node: mono_item, .. }, _)| {
398 mono_item.is_instantiable(tcx).then_some(mono_item)
399 })
400 .collect()
401 }
402
403 /// Collect all monomorphized items reachable from `starting_point`, and emit a note diagnostic if a
/// post-monomorphization error is encountered during a collection step.
405 #[instrument(skip(tcx, visited, recursion_depths, recursion_limit, inlining_map), level = "debug")]
406 fn collect_items_rec<'tcx>(
407 tcx: TyCtxt<'tcx>,
408 starting_point: Spanned<MonoItem<'tcx>>,
409 visited: MTRef<'_, MTLock<FxHashSet<MonoItem<'tcx>>>>,
410 recursion_depths: &mut DefIdMap<usize>,
411 recursion_limit: Limit,
412 inlining_map: MTRef<'_, MTLock<InliningMap<'tcx>>>,
413 ) {
414 if !visited.lock_mut().insert(starting_point.node) {
415 // We've been here already, no need to search again.
416 return;
417 }
418 debug!("BEGIN collect_items_rec({})", starting_point.node);
419
420 let mut neighbors = MonoItems { compute_inlining: true, tcx, items: Vec::new() };
421 let recursion_depth_reset;
422
423 //
424 // Post-monomorphization errors MVP
425 //
426 // We can encounter errors while monomorphizing an item, but we don't have a good way of
427 // showing a complete stack of spans ultimately leading to collecting the erroneous one yet.
428 // (It's also currently unclear exactly which diagnostics and information would be interesting
429 // to report in such cases)
430 //
431 // This leads to suboptimal error reporting: a post-monomorphization error (PME) will be
432 // shown with just a spanned piece of code causing the error, without information on where
433 // it was called from. This is especially obscure if the erroneous mono item is in a
434 // dependency. See for example issue #85155, where, before minimization, a PME happened two
435 // crates downstream from libcore's stdarch, without a way to know which dependency was the
436 // cause.
437 //
438 // If such an error occurs in the current crate, its span will be enough to locate the
439 // source. If the cause is in another crate, the goal here is to quickly locate which mono
440 // item in the current crate is ultimately responsible for causing the error.
441 //
442 // To give at least _some_ context to the user: while collecting mono items, we check the
443 // error count. If it has changed, a PME occurred, and we trigger some diagnostics about the
444 // current step of mono items collection.
445 //
446 // FIXME: don't rely on global state, instead bubble up errors. Note: this is very hard to do.
447 let error_count = tcx.sess.diagnostic().err_count();
448
449 match starting_point.node {
450 MonoItem::Static(def_id) => {
451 let instance = Instance::mono(tcx, def_id);
452
453 // Sanity check whether this ended up being collected accidentally
454 debug_assert!(should_codegen_locally(tcx, &instance));
455
456 let ty = instance.ty(tcx, ty::ParamEnv::reveal_all());
457 visit_drop_use(tcx, ty, true, starting_point.span, &mut neighbors);
458
459 recursion_depth_reset = None;
460
461 if let Ok(alloc) = tcx.eval_static_initializer(def_id) {
462 for &id in alloc.inner().relocations().values() {
463 collect_miri(tcx, id, &mut neighbors);
464 }
465 }
466 }
467 MonoItem::Fn(instance) => {
468 // Sanity check whether this ended up being collected accidentally
469 debug_assert!(should_codegen_locally(tcx, &instance));
470
471 // Keep track of the monomorphization recursion depth
472 recursion_depth_reset = Some(check_recursion_limit(
473 tcx,
474 instance,
475 starting_point.span,
476 recursion_depths,
477 recursion_limit,
478 ));
479 check_type_length_limit(tcx, instance);
480
481 rustc_data_structures::stack::ensure_sufficient_stack(|| {
482 collect_neighbours(tcx, instance, &mut neighbors);
483 });
484 }
485 MonoItem::GlobalAsm(item_id) => {
486 recursion_depth_reset = None;
487
488 let item = tcx.hir().item(item_id);
489 if let hir::ItemKind::GlobalAsm(asm) = item.kind {
490 for (op, op_sp) in asm.operands {
491 match op {
492 hir::InlineAsmOperand::Const { .. } => {
493 // Only constants which resolve to a plain integer
494 // are supported. Therefore the value should not
495 // depend on any other items.
496 }
497 hir::InlineAsmOperand::SymFn { anon_const } => {
498 let fn_ty =
499 tcx.typeck_body(anon_const.body).node_type(anon_const.hir_id);
500 visit_fn_use(tcx, fn_ty, false, *op_sp, &mut neighbors);
501 }
502 hir::InlineAsmOperand::SymStatic { path: _, def_id } => {
503 let instance = Instance::mono(tcx, *def_id);
504 if should_codegen_locally(tcx, &instance) {
505 trace!("collecting static {:?}", def_id);
506 neighbors.push(dummy_spanned(MonoItem::Static(*def_id)));
507 }
508 }
509 hir::InlineAsmOperand::In { .. }
510 | hir::InlineAsmOperand::Out { .. }
511 | hir::InlineAsmOperand::InOut { .. }
512 | hir::InlineAsmOperand::SplitInOut { .. } => {
513 span_bug!(*op_sp, "invalid operand type for global_asm!")
514 }
515 }
516 }
517 } else {
518 span_bug!(item.span, "Mismatch between hir::Item type and MonoItem type")
519 }
520 }
521 }
522
// Check for PMEs and emit a diagnostic if one happened, to try to show relevant edges of the
// mono item graph.
525 if tcx.sess.diagnostic().err_count() > error_count
526 && starting_point.node.is_generic_fn()
527 && starting_point.node.is_user_defined()
528 {
529 let formatted_item = with_no_trimmed_paths!(starting_point.node.to_string());
530 tcx.sess.span_note_without_error(
531 starting_point.span,
532 &format!("the above error was encountered while instantiating `{}`", formatted_item),
533 );
534 }
535 inlining_map.lock_mut().record_accesses(starting_point.node, &neighbors.items);
536
537 for (neighbour, _) in neighbors.items {
538 collect_items_rec(tcx, neighbour, visited, recursion_depths, recursion_limit, inlining_map);
539 }
540
541 if let Some((def_id, depth)) = recursion_depth_reset {
542 recursion_depths.insert(def_id, depth);
543 }
544
545 debug!("END collect_items_rec({})", starting_point.node);
546 }
547
548 /// Format instance name that is already known to be too long for rustc.
549 /// Show only the first and last 32 characters to avoid blasting
550 /// the user's terminal with thousands of lines of type-name.
551 ///
552 /// If the type name is longer than before+after, it will be written to a file.
553 fn shrunk_instance_name<'tcx>(
554 tcx: TyCtxt<'tcx>,
555 instance: &Instance<'tcx>,
556 before: usize,
557 after: usize,
558 ) -> (String, Option<PathBuf>) {
559 let s = instance.to_string();
560
561 // Only use the shrunk version if it's really shorter.
562 // This also avoids the case where before and after slices overlap.
563 if s.chars().nth(before + after + 1).is_some() {
564 // An iterator of all byte positions including the end of the string.
565 let positions = || s.char_indices().map(|(i, _)| i).chain(iter::once(s.len()));
566
567 let shrunk = format!(
568 "{before}...{after}",
569 before = &s[..positions().nth(before).unwrap_or(s.len())],
570 after = &s[positions().rev().nth(after).unwrap_or(0)..],
571 );
572
573 let path = tcx.output_filenames(()).temp_path_ext("long-type.txt", None);
574 let written_to_path = std::fs::write(&path, s).ok().map(|_| path);
575
576 (shrunk, written_to_path)
577 } else {
578 (s, None)
579 }
580 }
581
582 fn check_recursion_limit<'tcx>(
583 tcx: TyCtxt<'tcx>,
584 instance: Instance<'tcx>,
585 span: Span,
586 recursion_depths: &mut DefIdMap<usize>,
587 recursion_limit: Limit,
588 ) -> (DefId, usize) {
589 let def_id = instance.def_id();
590 let recursion_depth = recursion_depths.get(&def_id).cloned().unwrap_or(0);
591 debug!(" => recursion depth={}", recursion_depth);
592
593 let adjusted_recursion_depth = if Some(def_id) == tcx.lang_items().drop_in_place_fn() {
594 // HACK: drop_in_place creates tight monomorphization loops. Give
595 // it more margin.
596 recursion_depth / 4
597 } else {
598 recursion_depth
599 };
600
601 // Code that needs to instantiate the same function recursively
602 // more than the recursion limit is assumed to be causing an
603 // infinite expansion.
604 if !recursion_limit.value_within_limit(adjusted_recursion_depth) {
605 let (shrunk, written_to_path) = shrunk_instance_name(tcx, &instance, 32, 32);
606 let error = format!("reached the recursion limit while instantiating `{}`", shrunk);
607 let mut err = tcx.sess.struct_span_fatal(span, &error);
608 err.span_note(
609 tcx.def_span(def_id),
610 &format!("`{}` defined here", tcx.def_path_str(def_id)),
611 );
612 if let Some(path) = written_to_path {
613 err.note(&format!("the full type name has been written to '{}'", path.display()));
614 }
615 err.emit()
616 }
617
618 recursion_depths.insert(def_id, recursion_depth + 1);
619
620 (def_id, recursion_depth)
621 }
622
623 fn check_type_length_limit<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) {
624 let type_length = instance
625 .substs
626 .iter()
627 .flat_map(|arg| arg.walk())
628 .filter(|arg| match arg.unpack() {
629 GenericArgKind::Type(_) | GenericArgKind::Const(_) => true,
630 GenericArgKind::Lifetime(_) => false,
631 })
632 .count();
633 debug!(" => type length={}", type_length);
634
635 // Rust code can easily create exponentially-long types using only a
636 // polynomial recursion depth. Even with the default recursion
637 // depth, you can easily get cases that take >2^60 steps to run,
638 // which means that rustc basically hangs.
639 //
640 // Bail out in these cases to avoid that bad user experience.
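//
// For illustration (a hypothetical example), each level of recursion below
// doubles the length of the instantiated type, so the substs blow up quickly:
//
//     fn rec<T>(x: T) { rec((x, x)) }
//     // rec::<u8>, rec::<(u8, u8)>, rec::<((u8, u8), (u8, u8))>, ...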
641 if !tcx.type_length_limit().value_within_limit(type_length) {
642 let (shrunk, written_to_path) = shrunk_instance_name(tcx, &instance, 32, 32);
643 let msg = format!("reached the type-length limit while instantiating `{}`", shrunk);
644 let mut diag = tcx.sess.struct_span_fatal(tcx.def_span(instance.def_id()), &msg);
645 if let Some(path) = written_to_path {
646 diag.note(&format!("the full type name has been written to '{}'", path.display()));
647 }
648 diag.help(&format!(
649 "consider adding a `#![type_length_limit=\"{}\"]` attribute to your crate",
650 type_length
651 ));
652 diag.emit()
653 }
654 }
655
656 struct MirNeighborCollector<'a, 'tcx> {
657 tcx: TyCtxt<'tcx>,
658 body: &'a mir::Body<'tcx>,
659 output: &'a mut MonoItems<'tcx>,
660 instance: Instance<'tcx>,
661 }
662
663 impl<'a, 'tcx> MirNeighborCollector<'a, 'tcx> {
664 pub fn monomorphize<T>(&self, value: T) -> T
665 where
666 T: TypeFoldable<'tcx>,
667 {
668 debug!("monomorphize: self.instance={:?}", self.instance);
669 self.instance.subst_mir_and_normalize_erasing_regions(
670 self.tcx,
671 ty::ParamEnv::reveal_all(),
672 value,
673 )
674 }
675 }
676
677 impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> {
678 fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'tcx>, location: Location) {
679 debug!("visiting rvalue {:?}", *rvalue);
680
681 let span = self.body.source_info(location).span;
682
683 match *rvalue {
// When doing a cast from a regular pointer to a fat pointer, we
685 // have to instantiate all methods of the trait being cast to, so we
686 // can build the appropriate vtable.
687 mir::Rvalue::Cast(
688 mir::CastKind::Pointer(PointerCast::Unsize),
689 ref operand,
690 target_ty,
691 ) => {
692 let target_ty = self.monomorphize(target_ty);
693 let source_ty = operand.ty(self.body, self.tcx);
694 let source_ty = self.monomorphize(source_ty);
695 let (source_ty, target_ty) =
696 find_vtable_types_for_unsizing(self.tcx, source_ty, target_ty);
697 // This could also be a different Unsize instruction, like
// from a fixed-size array to a slice. But we are only
699 // interested in things that produce a vtable.
700 if target_ty.is_trait() && !source_ty.is_trait() {
701 create_mono_items_for_vtable_methods(
702 self.tcx,
703 target_ty,
704 source_ty,
705 span,
706 self.output,
707 );
708 }
709 }
710 mir::Rvalue::Cast(
711 mir::CastKind::Pointer(PointerCast::ReifyFnPointer),
712 ref operand,
713 _,
714 ) => {
715 let fn_ty = operand.ty(self.body, self.tcx);
716 let fn_ty = self.monomorphize(fn_ty);
717 visit_fn_use(self.tcx, fn_ty, false, span, &mut self.output);
718 }
719 mir::Rvalue::Cast(
720 mir::CastKind::Pointer(PointerCast::ClosureFnPointer(_)),
721 ref operand,
722 _,
723 ) => {
724 let source_ty = operand.ty(self.body, self.tcx);
725 let source_ty = self.monomorphize(source_ty);
726 match *source_ty.kind() {
727 ty::Closure(def_id, substs) => {
728 let instance = Instance::resolve_closure(
729 self.tcx,
730 def_id,
731 substs,
732 ty::ClosureKind::FnOnce,
733 );
734 if should_codegen_locally(self.tcx, &instance) {
735 self.output.push(create_fn_mono_item(self.tcx, instance, span));
736 }
737 }
738 _ => bug!(),
739 }
740 }
741 mir::Rvalue::ThreadLocalRef(def_id) => {
742 assert!(self.tcx.is_thread_local_static(def_id));
743 let instance = Instance::mono(self.tcx, def_id);
744 if should_codegen_locally(self.tcx, &instance) {
745 trace!("collecting thread-local static {:?}", def_id);
746 self.output.push(respan(span, MonoItem::Static(def_id)));
747 }
748 }
749 _ => { /* not interesting */ }
750 }
751
752 self.super_rvalue(rvalue, location);
753 }
754
755 /// This does not walk the constant, as it has been handled entirely here and trying
756 /// to walk it would attempt to evaluate the `ty::Const` inside, which doesn't necessarily
757 /// work, as some constants cannot be represented in the type system.
758 #[instrument(skip(self), level = "debug")]
759 fn visit_constant(&mut self, constant: &mir::Constant<'tcx>, location: Location) {
760 let literal = self.monomorphize(constant.literal);
761 let val = match literal {
762 mir::ConstantKind::Val(val, _) => val,
763 mir::ConstantKind::Ty(ct) => match ct.kind() {
764 ty::ConstKind::Value(val) => self.tcx.valtree_to_const_val((ct.ty(), val)),
765 ty::ConstKind::Unevaluated(ct) => {
766 debug!(?ct);
767 let param_env = ty::ParamEnv::reveal_all();
768 match self.tcx.const_eval_resolve(param_env, ct, None) {
769 // The `monomorphize` call should have evaluated that constant already.
770 Ok(val) => val,
771 Err(ErrorHandled::Reported(_) | ErrorHandled::Linted) => return,
772 Err(ErrorHandled::TooGeneric) => span_bug!(
773 self.body.source_info(location).span,
774 "collection encountered polymorphic constant: {:?}",
775 literal
776 ),
777 }
778 }
779 _ => return,
780 },
781 };
782 collect_const_value(self.tcx, val, self.output);
783 self.visit_ty(literal.ty(), TyContext::Location(location));
784 }
785
786 #[instrument(skip(self), level = "debug")]
787 fn visit_const(&mut self, constant: ty::Const<'tcx>, location: Location) {
788 debug!("visiting const {:?} @ {:?}", constant, location);
789
790 let substituted_constant = self.monomorphize(constant);
791 let param_env = ty::ParamEnv::reveal_all();
792
793 match substituted_constant.kind() {
794 ty::ConstKind::Value(val) => {
795 let const_val = self.tcx.valtree_to_const_val((constant.ty(), val));
796 collect_const_value(self.tcx, const_val, self.output)
797 }
798 ty::ConstKind::Unevaluated(unevaluated) => {
799 match self.tcx.const_eval_resolve(param_env, unevaluated, None) {
800 // The `monomorphize` call should have evaluated that constant already.
801 Ok(val) => span_bug!(
802 self.body.source_info(location).span,
803 "collection encountered the unevaluated constant {} which evaluated to {:?}",
804 substituted_constant,
805 val
806 ),
807 Err(ErrorHandled::Reported(_) | ErrorHandled::Linted) => {}
808 Err(ErrorHandled::TooGeneric) => span_bug!(
809 self.body.source_info(location).span,
810 "collection encountered polymorphic constant: {}",
811 substituted_constant
812 ),
813 }
814 }
815 _ => {}
816 }
817
818 self.super_const(constant);
819 }
820
821 fn visit_terminator(&mut self, terminator: &mir::Terminator<'tcx>, location: Location) {
822 debug!("visiting terminator {:?} @ {:?}", terminator, location);
823 let source = self.body.source_info(location).span;
824
825 let tcx = self.tcx;
826 match terminator.kind {
827 mir::TerminatorKind::Call { ref func, .. } => {
828 let callee_ty = func.ty(self.body, tcx);
829 let callee_ty = self.monomorphize(callee_ty);
830 visit_fn_use(self.tcx, callee_ty, true, source, &mut self.output);
831 }
832 mir::TerminatorKind::Drop { ref place, .. }
833 | mir::TerminatorKind::DropAndReplace { ref place, .. } => {
834 let ty = place.ty(self.body, self.tcx).ty;
835 let ty = self.monomorphize(ty);
836 visit_drop_use(self.tcx, ty, true, source, self.output);
837 }
838 mir::TerminatorKind::InlineAsm { ref operands, .. } => {
839 for op in operands {
840 match *op {
841 mir::InlineAsmOperand::SymFn { ref value } => {
842 let fn_ty = self.monomorphize(value.literal.ty());
843 visit_fn_use(self.tcx, fn_ty, false, source, &mut self.output);
844 }
845 mir::InlineAsmOperand::SymStatic { def_id } => {
846 let instance = Instance::mono(self.tcx, def_id);
847 if should_codegen_locally(self.tcx, &instance) {
848 trace!("collecting asm sym static {:?}", def_id);
849 self.output.push(respan(source, MonoItem::Static(def_id)));
850 }
851 }
852 _ => {}
853 }
854 }
855 }
856 mir::TerminatorKind::Assert { ref msg, .. } => {
857 let lang_item = match msg {
858 mir::AssertKind::BoundsCheck { .. } => LangItem::PanicBoundsCheck,
859 _ => LangItem::Panic,
860 };
861 let instance = Instance::mono(tcx, tcx.require_lang_item(lang_item, Some(source)));
862 if should_codegen_locally(tcx, &instance) {
863 self.output.push(create_fn_mono_item(tcx, instance, source));
864 }
865 }
866 mir::TerminatorKind::Abort { .. } => {
867 let instance = Instance::mono(
868 tcx,
869 tcx.require_lang_item(LangItem::PanicNoUnwind, Some(source)),
870 );
871 if should_codegen_locally(tcx, &instance) {
872 self.output.push(create_fn_mono_item(tcx, instance, source));
873 }
874 }
875 mir::TerminatorKind::Goto { .. }
876 | mir::TerminatorKind::SwitchInt { .. }
877 | mir::TerminatorKind::Resume
878 | mir::TerminatorKind::Return
879 | mir::TerminatorKind::Unreachable => {}
880 mir::TerminatorKind::GeneratorDrop
881 | mir::TerminatorKind::Yield { .. }
882 | mir::TerminatorKind::FalseEdge { .. }
883 | mir::TerminatorKind::FalseUnwind { .. } => bug!(),
884 }
885
886 self.super_terminator(terminator, location);
887 }
888
889 fn visit_operand(&mut self, operand: &mir::Operand<'tcx>, location: Location) {
890 self.super_operand(operand, location);
891 let limit = self.tcx.move_size_limit().0;
892 if limit == 0 {
893 return;
894 }
895 let limit = Size::from_bytes(limit);
896 let ty = operand.ty(self.body, self.tcx);
897 let ty = self.monomorphize(ty);
898 let layout = self.tcx.layout_of(ty::ParamEnv::reveal_all().and(ty));
899 if let Ok(layout) = layout {
900 if layout.size > limit {
901 debug!(?layout);
902 let source_info = self.body.source_info(location);
903 debug!(?source_info);
904 let lint_root = source_info.scope.lint_root(&self.body.source_scopes);
905 debug!(?lint_root);
906 let Some(lint_root) = lint_root else {
907 // This happens when the issue is in a function from a foreign crate that
908 // we monomorphized in the current crate. We can't get a `HirId` for things
909 // in other crates.
910 // FIXME: Find out where to report the lint on. Maybe simply crate-level lint root
911 // but correct span? This would make the lint at least accept crate-level lint attributes.
912 return;
913 };
914 self.tcx.struct_span_lint_hir(
915 LARGE_ASSIGNMENTS,
916 lint_root,
917 source_info.span,
918 |lint| {
919 let mut err = lint.build(&format!("moving {} bytes", layout.size.bytes()));
920 err.span_label(source_info.span, "value moved from here");
921 err.note(&format!(r#"The current maximum size is {}, but it can be customized with the move_size_limit attribute: `#![move_size_limit = "..."]`"#, limit.bytes()));
922 err.emit();
923 },
924 );
925 }
926 }
927 }
928
929 fn visit_local(
930 &mut self,
931 _place_local: &Local,
932 _context: mir::visit::PlaceContext,
933 _location: Location,
934 ) {
935 }
936 }
937
938 fn visit_drop_use<'tcx>(
939 tcx: TyCtxt<'tcx>,
940 ty: Ty<'tcx>,
941 is_direct_call: bool,
942 source: Span,
943 output: &mut MonoItems<'tcx>,
944 ) {
945 let instance = Instance::resolve_drop_in_place(tcx, ty);
946 visit_instance_use(tcx, instance, is_direct_call, source, output);
947 }
948
949 fn visit_fn_use<'tcx>(
950 tcx: TyCtxt<'tcx>,
951 ty: Ty<'tcx>,
952 is_direct_call: bool,
953 source: Span,
954 output: &mut MonoItems<'tcx>,
955 ) {
956 if let ty::FnDef(def_id, substs) = *ty.kind() {
957 let instance = if is_direct_call {
958 ty::Instance::resolve(tcx, ty::ParamEnv::reveal_all(), def_id, substs).unwrap().unwrap()
959 } else {
960 ty::Instance::resolve_for_fn_ptr(tcx, ty::ParamEnv::reveal_all(), def_id, substs)
961 .unwrap()
962 };
963 visit_instance_use(tcx, instance, is_direct_call, source, output);
964 }
965 }
966
967 fn visit_instance_use<'tcx>(
968 tcx: TyCtxt<'tcx>,
969 instance: ty::Instance<'tcx>,
970 is_direct_call: bool,
971 source: Span,
972 output: &mut MonoItems<'tcx>,
973 ) {
974 debug!("visit_item_use({:?}, is_direct_call={:?})", instance, is_direct_call);
975 if !should_codegen_locally(tcx, &instance) {
976 return;
977 }
978
979 match instance.def {
980 ty::InstanceDef::Virtual(..) | ty::InstanceDef::Intrinsic(_) => {
981 if !is_direct_call {
982 bug!("{:?} being reified", instance);
983 }
984 }
985 ty::InstanceDef::DropGlue(_, None) => {
986 // Don't need to emit noop drop glue if we are calling directly.
987 if !is_direct_call {
988 output.push(create_fn_mono_item(tcx, instance, source));
989 }
990 }
991 ty::InstanceDef::DropGlue(_, Some(_))
992 | ty::InstanceDef::VtableShim(..)
993 | ty::InstanceDef::ReifyShim(..)
994 | ty::InstanceDef::ClosureOnceShim { .. }
995 | ty::InstanceDef::Item(..)
996 | ty::InstanceDef::FnPtrShim(..)
997 | ty::InstanceDef::CloneShim(..) => {
998 output.push(create_fn_mono_item(tcx, instance, source));
999 }
1000 }
1001 }
1002
1003 /// Returns `true` if we should codegen an instance in the local crate, or returns `false` if we
1004 /// can just link to the upstream crate and therefore don't need a mono item.
1005 fn should_codegen_locally<'tcx>(tcx: TyCtxt<'tcx>, instance: &Instance<'tcx>) -> bool {
1006 let Some(def_id) = instance.def.def_id_if_not_guaranteed_local_codegen() else {
1007 return true;
1008 };
1009
1010 if tcx.is_foreign_item(def_id) {
1011 // Foreign items are always linked against, there's no way of instantiating them.
1012 return false;
1013 }
1014
1015 if def_id.is_local() {
1016 // Local items cannot be referred to locally without monomorphizing them locally.
1017 return true;
1018 }
1019
1020 if tcx.is_reachable_non_generic(def_id)
1021 || instance.polymorphize(tcx).upstream_monomorphization(tcx).is_some()
1022 {
1023 // We can link to the item in question, no instance needed in this crate.
1024 return false;
1025 }
1026
1027 if !tcx.is_mir_available(def_id) {
1028 bug!("no MIR available for {:?}", def_id);
1029 }
1030
1031 true
1032 }
1033
1034 /// For a given pair of source and target type that occur in an unsizing coercion,
1035 /// this function finds the pair of types that determines the vtable linking
1036 /// them.
1037 ///
1038 /// For example, the source type might be `&SomeStruct` and the target type
1039 /// might be `&SomeTrait` in a cast like:
1040 ///
1041 /// let src: &SomeStruct = ...;
1042 /// let target = src as &SomeTrait;
1043 ///
1044 /// Then the output of this function would be (SomeStruct, SomeTrait) since for
1045 /// constructing the `target` fat-pointer we need the vtable for that pair.
1046 ///
1047 /// Things can get more complicated though because there's also the case where
1048 /// the unsized type occurs as a field:
1049 ///
1050 /// ```rust
1051 /// struct ComplexStruct<T: ?Sized> {
1052 /// a: u32,
1053 /// b: f64,
1054 /// c: T
1055 /// }
1056 /// ```
1057 ///
/// In this case, if `T` is sized, `&ComplexStruct<T>` is a thin pointer. If `T`
/// is unsized, `&ComplexStruct<T>` is a fat pointer, and the vtable it points to is
1060 /// for the pair of `T` (which is a trait) and the concrete type that `T` was
1061 /// originally coerced from:
1062 ///
1063 /// let src: &ComplexStruct<SomeStruct> = ...;
1064 /// let target = src as &ComplexStruct<SomeTrait>;
1065 ///
1066 /// Again, we want this `find_vtable_types_for_unsizing()` to provide the pair
1067 /// `(SomeStruct, SomeTrait)`.
1068 ///
1069 /// Finally, there is also the case of custom unsizing coercions, e.g., for
1070 /// smart pointers such as `Rc` and `Arc`.
1071 fn find_vtable_types_for_unsizing<'tcx>(
1072 tcx: TyCtxt<'tcx>,
1073 source_ty: Ty<'tcx>,
1074 target_ty: Ty<'tcx>,
1075 ) -> (Ty<'tcx>, Ty<'tcx>) {
1076 let ptr_vtable = |inner_source: Ty<'tcx>, inner_target: Ty<'tcx>| {
1077 let param_env = ty::ParamEnv::reveal_all();
1078 let type_has_metadata = |ty: Ty<'tcx>| -> bool {
1079 if ty.is_sized(tcx.at(DUMMY_SP), param_env) {
1080 return false;
1081 }
1082 let tail = tcx.struct_tail_erasing_lifetimes(ty, param_env);
1083 match tail.kind() {
1084 ty::Foreign(..) => false,
1085 ty::Str | ty::Slice(..) | ty::Dynamic(..) => true,
1086 _ => bug!("unexpected unsized tail: {:?}", tail),
1087 }
1088 };
1089 if type_has_metadata(inner_source) {
1090 (inner_source, inner_target)
1091 } else {
1092 tcx.struct_lockstep_tails_erasing_lifetimes(inner_source, inner_target, param_env)
1093 }
1094 };
1095
1096 match (&source_ty.kind(), &target_ty.kind()) {
1097 (&ty::Ref(_, a, _), &ty::Ref(_, b, _) | &ty::RawPtr(ty::TypeAndMut { ty: b, .. }))
1098 | (&ty::RawPtr(ty::TypeAndMut { ty: a, .. }), &ty::RawPtr(ty::TypeAndMut { ty: b, .. })) => {
1099 ptr_vtable(*a, *b)
1100 }
1101 (&ty::Adt(def_a, _), &ty::Adt(def_b, _)) if def_a.is_box() && def_b.is_box() => {
1102 ptr_vtable(source_ty.boxed_ty(), target_ty.boxed_ty())
1103 }
1104
1105 (&ty::Adt(source_adt_def, source_substs), &ty::Adt(target_adt_def, target_substs)) => {
1106 assert_eq!(source_adt_def, target_adt_def);
1107
1108 let CustomCoerceUnsized::Struct(coerce_index) =
1109 crate::custom_coerce_unsize_info(tcx, source_ty, target_ty);
1110
1111 let source_fields = &source_adt_def.non_enum_variant().fields;
1112 let target_fields = &target_adt_def.non_enum_variant().fields;
1113
1114 assert!(
1115 coerce_index < source_fields.len() && source_fields.len() == target_fields.len()
1116 );
1117
1118 find_vtable_types_for_unsizing(
1119 tcx,
1120 source_fields[coerce_index].ty(tcx, source_substs),
1121 target_fields[coerce_index].ty(tcx, target_substs),
1122 )
1123 }
1124 _ => bug!(
1125 "find_vtable_types_for_unsizing: invalid coercion {:?} -> {:?}",
1126 source_ty,
1127 target_ty
1128 ),
1129 }
1130 }
1131
1132 #[instrument(skip(tcx), level = "debug")]
1133 fn create_fn_mono_item<'tcx>(
1134 tcx: TyCtxt<'tcx>,
1135 instance: Instance<'tcx>,
1136 source: Span,
1137 ) -> Spanned<MonoItem<'tcx>> {
1138 debug!("create_fn_mono_item(instance={})", instance);
1139
1140 let def_id = instance.def_id();
1141 if tcx.sess.opts.debugging_opts.profile_closures && def_id.is_local() && tcx.is_closure(def_id)
1142 {
1143 crate::util::dump_closure_profile(tcx, instance);
1144 }
1145
1146 let respanned = respan(source, MonoItem::Fn(instance.polymorphize(tcx)));
1147 debug!(?respanned);
1148
1149 respanned
1150 }
1151
1152 /// Creates a `MonoItem` for each method that is referenced by the vtable for
1153 /// the given trait/impl pair.
1154 fn create_mono_items_for_vtable_methods<'tcx>(
1155 tcx: TyCtxt<'tcx>,
1156 trait_ty: Ty<'tcx>,
1157 impl_ty: Ty<'tcx>,
1158 source: Span,
1159 output: &mut MonoItems<'tcx>,
1160 ) {
1161 assert!(!trait_ty.has_escaping_bound_vars() && !impl_ty.has_escaping_bound_vars());
1162
1163 if let ty::Dynamic(ref trait_ty, ..) = trait_ty.kind() {
1164 if let Some(principal) = trait_ty.principal() {
1165 let poly_trait_ref = principal.with_self_ty(tcx, impl_ty);
1166 assert!(!poly_trait_ref.has_escaping_bound_vars());
1167
1168 // Walk all methods of the trait, including those of its supertraits
1169 let entries = tcx.vtable_entries(poly_trait_ref);
1170 let methods = entries
1171 .iter()
1172 .filter_map(|entry| match entry {
1173 VtblEntry::MetadataDropInPlace
1174 | VtblEntry::MetadataSize
1175 | VtblEntry::MetadataAlign
1176 | VtblEntry::Vacant => None,
1177 VtblEntry::TraitVPtr(_) => {
1178 // all super trait items already covered, so skip them.
1179 None
1180 }
1181 VtblEntry::Method(instance) => {
1182 Some(*instance).filter(|instance| should_codegen_locally(tcx, instance))
1183 }
1184 })
1185 .map(|item| create_fn_mono_item(tcx, item, source));
1186 output.extend(methods);
1187 }
1188
1189 // Also add the destructor.
1190 visit_drop_use(tcx, impl_ty, false, source, output);
1191 }
1192 }
1193
1194 //=-----------------------------------------------------------------------------
1195 // Root Collection
1196 //=-----------------------------------------------------------------------------
1197
1198 struct RootCollector<'a, 'tcx> {
1199 tcx: TyCtxt<'tcx>,
1200 mode: MonoItemCollectionMode,
1201 output: &'a mut MonoItems<'tcx>,
1202 entry_fn: Option<(DefId, EntryFnType)>,
1203 }
1204
1205 impl<'v> RootCollector<'_, 'v> {
1206 fn process_item(&mut self, id: hir::ItemId) {
1207 match self.tcx.def_kind(id.def_id) {
1208 DefKind::Enum | DefKind::Struct | DefKind::Union => {
1209 let item = self.tcx.hir().item(id);
1210 match item.kind {
1211 hir::ItemKind::Enum(_, ref generics)
1212 | hir::ItemKind::Struct(_, ref generics)
1213 | hir::ItemKind::Union(_, ref generics) => {
1214 if generics.params.is_empty() {
1215 if self.mode == MonoItemCollectionMode::Eager {
1216 debug!(
1217 "RootCollector: ADT drop-glue for {}",
1218 self.tcx.def_path_str(item.def_id.to_def_id())
1219 );
1220
1221 let ty =
1222 Instance::new(item.def_id.to_def_id(), InternalSubsts::empty())
1223 .ty(self.tcx, ty::ParamEnv::reveal_all());
1224 visit_drop_use(self.tcx, ty, true, DUMMY_SP, self.output);
1225 }
1226 }
1227 }
1228 _ => bug!(),
1229 }
1230 }
1231 DefKind::GlobalAsm => {
1232 debug!(
1233 "RootCollector: ItemKind::GlobalAsm({})",
1234 self.tcx.def_path_str(id.def_id.to_def_id())
1235 );
1236 self.output.push(dummy_spanned(MonoItem::GlobalAsm(id)));
1237 }
1238 DefKind::Static(..) => {
1239 debug!(
1240 "RootCollector: ItemKind::Static({})",
1241 self.tcx.def_path_str(id.def_id.to_def_id())
1242 );
1243 self.output.push(dummy_spanned(MonoItem::Static(id.def_id.to_def_id())));
1244 }
1245 DefKind::Const => {
1246 // const items only generate mono items if they are
1247 // actually used somewhere. Just declaring them is insufficient.
1248
// But even just declaring them requires collecting the items they refer to.
1250 if let Ok(val) = self.tcx.const_eval_poly(id.def_id.to_def_id()) {
1251 collect_const_value(self.tcx, val, &mut self.output);
1252 }
1253 }
1254 DefKind::Impl => {
1255 if self.mode == MonoItemCollectionMode::Eager {
1256 let item = self.tcx.hir().item(id);
1257 create_mono_items_for_default_impls(self.tcx, item, self.output);
1258 }
1259 }
1260 DefKind::Fn => {
1261 self.push_if_root(id.def_id);
1262 }
1263 _ => {}
1264 }
1265 }
1266
1267 fn process_impl_item(&mut self, id: hir::ImplItemId) {
1268 if matches!(self.tcx.def_kind(id.def_id), DefKind::AssocFn) {
1269 self.push_if_root(id.def_id);
1270 }
1271 }
1272
1273 fn is_root(&self, def_id: LocalDefId) -> bool {
1274 !item_requires_monomorphization(self.tcx, def_id)
1275 && match self.mode {
1276 MonoItemCollectionMode::Eager => true,
1277 MonoItemCollectionMode::Lazy => {
1278 self.entry_fn.and_then(|(id, _)| id.as_local()) == Some(def_id)
1279 || self.tcx.is_reachable_non_generic(def_id)
1280 || self
1281 .tcx
1282 .codegen_fn_attrs(def_id)
1283 .flags
1284 .contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL)
1285 }
1286 }
1287 }
1288
1289 /// If `def_id` represents a root, pushes it onto the list of
1290 /// outputs. (Note that all roots must be monomorphic.)
1291 #[instrument(skip(self), level = "debug")]
1292 fn push_if_root(&mut self, def_id: LocalDefId) {
1293 if self.is_root(def_id) {
1294 debug!("RootCollector::push_if_root: found root def_id={:?}", def_id);
1295
1296 let instance = Instance::mono(self.tcx, def_id.to_def_id());
1297 self.output.push(create_fn_mono_item(self.tcx, instance, DUMMY_SP));
1298 }
1299 }
1300
1301 /// As a special case, when/if we encounter the
1302 /// `main()` function, we also have to generate a
1303 /// monomorphized copy of the start lang item based on
1304 /// the return type of `main`. This is not needed when
1305 /// the user writes their own `start` manually.
1306 fn push_extra_entry_roots(&mut self) {
1307 let Some((main_def_id, EntryFnType::Main)) = self.entry_fn else {
1308 return;
1309 };
1310
1311 let start_def_id = match self.tcx.lang_items().require(LangItem::Start) {
1312 Ok(s) => s,
1313 Err(err) => self.tcx.sess.fatal(&err),
1314 };
1315 let main_ret_ty = self.tcx.fn_sig(main_def_id).output();
1316
1317 // Given that `main()` has no arguments,
1318 // then its return type cannot have
1319 // late-bound regions, since late-bound
1320 // regions must appear in the argument
1321 // listing.
1322 let main_ret_ty = self.tcx.normalize_erasing_regions(
1323 ty::ParamEnv::reveal_all(),
1324 main_ret_ty.no_bound_vars().unwrap(),
1325 );
1326
1327 let start_instance = Instance::resolve(
1328 self.tcx,
1329 ty::ParamEnv::reveal_all(),
1330 start_def_id,
1331 self.tcx.intern_substs(&[main_ret_ty.into()]),
1332 )
1333 .unwrap()
1334 .unwrap();
1335
1336 self.output.push(create_fn_mono_item(self.tcx, start_instance, DUMMY_SP));
1337 }
1338 }
1339
1340 fn item_requires_monomorphization(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool {
1341 let generics = tcx.generics_of(def_id);
1342 generics.requires_monomorphization(tcx)
1343 }
1344
1345 fn create_mono_items_for_default_impls<'tcx>(
1346 tcx: TyCtxt<'tcx>,
1347 item: &'tcx hir::Item<'tcx>,
1348 output: &mut MonoItems<'tcx>,
1349 ) {
1350 match item.kind {
1351 hir::ItemKind::Impl(ref impl_) => {
1352 for param in impl_.generics.params {
1353 match param.kind {
1354 hir::GenericParamKind::Lifetime { .. } => {}
1355 hir::GenericParamKind::Type { .. } | hir::GenericParamKind::Const { .. } => {
1356 return;
1357 }
1358 }
1359 }
1360
1361 debug!(
1362 "create_mono_items_for_default_impls(item={})",
1363 tcx.def_path_str(item.def_id.to_def_id())
1364 );
1365
1366 if let Some(trait_ref) = tcx.impl_trait_ref(item.def_id) {
1367 let param_env = ty::ParamEnv::reveal_all();
1368 let trait_ref = tcx.normalize_erasing_regions(param_env, trait_ref);
1369 let overridden_methods = tcx.impl_item_implementor_ids(item.def_id);
1370 for method in tcx.provided_trait_methods(trait_ref.def_id) {
1371 if overridden_methods.contains_key(&method.def_id) {
1372 continue;
1373 }
1374
1375 if tcx.generics_of(method.def_id).own_requires_monomorphization() {
1376 continue;
1377 }
1378
1379 let substs =
1380 InternalSubsts::for_item(tcx, method.def_id, |param, _| match param.kind {
1381 GenericParamDefKind::Lifetime => tcx.lifetimes.re_erased.into(),
1382 GenericParamDefKind::Type { .. }
1383 | GenericParamDefKind::Const { .. } => {
1384 trait_ref.substs[param.index as usize]
1385 }
1386 });
1387 let instance = ty::Instance::resolve(tcx, param_env, method.def_id, substs)
1388 .unwrap()
1389 .unwrap();
1390
1391 let mono_item = create_fn_mono_item(tcx, instance, DUMMY_SP);
1392 if mono_item.node.is_instantiable(tcx) && should_codegen_locally(tcx, &instance)
1393 {
1394 output.push(mono_item);
1395 }
1396 }
1397 }
1398 }
1399 _ => bug!(),
1400 }
1401 }
1402
1403 /// Scans the miri alloc in order to find function calls, closures, and drop-glue.
1404 fn collect_miri<'tcx>(tcx: TyCtxt<'tcx>, alloc_id: AllocId, output: &mut MonoItems<'tcx>) {
1405 match tcx.global_alloc(alloc_id) {
1406 GlobalAlloc::Static(def_id) => {
1407 assert!(!tcx.is_thread_local_static(def_id));
1408 let instance = Instance::mono(tcx, def_id);
1409 if should_codegen_locally(tcx, &instance) {
1410 trace!("collecting static {:?}", def_id);
1411 output.push(dummy_spanned(MonoItem::Static(def_id)));
1412 }
1413 }
1414 GlobalAlloc::Memory(alloc) => {
1415 trace!("collecting {:?} with {:#?}", alloc_id, alloc);
1416 for &inner in alloc.inner().relocations().values() {
1417 rustc_data_structures::stack::ensure_sufficient_stack(|| {
1418 collect_miri(tcx, inner, output);
1419 });
1420 }
1421 }
1422 GlobalAlloc::Function(fn_instance) => {
1423 if should_codegen_locally(tcx, &fn_instance) {
1424 trace!("collecting {:?} with {:#?}", alloc_id, fn_instance);
1425 output.push(create_fn_mono_item(tcx, fn_instance, DUMMY_SP));
1426 }
1427 }
1428 }
1429 }
1430
1431 /// Scans the MIR in order to find function calls, closures, and drop-glue.
1432 #[instrument(skip(tcx, output), level = "debug")]
1433 fn collect_neighbours<'tcx>(
1434 tcx: TyCtxt<'tcx>,
1435 instance: Instance<'tcx>,
1436 output: &mut MonoItems<'tcx>,
1437 ) {
1438 let body = tcx.instance_mir(instance.def);
1439 MirNeighborCollector { tcx, body: &body, output, instance }.visit_body(&body);
1440 }
1441
1442 #[instrument(skip(tcx, output), level = "debug")]
1443 fn collect_const_value<'tcx>(
1444 tcx: TyCtxt<'tcx>,
1445 value: ConstValue<'tcx>,
1446 output: &mut MonoItems<'tcx>,
1447 ) {
1448 match value {
1449 ConstValue::Scalar(Scalar::Ptr(ptr, _size)) => collect_miri(tcx, ptr.provenance, output),
1450 ConstValue::Slice { data: alloc, start: _, end: _ } | ConstValue::ByRef { alloc, .. } => {
1451 for &id in alloc.inner().relocations().values() {
1452 collect_miri(tcx, id, output);
1453 }
1454 }
1455 _ => {}
1456 }
1457 }