1 // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 use super::FunctionDebugContext
;
12 use super::metadata
::file_metadata
;
13 use super::utils
::{DIB, span_start}
;
16 use llvm
::debuginfo
::{DIScope, DISubprogram}
;
17 use common
::{CrateContext, FunctionContext}
;
18 use rustc
::hir
::pat_util
;
19 use rustc
::mir
::repr
::{Mir, VisibilityScope}
;
20 use rustc
::util
::nodemap
::NodeMap
;
25 use syntax_pos
::{Span, Pos}
;
26 use syntax
::{ast, codemap}
;
28 use rustc_data_structures
::bitvec
::BitVector
;
29 use rustc_data_structures
::indexed_vec
::{Idx, IndexVec}
;
30 use rustc
::hir
::{self, PatKind}
;
32 // This procedure builds the *scope map* for a given function, which maps any
33 // given ast::NodeId in the function's AST to the correct DIScope metadata instance.
35 // This builder procedure walks the AST in execution order and keeps track of
36 // what belongs to which scope, creating DIScope DIEs along the way, and
37 // introducing *artificial* lexical scope descriptors where necessary. These
38 // artificial scopes allow GDB to correctly handle name shadowing.
// NOTE(review): this chunk is a lossily re-flowed extract. The numbers
// embedded at the start of many lines are the ORIGINAL source line numbers,
// and they jump (43 -> 45, 55 -> 59, 65 -> end), so parts of this function
// (the return type, the argument-iteration header around original line 51,
// and all closing braces) are not visible here. The code is therefore kept
// byte-identical; only comments are added.
//
// Purpose (per the module comment above): builds the scope map for one
// function — every ast::NodeId in the function's AST is mapped to the
// DIScope metadata instance it belongs to, starting from the function's
// own DISubprogram (`fn_metadata`).
39 pub fn create_scope_map(cx
: &CrateContext
,
41 fn_entry_block
: &hir
::Block
,
42 fn_metadata
: DISubprogram
,
43 fn_ast_id
: ast
::NodeId
)
// Seed the map/stack with the function's own scope: the fn node id maps to
// the DISubprogram, which is also the bottom entry of the scope stack.
45 let mut scope_map
= NodeMap();
46 let mut scope_stack
= vec
!(ScopeStackEntry { scope_metadata: fn_metadata, name: None }
);
47 scope_map
.insert(fn_ast_id
, fn_metadata
);
49 // Push argument identifiers onto the stack so arguments integrate nicely
50 // with variable shadowing.
// NOTE(review): `arg` here presumably comes from a loop over the fn's
// arguments whose header (original line 51) is missing from this view.
// Each argument binding is recorded with its name in the function scope.
52 pat_util
::pat_bindings(&arg
.pat
, |_
, node_id
, _
, path1
| {
53 scope_stack
.push(ScopeStackEntry
{ scope_metadata
: fn_metadata
,
54 name
: Some(path1
.node
) });
55 scope_map
.insert(node_id
, fn_metadata
);
59 // Clang creates a separate scope for function bodies, so let's do this too.
// NOTE(review): the closure below is presumably the `inner_walk` argument of
// a `with_new_scope(...)` call whose opening lines (60-63) are missing here.
64 |cx
, scope_stack
, scope_map
| {
65 walk_block(cx
, fn_entry_block
, scope_stack
, scope_map
);
71 /// Produce DIScope DIEs for each MIR Scope which has variables defined in it.
72 /// If debuginfo is disabled, the returned vector is empty.
// NOTE(review): re-flowed extract with missing original lines (76, 81-84,
// 89-90, 95-98) — closing braces, the match's return expressions, and loop
// closers are not visible. Code kept byte-identical; comments only.
//
// Produces one DIScope per MIR VisibilityScope (see the /// doc comment
// above): scopes start out null and are filled in by make_mir_scope, but
// only for scopes that actually have variables, to avoid debuginfo bloat.
73 pub fn create_mir_scopes(fcx
: &FunctionContext
) -> IndexVec
<VisibilityScope
, DIScope
> {
74 let mir
= fcx
.mir
.clone().expect("create_mir_scopes: missing MIR for fn");
// Start with all scopes null; make_mir_scope fills them in on demand.
75 let mut scopes
= IndexVec
::from_elem(ptr
::null_mut(), &mir
.visibility_scopes
);
77 let fn_metadata
= match fcx
.debug_context
{
78 FunctionDebugContext
::RegularContext(box ref data
) => data
.fn_metadata
,
// When debuginfo is disabled the early-return body (original lines 81-83,
// presumably returning the all-null `scopes`) is missing from this view.
79 FunctionDebugContext
::DebugInfoDisabled
|
80 FunctionDebugContext
::FunctionWithoutDebugInfo
=> {
85 // Find all the scopes with variables defined in them.
86 let mut has_variables
= BitVector
::new(mir
.visibility_scopes
.len());
87 for var
in &mir
.var_decls
{
88 has_variables
.insert(var
.source_info
.scope
.index());
91 // Instantiate all scopes.
92 for idx
in 0..mir
.visibility_scopes
.len() {
93 let scope
= VisibilityScope
::new(idx
);
94 make_mir_scope(fcx
.ccx
, &mir
, &has_variables
, fn_metadata
, scope
, &mut scopes
);
// NOTE(review): re-flowed extract. The `mir` parameter (original line 101,
// presumably `mir: &Mir`) is missing from this view even though `mir` is
// used in the body, and several other lines (107-109, 113-114, 117-119,
// 123, 129-132, 137-140, 142-144) are also absent — including the early
// return for already-filled scopes and the DIBuilder call's leading
// arguments. Code kept byte-identical; comments only.
//
// Recursively fills scopes[scope]: the root scope reuses fn_metadata,
// variable-less scopes reuse their parent's DIScope (unless the parent is
// the root), and scopes with variables get a fresh DILexicalBlock.
100 fn make_mir_scope(ccx
: &CrateContext
,
102 has_variables
: &BitVector
,
103 fn_metadata
: DISubprogram
,
104 scope
: VisibilityScope
,
105 scopes
: &mut IndexVec
<VisibilityScope
, DIScope
>) {
// Already instantiated (non-null) — presumably returns early; the return
// statement (original lines 107-108) is missing from this view.
106 if !scopes
[scope
].is_null() {
110 let scope_data
= &mir
.visibility_scopes
[scope
];
// Ensure the parent scope is instantiated before this one (recursion).
111 let parent_scope
= if let Some(parent
) = scope_data
.parent_scope
{
112 make_mir_scope(ccx
, mir
, has_variables
, fn_metadata
, parent
, scopes
);
115 // The root is the function itself.
116 scopes
[scope
] = fn_metadata
;
120 if !has_variables
.contains(scope
.index()) {
121 // Do not create a DIScope if there are no variables
122 // defined in this MIR Scope, to avoid debuginfo bloat.
124 // However, we don't skip creating a nested scope if
125 // our parent is the root, because we might want to
126 // put arguments in the root and not have shadowing.
127 if parent_scope
!= fn_metadata
{
128 scopes
[scope
] = parent_scope
;
// This scope has variables: create a real DILexicalBlock at the scope's
// starting source location. The leading arguments to the DIBuilder call
// (original lines 137-140: builder, parent, file, line) are missing here.
133 let loc
= span_start(ccx
, scope_data
.span
);
134 scopes
[scope
] = unsafe {
135 let file_metadata
= file_metadata(ccx
, &loc
.file
.name
, &loc
.file
.abs_path
);
136 llvm
::LLVMRustDIBuilderCreateLexicalBlock(
141 loc
.col
.to_usize() as c_uint
)
145 // local helper functions for walking the AST.
// NOTE(review): re-flowed extract. Missing original lines include 147
// (presumably a `scope_span: Span` parameter, which the body uses), 152
// (`inner_walk: F`), the leading DIBuilder arguments (160-163), and the
// pop/cleanup statements (173-175, 178-181). Code kept byte-identical.
//
// AST-walk helper: creates a DILexicalBlock for `scope_span`, pushes it on
// the scope stack, runs `inner_walk` inside it, then unwinds any artificial
// (named) entries and sanity-checks that the stack is back at this scope.
146 fn with_new_scope
<F
>(cx
: &CrateContext
,
148 scope_stack
: &mut Vec
<ScopeStackEntry
> ,
149 scope_map
: &mut NodeMap
<DIScope
>,
151 F
: FnOnce(&CrateContext
, &mut Vec
<ScopeStackEntry
>, &mut NodeMap
<DIScope
>),
153 // Create a new lexical scope and push it onto the stack
154 let loc
= span_start(cx
, scope_span
);
155 let file_metadata
= file_metadata(cx
, &loc
.file
.name
, &loc
.file
.abs_path
);
156 let parent_scope
= scope_stack
.last().unwrap().scope_metadata
;
158 let scope_metadata
= unsafe {
159 llvm
::LLVMRustDIBuilderCreateLexicalBlock(
164 loc
.col
.to_usize() as c_uint
)
167 scope_stack
.push(ScopeStackEntry { scope_metadata: scope_metadata, name: None }
);
169 inner_walk(cx
, scope_stack
, scope_map
);
171 // pop artificial scopes
// Artificial scopes are exactly the named entries pushed for bindings
// (see walk_pattern); loop presumably pops them (body line 173 missing).
172 while scope_stack
.last().unwrap().name
.is_some() {
// Invariant check: after inner_walk + popping artificial entries, the top
// of the stack must be the scope we pushed above.
176 if scope_stack
.last().unwrap().scope_metadata
!= scope_metadata
{
177 span_bug
!(scope_span
, "debuginfo: Inconsistency in scope management.");
// One entry of the AST-walk scope stack (closing brace, original line 186,
// is missing from this re-flowed view).
183 struct ScopeStackEntry
{
// DIScope this entry represents.
184 scope_metadata
: DIScope
,
// Some(name) marks an *artificial* scope introduced for a variable binding
// (see walk_pattern); None marks a real lexical scope.
185 name
: Option
<ast
::Name
>
// NOTE(review): re-flowed extract; missing lines include 189 (presumably a
// `block: &hir::Block` parameter, used by the body), some match closers
// (205-207) and the function's closing braces (210-212). Byte-identical.
//
// Records the block node and every statement node in the current (top of
// stack) scope, then recurses into declarations, statement expressions and
// the block's optional trailing expression.
188 fn walk_block(cx
: &CrateContext
,
190 scope_stack
: &mut Vec
<ScopeStackEntry
> ,
191 scope_map
: &mut NodeMap
<DIScope
>) {
192 scope_map
.insert(block
.id
, scope_stack
.last().unwrap().scope_metadata
);
194 // The interesting things here are statements and the concluding expression.
195 for statement
in &block
.stmts
{
196 scope_map
.insert(statement
.node
.id(),
197 scope_stack
.last().unwrap().scope_metadata
);
199 match statement
.node
{
200 hir
::StmtDecl(ref decl
, _
) =>
201 walk_decl(cx
, &decl
, scope_stack
, scope_map
),
202 hir
::StmtExpr(ref exp
, _
) |
203 hir
::StmtSemi(ref exp
, _
) =>
204 walk_expr(cx
, &exp
, scope_stack
, scope_map
),
// Trailing expression of the block, if any.
208 if let Some(ref exp
) = block
.expr
{
209 walk_expr(cx
, &exp
, scope_stack
, scope_map
);
// NOTE(review): re-flowed extract; missing lines include 214 (presumably a
// `decl: &hir::Decl` parameter), the match header (217) whose arm pattern
// appears below, and the closing braces (226-229). Byte-identical.
//
// For a `let` declaration: record the local's node id in the current scope,
// walk its pattern (which may push artificial scopes), then walk the
// initializer expression if present.
213 fn walk_decl(cx
: &CrateContext
,
215 scope_stack
: &mut Vec
<ScopeStackEntry
> ,
216 scope_map
: &mut NodeMap
<DIScope
>) {
// Arm of a match on *decl (header missing): only DeclLocal is handled here.
218 codemap
::Spanned { node: hir::DeclLocal(ref local), .. }
=> {
219 scope_map
.insert(local
.id
, scope_stack
.last().unwrap().scope_metadata
);
221 walk_pattern(cx
, &local
.pat
, scope_stack
, scope_map
);
223 if let Some(ref exp
) = local
.init
{
224 walk_expr(cx
, &exp
, scope_stack
, scope_map
);
// NOTE(review): re-flowed extract; missing lines include 232 (presumably a
// `pat: &hir::Pat` parameter, used throughout), the `match pat.node` header
// (238), several arm closers, and the big explanatory GDB example comment
// (original lines 246-258, partially visible below). Byte-identical.
//
// Records *every* pattern node in the scope map (hence the manual walk
// instead of pat_util helpers), and introduces an artificial scope at a
// Binding whenever the same name already exists in any enclosing scope,
// to work around LLVM's DW_AT_start_scope handling (see comments below).
231 fn walk_pattern(cx
: &CrateContext
,
233 scope_stack
: &mut Vec
<ScopeStackEntry
> ,
234 scope_map
: &mut NodeMap
<DIScope
>) {
235 // Unfortunately, we cannot just use pat_util::pat_bindings() or
236 // ast_util::walk_pat() here because we have to visit *all* nodes in
237 // order to put them into the scope map. The above functions don't do that.
239 PatKind
::Binding(_
, ref path1
, ref sub_pat_opt
) => {
240 // LLVM does not properly generate 'DW_AT_start_scope' fields
241 // for variable DIEs. For this reason we have to introduce
242 // an artificial scope at bindings whenever a variable with
243 // the same name is declared in *any* parent scope.
245 // Otherwise the following error occurs:
249 // do_something(); // 'gdb print x' correctly prints 10
252 // do_something(); // 'gdb print x' prints 0, because it
253 // // already reads the uninitialized 'x'
254 // // from the next line...
256 // do_something(); // 'gdb print x' correctly prints 100
259 // Is there already a binding with that name?
260 // N.B.: this comparison must be UNhygienic... because
261 // gdb knows nothing about the context, so any two
262 // variables with the same name will cause the problem.
263 let name
= path1
.node
;
// Scan the whole stack for a same-named entry (an iterator call,
// presumably `.iter()`, on original line 265 is missing from view).
264 let need_new_scope
= scope_stack
266 .any(|entry
| entry
.name
== Some(name
));
// Shadowing case: build a fresh DILexicalBlock at the pattern's span and
// push it as a *named* (artificial) entry. The `if need_new_scope` header
// and the DIBuilder call's leading arguments are missing from this view.
269 // Create a new lexical scope and push it onto the stack
270 let loc
= span_start(cx
, pat
.span
);
271 let file_metadata
= file_metadata(cx
, &loc
.file
.name
, &loc
.file
.abs_path
);
272 let parent_scope
= scope_stack
.last().unwrap().scope_metadata
;
274 let scope_metadata
= unsafe {
275 llvm
::LLVMRustDIBuilderCreateLexicalBlock(
280 loc
.col
.to_usize() as c_uint
)
283 scope_stack
.push(ScopeStackEntry
{
284 scope_metadata
: scope_metadata
,
// Non-shadowing case (presumably the `else` branch): reuse the current
// scope metadata but still push a named entry so the name is findable.
289 // Push a new entry anyway so the name can be found
290 let prev_metadata
= scope_stack
.last().unwrap().scope_metadata
;
291 scope_stack
.push(ScopeStackEntry
{
292 scope_metadata
: prev_metadata
,
297 scope_map
.insert(pat
.id
, scope_stack
.last().unwrap().scope_metadata
);
299 if let Some(ref sub_pat
) = *sub_pat_opt
{
300 walk_pattern(cx
, &sub_pat
, scope_stack
, scope_map
);
// Remaining arms: record the pattern node, then recurse into sub-patterns
// and sub-expressions. This insert presumably belongs to a Wild/simple arm
// whose pattern line (original ~304) is missing from view.
305 scope_map
.insert(pat
.id
, scope_stack
.last().unwrap().scope_metadata
);
308 PatKind
::TupleStruct(_
, ref sub_pats
, _
) => {
309 scope_map
.insert(pat
.id
, scope_stack
.last().unwrap().scope_metadata
);
// Loop header over `sub_pats` (original ~311, binding `p`) missing here.
312 walk_pattern(cx
, &p
, scope_stack
, scope_map
);
316 PatKind
::Path(..) => {
317 scope_map
.insert(pat
.id
, scope_stack
.last().unwrap().scope_metadata
);
320 PatKind
::Struct(_
, ref field_pats
, _
) => {
321 scope_map
.insert(pat
.id
, scope_stack
.last().unwrap().scope_metadata
);
323 for &codemap
::Spanned
{
324 node
: hir
::FieldPat { pat: ref sub_pat, .. }
,
327 walk_pattern(cx
, &sub_pat
, scope_stack
, scope_map
);
331 PatKind
::Tuple(ref sub_pats
, _
) => {
332 scope_map
.insert(pat
.id
, scope_stack
.last().unwrap().scope_metadata
);
334 for sub_pat
in sub_pats
{
335 walk_pattern(cx
, &sub_pat
, scope_stack
, scope_map
);
339 PatKind
::Box(ref sub_pat
) | PatKind
::Ref(ref sub_pat
, _
) => {
340 scope_map
.insert(pat
.id
, scope_stack
.last().unwrap().scope_metadata
);
341 walk_pattern(cx
, &sub_pat
, scope_stack
, scope_map
);
344 PatKind
::Lit(ref exp
) => {
345 scope_map
.insert(pat
.id
, scope_stack
.last().unwrap().scope_metadata
);
346 walk_expr(cx
, &exp
, scope_stack
, scope_map
);
349 PatKind
::Range(ref exp1
, ref exp2
) => {
350 scope_map
.insert(pat
.id
, scope_stack
.last().unwrap().scope_metadata
);
351 walk_expr(cx
, &exp1
, scope_stack
, scope_map
);
352 walk_expr(cx
, &exp2
, scope_stack
, scope_map
);
355 PatKind
::Vec(ref front_sub_pats
, ref middle_sub_pats
, ref back_sub_pats
) => {
356 scope_map
.insert(pat
.id
, scope_stack
.last().unwrap().scope_metadata
);
358 for sub_pat
in front_sub_pats
{
359 walk_pattern(cx
, &sub_pat
, scope_stack
, scope_map
);
362 if let Some(ref sub_pat
) = *middle_sub_pats
{
363 walk_pattern(cx
, &sub_pat
, scope_stack
, scope_map
);
366 for sub_pat
in back_sub_pats
{
367 walk_pattern(cx
, &sub_pat
, scope_stack
, scope_map
);
// NOTE(review): re-flowed extract; missing lines include 374 (presumably an
// `exp: &hir::Expr` parameter, used throughout), the `match exp.node`
// header (~380-383) and many arm/brace closers. The function also runs past
// the end of this chunk (the InlineAsm arm is the last thing visible), so
// its tail is not in view. Byte-identical; comments only.
//
// Records the expression node in the current scope, then recurses into all
// sub-expressions/blocks; If/While/Loop/Block/Closure/Match-arm bodies are
// presumably walked inside `with_new_scope` (the call headers are among the
// missing lines, but the bare `|cx, scope_stack, scope_map| {` closures
// below match with_new_scope's FnOnce signature).
373 fn walk_expr(cx
: &CrateContext
,
375 scope_stack
: &mut Vec
<ScopeStackEntry
> ,
376 scope_map
: &mut NodeMap
<DIScope
>) {
378 scope_map
.insert(exp
.id
, scope_stack
.last().unwrap().scope_metadata
);
// Leaf / single-child expressions.
384 hir
::ExprPath(..) => {}
386 hir
::ExprCast(ref sub_exp
, _
) |
387 hir
::ExprType(ref sub_exp
, _
) |
388 hir
::ExprAddrOf(_
, ref sub_exp
) |
389 hir
::ExprField(ref sub_exp
, _
) |
390 hir
::ExprTupField(ref sub_exp
, _
) =>
391 walk_expr(cx
, &sub_exp
, scope_stack
, scope_map
),
393 hir
::ExprBox(ref sub_expr
) => {
394 walk_expr(cx
, &sub_expr
, scope_stack
, scope_map
);
397 hir
::ExprRet(ref exp_opt
) => match *exp_opt
{
398 Some(ref sub_exp
) => walk_expr(cx
, &sub_exp
, scope_stack
, scope_map
),
402 hir
::ExprUnary(_
, ref sub_exp
) => {
403 walk_expr(cx
, &sub_exp
, scope_stack
, scope_map
);
// Two-child expressions: walk lhs then rhs.
406 hir
::ExprAssignOp(_
, ref lhs
, ref rhs
) |
407 hir
::ExprIndex(ref lhs
, ref rhs
) |
408 hir
::ExprBinary(_
, ref lhs
, ref rhs
) => {
409 walk_expr(cx
, &lhs
, scope_stack
, scope_map
);
410 walk_expr(cx
, &rhs
, scope_stack
, scope_map
);
413 hir
::ExprVec(ref init_expressions
) |
414 hir
::ExprTup(ref init_expressions
) => {
415 for ie
in init_expressions
{
416 walk_expr(cx
, &ie
, scope_stack
, scope_map
);
420 hir
::ExprAssign(ref sub_exp1
, ref sub_exp2
) |
421 hir
::ExprRepeat(ref sub_exp1
, ref sub_exp2
) => {
422 walk_expr(cx
, &sub_exp1
, scope_stack
, scope_map
);
423 walk_expr(cx
, &sub_exp2
, scope_stack
, scope_map
);
// If: condition in the current scope, then-block in its own scope.
426 hir
::ExprIf(ref cond_exp
, ref then_block
, ref opt_else_exp
) => {
427 walk_expr(cx
, &cond_exp
, scope_stack
, scope_map
);
433 |cx
, scope_stack
, scope_map
| {
434 walk_block(cx
, &then_block
, scope_stack
, scope_map
);
437 match *opt_else_exp
{
438 Some(ref else_exp
) =>
439 walk_expr(cx
, &else_exp
, scope_stack
, scope_map
),
444 hir
::ExprWhile(ref cond_exp
, ref loop_body
, _
) => {
445 walk_expr(cx
, &cond_exp
, scope_stack
, scope_map
);
451 |cx
, scope_stack
, scope_map
| {
452 walk_block(cx
, &loop_body
, scope_stack
, scope_map
);
456 hir
::ExprLoop(ref block
, _
) |
457 hir
::ExprBlock(ref block
) => {
462 |cx
, scope_stack
, scope_map
| {
463 walk_block(cx
, &block
, scope_stack
, scope_map
);
// Closure: its argument patterns and body live in a nested scope.
467 hir
::ExprClosure(_
, ref decl
, ref block
, _
) => {
472 |cx
, scope_stack
, scope_map
| {
473 for &hir
::Arg { pat: ref pattern, .. }
in &decl
.inputs
{
474 walk_pattern(cx
, &pattern
, scope_stack
, scope_map
);
477 walk_block(cx
, &block
, scope_stack
, scope_map
);
481 hir
::ExprCall(ref fn_exp
, ref args
) => {
482 walk_expr(cx
, &fn_exp
, scope_stack
, scope_map
);
484 for arg_exp
in args
{
485 walk_expr(cx
, &arg_exp
, scope_stack
, scope_map
);
489 hir
::ExprMethodCall(_
, _
, ref args
) => {
490 for arg_exp
in args
{
491 walk_expr(cx
, &arg_exp
, scope_stack
, scope_map
);
495 hir
::ExprMatch(ref discriminant_exp
, ref arms
, _
) => {
496 walk_expr(cx
, &discriminant_exp
, scope_stack
, scope_map
);
498 // For each arm we have to first walk the pattern as these might
499 // introduce new artificial scopes. It should be sufficient to
500 // walk only one pattern per arm, as they all must contain the
501 // same binding names.
503 for arm_ref
in arms
{
504 let arm_span
= arm_ref
.pats
[0].span
;
510 |cx
, scope_stack
, scope_map
| {
511 for pat
in &arm_ref
.pats
{
512 walk_pattern(cx
, &pat
, scope_stack
, scope_map
);
515 if let Some(ref guard_exp
) = arm_ref
.guard
{
516 walk_expr(cx
, &guard_exp
, scope_stack
, scope_map
)
519 walk_expr(cx
, &arm_ref
.body
, scope_stack
, scope_map
);
524 hir
::ExprStruct(_
, ref fields
, ref base_exp
) => {
525 for &hir
::Field { expr: ref exp, .. }
in fields
{
526 walk_expr(cx
, &exp
, scope_stack
, scope_map
);
// Functional-update base expression (`..base`); the `match *base_exp`
// header (original ~529) is missing from this view.
530 Some(ref exp
) => walk_expr(cx
, &exp
, scope_stack
, scope_map
),
535 hir
::ExprInlineAsm(_
, ref outputs
, ref inputs
) => {
536 for output
in outputs
{
537 walk_expr(cx
, output
, scope_stack
, scope_map
);
540 for input
in inputs
{
541 walk_expr(cx
, input
, scope_stack
, scope_map
);