1 // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 // Lowers the AST to the HIR.
13 // Since the AST and HIR are fairly similar, this is mostly a simple procedure,
14 // much like a fold. Where lowering involves a bit more work things get more
15 // interesting and there are some invariants you should know about. These mostly
16 // concern spans and ids.
18 // Spans are assigned to AST nodes during parsing and then are modified during
19 // expansion to indicate the origin of a node and the process it went through
20 // being expanded. Ids are assigned to AST nodes just before lowering.
22 // For the simpler lowering steps, ids and spans should be preserved. Unlike
23 // expansion we do not preserve the process of lowering in the spans, so spans
24 // should not be modified here. When creating a new node (as opposed to
25 // 'folding' an existing one), then you create a new id using `next_id()`.
27 // You must ensure that ids are unique. That means that you should only use the
28 // id from an AST node in a single HIR node (you can assume that AST node ids
29 // are unique). Every new node must have a unique id. Avoid cloning HIR nodes.
30 // If you do, you must then set the new node's id to a fresh one.
32 // Lowering must be reproducible (the compiler only lowers once, but tools and
33 // custom lints may lower an AST node to a HIR node to interact with the
34 // compiler). The most interesting bit of this is ids - if you lower an AST node
35 // and create new HIR nodes with fresh ids, when re-lowering the same node, you
36 // must ensure you get the same ids! To do this, we keep track of the next id
37 // when we translate a node which requires new ids. By checking this cache and
38 // using node ids starting with the cached id, we ensure ids are reproducible.
39 // To use this system, you just need to hold on to a CachedIdSetter object
40 // whilst lowering. This is an RAII object that takes care of setting and
41 // restoring the cached id, etc.
43 // This whole system relies on node ids being incremented one at a time and
44 // all increments being for lowering. This means that you should not call any
45 // non-lowering function which will use new node ids.
47 // We must also cache gensym'ed Idents to ensure that we get the same Ident
48 // every time we lower a node with gensym'ed names. One consequence of this is
49 // that you can only gensym a name once in a lowering (you don't need to worry
50 // about nested lowering though). That's because we cache based on the name and
51 // the currently cached node id, which is unique per lowered node.
53 // Spans are used for error messages and for tools to map semantics back to
54 // source code. It is therefore not as important with spans as ids to be strict
55 // about use (you can't break the compiler by screwing up a span). Obviously, a
56 // HIR node can only have a single span. But multiple nodes can have the same
57 // span and spans don't need to be kept in order, etc. Where code is preserved
58 // by lowering, it should have the same span as in the AST. Where HIR nodes are
59 // new it is probably best to give a span for the whole AST node being lowered.
60 // All nodes should have real spans, don't use dummy spans. Tools are likely to
61 // get confused if the spans from leaf AST nodes occur in multiple places
62 // in the HIR, especially for multiple identifiers.
66 use std
::collections
::BTreeMap
;
67 use std
::collections
::HashMap
;
69 use syntax
::attr
::{ThinAttributes, ThinAttributesExt}
;
70 use syntax
::ext
::mtwt
;
72 use syntax
::codemap
::{respan, Spanned, Span}
;
73 use syntax
::parse
::token
;
74 use syntax
::std_inject
;
75 use syntax
::visit
::{self, Visitor}
;
77 use std
::cell
::{Cell, RefCell}
;
79 pub struct LoweringContext
<'a
> {
80 crate_root
: Option
<&'
static str>,
81 // Map AST ids to ids used for expanded nodes.
82 id_cache
: RefCell
<HashMap
<NodeId
, NodeId
>>,
83 // Use if there are no cached ids for the current node.
84 id_assigner
: &'a NodeIdAssigner
,
85 // 0 == no cached id. Must be incremented to align with previous id
88 // Keep track of gensym'ed idents.
89 gensym_cache
: RefCell
<HashMap
<(NodeId
, &'
static str), hir
::Ident
>>,
90 // A copy of cached_id, but is also set to an id while it is being cached.
91 gensym_key
: Cell
<u32>,
94 impl<'a
, 'hir
> LoweringContext
<'a
> {
95 pub fn new(id_assigner
: &'a NodeIdAssigner
, c
: Option
<&Crate
>) -> LoweringContext
<'a
> {
96 let crate_root
= c
.and_then(|c
| {
97 if std_inject
::no_core(c
) {
99 } else if std_inject
::no_std(c
) {
107 crate_root
: crate_root
,
108 id_cache
: RefCell
::new(HashMap
::new()),
109 id_assigner
: id_assigner
,
110 cached_id
: Cell
::new(0),
111 gensym_cache
: RefCell
::new(HashMap
::new()),
112 gensym_key
: Cell
::new(0),
116 fn next_id(&self) -> NodeId
{
117 let cached
= self.cached_id
.get();
119 return self.id_assigner
.next_node_id();
122 self.cached_id
.set(cached
+ 1);
126 fn str_to_ident(&self, s
: &'
static str) -> hir
::Ident
{
127 let cached_id
= self.gensym_key
.get();
129 return hir
::Ident
::from_name(token
::gensym(s
));
132 let cached
= self.gensym_cache
.borrow().contains_key(&(cached_id
, s
));
134 self.gensym_cache
.borrow()[&(cached_id
, s
)]
136 let result
= hir
::Ident
::from_name(token
::gensym(s
));
137 self.gensym_cache
.borrow_mut().insert((cached_id
, s
), result
);
143 pub fn lower_ident(_lctx
: &LoweringContext
, ident
: Ident
) -> hir
::Ident
{
145 name
: mtwt
::resolve(ident
),
146 unhygienic_name
: ident
.name
,
150 pub fn lower_attrs(_lctx
: &LoweringContext
, attrs
: &Vec
<Attribute
>) -> hir
::HirVec
<Attribute
> {
154 pub fn lower_view_path(lctx
: &LoweringContext
, view_path
: &ViewPath
) -> P
<hir
::ViewPath
> {
156 node
: match view_path
.node
{
157 ViewPathSimple(ident
, ref path
) => {
158 hir
::ViewPathSimple(ident
.name
, lower_path(lctx
, path
))
160 ViewPathGlob(ref path
) => {
161 hir
::ViewPathGlob(lower_path(lctx
, path
))
163 ViewPathList(ref path
, ref path_list_idents
) => {
164 hir
::ViewPathList(lower_path(lctx
, path
),
165 path_list_idents
.iter()
166 .map(|path_list_ident
| {
168 node
: match path_list_ident
.node
{
169 PathListIdent { id, name, rename }
=>
173 rename
: rename
.map(|x
| x
.name
),
175 PathListMod { id, rename }
=>
178 rename
: rename
.map(|x
| x
.name
),
181 span
: path_list_ident
.span
,
187 span
: view_path
.span
,
191 pub fn lower_arm(lctx
: &LoweringContext
, arm
: &Arm
) -> hir
::Arm
{
193 attrs
: lower_attrs(lctx
, &arm
.attrs
),
194 pats
: arm
.pats
.iter().map(|x
| lower_pat(lctx
, x
)).collect(),
195 guard
: arm
.guard
.as_ref().map(|ref x
| lower_expr(lctx
, x
)),
196 body
: lower_expr(lctx
, &arm
.body
),
200 pub fn lower_decl(lctx
: &LoweringContext
, d
: &Decl
) -> P
<hir
::Decl
> {
202 DeclLocal(ref l
) => P(Spanned
{
203 node
: hir
::DeclLocal(lower_local(lctx
, l
)),
206 DeclItem(ref it
) => P(Spanned
{
207 node
: hir
::DeclItem(lower_item_id(lctx
, it
)),
213 pub fn lower_ty_binding(lctx
: &LoweringContext
, b
: &TypeBinding
) -> hir
::TypeBinding
{
217 ty
: lower_ty(lctx
, &b
.ty
),
222 pub fn lower_ty(lctx
: &LoweringContext
, t
: &Ty
) -> P
<hir
::Ty
> {
226 TyInfer
=> hir
::TyInfer
,
227 TyVec(ref ty
) => hir
::TyVec(lower_ty(lctx
, ty
)),
228 TyPtr(ref mt
) => hir
::TyPtr(lower_mt(lctx
, mt
)),
229 TyRptr(ref region
, ref mt
) => {
230 hir
::TyRptr(lower_opt_lifetime(lctx
, region
), lower_mt(lctx
, mt
))
233 hir
::TyBareFn(P(hir
::BareFnTy
{
234 lifetimes
: lower_lifetime_defs(lctx
, &f
.lifetimes
),
235 unsafety
: lower_unsafety(lctx
, f
.unsafety
),
237 decl
: lower_fn_decl(lctx
, &f
.decl
),
240 TyTup(ref tys
) => hir
::TyTup(tys
.iter().map(|ty
| lower_ty(lctx
, ty
)).collect()),
242 return lower_ty(lctx
, ty
);
244 TyPath(ref qself
, ref path
) => {
245 let qself
= qself
.as_ref().map(|&QSelf { ref ty, position }
| {
247 ty
: lower_ty(lctx
, ty
),
251 hir
::TyPath(qself
, lower_path(lctx
, path
))
253 TyObjectSum(ref ty
, ref bounds
) => {
254 hir
::TyObjectSum(lower_ty(lctx
, ty
), lower_bounds(lctx
, bounds
))
256 TyFixedLengthVec(ref ty
, ref e
) => {
257 hir
::TyFixedLengthVec(lower_ty(lctx
, ty
), lower_expr(lctx
, e
))
259 TyTypeof(ref expr
) => {
260 hir
::TyTypeof(lower_expr(lctx
, expr
))
262 TyPolyTraitRef(ref bounds
) => {
263 hir
::TyPolyTraitRef(bounds
.iter().map(|b
| lower_ty_param_bound(lctx
, b
)).collect())
265 TyMac(_
) => panic
!("TyMac should have been expanded by now."),
271 pub fn lower_foreign_mod(lctx
: &LoweringContext
, fm
: &ForeignMod
) -> hir
::ForeignMod
{
274 items
: fm
.items
.iter().map(|x
| lower_foreign_item(lctx
, x
)).collect(),
278 pub fn lower_variant(lctx
: &LoweringContext
, v
: &Variant
) -> hir
::Variant
{
280 node
: hir
::Variant_
{
281 name
: v
.node
.name
.name
,
282 attrs
: lower_attrs(lctx
, &v
.node
.attrs
),
283 data
: lower_variant_data(lctx
, &v
.node
.data
),
284 disr_expr
: v
.node
.disr_expr
.as_ref().map(|e
| lower_expr(lctx
, e
)),
290 // Path segments are usually unhygienic, hygienic path segments can occur only in
291 // identifier-like paths originating from `ExprPath`.
292 // Make life simpler for rustc_resolve by renaming only such segments.
293 pub fn lower_path_full(lctx
: &LoweringContext
, p
: &Path
, maybe_hygienic
: bool
) -> hir
::Path
{
294 let maybe_hygienic
= maybe_hygienic
&& !p
.global
&& p
.segments
.len() == 1;
299 .map(|&PathSegment { identifier, ref parameters }
| {
301 identifier
: if maybe_hygienic
{
302 lower_ident(lctx
, identifier
)
304 hir
::Ident
::from_name(identifier
.name
)
306 parameters
: lower_path_parameters(lctx
, parameters
),
314 pub fn lower_path(lctx
: &LoweringContext
, p
: &Path
) -> hir
::Path
{
315 lower_path_full(lctx
, p
, false)
318 pub fn lower_path_parameters(lctx
: &LoweringContext
,
319 path_parameters
: &PathParameters
)
320 -> hir
::PathParameters
{
321 match *path_parameters
{
322 PathParameters
::AngleBracketed(ref data
) =>
323 hir
::AngleBracketedParameters(lower_angle_bracketed_parameter_data(lctx
, data
)),
324 PathParameters
::Parenthesized(ref data
) =>
325 hir
::ParenthesizedParameters(lower_parenthesized_parameter_data(lctx
, data
)),
329 pub fn lower_angle_bracketed_parameter_data(lctx
: &LoweringContext
,
330 data
: &AngleBracketedParameterData
)
331 -> hir
::AngleBracketedParameterData
{
332 let &AngleBracketedParameterData { ref lifetimes, ref types, ref bindings }
= data
;
333 hir
::AngleBracketedParameterData
{
334 lifetimes
: lower_lifetimes(lctx
, lifetimes
),
335 types
: types
.iter().map(|ty
| lower_ty(lctx
, ty
)).collect(),
336 bindings
: bindings
.iter().map(|b
| lower_ty_binding(lctx
, b
)).collect(),
340 pub fn lower_parenthesized_parameter_data(lctx
: &LoweringContext
,
341 data
: &ParenthesizedParameterData
)
342 -> hir
::ParenthesizedParameterData
{
343 let &ParenthesizedParameterData { ref inputs, ref output, span }
= data
;
344 hir
::ParenthesizedParameterData
{
345 inputs
: inputs
.iter().map(|ty
| lower_ty(lctx
, ty
)).collect(),
346 output
: output
.as_ref().map(|ty
| lower_ty(lctx
, ty
)),
351 pub fn lower_local(lctx
: &LoweringContext
, l
: &Local
) -> P
<hir
::Local
> {
354 ty
: l
.ty
.as_ref().map(|t
| lower_ty(lctx
, t
)),
355 pat
: lower_pat(lctx
, &l
.pat
),
356 init
: l
.init
.as_ref().map(|e
| lower_expr(lctx
, e
)),
358 attrs
: l
.attrs
.clone(),
362 pub fn lower_explicit_self_underscore(lctx
: &LoweringContext
,
364 -> hir
::ExplicitSelf_
{
366 SelfStatic
=> hir
::SelfStatic
,
367 SelfValue(v
) => hir
::SelfValue(v
.name
),
368 SelfRegion(ref lifetime
, m
, ident
) => {
369 hir
::SelfRegion(lower_opt_lifetime(lctx
, lifetime
),
370 lower_mutability(lctx
, m
),
373 SelfExplicit(ref typ
, ident
) => {
374 hir
::SelfExplicit(lower_ty(lctx
, typ
), ident
.name
)
379 pub fn lower_mutability(_lctx
: &LoweringContext
, m
: Mutability
) -> hir
::Mutability
{
381 MutMutable
=> hir
::MutMutable
,
382 MutImmutable
=> hir
::MutImmutable
,
386 pub fn lower_explicit_self(lctx
: &LoweringContext
, s
: &ExplicitSelf
) -> hir
::ExplicitSelf
{
388 node
: lower_explicit_self_underscore(lctx
, &s
.node
),
393 pub fn lower_arg(lctx
: &LoweringContext
, arg
: &Arg
) -> hir
::Arg
{
396 pat
: lower_pat(lctx
, &arg
.pat
),
397 ty
: lower_ty(lctx
, &arg
.ty
),
401 pub fn lower_fn_decl(lctx
: &LoweringContext
, decl
: &FnDecl
) -> P
<hir
::FnDecl
> {
403 inputs
: decl
.inputs
.iter().map(|x
| lower_arg(lctx
, x
)).collect(),
404 output
: match decl
.output
{
405 Return(ref ty
) => hir
::Return(lower_ty(lctx
, ty
)),
406 DefaultReturn(span
) => hir
::DefaultReturn(span
),
407 NoReturn(span
) => hir
::NoReturn(span
),
409 variadic
: decl
.variadic
,
413 pub fn lower_ty_param_bound(lctx
: &LoweringContext
, tpb
: &TyParamBound
) -> hir
::TyParamBound
{
415 TraitTyParamBound(ref ty
, modifier
) => {
416 hir
::TraitTyParamBound(lower_poly_trait_ref(lctx
, ty
),
417 lower_trait_bound_modifier(lctx
, modifier
))
419 RegionTyParamBound(ref lifetime
) => {
420 hir
::RegionTyParamBound(lower_lifetime(lctx
, lifetime
))
425 pub fn lower_ty_param(lctx
: &LoweringContext
, tp
: &TyParam
) -> hir
::TyParam
{
429 bounds
: lower_bounds(lctx
, &tp
.bounds
),
430 default: tp
.default.as_ref().map(|x
| lower_ty(lctx
, x
)),
435 pub fn lower_ty_params(lctx
: &LoweringContext
,
437 -> hir
::HirVec
<hir
::TyParam
> {
438 tps
.iter().map(|tp
| lower_ty_param(lctx
, tp
)).collect()
441 pub fn lower_lifetime(_lctx
: &LoweringContext
, l
: &Lifetime
) -> hir
::Lifetime
{
449 pub fn lower_lifetime_def(lctx
: &LoweringContext
, l
: &LifetimeDef
) -> hir
::LifetimeDef
{
451 lifetime
: lower_lifetime(lctx
, &l
.lifetime
),
452 bounds
: lower_lifetimes(lctx
, &l
.bounds
),
456 pub fn lower_lifetimes(lctx
: &LoweringContext
, lts
: &Vec
<Lifetime
>) -> hir
::HirVec
<hir
::Lifetime
> {
457 lts
.iter().map(|l
| lower_lifetime(lctx
, l
)).collect()
460 pub fn lower_lifetime_defs(lctx
: &LoweringContext
,
461 lts
: &Vec
<LifetimeDef
>)
462 -> hir
::HirVec
<hir
::LifetimeDef
> {
463 lts
.iter().map(|l
| lower_lifetime_def(lctx
, l
)).collect()
466 pub fn lower_opt_lifetime(lctx
: &LoweringContext
,
467 o_lt
: &Option
<Lifetime
>)
468 -> Option
<hir
::Lifetime
> {
469 o_lt
.as_ref().map(|lt
| lower_lifetime(lctx
, lt
))
472 pub fn lower_generics(lctx
: &LoweringContext
, g
: &Generics
) -> hir
::Generics
{
474 ty_params
: lower_ty_params(lctx
, &g
.ty_params
),
475 lifetimes
: lower_lifetime_defs(lctx
, &g
.lifetimes
),
476 where_clause
: lower_where_clause(lctx
, &g
.where_clause
),
480 pub fn lower_where_clause(lctx
: &LoweringContext
, wc
: &WhereClause
) -> hir
::WhereClause
{
483 predicates
: wc
.predicates
485 .map(|predicate
| lower_where_predicate(lctx
, predicate
))
490 pub fn lower_where_predicate(lctx
: &LoweringContext
,
491 pred
: &WherePredicate
)
492 -> hir
::WherePredicate
{
494 WherePredicate
::BoundPredicate(WhereBoundPredicate
{ ref bound_lifetimes
,
498 hir
::WherePredicate
::BoundPredicate(hir
::WhereBoundPredicate
{
499 bound_lifetimes
: lower_lifetime_defs(lctx
, bound_lifetimes
),
500 bounded_ty
: lower_ty(lctx
, bounded_ty
),
501 bounds
: bounds
.iter().map(|x
| lower_ty_param_bound(lctx
, x
)).collect(),
505 WherePredicate
::RegionPredicate(WhereRegionPredicate
{ ref lifetime
,
508 hir
::WherePredicate
::RegionPredicate(hir
::WhereRegionPredicate
{
510 lifetime
: lower_lifetime(lctx
, lifetime
),
511 bounds
: bounds
.iter().map(|bound
| lower_lifetime(lctx
, bound
)).collect(),
514 WherePredicate
::EqPredicate(WhereEqPredicate
{ id
,
518 hir
::WherePredicate
::EqPredicate(hir
::WhereEqPredicate
{
520 path
: lower_path(lctx
, path
),
521 ty
: lower_ty(lctx
, ty
),
528 pub fn lower_variant_data(lctx
: &LoweringContext
, vdata
: &VariantData
) -> hir
::VariantData
{
530 VariantData
::Struct(ref fields
, id
) => {
531 hir
::VariantData
::Struct(fields
.iter()
532 .map(|f
| lower_struct_field(lctx
, f
))
536 VariantData
::Tuple(ref fields
, id
) => {
537 hir
::VariantData
::Tuple(fields
.iter()
538 .map(|f
| lower_struct_field(lctx
, f
))
542 VariantData
::Unit(id
) => hir
::VariantData
::Unit(id
),
546 pub fn lower_trait_ref(lctx
: &LoweringContext
, p
: &TraitRef
) -> hir
::TraitRef
{
548 path
: lower_path(lctx
, &p
.path
),
553 pub fn lower_poly_trait_ref(lctx
: &LoweringContext
, p
: &PolyTraitRef
) -> hir
::PolyTraitRef
{
555 bound_lifetimes
: lower_lifetime_defs(lctx
, &p
.bound_lifetimes
),
556 trait_ref
: lower_trait_ref(lctx
, &p
.trait_ref
),
561 pub fn lower_struct_field(lctx
: &LoweringContext
, f
: &StructField
) -> hir
::StructField
{
563 node
: hir
::StructField_
{
565 kind
: lower_struct_field_kind(lctx
, &f
.node
.kind
),
566 ty
: lower_ty(lctx
, &f
.node
.ty
),
567 attrs
: lower_attrs(lctx
, &f
.node
.attrs
),
573 pub fn lower_field(lctx
: &LoweringContext
, f
: &Field
) -> hir
::Field
{
575 name
: respan(f
.ident
.span
, f
.ident
.node
.name
),
576 expr
: lower_expr(lctx
, &f
.expr
),
581 pub fn lower_mt(lctx
: &LoweringContext
, mt
: &MutTy
) -> hir
::MutTy
{
583 ty
: lower_ty(lctx
, &mt
.ty
),
584 mutbl
: lower_mutability(lctx
, mt
.mutbl
),
588 pub fn lower_opt_bounds(lctx
: &LoweringContext
,
589 b
: &Option
<TyParamBounds
>)
590 -> Option
<hir
::TyParamBounds
> {
591 b
.as_ref().map(|ref bounds
| lower_bounds(lctx
, bounds
))
594 fn lower_bounds(lctx
: &LoweringContext
, bounds
: &TyParamBounds
) -> hir
::TyParamBounds
{
595 bounds
.iter().map(|bound
| lower_ty_param_bound(lctx
, bound
)).collect()
598 pub fn lower_block(lctx
: &LoweringContext
, b
: &Block
) -> P
<hir
::Block
> {
601 stmts
: b
.stmts
.iter().map(|s
| lower_stmt(lctx
, s
)).collect(),
602 expr
: b
.expr
.as_ref().map(|ref x
| lower_expr(lctx
, x
)),
603 rules
: lower_block_check_mode(lctx
, &b
.rules
),
608 pub fn lower_item_underscore(lctx
: &LoweringContext
, i
: &Item_
) -> hir
::Item_
{
610 ItemExternCrate(string
) => hir
::ItemExternCrate(string
),
611 ItemUse(ref view_path
) => {
612 hir
::ItemUse(lower_view_path(lctx
, view_path
))
614 ItemStatic(ref t
, m
, ref e
) => {
615 hir
::ItemStatic(lower_ty(lctx
, t
),
616 lower_mutability(lctx
, m
),
619 ItemConst(ref t
, ref e
) => {
620 hir
::ItemConst(lower_ty(lctx
, t
), lower_expr(lctx
, e
))
622 ItemFn(ref decl
, unsafety
, constness
, abi
, ref generics
, ref body
) => {
623 hir
::ItemFn(lower_fn_decl(lctx
, decl
),
624 lower_unsafety(lctx
, unsafety
),
625 lower_constness(lctx
, constness
),
627 lower_generics(lctx
, generics
),
628 lower_block(lctx
, body
))
630 ItemMod(ref m
) => hir
::ItemMod(lower_mod(lctx
, m
)),
631 ItemForeignMod(ref nm
) => hir
::ItemForeignMod(lower_foreign_mod(lctx
, nm
)),
632 ItemTy(ref t
, ref generics
) => {
633 hir
::ItemTy(lower_ty(lctx
, t
), lower_generics(lctx
, generics
))
635 ItemEnum(ref enum_definition
, ref generics
) => {
636 hir
::ItemEnum(hir
::EnumDef
{
637 variants
: enum_definition
.variants
639 .map(|x
| lower_variant(lctx
, x
))
642 lower_generics(lctx
, generics
))
644 ItemStruct(ref struct_def
, ref generics
) => {
645 let struct_def
= lower_variant_data(lctx
, struct_def
);
646 hir
::ItemStruct(struct_def
, lower_generics(lctx
, generics
))
648 ItemDefaultImpl(unsafety
, ref trait_ref
) => {
649 hir
::ItemDefaultImpl(lower_unsafety(lctx
, unsafety
),
650 lower_trait_ref(lctx
, trait_ref
))
652 ItemImpl(unsafety
, polarity
, ref generics
, ref ifce
, ref ty
, ref impl_items
) => {
653 let new_impl_items
= impl_items
.iter()
654 .map(|item
| lower_impl_item(lctx
, item
))
656 let ifce
= ifce
.as_ref().map(|trait_ref
| lower_trait_ref(lctx
, trait_ref
));
657 hir
::ItemImpl(lower_unsafety(lctx
, unsafety
),
658 lower_impl_polarity(lctx
, polarity
),
659 lower_generics(lctx
, generics
),
664 ItemTrait(unsafety
, ref generics
, ref bounds
, ref items
) => {
665 let bounds
= lower_bounds(lctx
, bounds
);
666 let items
= items
.iter().map(|item
| lower_trait_item(lctx
, item
)).collect();
667 hir
::ItemTrait(lower_unsafety(lctx
, unsafety
),
668 lower_generics(lctx
, generics
),
672 ItemMac(_
) => panic
!("Shouldn't still be around"),
676 pub fn lower_trait_item(lctx
: &LoweringContext
, i
: &TraitItem
) -> hir
::TraitItem
{
680 attrs
: lower_attrs(lctx
, &i
.attrs
),
682 ConstTraitItem(ref ty
, ref default) => {
683 hir
::ConstTraitItem(lower_ty(lctx
, ty
),
684 default.as_ref().map(|x
| lower_expr(lctx
, x
)))
686 MethodTraitItem(ref sig
, ref body
) => {
687 hir
::MethodTraitItem(lower_method_sig(lctx
, sig
),
688 body
.as_ref().map(|x
| lower_block(lctx
, x
)))
690 TypeTraitItem(ref bounds
, ref default) => {
691 hir
::TypeTraitItem(lower_bounds(lctx
, bounds
),
692 default.as_ref().map(|x
| lower_ty(lctx
, x
)))
699 pub fn lower_impl_item(lctx
: &LoweringContext
, i
: &ImplItem
) -> hir
::ImplItem
{
703 attrs
: lower_attrs(lctx
, &i
.attrs
),
704 vis
: lower_visibility(lctx
, i
.vis
),
706 ImplItemKind
::Const(ref ty
, ref expr
) => {
707 hir
::ImplItemKind
::Const(lower_ty(lctx
, ty
), lower_expr(lctx
, expr
))
709 ImplItemKind
::Method(ref sig
, ref body
) => {
710 hir
::ImplItemKind
::Method(lower_method_sig(lctx
, sig
), lower_block(lctx
, body
))
712 ImplItemKind
::Type(ref ty
) => hir
::ImplItemKind
::Type(lower_ty(lctx
, ty
)),
713 ImplItemKind
::Macro(..) => panic
!("Shouldn't exist any more"),
719 pub fn lower_mod(lctx
: &LoweringContext
, m
: &Mod
) -> hir
::Mod
{
722 item_ids
: m
.items
.iter().map(|x
| lower_item_id(lctx
, x
)).collect(),
726 struct ItemLowerer
<'lcx
, 'interner
: 'lcx
> {
727 items
: BTreeMap
<NodeId
, hir
::Item
>,
728 lctx
: &'lcx LoweringContext
<'interner
>,
731 impl<'lcx
, 'interner
> Visitor
<'lcx
> for ItemLowerer
<'lcx
, 'interner
> {
732 fn visit_item(&mut self, item
: &'lcx Item
) {
733 self.items
.insert(item
.id
, lower_item(self.lctx
, item
));
734 visit
::walk_item(self, item
);
738 pub fn lower_crate(lctx
: &LoweringContext
, c
: &Crate
) -> hir
::Crate
{
740 let mut item_lowerer
= ItemLowerer { items: BTreeMap::new(), lctx: lctx }
;
741 visit
::walk_crate(&mut item_lowerer
, c
);
746 module
: lower_mod(lctx
, &c
.module
),
747 attrs
: lower_attrs(lctx
, &c
.attrs
),
748 config
: c
.config
.clone().into(),
750 exported_macros
: c
.exported_macros
.iter().map(|m
| lower_macro_def(lctx
, m
)).collect(),
755 pub fn lower_macro_def(lctx
: &LoweringContext
, m
: &MacroDef
) -> hir
::MacroDef
{
758 attrs
: lower_attrs(lctx
, &m
.attrs
),
761 imported_from
: m
.imported_from
.map(|x
| x
.name
),
763 use_locally
: m
.use_locally
,
764 allow_internal_unstable
: m
.allow_internal_unstable
,
765 body
: m
.body
.clone().into(),
769 pub fn lower_item_id(_lctx
: &LoweringContext
, i
: &Item
) -> hir
::ItemId
{
770 hir
::ItemId { id: i.id }
773 pub fn lower_item(lctx
: &LoweringContext
, i
: &Item
) -> hir
::Item
{
774 let node
= lower_item_underscore(lctx
, &i
.node
);
779 attrs
: lower_attrs(lctx
, &i
.attrs
),
781 vis
: lower_visibility(lctx
, i
.vis
),
786 pub fn lower_foreign_item(lctx
: &LoweringContext
, i
: &ForeignItem
) -> hir
::ForeignItem
{
790 attrs
: lower_attrs(lctx
, &i
.attrs
),
792 ForeignItemFn(ref fdec
, ref generics
) => {
793 hir
::ForeignItemFn(lower_fn_decl(lctx
, fdec
), lower_generics(lctx
, generics
))
795 ForeignItemStatic(ref t
, m
) => {
796 hir
::ForeignItemStatic(lower_ty(lctx
, t
), m
)
799 vis
: lower_visibility(lctx
, i
.vis
),
804 pub fn lower_method_sig(lctx
: &LoweringContext
, sig
: &MethodSig
) -> hir
::MethodSig
{
806 generics
: lower_generics(lctx
, &sig
.generics
),
808 explicit_self
: lower_explicit_self(lctx
, &sig
.explicit_self
),
809 unsafety
: lower_unsafety(lctx
, sig
.unsafety
),
810 constness
: lower_constness(lctx
, sig
.constness
),
811 decl
: lower_fn_decl(lctx
, &sig
.decl
),
815 pub fn lower_unsafety(_lctx
: &LoweringContext
, u
: Unsafety
) -> hir
::Unsafety
{
817 Unsafety
::Unsafe
=> hir
::Unsafety
::Unsafe
,
818 Unsafety
::Normal
=> hir
::Unsafety
::Normal
,
822 pub fn lower_constness(_lctx
: &LoweringContext
, c
: Constness
) -> hir
::Constness
{
824 Constness
::Const
=> hir
::Constness
::Const
,
825 Constness
::NotConst
=> hir
::Constness
::NotConst
,
829 pub fn lower_unop(_lctx
: &LoweringContext
, u
: UnOp
) -> hir
::UnOp
{
831 UnDeref
=> hir
::UnDeref
,
837 pub fn lower_binop(_lctx
: &LoweringContext
, b
: BinOp
) -> hir
::BinOp
{
847 BiBitXor
=> hir
::BiBitXor
,
848 BiBitAnd
=> hir
::BiBitAnd
,
849 BiBitOr
=> hir
::BiBitOr
,
863 pub fn lower_pat(lctx
: &LoweringContext
, p
: &Pat
) -> P
<hir
::Pat
> {
867 PatWild
=> hir
::PatWild
,
868 PatIdent(ref binding_mode
, pth1
, ref sub
) => {
869 hir
::PatIdent(lower_binding_mode(lctx
, binding_mode
),
870 respan(pth1
.span
, lower_ident(lctx
, pth1
.node
)),
871 sub
.as_ref().map(|x
| lower_pat(lctx
, x
)))
873 PatLit(ref e
) => hir
::PatLit(lower_expr(lctx
, e
)),
874 PatEnum(ref pth
, ref pats
) => {
875 hir
::PatEnum(lower_path(lctx
, pth
),
877 .map(|pats
| pats
.iter().map(|x
| lower_pat(lctx
, x
)).collect()))
879 PatQPath(ref qself
, ref pth
) => {
880 let qself
= hir
::QSelf
{
881 ty
: lower_ty(lctx
, &qself
.ty
),
882 position
: qself
.position
,
884 hir
::PatQPath(qself
, lower_path(lctx
, pth
))
886 PatStruct(ref pth
, ref fields
, etc
) => {
887 let pth
= lower_path(lctx
, pth
);
888 let fs
= fields
.iter()
892 node
: hir
::FieldPat
{
893 name
: f
.node
.ident
.name
,
894 pat
: lower_pat(lctx
, &f
.node
.pat
),
895 is_shorthand
: f
.node
.is_shorthand
,
900 hir
::PatStruct(pth
, fs
, etc
)
902 PatTup(ref elts
) => hir
::PatTup(elts
.iter().map(|x
| lower_pat(lctx
, x
)).collect()),
903 PatBox(ref inner
) => hir
::PatBox(lower_pat(lctx
, inner
)),
904 PatRegion(ref inner
, mutbl
) => {
905 hir
::PatRegion(lower_pat(lctx
, inner
), lower_mutability(lctx
, mutbl
))
907 PatRange(ref e1
, ref e2
) => {
908 hir
::PatRange(lower_expr(lctx
, e1
), lower_expr(lctx
, e2
))
910 PatVec(ref before
, ref slice
, ref after
) => {
911 hir
::PatVec(before
.iter().map(|x
| lower_pat(lctx
, x
)).collect(),
912 slice
.as_ref().map(|x
| lower_pat(lctx
, x
)),
913 after
.iter().map(|x
| lower_pat(lctx
, x
)).collect())
915 PatMac(_
) => panic
!("Shouldn't exist here"),
921 // Utility fn for setting and unsetting the cached id.
922 fn cache_ids
<'a
, OP
, R
>(lctx
: &LoweringContext
, expr_id
: NodeId
, op
: OP
) -> R
923 where OP
: FnOnce(&LoweringContext
) -> R
925 // Only reset the id if it was previously 0, i.e., was not cached.
926 // If it was cached, we are in a nested node, but our id count will
927 // still count towards the parent's count.
928 let reset_cached_id
= lctx
.cached_id
.get() == 0;
931 let id_cache
: &mut HashMap
<_
, _
> = &mut lctx
.id_cache
.borrow_mut();
933 if id_cache
.contains_key(&expr_id
) {
934 let cached_id
= lctx
.cached_id
.get();
936 // We're entering a node where we need to track ids, but are not
938 lctx
.cached_id
.set(id_cache
[&expr_id
]);
939 lctx
.gensym_key
.set(id_cache
[&expr_id
]);
941 // We're already tracking - check that the tracked id is the same
942 // as the expected id.
943 assert
!(cached_id
== id_cache
[&expr_id
], "id mismatch");
946 let next_id
= lctx
.id_assigner
.peek_node_id();
947 id_cache
.insert(expr_id
, next_id
);
948 lctx
.gensym_key
.set(next_id
);
952 let result
= op(lctx
);
955 lctx
.cached_id
.set(0);
956 lctx
.gensym_key
.set(0);
962 pub fn lower_expr(lctx
: &LoweringContext
, e
: &Expr
) -> P
<hir
::Expr
> {
967 // Eventually a desugaring for `box EXPR`
968 // (similar to the desugaring above for `in PLACE BLOCK`)
969 // should go here, desugaring
973 // let mut place = BoxPlace::make_place();
974 // let raw_place = Place::pointer(&mut place);
975 // let value = $value;
977 // ::std::ptr::write(raw_place, value);
978 // Boxed::finalize(place)
981 // But for now there are type-inference issues doing that.
983 hir
::ExprBox(lower_expr(lctx
, e
))
986 // Desugar ExprBox: `in (PLACE) EXPR`
987 ExprInPlace(ref placer
, ref value_expr
) => {
991 // let mut place = Placer::make_place(p);
992 // let raw_place = Place::pointer(&mut place);
994 // std::intrinsics::move_val_init(raw_place, pop_unsafe!( EXPR ));
995 // InPlace::finalize(place)
997 return cache_ids(lctx
, e
.id
, |lctx
| {
998 let placer_expr
= lower_expr(lctx
, placer
);
999 let value_expr
= lower_expr(lctx
, value_expr
);
1001 let placer_ident
= lctx
.str_to_ident("placer");
1002 let place_ident
= lctx
.str_to_ident("place");
1003 let p_ptr_ident
= lctx
.str_to_ident("p_ptr");
1005 let make_place
= ["ops", "Placer", "make_place"];
1006 let place_pointer
= ["ops", "Place", "pointer"];
1007 let move_val_init
= ["intrinsics", "move_val_init"];
1008 let inplace_finalize
= ["ops", "InPlace", "finalize"];
1010 let make_call
= |lctx
: &LoweringContext
, p
, args
| {
1011 let path
= core_path(lctx
, e
.span
, p
);
1012 let path
= expr_path(lctx
, path
, None
);
1013 expr_call(lctx
, e
.span
, path
, args
, None
)
1016 let mk_stmt_let
= |lctx
: &LoweringContext
, bind
, expr
| {
1017 stmt_let(lctx
, e
.span
, false, bind
, expr
, None
)
1020 let mk_stmt_let_mut
= |lctx
: &LoweringContext
, bind
, expr
| {
1021 stmt_let(lctx
, e
.span
, true, bind
, expr
, None
)
1024 // let placer = <placer_expr> ;
1026 let placer_expr
= signal_block_expr(lctx
,
1030 hir
::PopUnstableBlock
,
1032 mk_stmt_let(lctx
, placer_ident
, placer_expr
)
1035 // let mut place = Placer::make_place(placer);
1037 let placer
= expr_ident(lctx
, e
.span
, placer_ident
, None
);
1038 let call
= make_call(lctx
, &make_place
, hir_vec
![placer
]);
1039 mk_stmt_let_mut(lctx
, place_ident
, call
)
1042 // let p_ptr = Place::pointer(&mut place);
1044 let agent
= expr_ident(lctx
, e
.span
, place_ident
, None
);
1045 let args
= hir_vec
![expr_mut_addr_of(lctx
, e
.span
, agent
, None
)];
1046 let call
= make_call(lctx
, &place_pointer
, args
);
1047 mk_stmt_let(lctx
, p_ptr_ident
, call
)
1050 // pop_unsafe!(EXPR));
1051 let pop_unsafe_expr
= {
1052 let value_expr
= signal_block_expr(lctx
,
1056 hir
::PopUnstableBlock
,
1058 signal_block_expr(lctx
,
1062 hir
::PopUnsafeBlock(hir
::CompilerGenerated
), None
)
1066 // std::intrinsics::move_val_init(raw_place, pop_unsafe!( EXPR ));
1067 // InPlace::finalize(place)
1070 let ptr
= expr_ident(lctx
, e
.span
, p_ptr_ident
, None
);
1071 let call_move_val_init
=
1073 make_call(lctx
, &move_val_init
, hir_vec
![ptr
, pop_unsafe_expr
]),
1075 let call_move_val_init
= respan(e
.span
, call_move_val_init
);
1077 let place
= expr_ident(lctx
, e
.span
, place_ident
, None
);
1078 let call
= make_call(lctx
, &inplace_finalize
, hir_vec
![place
]);
1079 signal_block_expr(lctx
,
1080 hir_vec
![call_move_val_init
],
1083 hir
::PushUnsafeBlock(hir
::CompilerGenerated
), None
)
1086 signal_block_expr(lctx
,
1087 hir_vec
![s1
, s2
, s3
],
1090 hir
::PushUnstableBlock
,
1095 ExprVec(ref exprs
) => {
1096 hir
::ExprVec(exprs
.iter().map(|x
| lower_expr(lctx
, x
)).collect())
1098 ExprRepeat(ref expr
, ref count
) => {
1099 let expr
= lower_expr(lctx
, expr
);
1100 let count
= lower_expr(lctx
, count
);
1101 hir
::ExprRepeat(expr
, count
)
1103 ExprTup(ref elts
) => {
1104 hir
::ExprTup(elts
.iter().map(|x
| lower_expr(lctx
, x
)).collect())
1106 ExprCall(ref f
, ref args
) => {
1107 let f
= lower_expr(lctx
, f
);
1108 hir
::ExprCall(f
, args
.iter().map(|x
| lower_expr(lctx
, x
)).collect())
1110 ExprMethodCall(i
, ref tps
, ref args
) => {
1111 let tps
= tps
.iter().map(|x
| lower_ty(lctx
, x
)).collect();
1112 let args
= args
.iter().map(|x
| lower_expr(lctx
, x
)).collect();
1113 hir
::ExprMethodCall(respan(i
.span
, i
.node
.name
), tps
, args
)
1115 ExprBinary(binop
, ref lhs
, ref rhs
) => {
1116 let binop
= lower_binop(lctx
, binop
);
1117 let lhs
= lower_expr(lctx
, lhs
);
1118 let rhs
= lower_expr(lctx
, rhs
);
1119 hir
::ExprBinary(binop
, lhs
, rhs
)
1121 ExprUnary(op
, ref ohs
) => {
1122 let op
= lower_unop(lctx
, op
);
1123 let ohs
= lower_expr(lctx
, ohs
);
1124 hir
::ExprUnary(op
, ohs
)
1126 ExprLit(ref l
) => hir
::ExprLit(P((**l
).clone())),
1127 ExprCast(ref expr
, ref ty
) => {
1128 let expr
= lower_expr(lctx
, expr
);
1129 hir
::ExprCast(expr
, lower_ty(lctx
, ty
))
1131 ExprType(ref expr
, ref ty
) => {
1132 let expr
= lower_expr(lctx
, expr
);
1133 hir
::ExprType(expr
, lower_ty(lctx
, ty
))
1135 ExprAddrOf(m
, ref ohs
) => {
1136 let m
= lower_mutability(lctx
, m
);
1137 let ohs
= lower_expr(lctx
, ohs
);
1138 hir
::ExprAddrOf(m
, ohs
)
1140 // More complicated than you might expect because the else branch
1141 // might be `if let`.
1142 ExprIf(ref cond
, ref blk
, ref else_opt
) => {
1143 let else_opt
= else_opt
.as_ref().map(|els
| {
1146 cache_ids(lctx
, e
.id
, |lctx
| {
1147 // wrap the if-let expr in a block
1148 let span
= els
.span
;
1149 let els
= lower_expr(lctx
, els
);
1150 let id
= lctx
.next_id();
1151 let blk
= P(hir
::Block
{
1155 rules
: hir
::DefaultBlock
,
1158 expr_block(lctx
, blk
, None
)
1161 _
=> lower_expr(lctx
, els
),
1165 hir
::ExprIf(lower_expr(lctx
, cond
), lower_block(lctx
, blk
), else_opt
)
1167 ExprWhile(ref cond
, ref body
, opt_ident
) => {
1168 hir
::ExprWhile(lower_expr(lctx
, cond
), lower_block(lctx
, body
),
1169 opt_ident
.map(|ident
| lower_ident(lctx
, ident
)))
1171 ExprLoop(ref body
, opt_ident
) => {
1172 hir
::ExprLoop(lower_block(lctx
, body
),
1173 opt_ident
.map(|ident
| lower_ident(lctx
, ident
)))
1175 ExprMatch(ref expr
, ref arms
) => {
1176 hir
::ExprMatch(lower_expr(lctx
, expr
),
1177 arms
.iter().map(|x
| lower_arm(lctx
, x
)).collect(),
1178 hir
::MatchSource
::Normal
)
1180 ExprClosure(capture_clause
, ref decl
, ref body
) => {
1181 hir
::ExprClosure(lower_capture_clause(lctx
, capture_clause
),
1182 lower_fn_decl(lctx
, decl
),
1183 lower_block(lctx
, body
))
1185 ExprBlock(ref blk
) => hir
::ExprBlock(lower_block(lctx
, blk
)),
1186 ExprAssign(ref el
, ref er
) => {
1187 hir
::ExprAssign(lower_expr(lctx
, el
), lower_expr(lctx
, er
))
1189 ExprAssignOp(op
, ref el
, ref er
) => {
1190 hir
::ExprAssignOp(lower_binop(lctx
, op
),
1191 lower_expr(lctx
, el
),
1192 lower_expr(lctx
, er
))
1194 ExprField(ref el
, ident
) => {
1195 hir
::ExprField(lower_expr(lctx
, el
), respan(ident
.span
, ident
.node
.name
))
1197 ExprTupField(ref el
, ident
) => {
1198 hir
::ExprTupField(lower_expr(lctx
, el
), ident
)
1200 ExprIndex(ref el
, ref er
) => {
1201 hir
::ExprIndex(lower_expr(lctx
, el
), lower_expr(lctx
, er
))
1203 ExprRange(ref e1
, ref e2
) => {
1204 hir
::ExprRange(e1
.as_ref().map(|x
| lower_expr(lctx
, x
)),
1205 e2
.as_ref().map(|x
| lower_expr(lctx
, x
)))
1207 ExprPath(ref qself
, ref path
) => {
1208 let hir_qself
= qself
.as_ref().map(|&QSelf { ref ty, position }
| {
1210 ty
: lower_ty(lctx
, ty
),
1214 hir
::ExprPath(hir_qself
, lower_path_full(lctx
, path
, qself
.is_none()))
1216 ExprBreak(opt_ident
) => hir
::ExprBreak(opt_ident
.map(|sp_ident
| {
1217 respan(sp_ident
.span
, lower_ident(lctx
, sp_ident
.node
))
1219 ExprAgain(opt_ident
) => hir
::ExprAgain(opt_ident
.map(|sp_ident
| {
1220 respan(sp_ident
.span
, lower_ident(lctx
, sp_ident
.node
))
1222 ExprRet(ref e
) => hir
::ExprRet(e
.as_ref().map(|x
| lower_expr(lctx
, x
))),
1223 ExprInlineAsm(InlineAsm
{
1233 }) => hir
::ExprInlineAsm(hir
::InlineAsm
{
1234 inputs
: inputs
.iter()
1235 .map(|&(ref c
, ref input
)| (c
.clone(), lower_expr(lctx
, input
)))
1237 outputs
: outputs
.iter()
1239 hir
::InlineAsmOutput
{
1240 constraint
: out
.constraint
.clone(),
1241 expr
: lower_expr(lctx
, &out
.expr
),
1243 is_indirect
: out
.is_indirect
,
1248 asm_str_style
: asm_str_style
,
1249 clobbers
: clobbers
.clone().into(),
1251 alignstack
: alignstack
,
1255 ExprStruct(ref path
, ref fields
, ref maybe_expr
) => {
1256 hir
::ExprStruct(lower_path(lctx
, path
),
1257 fields
.iter().map(|x
| lower_field(lctx
, x
)).collect(),
1258 maybe_expr
.as_ref().map(|x
| lower_expr(lctx
, x
)))
1260 ExprParen(ref ex
) => {
1261 // merge attributes into the inner expression.
1262 return lower_expr(lctx
, ex
).map(|mut ex
| {
1263 ex
.attrs
.update(|attrs
| {
1264 attrs
.prepend(e
.attrs
.clone())
1270 // Desugar ExprIfLet
1271 // From: `if let <pat> = <sub_expr> <body> [<else_opt>]`
1272 ExprIfLet(ref pat
, ref sub_expr
, ref body
, ref else_opt
) => {
1275 // match <sub_expr> {
1277 // [_ if <else_opt_if_cond> => <else_opt_if_body>,]
1278 // _ => [<else_opt> | ()]
1281 return cache_ids(lctx
, e
.id
, |lctx
| {
1282 // `<pat> => <body>`
1284 let body
= lower_block(lctx
, body
);
1285 let body_expr
= expr_block(lctx
, body
, None
);
1286 arm(hir_vec
![lower_pat(lctx
, pat
)], body_expr
)
1289 // `[_ if <else_opt_if_cond> => <else_opt_if_body>,]`
1290 let mut else_opt
= else_opt
.as_ref().map(|e
| lower_expr(lctx
, e
));
1291 let else_if_arms
= {
1292 let mut arms
= vec
![];
1294 let else_opt_continue
= else_opt
.and_then(|els
| {
1295 els
.and_then(|els
| {
1298 hir
::ExprIf(cond
, then
, else_opt
) => {
1299 let pat_under
= pat_wild(lctx
, e
.span
);
1300 arms
.push(hir
::Arm
{
1302 pats
: hir_vec
![pat_under
],
1304 body
: expr_block(lctx
, then
, None
),
1306 else_opt
.map(|else_opt
| (else_opt
, true))
1308 _
=> Some((P(els
), false)),
1312 match else_opt_continue
{
1313 Some((e
, true)) => {
1316 Some((e
, false)) => {
1329 let contains_else_clause
= else_opt
.is_some();
1331 // `_ => [<else_opt> | ()]`
1333 let pat_under
= pat_wild(lctx
, e
.span
);
1335 else_opt
.unwrap_or_else(
1336 || expr_tuple(lctx
, e
.span
, hir_vec
![], None
));
1337 arm(hir_vec
![pat_under
], else_expr
)
1340 let mut arms
= Vec
::with_capacity(else_if_arms
.len() + 2);
1342 arms
.extend(else_if_arms
);
1343 arms
.push(else_arm
);
1345 let sub_expr
= lower_expr(lctx
, sub_expr
);
1346 // add attributes to the outer returned expr node
1349 hir
::ExprMatch(sub_expr
,
1351 hir
::MatchSource
::IfLetDesugar
{
1352 contains_else_clause
: contains_else_clause
,
1358 // Desugar ExprWhileLet
1359 // From: `[opt_ident]: while let <pat> = <sub_expr> <body>`
1360 ExprWhileLet(ref pat
, ref sub_expr
, ref body
, opt_ident
) => {
1363 // [opt_ident]: loop {
1364 // match <sub_expr> {
1370 return cache_ids(lctx
, e
.id
, |lctx
| {
1371 // `<pat> => <body>`
1373 let body
= lower_block(lctx
, body
);
1374 let body_expr
= expr_block(lctx
, body
, None
);
1375 arm(hir_vec
![lower_pat(lctx
, pat
)], body_expr
)
1380 let pat_under
= pat_wild(lctx
, e
.span
);
1381 let break_expr
= expr_break(lctx
, e
.span
, None
);
1382 arm(hir_vec
![pat_under
], break_expr
)
1385 // `match <sub_expr> { ... }`
1386 let arms
= hir_vec
![pat_arm
, break_arm
];
1387 let sub_expr
= lower_expr(lctx
, sub_expr
);
1388 let match_expr
= expr(lctx
,
1390 hir
::ExprMatch(sub_expr
,
1392 hir
::MatchSource
::WhileLetDesugar
),
1395 // `[opt_ident]: loop { ... }`
1396 let loop_block
= block_expr(lctx
, match_expr
);
1397 let loop_expr
= hir
::ExprLoop(loop_block
,
1398 opt_ident
.map(|ident
| lower_ident(lctx
, ident
)));
1399 // add attributes to the outer returned expr node
1400 expr(lctx
, e
.span
, loop_expr
, e
.attrs
.clone())
1404 // Desugar ExprForLoop
1405 // From: `[opt_ident]: for <pat> in <head> <body>`
1406 ExprForLoop(ref pat
, ref head
, ref body
, opt_ident
) => {
1410 // let result = match ::std::iter::IntoIterator::into_iter(<head>) {
1412 // [opt_ident]: loop {
1413 // match ::std::iter::Iterator::next(&mut iter) {
1414 // ::std::option::Option::Some(<pat>) => <body>,
1415 // ::std::option::Option::None => break
1423 return cache_ids(lctx
, e
.id
, |lctx
| {
1425 let head
= lower_expr(lctx
, head
);
1427 let iter
= lctx
.str_to_ident("iter");
1429 // `::std::option::Option::Some(<pat>) => <body>`
1431 let body_block
= lower_block(lctx
, body
);
1432 let body_span
= body_block
.span
;
1433 let body_expr
= P(hir
::Expr
{
1435 node
: hir
::ExprBlock(body_block
),
1439 let pat
= lower_pat(lctx
, pat
);
1440 let some_pat
= pat_some(lctx
, e
.span
, pat
);
1442 arm(hir_vec
![some_pat
], body_expr
)
1445 // `::std::option::Option::None => break`
1447 let break_expr
= expr_break(lctx
, e
.span
, None
);
1449 arm(hir_vec
![pat_none(lctx
, e
.span
)], break_expr
)
1452 // `match ::std::iter::Iterator::next(&mut iter) { ... }`
1455 let strs
= std_path(lctx
, &["iter", "Iterator", "next"]);
1457 path_global(e
.span
, strs
)
1459 let iter
= expr_ident(lctx
, e
.span
, iter
, None
);
1460 let ref_mut_iter
= expr_mut_addr_of(lctx
, e
.span
, iter
, None
);
1461 let next_path
= expr_path(lctx
, next_path
, None
);
1462 let next_expr
= expr_call(lctx
,
1465 hir_vec
![ref_mut_iter
],
1467 let arms
= hir_vec
![pat_arm
, break_arm
];
1471 hir
::ExprMatch(next_expr
, arms
, hir
::MatchSource
::ForLoopDesugar
),
1475 // `[opt_ident]: loop { ... }`
1476 let loop_block
= block_expr(lctx
, match_expr
);
1477 let loop_expr
= hir
::ExprLoop(loop_block
,
1478 opt_ident
.map(|ident
| lower_ident(lctx
, ident
)));
1479 let loop_expr
= expr(lctx
, e
.span
, loop_expr
, None
);
1481 // `mut iter => { ... }`
1483 let iter_pat
= pat_ident_binding_mode(lctx
,
1486 hir
::BindByValue(hir
::MutMutable
));
1487 arm(hir_vec
![iter_pat
], loop_expr
)
1490 // `match ::std::iter::IntoIterator::into_iter(<head>) { ... }`
1491 let into_iter_expr
= {
1492 let into_iter_path
= {
1493 let strs
= std_path(lctx
, &["iter", "IntoIterator", "into_iter"]);
1495 path_global(e
.span
, strs
)
1498 let into_iter
= expr_path(lctx
, into_iter_path
, None
);
1499 expr_call(lctx
, e
.span
, into_iter
, hir_vec
![head
], None
)
1502 let match_expr
= expr_match(lctx
,
1506 hir
::MatchSource
::ForLoopDesugar
,
1509 // `{ let _result = ...; _result }`
1510 // underscore prevents an unused_variables lint if the head diverges
1511 let result_ident
= lctx
.str_to_ident("_result");
1512 let let_stmt
= stmt_let(lctx
, e
.span
, false, result_ident
, match_expr
, None
);
1513 let result
= expr_ident(lctx
, e
.span
, result_ident
, None
);
1514 let block
= block_all(lctx
, e
.span
, hir_vec
![let_stmt
], Some(result
));
1515 // add the attributes to the outer returned expr node
1516 expr_block(lctx
, block
, e
.attrs
.clone())
1520 ExprMac(_
) => panic
!("Shouldn't exist here"),
1523 attrs
: e
.attrs
.clone(),
1527 pub fn lower_stmt(lctx
: &LoweringContext
, s
: &Stmt
) -> hir
::Stmt
{
1529 StmtDecl(ref d
, id
) => {
1531 node
: hir
::StmtDecl(lower_decl(lctx
, d
), id
),
1535 StmtExpr(ref e
, id
) => {
1537 node
: hir
::StmtExpr(lower_expr(lctx
, e
), id
),
1541 StmtSemi(ref e
, id
) => {
1543 node
: hir
::StmtSemi(lower_expr(lctx
, e
), id
),
1547 StmtMac(..) => panic
!("Shouldn't exist here"),
1551 pub fn lower_capture_clause(_lctx
: &LoweringContext
, c
: CaptureClause
) -> hir
::CaptureClause
{
1553 CaptureByValue
=> hir
::CaptureByValue
,
1554 CaptureByRef
=> hir
::CaptureByRef
,
1558 pub fn lower_visibility(_lctx
: &LoweringContext
, v
: Visibility
) -> hir
::Visibility
{
1560 Public
=> hir
::Public
,
1561 Inherited
=> hir
::Inherited
,
1565 pub fn lower_block_check_mode(lctx
: &LoweringContext
, b
: &BlockCheckMode
) -> hir
::BlockCheckMode
{
1567 DefaultBlock
=> hir
::DefaultBlock
,
1568 UnsafeBlock(u
) => hir
::UnsafeBlock(lower_unsafe_source(lctx
, u
)),
1572 pub fn lower_binding_mode(lctx
: &LoweringContext
, b
: &BindingMode
) -> hir
::BindingMode
{
1574 BindingMode
::ByRef(m
) => hir
::BindByRef(lower_mutability(lctx
, m
)),
1575 BindingMode
::ByValue(m
) => hir
::BindByValue(lower_mutability(lctx
, m
)),
1579 pub fn lower_struct_field_kind(lctx
: &LoweringContext
,
1580 s
: &StructFieldKind
)
1581 -> hir
::StructFieldKind
{
1583 NamedField(ident
, vis
) => hir
::NamedField(ident
.name
, lower_visibility(lctx
, vis
)),
1584 UnnamedField(vis
) => hir
::UnnamedField(lower_visibility(lctx
, vis
)),
1588 pub fn lower_unsafe_source(_lctx
: &LoweringContext
, u
: UnsafeSource
) -> hir
::UnsafeSource
{
1590 CompilerGenerated
=> hir
::CompilerGenerated
,
1591 UserProvided
=> hir
::UserProvided
,
1595 pub fn lower_impl_polarity(_lctx
: &LoweringContext
, i
: ImplPolarity
) -> hir
::ImplPolarity
{
1597 ImplPolarity
::Positive
=> hir
::ImplPolarity
::Positive
,
1598 ImplPolarity
::Negative
=> hir
::ImplPolarity
::Negative
,
1602 pub fn lower_trait_bound_modifier(_lctx
: &LoweringContext
,
1603 f
: TraitBoundModifier
)
1604 -> hir
::TraitBoundModifier
{
1606 TraitBoundModifier
::None
=> hir
::TraitBoundModifier
::None
,
1607 TraitBoundModifier
::Maybe
=> hir
::TraitBoundModifier
::Maybe
,
1611 // Helper methods for building HIR.
1613 fn arm(pats
: hir
::HirVec
<P
<hir
::Pat
>>, expr
: P
<hir
::Expr
>) -> hir
::Arm
{
1622 fn expr_break(lctx
: &LoweringContext
, span
: Span
,
1623 attrs
: ThinAttributes
) -> P
<hir
::Expr
> {
1624 expr(lctx
, span
, hir
::ExprBreak(None
), attrs
)
1627 fn expr_call(lctx
: &LoweringContext
,
1630 args
: hir
::HirVec
<P
<hir
::Expr
>>,
1631 attrs
: ThinAttributes
)
1633 expr(lctx
, span
, hir
::ExprCall(e
, args
), attrs
)
1636 fn expr_ident(lctx
: &LoweringContext
, span
: Span
, id
: hir
::Ident
,
1637 attrs
: ThinAttributes
) -> P
<hir
::Expr
> {
1638 expr_path(lctx
, path_ident(span
, id
), attrs
)
1641 fn expr_mut_addr_of(lctx
: &LoweringContext
, span
: Span
, e
: P
<hir
::Expr
>,
1642 attrs
: ThinAttributes
) -> P
<hir
::Expr
> {
1643 expr(lctx
, span
, hir
::ExprAddrOf(hir
::MutMutable
, e
), attrs
)
1646 fn expr_path(lctx
: &LoweringContext
, path
: hir
::Path
,
1647 attrs
: ThinAttributes
) -> P
<hir
::Expr
> {
1648 expr(lctx
, path
.span
, hir
::ExprPath(None
, path
), attrs
)
1651 fn expr_match(lctx
: &LoweringContext
,
1654 arms
: hir
::HirVec
<hir
::Arm
>,
1655 source
: hir
::MatchSource
,
1656 attrs
: ThinAttributes
)
1658 expr(lctx
, span
, hir
::ExprMatch(arg
, arms
, source
), attrs
)
1661 fn expr_block(lctx
: &LoweringContext
, b
: P
<hir
::Block
>,
1662 attrs
: ThinAttributes
) -> P
<hir
::Expr
> {
1663 expr(lctx
, b
.span
, hir
::ExprBlock(b
), attrs
)
1666 fn expr_tuple(lctx
: &LoweringContext
, sp
: Span
, exprs
: hir
::HirVec
<P
<hir
::Expr
>>,
1667 attrs
: ThinAttributes
) -> P
<hir
::Expr
> {
1668 expr(lctx
, sp
, hir
::ExprTup(exprs
), attrs
)
1671 fn expr(lctx
: &LoweringContext
, span
: Span
, node
: hir
::Expr_
,
1672 attrs
: ThinAttributes
) -> P
<hir
::Expr
> {
1681 fn stmt_let(lctx
: &LoweringContext
,
1686 attrs
: ThinAttributes
)
1688 let pat
= if mutbl
{
1689 pat_ident_binding_mode(lctx
, sp
, ident
, hir
::BindByValue(hir
::MutMutable
))
1691 pat_ident(lctx
, sp
, ident
)
1693 let local
= P(hir
::Local
{
1701 let decl
= respan(sp
, hir
::DeclLocal(local
));
1702 respan(sp
, hir
::StmtDecl(P(decl
), lctx
.next_id()))
1705 fn block_expr(lctx
: &LoweringContext
, expr
: P
<hir
::Expr
>) -> P
<hir
::Block
> {
1706 block_all(lctx
, expr
.span
, hir
::HirVec
::new(), Some(expr
))
1709 fn block_all(lctx
: &LoweringContext
,
1711 stmts
: hir
::HirVec
<hir
::Stmt
>,
1712 expr
: Option
<P
<hir
::Expr
>>)
1718 rules
: hir
::DefaultBlock
,
1723 fn pat_some(lctx
: &LoweringContext
, span
: Span
, pat
: P
<hir
::Pat
>) -> P
<hir
::Pat
> {
1724 let some
= std_path(lctx
, &["option", "Option", "Some"]);
1725 let path
= path_global(span
, some
);
1726 pat_enum(lctx
, span
, path
, hir_vec
![pat
])
1729 fn pat_none(lctx
: &LoweringContext
, span
: Span
) -> P
<hir
::Pat
> {
1730 let none
= std_path(lctx
, &["option", "Option", "None"]);
1731 let path
= path_global(span
, none
);
1732 pat_enum(lctx
, span
, path
, hir_vec
![])
1735 fn pat_enum(lctx
: &LoweringContext
,
1738 subpats
: hir
::HirVec
<P
<hir
::Pat
>>)
1740 let pt
= hir
::PatEnum(path
, Some(subpats
));
1744 fn pat_ident(lctx
: &LoweringContext
, span
: Span
, ident
: hir
::Ident
) -> P
<hir
::Pat
> {
1745 pat_ident_binding_mode(lctx
, span
, ident
, hir
::BindByValue(hir
::MutImmutable
))
1748 fn pat_ident_binding_mode(lctx
: &LoweringContext
,
1751 bm
: hir
::BindingMode
)
1753 let pat_ident
= hir
::PatIdent(bm
,
1759 pat(lctx
, span
, pat_ident
)
1762 fn pat_wild(lctx
: &LoweringContext
, span
: Span
) -> P
<hir
::Pat
> {
1763 pat(lctx
, span
, hir
::PatWild
)
1766 fn pat(lctx
: &LoweringContext
, span
: Span
, pat
: hir
::Pat_
) -> P
<hir
::Pat
> {
1774 fn path_ident(span
: Span
, id
: hir
::Ident
) -> hir
::Path
{
1775 path(span
, vec
![id
])
1778 fn path(span
: Span
, strs
: Vec
<hir
::Ident
>) -> hir
::Path
{
1779 path_all(span
, false, strs
, hir
::HirVec
::new(), hir
::HirVec
::new(), hir
::HirVec
::new())
1782 fn path_global(span
: Span
, strs
: Vec
<hir
::Ident
>) -> hir
::Path
{
1783 path_all(span
, true, strs
, hir
::HirVec
::new(), hir
::HirVec
::new(), hir
::HirVec
::new())
1786 fn path_all(sp
: Span
,
1788 mut idents
: Vec
<hir
::Ident
>,
1789 lifetimes
: hir
::HirVec
<hir
::Lifetime
>,
1790 types
: hir
::HirVec
<P
<hir
::Ty
>>,
1791 bindings
: hir
::HirVec
<hir
::TypeBinding
>)
1793 let last_identifier
= idents
.pop().unwrap();
1794 let mut segments
: Vec
<hir
::PathSegment
> = idents
.into_iter()
1798 parameters
: hir
::PathParameters
::none(),
1802 segments
.push(hir
::PathSegment
{
1803 identifier
: last_identifier
,
1804 parameters
: hir
::AngleBracketedParameters(hir
::AngleBracketedParameterData
{
1805 lifetimes
: lifetimes
,
1813 segments
: segments
.into(),
1817 fn std_path(lctx
: &LoweringContext
, components
: &[&str]) -> Vec
<hir
::Ident
> {
1818 let mut v
= Vec
::new();
1819 if let Some(s
) = lctx
.crate_root
{
1820 v
.push(hir
::Ident
::from_name(token
::intern(s
)));
1822 v
.extend(components
.iter().map(|s
| hir
::Ident
::from_name(token
::intern(s
))));
1826 // Given suffix ["b","c","d"], returns path `::std::b::c::d` when
1827 // `fld.cx.use_std`, and `::core::b::c::d` otherwise.
1828 fn core_path(lctx
: &LoweringContext
, span
: Span
, components
: &[&str]) -> hir
::Path
{
1829 let idents
= std_path(lctx
, components
);
1830 path_global(span
, idents
)
1833 fn signal_block_expr(lctx
: &LoweringContext
,
1834 stmts
: hir
::HirVec
<hir
::Stmt
>,
1837 rule
: hir
::BlockCheckMode
,
1838 attrs
: ThinAttributes
)
1840 let id
= lctx
.next_id();
1857 use syntax
::ast
::{self, NodeId, NodeIdAssigner}
;
1858 use syntax
::{parse, codemap}
;
1859 use syntax
::fold
::Folder
;
1860 use std
::cell
::Cell
;
1862 struct MockAssigner
{
1863 next_id
: Cell
<NodeId
>,
1867 fn new() -> MockAssigner
{
1868 MockAssigner { next_id: Cell::new(0) }
1873 fn call_site(&self) -> codemap
::Span
;
1874 fn cfg(&self) -> ast
::CrateConfig
;
1875 fn ident_of(&self, st
: &str) -> ast
::Ident
;
1876 fn name_of(&self, st
: &str) -> ast
::Name
;
1877 fn parse_sess(&self) -> &parse
::ParseSess
;
1880 impl FakeExtCtxt
for parse
::ParseSess
{
1881 fn call_site(&self) -> codemap
::Span
{
1883 lo
: codemap
::BytePos(0),
1884 hi
: codemap
::BytePos(0),
1885 expn_id
: codemap
::NO_EXPANSION
,
1888 fn cfg(&self) -> ast
::CrateConfig
{
1891 fn ident_of(&self, st
: &str) -> ast
::Ident
{
1892 parse
::token
::str_to_ident(st
)
1894 fn name_of(&self, st
: &str) -> ast
::Name
{
1895 parse
::token
::intern(st
)
1897 fn parse_sess(&self) -> &parse
::ParseSess
{
1902 impl NodeIdAssigner
for MockAssigner
{
1903 fn next_node_id(&self) -> NodeId
{
1904 let result
= self.next_id
.get();
1905 self.next_id
.set(result
+ 1);
1909 fn peek_node_id(&self) -> NodeId
{
1914 impl Folder
for MockAssigner
{
1915 fn new_id(&mut self, old_id
: NodeId
) -> NodeId
{
1916 assert_eq
!(old_id
, ast
::DUMMY_NODE_ID
);
1922 fn test_preserves_ids() {
1923 let cx
= parse
::ParseSess
::new();
1924 let mut assigner
= MockAssigner
::new();
1926 let ast_if_let
= quote_expr
!(&cx
,
1927 if let Some(foo
) = baz
{
1930 let ast_if_let
= assigner
.fold_expr(ast_if_let
);
1931 let ast_while_let
= quote_expr
!(&cx
,
1932 while let Some(foo
) = baz
{
1935 let ast_while_let
= assigner
.fold_expr(ast_while_let
);
1936 let ast_for
= quote_expr
!(&cx
,
1940 let ast_for
= assigner
.fold_expr(ast_for
);
1941 let ast_in
= quote_expr
!(&cx
, in HEAP { foo() }
);
1942 let ast_in
= assigner
.fold_expr(ast_in
);
1944 let lctx
= LoweringContext
::new(&assigner
, None
);
1945 let hir1
= lower_expr(&lctx
, &ast_if_let
);
1946 let hir2
= lower_expr(&lctx
, &ast_if_let
);
1947 assert
!(hir1
== hir2
);
1949 let hir1
= lower_expr(&lctx
, &ast_while_let
);
1950 let hir2
= lower_expr(&lctx
, &ast_while_let
);
1951 assert
!(hir1
== hir2
);
1953 let hir1
= lower_expr(&lctx
, &ast_for
);
1954 let hir2
= lower_expr(&lctx
, &ast_for
);
1955 assert
!(hir1
== hir2
);
1957 let hir1
= lower_expr(&lctx
, &ast_in
);
1958 let hir2
= lower_expr(&lctx
, &ast_in
);
1959 assert
!(hir1
== hir2
);