// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// Lowers the AST to the HIR.
//
// Since the AST and HIR are fairly similar, this is mostly a simple procedure,
// much like a fold. Where lowering involves a bit more work things get more
// interesting and there are some invariants you should know about. These mostly
// concern spans and ids.

// Spans are assigned to AST nodes during parsing and then are modified during
// expansion to indicate the origin of a node and the process it went through
// being expanded. Ids are assigned to AST nodes just before lowering.

// For the simpler lowering steps, ids and spans should be preserved. Unlike
// expansion we do not preserve the process of lowering in the spans, so spans
// should not be modified here. When creating a new node (as opposed to
// 'folding' an existing one), then you create a new id using `next_id()`.

// You must ensure that ids are unique. That means that you should only use the
// id from an AST node in a single HIR node (you can assume that AST node ids
// are unique). Every new node must have a unique id. Avoid cloning HIR nodes.
// If you do, you must then set the new node's id to a fresh one.

// Lowering must be reproducible (the compiler only lowers once, but tools and
// custom lints may lower an AST node to a HIR node to interact with the
// compiler). The most interesting bit of this is ids - if you lower an AST node
// and create new HIR nodes with fresh ids, when re-lowering the same node, you
// must ensure you get the same ids! To do this, we keep track of the next id
// when we translate a node which requires new ids. By checking this cache and
// using node ids starting with the cached id, we ensure ids are reproducible.
// To use this system, you just need to hold on to a CachedIdSetter object
// whilst lowering. This is an RAII object that takes care of setting and
// restoring the cached id, etc.

// This whole system relies on node ids being incremented one at a time and
// all increments being for lowering. This means that you should not call any
// non-lowering function which will use new node ids.

// We must also cache gensym'ed Idents to ensure that we get the same Ident
// every time we lower a node with gensym'ed names. One consequence of this is
// that you can only gensym a name once in a lowering (you don't need to worry
// about nested lowering though). That's because we cache based on the name and
// the currently cached node id, which is unique per lowered node.

// Spans are used for error messages and for tools to map semantics back to
// source code. It is therefore not as important with spans as with ids to be
// strict about use (you can't break the compiler by screwing up a span).
// Obviously, a HIR node can only have a single span. But multiple nodes can
// have the same span and spans don't need to be kept in order, etc. Where code
// is preserved by lowering, it should have the same span as in the AST. Where
// HIR nodes are new it is probably best to give a span for the whole AST node
// being lowered. All nodes should have real spans; don't use dummy spans.
// Tools are likely to get confused if the spans from leaf AST nodes occur in
// multiple places in the HIR, especially for multiple identifiers.
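
// As a quick illustration of the rules above (a sketch only, not part of the
// lowering itself), a desugaring that needs fresh ids and gensym'ed names is
// written against the `cache_ids` and `str_to_ident` helpers defined below;
// the node `e` and the name "tmp" here are hypothetical:
//
//     return cache_ids(lctx, e.id, |lctx| {
//         // Cached per (gensym_key, name): re-lowering `e` yields the same Ident.
//         let tmp_ident = lctx.str_to_ident("tmp");
//         // `next_id()` (called by the `expr_*` helpers) replays cached ids on
//         // re-lowering, so the HIR built for `e` is identical every time.
//         expr_ident(lctx, e.span, tmp_ident, None)
//     });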

use hir;

use std::collections::BTreeMap;
use std::collections::HashMap;
use std::iter;

use syntax::ast::*;
use syntax::attr::{ThinAttributes, ThinAttributesExt};
use syntax::errors::Handler;
use syntax::ext::mtwt;
use syntax::ptr::P;
use syntax::codemap::{respan, Spanned, Span};
use syntax::parse::token;
use syntax::std_inject;
use syntax::visit::{self, Visitor};

use std::cell::{Cell, RefCell};

pub struct LoweringContext<'a> {
    crate_root: Option<&'static str>,
    // Map AST ids to ids used for expanded nodes.
    id_cache: RefCell<HashMap<NodeId, NodeId>>,
    // Use if there are no cached ids for the current node.
    id_assigner: &'a NodeIdAssigner,
    // 0 == no cached id. Must be incremented to align with previous id
    // incrementing.
    cached_id: Cell<u32>,
    // Keep track of gensym'ed idents.
    gensym_cache: RefCell<HashMap<(NodeId, &'static str), hir::Ident>>,
    // A copy of cached_id, but is also set to an id while a node is lowered for
    // the first time.
    gensym_key: Cell<u32>,
}

impl<'a, 'hir> LoweringContext<'a> {
    pub fn new(id_assigner: &'a NodeIdAssigner, c: Option<&Crate>) -> LoweringContext<'a> {
        let crate_root = c.and_then(|c| {
            if std_inject::no_core(c) {
                None
            } else if std_inject::no_std(c) {
                Some("core")
            } else {
                Some("std")
            }
        });

        LoweringContext {
            crate_root: crate_root,
            id_cache: RefCell::new(HashMap::new()),
            id_assigner: id_assigner,
            cached_id: Cell::new(0),
            gensym_cache: RefCell::new(HashMap::new()),
            gensym_key: Cell::new(0),
        }
    }

    fn next_id(&self) -> NodeId {
        let cached_id = self.cached_id.get();
        if cached_id == 0 {
            return self.id_assigner.next_node_id();
        }

        self.cached_id.set(cached_id + 1);
        cached_id
    }

    fn str_to_ident(&self, s: &'static str) -> hir::Ident {
        let gensym_key = self.gensym_key.get();
        if gensym_key == 0 {
            return hir::Ident::from_name(token::gensym(s));
        }

        let cached = self.gensym_cache.borrow().contains_key(&(gensym_key, s));
        if cached {
            self.gensym_cache.borrow()[&(gensym_key, s)]
        } else {
            let result = hir::Ident::from_name(token::gensym(s));
            self.gensym_cache.borrow_mut().insert((gensym_key, s), result);
            result
        }
    }

    // Panics if this LoweringContext's NodeIdAssigner is not able to emit diagnostics.
    fn diagnostic(&self) -> &Handler {
        self.id_assigner.diagnostic()
    }
}

// Utility fn for setting and unsetting the cached id.
fn cache_ids<'a, OP, R>(lctx: &LoweringContext, expr_id: NodeId, op: OP) -> R
    where OP: FnOnce(&LoweringContext) -> R
{
    // Only reset the id if it was previously 0, i.e., was not cached.
    // If it was cached, we are in a nested node, but our id count will
    // still count towards the parent's count.
    let reset_cached_id = lctx.cached_id.get() == 0;
    // We always reset gensym_key so that if we use the same name in a nested
    // node and after that node, they get different values.
    let old_gensym_key = lctx.gensym_key.get();

    {
        let id_cache: &mut HashMap<_, _> = &mut lctx.id_cache.borrow_mut();

        if id_cache.contains_key(&expr_id) {
            let cached_id = lctx.cached_id.get();
            if cached_id == 0 {
                // We're entering a node where we need to track ids, but are not
                // yet tracking.
                lctx.cached_id.set(id_cache[&expr_id]);
            } else {
                // We're already tracking - check that the tracked id is the same
                // as the expected id.
                assert!(cached_id == id_cache[&expr_id], "id mismatch");
            }
            lctx.gensym_key.set(id_cache[&expr_id]);
        } else {
            // We've never lowered this node before, remember it for next time.
            let next_id = lctx.id_assigner.peek_node_id();
            id_cache.insert(expr_id, next_id);
            lctx.gensym_key.set(next_id);
            // self.cached_id is not set when we lower a node for the first time,
            // only on re-lowering.
        }
    }

    let result = op(lctx);

    if reset_cached_id {
        lctx.cached_id.set(0);
    }
    lctx.gensym_key.set(old_gensym_key);

    result
}

pub fn lower_ident(_lctx: &LoweringContext, ident: Ident) -> hir::Ident {
    hir::Ident {
        name: mtwt::resolve(ident),
        unhygienic_name: ident.name,
    }
}

pub fn lower_attrs(_lctx: &LoweringContext, attrs: &Vec<Attribute>) -> hir::HirVec<Attribute> {
    attrs.clone().into()
}

pub fn lower_view_path(lctx: &LoweringContext, view_path: &ViewPath) -> P<hir::ViewPath> {
    P(Spanned {
        node: match view_path.node {
            ViewPathSimple(ident, ref path) => {
                hir::ViewPathSimple(ident.name, lower_path(lctx, path))
            }
            ViewPathGlob(ref path) => {
                hir::ViewPathGlob(lower_path(lctx, path))
            }
            ViewPathList(ref path, ref path_list_idents) => {
                hir::ViewPathList(lower_path(lctx, path),
                                  path_list_idents.iter()
                                                  .map(lower_path_list_item)
                                                  .collect())
            }
        },
        span: view_path.span,
    })
}

fn lower_path_list_item(path_list_ident: &PathListItem) -> hir::PathListItem {
    Spanned {
        node: match path_list_ident.node {
            PathListItemKind::Ident { id, name, rename } => hir::PathListIdent {
                id: id,
                name: name.name,
                rename: rename.map(|x| x.name),
            },
            PathListItemKind::Mod { id, rename } => hir::PathListMod {
                id: id,
                rename: rename.map(|x| x.name),
            },
        },
        span: path_list_ident.span,
    }
}

pub fn lower_arm(lctx: &LoweringContext, arm: &Arm) -> hir::Arm {
    hir::Arm {
        attrs: lower_attrs(lctx, &arm.attrs),
        pats: arm.pats.iter().map(|x| lower_pat(lctx, x)).collect(),
        guard: arm.guard.as_ref().map(|ref x| lower_expr(lctx, x)),
        body: lower_expr(lctx, &arm.body),
    }
}

pub fn lower_decl(lctx: &LoweringContext, d: &Decl) -> P<hir::Decl> {
    match d.node {
        DeclKind::Local(ref l) => P(Spanned {
            node: hir::DeclLocal(lower_local(lctx, l)),
            span: d.span,
        }),
        DeclKind::Item(ref it) => P(Spanned {
            node: hir::DeclItem(lower_item_id(lctx, it)),
            span: d.span,
        }),
    }
}

pub fn lower_ty_binding(lctx: &LoweringContext, b: &TypeBinding) -> hir::TypeBinding {
    hir::TypeBinding {
        id: b.id,
        name: b.ident.name,
        ty: lower_ty(lctx, &b.ty),
        span: b.span,
    }
}

pub fn lower_ty(lctx: &LoweringContext, t: &Ty) -> P<hir::Ty> {
    use syntax::ast::TyKind::*;
    P(hir::Ty {
        id: t.id,
        node: match t.node {
            Infer => hir::TyInfer,
            Vec(ref ty) => hir::TyVec(lower_ty(lctx, ty)),
            Ptr(ref mt) => hir::TyPtr(lower_mt(lctx, mt)),
            Rptr(ref region, ref mt) => {
                hir::TyRptr(lower_opt_lifetime(lctx, region), lower_mt(lctx, mt))
            }
            BareFn(ref f) => {
                hir::TyBareFn(P(hir::BareFnTy {
                    lifetimes: lower_lifetime_defs(lctx, &f.lifetimes),
                    unsafety: lower_unsafety(lctx, f.unsafety),
                    abi: f.abi,
                    decl: lower_fn_decl(lctx, &f.decl),
                }))
            }
            Tup(ref tys) => hir::TyTup(tys.iter().map(|ty| lower_ty(lctx, ty)).collect()),
            Paren(ref ty) => {
                return lower_ty(lctx, ty);
            }
            Path(ref qself, ref path) => {
                let qself = qself.as_ref().map(|&QSelf { ref ty, position }| {
                    hir::QSelf {
                        ty: lower_ty(lctx, ty),
                        position: position,
                    }
                });
                hir::TyPath(qself, lower_path(lctx, path))
            }
            ObjectSum(ref ty, ref bounds) => {
                hir::TyObjectSum(lower_ty(lctx, ty), lower_bounds(lctx, bounds))
            }
            FixedLengthVec(ref ty, ref e) => {
                hir::TyFixedLengthVec(lower_ty(lctx, ty), lower_expr(lctx, e))
            }
            Typeof(ref expr) => {
                hir::TyTypeof(lower_expr(lctx, expr))
            }
            PolyTraitRef(ref bounds) => {
                hir::TyPolyTraitRef(bounds.iter().map(|b| lower_ty_param_bound(lctx, b)).collect())
            }
            Mac(_) => panic!("TyMac should have been expanded by now."),
        },
        span: t.span,
    })
}

pub fn lower_foreign_mod(lctx: &LoweringContext, fm: &ForeignMod) -> hir::ForeignMod {
    hir::ForeignMod {
        abi: fm.abi,
        items: fm.items.iter().map(|x| lower_foreign_item(lctx, x)).collect(),
    }
}

pub fn lower_variant(lctx: &LoweringContext, v: &Variant) -> hir::Variant {
    Spanned {
        node: hir::Variant_ {
            name: v.node.name.name,
            attrs: lower_attrs(lctx, &v.node.attrs),
            data: lower_variant_data(lctx, &v.node.data),
            disr_expr: v.node.disr_expr.as_ref().map(|e| lower_expr(lctx, e)),
        },
        span: v.span,
    }
}

// Path segments are usually unhygienic, hygienic path segments can occur only in
// identifier-like paths originating from `ExprPath`.
// Make life simpler for rustc_resolve by renaming only such segments.
pub fn lower_path_full(lctx: &LoweringContext, p: &Path, maybe_hygienic: bool) -> hir::Path {
    let maybe_hygienic = maybe_hygienic && !p.global && p.segments.len() == 1;
    hir::Path {
        global: p.global,
        segments: p.segments
                   .iter()
                   .map(|&PathSegment { identifier, ref parameters }| {
                       hir::PathSegment {
                           identifier: if maybe_hygienic {
                               lower_ident(lctx, identifier)
                           } else {
                               hir::Ident::from_name(identifier.name)
                           },
                           parameters: lower_path_parameters(lctx, parameters),
                       }
                   })
                   .collect(),
        span: p.span,
    }
}

pub fn lower_path(lctx: &LoweringContext, p: &Path) -> hir::Path {
    lower_path_full(lctx, p, false)
}

pub fn lower_path_parameters(lctx: &LoweringContext,
                             path_parameters: &PathParameters)
                             -> hir::PathParameters {
    match *path_parameters {
        PathParameters::AngleBracketed(ref data) =>
            hir::AngleBracketedParameters(lower_angle_bracketed_parameter_data(lctx, data)),
        PathParameters::Parenthesized(ref data) =>
            hir::ParenthesizedParameters(lower_parenthesized_parameter_data(lctx, data)),
    }
}

pub fn lower_angle_bracketed_parameter_data(lctx: &LoweringContext,
                                            data: &AngleBracketedParameterData)
                                            -> hir::AngleBracketedParameterData {
    let &AngleBracketedParameterData { ref lifetimes, ref types, ref bindings } = data;
    hir::AngleBracketedParameterData {
        lifetimes: lower_lifetimes(lctx, lifetimes),
        types: types.iter().map(|ty| lower_ty(lctx, ty)).collect(),
        bindings: bindings.iter().map(|b| lower_ty_binding(lctx, b)).collect(),
    }
}

pub fn lower_parenthesized_parameter_data(lctx: &LoweringContext,
                                          data: &ParenthesizedParameterData)
                                          -> hir::ParenthesizedParameterData {
    let &ParenthesizedParameterData { ref inputs, ref output, span } = data;
    hir::ParenthesizedParameterData {
        inputs: inputs.iter().map(|ty| lower_ty(lctx, ty)).collect(),
        output: output.as_ref().map(|ty| lower_ty(lctx, ty)),
        span: span,
    }
}

pub fn lower_local(lctx: &LoweringContext, l: &Local) -> P<hir::Local> {
    P(hir::Local {
        id: l.id,
        ty: l.ty.as_ref().map(|t| lower_ty(lctx, t)),
        pat: lower_pat(lctx, &l.pat),
        init: l.init.as_ref().map(|e| lower_expr(lctx, e)),
        span: l.span,
        attrs: l.attrs.clone(),
    })
}

pub fn lower_explicit_self_underscore(lctx: &LoweringContext,
                                      es: &SelfKind)
                                      -> hir::ExplicitSelf_ {
    match *es {
        SelfKind::Static => hir::SelfStatic,
        SelfKind::Value(v) => hir::SelfValue(v.name),
        SelfKind::Region(ref lifetime, m, ident) => {
            hir::SelfRegion(lower_opt_lifetime(lctx, lifetime),
                            lower_mutability(lctx, m),
                            ident.name)
        }
        SelfKind::Explicit(ref typ, ident) => {
            hir::SelfExplicit(lower_ty(lctx, typ), ident.name)
        }
    }
}

pub fn lower_mutability(_lctx: &LoweringContext, m: Mutability) -> hir::Mutability {
    match m {
        Mutability::Mutable => hir::MutMutable,
        Mutability::Immutable => hir::MutImmutable,
    }
}

pub fn lower_explicit_self(lctx: &LoweringContext, s: &ExplicitSelf) -> hir::ExplicitSelf {
    Spanned {
        node: lower_explicit_self_underscore(lctx, &s.node),
        span: s.span,
    }
}

pub fn lower_arg(lctx: &LoweringContext, arg: &Arg) -> hir::Arg {
    hir::Arg {
        id: arg.id,
        pat: lower_pat(lctx, &arg.pat),
        ty: lower_ty(lctx, &arg.ty),
    }
}

pub fn lower_fn_decl(lctx: &LoweringContext, decl: &FnDecl) -> P<hir::FnDecl> {
    P(hir::FnDecl {
        inputs: decl.inputs.iter().map(|x| lower_arg(lctx, x)).collect(),
        output: match decl.output {
            FunctionRetTy::Ty(ref ty) => hir::Return(lower_ty(lctx, ty)),
            FunctionRetTy::Default(span) => hir::DefaultReturn(span),
            FunctionRetTy::None(span) => hir::NoReturn(span),
        },
        variadic: decl.variadic,
    })
}

pub fn lower_ty_param_bound(lctx: &LoweringContext, tpb: &TyParamBound) -> hir::TyParamBound {
    match *tpb {
        TraitTyParamBound(ref ty, modifier) => {
            hir::TraitTyParamBound(lower_poly_trait_ref(lctx, ty),
                                   lower_trait_bound_modifier(lctx, modifier))
        }
        RegionTyParamBound(ref lifetime) => {
            hir::RegionTyParamBound(lower_lifetime(lctx, lifetime))
        }
    }
}

pub fn lower_ty_param(lctx: &LoweringContext, tp: &TyParam) -> hir::TyParam {
    hir::TyParam {
        id: tp.id,
        name: tp.ident.name,
        bounds: lower_bounds(lctx, &tp.bounds),
        default: tp.default.as_ref().map(|x| lower_ty(lctx, x)),
        span: tp.span,
    }
}

pub fn lower_ty_params(lctx: &LoweringContext,
                       tps: &P<[TyParam]>)
                       -> hir::HirVec<hir::TyParam> {
    tps.iter().map(|tp| lower_ty_param(lctx, tp)).collect()
}

pub fn lower_lifetime(_lctx: &LoweringContext, l: &Lifetime) -> hir::Lifetime {
    hir::Lifetime {
        id: l.id,
        name: l.name,
        span: l.span,
    }
}

pub fn lower_lifetime_def(lctx: &LoweringContext, l: &LifetimeDef) -> hir::LifetimeDef {
    hir::LifetimeDef {
        lifetime: lower_lifetime(lctx, &l.lifetime),
        bounds: lower_lifetimes(lctx, &l.bounds),
    }
}

pub fn lower_lifetimes(lctx: &LoweringContext, lts: &Vec<Lifetime>) -> hir::HirVec<hir::Lifetime> {
    lts.iter().map(|l| lower_lifetime(lctx, l)).collect()
}

pub fn lower_lifetime_defs(lctx: &LoweringContext,
                           lts: &Vec<LifetimeDef>)
                           -> hir::HirVec<hir::LifetimeDef> {
    lts.iter().map(|l| lower_lifetime_def(lctx, l)).collect()
}

pub fn lower_opt_lifetime(lctx: &LoweringContext,
                          o_lt: &Option<Lifetime>)
                          -> Option<hir::Lifetime> {
    o_lt.as_ref().map(|lt| lower_lifetime(lctx, lt))
}

pub fn lower_generics(lctx: &LoweringContext, g: &Generics) -> hir::Generics {
    hir::Generics {
        ty_params: lower_ty_params(lctx, &g.ty_params),
        lifetimes: lower_lifetime_defs(lctx, &g.lifetimes),
        where_clause: lower_where_clause(lctx, &g.where_clause),
    }
}

pub fn lower_where_clause(lctx: &LoweringContext, wc: &WhereClause) -> hir::WhereClause {
    hir::WhereClause {
        id: wc.id,
        predicates: wc.predicates
                      .iter()
                      .map(|predicate| lower_where_predicate(lctx, predicate))
                      .collect(),
    }
}

pub fn lower_where_predicate(lctx: &LoweringContext,
                             pred: &WherePredicate)
                             -> hir::WherePredicate {
    match *pred {
        WherePredicate::BoundPredicate(WhereBoundPredicate{ ref bound_lifetimes,
                                                            ref bounded_ty,
                                                            ref bounds,
                                                            span }) => {
            hir::WherePredicate::BoundPredicate(hir::WhereBoundPredicate {
                bound_lifetimes: lower_lifetime_defs(lctx, bound_lifetimes),
                bounded_ty: lower_ty(lctx, bounded_ty),
                bounds: bounds.iter().map(|x| lower_ty_param_bound(lctx, x)).collect(),
                span: span,
            })
        }
        WherePredicate::RegionPredicate(WhereRegionPredicate{ ref lifetime,
                                                              ref bounds,
                                                              span }) => {
            hir::WherePredicate::RegionPredicate(hir::WhereRegionPredicate {
                span: span,
                lifetime: lower_lifetime(lctx, lifetime),
                bounds: bounds.iter().map(|bound| lower_lifetime(lctx, bound)).collect(),
            })
        }
        WherePredicate::EqPredicate(WhereEqPredicate{ id,
                                                      ref path,
                                                      ref ty,
                                                      span }) => {
            hir::WherePredicate::EqPredicate(hir::WhereEqPredicate {
                id: id,
                path: lower_path(lctx, path),
                ty: lower_ty(lctx, ty),
                span: span,
            })
        }
    }
}

pub fn lower_variant_data(lctx: &LoweringContext, vdata: &VariantData) -> hir::VariantData {
    match *vdata {
        VariantData::Struct(ref fields, id) => {
            hir::VariantData::Struct(fields.iter()
                                           .enumerate()
                                           .map(|f| lower_struct_field(lctx, f))
                                           .collect(),
                                     id)
        }
        VariantData::Tuple(ref fields, id) => {
            hir::VariantData::Tuple(fields.iter()
                                          .enumerate()
                                          .map(|f| lower_struct_field(lctx, f))
                                          .collect(),
                                    id)
        }
        VariantData::Unit(id) => hir::VariantData::Unit(id),
    }
}

pub fn lower_trait_ref(lctx: &LoweringContext, p: &TraitRef) -> hir::TraitRef {
    hir::TraitRef {
        path: lower_path(lctx, &p.path),
        ref_id: p.ref_id,
    }
}

pub fn lower_poly_trait_ref(lctx: &LoweringContext, p: &PolyTraitRef) -> hir::PolyTraitRef {
    hir::PolyTraitRef {
        bound_lifetimes: lower_lifetime_defs(lctx, &p.bound_lifetimes),
        trait_ref: lower_trait_ref(lctx, &p.trait_ref),
        span: p.span,
    }
}

pub fn lower_struct_field(lctx: &LoweringContext,
                          (index, f): (usize, &StructField))
                          -> hir::StructField {
    hir::StructField {
        span: f.span,
        id: f.id,
        name: f.ident.map(|ident| ident.name).unwrap_or(token::intern(&index.to_string())),
        vis: lower_visibility(lctx, &f.vis),
        ty: lower_ty(lctx, &f.ty),
        attrs: lower_attrs(lctx, &f.attrs),
    }
}

pub fn lower_field(lctx: &LoweringContext, f: &Field) -> hir::Field {
    hir::Field {
        name: respan(f.ident.span, f.ident.node.name),
        expr: lower_expr(lctx, &f.expr),
        span: f.span,
    }
}

pub fn lower_mt(lctx: &LoweringContext, mt: &MutTy) -> hir::MutTy {
    hir::MutTy {
        ty: lower_ty(lctx, &mt.ty),
        mutbl: lower_mutability(lctx, mt.mutbl),
    }
}

pub fn lower_opt_bounds(lctx: &LoweringContext,
                        b: &Option<TyParamBounds>)
                        -> Option<hir::TyParamBounds> {
    b.as_ref().map(|ref bounds| lower_bounds(lctx, bounds))
}

fn lower_bounds(lctx: &LoweringContext, bounds: &TyParamBounds) -> hir::TyParamBounds {
    bounds.iter().map(|bound| lower_ty_param_bound(lctx, bound)).collect()
}

pub fn lower_block(lctx: &LoweringContext, b: &Block) -> P<hir::Block> {
    P(hir::Block {
        id: b.id,
        stmts: b.stmts.iter().map(|s| lower_stmt(lctx, s)).collect(),
        expr: b.expr.as_ref().map(|ref x| lower_expr(lctx, x)),
        rules: lower_block_check_mode(lctx, &b.rules),
        span: b.span,
    })
}

pub fn lower_item_kind(lctx: &LoweringContext, i: &ItemKind) -> hir::Item_ {
    match *i {
        ItemKind::ExternCrate(string) => hir::ItemExternCrate(string),
        ItemKind::Use(ref view_path) => {
            hir::ItemUse(lower_view_path(lctx, view_path))
        }
        ItemKind::Static(ref t, m, ref e) => {
            hir::ItemStatic(lower_ty(lctx, t),
                            lower_mutability(lctx, m),
                            lower_expr(lctx, e))
        }
        ItemKind::Const(ref t, ref e) => {
            hir::ItemConst(lower_ty(lctx, t), lower_expr(lctx, e))
        }
        ItemKind::Fn(ref decl, unsafety, constness, abi, ref generics, ref body) => {
            hir::ItemFn(lower_fn_decl(lctx, decl),
                        lower_unsafety(lctx, unsafety),
                        lower_constness(lctx, constness),
                        abi,
                        lower_generics(lctx, generics),
                        lower_block(lctx, body))
        }
        ItemKind::Mod(ref m) => hir::ItemMod(lower_mod(lctx, m)),
        ItemKind::ForeignMod(ref nm) => hir::ItemForeignMod(lower_foreign_mod(lctx, nm)),
        ItemKind::Ty(ref t, ref generics) => {
            hir::ItemTy(lower_ty(lctx, t), lower_generics(lctx, generics))
        }
        ItemKind::Enum(ref enum_definition, ref generics) => {
            hir::ItemEnum(hir::EnumDef {
                              variants: enum_definition.variants
                                                       .iter()
                                                       .map(|x| lower_variant(lctx, x))
                                                       .collect(),
                          },
                          lower_generics(lctx, generics))
        }
        ItemKind::Struct(ref struct_def, ref generics) => {
            let struct_def = lower_variant_data(lctx, struct_def);
            hir::ItemStruct(struct_def, lower_generics(lctx, generics))
        }
        ItemKind::DefaultImpl(unsafety, ref trait_ref) => {
            hir::ItemDefaultImpl(lower_unsafety(lctx, unsafety),
                                 lower_trait_ref(lctx, trait_ref))
        }
        ItemKind::Impl(unsafety, polarity, ref generics, ref ifce, ref ty, ref impl_items) => {
            let new_impl_items = impl_items.iter()
                                           .map(|item| lower_impl_item(lctx, item))
                                           .collect();
            let ifce = ifce.as_ref().map(|trait_ref| lower_trait_ref(lctx, trait_ref));
            hir::ItemImpl(lower_unsafety(lctx, unsafety),
                          lower_impl_polarity(lctx, polarity),
                          lower_generics(lctx, generics),
                          ifce,
                          lower_ty(lctx, ty),
                          new_impl_items)
        }
        ItemKind::Trait(unsafety, ref generics, ref bounds, ref items) => {
            let bounds = lower_bounds(lctx, bounds);
            let items = items.iter().map(|item| lower_trait_item(lctx, item)).collect();
            hir::ItemTrait(lower_unsafety(lctx, unsafety),
                           lower_generics(lctx, generics),
                           bounds,
                           items)
        }
        ItemKind::Mac(_) => panic!("Shouldn't still be around"),
    }
}

pub fn lower_trait_item(lctx: &LoweringContext, i: &TraitItem) -> hir::TraitItem {
    hir::TraitItem {
        id: i.id,
        name: i.ident.name,
        attrs: lower_attrs(lctx, &i.attrs),
        node: match i.node {
            TraitItemKind::Const(ref ty, ref default) => {
                hir::ConstTraitItem(lower_ty(lctx, ty),
                                    default.as_ref().map(|x| lower_expr(lctx, x)))
            }
            TraitItemKind::Method(ref sig, ref body) => {
                hir::MethodTraitItem(lower_method_sig(lctx, sig),
                                     body.as_ref().map(|x| lower_block(lctx, x)))
            }
            TraitItemKind::Type(ref bounds, ref default) => {
                hir::TypeTraitItem(lower_bounds(lctx, bounds),
                                   default.as_ref().map(|x| lower_ty(lctx, x)))
            }
        },
        span: i.span,
    }
}

pub fn lower_impl_item(lctx: &LoweringContext, i: &ImplItem) -> hir::ImplItem {
    hir::ImplItem {
        id: i.id,
        name: i.ident.name,
        attrs: lower_attrs(lctx, &i.attrs),
        vis: lower_visibility(lctx, &i.vis),
        defaultness: lower_defaultness(lctx, i.defaultness),
        node: match i.node {
            ImplItemKind::Const(ref ty, ref expr) => {
                hir::ImplItemKind::Const(lower_ty(lctx, ty), lower_expr(lctx, expr))
            }
            ImplItemKind::Method(ref sig, ref body) => {
                hir::ImplItemKind::Method(lower_method_sig(lctx, sig), lower_block(lctx, body))
            }
            ImplItemKind::Type(ref ty) => hir::ImplItemKind::Type(lower_ty(lctx, ty)),
            ImplItemKind::Macro(..) => panic!("Shouldn't exist any more"),
        },
        span: i.span,
    }
}

pub fn lower_mod(lctx: &LoweringContext, m: &Mod) -> hir::Mod {
    hir::Mod {
        inner: m.inner,
        item_ids: m.items.iter().map(|x| lower_item_id(lctx, x)).collect(),
    }
}

struct ItemLowerer<'lcx, 'interner: 'lcx> {
    items: BTreeMap<NodeId, hir::Item>,
    lctx: &'lcx LoweringContext<'interner>,
}

impl<'lcx, 'interner> Visitor<'lcx> for ItemLowerer<'lcx, 'interner> {
    fn visit_item(&mut self, item: &'lcx Item) {
        self.items.insert(item.id, lower_item(self.lctx, item));
        visit::walk_item(self, item);
    }
}

pub fn lower_crate(lctx: &LoweringContext, c: &Crate) -> hir::Crate {
    let items = {
        let mut item_lowerer = ItemLowerer { items: BTreeMap::new(), lctx: lctx };
        visit::walk_crate(&mut item_lowerer, c);
        item_lowerer.items
    };

    hir::Crate {
        module: lower_mod(lctx, &c.module),
        attrs: lower_attrs(lctx, &c.attrs),
        config: c.config.clone().into(),
        span: c.span,
        exported_macros: c.exported_macros.iter().map(|m| lower_macro_def(lctx, m)).collect(),
        items: items,
    }
}

pub fn lower_macro_def(lctx: &LoweringContext, m: &MacroDef) -> hir::MacroDef {
    hir::MacroDef {
        name: m.ident.name,
        attrs: lower_attrs(lctx, &m.attrs),
        id: m.id,
        span: m.span,
        imported_from: m.imported_from.map(|x| x.name),
        export: m.export,
        use_locally: m.use_locally,
        allow_internal_unstable: m.allow_internal_unstable,
        body: m.body.clone().into(),
    }
}

pub fn lower_item_id(_lctx: &LoweringContext, i: &Item) -> hir::ItemId {
    hir::ItemId { id: i.id }
}

pub fn lower_item(lctx: &LoweringContext, i: &Item) -> hir::Item {
    let node = lower_item_kind(lctx, &i.node);

    hir::Item {
        id: i.id,
        name: i.ident.name,
        attrs: lower_attrs(lctx, &i.attrs),
        node: node,
        vis: lower_visibility(lctx, &i.vis),
        span: i.span,
    }
}

pub fn lower_foreign_item(lctx: &LoweringContext, i: &ForeignItem) -> hir::ForeignItem {
    hir::ForeignItem {
        id: i.id,
        name: i.ident.name,
        attrs: lower_attrs(lctx, &i.attrs),
        node: match i.node {
            ForeignItemKind::Fn(ref fdec, ref generics) => {
                hir::ForeignItemFn(lower_fn_decl(lctx, fdec), lower_generics(lctx, generics))
            }
            ForeignItemKind::Static(ref t, m) => {
                hir::ForeignItemStatic(lower_ty(lctx, t), m)
            }
        },
        vis: lower_visibility(lctx, &i.vis),
        span: i.span,
    }
}

pub fn lower_method_sig(lctx: &LoweringContext, sig: &MethodSig) -> hir::MethodSig {
    hir::MethodSig {
        generics: lower_generics(lctx, &sig.generics),
        abi: sig.abi,
        explicit_self: lower_explicit_self(lctx, &sig.explicit_self),
        unsafety: lower_unsafety(lctx, sig.unsafety),
        constness: lower_constness(lctx, sig.constness),
        decl: lower_fn_decl(lctx, &sig.decl),
    }
}

pub fn lower_unsafety(_lctx: &LoweringContext, u: Unsafety) -> hir::Unsafety {
    match u {
        Unsafety::Unsafe => hir::Unsafety::Unsafe,
        Unsafety::Normal => hir::Unsafety::Normal,
    }
}

pub fn lower_constness(_lctx: &LoweringContext, c: Constness) -> hir::Constness {
    match c {
        Constness::Const => hir::Constness::Const,
        Constness::NotConst => hir::Constness::NotConst,
    }
}

pub fn lower_unop(_lctx: &LoweringContext, u: UnOp) -> hir::UnOp {
    match u {
        UnOp::Deref => hir::UnDeref,
        UnOp::Not => hir::UnNot,
        UnOp::Neg => hir::UnNeg,
    }
}

pub fn lower_binop(_lctx: &LoweringContext, b: BinOp) -> hir::BinOp {
    Spanned {
        node: match b.node {
            BinOpKind::Add => hir::BiAdd,
            BinOpKind::Sub => hir::BiSub,
            BinOpKind::Mul => hir::BiMul,
            BinOpKind::Div => hir::BiDiv,
            BinOpKind::Rem => hir::BiRem,
            BinOpKind::And => hir::BiAnd,
            BinOpKind::Or => hir::BiOr,
            BinOpKind::BitXor => hir::BiBitXor,
            BinOpKind::BitAnd => hir::BiBitAnd,
            BinOpKind::BitOr => hir::BiBitOr,
            BinOpKind::Shl => hir::BiShl,
            BinOpKind::Shr => hir::BiShr,
            BinOpKind::Eq => hir::BiEq,
            BinOpKind::Lt => hir::BiLt,
            BinOpKind::Le => hir::BiLe,
            BinOpKind::Ne => hir::BiNe,
            BinOpKind::Ge => hir::BiGe,
            BinOpKind::Gt => hir::BiGt,
        },
        span: b.span,
    }
}

pub fn lower_pat(lctx: &LoweringContext, p: &Pat) -> P<hir::Pat> {
    P(hir::Pat {
        id: p.id,
        node: match p.node {
            PatKind::Wild => hir::PatKind::Wild,
            PatKind::Ident(ref binding_mode, pth1, ref sub) => {
                hir::PatKind::Ident(lower_binding_mode(lctx, binding_mode),
                                    respan(pth1.span, lower_ident(lctx, pth1.node)),
                                    sub.as_ref().map(|x| lower_pat(lctx, x)))
            }
            PatKind::Lit(ref e) => hir::PatKind::Lit(lower_expr(lctx, e)),
            PatKind::TupleStruct(ref pth, ref pats) => {
                hir::PatKind::TupleStruct(lower_path(lctx, pth),
                                          pats.as_ref()
                                              .map(|pats| pats.iter().map(|x| lower_pat(lctx, x)).collect()))
            }
            PatKind::Path(ref pth) => {
                hir::PatKind::Path(lower_path(lctx, pth))
            }
            PatKind::QPath(ref qself, ref pth) => {
                let qself = hir::QSelf {
                    ty: lower_ty(lctx, &qself.ty),
                    position: qself.position,
                };
                hir::PatKind::QPath(qself, lower_path(lctx, pth))
            }
            PatKind::Struct(ref pth, ref fields, etc) => {
                let pth = lower_path(lctx, pth);
                let fs = fields.iter()
                               .map(|f| {
                                   Spanned {
                                       span: f.span,
                                       node: hir::FieldPat {
                                           name: f.node.ident.name,
                                           pat: lower_pat(lctx, &f.node.pat),
                                           is_shorthand: f.node.is_shorthand,
                                       },
                                   }
                               })
                               .collect();
                hir::PatKind::Struct(pth, fs, etc)
            }
            PatKind::Tup(ref elts) => {
                hir::PatKind::Tup(elts.iter().map(|x| lower_pat(lctx, x)).collect())
            }
            PatKind::Box(ref inner) => hir::PatKind::Box(lower_pat(lctx, inner)),
            PatKind::Ref(ref inner, mutbl) => {
                hir::PatKind::Ref(lower_pat(lctx, inner), lower_mutability(lctx, mutbl))
            }
            PatKind::Range(ref e1, ref e2) => {
                hir::PatKind::Range(lower_expr(lctx, e1), lower_expr(lctx, e2))
            }
            PatKind::Vec(ref before, ref slice, ref after) => {
                hir::PatKind::Vec(before.iter().map(|x| lower_pat(lctx, x)).collect(),
                                  slice.as_ref().map(|x| lower_pat(lctx, x)),
                                  after.iter().map(|x| lower_pat(lctx, x)).collect())
            }
            PatKind::Mac(_) => panic!("Shouldn't exist here"),
        },
        span: p.span,
    })
}

pub fn lower_expr(lctx: &LoweringContext, e: &Expr) -> P<hir::Expr> {
    P(hir::Expr {
        id: e.id,
        node: match e.node {
            // Eventually a desugaring for `box EXPR`
            // (similar to the desugaring above for `in PLACE BLOCK`)
            // should go here, desugaring
            //
            //   let mut place = BoxPlace::make_place();
            //   let raw_place = Place::pointer(&mut place);
            //   let value = $value;
            //   ::std::ptr::write(raw_place, value);
            //   Boxed::finalize(place)
            //
            // But for now there are type-inference issues doing that.
            ExprKind::Box(ref e) => {
                hir::ExprBox(lower_expr(lctx, e))
            }

            // Desugar ExprBox: `in (PLACE) EXPR`
            ExprKind::InPlace(ref placer, ref value_expr) => {
                // to:
                //
                // let p = PLACE;
                // let mut place = Placer::make_place(p);
                // let raw_place = Place::pointer(&mut place);
                // push_unsafe!({
                //     std::intrinsics::move_val_init(raw_place, pop_unsafe!( EXPR ));
                //     InPlace::finalize(place)
                // })
                return cache_ids(lctx, e.id, |lctx| {
                    let placer_expr = lower_expr(lctx, placer);
                    let value_expr = lower_expr(lctx, value_expr);

                    let placer_ident = lctx.str_to_ident("placer");
                    let place_ident = lctx.str_to_ident("place");
                    let p_ptr_ident = lctx.str_to_ident("p_ptr");

                    let make_place = ["ops", "Placer", "make_place"];
                    let place_pointer = ["ops", "Place", "pointer"];
                    let move_val_init = ["intrinsics", "move_val_init"];
                    let inplace_finalize = ["ops", "InPlace", "finalize"];

                    let make_call = |lctx: &LoweringContext, p, args| {
                        let path = core_path(lctx, e.span, p);
                        let path = expr_path(lctx, path, None);
                        expr_call(lctx, e.span, path, args, None)
                    };

                    let mk_stmt_let = |lctx: &LoweringContext, bind, expr| {
                        stmt_let(lctx, e.span, false, bind, expr, None)
                    };

                    let mk_stmt_let_mut = |lctx: &LoweringContext, bind, expr| {
                        stmt_let(lctx, e.span, true, bind, expr, None)
                    };

                    // let placer = <placer_expr> ;
                    let s1 = {
                        let placer_expr = signal_block_expr(lctx,
                                                            hir_vec![],
                                                            placer_expr,
                                                            e.span,
                                                            hir::PopUnstableBlock,
                                                            None);
                        mk_stmt_let(lctx, placer_ident, placer_expr)
                    };

                    // let mut place = Placer::make_place(placer);
                    let s2 = {
                        let placer = expr_ident(lctx, e.span, placer_ident, None);
                        let call = make_call(lctx, &make_place, hir_vec![placer]);
                        mk_stmt_let_mut(lctx, place_ident, call)
                    };

                    // let p_ptr = Place::pointer(&mut place);
                    let s3 = {
                        let agent = expr_ident(lctx, e.span, place_ident, None);
                        let args = hir_vec![expr_mut_addr_of(lctx, e.span, agent, None)];
                        let call = make_call(lctx, &place_pointer, args);
                        mk_stmt_let(lctx, p_ptr_ident, call)
                    };

                    // pop_unsafe!(EXPR));
                    let pop_unsafe_expr = {
                        let value_expr = signal_block_expr(lctx,
                                                           hir_vec![],
                                                           value_expr,
                                                           e.span,
                                                           hir::PopUnstableBlock,
                                                           None);
                        signal_block_expr(lctx,
                                          hir_vec![],
                                          value_expr,
                                          e.span,
                                          hir::PopUnsafeBlock(hir::CompilerGenerated), None)
                    };

                    // push_unsafe!({
                    //     std::intrinsics::move_val_init(raw_place, pop_unsafe!( EXPR ));
                    //     InPlace::finalize(place)
                    // })
                    let expr = {
                        let ptr = expr_ident(lctx, e.span, p_ptr_ident, None);
                        let call_move_val_init =
                            hir::StmtSemi(
                                make_call(lctx, &move_val_init, hir_vec![ptr, pop_unsafe_expr]),
                                lctx.next_id());
                        let call_move_val_init = respan(e.span, call_move_val_init);

                        let place = expr_ident(lctx, e.span, place_ident, None);
                        let call = make_call(lctx, &inplace_finalize, hir_vec![place]);
                        signal_block_expr(lctx,
                                          hir_vec![call_move_val_init],
                                          call,
                                          e.span,
                                          hir::PushUnsafeBlock(hir::CompilerGenerated), None)
                    };

                    signal_block_expr(lctx,
                                      hir_vec![s1, s2, s3],
                                      expr,
                                      e.span,
                                      hir::PushUnstableBlock,
                                      e.attrs.clone())
                });
            }
            ExprKind::Vec(ref exprs) => {
                hir::ExprVec(exprs.iter().map(|x| lower_expr(lctx, x)).collect())
            }
            ExprKind::Repeat(ref expr, ref count) => {
                let expr = lower_expr(lctx, expr);
                let count = lower_expr(lctx, count);
                hir::ExprRepeat(expr, count)
            }
            ExprKind::Tup(ref elts) => {
                hir::ExprTup(elts.iter().map(|x| lower_expr(lctx, x)).collect())
            }
            ExprKind::Call(ref f, ref args) => {
                let f = lower_expr(lctx, f);
                hir::ExprCall(f, args.iter().map(|x| lower_expr(lctx, x)).collect())
            }
            ExprKind::MethodCall(i, ref tps, ref args) => {
                let tps = tps.iter().map(|x| lower_ty(lctx, x)).collect();
                let args = args.iter().map(|x| lower_expr(lctx, x)).collect();
                hir::ExprMethodCall(respan(i.span, i.node.name), tps, args)
            }
            ExprKind::Binary(binop, ref lhs, ref rhs) => {
                let binop = lower_binop(lctx, binop);
                let lhs = lower_expr(lctx, lhs);
                let rhs = lower_expr(lctx, rhs);
                hir::ExprBinary(binop, lhs, rhs)
            }
            ExprKind::Unary(op, ref ohs) => {
                let op = lower_unop(lctx, op);
                let ohs = lower_expr(lctx, ohs);
                hir::ExprUnary(op, ohs)
            }
            ExprKind::Lit(ref l) => hir::ExprLit(P((**l).clone())),
            ExprKind::Cast(ref expr, ref ty) => {
                let expr = lower_expr(lctx, expr);
                hir::ExprCast(expr, lower_ty(lctx, ty))
            }
            ExprKind::Type(ref expr, ref ty) => {
                let expr = lower_expr(lctx, expr);
                hir::ExprType(expr, lower_ty(lctx, ty))
            }
            ExprKind::AddrOf(m, ref ohs) => {
                let m = lower_mutability(lctx, m);
                let ohs = lower_expr(lctx, ohs);
                hir::ExprAddrOf(m, ohs)
            }
            // More complicated than you might expect because the else branch
            // might be `if let`.
            ExprKind::If(ref cond, ref blk, ref else_opt) => {
                let else_opt = else_opt.as_ref().map(|els| {
                    match els.node {
                        ExprKind::IfLet(..) => {
                            cache_ids(lctx, e.id, |lctx| {
                                // wrap the if-let expr in a block
                                let span = els.span;
                                let els = lower_expr(lctx, els);
                                let id = lctx.next_id();
                                let blk = P(hir::Block {
                                    stmts: hir_vec![],
                                    expr: Some(els),
                                    id: id,
                                    rules: hir::DefaultBlock,
                                    span: span,
                                });
                                expr_block(lctx, blk, None)
                            })
                        }
                        _ => lower_expr(lctx, els),
                    }
                });

                hir::ExprIf(lower_expr(lctx, cond), lower_block(lctx, blk), else_opt)
            }
            ExprKind::While(ref cond, ref body, opt_ident) => {
                hir::ExprWhile(lower_expr(lctx, cond), lower_block(lctx, body),
                               opt_ident.map(|ident| lower_ident(lctx, ident)))
            }
            ExprKind::Loop(ref body, opt_ident) => {
                hir::ExprLoop(lower_block(lctx, body),
                              opt_ident.map(|ident| lower_ident(lctx, ident)))
            }
            ExprKind::Match(ref expr, ref arms) => {
                hir::ExprMatch(lower_expr(lctx, expr),
                               arms.iter().map(|x| lower_arm(lctx, x)).collect(),
                               hir::MatchSource::Normal)
            }
            ExprKind::Closure(capture_clause, ref decl, ref body) => {
                hir::ExprClosure(lower_capture_clause(lctx, capture_clause),
                                 lower_fn_decl(lctx, decl),
                                 lower_block(lctx, body))
            }
            ExprKind::Block(ref blk) => hir::ExprBlock(lower_block(lctx, blk)),
            ExprKind::Assign(ref el, ref er) => {
                hir::ExprAssign(lower_expr(lctx, el), lower_expr(lctx, er))
            }
            ExprKind::AssignOp(op, ref el, ref er) => {
                hir::ExprAssignOp(lower_binop(lctx, op),
                                  lower_expr(lctx, el),
                                  lower_expr(lctx, er))
            }
            ExprKind::Field(ref el, ident) => {
                hir::ExprField(lower_expr(lctx, el), respan(ident.span, ident.node.name))
            }
            ExprKind::TupField(ref el, ident) => {
                hir::ExprTupField(lower_expr(lctx, el), ident)
            }
            ExprKind::Index(ref el, ref er) => {
                hir::ExprIndex(lower_expr(lctx, el), lower_expr(lctx, er))
            }
            ExprKind::Range(ref e1, ref e2, lims) => {
                fn make_struct(lctx: &LoweringContext,
                               ast_expr: &Expr,
                               path: &[&str],
                               fields: &[(&str, &P<Expr>)]) -> P<hir::Expr> {
                    let strs = std_path(lctx, &iter::once(&"ops")
                                                    .chain(path)
                                                    .map(|s| *s)
                                                    .collect::<Vec<_>>());

                    let structpath = path_global(ast_expr.span, strs);

                    let hir_expr = if fields.len() == 0 {
                        expr_path(lctx,
                                  structpath,
                                  ast_expr.attrs.clone())
                    } else {
                        expr_struct(lctx,
                                    ast_expr.span,
                                    structpath,
                                    fields.into_iter().map(|&(s, e)| {
                                        field(token::intern(s),
                                              signal_block_expr(lctx,
                                                                hir_vec![],
                                                                lower_expr(lctx, &**e),
                                                                e.span,
                                                                hir::PopUnstableBlock,
                                                                None),
                                              ast_expr.span)
                                    }).collect(),
                                    None,
                                    ast_expr.attrs.clone())
                    };

                    signal_block_expr(lctx,
                                      hir_vec![],
                                      hir_expr,
                                      ast_expr.span,
                                      hir::PushUnstableBlock,
                                      None)
                }

                return cache_ids(lctx, e.id, |lctx| {
                    use syntax::ast::RangeLimits::*;

                    match (e1, e2, lims) {
                        (&None, &None, HalfOpen) =>
                            make_struct(lctx, e, &["RangeFull"],
                                        &[]),

                        (&Some(ref e1), &None, HalfOpen) =>
                            make_struct(lctx, e, &["RangeFrom"],
                                        &[("start", e1)]),

                        (&None, &Some(ref e2), HalfOpen) =>
                            make_struct(lctx, e, &["RangeTo"],
                                        &[("end", e2)]),

                        (&Some(ref e1), &Some(ref e2), HalfOpen) =>
                            make_struct(lctx, e, &["Range"],
                                        &[("start", e1), ("end", e2)]),

                        (&None, &Some(ref e2), Closed) =>
                            make_struct(lctx, e, &["RangeToInclusive"],
                                        &[("end", e2)]),

                        (&Some(ref e1), &Some(ref e2), Closed) =>
                            make_struct(lctx, e, &["RangeInclusive", "NonEmpty"],
                                        &[("start", e1), ("end", e2)]),

                        _ => panic!(lctx.diagnostic().span_fatal(e.span,
                                                                 "inclusive range with no end"))
                    }
                });
            }
            ExprKind::Path(ref qself, ref path) => {
                let hir_qself = qself.as_ref().map(|&QSelf { ref ty, position }| {
                    hir::QSelf {
                        ty: lower_ty(lctx, ty),
                        position: position,
                    }
                });
                hir::ExprPath(hir_qself, lower_path_full(lctx, path, qself.is_none()))
            }
            ExprKind::Break(opt_ident) => hir::ExprBreak(opt_ident.map(|sp_ident| {
                respan(sp_ident.span, lower_ident(lctx, sp_ident.node))
            })),
            ExprKind::Again(opt_ident) => hir::ExprAgain(opt_ident.map(|sp_ident| {
                respan(sp_ident.span, lower_ident(lctx, sp_ident.node))
            })),
            ExprKind::Ret(ref e) => hir::ExprRet(e.as_ref().map(|x| lower_expr(lctx, x))),
            ExprKind::InlineAsm(InlineAsm {
                    ref asm,
                    asm_str_style,
                    ref outputs,
                    ref inputs,
                    ref clobbers,
                    volatile,
                    alignstack,
                    dialect,
                    expn_id,
                }) => hir::ExprInlineAsm(hir::InlineAsm {
                inputs: inputs.iter().map(|&(ref c, _)| c.clone()).collect(),
                outputs: outputs.iter()
                                .map(|out| {
                                    hir::InlineAsmOutput {
                                        constraint: out.constraint.clone(),
                                        is_rw: out.is_rw,
                                        is_indirect: out.is_indirect,
                                    }
                                })
                                .collect(),
                asm: asm.clone(),
                asm_str_style: asm_str_style,
                clobbers: clobbers.clone().into(),
                volatile: volatile,
                alignstack: alignstack,
                dialect: dialect,
                expn_id: expn_id,
            }, outputs.iter().map(|out| lower_expr(lctx, &out.expr)).collect(),
               inputs.iter().map(|&(_, ref input)| lower_expr(lctx, input)).collect()),
            ExprKind::Struct(ref path, ref fields, ref maybe_expr) => {
                hir::ExprStruct(lower_path(lctx, path),
                                fields.iter().map(|x| lower_field(lctx, x)).collect(),
                                maybe_expr.as_ref().map(|x| lower_expr(lctx, x)))
            }
            ExprKind::Paren(ref ex) => {
                // merge attributes into the inner expression.
                return lower_expr(lctx, ex).map(|mut ex| {
                    ex.attrs.update(|attrs| {
                        attrs.prepend(e.attrs.clone())
                    });
                    ex
                });
            }

            // Desugar ExprIfLet
            // From: `if let <pat> = <sub_expr> <body> [<else_opt>]`
            ExprKind::IfLet(ref pat, ref sub_expr, ref body, ref else_opt) => {
                // to:
                //
                //   match <sub_expr> {
                //     <pat> => <body>,
                //     [_ if <else_opt_if_cond> => <else_opt_if_body>,]
                //     _ => [<else_opt> | ()]
                //   }
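                //
                // A concrete instance (illustrative only, the names are hypothetical):
                // `if let Some(x) = opt { f(x) } else { g() }` becomes roughly
                //   match opt {
                //       Some(x) => { f(x) }
                //       _ => { g() }
                //   }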
                return cache_ids(lctx, e.id, |lctx| {
                    // `<pat> => <body>`
                    let pat_arm = {
                        let body = lower_block(lctx, body);
                        let body_expr = expr_block(lctx, body, None);
                        arm(hir_vec![lower_pat(lctx, pat)], body_expr)
                    };

                    // `[_ if <else_opt_if_cond> => <else_opt_if_body>,]`
                    let mut else_opt = else_opt.as_ref().map(|e| lower_expr(lctx, e));
                    let else_if_arms = {
                        let mut arms = vec![];
                        loop {
                            let else_opt_continue = else_opt.and_then(|els| {
                                els.and_then(|els| {
                                    match els.node {
                                        // else if
                                        hir::ExprIf(cond, then, else_opt) => {
                                            let pat_under = pat_wild(lctx, e.span);
                                            arms.push(hir::Arm {
                                                attrs: hir_vec![],
                                                pats: hir_vec![pat_under],
                                                guard: Some(cond),
                                                body: expr_block(lctx, then, None),
                                            });
                                            else_opt.map(|else_opt| (else_opt, true))
                                        }
                                        _ => Some((P(els), false)),
                                    }
                                })
                            });
                            match else_opt_continue {
                                Some((e, true)) => {
                                    else_opt = Some(e);
                                }
                                Some((e, false)) => {
                                    else_opt = Some(e);
                                    break;
                                }
                                None => {
                                    else_opt = None;
                                    break;
                                }
                            }
                        }
                        arms
                    };

                    let contains_else_clause = else_opt.is_some();

                    // `_ => [<else_opt> | ()]`
                    let else_arm = {
                        let pat_under = pat_wild(lctx, e.span);
                        let else_expr =
                            else_opt.unwrap_or_else(
                                || expr_tuple(lctx, e.span, hir_vec![], None));
                        arm(hir_vec![pat_under], else_expr)
                    };

                    let mut arms = Vec::with_capacity(else_if_arms.len() + 2);
                    arms.push(pat_arm);
                    arms.extend(else_if_arms);
                    arms.push(else_arm);

                    let sub_expr = lower_expr(lctx, sub_expr);
                    // add attributes to the outer returned expr node
                    expr(lctx,
                         e.span,
                         hir::ExprMatch(sub_expr,
                                        arms.into(),
                                        hir::MatchSource::IfLetDesugar {
                                            contains_else_clause: contains_else_clause,
                                        }),
                         e.attrs.clone())
                });
            }

            // Desugar ExprWhileLet
            // From: `[opt_ident]: while let <pat> = <sub_expr> <body>`
            ExprKind::WhileLet(ref pat, ref sub_expr, ref body, opt_ident) => {
                // to:
                //
                //   [opt_ident]: loop {
                //     match <sub_expr> {
                //       <pat> => <body>,
                //       _ => break
                //     }
                //   }
                return cache_ids(lctx, e.id, |lctx| {
                    // `<pat> => <body>`
                    let pat_arm = {
                        let body = lower_block(lctx, body);
                        let body_expr = expr_block(lctx, body, None);
                        arm(hir_vec![lower_pat(lctx, pat)], body_expr)
                    };

                    // `_ => break`
                    let break_arm = {
                        let pat_under = pat_wild(lctx, e.span);
                        let break_expr = expr_break(lctx, e.span, None);
                        arm(hir_vec![pat_under], break_expr)
                    };

                    // `match <sub_expr> { ... }`
                    let arms = hir_vec![pat_arm, break_arm];
                    let sub_expr = lower_expr(lctx, sub_expr);
                    let match_expr = expr(lctx,
                                          e.span,
                                          hir::ExprMatch(sub_expr,
                                                         arms,
                                                         hir::MatchSource::WhileLetDesugar),
                                          None);

                    // `[opt_ident]: loop { ... }`
                    let loop_block = block_expr(lctx, match_expr);
                    let loop_expr = hir::ExprLoop(loop_block,
                                                  opt_ident.map(|ident| lower_ident(lctx, ident)));
                    // add attributes to the outer returned expr node
                    expr(lctx, e.span, loop_expr, e.attrs.clone())
                });
            }

            // Desugar ExprForLoop
            // From: `[opt_ident]: for <pat> in <head> <body>`
            ExprKind::ForLoop(ref pat, ref head, ref body, opt_ident) => {
                // to:
                //
                //   {
                //     let result = match ::std::iter::IntoIterator::into_iter(<head>) {
                //       mut iter => {
                //         [opt_ident]: loop {
                //           match ::std::iter::Iterator::next(&mut iter) {
                //             ::std::option::Option::Some(<pat>) => <body>,
                //             ::std::option::Option::None => break
                //           }
                //         }
                //       }
                //     };
                //     result
                //   }
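                //
                // A concrete instance (illustrative only, `v` and `f` are hypothetical):
                // `for x in v { f(x) }` becomes roughly
                //   match IntoIterator::into_iter(v) {
                //       mut iter => loop {
                //           match Iterator::next(&mut iter) {
                //               Some(x) => { f(x) }
                //               None => break,
                //           }
                //       },
                //   }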
                return cache_ids(lctx, e.id, |lctx| {
                    // expand <head>
                    let head = lower_expr(lctx, head);

                    let iter = lctx.str_to_ident("iter");

                    // `::std::option::Option::Some(<pat>) => <body>`
                    let pat_arm = {
                        let body_block = lower_block(lctx, body);
                        let body_span = body_block.span;
                        let body_expr = P(hir::Expr {
                            id: lctx.next_id(),
                            node: hir::ExprBlock(body_block),
                            span: body_span,
                            attrs: None,
                        });
                        let pat = lower_pat(lctx, pat);
                        let some_pat = pat_some(lctx, e.span, pat);

                        arm(hir_vec![some_pat], body_expr)
                    };

                    // `::std::option::Option::None => break`
                    let break_arm = {
                        let break_expr = expr_break(lctx, e.span, None);

                        arm(hir_vec![pat_none(lctx, e.span)], break_expr)
                    };

                    // `match ::std::iter::Iterator::next(&mut iter) { ... }`
                    let match_expr = {
                        let next_path = {
                            let strs = std_path(lctx, &["iter", "Iterator", "next"]);

                            path_global(e.span, strs)
                        };
                        let iter = expr_ident(lctx, e.span, iter, None);
                        let ref_mut_iter = expr_mut_addr_of(lctx, e.span, iter, None);
                        let next_path = expr_path(lctx, next_path, None);
                        let next_expr = expr_call(lctx,
                                                  e.span,
                                                  next_path,
                                                  hir_vec![ref_mut_iter],
                                                  None);
                        let arms = hir_vec![pat_arm, break_arm];

                        expr(lctx,
                             e.span,
                             hir::ExprMatch(next_expr, arms, hir::MatchSource::ForLoopDesugar),
                             None)
                    };

                    // `[opt_ident]: loop { ... }`
                    let loop_block = block_expr(lctx, match_expr);
                    let loop_expr = hir::ExprLoop(loop_block,
                                                  opt_ident.map(|ident| lower_ident(lctx, ident)));
                    let loop_expr = expr(lctx, e.span, loop_expr, None);

                    // `mut iter => { ... }`
                    let iter_arm = {
                        let iter_pat = pat_ident_binding_mode(lctx,
                                                              e.span,
                                                              iter,
                                                              hir::BindByValue(hir::MutMutable));
                        arm(hir_vec![iter_pat], loop_expr)
                    };

                    // `match ::std::iter::IntoIterator::into_iter(<head>) { ... }`
                    let into_iter_expr = {
                        let into_iter_path = {
                            let strs = std_path(lctx, &["iter", "IntoIterator", "into_iter"]);

                            path_global(e.span, strs)
                        };

                        let into_iter = expr_path(lctx, into_iter_path, None);

                        expr_call(lctx, e.span, into_iter, hir_vec![head], None)
                    };

                    let match_expr = expr_match(lctx,
                                                e.span,
                                                into_iter_expr,
                                                hir_vec![iter_arm],
                                                hir::MatchSource::ForLoopDesugar,
                                                None);

                    // `{ let _result = ...; _result }`
                    // underscore prevents an unused_variables lint if the head diverges
                    let result_ident = lctx.str_to_ident("_result");
                    let let_stmt = stmt_let(lctx, e.span, false, result_ident, match_expr, None);
                    let result = expr_ident(lctx, e.span, result_ident, None);
                    let block = block_all(lctx, e.span, hir_vec![let_stmt], Some(result));
                    // add the attributes to the outer returned expr node
                    expr_block(lctx, block, e.attrs.clone())
                });
            }

            // Desugar ExprKind::Try
            // From: `<expr>?`
            ExprKind::Try(ref sub_expr) => {
                // to:
                //
                //   match <expr> {
                //     Ok(val) => val,
                //     Err(err) => {
                //       return Err(From::from(err))
                //     }
                //   }
                return cache_ids(lctx, e.id, |lctx| {
                    // expand <expr>
                    let sub_expr = lower_expr(lctx, sub_expr);

                    // Ok(val) => val
                    let ok_arm = {
                        let val_ident = lctx.str_to_ident("val");
                        let val_pat = pat_ident(lctx, e.span, val_ident);
                        let val_expr = expr_ident(lctx, e.span, val_ident, None);
                        let ok_pat = pat_ok(lctx, e.span, val_pat);

                        arm(hir_vec![ok_pat], val_expr)
                    };

                    // Err(err) => return Err(From::from(err))
                    let err_arm = {
                        let err_ident = lctx.str_to_ident("err");
                        let from_expr = {
                            let path = std_path(lctx, &["convert", "From", "from"]);
                            let path = path_global(e.span, path);
                            let from = expr_path(lctx, path, None);
                            let err_expr = expr_ident(lctx, e.span, err_ident, None);

                            expr_call(lctx, e.span, from, hir_vec![err_expr], None)
                        };
                        let err_expr = {
                            let path = std_path(lctx, &["result", "Result", "Err"]);
                            let path = path_global(e.span, path);
                            let err_ctor = expr_path(lctx, path, None);
                            expr_call(lctx, e.span, err_ctor, hir_vec![from_expr], None)
                        };
                        let err_pat = pat_err(lctx, e.span, pat_ident(lctx, e.span, err_ident));
                        let ret_expr = expr(lctx, e.span,
                                            hir::Expr_::ExprRet(Some(err_expr)), None);

                        arm(hir_vec![err_pat], ret_expr)
                    };

                    expr_match(lctx, e.span, sub_expr, hir_vec![err_arm, ok_arm],
                               hir::MatchSource::TryDesugar, None)
                });
            }

            ExprKind::Mac(_) => panic!("Shouldn't exist here"),
        },
        span: e.span,
        attrs: e.attrs.clone(),
    })
}

pub fn lower_stmt(lctx: &LoweringContext, s: &Stmt) -> hir::Stmt {
    match s.node {
        StmtKind::Decl(ref d, id) => {
            Spanned {
                node: hir::StmtDecl(lower_decl(lctx, d), id),
                span: s.span,
            }
        }
        StmtKind::Expr(ref e, id) => {
            Spanned {
                node: hir::StmtExpr(lower_expr(lctx, e), id),
                span: s.span,
            }
        }
        StmtKind::Semi(ref e, id) => {
            Spanned {
                node: hir::StmtSemi(lower_expr(lctx, e), id),
                span: s.span,
            }
        }
        StmtKind::Mac(..) => panic!("Shouldn't exist here"),
    }
}

pub fn lower_capture_clause(_lctx: &LoweringContext, c: CaptureBy) -> hir::CaptureClause {
    match c {
        CaptureBy::Value => hir::CaptureByValue,
        CaptureBy::Ref => hir::CaptureByRef,
    }
}

pub fn lower_visibility(lctx: &LoweringContext, v: &Visibility) -> hir::Visibility {
    match *v {
        Visibility::Public => hir::Public,
        Visibility::Inherited => hir::Inherited,
        _ => panic!(lctx.diagnostic().fatal("pub(restricted) is not implemented yet!")),
    }
}

pub fn lower_defaultness(_lctx: &LoweringContext, d: Defaultness) -> hir::Defaultness {
    match d {
        Defaultness::Default => hir::Defaultness::Default,
        Defaultness::Final => hir::Defaultness::Final,
    }
}

pub fn lower_block_check_mode(lctx: &LoweringContext, b: &BlockCheckMode) -> hir::BlockCheckMode {
    match *b {
        BlockCheckMode::Default => hir::DefaultBlock,
        BlockCheckMode::Unsafe(u) => hir::UnsafeBlock(lower_unsafe_source(lctx, u)),
    }
}

pub fn lower_binding_mode(lctx: &LoweringContext, b: &BindingMode) -> hir::BindingMode {
    match *b {
        BindingMode::ByRef(m) => hir::BindByRef(lower_mutability(lctx, m)),
        BindingMode::ByValue(m) => hir::BindByValue(lower_mutability(lctx, m)),
    }
}

pub fn lower_unsafe_source(_lctx: &LoweringContext, u: UnsafeSource) -> hir::UnsafeSource {
    match u {
        CompilerGenerated => hir::CompilerGenerated,
        UserProvided => hir::UserProvided,
    }
}

pub fn lower_impl_polarity(_lctx: &LoweringContext, i: ImplPolarity) -> hir::ImplPolarity {
    match i {
        ImplPolarity::Positive => hir::ImplPolarity::Positive,
        ImplPolarity::Negative => hir::ImplPolarity::Negative,
    }
}

pub fn lower_trait_bound_modifier(_lctx: &LoweringContext,
                                  f: TraitBoundModifier)
                                  -> hir::TraitBoundModifier {
    match f {
        TraitBoundModifier::None => hir::TraitBoundModifier::None,
        TraitBoundModifier::Maybe => hir::TraitBoundModifier::Maybe,
    }
}

// Helper methods for building HIR.

fn arm(pats: hir::HirVec<P<hir::Pat>>, expr: P<hir::Expr>) -> hir::Arm {
    hir::Arm {
        attrs: hir_vec![],
        pats: pats,
        guard: None,
        body: expr,
    }
}

fn field(name: Name, expr: P<hir::Expr>, span: Span) -> hir::Field {
    hir::Field {
        name: Spanned {
            node: name,
            span: span,
        },
        expr: expr,
        span: span,
    }
}

fn expr_break(lctx: &LoweringContext, span: Span,
              attrs: ThinAttributes) -> P<hir::Expr> {
    expr(lctx, span, hir::ExprBreak(None), attrs)
}

fn expr_call(lctx: &LoweringContext,
             span: Span,
             e: P<hir::Expr>,
             args: hir::HirVec<P<hir::Expr>>,
             attrs: ThinAttributes)
             -> P<hir::Expr> {
    expr(lctx, span, hir::ExprCall(e, args), attrs)
}

fn expr_ident(lctx: &LoweringContext, span: Span, id: hir::Ident,
              attrs: ThinAttributes) -> P<hir::Expr> {
    expr_path(lctx, path_ident(span, id), attrs)
}

fn expr_mut_addr_of(lctx: &LoweringContext, span: Span, e: P<hir::Expr>,
                    attrs: ThinAttributes) -> P<hir::Expr> {
    expr(lctx, span, hir::ExprAddrOf(hir::MutMutable, e), attrs)
}

fn expr_path(lctx: &LoweringContext, path: hir::Path,
             attrs: ThinAttributes) -> P<hir::Expr> {
    expr(lctx, path.span, hir::ExprPath(None, path), attrs)
}

fn expr_match(lctx: &LoweringContext,
              span: Span,
              arg: P<hir::Expr>,
              arms: hir::HirVec<hir::Arm>,
              source: hir::MatchSource,
              attrs: ThinAttributes)
              -> P<hir::Expr> {
    expr(lctx, span, hir::ExprMatch(arg, arms, source), attrs)
}

fn expr_block(lctx: &LoweringContext, b: P<hir::Block>,
              attrs: ThinAttributes) -> P<hir::Expr> {
    expr(lctx, b.span, hir::ExprBlock(b), attrs)
}

fn expr_tuple(lctx: &LoweringContext, sp: Span, exprs: hir::HirVec<P<hir::Expr>>,
              attrs: ThinAttributes) -> P<hir::Expr> {
    expr(lctx, sp, hir::ExprTup(exprs), attrs)
}

fn expr_struct(lctx: &LoweringContext,
               sp: Span,
               path: hir::Path,
               fields: hir::HirVec<hir::Field>,
               e: Option<P<hir::Expr>>,
               attrs: ThinAttributes) -> P<hir::Expr> {
    expr(lctx, sp, hir::ExprStruct(path, fields, e), attrs)
}

fn expr(lctx: &LoweringContext, span: Span, node: hir::Expr_,
        attrs: ThinAttributes) -> P<hir::Expr> {
    P(hir::Expr {
        id: lctx.next_id(),
        node: node,
        span: span,
        attrs: attrs,
    })
}

fn stmt_let(lctx: &LoweringContext,
            sp: Span,
            mutbl: bool,
            ident: hir::Ident,
            ex: P<hir::Expr>,
            attrs: ThinAttributes)
            -> hir::Stmt {
    let pat = if mutbl {
        pat_ident_binding_mode(lctx, sp, ident, hir::BindByValue(hir::MutMutable))
    } else {
        pat_ident(lctx, sp, ident)
    };
    let local = P(hir::Local {
        pat: pat,
        ty: None,
        init: Some(ex),
        id: lctx.next_id(),
        span: sp,
        attrs: attrs,
    });
    let decl = respan(sp, hir::DeclLocal(local));
    respan(sp, hir::StmtDecl(P(decl), lctx.next_id()))
}

fn block_expr(lctx: &LoweringContext, expr: P<hir::Expr>) -> P<hir::Block> {
    block_all(lctx, expr.span, hir::HirVec::new(), Some(expr))
}

fn block_all(lctx: &LoweringContext,
             span: Span,
             stmts: hir::HirVec<hir::Stmt>,
             expr: Option<P<hir::Expr>>)
             -> P<hir::Block> {
    P(hir::Block {
        stmts: stmts,
        expr: expr,
        id: lctx.next_id(),
        rules: hir::DefaultBlock,
        span: span,
    })
}

fn pat_ok(lctx: &LoweringContext, span: Span, pat: P<hir::Pat>) -> P<hir::Pat> {
    let ok = std_path(lctx, &["result", "Result", "Ok"]);
    let path = path_global(span, ok);
    pat_enum(lctx, span, path, hir_vec![pat])
}

fn pat_err(lctx: &LoweringContext, span: Span, pat: P<hir::Pat>) -> P<hir::Pat> {
    let err = std_path(lctx, &["result", "Result", "Err"]);
    let path = path_global(span, err);
    pat_enum(lctx, span, path, hir_vec![pat])
}

fn pat_some(lctx: &LoweringContext, span: Span, pat: P<hir::Pat>) -> P<hir::Pat> {
    let some = std_path(lctx, &["option", "Option", "Some"]);
    let path = path_global(span, some);
    pat_enum(lctx, span, path, hir_vec![pat])
}

fn pat_none(lctx: &LoweringContext, span: Span) -> P<hir::Pat> {
    let none = std_path(lctx, &["option", "Option", "None"]);
    let path = path_global(span, none);
    pat_enum(lctx, span, path, hir_vec![])
}

fn pat_enum(lctx: &LoweringContext,
            span: Span,
            path: hir::Path,
            subpats: hir::HirVec<P<hir::Pat>>)
            -> P<hir::Pat> {
    let pt = if subpats.is_empty() {
        hir::PatKind::Path(path)
    } else {
        hir::PatKind::TupleStruct(path, Some(subpats))
    };
    pat(lctx, span, pt)
}

fn pat_ident(lctx: &LoweringContext, span: Span, ident: hir::Ident) -> P<hir::Pat> {
    pat_ident_binding_mode(lctx, span, ident, hir::BindByValue(hir::MutImmutable))
}

fn pat_ident_binding_mode(lctx: &LoweringContext,
                          span: Span,
                          ident: hir::Ident,
                          bm: hir::BindingMode)
                          -> P<hir::Pat> {
    let pat_ident = hir::PatKind::Ident(bm,
                                        Spanned {
                                            span: span,
                                            node: ident,
                                        },
                                        None);

    pat(lctx, span, pat_ident)
}

fn pat_wild(lctx: &LoweringContext, span: Span) -> P<hir::Pat> {
    pat(lctx, span, hir::PatKind::Wild)
}

fn pat(lctx: &LoweringContext, span: Span, pat: hir::PatKind) -> P<hir::Pat> {
    P(hir::Pat {
        id: lctx.next_id(),
        node: pat,
        span: span,
    })
}

fn path_ident(span: Span, id: hir::Ident) -> hir::Path {
    path(span, vec![id])
}

fn path(span: Span, strs: Vec<hir::Ident>) -> hir::Path {
    path_all(span, false, strs, hir::HirVec::new(), hir::HirVec::new(), hir::HirVec::new())
}

fn path_global(span: Span, strs: Vec<hir::Ident>) -> hir::Path {
    path_all(span, true, strs, hir::HirVec::new(), hir::HirVec::new(), hir::HirVec::new())
}

fn path_all(sp: Span,
            global: bool,
            mut idents: Vec<hir::Ident>,
            lifetimes: hir::HirVec<hir::Lifetime>,
            types: hir::HirVec<P<hir::Ty>>,
            bindings: hir::HirVec<hir::TypeBinding>)
            -> hir::Path {
    let last_identifier = idents.pop().unwrap();
    let mut segments: Vec<hir::PathSegment> = idents.into_iter()
                                                    .map(|ident| {
                                                        hir::PathSegment {
                                                            identifier: ident,
                                                            parameters: hir::PathParameters::none(),
                                                        }
                                                    })
                                                    .collect();
    segments.push(hir::PathSegment {
        identifier: last_identifier,
        parameters: hir::AngleBracketedParameters(hir::AngleBracketedParameterData {
            lifetimes: lifetimes,
            types: types,
            bindings: bindings,
        }),
    });
    hir::Path {
        span: sp,
        global: global,
        segments: segments.into(),
    }
}

fn std_path(lctx: &LoweringContext, components: &[&str]) -> Vec<hir::Ident> {
    let mut v = Vec::new();
    if let Some(s) = lctx.crate_root {
        v.push(hir::Ident::from_name(token::intern(s)));
    }
    v.extend(components.iter().map(|s| hir::Ident::from_name(token::intern(s))));
    v
}

// Given suffix ["b","c","d"], returns path `::std::b::c::d` when
// `fld.cx.use_std`, and `::core::b::c::d` otherwise.
fn core_path(lctx: &LoweringContext, span: Span, components: &[&str]) -> hir::Path {
    let idents = std_path(lctx, components);
    path_global(span, idents)
}

fn signal_block_expr(lctx: &LoweringContext,
                     stmts: hir::HirVec<hir::Stmt>,
                     expr: P<hir::Expr>,
                     span: Span,
                     rule: hir::BlockCheckMode,
                     attrs: ThinAttributes)
                     -> P<hir::Expr> {
    let id = lctx.next_id();
    expr_block(lctx,
               P(hir::Block {
                   rules: rule,
                   span: span,
                   id: id,
                   stmts: stmts,
                   expr: Some(expr),
               }),
               attrs)
}

#[cfg(test)]
mod test {
    use super::*;
    use syntax::ast::{self, NodeId, NodeIdAssigner};
    use syntax::{parse, codemap};
    use syntax::fold::Folder;
    use std::cell::Cell;

    struct MockAssigner {
        next_id: Cell<NodeId>,
    }

    impl MockAssigner {
        fn new() -> MockAssigner {
            MockAssigner { next_id: Cell::new(0) }
        }
    }

    trait FakeExtCtxt {
        fn call_site(&self) -> codemap::Span;
        fn cfg(&self) -> ast::CrateConfig;
        fn ident_of(&self, st: &str) -> ast::Ident;
        fn name_of(&self, st: &str) -> ast::Name;
        fn parse_sess(&self) -> &parse::ParseSess;
    }

    impl FakeExtCtxt for parse::ParseSess {
        fn call_site(&self) -> codemap::Span {
            codemap::Span {
                lo: codemap::BytePos(0),
                hi: codemap::BytePos(0),
                expn_id: codemap::NO_EXPANSION,
            }
        }
        fn cfg(&self) -> ast::CrateConfig {
            Vec::new()
        }
        fn ident_of(&self, st: &str) -> ast::Ident {
            parse::token::str_to_ident(st)
        }
        fn name_of(&self, st: &str) -> ast::Name {
            parse::token::intern(st)
        }
        fn parse_sess(&self) -> &parse::ParseSess {
            self
        }
    }

    impl NodeIdAssigner for MockAssigner {
        fn next_node_id(&self) -> NodeId {
            let result = self.next_id.get();
            self.next_id.set(result + 1);
            result
        }

        fn peek_node_id(&self) -> NodeId {
            self.next_id.get()
        }
    }

    impl Folder for MockAssigner {
        fn new_id(&mut self, old_id: NodeId) -> NodeId {
            assert_eq!(old_id, ast::DUMMY_NODE_ID);
            self.next_node_id()
        }
    }

    #[test]
    fn test_preserves_ids() {
        let cx = parse::ParseSess::new();
        let mut assigner = MockAssigner::new();

        let ast_if_let = quote_expr!(&cx,
                                     if let Some(foo) = baz {
                                         bar(foo);
                                     });
        let ast_if_let = assigner.fold_expr(ast_if_let);
        let ast_while_let = quote_expr!(&cx,
                                        while let Some(foo) = baz {
                                            bar(foo);
                                        });
        let ast_while_let = assigner.fold_expr(ast_while_let);
        let ast_for = quote_expr!(&cx,
                                  for i in 0..10 {
                                      foo(i);
                                  });
        let ast_for = assigner.fold_expr(ast_for);
        let ast_in = quote_expr!(&cx, in HEAP { foo() });
        let ast_in = assigner.fold_expr(ast_in);

        let lctx = LoweringContext::new(&assigner, None);
        let hir1 = lower_expr(&lctx, &ast_if_let);
        let hir2 = lower_expr(&lctx, &ast_if_let);
        assert!(hir1 == hir2);

        let hir1 = lower_expr(&lctx, &ast_while_let);
        let hir2 = lower_expr(&lctx, &ast_while_let);
        assert!(hir1 == hir2);

        let hir1 = lower_expr(&lctx, &ast_for);
        let hir2 = lower_expr(&lctx, &ast_for);
        assert!(hir1 == hir2);

        let hir1 = lower_expr(&lctx, &ast_in);
        let hir2 = lower_expr(&lctx, &ast_in);
        assert!(hir1 == hir2);
    }
}