New upstream version 1.12.0+dfsg1

diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 368a9f0c27e56fd3e721693bd308702c82d5c75f..939425378def699da15025a14cff36795f30c360 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -9,15 +9,15 @@
 // except according to those terms.
 use self::LockstepIterSize::*;
 
-use ast;
-use ast::{TokenTree, TtDelimited, TtToken, TtSequence, Ident};
-use codemap::{Span, DUMMY_SP};
-use diagnostic::SpanHandler;
+use ast::Ident;
+use syntax_pos::{Span, DUMMY_SP};
+use errors::{Handler, DiagnosticBuilder};
 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
-use parse::token::{Eof, DocComment, Interpolated, MatchNt, SubstNt};
-use parse::token::{Token, NtIdent, SpecialMacroVar};
+use parse::token::{DocComment, MatchNt, SubstNt};
+use parse::token::{Token, Interpolated, NtIdent, NtTT, SpecialMacroVar};
 use parse::token;
 use parse::lexer::TokenAndSpan;
+use tokenstream::{self, TokenTree};
 
 use std::rc::Rc;
 use std::ops::Add;
@@ -34,7 +34,7 @@ struct TtFrame {
 
 #[derive(Clone)]
 pub struct TtReader<'a> {
-    pub sp_diag: &'a SpanHandler,
+    pub sp_diag: &'a Handler,
     /// the unzipped tree:
     stack: Vec<TtFrame>,
     /* for MBE-style macro transcription */
@@ -50,16 +50,17 @@ pub struct TtReader<'a> {
     pub cur_span: Span,
     /// Transform doc comments. Only useful in macro invocations
     pub desugar_doc_comments: bool,
+    pub fatal_errs: Vec<DiagnosticBuilder<'a>>,
 }
 
 /// This can do Macro-By-Example transcription. On the other hand, if
-/// `src` contains no `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can
+/// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can
 /// (and should) be None.
-pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
-                         interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
-                         imported_from: Option<Ident>,
-                         src: Vec<ast::TokenTree>)
-                         -> TtReader<'a> {
+pub fn new_tt_reader(sp_diag: &Handler,
+                     interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
+                     imported_from: Option<Ident>,
+                     src: Vec<tokenstream::TokenTree>)
+                     -> TtReader {
     new_tt_reader_with_doc_flag(sp_diag, interp, imported_from, src, false)
 }
 
@@ -67,21 +68,21 @@ pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
 /// like any other attribute which consists of `meta` and surrounding #[ ] tokens.
 ///
 /// This can do Macro-By-Example transcription. On the other hand, if
-/// `src` contains no `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can
+/// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can
 /// (and should) be None.
-pub fn new_tt_reader_with_doc_flag<'a>(sp_diag: &'a SpanHandler,
-                                       interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
-                                       imported_from: Option<Ident>,
-                                       src: Vec<ast::TokenTree>,
-                                       desugar_doc_comments: bool)
-                                       -> TtReader<'a> {
+pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler,
+                                   interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
+                                   imported_from: Option<Ident>,
+                                   src: Vec<tokenstream::TokenTree>,
+                                   desugar_doc_comments: bool)
+                                   -> TtReader {
     let mut r = TtReader {
         sp_diag: sp_diag,
         stack: vec!(TtFrame {
-            forest: TtSequence(DUMMY_SP, Rc::new(ast::SequenceRepetition {
+            forest: TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition {
                 tts: src,
                 // doesn't matter. This merely holds the root unzipping.
-                separator: None, op: ast::ZeroOrMore, num_captures: 0
+                separator: None, op: tokenstream::KleeneOp::ZeroOrMore, num_captures: 0
             })),
             idx: 0,
             dotdotdoted: false,
@@ -99,6 +100,7 @@ pub fn new_tt_reader_with_doc_flag<'a>(sp_diag: &'a SpanHandler,
         /* dummy values, never read: */
         cur_tok: token::Eof,
         cur_span: DUMMY_SP,
+        fatal_errs: Vec::new(),
     };
     tt_next_token(&mut r); /* get cur_tok and cur_span set up */
     r
@@ -140,11 +142,9 @@ impl Add for LockstepIterSize {
                 LisContradiction(_) => other,
                 LisConstraint(r_len, _) if l_len == r_len => self.clone(),
                 LisConstraint(r_len, r_id) => {
-                    let l_n = token::get_ident(l_id.clone());
-                    let r_n = token::get_ident(r_id);
                     LisContradiction(format!("inconsistent lockstep iteration: \
-                                              '{:?}' has {} items, but '{:?}' has {}",
-                                              l_n, l_len, r_n, r_len).to_string())
+                                              '{}' has {} items, but '{}' has {}",
+                                              l_id, l_len, r_id, r_len))
                 }
             },
         }
@@ -153,17 +153,17 @@ impl Add for LockstepIterSize {
 
 fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
     match *t {
-        TtDelimited(_, ref delimed) => {
+        TokenTree::Delimited(_, ref delimed) => {
             delimed.tts.iter().fold(LisUnconstrained, |size, tt| {
                 size + lockstep_iter_size(tt, r)
             })
         },
-        TtSequence(_, ref seq) => {
+        TokenTree::Sequence(_, ref seq) => {
             seq.tts.iter().fold(LisUnconstrained, |size, tt| {
                 size + lockstep_iter_size(tt, r)
             })
         },
-        TtToken(_, SubstNt(name, _)) | TtToken(_, MatchNt(name, _, _, _)) =>
+        TokenTree::Token(_, SubstNt(name)) | TokenTree::Token(_, MatchNt(name, _)) =>
             match lookup_cur_matched(r, name) {
                 Some(matched) => match *matched {
                     MatchedNonterminal(_) => LisUnconstrained,
@@ -171,7 +171,7 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
                 },
                 _ => LisUnconstrained
             },
-        TtToken(..) => LisUnconstrained,
+        TokenTree::Token(..) => LisUnconstrained,
     }
 }
 
@@ -188,7 +188,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
             None => (),
             Some(sp) => {
                 r.cur_span = sp;
-                r.cur_tok = token::Ident(r.imported_from.unwrap(), token::Plain);
+                r.cur_tok = token::Ident(r.imported_from.unwrap());
                 return ret_val;
             },
         }
@@ -225,26 +225,23 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
         } else { /* repeat */
             *r.repeat_idx.last_mut().unwrap() += 1;
             r.stack.last_mut().unwrap().idx = 0;
-            match r.stack.last().unwrap().sep.clone() {
-                Some(tk) => {
-                    r.cur_tok = tk; /* repeat same span, I guess */
-                    return ret_val;
-                }
-                None => {}
+            if let Some(tk) = r.stack.last().unwrap().sep.clone() {
+                r.cur_tok = tk; // repeat same span, I guess
+                return ret_val;
             }
         }
     }
-    loop { /* because it's easiest, this handles `TtDelimited` not starting
-              with a `TtToken`, even though it won't happen */
+    loop { /* because it's easiest, this handles `TokenTree::Delimited` not starting
+              with a `TokenTree::Token`, even though it won't happen */
         let t = {
             let frame = r.stack.last().unwrap();
             // FIXME(pcwalton): Bad copy.
             frame.forest.get_tt(frame.idx)
         };
         match t {
-            TtSequence(sp, seq) => {
+            TokenTree::Sequence(sp, seq) => {
                 // FIXME(pcwalton): Bad copy.
-                match lockstep_iter_size(&TtSequence(sp, seq.clone()),
+                match lockstep_iter_size(&TokenTree::Sequence(sp, seq.clone()),
                                          r) {
                     LisUnconstrained => {
                         panic!(r.sp_diag.span_fatal(
@@ -259,7 +256,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                     }
                     LisConstraint(len, _) => {
                         if len == 0 {
-                            if seq.op == ast::OneOrMore {
+                            if seq.op == tokenstream::KleeneOp::OneOrMore {
                                 // FIXME #2887 blame invoker
                                 panic!(r.sp_diag.span_fatal(sp.clone(),
                                                      "this must repeat at least once"));
@@ -274,49 +271,59 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                             idx: 0,
                             dotdotdoted: true,
                             sep: seq.separator.clone(),
-                            forest: TtSequence(sp, seq),
+                            forest: TokenTree::Sequence(sp, seq),
                         });
                     }
                 }
             }
             // FIXME #2887: think about span stuff here
-            TtToken(sp, SubstNt(ident, namep)) => {
-                r.stack.last_mut().unwrap().idx += 1;
+            TokenTree::Token(sp, SubstNt(ident)) => {
                 match lookup_cur_matched(r, ident) {
                     None => {
+                        r.stack.last_mut().unwrap().idx += 1;
                         r.cur_span = sp;
-                        r.cur_tok = SubstNt(ident, namep);
+                        r.cur_tok = SubstNt(ident);
                         return ret_val;
-                        // this can't be 0 length, just like TtDelimited
+                        // this can't be 0 length, just like TokenTree::Delimited
                     }
                     Some(cur_matched) => {
                         match *cur_matched {
                             // sidestep the interpolation tricks for ident because
                             // (a) idents can be in lots of places, so it'd be a pain
                             // (b) we actually can, since it's a token.
-                            MatchedNonterminal(NtIdent(ref sn, b)) => {
-                                r.cur_span = sp;
-                                r.cur_tok = token::Ident(**sn, b);
+                            MatchedNonterminal(NtIdent(ref sn)) => {
+                                r.stack.last_mut().unwrap().idx += 1;
+                                r.cur_span = sn.span;
+                                r.cur_tok = token::Ident(sn.node);
                                 return ret_val;
                             }
+                            MatchedNonterminal(NtTT(ref tt)) => {
+                                r.stack.push(TtFrame {
+                                    forest: TokenTree::Token(sp, Interpolated(NtTT(tt.clone()))),
+                                    idx: 0,
+                                    dotdotdoted: false,
+                                    sep: None,
+                                });
+                            }
                             MatchedNonterminal(ref other_whole_nt) => {
+                                r.stack.last_mut().unwrap().idx += 1;
                                 // FIXME(pcwalton): Bad copy.
                                 r.cur_span = sp;
-                                r.cur_tok = token::Interpolated((*other_whole_nt).clone());
+                                r.cur_tok = Interpolated((*other_whole_nt).clone());
                                 return ret_val;
                             }
                             MatchedSeq(..) => {
                                 panic!(r.sp_diag.span_fatal(
-                                    r.cur_span, /* blame the macro writer */
-                                    &format!("variable '{:?}' is still repeating at this depth",
-                                            token::get_ident(ident))));
+                                    sp, /* blame the macro writer */
+                                    &format!("variable '{}' is still repeating at this depth",
+                                            ident)));
                             }
                         }
                     }
                 }
             }
-            // TtDelimited or any token that can be unzipped
-            seq @ TtDelimited(..) | seq @ TtToken(_, MatchNt(..)) => {
+            // TokenTree::Delimited or any token that can be unzipped
+            seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, MatchNt(..)) => {
                 // do not advance the idx yet
                 r.stack.push(TtFrame {
                    forest: seq,
@@ -326,15 +333,15 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                 });
                 // if this could be 0-length, we'd need to potentially recur here
             }
-            TtToken(sp, DocComment(name)) if r.desugar_doc_comments => {
+            TokenTree::Token(sp, DocComment(name)) if r.desugar_doc_comments => {
                 r.stack.push(TtFrame {
-                   forest: TtToken(sp, DocComment(name)),
+                   forest: TokenTree::Token(sp, DocComment(name)),
                    idx: 0,
                    dotdotdoted: false,
                    sep: None
                 });
             }
-            TtToken(sp, token::SpecialVarNt(SpecialMacroVar::CrateMacroVar)) => {
+            TokenTree::Token(sp, token::SpecialVarNt(SpecialMacroVar::CrateMacroVar)) => {
                 r.stack.last_mut().unwrap().idx += 1;
 
                 if r.imported_from.is_some() {
@@ -346,7 +353,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
 
                 // otherwise emit nothing and proceed to the next token
             }
-            TtToken(sp, tok) => {
+            TokenTree::Token(sp, tok) => {
                 r.cur_span = sp;
                 r.cur_tok = tok;
                 r.stack.last_mut().unwrap().idx += 1;