1 // Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
13 //! TokenStreams represent syntactic objects before they are converted into ASTs.
14 //! A `TokenStream` is, roughly speaking, a sequence (e.g., a stream) of `TokenTree`s,
15 //! which are themselves either a single Token, a Delimited subsequence of tokens,
16 //! or a SequenceRepetition specifier (for the purpose of sequence generation during macro
20 //! TokenStreams are persistent data structures constructed as ropes with reference-
21 //! counted children. In general, this means that calling an operation on a TokenStream
22 //! (such as `slice`) produces an entirely new TokenStream from the borrowed reference to
23 //! the original. This essentially coerces TokenStreams into 'views' of their subparts,
24 //! and a borrowed TokenStream is sufficient to build an owned TokenStream without taking
25 //! ownership of the original.
27 use ast
::{self, AttrStyle, LitKind}
;
28 use syntax_pos
::{Span, DUMMY_SP, NO_EXPANSION}
;
29 use codemap
::{Spanned, combine_spans}
;
31 use ext
::tt
::macro_parser
;
32 use parse
::lexer
::comments
::{doc_comment_style, strip_doc_comment_decoration}
;
35 use parse
::token
::{self, Token, Lit, Nonterminal}
;
40 use std
::ops
::{self, Index}
;
43 /// A delimited sequence of token trees
44 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
45 pub struct Delimited
{
46 /// The type of delimiter
47 pub delim
: token
::DelimToken
,
48 /// The span covering the opening delimiter
50 /// The delimited sequence of token trees
51 pub tts
: Vec
<TokenTree
>,
52 /// The span covering the closing delimiter
57 /// Returns the opening delimiter as a token.
58 pub fn open_token(&self) -> token
::Token
{
59 token
::OpenDelim(self.delim
)
62 /// Returns the closing delimiter as a token.
63 pub fn close_token(&self) -> token
::Token
{
64 token
::CloseDelim(self.delim
)
67 /// Returns the opening delimiter as a token tree.
68 pub fn open_tt(&self) -> TokenTree
{
69 TokenTree
::Token(self.open_span
, self.open_token())
72 /// Returns the closing delimiter as a token tree.
73 pub fn close_tt(&self) -> TokenTree
{
74 TokenTree
::Token(self.close_span
, self.close_token())
77 /// Returns the token trees inside the delimiters.
78 pub fn subtrees(&self) -> &[TokenTree
] {
83 /// A sequence of token trees
84 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
85 pub struct SequenceRepetition
{
86 /// The sequence of token trees
87 pub tts
: Vec
<TokenTree
>,
88 /// The optional separator
89 pub separator
: Option
<token
::Token
>,
90 /// Whether the sequence can be repeated zero (*), or one or more times (+)
92 /// The number of `MatchNt`s that appear in the sequence (and subsequences)
93 pub num_captures
: usize,
96 /// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
97 /// for token sequences.
98 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
104 /// When the main rust parser encounters a syntax-extension invocation, it
105 /// parses the arguments to the invocation as a token-tree. This is a very
106 /// loose structure, such that all sorts of different AST-fragments can
107 /// be passed to syntax extensions using a uniform type.
109 /// If the syntax extension is an MBE macro, it will attempt to match its
110 /// LHS token tree against the provided token tree, and if it finds a
111 /// match, will transcribe the RHS token tree, splicing in any captured
112 /// macro_parser::matched_nonterminals into the `SubstNt`s it finds.
114 /// The RHS of an MBE macro is the only place `SubstNt`s are substituted.
115 /// Nothing special happens to misnamed or misplaced `SubstNt`s.
116 #[derive(Debug, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
119 Token(Span
, token
::Token
),
120 /// A delimited sequence of token trees
121 Delimited(Span
, Rc
<Delimited
>),
123 // This only makes sense in MBE macros.
124 /// A kleene-style repetition sequence with a span
125 Sequence(Span
, Rc
<SequenceRepetition
>),
129 pub fn len(&self) -> usize {
131 TokenTree
::Token(_
, token
::DocComment(name
)) => {
132 match doc_comment_style(&name
.as_str()) {
133 AttrStyle
::Outer
=> 2,
134 AttrStyle
::Inner
=> 3,
137 TokenTree
::Token(_
, token
::Interpolated(ref nt
)) => {
138 if let Nonterminal
::NtTT(..) = **nt { 1 }
else { 0 }
140 TokenTree
::Token(_
, token
::MatchNt(..)) => 3,
141 TokenTree
::Delimited(_
, ref delimed
) => delimed
.tts
.len() + 2,
142 TokenTree
::Sequence(_
, ref seq
) => seq
.tts
.len(),
143 TokenTree
::Token(..) => 0,
147 pub fn get_tt(&self, index
: usize) -> TokenTree
{
148 match (self, index
) {
149 (&TokenTree
::Token(sp
, token
::DocComment(_
)), 0) => TokenTree
::Token(sp
, token
::Pound
),
150 (&TokenTree
::Token(sp
, token
::DocComment(name
)), 1)
151 if doc_comment_style(&name
.as_str()) == AttrStyle
::Inner
=> {
152 TokenTree
::Token(sp
, token
::Not
)
154 (&TokenTree
::Token(sp
, token
::DocComment(name
)), _
) => {
155 let stripped
= strip_doc_comment_decoration(&name
.as_str());
157 // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
158 // required to wrap the text.
159 let num_of_hashes
= stripped
.chars()
163 } else if *cnt != 0 && x == '#' {
173 TokenTree::Delimited(sp, Rc::new(Delimited {
174 delim: token::Bracket,
176 tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc
"))),
177 TokenTree::Token(sp, token::Eq),
178 TokenTree::Token(sp, token::Literal(
179 token::StrRaw(token::intern(&stripped), num_of_hashes), None))],
183 (&TokenTree::Delimited(_, ref delimed), _) => {
185 return delimed.open_tt();
187 if index == delimed.tts.len() + 1 {
188 return delimed.close_tt();
190 delimed.tts[index - 1].clone()
192 (&TokenTree::Token(sp, token::MatchNt(name, kind)), _) => {
193 let v = [TokenTree::Token(sp, token::SubstNt(name)),
194 TokenTree::Token(sp, token::Colon),
195 TokenTree::Token(sp, token::Ident(kind))];
198 (&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(),
199 _ => panic!("Cannot expand a token tree
"),
203 /// Returns the `Span` corresponding to this token tree.
204 pub fn get_span(&self) -> Span {
206 TokenTree::Token(span, _) => span,
207 TokenTree::Delimited(span, _) => span,
208 TokenTree::Sequence(span, _) => span,
212 /// Use this token tree as a matcher to parse given tts.
213 pub fn parse(cx: &base::ExtCtxt,
216 -> macro_parser::NamedParseResult {
217 let diag = &cx.parse_sess().span_diagnostic;
218 // `None` is because we're not interpolating
219 let arg_rdr = lexer::new_tt_reader(diag, None, tts.iter().cloned().collect());
220 macro_parser::parse(cx.parse_sess(), arg_rdr, mtch)
223 /// Check if this TokenTree is equal to the other, regardless of span information.
224 pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
225 match (self, other) {
226 (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2,
227 (&TokenTree::Delimited(_, ref dl), &TokenTree::Delimited(_, ref dl2)) => {
228 (*dl).delim == (*dl2).delim && dl.tts.len() == dl2.tts.len() &&
230 for (tt1, tt2) in dl.tts.iter().zip(dl2.tts.iter()) {
231 if !tt1.eq_unspanned(tt2) {
242 /// Retrieve the TokenTree's span.
243 pub fn span(&self) -> Span {
245 TokenTree::Token(sp, _) |
246 TokenTree::Delimited(sp, _) |
247 TokenTree::Sequence(sp, _) => sp,
251 /// Indicates if the stream is a token that is equal to the provided token.
252 pub fn eq_token(&self, t: Token) -> bool {
254 TokenTree::Token(_, ref tk) => *tk == t,
259 /// Indicates if the token is an identifier.
260 pub fn is_ident(&self) -> bool {
261 self.maybe_ident().is_some()
264 /// Returns an identifier.
265 pub fn maybe_ident(&self) -> Option<ast::Ident> {
267 TokenTree::Token(_, Token::Ident(t)) => Some(t.clone()),
268 TokenTree::Delimited(_, ref dl) => {
269 let tts = dl.subtrees();
279 /// Returns a Token literal.
280 pub fn maybe_lit(&self) -> Option<token::Lit> {
282 TokenTree::Token(_, Token::Literal(l, _)) => Some(l.clone()),
283 TokenTree::Delimited(_, ref dl) => {
284 let tts = dl.subtrees();
294 /// Returns an AST string literal.
295 pub fn maybe_str(&self) -> Option<ast::Lit> {
297 TokenTree::Token(sp, Token::Literal(Lit::Str_(s), _)) => {
298 let l = LitKind::Str(token::intern_and_get_ident(&parse::str_lit(&s.as_str())),
299 ast::StrStyle::Cooked);
305 TokenTree::Token(sp, Token::Literal(Lit::StrRaw(s, n), _)) => {
306 let l = LitKind::Str(token::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())),
307 ast::StrStyle::Raw(n));
320 /// TokenStreams are a syntactic abstraction over TokenTrees. The goal is for procedural
321 /// macros to work over TokenStreams instead of arbitrary syntax. For now, however, we
322 /// are going to cut a few corners (i.e., use some of the AST structure) when we need to
323 /// for backwards compatibility.
325 /// TokenStreams are collections of TokenTrees that represent a syntactic structure. The
326 /// struct itself shouldn't be directly manipulated; the internal structure is not stable,
327 /// and may be changed at any time in the future. The operators will not, however (except
328 /// for signatures, later on).
329 #[derive(Clone, Eq, Hash, RustcEncodable, RustcDecodable)]
330 pub struct TokenStream {
334 // This indicates the maximum size for a leaf in the concatenation algorithm.
335 // If two leafs will be collectively smaller than this, they will be merged.
336 // If a leaf is larger than this, it will be concatenated at the top.
337 const LEAF_SIZE : usize = 32;
339 // NB If Leaf access proves to be slow, introducing a secondary Leaf without the bounds
340 // for unsliced Leafs may lead to some performance improvement.
341 #[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
342 pub enum InternalTS {
345 tts: Rc<Vec<TokenTree>>,
351 left: Rc<InternalTS>,
352 right: Rc<InternalTS>,
358 impl fmt::Debug for TokenStream {
359 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
364 impl fmt::Debug for InternalTS {
365 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
367 InternalTS::Empty(..) => Ok(()),
368 InternalTS::Leaf { ref tts, offset, len, .. } => {
369 for t in tts.iter().skip(offset).take(len) {
370 try!(write!(f, "{:?}
", t));
374 InternalTS::Node { ref left, ref right, .. } => {
382 /// Checks if two TokenStreams are equivalent (including spans). For unspanned
383 /// equality, see `eq_unspanned`.
384 impl PartialEq<TokenStream> for TokenStream {
385 fn eq(&self, other: &TokenStream) -> bool {
386 self.iter().eq(other.iter())
390 // NB this will disregard gaps. if we have [a|{2,5} , b|{11,13}], the resultant span
391 // will be at {2,13}. Without finer-grained span structures, however, this seems to be
392 // our only recourse.
393 // FIXME Do something smarter to compute the expansion id.
394 fn covering_span(trees: &[TokenTree]) -> Span {
395 // disregard any dummy spans we have
396 let trees = trees.iter().filter(|t| t.span() != DUMMY_SP).collect::<Vec<&TokenTree>>();
398 // if we're out of spans, stop
403 // set up the initial values
404 let fst_span = trees[0].span();
406 let mut lo_span = fst_span.lo;
407 let mut hi_span = fst_span.hi;
408 let mut expn_id = fst_span.expn_id;
410 // compute the spans iteratively
411 for t in trees.iter().skip(1) {
419 if expn_id != sp.expn_id {
420 expn_id = NO_EXPANSION;
432 fn len(&self) -> usize {
434 InternalTS::Empty(..) => 0,
435 InternalTS::Leaf { len, .. } => len,
436 InternalTS::Node { len, .. } => len,
440 fn span(&self) -> Span {
442 InternalTS::Empty(sp) |
443 InternalTS::Leaf { sp, .. } |
444 InternalTS::Node { sp, .. } => sp,
448 fn slice(&self, range: ops::Range<usize>) -> TokenStream {
449 let from = range.start;
452 return TokenStream::mk_empty();
455 panic!("Invalid range
: {} to {}
", from, to);
457 if from == 0 && to == self.len() {
458 return TokenStream { ts: self.clone() }; /* should be cheap */
461 InternalTS::Empty(..) => panic!("Invalid index
"),
462 InternalTS::Leaf { ref tts, offset, .. } => {
463 let offset = offset + from;
465 TokenStream::mk_sub_leaf(tts.clone(),
468 covering_span(&tts[offset..offset + len]))
470 InternalTS::Node { ref left, ref right, .. } => {
471 let left_len = left.len();
474 } else if from >= left_len {
475 right.slice(from - left_len..to - left_len)
477 TokenStream::concat(left.slice(from..left_len), right.slice(0..to - left_len))
483 fn to_vec(&self) -> Vec<&TokenTree> {
484 let mut res = Vec::with_capacity(self.len());
485 fn traverse_and_append<'a>(res: &mut Vec<&'a TokenTree>, ts: &'a InternalTS) {
487 InternalTS::Empty(..) => {},
488 InternalTS::Leaf { ref tts, offset, len, .. } => {
489 let mut to_app = tts[offset..offset + len].iter().collect();
490 res.append(&mut to_app);
492 InternalTS::Node { ref left, ref right, .. } => {
493 traverse_and_append(res, left);
494 traverse_and_append(res, right);
498 traverse_and_append(&mut res, self);
502 fn to_tts(&self) -> Vec<TokenTree> {
503 self.to_vec().into_iter().cloned().collect::<Vec<TokenTree>>()
506 // Returns an internal node's children.
507 fn children(&self) -> Option<(Rc<InternalTS>, Rc<InternalTS>)> {
509 InternalTS::Node { ref left, ref right, .. } => Some((left.clone(), right.clone())),
515 /// TokenStream operators include basic destructuring, boolean operations, `maybe_...`
516 /// operations, and `maybe_..._prefix` operations. Boolean operations are straightforward,
517 /// indicating information about the structure of the stream. The `maybe_...` operations
518 /// return `Some<...>` if the tokenstream contains the appropriate item.
520 /// Similarly, the `maybe_..._prefix` operations potentially return a
521 /// partially-destructured stream as a pair where the first element is the expected item
522 /// and the second is the remainder of the stream. As an example,
524 /// `maybe_path_prefix("a
::b
::c(a
,b
,c
).foo()") -> (a::b::c, "(a
,b
,c
).foo()")`
526 // Construct an empty node with a dummy span.
527 pub fn mk_empty() -> TokenStream {
528 TokenStream { ts: InternalTS::Empty(DUMMY_SP) }
531 // Construct an empty node with the provided span.
532 fn mk_spanned_empty(sp: Span) -> TokenStream {
533 TokenStream { ts: InternalTS::Empty(sp) }
536 // Construct a leaf node with a 0 offset and length equivalent to the input.
537 fn mk_leaf(tts: Rc<Vec<TokenTree>>, sp: Span) -> TokenStream {
540 ts: InternalTS::Leaf {
549 // Construct a leaf node with the provided values.
550 fn mk_sub_leaf(tts: Rc<Vec<TokenTree>>, offset: usize, len: usize, sp: Span) -> TokenStream {
552 ts: InternalTS::Leaf {
561 // Construct an internal node with the provided values.
562 fn mk_int_node(left: Rc<InternalTS>,
563 right: Rc<InternalTS>,
568 ts: InternalTS::Node {
577 /// Convert a vector of `TokenTree`s into a `TokenStream`.
578 pub fn from_tts(trees: Vec<TokenTree>) -> TokenStream {
579 let span = covering_span(&trees[..]);
580 TokenStream::mk_leaf(Rc::new(trees), span)
583 /// Convert a vector of Tokens into a TokenStream.
584 pub fn from_tokens(tokens: Vec<Token>) -> TokenStream {
585 // FIXME do something nicer with the spans
586 TokenStream::from_tts(tokens.into_iter().map(|t| TokenTree::Token(DUMMY_SP, t)).collect())
589 /// Manually change a TokenStream's span.
590 pub fn respan(self, span: Span) -> TokenStream {
592 InternalTS::Empty(..) => TokenStream::mk_spanned_empty(span),
593 InternalTS::Leaf { tts, offset, len, .. } => {
594 TokenStream::mk_sub_leaf(tts, offset, len, span)
596 InternalTS::Node { left, right, len, .. } => {
597 TokenStream::mk_int_node(left, right, len, span)
602 /// Concatenates two TokenStreams into a new TokenStream.
603 pub fn concat(left: TokenStream, right: TokenStream) -> TokenStream {
604 // This internal procedure performs 'aggressive compacting' during concatenation as
606 // - If the nodes' combined total length is less than 32, we copy both of
607 // them into a new vector and build a new leaf node.
608 // - If one node is an internal node and the other is a 'small' leaf (length<32),
609 // we recur down the internal node on the appropriate side.
610 // - Otherwise, we construct a new internal node that points to them as left and
612 fn concat_internal(left: Rc<InternalTS>, right: Rc<InternalTS>) -> TokenStream {
613 let llen = left.len();
614 let rlen = right.len();
615 let len = llen + rlen;
616 let span = combine_spans(left.span(), right.span());
617 if len <= LEAF_SIZE {
618 let mut new_vec = left.to_tts();
619 let mut rvec = right.to_tts();
620 new_vec.append(&mut rvec);
621 return TokenStream::mk_leaf(Rc::new(new_vec), span);
624 match (left.children(), right.children()) {
625 (Some((lleft, lright)), None) => {
626 if rlen <= LEAF_SIZE {
627 let new_right = concat_internal(lright, right);
628 TokenStream::mk_int_node(lleft, Rc::new(new_right.ts), len, span)
630 TokenStream::mk_int_node(left, right, len, span)
633 (None, Some((rleft, rright))) => {
634 if rlen <= LEAF_SIZE {
635 let new_left = concat_internal(left, rleft);
636 TokenStream::mk_int_node(Rc::new(new_left.ts), rright, len, span)
638 TokenStream::mk_int_node(left, right, len, span)
641 (_, _) => TokenStream::mk_int_node(left, right, len, span),
647 } else if right.is_empty() {
650 concat_internal(Rc::new(left.ts), Rc::new(right.ts))
654 /// Indicate if the TokenStream is empty.
655 pub fn is_empty(&self) -> bool {
659 /// Return a TokenStream's length.
660 pub fn len(&self) -> usize {
664 /// Convert a TokenStream into a vector of borrowed TokenTrees.
665 pub fn to_vec(&self) -> Vec<&TokenTree> {
669 /// Convert a TokenStream into a vector of TokenTrees (by cloning the TokenTrees).
670 /// (This operation is an O(n) deep copy of the underlying structure.)
671 pub fn to_tts(&self) -> Vec<TokenTree> {
675 /// Return the TokenStream's span.
676 pub fn span(&self) -> Span {
680 /// Returns an iterator over a TokenStream (as a sequence of TokenTrees).
681 pub fn iter<'a>(&self) -> Iter {
682 Iter { vs: self, idx: 0 }
685 /// Splits a TokenStream based on the provided `&TokenTree -> bool` predicate.
686 pub fn split<P>(&self, pred: P) -> Split<P>
687 where P: FnMut(&TokenTree) -> bool
697 /// Produce a slice of the input TokenStream from the `from` index, inclusive, to the
698 /// `to` index, non-inclusive.
699 pub fn slice(&self, range: ops::Range<usize>) -> TokenStream {
703 /// Slice starting at the provided index, inclusive.
704 pub fn slice_from(&self, from: ops::RangeFrom<usize>) -> TokenStream {
705 self.slice(from.start..self.len())
708 /// Slice up to the provided index, non-inclusive.
709 pub fn slice_to(&self, to: ops::RangeTo<usize>) -> TokenStream {
710 self.slice(0..to.end)
713 /// Indicates where the stream is a single, delimited expression (e.g., `(a,b,c)` or
715 pub fn is_delimited(&self) -> bool {
716 self.maybe_delimited().is_some()
719 /// Returns the inside of the delimited term as a new TokenStream.
720 pub fn maybe_delimited(&self) -> Option<TokenStream> {
721 if !(self.len() == 1) {
725 // FIXME It would be nice to change Delimited to move the Rc around the TokenTree
726 // vector directly in order to avoid the clone here.
728 TokenTree::Delimited(_, ref rc) => Some(TokenStream::from_tts(rc.tts.clone())),
733 /// Indicates if the stream is exactly one identifier.
734 pub fn is_ident(&self) -> bool {
735 self.maybe_ident().is_some()
738 /// Returns an identifier
739 pub fn maybe_ident(&self) -> Option<ast::Ident> {
740 if !(self.len() == 1) {
745 TokenTree::Token(_, Token::Ident(t)) => Some(t),
750 /// Compares two TokenStreams, checking equality without regarding span information.
751 pub fn eq_unspanned(&self, other: &TokenStream) -> bool {
752 for (t1, t2) in self.iter().zip(other.iter()) {
753 if !t1.eq_unspanned(t2) {
760 /// Convert a vector of TokenTrees into a parentheses-delimited TokenStream.
761 pub fn as_delimited_stream(tts: Vec<TokenTree>, delim: token::DelimToken) -> TokenStream {
762 let new_sp = covering_span(&tts);
764 let new_delim = Rc::new(Delimited {
768 close_span: DUMMY_SP,
771 TokenStream::from_tts(vec![TokenTree::Delimited(new_sp, new_delim)])
775 impl fmt::Display for TokenStream {
776 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
777 f.write_str(&pprust::tts_to_string(&self.to_tts()))
781 // FIXME Reimplement this iterator to hold onto a slice iterator for a leaf, getting the
782 // next leaf's iterator when the current one is exhausted.
783 pub struct Iter<'a> {
788 impl<'a> Iterator for Iter<'a> {
789 type Item = &'a TokenTree;
791 fn next(&mut self) -> Option<&'a TokenTree> {
792 if self.vs.is_empty() || self.idx >= self.vs.len() {
796 let ret = Some(&self.vs[self.idx]);
797 self.idx = self.idx + 1;
802 pub struct Split<'a, P>
803 where P: FnMut(&TokenTree) -> bool
811 impl<'a, P> Iterator for Split<'a, P>
812 where P: FnMut(&TokenTree) -> bool
814 type Item = TokenStream;
816 fn next(&mut self) -> Option<TokenStream> {
820 if self.idx >= self.vs.len() {
821 self.finished = true;
825 let mut lookup = self.vs.iter().skip(self.idx);
826 match lookup.position(|x| (self.pred)(&x)) {
828 self.finished = true;
829 Some(self.vs.slice_from(self.idx..))
832 let ret = Some(self.vs.slice(self.idx..self.idx + edx));
840 impl Index<usize> for TokenStream {
841 type Output = TokenTree;
843 fn index(&self, index: usize) -> &TokenTree {
848 impl Index<usize> for InternalTS {
849 type Output = TokenTree;
851 fn index(&self, index: usize) -> &TokenTree {
852 if self.len() <= index {
853 panic!("Index {} too large
for {:?}
", index, self);
856 InternalTS::Empty(..) => panic!("Invalid index
"),
857 InternalTS::Leaf { ref tts, offset, .. } => tts.get(index + offset).unwrap(),
858 InternalTS::Node { ref left, ref right, .. } => {
859 let left_len = left.len();
860 if index < left_len {
861 Index::index(&**left, index)
863 Index::index(&**right, index - left_len)
874 use syntax_pos::{Span, BytePos, NO_EXPANSION, DUMMY_SP};
875 use parse::token::{self, str_to_ident, Token};
876 use util::parser_testing::string_to_tts;
879 fn sp(a: u32, b: u32) -> Span {
883 expn_id: NO_EXPANSION,
887 fn as_paren_delimited_stream(tts: Vec<TokenTree>) -> TokenStream {
888 TokenStream::as_delimited_stream(tts, token::DelimToken::Paren)
893 let test_res = TokenStream::from_tts(string_to_tts("foo
::bar
::baz
".to_string()));
894 let test_fst = TokenStream::from_tts(string_to_tts("foo
::bar
".to_string()));
895 let test_snd = TokenStream::from_tts(string_to_tts("::baz
".to_string()));
896 let eq_res = TokenStream::concat(test_fst, test_snd);
897 assert_eq!(test_res.len(), 5);
898 assert_eq!(eq_res.len(), 5);
899 assert_eq!(test_res.eq_unspanned(&eq_res), true);
903 fn test_from_to_bijection() {
904 let test_start = string_to_tts("foo
::bar(baz
)".to_string());
905 let test_end = TokenStream::from_tts(string_to_tts("foo
::bar(baz
)".to_string())).to_tts();
906 assert_eq!(test_start, test_end)
910 fn test_to_from_bijection() {
911 let test_start = TokenStream::from_tts(string_to_tts("foo
::bar(baz
)".to_string()));
912 let test_end = TokenStream::from_tts(test_start.clone().to_tts());
913 assert_eq!(test_start, test_end)
918 let test_res = TokenStream::from_tts(string_to_tts("foo
".to_string()));
919 let test_eqs = TokenStream::from_tts(string_to_tts("foo
".to_string()));
920 assert_eq!(test_res, test_eqs)
925 let test_res = TokenStream::from_tts(string_to_tts("::bar
::baz
".to_string()));
926 let test_eqs = TokenStream::from_tts(string_to_tts("::bar
::baz
".to_string()));
927 assert_eq!(test_res, test_eqs)
932 let test_res = TokenStream::from_tts(string_to_tts("foo
::bar
".to_string()));
933 let test_eqs = TokenStream::from_tts(string_to_tts("foo
::bar
::baz
".to_string()));
934 assert_eq!(test_res, test_eqs.slice(0..3))
939 let test_res = TokenStream::from_tts(string_to_tts("".to_string()));
940 let test_eqs = TokenStream::from_tts(string_to_tts("".to_string()));
941 assert_eq!(test_res, test_eqs)
946 let test_res = TokenStream::from_tts(string_to_tts("::bar
::baz
".to_string()));
947 let test_eqs = TokenStream::from_tts(string_to_tts("bar
::baz
".to_string()));
948 assert_eq!(test_res == test_eqs, false)
953 let test_res = TokenStream::from_tts(string_to_tts("(bar
,baz
)".to_string()));
954 let test_eqs = TokenStream::from_tts(string_to_tts("bar
,baz
".to_string()));
955 assert_eq!(test_res == test_eqs, false)
960 let test_res = TokenStream::from_tts(string_to_tts("foo
::bar
".to_string()));
961 let test_eqs = TokenStream::from_tts(string_to_tts("foo
::bar
::baz
".to_string()));
962 assert_eq!(test_res, test_eqs.slice(0..3))
967 let test_res = TokenStream::from_tts(string_to_tts("foo
::bar
::baz
".to_string()))
969 let test_eqs = TokenStream::from_tts(vec![TokenTree::Token(sp(5,8),
970 token::Ident(str_to_ident("bar
")))]);
971 assert_eq!(test_res, test_eqs)
976 let test0 = TokenStream::from_tts(Vec::new());
977 let test1 = TokenStream::from_tts(vec![TokenTree::Token(sp(0, 1),
978 Token::Ident(str_to_ident("a
")))]);
979 let test2 = TokenStream::from_tts(string_to_tts("foo(bar
::baz
)".to_string()));
981 assert_eq!(test0.is_empty(), true);
982 assert_eq!(test1.is_empty(), false);
983 assert_eq!(test2.is_empty(), false);
987 fn test_is_delimited() {
988 let test0 = TokenStream::from_tts(string_to_tts("foo(bar
::baz
)".to_string()));
989 let test1 = TokenStream::from_tts(string_to_tts("(bar
::baz
)".to_string()));
990 let test2 = TokenStream::from_tts(string_to_tts("(foo
,bar
,baz
)".to_string()));
991 let test3 = TokenStream::from_tts(string_to_tts("(foo
,bar
,baz
)(zab
,rab
,oof
)".to_string()));
992 let test4 = TokenStream::from_tts(string_to_tts("(foo
,bar
,baz
)foo
".to_string()));
993 let test5 = TokenStream::from_tts(string_to_tts("".to_string()));
995 assert_eq!(test0.is_delimited(), false);
996 assert_eq!(test1.is_delimited(), true);
997 assert_eq!(test2.is_delimited(), true);
998 assert_eq!(test3.is_delimited(), false);
999 assert_eq!(test4.is_delimited(), false);
1000 assert_eq!(test5.is_delimited(), false);
1004 fn test_is_ident() {
1005 let test0 = TokenStream::from_tts(string_to_tts("\"foo
\"".to_string()));
1006 let test1 = TokenStream::from_tts(string_to_tts("5".to_string()));
1007 let test2 = TokenStream::from_tts(string_to_tts("foo
".to_string()));
1008 let test3 = TokenStream::from_tts(string_to_tts("foo
::bar
".to_string()));
1009 let test4 = TokenStream::from_tts(string_to_tts("foo(bar
)".to_string()));
1011 assert_eq!(test0.is_ident(), false);
1012 assert_eq!(test1.is_ident(), false);
1013 assert_eq!(test2.is_ident(), true);
1014 assert_eq!(test3.is_ident(), false);
1015 assert_eq!(test4.is_ident(), false);
1019 fn test_maybe_delimited() {
1020 let test0_input = TokenStream::from_tts(string_to_tts("foo(bar
::baz
)".to_string()));
1021 let test1_input = TokenStream::from_tts(string_to_tts("(bar
::baz
)".to_string()));
1022 let test2_input = TokenStream::from_tts(string_to_tts("(foo
,bar
,baz
)".to_string()));
1023 let test3_input = TokenStream::from_tts(string_to_tts("(foo
,bar
,baz
)(zab
,rab
)"
1025 let test4_input = TokenStream::from_tts(string_to_tts("(foo
,bar
,baz
)foo
".to_string()));
1026 let test5_input = TokenStream::from_tts(string_to_tts("".to_string()));
1028 let test0 = test0_input.maybe_delimited();
1029 let test1 = test1_input.maybe_delimited();
1030 let test2 = test2_input.maybe_delimited();
1031 let test3 = test3_input.maybe_delimited();
1032 let test4 = test4_input.maybe_delimited();
1033 let test5 = test5_input.maybe_delimited();
1035 assert_eq!(test0, None);
1037 let test1_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
1038 token::Ident(str_to_ident("bar
"))),
1039 TokenTree::Token(sp(4, 6), token::ModSep),
1040 TokenTree::Token(sp(6, 9),
1041 token::Ident(str_to_ident("baz
")))]);
1042 assert_eq!(test1, Some(test1_expected));
1044 let test2_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
1045 token::Ident(str_to_ident("foo
"))),
1046 TokenTree::Token(sp(4, 5), token::Comma),
1047 TokenTree::Token(sp(5, 8),
1048 token::Ident(str_to_ident("bar
"))),
1049 TokenTree::Token(sp(8, 9), token::Comma),
1050 TokenTree::Token(sp(9, 12),
1051 token::Ident(str_to_ident("baz
")))]);
1052 assert_eq!(test2, Some(test2_expected));
1054 assert_eq!(test3, None);
1056 assert_eq!(test4, None);
1058 assert_eq!(test5, None);
1061 // pub fn maybe_ident(&self) -> Option<ast::Ident>
1063 fn test_maybe_ident() {
1064 let test0 = TokenStream::from_tts(string_to_tts("\"foo
\"".to_string())).maybe_ident();
1065 let test1 = TokenStream::from_tts(string_to_tts("5".to_string())).maybe_ident();
1066 let test2 = TokenStream::from_tts(string_to_tts("foo
".to_string())).maybe_ident();
1067 let test3 = TokenStream::from_tts(string_to_tts("foo
::bar
".to_string())).maybe_ident();
1068 let test4 = TokenStream::from_tts(string_to_tts("foo(bar
)".to_string())).maybe_ident();
1070 assert_eq!(test0, None);
1071 assert_eq!(test1, None);
1072 assert_eq!(test2, Some(str_to_ident("foo
")));
1073 assert_eq!(test3, None);
1074 assert_eq!(test4, None);
1078 fn test_as_delimited_stream() {
1079 let test0 = as_paren_delimited_stream(string_to_tts("foo
,bar
,".to_string()));
1080 let test1 = as_paren_delimited_stream(string_to_tts("baz(foo
,bar
)".to_string()));
1082 let test0_tts = vec![TokenTree::Token(sp(0, 3), token::Ident(str_to_ident("foo
"))),
1083 TokenTree::Token(sp(3, 4), token::Comma),
1084 TokenTree::Token(sp(4, 7), token::Ident(str_to_ident("bar
"))),
1085 TokenTree::Token(sp(7, 8), token::Comma)];
1086 let test0_stream = TokenStream::from_tts(vec![TokenTree::Delimited(sp(0, 8),
1088 delim: token::DelimToken::Paren,
1089 open_span: DUMMY_SP,
1091 close_span: DUMMY_SP,
1094 assert_eq!(test0, test0_stream);
1097 let test1_tts = vec![TokenTree::Token(sp(4, 7), token::Ident(str_to_ident("foo
"))),
1098 TokenTree::Token(sp(7, 8), token::Comma),
1099 TokenTree::Token(sp(8, 11), token::Ident(str_to_ident("bar
")))];
1101 let test1_parse = vec![TokenTree::Token(sp(0, 3), token::Ident(str_to_ident("baz
"))),
1102 TokenTree::Delimited(sp(3, 12),
1104 delim: token::DelimToken::Paren,
1105 open_span: sp(3, 4),
1107 close_span: sp(11, 12),
1110 let test1_stream = TokenStream::from_tts(vec![TokenTree::Delimited(sp(0, 12),
1112 delim: token::DelimToken::Paren,
1113 open_span: DUMMY_SP,
1115 close_span: DUMMY_SP,
1118 assert_eq!(test1, test1_stream);