]> git.proxmox.com Git - rustc.git/blame - src/libsyntax/tokenstream.rs
New upstream version 1.14.0+dfsg1
[rustc.git] / src / libsyntax / tokenstream.rs
CommitLineData
3157f602
XL
1// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
2// file at the top-level directory of this distribution and at
3// http://rust-lang.org/COPYRIGHT.
4//
5// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8// option. This file may not be copied, modified, or distributed
9// except according to those terms.
10
5bcae85e
SL
11//! # Token Streams
12//!
13//! TokenStreams represent syntactic objects before they are converted into ASTs.
14//! A `TokenStream` is, roughly speaking, a sequence (i.e. a stream) of `TokenTree`s,
15//! which are themselves either a single Token, a Delimited subsequence of tokens,
16//! or a SequenceRepetition specifier (for the purpose of sequence generation during macro
17//! expansion).
18//!
19//! ## Ownership
20//! TokenStreams are persistent data structures constructed as ropes with reference
21//! counted-children. In general, this means that calling an operation on a TokenStream
22//! (such as `slice`) produces an entirely new TokenStream from the borrowed reference to
23//! the original. This essentially coerces TokenStreams into 'views' of their subparts,
24//! and a borrowed TokenStream is sufficient to build an owned TokenStream without taking
25//! ownership of the original.
3157f602 26
5bcae85e
SL
27use ast::{self, AttrStyle, LitKind};
28use syntax_pos::{Span, DUMMY_SP, NO_EXPANSION};
29use codemap::{Spanned, combine_spans};
3157f602
XL
30use ext::base;
31use ext::tt::macro_parser;
32use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
33use parse::lexer;
5bcae85e
SL
34use parse;
35use parse::token::{self, Token, Lit, Nonterminal};
9e0c209e 36use print::pprust;
5bcae85e
SL
37
38use std::fmt;
39use std::iter::*;
40use std::ops::{self, Index};
41use std::rc::Rc;
3157f602
XL
42
/// A delimited sequence of token trees, e.g. the `(a, b, c)` in `foo(a, b, c)`.
/// Stores the delimiter kind plus separate spans for the opening and closing
/// delimiter so diagnostics can point at either bracket individually.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Delimited {
    /// The type of delimiter (paren, bracket, brace, or none)
    pub delim: token::DelimToken,
    /// The span covering the opening delimiter
    pub open_span: Span,
    /// The delimited sequence of token trees (delimiters excluded)
    pub tts: Vec<TokenTree>,
    /// The span covering the closing delimiter
    pub close_span: Span,
}
55
56impl Delimited {
57 /// Returns the opening delimiter as a token.
58 pub fn open_token(&self) -> token::Token {
59 token::OpenDelim(self.delim)
60 }
61
62 /// Returns the closing delimiter as a token.
63 pub fn close_token(&self) -> token::Token {
64 token::CloseDelim(self.delim)
65 }
66
67 /// Returns the opening delimiter as a token tree.
68 pub fn open_tt(&self) -> TokenTree {
69 TokenTree::Token(self.open_span, self.open_token())
70 }
71
72 /// Returns the closing delimiter as a token tree.
73 pub fn close_tt(&self) -> TokenTree {
74 TokenTree::Token(self.close_span, self.close_token())
75 }
5bcae85e
SL
76
77 /// Returns the token trees inside the delimiters.
78 pub fn subtrees(&self) -> &[TokenTree] {
79 &self.tts
80 }
3157f602
XL
81}
82
/// A `$(...)sep op` repetition in an MBE macro: a sequence of token trees that
/// may be repeated, with an optional separator token and a Kleene operator.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct SequenceRepetition {
    /// The sequence of token trees being repeated
    pub tts: Vec<TokenTree>,
    /// The optional separator emitted between repetitions (e.g. the `,` in `$(...),*`)
    pub separator: Option<token::Token>,
    /// Whether the sequence can be repeated zero (*), or one or more times (+)
    pub op: KleeneOp,
    /// The number of `MatchNt`s that appear in the sequence (and subsequences);
    /// cached so the macro parser can size its match buffers
    pub num_captures: usize,
}
95
/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
/// for token sequences.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum KleeneOp {
    /// `*` — the sequence may appear zero or more times
    ZeroOrMore,
    /// `+` — the sequence must appear at least once
    OneOrMore,
}
103
/// When the main rust parser encounters a syntax-extension invocation, it
/// parses the arguments to the invocation as a token-tree. This is a very
/// loose structure, such that all sorts of different AST-fragments can
/// be passed to syntax extensions using a uniform type.
///
/// If the syntax extension is an MBE macro, it will attempt to match its
/// LHS token tree against the provided token tree, and if it finds a
/// match, will transcribe the RHS token tree, splicing in any captured
/// macro_parser::matched_nonterminals into the `SubstNt`s it finds.
///
/// The RHS of an MBE macro is the only place `SubstNt`s are substituted.
/// Nothing special happens to misnamed or misplaced `SubstNt`s.
#[derive(Debug, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub enum TokenTree {
    /// A single token
    Token(Span, token::Token),
    /// A delimited sequence of token trees; `Rc` makes cloning a view cheap
    Delimited(Span, Rc<Delimited>),

    // This only makes sense in MBE macros.
    /// A kleene-style repetition sequence with a span
    Sequence(Span, Rc<SequenceRepetition>),
}
127
impl TokenTree {
    /// Number of constituent token trees reachable through `get_tt`.
    ///
    /// A return of 0 marks a tree with no internal structure to expand
    /// (a plain token, or a non-`NtTT` interpolation).
    pub fn len(&self) -> usize {
        match *self {
            // A doc comment desugars to an attribute: the outer form
            // `#[doc = "..."]` is two trees (`#` + bracketed group); the inner
            // form `#![doc = "..."]` is three (`#` + `!` + bracketed group).
            TokenTree::Token(_, token::DocComment(name)) => {
                match doc_comment_style(&name.as_str()) {
                    AttrStyle::Outer => 2,
                    AttrStyle::Inner => 3,
                }
            }
            // Only a captured token tree (`NtTT`) can be expanded further.
            TokenTree::Token(_, token::Interpolated(ref nt)) => {
                if let Nonterminal::NtTT(..) = **nt { 1 } else { 0 }
            },
            // A `$name:kind` matcher expands to `$name`, `:`, `kind`.
            TokenTree::Token(_, token::MatchNt(..)) => 3,
            // Subtrees plus the opening and closing delimiter.
            TokenTree::Delimited(_, ref delimed) => delimed.tts.len() + 2,
            TokenTree::Sequence(_, ref seq) => seq.tts.len(),
            TokenTree::Token(..) => 0,
        }
    }

    /// Returns the `index`-th constituent tree, mirroring the counts reported
    /// by `len`. Panics if the tree is not expandable or `index` is out of
    /// range (slice indexing inside the arms does the bounds check).
    pub fn get_tt(&self, index: usize) -> TokenTree {
        match (self, index) {
            // Doc comment, position 0: the leading `#`.
            (&TokenTree::Token(sp, token::DocComment(_)), 0) => TokenTree::Token(sp, token::Pound),
            // Inner doc comment, position 1: the `!` of `#![...]`.
            (&TokenTree::Token(sp, token::DocComment(name)), 1)
                if doc_comment_style(&name.as_str()) == AttrStyle::Inner => {
                TokenTree::Token(sp, token::Not)
            }
            // Doc comment, last position: synthesize the `[doc = r"..."]` group.
            (&TokenTree::Token(sp, token::DocComment(name)), _) => {
                let stripped = strip_doc_comment_decoration(&name.as_str());

                // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
                // required to wrap the text, so the raw string literal below is unambiguous.
                let num_of_hashes = stripped.chars()
                    .scan(0, |cnt, x| {
                        *cnt = if x == '"' {
                            1
                        } else if *cnt != 0 && x == '#' {
                            *cnt + 1
                        } else {
                            0
                        };
                        Some(*cnt)
                    })
                    .max()
                    .unwrap_or(0);

                TokenTree::Delimited(sp, Rc::new(Delimited {
                    delim: token::Bracket,
                    open_span: sp,
                    tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"))),
                              TokenTree::Token(sp, token::Eq),
                              TokenTree::Token(sp, token::Literal(
                                  token::StrRaw(token::intern(&stripped), num_of_hashes), None))],
                    close_span: sp,
                }))
            }
            // Delimited: index 0 is the open delimiter, len+1 the close
            // delimiter, everything in between the subtrees shifted by one.
            (&TokenTree::Delimited(_, ref delimed), _) => {
                if index == 0 {
                    return delimed.open_tt();
                }
                if index == delimed.tts.len() + 1 {
                    return delimed.close_tt();
                }
                delimed.tts[index - 1].clone()
            }
            // `$name:kind` expands positionally to `$name`, `:`, `kind`.
            (&TokenTree::Token(sp, token::MatchNt(name, kind)), _) => {
                let v = [TokenTree::Token(sp, token::SubstNt(name)),
                         TokenTree::Token(sp, token::Colon),
                         TokenTree::Token(sp, token::Ident(kind))];
                v[index].clone()
            }
            (&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(),
            _ => panic!("Cannot expand a token tree"),
        }
    }

    /// Returns the `Span` corresponding to this token tree.
    /// NOTE(review): duplicates `span` below; kept because external callers
    /// may use either name.
    pub fn get_span(&self) -> Span {
        match *self {
            TokenTree::Token(span, _) => span,
            TokenTree::Delimited(span, _) => span,
            TokenTree::Sequence(span, _) => span,
        }
    }

    /// Use this token tree as a matcher to parse given tts.
    pub fn parse(cx: &base::ExtCtxt,
                 mtch: &[TokenTree],
                 tts: &[TokenTree])
                 -> macro_parser::NamedParseResult {
        let diag = &cx.parse_sess().span_diagnostic;
        // `None` is because we're not interpolating
        let arg_rdr = lexer::new_tt_reader(diag, None, tts.iter().cloned().collect());
        macro_parser::parse(cx.parse_sess(), arg_rdr, mtch)
    }

    /// Check if this TokenTree is equal to the other, regardless of span information.
    pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
        match (self, other) {
            (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2,
            // Delimited trees compare structurally: same delimiter, same
            // number of subtrees, and pairwise span-insensitive equality.
            (&TokenTree::Delimited(_, ref dl), &TokenTree::Delimited(_, ref dl2)) => {
                (*dl).delim == (*dl2).delim && dl.tts.len() == dl2.tts.len() &&
                {
                    for (tt1, tt2) in dl.tts.iter().zip(dl2.tts.iter()) {
                        if !tt1.eq_unspanned(tt2) {
                            return false;
                        }
                    }
                    true
                }
            }
            // Sequences (and mismatched variants) are never unspanned-equal.
            (_, _) => false,
        }
    }

    /// Retrieve the TokenTree's span (same result as `get_span`).
    pub fn span(&self) -> Span {
        match *self {
            TokenTree::Token(sp, _) |
            TokenTree::Delimited(sp, _) |
            TokenTree::Sequence(sp, _) => sp,
        }
    }

    /// Indicates if the stream is a token that is equal to the provided token.
    pub fn eq_token(&self, t: Token) -> bool {
        match *self {
            TokenTree::Token(_, ref tk) => *tk == t,
            _ => false,
        }
    }

    /// Indicates if the token is an identifier.
    pub fn is_ident(&self) -> bool {
        self.maybe_ident().is_some()
    }

    /// Returns an identifier if this tree is an identifier token, or a
    /// delimited group whose single subtree (recursively) is one.
    pub fn maybe_ident(&self) -> Option<ast::Ident> {
        match *self {
            TokenTree::Token(_, Token::Ident(t)) => Some(t.clone()),
            TokenTree::Delimited(_, ref dl) => {
                let tts = dl.subtrees();
                if tts.len() != 1 {
                    return None;
                }
                tts[0].maybe_ident()
            }
            _ => None,
        }
    }

    /// Returns a Token literal, looking through single-element delimited
    /// groups the same way as `maybe_ident`.
    pub fn maybe_lit(&self) -> Option<token::Lit> {
        match *self {
            TokenTree::Token(_, Token::Literal(l, _)) => Some(l.clone()),
            TokenTree::Delimited(_, ref dl) => {
                let tts = dl.subtrees();
                if tts.len() != 1 {
                    return None;
                }
                tts[0].maybe_lit()
            }
            _ => None,
        }
    }

    /// Returns an AST string literal if this tree is a (cooked or raw)
    /// string-literal token, unescaping its contents.
    pub fn maybe_str(&self) -> Option<ast::Lit> {
        match *self {
            TokenTree::Token(sp, Token::Literal(Lit::Str_(s), _)) => {
                let l = LitKind::Str(token::intern_and_get_ident(&parse::str_lit(&s.as_str())),
                                     ast::StrStyle::Cooked);
                Some(Spanned {
                    node: l,
                    span: sp,
                })
            }
            TokenTree::Token(sp, Token::Literal(Lit::StrRaw(s, n), _)) => {
                let l = LitKind::Str(token::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())),
                                     ast::StrStyle::Raw(n));
                Some(Spanned {
                    node: l,
                    span: sp,
                })
            }
            _ => None,
        }
    }
}
317
5bcae85e
SL
/// # Token Streams
///
/// TokenStreams are a syntactic abstraction over TokenTrees. The goal is for procedural
/// macros to work over TokenStreams instead of arbitrary syntax. For now, however, we
/// are going to cut a few corners (i.e., use some of the AST structure) when we need to
/// for backwards compatibility.

/// TokenStreams are collections of TokenTrees that represent a syntactic structure. The
/// struct itself shouldn't be directly manipulated; the internal structure is not stable,
/// and may be changed at any time in the future. The operators will not, however (except
/// for signatures, later on).
// NB: `PartialEq` is implemented manually (element-wise) rather than derived,
// because equality must ignore the rope's internal node structure.
#[derive(Clone, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct TokenStream {
    ts: InternalTS,
}
333
// This indicates the maximum size for a leaf in the concatenation algorithm.
// If two leaves will be collectively smaller than this, they will be merged.
// If a leaf is larger than this, it will be concatenated at the top.
const LEAF_SIZE : usize = 32;

// The rope backing a `TokenStream`: either empty, a (possibly sliced) leaf
// vector, or an internal node joining two subtrees. `len` is cached on each
// variant so length queries never traverse the tree.
// NB If Leaf access proves to be slow, introducing a secondary Leaf without the bounds
// for unsliced Leafs may lead to some performance improvement.
#[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub enum InternalTS {
    Empty(Span),
    Leaf {
        // shared backing vector; `offset`/`len` select the visible window
        tts: Rc<Vec<TokenTree>>,
        offset: usize,
        len: usize,
        sp: Span,
    },
    Node {
        left: Rc<InternalTS>,
        right: Rc<InternalTS>,
        // total number of trees in both children
        len: usize,
        sp: Span,
    },
}
357
358impl fmt::Debug for TokenStream {
359 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
360 self.ts.fmt(f)
361 }
362}
363
364impl fmt::Debug for InternalTS {
365 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
366 match *self {
367 InternalTS::Empty(..) => Ok(()),
368 InternalTS::Leaf { ref tts, offset, len, .. } => {
369 for t in tts.iter().skip(offset).take(len) {
370 try!(write!(f, "{:?}", t));
371 }
372 Ok(())
373 }
374 InternalTS::Node { ref left, ref right, .. } => {
375 try!(left.fmt(f));
376 right.fmt(f)
377 }
378 }
379 }
380}
381
382/// Checks if two TokenStreams are equivalent (including spans). For unspanned
383/// equality, see `eq_unspanned`.
384impl PartialEq<TokenStream> for TokenStream {
385 fn eq(&self, other: &TokenStream) -> bool {
386 self.iter().eq(other.iter())
387 }
388}
389
390// NB this will disregard gaps. if we have [a|{2,5} , b|{11,13}], the resultant span
391// will be at {2,13}. Without finer-grained span structures, however, this seems to be
392// our only recourse.
393// FIXME Do something smarter to compute the expansion id.
394fn covering_span(trees: &[TokenTree]) -> Span {
395 // disregard any dummy spans we have
396 let trees = trees.iter().filter(|t| t.span() != DUMMY_SP).collect::<Vec<&TokenTree>>();
397
398 // if we're out of spans, stop
399 if trees.len() < 1 {
400 return DUMMY_SP;
401 }
402
403 // set up the initial values
404 let fst_span = trees[0].span();
405
406 let mut lo_span = fst_span.lo;
407 let mut hi_span = fst_span.hi;
408 let mut expn_id = fst_span.expn_id;
409
410 // compute the spans iteratively
411 for t in trees.iter().skip(1) {
412 let sp = t.span();
413 if sp.lo < lo_span {
414 lo_span = sp.lo;
415 }
416 if hi_span < sp.hi {
417 hi_span = sp.hi;
418 }
419 if expn_id != sp.expn_id {
420 expn_id = NO_EXPANSION;
421 }
422 }
423
424 Span {
425 lo: lo_span,
426 hi: hi_span,
427 expn_id: expn_id,
428 }
429}
430
impl InternalTS {
    /// Cached length; O(1) for every variant.
    fn len(&self) -> usize {
        match *self {
            InternalTS::Empty(..) => 0,
            InternalTS::Leaf { len, .. } => len,
            InternalTS::Node { len, .. } => len,
        }
    }

    /// The span stored on this node (covers all trees below it).
    fn span(&self) -> Span {
        match *self {
            InternalTS::Empty(sp) |
            InternalTS::Leaf { sp, .. } |
            InternalTS::Node { sp, .. } => sp,
        }
    }

    /// Returns a new TokenStream viewing `range` of this rope. Leaves are
    /// re-sliced in place (sharing the backing vector); node slices recurse
    /// into whichever children the range touches.
    fn slice(&self, range: ops::Range<usize>) -> TokenStream {
        let from = range.start;
        let to = range.end;
        if from == to {
            return TokenStream::mk_empty();
        }
        if from > to {
            panic!("Invalid range: {} to {}", from, to);
        }
        if from == 0 && to == self.len() {
            return TokenStream { ts: self.clone() }; /* should be cheap */
        }
        match *self {
            InternalTS::Empty(..) => panic!("Invalid index"),
            // A sub-leaf just narrows the window; no copying of trees.
            InternalTS::Leaf { ref tts, offset, .. } => {
                let offset = offset + from;
                let len = to - from;
                TokenStream::mk_sub_leaf(tts.clone(),
                                         offset,
                                         len,
                                         covering_span(&tts[offset..offset + len]))
            }
            // Recurse left, right, or split across both children.
            InternalTS::Node { ref left, ref right, .. } => {
                let left_len = left.len();
                if to <= left_len {
                    left.slice(range)
                } else if from >= left_len {
                    right.slice(from - left_len..to - left_len)
                } else {
                    TokenStream::concat(left.slice(from..left_len), right.slice(0..to - left_len))
                }
            }
        }
    }

    /// Flattens the rope into a vector of borrowed trees, in order.
    fn to_vec(&self) -> Vec<&TokenTree> {
        let mut res = Vec::with_capacity(self.len());
        // Depth-first, left-to-right traversal appending each leaf's window.
        fn traverse_and_append<'a>(res: &mut Vec<&'a TokenTree>, ts: &'a InternalTS) {
            match *ts {
                InternalTS::Empty(..) => {},
                InternalTS::Leaf { ref tts, offset, len, .. } => {
                    let mut to_app = tts[offset..offset + len].iter().collect();
                    res.append(&mut to_app);
                }
                InternalTS::Node { ref left, ref right, .. } => {
                    traverse_and_append(res, left);
                    traverse_and_append(res, right);
                }
            }
        }
        traverse_and_append(&mut res, self);
        res
    }

    /// Flattens the rope into owned trees (O(n) deep copy).
    fn to_tts(&self) -> Vec<TokenTree> {
        self.to_vec().into_iter().cloned().collect::<Vec<TokenTree>>()
    }

    // Returns an internal node's children (None for Empty/Leaf).
    fn children(&self) -> Option<(Rc<InternalTS>, Rc<InternalTS>)> {
        match *self {
            InternalTS::Node { ref left, ref right, .. } => Some((left.clone(), right.clone())),
            _ => None,
        }
    }
}
514
515/// TokenStream operators include basic destructuring, boolean operations, `maybe_...`
516/// operations, and `maybe_..._prefix` operations. Boolean operations are straightforward,
517/// indicating information about the structure of the stream. The `maybe_...` operations
518/// return `Some<...>` if the tokenstream contains the appropriate item.
519///
520/// Similarly, the `maybe_..._prefix` operations potentially return a
521/// partially-destructured stream as a pair where the first element is the expected item
522/// and the second is the remainder of the stream. As anb example,
523///
524/// `maybe_path_prefix("a::b::c(a,b,c).foo()") -> (a::b::c, "(a,b,c).foo()")`
525impl TokenStream {
526 // Construct an empty node with a dummy span.
527 pub fn mk_empty() -> TokenStream {
528 TokenStream { ts: InternalTS::Empty(DUMMY_SP) }
529 }
530
531 // Construct an empty node with the provided span.
532 fn mk_spanned_empty(sp: Span) -> TokenStream {
533 TokenStream { ts: InternalTS::Empty(sp) }
534 }
535
536 // Construct a leaf node with a 0 offset and length equivalent to the input.
537 fn mk_leaf(tts: Rc<Vec<TokenTree>>, sp: Span) -> TokenStream {
538 let len = tts.len();
539 TokenStream {
540 ts: InternalTS::Leaf {
541 tts: tts,
542 offset: 0,
543 len: len,
544 sp: sp,
545 },
546 }
547 }
548
549 // Construct a leaf node with the provided values.
550 fn mk_sub_leaf(tts: Rc<Vec<TokenTree>>, offset: usize, len: usize, sp: Span) -> TokenStream {
551 TokenStream {
552 ts: InternalTS::Leaf {
553 tts: tts,
554 offset: offset,
555 len: len,
556 sp: sp,
557 },
558 }
559 }
560
561 // Construct an internal node with the provided values.
562 fn mk_int_node(left: Rc<InternalTS>,
563 right: Rc<InternalTS>,
564 len: usize,
565 sp: Span)
566 -> TokenStream {
567 TokenStream {
568 ts: InternalTS::Node {
569 left: left,
570 right: right,
571 len: len,
572 sp: sp,
573 },
574 }
575 }
576
577 /// Convert a vector of `TokenTree`s into a `TokenStream`.
578 pub fn from_tts(trees: Vec<TokenTree>) -> TokenStream {
579 let span = covering_span(&trees[..]);
580 TokenStream::mk_leaf(Rc::new(trees), span)
581 }
582
9e0c209e
SL
583 /// Convert a vector of Tokens into a TokenStream.
584 pub fn from_tokens(tokens: Vec<Token>) -> TokenStream {
585 // FIXME do something nicer with the spans
586 TokenStream::from_tts(tokens.into_iter().map(|t| TokenTree::Token(DUMMY_SP, t)).collect())
587 }
588
5bcae85e
SL
589 /// Manually change a TokenStream's span.
590 pub fn respan(self, span: Span) -> TokenStream {
591 match self.ts {
592 InternalTS::Empty(..) => TokenStream::mk_spanned_empty(span),
593 InternalTS::Leaf { tts, offset, len, .. } => {
594 TokenStream::mk_sub_leaf(tts, offset, len, span)
595 }
596 InternalTS::Node { left, right, len, .. } => {
597 TokenStream::mk_int_node(left, right, len, span)
598 }
599 }
600 }
601
602 /// Concatenates two TokenStreams into a new TokenStream.
603 pub fn concat(left: TokenStream, right: TokenStream) -> TokenStream {
604 // This internal procedure performs 'aggressive compacting' during concatenation as
605 // follows:
606 // - If the nodes' combined total total length is less than 32, we copy both of
607 // them into a new vector and build a new leaf node.
608 // - If one node is an internal node and the other is a 'small' leaf (length<32),
609 // we recur down the internal node on the appropriate side.
610 // - Otherwise, we construct a new internal node that points to them as left and
611 // right.
612 fn concat_internal(left: Rc<InternalTS>, right: Rc<InternalTS>) -> TokenStream {
613 let llen = left.len();
614 let rlen = right.len();
615 let len = llen + rlen;
616 let span = combine_spans(left.span(), right.span());
617 if len <= LEAF_SIZE {
618 let mut new_vec = left.to_tts();
619 let mut rvec = right.to_tts();
620 new_vec.append(&mut rvec);
621 return TokenStream::mk_leaf(Rc::new(new_vec), span);
622 }
623
624 match (left.children(), right.children()) {
625 (Some((lleft, lright)), None) => {
626 if rlen <= LEAF_SIZE {
627 let new_right = concat_internal(lright, right);
628 TokenStream::mk_int_node(lleft, Rc::new(new_right.ts), len, span)
629 } else {
630 TokenStream::mk_int_node(left, right, len, span)
631 }
632 }
633 (None, Some((rleft, rright))) => {
634 if rlen <= LEAF_SIZE {
635 let new_left = concat_internal(left, rleft);
636 TokenStream::mk_int_node(Rc::new(new_left.ts), rright, len, span)
637 } else {
638 TokenStream::mk_int_node(left, right, len, span)
639 }
640 }
641 (_, _) => TokenStream::mk_int_node(left, right, len, span),
642 }
643 }
644
645 if left.is_empty() {
646 right
647 } else if right.is_empty() {
648 left
649 } else {
650 concat_internal(Rc::new(left.ts), Rc::new(right.ts))
651 }
652 }
653
654 /// Indicate if the TokenStream is empty.
655 pub fn is_empty(&self) -> bool {
656 self.len() == 0
657 }
658
659 /// Return a TokenStream's length.
660 pub fn len(&self) -> usize {
661 self.ts.len()
662 }
663
664 /// Convert a TokenStream into a vector of borrowed TokenTrees.
665 pub fn to_vec(&self) -> Vec<&TokenTree> {
666 self.ts.to_vec()
667 }
668
669 /// Convert a TokenStream into a vector of TokenTrees (by cloning the TokenTrees).
670 /// (This operation is an O(n) deep copy of the underlying structure.)
671 pub fn to_tts(&self) -> Vec<TokenTree> {
672 self.ts.to_tts()
673 }
674
675 /// Return the TokenStream's span.
676 pub fn span(&self) -> Span {
677 self.ts.span()
678 }
679
680 /// Returns an iterator over a TokenStream (as a sequence of TokenTrees).
681 pub fn iter<'a>(&self) -> Iter {
682 Iter { vs: self, idx: 0 }
683 }
684
685 /// Splits a TokenStream based on the provided `&TokenTree -> bool` predicate.
686 pub fn split<P>(&self, pred: P) -> Split<P>
687 where P: FnMut(&TokenTree) -> bool
688 {
689 Split {
690 vs: self,
691 pred: pred,
692 finished: false,
693 idx: 0,
694 }
695 }
696
697 /// Produce a slice of the input TokenStream from the `from` index, inclusive, to the
698 /// `to` index, non-inclusive.
699 pub fn slice(&self, range: ops::Range<usize>) -> TokenStream {
700 self.ts.slice(range)
701 }
702
703 /// Slice starting at the provided index, inclusive.
704 pub fn slice_from(&self, from: ops::RangeFrom<usize>) -> TokenStream {
705 self.slice(from.start..self.len())
706 }
707
708 /// Slice up to the provided index, non-inclusive.
709 pub fn slice_to(&self, to: ops::RangeTo<usize>) -> TokenStream {
710 self.slice(0..to.end)
711 }
712
713 /// Indicates where the stream is a single, delimited expression (e.g., `(a,b,c)` or
714 /// `{a,b,c}`).
715 pub fn is_delimited(&self) -> bool {
716 self.maybe_delimited().is_some()
717 }
718
719 /// Returns the inside of the delimited term as a new TokenStream.
720 pub fn maybe_delimited(&self) -> Option<TokenStream> {
721 if !(self.len() == 1) {
722 return None;
723 }
724
725 // FIXME It would be nice to change Delimited to move the Rc around the TokenTree
726 // vector directly in order to avoid the clone here.
727 match self[0] {
728 TokenTree::Delimited(_, ref rc) => Some(TokenStream::from_tts(rc.tts.clone())),
729 _ => None,
730 }
731 }
732
733 /// Indicates if the stream is exactly one identifier.
734 pub fn is_ident(&self) -> bool {
735 self.maybe_ident().is_some()
736 }
737
738 /// Returns an identifier
739 pub fn maybe_ident(&self) -> Option<ast::Ident> {
740 if !(self.len() == 1) {
741 return None;
742 }
743
744 match self[0] {
745 TokenTree::Token(_, Token::Ident(t)) => Some(t),
746 _ => None,
747 }
748 }
749
750 /// Compares two TokenStreams, checking equality without regarding span information.
751 pub fn eq_unspanned(&self, other: &TokenStream) -> bool {
752 for (t1, t2) in self.iter().zip(other.iter()) {
753 if !t1.eq_unspanned(t2) {
754 return false;
755 }
756 }
757 true
758 }
759
760 /// Convert a vector of TokenTrees into a parentheses-delimited TokenStream.
761 pub fn as_delimited_stream(tts: Vec<TokenTree>, delim: token::DelimToken) -> TokenStream {
762 let new_sp = covering_span(&tts);
763
764 let new_delim = Rc::new(Delimited {
765 delim: delim,
766 open_span: DUMMY_SP,
767 tts: tts,
768 close_span: DUMMY_SP,
769 });
770
771 TokenStream::from_tts(vec![TokenTree::Delimited(new_sp, new_delim)])
772 }
773}
774
9e0c209e
SL
775impl fmt::Display for TokenStream {
776 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
777 f.write_str(&pprust::tts_to_string(&self.to_tts()))
778 }
779}
780
5bcae85e
SL
// FIXME Reimplement this iterator to hold onto a slice iterator for a leaf, getting the
// next leaf's iterator when the current one is exhausted.
/// Borrowing iterator over a TokenStream's trees. Each `next` performs an
/// indexed rope lookup (see the FIXME above about caching a leaf iterator).
pub struct Iter<'a> {
    // the stream being iterated
    vs: &'a TokenStream,
    // index of the next tree to yield
    idx: usize,
}
787
788impl<'a> Iterator for Iter<'a> {
789 type Item = &'a TokenTree;
790
791 fn next(&mut self) -> Option<&'a TokenTree> {
792 if self.vs.is_empty() || self.idx >= self.vs.len() {
793 return None;
794 }
795
796 let ret = Some(&self.vs[self.idx]);
797 self.idx = self.idx + 1;
798 ret
799 }
800}
801
/// Iterator of sub-streams produced by splitting a TokenStream at every tree
/// matching the predicate (separators are consumed, not yielded).
pub struct Split<'a, P>
    where P: FnMut(&TokenTree) -> bool
{
    // the stream being split
    vs: &'a TokenStream,
    // separator predicate
    pred: P,
    // set once the final segment has been yielded
    finished: bool,
    // index where the next segment starts
    idx: usize,
}
810
impl<'a, P> Iterator for Split<'a, P>
    where P: FnMut(&TokenTree) -> bool
{
    type Item = TokenStream;

    /// Yields the next segment: the trees between the current position and
    /// the next predicate match (or the end of the stream).
    fn next(&mut self) -> Option<TokenStream> {
        if self.finished {
            return None;
        }
        if self.idx >= self.vs.len() {
            self.finished = true;
            return None;
        }

        // Search for the next separator relative to `self.idx`.
        let mut lookup = self.vs.iter().skip(self.idx);
        match lookup.position(|x| (self.pred)(&x)) {
            None => {
                // No more separators: the rest of the stream is the last segment.
                self.finished = true;
                Some(self.vs.slice_from(self.idx..))
            }
            Some(edx) => {
                // `edx` is relative to `self.idx`; skip past the separator itself.
                let ret = Some(self.vs.slice(self.idx..self.idx + edx));
                self.idx += edx + 1;
                ret
            }
        }
    }
}
839
840impl Index<usize> for TokenStream {
841 type Output = TokenTree;
842
843 fn index(&self, index: usize) -> &TokenTree {
844 &self.ts[index]
845 }
846}
847
impl Index<usize> for InternalTS {
    type Output = TokenTree;

    /// Indexed access into the rope; panics (with a debug dump) when the
    /// index is out of bounds.
    fn index(&self, index: usize) -> &TokenTree {
        if self.len() <= index {
            panic!("Index {} too large for {:?}", index, self);
        }
        match *self {
            // Unreachable given the bounds check above (Empty has len 0).
            InternalTS::Empty(..) => panic!("Invalid index"),
            // Leaves index into their visible window.
            InternalTS::Leaf { ref tts, offset, .. } => tts.get(index + offset).unwrap(),
            // Nodes route the index to the child that contains it.
            InternalTS::Node { ref left, ref right, .. } => {
                let left_len = left.len();
                if index < left_len {
                    Index::index(&**left, index)
                } else {
                    Index::index(&**right, index - left_len)
                }
            }
        }
    }
}
869
870
871#[cfg(test)]
872mod tests {
873 use super::*;
874 use syntax_pos::{Span, BytePos, NO_EXPANSION, DUMMY_SP};
875 use parse::token::{self, str_to_ident, Token};
876 use util::parser_testing::string_to_tts;
877 use std::rc::Rc;
878
879 fn sp(a: u32, b: u32) -> Span {
880 Span {
881 lo: BytePos(a),
882 hi: BytePos(b),
883 expn_id: NO_EXPANSION,
884 }
885 }
886
887 fn as_paren_delimited_stream(tts: Vec<TokenTree>) -> TokenStream {
888 TokenStream::as_delimited_stream(tts, token::DelimToken::Paren)
889 }
890
891 #[test]
892 fn test_concat() {
893 let test_res = TokenStream::from_tts(string_to_tts("foo::bar::baz".to_string()));
894 let test_fst = TokenStream::from_tts(string_to_tts("foo::bar".to_string()));
895 let test_snd = TokenStream::from_tts(string_to_tts("::baz".to_string()));
896 let eq_res = TokenStream::concat(test_fst, test_snd);
897 assert_eq!(test_res.len(), 5);
898 assert_eq!(eq_res.len(), 5);
899 assert_eq!(test_res.eq_unspanned(&eq_res), true);
900 }
901
902 #[test]
903 fn test_from_to_bijection() {
904 let test_start = string_to_tts("foo::bar(baz)".to_string());
905 let test_end = TokenStream::from_tts(string_to_tts("foo::bar(baz)".to_string())).to_tts();
906 assert_eq!(test_start, test_end)
907 }
908
909 #[test]
910 fn test_to_from_bijection() {
911 let test_start = TokenStream::from_tts(string_to_tts("foo::bar(baz)".to_string()));
912 let test_end = TokenStream::from_tts(test_start.clone().to_tts());
913 assert_eq!(test_start, test_end)
914 }
915
916 #[test]
917 fn test_eq_0() {
918 let test_res = TokenStream::from_tts(string_to_tts("foo".to_string()));
919 let test_eqs = TokenStream::from_tts(string_to_tts("foo".to_string()));
920 assert_eq!(test_res, test_eqs)
921 }
922
923 #[test]
924 fn test_eq_1() {
925 let test_res = TokenStream::from_tts(string_to_tts("::bar::baz".to_string()));
926 let test_eqs = TokenStream::from_tts(string_to_tts("::bar::baz".to_string()));
927 assert_eq!(test_res, test_eqs)
928 }
929
930 #[test]
931 fn test_eq_2() {
932 let test_res = TokenStream::from_tts(string_to_tts("foo::bar".to_string()));
933 let test_eqs = TokenStream::from_tts(string_to_tts("foo::bar::baz".to_string()));
934 assert_eq!(test_res, test_eqs.slice(0..3))
935 }
936
937 #[test]
938 fn test_eq_3() {
939 let test_res = TokenStream::from_tts(string_to_tts("".to_string()));
940 let test_eqs = TokenStream::from_tts(string_to_tts("".to_string()));
941 assert_eq!(test_res, test_eqs)
942 }
943
944 #[test]
945 fn test_diseq_0() {
946 let test_res = TokenStream::from_tts(string_to_tts("::bar::baz".to_string()));
947 let test_eqs = TokenStream::from_tts(string_to_tts("bar::baz".to_string()));
948 assert_eq!(test_res == test_eqs, false)
949 }
950
951 #[test]
952 fn test_diseq_1() {
953 let test_res = TokenStream::from_tts(string_to_tts("(bar,baz)".to_string()));
954 let test_eqs = TokenStream::from_tts(string_to_tts("bar,baz".to_string()));
955 assert_eq!(test_res == test_eqs, false)
956 }
957
958 #[test]
959 fn test_slice_0() {
960 let test_res = TokenStream::from_tts(string_to_tts("foo::bar".to_string()));
961 let test_eqs = TokenStream::from_tts(string_to_tts("foo::bar::baz".to_string()));
962 assert_eq!(test_res, test_eqs.slice(0..3))
963 }
964
965 #[test]
966 fn test_slice_1() {
967 let test_res = TokenStream::from_tts(string_to_tts("foo::bar::baz".to_string()))
968 .slice(2..3);
969 let test_eqs = TokenStream::from_tts(vec![TokenTree::Token(sp(5,8),
970 token::Ident(str_to_ident("bar")))]);
971 assert_eq!(test_res, test_eqs)
972 }
973
974 #[test]
975 fn test_is_empty() {
976 let test0 = TokenStream::from_tts(Vec::new());
977 let test1 = TokenStream::from_tts(vec![TokenTree::Token(sp(0, 1),
978 Token::Ident(str_to_ident("a")))]);
979 let test2 = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
980
981 assert_eq!(test0.is_empty(), true);
982 assert_eq!(test1.is_empty(), false);
983 assert_eq!(test2.is_empty(), false);
984 }
985
986 #[test]
987 fn test_is_delimited() {
988 let test0 = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
989 let test1 = TokenStream::from_tts(string_to_tts("(bar::baz)".to_string()));
990 let test2 = TokenStream::from_tts(string_to_tts("(foo,bar,baz)".to_string()));
991 let test3 = TokenStream::from_tts(string_to_tts("(foo,bar,baz)(zab,rab,oof)".to_string()));
992 let test4 = TokenStream::from_tts(string_to_tts("(foo,bar,baz)foo".to_string()));
993 let test5 = TokenStream::from_tts(string_to_tts("".to_string()));
994
995 assert_eq!(test0.is_delimited(), false);
996 assert_eq!(test1.is_delimited(), true);
997 assert_eq!(test2.is_delimited(), true);
998 assert_eq!(test3.is_delimited(), false);
999 assert_eq!(test4.is_delimited(), false);
1000 assert_eq!(test5.is_delimited(), false);
1001 }
1002
1003 #[test]
1004 fn test_is_ident() {
1005 let test0 = TokenStream::from_tts(string_to_tts("\"foo\"".to_string()));
1006 let test1 = TokenStream::from_tts(string_to_tts("5".to_string()));
1007 let test2 = TokenStream::from_tts(string_to_tts("foo".to_string()));
1008 let test3 = TokenStream::from_tts(string_to_tts("foo::bar".to_string()));
1009 let test4 = TokenStream::from_tts(string_to_tts("foo(bar)".to_string()));
1010
1011 assert_eq!(test0.is_ident(), false);
1012 assert_eq!(test1.is_ident(), false);
1013 assert_eq!(test2.is_ident(), true);
1014 assert_eq!(test3.is_ident(), false);
1015 assert_eq!(test4.is_ident(), false);
1016 }
1017
1018 #[test]
1019 fn test_maybe_delimited() {
1020 let test0_input = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
1021 let test1_input = TokenStream::from_tts(string_to_tts("(bar::baz)".to_string()));
1022 let test2_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)".to_string()));
1023 let test3_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)(zab,rab)"
1024 .to_string()));
1025 let test4_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)foo".to_string()));
1026 let test5_input = TokenStream::from_tts(string_to_tts("".to_string()));
1027
1028 let test0 = test0_input.maybe_delimited();
1029 let test1 = test1_input.maybe_delimited();
1030 let test2 = test2_input.maybe_delimited();
1031 let test3 = test3_input.maybe_delimited();
1032 let test4 = test4_input.maybe_delimited();
1033 let test5 = test5_input.maybe_delimited();
1034
1035 assert_eq!(test0, None);
1036
1037 let test1_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
1038 token::Ident(str_to_ident("bar"))),
1039 TokenTree::Token(sp(4, 6), token::ModSep),
1040 TokenTree::Token(sp(6, 9),
1041 token::Ident(str_to_ident("baz")))]);
1042 assert_eq!(test1, Some(test1_expected));
1043
1044 let test2_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
1045 token::Ident(str_to_ident("foo"))),
1046 TokenTree::Token(sp(4, 5), token::Comma),
1047 TokenTree::Token(sp(5, 8),
1048 token::Ident(str_to_ident("bar"))),
1049 TokenTree::Token(sp(8, 9), token::Comma),
1050 TokenTree::Token(sp(9, 12),
1051 token::Ident(str_to_ident("baz")))]);
1052 assert_eq!(test2, Some(test2_expected));
1053
1054 assert_eq!(test3, None);
1055
1056 assert_eq!(test4, None);
1057
1058 assert_eq!(test5, None);
1059 }
1060
1061 // pub fn maybe_ident(&self) -> Option<ast::Ident>
1062 #[test]
1063 fn test_maybe_ident() {
1064 let test0 = TokenStream::from_tts(string_to_tts("\"foo\"".to_string())).maybe_ident();
1065 let test1 = TokenStream::from_tts(string_to_tts("5".to_string())).maybe_ident();
1066 let test2 = TokenStream::from_tts(string_to_tts("foo".to_string())).maybe_ident();
1067 let test3 = TokenStream::from_tts(string_to_tts("foo::bar".to_string())).maybe_ident();
1068 let test4 = TokenStream::from_tts(string_to_tts("foo(bar)".to_string())).maybe_ident();
1069
1070 assert_eq!(test0, None);
1071 assert_eq!(test1, None);
1072 assert_eq!(test2, Some(str_to_ident("foo")));
1073 assert_eq!(test3, None);
1074 assert_eq!(test4, None);
1075 }
1076
1077 #[test]
1078 fn test_as_delimited_stream() {
1079 let test0 = as_paren_delimited_stream(string_to_tts("foo,bar,".to_string()));
1080 let test1 = as_paren_delimited_stream(string_to_tts("baz(foo,bar)".to_string()));
1081
1082 let test0_tts = vec![TokenTree::Token(sp(0, 3), token::Ident(str_to_ident("foo"))),
1083 TokenTree::Token(sp(3, 4), token::Comma),
1084 TokenTree::Token(sp(4, 7), token::Ident(str_to_ident("bar"))),
1085 TokenTree::Token(sp(7, 8), token::Comma)];
1086 let test0_stream = TokenStream::from_tts(vec![TokenTree::Delimited(sp(0, 8),
1087 Rc::new(Delimited {
1088 delim: token::DelimToken::Paren,
1089 open_span: DUMMY_SP,
1090 tts: test0_tts,
1091 close_span: DUMMY_SP,
1092 }))]);
1093
1094 assert_eq!(test0, test0_stream);
1095
1096
1097 let test1_tts = vec![TokenTree::Token(sp(4, 7), token::Ident(str_to_ident("foo"))),
1098 TokenTree::Token(sp(7, 8), token::Comma),
1099 TokenTree::Token(sp(8, 11), token::Ident(str_to_ident("bar")))];
1100
1101 let test1_parse = vec![TokenTree::Token(sp(0, 3), token::Ident(str_to_ident("baz"))),
1102 TokenTree::Delimited(sp(3, 12),
1103 Rc::new(Delimited {
1104 delim: token::DelimToken::Paren,
1105 open_span: sp(3, 4),
1106 tts: test1_tts,
1107 close_span: sp(11, 12),
1108 }))];
1109
1110 let test1_stream = TokenStream::from_tts(vec![TokenTree::Delimited(sp(0, 12),
1111 Rc::new(Delimited {
1112 delim: token::DelimToken::Paren,
1113 open_span: DUMMY_SP,
1114 tts: test1_parse,
1115 close_span: DUMMY_SP,
1116 }))]);
1117
1118 assert_eq!(test1, test1_stream);
1119 }
1120}