]>
Commit | Line | Data |
---|---|---|
3157f602 XL |
1 | // Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT |
2 | // file at the top-level directory of this distribution and at | |
3 | // http://rust-lang.org/COPYRIGHT. | |
4 | // | |
5 | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or | |
6 | // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license | |
7 | // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your | |
8 | // option. This file may not be copied, modified, or distributed | |
9 | // except according to those terms. | |
10 | ||
5bcae85e SL |
11 | //! # Token Streams |
12 | //! | |
13 | //! TokenStreams represent syntactic objects before they are converted into ASTs. | |
14 | //! A `TokenStream` is, roughly speaking, a sequence (i.e., a stream) of `TokenTree`s, |
15 | //! which are themselves either a single Token, a Delimited subsequence of tokens, | |
16 | //! or a SequenceRepetition specifier (for the purpose of sequence generation during macro | |
17 | //! expansion). | |
18 | //! | |
19 | //! ## Ownership | |
20 | //! TokenStreams are persistent data structures constructed as ropes with reference |
21 | //! counted-children. In general, this means that calling an operation on a TokenStream | |
22 | //! (such as `slice`) produces an entirely new TokenStream from the borrowed reference to | |
23 | //! the original. This essentially coerces TokenStreams into 'views' of their subparts, | |
24 | //! and a borrowed TokenStream is sufficient to build an owned TokenStream without taking | |
25 | //! ownership of the original. | |
3157f602 | 26 | |
5bcae85e SL |
27 | use ast::{self, AttrStyle, LitKind}; |
28 | use syntax_pos::{Span, DUMMY_SP, NO_EXPANSION}; | |
29 | use codemap::{Spanned, combine_spans}; | |
3157f602 XL |
30 | use ext::base; |
31 | use ext::tt::macro_parser; | |
32 | use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; | |
33 | use parse::lexer; | |
5bcae85e SL |
34 | use parse; |
35 | use parse::token::{self, Token, Lit, Nonterminal}; | |
36 | ||
37 | use std::fmt; | |
38 | use std::iter::*; | |
39 | use std::ops::{self, Index}; | |
40 | use std::rc::Rc; | |
3157f602 XL |
41 | |
42 | /// A delimited sequence of token trees | |
43 | #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] | |
44 | pub struct Delimited { | |
45 | /// The type of delimiter | |
46 | pub delim: token::DelimToken, | |
47 | /// The span covering the opening delimiter | |
48 | pub open_span: Span, | |
49 | /// The delimited sequence of token trees | |
50 | pub tts: Vec<TokenTree>, | |
51 | /// The span covering the closing delimiter | |
52 | pub close_span: Span, | |
53 | } | |
54 | ||
55 | impl Delimited { | |
56 | /// Returns the opening delimiter as a token. | |
57 | pub fn open_token(&self) -> token::Token { | |
58 | token::OpenDelim(self.delim) | |
59 | } | |
60 | ||
61 | /// Returns the closing delimiter as a token. | |
62 | pub fn close_token(&self) -> token::Token { | |
63 | token::CloseDelim(self.delim) | |
64 | } | |
65 | ||
66 | /// Returns the opening delimiter as a token tree. | |
67 | pub fn open_tt(&self) -> TokenTree { | |
68 | TokenTree::Token(self.open_span, self.open_token()) | |
69 | } | |
70 | ||
71 | /// Returns the closing delimiter as a token tree. | |
72 | pub fn close_tt(&self) -> TokenTree { | |
73 | TokenTree::Token(self.close_span, self.close_token()) | |
74 | } | |
5bcae85e SL |
75 | |
76 | /// Returns the token trees inside the delimiters. | |
77 | pub fn subtrees(&self) -> &[TokenTree] { | |
78 | &self.tts | |
79 | } | |
3157f602 XL |
80 | } |
81 | ||
82 | /// A sequence of token trees | |
83 | #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] | |
84 | pub struct SequenceRepetition { | |
85 | /// The sequence of token trees | |
86 | pub tts: Vec<TokenTree>, | |
87 | /// The optional separator | |
88 | pub separator: Option<token::Token>, | |
89 | /// Whether the sequence can be repeated zero (*), or one or more times (+) | |
90 | pub op: KleeneOp, | |
91 | /// The number of `MatchNt`s that appear in the sequence (and subsequences) | |
92 | pub num_captures: usize, | |
93 | } | |
94 | ||
95 | /// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star) | |
96 | /// for token sequences. | |
97 | #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] | |
98 | pub enum KleeneOp { | |
99 | ZeroOrMore, | |
100 | OneOrMore, | |
101 | } | |
102 | ||
103 | /// When the main rust parser encounters a syntax-extension invocation, it | |
104 | /// parses the arguments to the invocation as a token-tree. This is a very | |
105 | /// loose structure, such that all sorts of different AST-fragments can | |
106 | /// be passed to syntax extensions using a uniform type. | |
107 | /// | |
108 | /// If the syntax extension is an MBE macro, it will attempt to match its | |
109 | /// LHS token tree against the provided token tree, and if it finds a | |
110 | /// match, will transcribe the RHS token tree, splicing in any captured | |
111 | /// macro_parser::matched_nonterminals into the `SubstNt`s it finds. | |
112 | /// | |
113 | /// The RHS of an MBE macro is the only place `SubstNt`s are substituted. | |
114 | /// Nothing special happens to misnamed or misplaced `SubstNt`s. | |
5bcae85e | 115 | #[derive(Debug, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] |
3157f602 XL |
116 | pub enum TokenTree { |
117 | /// A single token | |
118 | Token(Span, token::Token), | |
119 | /// A delimited sequence of token trees | |
5bcae85e | 120 | Delimited(Span, Rc<Delimited>), |
3157f602 XL |
121 | |
122 | // This only makes sense in MBE macros. | |
3157f602 | 123 | /// A kleene-style repetition sequence with a span |
5bcae85e | 124 | Sequence(Span, Rc<SequenceRepetition>), |
3157f602 XL |
125 | } |
126 | ||
127 | impl TokenTree { | |
128 | pub fn len(&self) -> usize { | |
129 | match *self { | |
130 | TokenTree::Token(_, token::DocComment(name)) => { | |
131 | match doc_comment_style(&name.as_str()) { | |
132 | AttrStyle::Outer => 2, | |
5bcae85e | 133 | AttrStyle::Inner => 3, |
3157f602 XL |
134 | } |
135 | } | |
136 | TokenTree::Token(_, token::SpecialVarNt(..)) => 2, | |
137 | TokenTree::Token(_, token::MatchNt(..)) => 3, | |
5bcae85e SL |
138 | TokenTree::Token(_, token::Interpolated(Nonterminal::NtTT(..))) => 1, |
139 | TokenTree::Delimited(_, ref delimed) => delimed.tts.len() + 2, | |
140 | TokenTree::Sequence(_, ref seq) => seq.tts.len(), | |
141 | TokenTree::Token(..) => 0, | |
3157f602 XL |
142 | } |
143 | } | |
144 | ||
145 | pub fn get_tt(&self, index: usize) -> TokenTree { | |
146 | match (self, index) { | |
5bcae85e | 147 | (&TokenTree::Token(sp, token::DocComment(_)), 0) => TokenTree::Token(sp, token::Pound), |
3157f602 | 148 | (&TokenTree::Token(sp, token::DocComment(name)), 1) |
5bcae85e | 149 | if doc_comment_style(&name.as_str()) == AttrStyle::Inner => { |
3157f602 XL |
150 | TokenTree::Token(sp, token::Not) |
151 | } | |
152 | (&TokenTree::Token(sp, token::DocComment(name)), _) => { | |
153 | let stripped = strip_doc_comment_decoration(&name.as_str()); | |
154 | ||
155 | // Searches for the occurrences of `"#*` and returns the minimum number of `#`s | |
156 | // required to wrap the text. | |
5bcae85e SL |
157 | let num_of_hashes = stripped.chars() |
158 | .scan(0, |cnt, x| { | |
159 | *cnt = if x == '"' { | |
160 | 1 | |
161 | } else if *cnt != 0 && x == '#' { | |
162 | *cnt + 1 | |
163 | } else { | |
164 | 0 | |
165 | }; | |
166 | Some(*cnt) | |
167 | }) | |
168 | .max() | |
169 | .unwrap_or(0); | |
3157f602 | 170 | |
5bcae85e | 171 | TokenTree::Delimited(sp, Rc::new(Delimited { |
3157f602 XL |
172 | delim: token::Bracket, |
173 | open_span: sp, | |
174 | tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"))), | |
175 | TokenTree::Token(sp, token::Eq), | |
176 | TokenTree::Token(sp, token::Literal( | |
177 | token::StrRaw(token::intern(&stripped), num_of_hashes), None))], | |
178 | close_span: sp, | |
5bcae85e | 179 | })) |
3157f602 XL |
180 | } |
181 | (&TokenTree::Delimited(_, ref delimed), _) => { | |
182 | if index == 0 { | |
183 | return delimed.open_tt(); | |
184 | } | |
185 | if index == delimed.tts.len() + 1 { | |
186 | return delimed.close_tt(); | |
187 | } | |
188 | delimed.tts[index - 1].clone() | |
189 | } | |
190 | (&TokenTree::Token(sp, token::SpecialVarNt(var)), _) => { | |
191 | let v = [TokenTree::Token(sp, token::Dollar), | |
192 | TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str())))]; | |
193 | v[index].clone() | |
194 | } | |
195 | (&TokenTree::Token(sp, token::MatchNt(name, kind)), _) => { | |
196 | let v = [TokenTree::Token(sp, token::SubstNt(name)), | |
197 | TokenTree::Token(sp, token::Colon), | |
198 | TokenTree::Token(sp, token::Ident(kind))]; | |
199 | v[index].clone() | |
200 | } | |
5bcae85e SL |
201 | (&TokenTree::Token(_, token::Interpolated(Nonterminal::NtTT(ref tt))), _) => { |
202 | tt.clone().unwrap() | |
3157f602 | 203 | } |
5bcae85e SL |
204 | (&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(), |
205 | _ => panic!("Cannot expand a token tree"), | |
3157f602 XL |
206 | } |
207 | } | |
208 | ||
209 | /// Returns the `Span` corresponding to this token tree. | |
210 | pub fn get_span(&self) -> Span { | |
211 | match *self { | |
5bcae85e | 212 | TokenTree::Token(span, _) => span, |
3157f602 | 213 | TokenTree::Delimited(span, _) => span, |
5bcae85e | 214 | TokenTree::Sequence(span, _) => span, |
3157f602 XL |
215 | } |
216 | } | |
217 | ||
218 | /// Use this token tree as a matcher to parse given tts. | |
5bcae85e SL |
219 | pub fn parse(cx: &base::ExtCtxt, |
220 | mtch: &[TokenTree], | |
221 | tts: &[TokenTree]) | |
3157f602 XL |
222 | -> macro_parser::NamedParseResult { |
223 | // `None` is because we're not interpolating | |
224 | let arg_rdr = lexer::new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic, | |
225 | None, | |
226 | None, | |
227 | tts.iter().cloned().collect(), | |
228 | true); | |
229 | macro_parser::parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtch) | |
230 | } | |
5bcae85e SL |
231 | |
232 | /// Check if this TokenTree is equal to the other, regardless of span information. | |
233 | pub fn eq_unspanned(&self, other: &TokenTree) -> bool { | |
234 | match (self, other) { | |
235 | (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2, | |
236 | (&TokenTree::Delimited(_, ref dl), &TokenTree::Delimited(_, ref dl2)) => { | |
237 | (*dl).delim == (*dl2).delim && dl.tts.len() == dl2.tts.len() && | |
238 | { | |
239 | for (tt1, tt2) in dl.tts.iter().zip(dl2.tts.iter()) { | |
240 | if !tt1.eq_unspanned(tt2) { | |
241 | return false; | |
242 | } | |
243 | } | |
244 | true | |
245 | } | |
246 | } | |
247 | (_, _) => false, | |
248 | } | |
249 | } | |
250 | ||
251 | /// Retrieve the TokenTree's span. | |
252 | pub fn span(&self) -> Span { | |
253 | match *self { | |
254 | TokenTree::Token(sp, _) | | |
255 | TokenTree::Delimited(sp, _) | | |
256 | TokenTree::Sequence(sp, _) => sp, | |
257 | } | |
258 | } | |
259 | ||
260 | /// Indicates if the stream is a token that is equal to the provided token. | |
261 | pub fn eq_token(&self, t: Token) -> bool { | |
262 | match *self { | |
263 | TokenTree::Token(_, ref tk) => *tk == t, | |
264 | _ => false, | |
265 | } | |
266 | } | |
267 | ||
268 | /// Indicates if the token is an identifier. | |
269 | pub fn is_ident(&self) -> bool { | |
270 | self.maybe_ident().is_some() | |
271 | } | |
272 | ||
273 | /// Returns an identifier. | |
274 | pub fn maybe_ident(&self) -> Option<ast::Ident> { | |
275 | match *self { | |
276 | TokenTree::Token(_, Token::Ident(t)) => Some(t.clone()), | |
277 | TokenTree::Delimited(_, ref dl) => { | |
278 | let tts = dl.subtrees(); | |
279 | if tts.len() != 1 { | |
280 | return None; | |
281 | } | |
282 | tts[0].maybe_ident() | |
283 | } | |
284 | _ => None, | |
285 | } | |
286 | } | |
287 | ||
288 | /// Returns a Token literal. | |
289 | pub fn maybe_lit(&self) -> Option<token::Lit> { | |
290 | match *self { | |
291 | TokenTree::Token(_, Token::Literal(l, _)) => Some(l.clone()), | |
292 | TokenTree::Delimited(_, ref dl) => { | |
293 | let tts = dl.subtrees(); | |
294 | if tts.len() != 1 { | |
295 | return None; | |
296 | } | |
297 | tts[0].maybe_lit() | |
298 | } | |
299 | _ => None, | |
300 | } | |
301 | } | |
302 | ||
303 | /// Returns an AST string literal. | |
304 | pub fn maybe_str(&self) -> Option<ast::Lit> { | |
305 | match *self { | |
306 | TokenTree::Token(sp, Token::Literal(Lit::Str_(s), _)) => { | |
307 | let l = LitKind::Str(token::intern_and_get_ident(&parse::str_lit(&s.as_str())), | |
308 | ast::StrStyle::Cooked); | |
309 | Some(Spanned { | |
310 | node: l, | |
311 | span: sp, | |
312 | }) | |
313 | } | |
314 | TokenTree::Token(sp, Token::Literal(Lit::StrRaw(s, n), _)) => { | |
315 | let l = LitKind::Str(token::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())), | |
316 | ast::StrStyle::Raw(n)); | |
317 | Some(Spanned { | |
318 | node: l, | |
319 | span: sp, | |
320 | }) | |
321 | } | |
322 | _ => None, | |
323 | } | |
324 | } | |
3157f602 XL |
325 | } |
326 | ||
5bcae85e SL |
327 | /// #Token Streams |
328 | /// | |
329 | /// TokenStreams are a syntactic abstraction over TokenTrees. The goal is for procedural | |
330 | /// macros to work over TokenStreams instead of arbitrary syntax. For now, however, we | |
331 | /// are going to cut a few corners (i.e., use some of the AST structure) when we need to | |
332 | /// for backwards compatibility. | |
333 | ||
334 | /// TokenStreams are collections of TokenTrees that represent a syntactic structure. The | |
335 | /// struct itself shouldn't be directly manipulated; the internal structure is not stable, | |
336 | /// and may be changed at any time in the future. The operators will not, however (except | |
337 | /// for signatures, later on). | |
338 | #[derive(Clone, Eq, Hash, RustcEncodable, RustcDecodable)] | |
339 | pub struct TokenStream { | |
340 | ts: InternalTS, | |
341 | } | |
342 | ||
343 | // This indicates the maximum size for a leaf in the concatenation algorithm. | |
344 | // If two leafs will be collectively smaller than this, they will be merged. | |
345 | // If a leaf is larger than this, it will be concatenated at the top. | |
346 | const LEAF_SIZE : usize = 32; | |
347 | ||
348 | // NB If Leaf access proves to be slow, inroducing a secondary Leaf without the bounds | |
349 | // for unsliced Leafs may lead to some performance improvemenet. | |
350 | #[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] | |
351 | pub enum InternalTS { | |
352 | Empty(Span), | |
353 | Leaf { | |
354 | tts: Rc<Vec<TokenTree>>, | |
355 | offset: usize, | |
356 | len: usize, | |
357 | sp: Span, | |
358 | }, | |
359 | Node { | |
360 | left: Rc<InternalTS>, | |
361 | right: Rc<InternalTS>, | |
362 | len: usize, | |
363 | sp: Span, | |
364 | }, | |
365 | } | |
366 | ||
367 | impl fmt::Debug for TokenStream { | |
368 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | |
369 | self.ts.fmt(f) | |
370 | } | |
371 | } | |
372 | ||
373 | impl fmt::Debug for InternalTS { | |
374 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | |
375 | match *self { | |
376 | InternalTS::Empty(..) => Ok(()), | |
377 | InternalTS::Leaf { ref tts, offset, len, .. } => { | |
378 | for t in tts.iter().skip(offset).take(len) { | |
379 | try!(write!(f, "{:?}", t)); | |
380 | } | |
381 | Ok(()) | |
382 | } | |
383 | InternalTS::Node { ref left, ref right, .. } => { | |
384 | try!(left.fmt(f)); | |
385 | right.fmt(f) | |
386 | } | |
387 | } | |
388 | } | |
389 | } | |
390 | ||
391 | /// Checks if two TokenStreams are equivalent (including spans). For unspanned | |
392 | /// equality, see `eq_unspanned`. | |
393 | impl PartialEq<TokenStream> for TokenStream { | |
394 | fn eq(&self, other: &TokenStream) -> bool { | |
395 | self.iter().eq(other.iter()) | |
396 | } | |
397 | } | |
398 | ||
399 | // NB this will disregard gaps. if we have [a|{2,5} , b|{11,13}], the resultant span | |
400 | // will be at {2,13}. Without finer-grained span structures, however, this seems to be | |
401 | // our only recourse. | |
402 | // FIXME Do something smarter to compute the expansion id. | |
403 | fn covering_span(trees: &[TokenTree]) -> Span { | |
404 | // disregard any dummy spans we have | |
405 | let trees = trees.iter().filter(|t| t.span() != DUMMY_SP).collect::<Vec<&TokenTree>>(); | |
406 | ||
407 | // if we're out of spans, stop | |
408 | if trees.len() < 1 { | |
409 | return DUMMY_SP; | |
410 | } | |
411 | ||
412 | // set up the initial values | |
413 | let fst_span = trees[0].span(); | |
414 | ||
415 | let mut lo_span = fst_span.lo; | |
416 | let mut hi_span = fst_span.hi; | |
417 | let mut expn_id = fst_span.expn_id; | |
418 | ||
419 | // compute the spans iteratively | |
420 | for t in trees.iter().skip(1) { | |
421 | let sp = t.span(); | |
422 | if sp.lo < lo_span { | |
423 | lo_span = sp.lo; | |
424 | } | |
425 | if hi_span < sp.hi { | |
426 | hi_span = sp.hi; | |
427 | } | |
428 | if expn_id != sp.expn_id { | |
429 | expn_id = NO_EXPANSION; | |
430 | } | |
431 | } | |
432 | ||
433 | Span { | |
434 | lo: lo_span, | |
435 | hi: hi_span, | |
436 | expn_id: expn_id, | |
437 | } | |
438 | } | |
439 | ||
440 | impl InternalTS { | |
441 | fn len(&self) -> usize { | |
442 | match *self { | |
443 | InternalTS::Empty(..) => 0, | |
444 | InternalTS::Leaf { len, .. } => len, | |
445 | InternalTS::Node { len, .. } => len, | |
446 | } | |
447 | } | |
448 | ||
449 | fn span(&self) -> Span { | |
450 | match *self { | |
451 | InternalTS::Empty(sp) | | |
452 | InternalTS::Leaf { sp, .. } | | |
453 | InternalTS::Node { sp, .. } => sp, | |
454 | } | |
455 | } | |
456 | ||
457 | fn slice(&self, range: ops::Range<usize>) -> TokenStream { | |
458 | let from = range.start; | |
459 | let to = range.end; | |
460 | if from == to { | |
461 | return TokenStream::mk_empty(); | |
462 | } | |
463 | if from > to { | |
464 | panic!("Invalid range: {} to {}", from, to); | |
465 | } | |
466 | if from == 0 && to == self.len() { | |
467 | return TokenStream { ts: self.clone() }; /* should be cheap */ | |
468 | } | |
469 | match *self { | |
470 | InternalTS::Empty(..) => panic!("Invalid index"), | |
471 | InternalTS::Leaf { ref tts, offset, .. } => { | |
472 | let offset = offset + from; | |
473 | let len = to - from; | |
474 | TokenStream::mk_sub_leaf(tts.clone(), | |
475 | offset, | |
476 | len, | |
477 | covering_span(&tts[offset..offset + len])) | |
478 | } | |
479 | InternalTS::Node { ref left, ref right, .. } => { | |
480 | let left_len = left.len(); | |
481 | if to <= left_len { | |
482 | left.slice(range) | |
483 | } else if from >= left_len { | |
484 | right.slice(from - left_len..to - left_len) | |
485 | } else { | |
486 | TokenStream::concat(left.slice(from..left_len), right.slice(0..to - left_len)) | |
487 | } | |
488 | } | |
489 | } | |
490 | } | |
491 | ||
492 | fn to_vec(&self) -> Vec<&TokenTree> { | |
493 | let mut res = Vec::with_capacity(self.len()); | |
494 | fn traverse_and_append<'a>(res: &mut Vec<&'a TokenTree>, ts: &'a InternalTS) { | |
495 | match *ts { | |
496 | InternalTS::Empty(..) => {}, | |
497 | InternalTS::Leaf { ref tts, offset, len, .. } => { | |
498 | let mut to_app = tts[offset..offset + len].iter().collect(); | |
499 | res.append(&mut to_app); | |
500 | } | |
501 | InternalTS::Node { ref left, ref right, .. } => { | |
502 | traverse_and_append(res, left); | |
503 | traverse_and_append(res, right); | |
504 | } | |
505 | } | |
506 | } | |
507 | traverse_and_append(&mut res, self); | |
508 | res | |
509 | } | |
510 | ||
511 | fn to_tts(&self) -> Vec<TokenTree> { | |
512 | self.to_vec().into_iter().cloned().collect::<Vec<TokenTree>>() | |
513 | } | |
514 | ||
515 | // Returns an internal node's children. | |
516 | fn children(&self) -> Option<(Rc<InternalTS>, Rc<InternalTS>)> { | |
517 | match *self { | |
518 | InternalTS::Node { ref left, ref right, .. } => Some((left.clone(), right.clone())), | |
519 | _ => None, | |
520 | } | |
521 | } | |
522 | } | |
523 | ||
524 | /// TokenStream operators include basic destructuring, boolean operations, `maybe_...` | |
525 | /// operations, and `maybe_..._prefix` operations. Boolean operations are straightforward, | |
526 | /// indicating information about the structure of the stream. The `maybe_...` operations | |
527 | /// return `Some<...>` if the tokenstream contains the appropriate item. | |
528 | /// | |
529 | /// Similarly, the `maybe_..._prefix` operations potentially return a | |
530 | /// partially-destructured stream as a pair where the first element is the expected item | |
531 | /// and the second is the remainder of the stream. As anb example, | |
532 | /// | |
533 | /// `maybe_path_prefix("a::b::c(a,b,c).foo()") -> (a::b::c, "(a,b,c).foo()")` | |
534 | impl TokenStream { | |
535 | // Construct an empty node with a dummy span. | |
536 | pub fn mk_empty() -> TokenStream { | |
537 | TokenStream { ts: InternalTS::Empty(DUMMY_SP) } | |
538 | } | |
539 | ||
540 | // Construct an empty node with the provided span. | |
541 | fn mk_spanned_empty(sp: Span) -> TokenStream { | |
542 | TokenStream { ts: InternalTS::Empty(sp) } | |
543 | } | |
544 | ||
545 | // Construct a leaf node with a 0 offset and length equivalent to the input. | |
546 | fn mk_leaf(tts: Rc<Vec<TokenTree>>, sp: Span) -> TokenStream { | |
547 | let len = tts.len(); | |
548 | TokenStream { | |
549 | ts: InternalTS::Leaf { | |
550 | tts: tts, | |
551 | offset: 0, | |
552 | len: len, | |
553 | sp: sp, | |
554 | }, | |
555 | } | |
556 | } | |
557 | ||
558 | // Construct a leaf node with the provided values. | |
559 | fn mk_sub_leaf(tts: Rc<Vec<TokenTree>>, offset: usize, len: usize, sp: Span) -> TokenStream { | |
560 | TokenStream { | |
561 | ts: InternalTS::Leaf { | |
562 | tts: tts, | |
563 | offset: offset, | |
564 | len: len, | |
565 | sp: sp, | |
566 | }, | |
567 | } | |
568 | } | |
569 | ||
570 | // Construct an internal node with the provided values. | |
571 | fn mk_int_node(left: Rc<InternalTS>, | |
572 | right: Rc<InternalTS>, | |
573 | len: usize, | |
574 | sp: Span) | |
575 | -> TokenStream { | |
576 | TokenStream { | |
577 | ts: InternalTS::Node { | |
578 | left: left, | |
579 | right: right, | |
580 | len: len, | |
581 | sp: sp, | |
582 | }, | |
583 | } | |
584 | } | |
585 | ||
586 | /// Convert a vector of `TokenTree`s into a `TokenStream`. | |
587 | pub fn from_tts(trees: Vec<TokenTree>) -> TokenStream { | |
588 | let span = covering_span(&trees[..]); | |
589 | TokenStream::mk_leaf(Rc::new(trees), span) | |
590 | } | |
591 | ||
592 | /// Manually change a TokenStream's span. | |
593 | pub fn respan(self, span: Span) -> TokenStream { | |
594 | match self.ts { | |
595 | InternalTS::Empty(..) => TokenStream::mk_spanned_empty(span), | |
596 | InternalTS::Leaf { tts, offset, len, .. } => { | |
597 | TokenStream::mk_sub_leaf(tts, offset, len, span) | |
598 | } | |
599 | InternalTS::Node { left, right, len, .. } => { | |
600 | TokenStream::mk_int_node(left, right, len, span) | |
601 | } | |
602 | } | |
603 | } | |
604 | ||
605 | /// Concatenates two TokenStreams into a new TokenStream. | |
606 | pub fn concat(left: TokenStream, right: TokenStream) -> TokenStream { | |
607 | // This internal procedure performs 'aggressive compacting' during concatenation as | |
608 | // follows: | |
609 | // - If the nodes' combined total total length is less than 32, we copy both of | |
610 | // them into a new vector and build a new leaf node. | |
611 | // - If one node is an internal node and the other is a 'small' leaf (length<32), | |
612 | // we recur down the internal node on the appropriate side. | |
613 | // - Otherwise, we construct a new internal node that points to them as left and | |
614 | // right. | |
615 | fn concat_internal(left: Rc<InternalTS>, right: Rc<InternalTS>) -> TokenStream { | |
616 | let llen = left.len(); | |
617 | let rlen = right.len(); | |
618 | let len = llen + rlen; | |
619 | let span = combine_spans(left.span(), right.span()); | |
620 | if len <= LEAF_SIZE { | |
621 | let mut new_vec = left.to_tts(); | |
622 | let mut rvec = right.to_tts(); | |
623 | new_vec.append(&mut rvec); | |
624 | return TokenStream::mk_leaf(Rc::new(new_vec), span); | |
625 | } | |
626 | ||
627 | match (left.children(), right.children()) { | |
628 | (Some((lleft, lright)), None) => { | |
629 | if rlen <= LEAF_SIZE { | |
630 | let new_right = concat_internal(lright, right); | |
631 | TokenStream::mk_int_node(lleft, Rc::new(new_right.ts), len, span) | |
632 | } else { | |
633 | TokenStream::mk_int_node(left, right, len, span) | |
634 | } | |
635 | } | |
636 | (None, Some((rleft, rright))) => { | |
637 | if rlen <= LEAF_SIZE { | |
638 | let new_left = concat_internal(left, rleft); | |
639 | TokenStream::mk_int_node(Rc::new(new_left.ts), rright, len, span) | |
640 | } else { | |
641 | TokenStream::mk_int_node(left, right, len, span) | |
642 | } | |
643 | } | |
644 | (_, _) => TokenStream::mk_int_node(left, right, len, span), | |
645 | } | |
646 | } | |
647 | ||
648 | if left.is_empty() { | |
649 | right | |
650 | } else if right.is_empty() { | |
651 | left | |
652 | } else { | |
653 | concat_internal(Rc::new(left.ts), Rc::new(right.ts)) | |
654 | } | |
655 | } | |
656 | ||
657 | /// Indicate if the TokenStream is empty. | |
658 | pub fn is_empty(&self) -> bool { | |
659 | self.len() == 0 | |
660 | } | |
661 | ||
662 | /// Return a TokenStream's length. | |
663 | pub fn len(&self) -> usize { | |
664 | self.ts.len() | |
665 | } | |
666 | ||
667 | /// Convert a TokenStream into a vector of borrowed TokenTrees. | |
668 | pub fn to_vec(&self) -> Vec<&TokenTree> { | |
669 | self.ts.to_vec() | |
670 | } | |
671 | ||
672 | /// Convert a TokenStream into a vector of TokenTrees (by cloning the TokenTrees). | |
673 | /// (This operation is an O(n) deep copy of the underlying structure.) | |
674 | pub fn to_tts(&self) -> Vec<TokenTree> { | |
675 | self.ts.to_tts() | |
676 | } | |
677 | ||
678 | /// Return the TokenStream's span. | |
679 | pub fn span(&self) -> Span { | |
680 | self.ts.span() | |
681 | } | |
682 | ||
683 | /// Returns an iterator over a TokenStream (as a sequence of TokenTrees). | |
684 | pub fn iter<'a>(&self) -> Iter { | |
685 | Iter { vs: self, idx: 0 } | |
686 | } | |
687 | ||
688 | /// Splits a TokenStream based on the provided `&TokenTree -> bool` predicate. | |
689 | pub fn split<P>(&self, pred: P) -> Split<P> | |
690 | where P: FnMut(&TokenTree) -> bool | |
691 | { | |
692 | Split { | |
693 | vs: self, | |
694 | pred: pred, | |
695 | finished: false, | |
696 | idx: 0, | |
697 | } | |
698 | } | |
699 | ||
700 | /// Produce a slice of the input TokenStream from the `from` index, inclusive, to the | |
701 | /// `to` index, non-inclusive. | |
702 | pub fn slice(&self, range: ops::Range<usize>) -> TokenStream { | |
703 | self.ts.slice(range) | |
704 | } | |
705 | ||
706 | /// Slice starting at the provided index, inclusive. | |
707 | pub fn slice_from(&self, from: ops::RangeFrom<usize>) -> TokenStream { | |
708 | self.slice(from.start..self.len()) | |
709 | } | |
710 | ||
711 | /// Slice up to the provided index, non-inclusive. | |
712 | pub fn slice_to(&self, to: ops::RangeTo<usize>) -> TokenStream { | |
713 | self.slice(0..to.end) | |
714 | } | |
715 | ||
716 | /// Indicates where the stream is a single, delimited expression (e.g., `(a,b,c)` or | |
717 | /// `{a,b,c}`). | |
718 | pub fn is_delimited(&self) -> bool { | |
719 | self.maybe_delimited().is_some() | |
720 | } | |
721 | ||
722 | /// Returns the inside of the delimited term as a new TokenStream. | |
723 | pub fn maybe_delimited(&self) -> Option<TokenStream> { | |
724 | if !(self.len() == 1) { | |
725 | return None; | |
726 | } | |
727 | ||
728 | // FIXME It would be nice to change Delimited to move the Rc around the TokenTree | |
729 | // vector directly in order to avoid the clone here. | |
730 | match self[0] { | |
731 | TokenTree::Delimited(_, ref rc) => Some(TokenStream::from_tts(rc.tts.clone())), | |
732 | _ => None, | |
733 | } | |
734 | } | |
735 | ||
736 | /// Indicates if the stream is exactly one identifier. | |
737 | pub fn is_ident(&self) -> bool { | |
738 | self.maybe_ident().is_some() | |
739 | } | |
740 | ||
741 | /// Returns an identifier | |
742 | pub fn maybe_ident(&self) -> Option<ast::Ident> { | |
743 | if !(self.len() == 1) { | |
744 | return None; | |
745 | } | |
746 | ||
747 | match self[0] { | |
748 | TokenTree::Token(_, Token::Ident(t)) => Some(t), | |
749 | _ => None, | |
750 | } | |
751 | } | |
752 | ||
753 | /// Compares two TokenStreams, checking equality without regarding span information. | |
754 | pub fn eq_unspanned(&self, other: &TokenStream) -> bool { | |
755 | for (t1, t2) in self.iter().zip(other.iter()) { | |
756 | if !t1.eq_unspanned(t2) { | |
757 | return false; | |
758 | } | |
759 | } | |
760 | true | |
761 | } | |
762 | ||
763 | /// Convert a vector of TokenTrees into a parentheses-delimited TokenStream. | |
764 | pub fn as_delimited_stream(tts: Vec<TokenTree>, delim: token::DelimToken) -> TokenStream { | |
765 | let new_sp = covering_span(&tts); | |
766 | ||
767 | let new_delim = Rc::new(Delimited { | |
768 | delim: delim, | |
769 | open_span: DUMMY_SP, | |
770 | tts: tts, | |
771 | close_span: DUMMY_SP, | |
772 | }); | |
773 | ||
774 | TokenStream::from_tts(vec![TokenTree::Delimited(new_sp, new_delim)]) | |
775 | } | |
776 | } | |
777 | ||
778 | // FIXME Reimplement this iterator to hold onto a slice iterator for a leaf, getting the | |
779 | // next leaf's iterator when the current one is exhausted. | |
780 | pub struct Iter<'a> { | |
781 | vs: &'a TokenStream, | |
782 | idx: usize, | |
783 | } | |
784 | ||
785 | impl<'a> Iterator for Iter<'a> { | |
786 | type Item = &'a TokenTree; | |
787 | ||
788 | fn next(&mut self) -> Option<&'a TokenTree> { | |
789 | if self.vs.is_empty() || self.idx >= self.vs.len() { | |
790 | return None; | |
791 | } | |
792 | ||
793 | let ret = Some(&self.vs[self.idx]); | |
794 | self.idx = self.idx + 1; | |
795 | ret | |
796 | } | |
797 | } | |
798 | ||
799 | pub struct Split<'a, P> | |
800 | where P: FnMut(&TokenTree) -> bool | |
801 | { | |
802 | vs: &'a TokenStream, | |
803 | pred: P, | |
804 | finished: bool, | |
805 | idx: usize, | |
806 | } | |
807 | ||
808 | impl<'a, P> Iterator for Split<'a, P> | |
809 | where P: FnMut(&TokenTree) -> bool | |
810 | { | |
811 | type Item = TokenStream; | |
812 | ||
813 | fn next(&mut self) -> Option<TokenStream> { | |
814 | if self.finished { | |
815 | return None; | |
816 | } | |
817 | if self.idx >= self.vs.len() { | |
818 | self.finished = true; | |
819 | return None; | |
820 | } | |
821 | ||
822 | let mut lookup = self.vs.iter().skip(self.idx); | |
823 | match lookup.position(|x| (self.pred)(&x)) { | |
824 | None => { | |
825 | self.finished = true; | |
826 | Some(self.vs.slice_from(self.idx..)) | |
827 | } | |
828 | Some(edx) => { | |
829 | let ret = Some(self.vs.slice(self.idx..self.idx + edx)); | |
830 | self.idx += edx + 1; | |
831 | ret | |
832 | } | |
833 | } | |
834 | } | |
835 | } | |
836 | ||
837 | impl Index<usize> for TokenStream { | |
838 | type Output = TokenTree; | |
839 | ||
840 | fn index(&self, index: usize) -> &TokenTree { | |
841 | &self.ts[index] | |
842 | } | |
843 | } | |
844 | ||
impl Index<usize> for InternalTS {
    type Output = TokenTree;

    /// Indexes into the token-stream rope, walking `Node`s left-to-right.
    ///
    /// Panics if `index` is out of bounds for this (sub)stream.
    fn index(&self, index: usize) -> &TokenTree {
        // Single up-front bounds check; the recursive calls below rely on it.
        if self.len() <= index {
            panic!("Index {} too large for {:?}", index, self);
        }
        match *self {
            // An empty stream holds no trees, so any index is invalid.
            // (Unreachable after the length check above when len() == 0.)
            InternalTS::Empty(..) => panic!("Invalid index"),
            // NOTE(review): `offset` appears to be the leaf's starting
            // position within a shared `tts` buffer (leaves seem to be
            // sub-views of a larger Vec) — confirm against the slicing code.
            InternalTS::Leaf { ref tts, offset, .. } => tts.get(index + offset).unwrap(),
            // Interior node: indices below the left subtree's length resolve
            // left; the remainder resolves right, rebased past the left len.
            InternalTS::Node { ref left, ref right, .. } => {
                let left_len = left.len();
                if index < left_len {
                    Index::index(&**left, index)
                } else {
                    Index::index(&**right, index - left_len)
                }
            }
        }
    }
}
866 | ||
867 | ||
#[cfg(test)]
mod tests {
    //! Unit tests for `TokenStream`: construction from token trees,
    //! concatenation, (un)spanned equality, slicing, and the predicate /
    //! extraction helpers (`is_empty`, `is_delimited`, `is_ident`,
    //! `maybe_delimited`, `maybe_ident`, `as_delimited_stream`).
    use super::*;
    use syntax_pos::{Span, BytePos, NO_EXPANSION, DUMMY_SP};
    use parse::token::{self, str_to_ident, Token};
    use util::parser_testing::string_to_tts;
    use std::rc::Rc;

    /// Shorthand: a span covering byte offsets `a..b` with no expansion info.
    fn sp(a: u32, b: u32) -> Span {
        Span {
            lo: BytePos(a),
            hi: BytePos(b),
            expn_id: NO_EXPANSION,
        }
    }

    /// Wraps `tts` in a single parenthesis-delimited stream.
    fn as_paren_delimited_stream(tts: Vec<TokenTree>) -> TokenStream {
        TokenStream::as_delimited_stream(tts, token::DelimToken::Paren)
    }

    // Concatenating "foo::bar" and "::baz" yields the same 5 tokens
    // (foo, ::, bar, ::, baz) as parsing "foo::bar::baz" directly.
    // Spans differ between the two, hence `eq_unspanned` rather than `==`.
    #[test]
    fn test_concat() {
        let test_res = TokenStream::from_tts(string_to_tts("foo::bar::baz".to_string()));
        let test_fst = TokenStream::from_tts(string_to_tts("foo::bar".to_string()));
        let test_snd = TokenStream::from_tts(string_to_tts("::baz".to_string()));
        let eq_res = TokenStream::concat(test_fst, test_snd);
        assert_eq!(test_res.len(), 5);
        assert_eq!(eq_res.len(), 5);
        assert_eq!(test_res.eq_unspanned(&eq_res), true);
    }

    // tts -> TokenStream -> tts round-trips exactly.
    #[test]
    fn test_from_to_bijection() {
        let test_start = string_to_tts("foo::bar(baz)".to_string());
        let test_end = TokenStream::from_tts(string_to_tts("foo::bar(baz)".to_string())).to_tts();
        assert_eq!(test_start, test_end)
    }

    // TokenStream -> tts -> TokenStream round-trips exactly.
    #[test]
    fn test_to_from_bijection() {
        let test_start = TokenStream::from_tts(string_to_tts("foo::bar(baz)".to_string()));
        let test_end = TokenStream::from_tts(test_start.clone().to_tts());
        assert_eq!(test_start, test_end)
    }

    // Two streams parsed from identical source compare equal.
    #[test]
    fn test_eq_0() {
        let test_res = TokenStream::from_tts(string_to_tts("foo".to_string()));
        let test_eqs = TokenStream::from_tts(string_to_tts("foo".to_string()));
        assert_eq!(test_res, test_eqs)
    }

    // Equality also holds for streams starting with a path separator.
    #[test]
    fn test_eq_1() {
        let test_res = TokenStream::from_tts(string_to_tts("::bar::baz".to_string()));
        let test_eqs = TokenStream::from_tts(string_to_tts("::bar::baz".to_string()));
        assert_eq!(test_res, test_eqs)
    }

    // A prefix slice of a longer stream equals the stream of the prefix.
    // NOTE(review): this body is identical to `test_slice_0` below —
    // likely a copy-paste; perhaps it was meant to test something else.
    #[test]
    fn test_eq_2() {
        let test_res = TokenStream::from_tts(string_to_tts("foo::bar".to_string()));
        let test_eqs = TokenStream::from_tts(string_to_tts("foo::bar::baz".to_string()));
        assert_eq!(test_res, test_eqs.slice(0..3))
    }

    // Two empty streams compare equal.
    #[test]
    fn test_eq_3() {
        let test_res = TokenStream::from_tts(string_to_tts("".to_string()));
        let test_eqs = TokenStream::from_tts(string_to_tts("".to_string()));
        assert_eq!(test_res, test_eqs)
    }

    // A leading `::` distinguishes otherwise-identical streams.
    #[test]
    fn test_diseq_0() {
        let test_res = TokenStream::from_tts(string_to_tts("::bar::baz".to_string()));
        let test_eqs = TokenStream::from_tts(string_to_tts("bar::baz".to_string()));
        assert_eq!(test_res == test_eqs, false)
    }

    // Delimiters matter: "(bar,baz)" is not equal to "bar,baz".
    #[test]
    fn test_diseq_1() {
        let test_res = TokenStream::from_tts(string_to_tts("(bar,baz)".to_string()));
        let test_eqs = TokenStream::from_tts(string_to_tts("bar,baz".to_string()));
        assert_eq!(test_res == test_eqs, false)
    }

    // slice(0..3) of "foo::bar::baz" gives the 3 tokens of "foo::bar".
    #[test]
    fn test_slice_0() {
        let test_res = TokenStream::from_tts(string_to_tts("foo::bar".to_string()));
        let test_eqs = TokenStream::from_tts(string_to_tts("foo::bar::baz".to_string()));
        assert_eq!(test_res, test_eqs.slice(0..3))
    }

    // A single-token interior slice keeps the original span (bytes 5..8
    // of "foo::bar::baz" are "bar"), so plain `==` holds here.
    #[test]
    fn test_slice_1() {
        let test_res = TokenStream::from_tts(string_to_tts("foo::bar::baz".to_string()))
            .slice(2..3);
        let test_eqs = TokenStream::from_tts(vec![TokenTree::Token(sp(5, 8),
                                                  token::Ident(str_to_ident("bar")))]);
        assert_eq!(test_res, test_eqs)
    }

    // is_empty: true only for a stream with no token trees at all.
    #[test]
    fn test_is_empty() {
        let test0 = TokenStream::from_tts(Vec::new());
        let test1 = TokenStream::from_tts(vec![TokenTree::Token(sp(0, 1),
                                               Token::Ident(str_to_ident("a")))]);
        let test2 = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));

        assert_eq!(test0.is_empty(), true);
        assert_eq!(test1.is_empty(), false);
        assert_eq!(test2.is_empty(), false);
    }

    // is_delimited: true only when the whole stream is exactly one
    // delimited group — not a group preceded/followed by other tokens,
    // not two adjacent groups, and not the empty stream.
    #[test]
    fn test_is_delimited() {
        let test0 = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
        let test1 = TokenStream::from_tts(string_to_tts("(bar::baz)".to_string()));
        let test2 = TokenStream::from_tts(string_to_tts("(foo,bar,baz)".to_string()));
        let test3 = TokenStream::from_tts(string_to_tts("(foo,bar,baz)(zab,rab,oof)".to_string()));
        let test4 = TokenStream::from_tts(string_to_tts("(foo,bar,baz)foo".to_string()));
        let test5 = TokenStream::from_tts(string_to_tts("".to_string()));

        assert_eq!(test0.is_delimited(), false);
        assert_eq!(test1.is_delimited(), true);
        assert_eq!(test2.is_delimited(), true);
        assert_eq!(test3.is_delimited(), false);
        assert_eq!(test4.is_delimited(), false);
        assert_eq!(test5.is_delimited(), false);
    }

    // is_ident: true only for a stream that is exactly one identifier
    // token — not a literal, a path, or a call-like form.
    #[test]
    fn test_is_ident() {
        let test0 = TokenStream::from_tts(string_to_tts("\"foo\"".to_string()));
        let test1 = TokenStream::from_tts(string_to_tts("5".to_string()));
        let test2 = TokenStream::from_tts(string_to_tts("foo".to_string()));
        let test3 = TokenStream::from_tts(string_to_tts("foo::bar".to_string()));
        let test4 = TokenStream::from_tts(string_to_tts("foo(bar)".to_string()));

        assert_eq!(test0.is_ident(), false);
        assert_eq!(test1.is_ident(), false);
        assert_eq!(test2.is_ident(), true);
        assert_eq!(test3.is_ident(), false);
        assert_eq!(test4.is_ident(), false);
    }

    // maybe_delimited: Some(inner tokens, delimiters stripped) exactly when
    // is_delimited would be true; None otherwise. Inner spans are preserved
    // from the original source text.
    #[test]
    fn test_maybe_delimited() {
        let test0_input = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
        let test1_input = TokenStream::from_tts(string_to_tts("(bar::baz)".to_string()));
        let test2_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)".to_string()));
        let test3_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)(zab,rab)"
            .to_string()));
        let test4_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)foo".to_string()));
        let test5_input = TokenStream::from_tts(string_to_tts("".to_string()));

        let test0 = test0_input.maybe_delimited();
        let test1 = test1_input.maybe_delimited();
        let test2 = test2_input.maybe_delimited();
        let test3 = test3_input.maybe_delimited();
        let test4 = test4_input.maybe_delimited();
        let test5 = test5_input.maybe_delimited();

        assert_eq!(test0, None);

        let test1_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
                                                        token::Ident(str_to_ident("bar"))),
                                                        TokenTree::Token(sp(4, 6), token::ModSep),
                                                        TokenTree::Token(sp(6, 9),
                                                        token::Ident(str_to_ident("baz")))]);
        assert_eq!(test1, Some(test1_expected));

        let test2_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
                                                        token::Ident(str_to_ident("foo"))),
                                                        TokenTree::Token(sp(4, 5), token::Comma),
                                                        TokenTree::Token(sp(5, 8),
                                                        token::Ident(str_to_ident("bar"))),
                                                        TokenTree::Token(sp(8, 9), token::Comma),
                                                        TokenTree::Token(sp(9, 12),
                                                        token::Ident(str_to_ident("baz")))]);
        assert_eq!(test2, Some(test2_expected));

        assert_eq!(test3, None);

        assert_eq!(test4, None);

        assert_eq!(test5, None);
    }

    // pub fn maybe_ident(&self) -> Option<ast::Ident>
    // maybe_ident: Some(ident) exactly when is_ident would be true.
    #[test]
    fn test_maybe_ident() {
        let test0 = TokenStream::from_tts(string_to_tts("\"foo\"".to_string())).maybe_ident();
        let test1 = TokenStream::from_tts(string_to_tts("5".to_string())).maybe_ident();
        let test2 = TokenStream::from_tts(string_to_tts("foo".to_string())).maybe_ident();
        let test3 = TokenStream::from_tts(string_to_tts("foo::bar".to_string())).maybe_ident();
        let test4 = TokenStream::from_tts(string_to_tts("foo(bar)".to_string())).maybe_ident();

        assert_eq!(test0, None);
        assert_eq!(test1, None);
        assert_eq!(test2, Some(str_to_ident("foo")));
        assert_eq!(test3, None);
        assert_eq!(test4, None);
    }

    // as_delimited_stream wraps the given trees in a synthetic paren group:
    // the inner trees keep their parsed spans, while the synthesized
    // delimiter spans are DUMMY_SP (no real source location exists for them).
    #[test]
    fn test_as_delimited_stream() {
        let test0 = as_paren_delimited_stream(string_to_tts("foo,bar,".to_string()));
        let test1 = as_paren_delimited_stream(string_to_tts("baz(foo,bar)".to_string()));

        let test0_tts = vec![TokenTree::Token(sp(0, 3), token::Ident(str_to_ident("foo"))),
                             TokenTree::Token(sp(3, 4), token::Comma),
                             TokenTree::Token(sp(4, 7), token::Ident(str_to_ident("bar"))),
                             TokenTree::Token(sp(7, 8), token::Comma)];
        let test0_stream = TokenStream::from_tts(vec![TokenTree::Delimited(sp(0, 8),
                                                      Rc::new(Delimited {
                                                          delim: token::DelimToken::Paren,
                                                          open_span: DUMMY_SP,
                                                          tts: test0_tts,
                                                          close_span: DUMMY_SP,
                                                      }))]);

        assert_eq!(test0, test0_stream);


        // Nested case: the pre-existing inner group keeps its real
        // delimiter spans; only the outer synthetic group uses DUMMY_SP.
        let test1_tts = vec![TokenTree::Token(sp(4, 7), token::Ident(str_to_ident("foo"))),
                             TokenTree::Token(sp(7, 8), token::Comma),
                             TokenTree::Token(sp(8, 11), token::Ident(str_to_ident("bar")))];

        let test1_parse = vec![TokenTree::Token(sp(0, 3), token::Ident(str_to_ident("baz"))),
                               TokenTree::Delimited(sp(3, 12),
                                                    Rc::new(Delimited {
                                                        delim: token::DelimToken::Paren,
                                                        open_span: sp(3, 4),
                                                        tts: test1_tts,
                                                        close_span: sp(11, 12),
                                                    }))];

        let test1_stream = TokenStream::from_tts(vec![TokenTree::Delimited(sp(0, 12),
                                                      Rc::new(Delimited {
                                                          delim: token::DelimToken::Paren,
                                                          open_span: DUMMY_SP,
                                                          tts: test1_parse,
                                                          close_span: DUMMY_SP,
                                                      }))]);

        assert_eq!(test1, test1_stream);
    }
}