//! and a borrowed `TokenStream` is sufficient to build an owned `TokenStream` without taking
//! ownership of the original.
-use crate::token::{self, Delimiter, Token, TokenKind};
+use crate::ast::StmtKind;
+use crate::ast_traits::{HasAttrs, HasSpan, HasTokens};
+use crate::token::{self, Delimiter, Nonterminal, Token, TokenKind};
use crate::AttrVec;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_span::{Span, DUMMY_SP};
use smallvec::{smallvec, SmallVec};
-use std::{fmt, iter, mem};
+use std::{fmt, iter};
/// When the main Rust parser encounters a syntax-extension invocation, it
/// parses the arguments to the invocation as a token tree. This is a very
Delimited(DelimSpan, Delimiter, TokenStream),
}
-#[derive(Copy, Clone)]
-pub enum CanSynthesizeMissingTokens {
- Yes,
- No,
-}
-
// Ensure all fields of `TokenTree` is `Send` and `Sync`.
#[cfg(parallel_compiler)]
fn _dummy()
}
impl<S: Encoder> Encodable<S> for LazyTokenStream {
- fn encode(&self, s: &mut S) -> Result<(), S::Error> {
+ // Materializes the lazily-captured stream and encodes that. Encoding is
+ // infallible under this `Encoder` signature (no `Result`), so the call's
+ // unit value is simply discarded with `;`.
+ fn encode(&self, s: &mut S) {
// Used by AST json printing.
- Encodable::encode(&self.create_token_stream(), s)
+ Encodable::encode(&self.create_token_stream(), s);
}
}
self.0.len()
}
- pub fn from_streams(mut streams: SmallVec<[TokenStream; 2]>) -> TokenStream {
- match streams.len() {
- 0 => TokenStream::default(),
- 1 => streams.pop().unwrap(),
- _ => {
- // We are going to extend the first stream in `streams` with
- // the elements from the subsequent streams. This requires
- // using `make_mut()` on the first stream, and in practice this
- // doesn't cause cloning 99.9% of the time.
- //
- // One very common use case is when `streams` has two elements,
- // where the first stream has any number of elements within
- // (often 1, but sometimes many more) and the second stream has
- // a single element within.
-
- // Determine how much the first stream will be extended.
- // Needed to avoid quadratic blow up from on-the-fly
- // reallocations (#57735).
- let num_appends = streams.iter().skip(1).map(|ts| ts.len()).sum();
-
- // Get the first stream. If it's `None`, create an empty
- // stream.
- let mut iter = streams.drain(..);
- let mut first_stream_lrc = iter.next().unwrap().0;
-
- // Append the elements to the first stream, after reserving
- // space for them.
- let first_vec_mut = Lrc::make_mut(&mut first_stream_lrc);
- first_vec_mut.reserve(num_appends);
- for stream in iter {
- first_vec_mut.extend(stream.0.iter().cloned());
- }
-
- // Create the final `TokenStream`.
- TokenStream(first_stream_lrc)
- }
- }
- }
-
- pub fn trees(&self) -> Cursor {
- self.clone().into_trees()
+ /// Iterates over this stream's token trees by reference. Unlike the old
+ /// owning version, which had to `clone` the whole stream to build a
+ /// `Cursor`, this just borrows `self`.
+ pub fn trees(&self) -> CursorRef<'_> {
+ CursorRef::new(self)
}
pub fn into_trees(self) -> Cursor {
.collect(),
))
}
+
+ // Converts an AST node's lazily-captured tokens into a `TokenStream`,
+ // returning `None` when the node has no tokens attached. If the node
+ // carries attributes, the attributes and tokens are bundled into a
+ // one-element `AttrAnnotatedTokenStream` before the final conversion.
+ fn opt_from_ast(node: &(impl HasAttrs + HasTokens)) -> Option<TokenStream> {
+ let tokens = node.tokens()?;
+ let attrs = node.attrs();
+ let attr_annotated = if attrs.is_empty() {
+ tokens.create_token_stream()
+ } else {
+ let attr_data = AttributesData { attrs: attrs.to_vec().into(), tokens: tokens.clone() };
+ AttrAnnotatedTokenStream::new(vec![(
+ AttrAnnotatedTokenTree::Attributes(attr_data),
+ Spacing::Alone,
+ )])
+ };
+ Some(attr_annotated.to_tokenstream())
+ }
+
+ /// Infallible variant of `opt_from_ast`: panics, reporting the node's
+ /// span and `Debug` rendering, when the node has no captured tokens.
+ pub fn from_ast(node: &(impl HasAttrs + HasSpan + HasTokens + fmt::Debug)) -> TokenStream {
+ TokenStream::opt_from_ast(node)
+ .unwrap_or_else(|| panic!("missing tokens for node at {:?}: {:?}", node.span(), node))
+ }
+
+ /// Retrieves a `TokenStream` for an interpolated (`Nonterminal`) AST
+ /// fragment. Idents, lifetimes, and empty statements are synthesized
+ /// directly as single tokens; every other fragment kind defers to
+ /// `from_ast`, which panics if the fragment has no captured tokens.
+ pub fn from_nonterminal_ast(nt: &Nonterminal) -> TokenStream {
+ match nt {
+ Nonterminal::NtIdent(ident, is_raw) => {
+ TokenTree::token(token::Ident(ident.name, *is_raw), ident.span).into()
+ }
+ Nonterminal::NtLifetime(ident) => {
+ TokenTree::token(token::Lifetime(ident.name), ident.span).into()
+ }
+ Nonterminal::NtItem(item) => TokenStream::from_ast(item),
+ Nonterminal::NtBlock(block) => TokenStream::from_ast(block),
+ Nonterminal::NtStmt(stmt) if let StmtKind::Empty = stmt.kind => {
+ // FIXME: Properly collect tokens for empty statements.
+ TokenTree::token(token::Semi, stmt.span).into()
+ }
+ Nonterminal::NtStmt(stmt) => TokenStream::from_ast(stmt),
+ Nonterminal::NtPat(pat) => TokenStream::from_ast(pat),
+ Nonterminal::NtTy(ty) => TokenStream::from_ast(ty),
+ Nonterminal::NtMeta(attr) => TokenStream::from_ast(attr),
+ Nonterminal::NtPath(path) => TokenStream::from_ast(path),
+ Nonterminal::NtVis(vis) => TokenStream::from_ast(vis),
+ Nonterminal::NtExpr(expr) | Nonterminal::NtLiteral(expr) => TokenStream::from_ast(expr),
+ }
+ }
+
+ // Expands one `Interpolated` token: an interpolated ident becomes a plain
+ // `Ident` token; any other nonterminal becomes an invisibly-delimited
+ // group around the fragment's own tokens, themselves recursively
+ // flattened. Non-interpolated tokens are cloned unchanged.
+ fn flatten_token(token: &Token) -> TokenTree {
+ match &token.kind {
+ token::Interpolated(nt) if let token::NtIdent(ident, is_raw) = **nt => {
+ TokenTree::token(token::Ident(ident.name, is_raw), ident.span)
+ }
+ token::Interpolated(nt) => TokenTree::Delimited(
+ DelimSpan::from_single(token.span),
+ Delimiter::Invisible,
+ TokenStream::from_nonterminal_ast(&nt).flattened(),
+ ),
+ _ => TokenTree::Token(token.clone()),
+ }
+ }
+
+ // Tree-level companion of `flatten_token`: recurses into delimited
+ // groups so nested interpolations are expanded as well.
+ fn flatten_token_tree(tree: &TokenTree) -> TokenTree {
+ match tree {
+ TokenTree::Token(token) => TokenStream::flatten_token(token),
+ TokenTree::Delimited(span, delim, tts) => {
+ TokenTree::Delimited(*span, *delim, tts.flattened())
+ }
+ }
+ }
+
+ /// Returns a copy of `self` with every `Interpolated` token recursively
+ /// replaced by real tokens (see `flatten_token`). The `can_skip` pre-pass
+ /// avoids rebuilding the stream when it contains no interpolations,
+ /// returning a cheap clone instead.
+ #[must_use]
+ pub fn flattened(&self) -> TokenStream {
+ fn can_skip(stream: &TokenStream) -> bool {
+ stream.trees().all(|tree| match tree {
+ TokenTree::Token(token) => !matches!(token.kind, token::Interpolated(_)),
+ TokenTree::Delimited(_, _, inner) => can_skip(inner),
+ })
+ }
+
+ if can_skip(self) {
+ return self.clone();
+ }
+
+ self.trees().map(|tree| TokenStream::flatten_token_tree(tree)).collect()
+ }
}
// 99.5%+ of the time we have 1 or 2 elements in this vector.
}
pub fn push<T: Into<TokenStream>>(&mut self, stream: T) {
- let mut stream = stream.into();
-
- // If `self` is not empty and the last tree within the last stream is a
- // token tree marked with `Joint`...
- if let Some(TokenStream(ref mut last_stream_lrc)) = self.0.last_mut()
- && let Some((TokenTree::Token(last_token), Spacing::Joint)) = last_stream_lrc.last()
- // ...and `stream` is not empty and the first tree within it is
- // a token tree...
- && let TokenStream(ref mut stream_lrc) = stream
- && let Some((TokenTree::Token(token), spacing)) = stream_lrc.first()
- // ...and the two tokens can be glued together...
- && let Some(glued_tok) = last_token.glue(&token)
- {
- // ...then do so, by overwriting the last token
- // tree in `self` and removing the first token tree
- // from `stream`. This requires using `make_mut()`
- // on the last stream in `self` and on `stream`,
- // and in practice this doesn't cause cloning 99.9%
- // of the time.
-
- // Overwrite the last token tree with the merged
- // token.
- let last_vec_mut = Lrc::make_mut(last_stream_lrc);
- *last_vec_mut.last_mut().unwrap() = (TokenTree::Token(glued_tok), *spacing);
-
- // Remove the first token tree from `stream`. (This
- // is almost always the only tree in `stream`.)
- let stream_vec_mut = Lrc::make_mut(stream_lrc);
- stream_vec_mut.remove(0);
-
- // Don't push `stream` if it's empty -- that could
- // block subsequent token gluing, by getting
- // between two token trees that should be glued
- // together.
- if !stream.is_empty() {
- self.0.push(stream);
- }
- return;
- }
- self.0.push(stream);
+ // Pushing is now trivial: gluing of joinable tokens across adjacent
+ // streams is deferred to `build`, which performs it once for all
+ // stream boundaries instead of eagerly on every push.
+ self.0.push(stream.into());
}
+ /// Concatenates every pushed stream into a single `TokenStream`,
+ /// gluing a `Joint`-spaced trailing token onto the next stream's
+ /// leading token wherever `Token::glue` allows (absorbing the work the
+ /// removed `from_streams` and the old `push` previously split between
+ /// them).
pub fn build(self) -> TokenStream {
- TokenStream::from_streams(self.0)
+ let mut streams = self.0;
+ match streams.len() {
+ 0 => TokenStream::default(),
+ 1 => streams.pop().unwrap(),
+ _ => {
+ // We will extend the first stream in `streams` with the
+ // elements from the subsequent streams. This requires using
+ // `make_mut()` on the first stream, and in practice this
+ // doesn't cause cloning 99.9% of the time.
+ //
+ // One very common use case is when `streams` has two elements,
+ // where the first stream has any number of elements within
+ // (often 1, but sometimes many more) and the second stream has
+ // a single element within.
+
+ // Determine how much the first stream will be extended.
+ // Needed to avoid quadratic blow up from on-the-fly
+ // reallocations (#57735).
+ let num_appends = streams.iter().skip(1).map(|ts| ts.len()).sum();
+
+ // Get the first stream, which will become the result stream.
+ // If it's `None`, create an empty stream.
+ let mut iter = streams.drain(..);
+ let mut res_stream_lrc = iter.next().unwrap().0;
+
+ // Append the subsequent elements to the result stream, after
+ // reserving space for them.
+ let res_vec_mut = Lrc::make_mut(&mut res_stream_lrc);
+ res_vec_mut.reserve(num_appends);
+ for stream in iter {
+ let stream_iter = stream.0.iter().cloned();
+
+ // If (a) `res_vec_mut` is not empty and the last tree
+ // within it is a token tree marked with `Joint`, and (b)
+ // `stream` is not empty and the first tree within it is a
+ // token tree, and (c) the two tokens can be glued
+ // together...
+ if let Some((TokenTree::Token(last_tok), Spacing::Joint)) = res_vec_mut.last()
+ && let Some((TokenTree::Token(tok), spacing)) = stream.0.first()
+ && let Some(glued_tok) = last_tok.glue(&tok)
+ {
+ // ...then overwrite the last token tree in
+ // `res_vec_mut` with the glued token, and skip the
+ // first token tree from `stream`.
+ *res_vec_mut.last_mut().unwrap() = (TokenTree::Token(glued_tok), *spacing);
+ res_vec_mut.extend(stream_iter.skip(1));
+ } else {
+ // Append all of `stream`.
+ res_vec_mut.extend(stream_iter);
+ }
+ }
+
+ TokenStream(res_stream_lrc)
+ }
+ }
}
}
}
impl<'t> CursorRef<'t> {
+ // A fresh cursor positioned at the start of `stream`; holds only a
+ // borrow, never clones the underlying stream.
+ fn new(stream: &'t TokenStream) -> Self {
+ CursorRef { stream, index: 0 }
+ }
+
+ #[inline]
fn next_with_spacing(&mut self) -> Option<&'t TreeAndSpacing> {
self.stream.0.get(self.index).map(|tree| {
self.index += 1;
tree
})
}
+
+ // Peeks at the tree `n` positions past the cursor without advancing it.
+ pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
+ self.stream.0[self.index..].get(n).map(|(tree, _)| tree)
+ }
}
impl<'t> Iterator for CursorRef<'t> {
})
}
- pub fn index(&self) -> usize {
- self.index
- }
-
- pub fn append(&mut self, new_stream: TokenStream) {
- if new_stream.is_empty() {
- return;
- }
- let index = self.index;
- let stream = mem::take(&mut self.stream);
- *self = TokenStream::from_streams(smallvec![stream, new_stream]).into_trees();
- self.index = index;
- }
-
+ // Peeks at the tree `n` positions ahead without advancing the cursor.
pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
self.stream.0[self.index..].get(n).map(|(tree, _)| tree)
}