use super::attr;
use super::{ForceCollect, Parser, TokenCursor, TrailingToken};
use rustc_ast::token::{self, Token, TokenKind};
use rustc_ast::tokenstream::{CreateTokenStream, TokenStream, TokenTree, TreeAndSpacing};
use rustc_ast::tokenstream::{DelimSpan, LazyTokenStream, Spacing};
use rustc_ast::AstLike;
use rustc_ast::{self as ast};
use rustc_errors::PResult;
use rustc_span::{Span, DUMMY_SP};

/// A wrapper type to ensure that the parser handles outer attributes correctly.
/// When we parse outer attributes, we need to ensure that we capture tokens
/// for the attribute target. This allows us to perform cfg-expansion on
/// a token stream before we invoke a derive proc-macro.
///
/// This wrapper prevents direct access to the underlying `Vec<ast::Attribute>`.
/// Parsing code can only get access to the underlying attributes
/// by passing an `AttrWrapper` to `collect_tokens_trailing_token`.
/// This makes it difficult to accidentally construct an AST node
/// (which stores a `Vec<ast::Attribute>`) without first collecting tokens.
///
/// This struct has its own module, to ensure that the parser code
/// cannot directly access the `attrs` field.
#[derive(Debug, Clone)]
pub struct AttrWrapper {
    attrs: Vec<ast::Attribute>,
}
28
29 impl AttrWrapper {
30 pub fn empty() -> AttrWrapper {
31 AttrWrapper { attrs: vec![] }
32 }
33 pub fn new(attrs: Vec<ast::Attribute>) -> AttrWrapper {
34 AttrWrapper { attrs }
35 }
36 // FIXME: Delay span bug here?
37 pub(crate) fn take_for_recovery(self) -> Vec<ast::Attribute> {
38 self.attrs
39 }
40 pub fn is_empty(&self) -> bool {
41 self.attrs.is_empty()
42 }
43 }
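
// A minimal sketch of the intended discipline (illustrative only, not part
// of the original source): other parser code can construct and query an
// `AttrWrapper`, but the `attrs` field itself stays private to this module.
//
//     let attrs = AttrWrapper::new(parsed_attrs);
//     assert!(!attrs.is_empty());
//     // attrs.attrs       // <- does not compile outside this module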

impl<'a> Parser<'a> {
    /// Records all tokens consumed by the provided callback,
    /// including the current token. These tokens are collected
    /// into a `LazyTokenStream`, and returned along with the result
    /// of the callback.
    ///
    /// Note: If your callback consumes an opening delimiter
    /// (including the case where you call `collect_tokens`
    /// when the current token is an opening delimiter),
    /// you must also consume the corresponding closing delimiter.
    ///
    /// That is, you can consume
    /// `something ([{ }])` or `([{}])`, but not `([{}]`.
    ///
    /// This restriction shouldn't be an issue in practice,
    /// since this function is used to record the tokens for
    /// a parsed AST item, which always has matching delimiters.
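    ///
    /// A sketch of the calling convention (illustrative, not a doctest from
    /// the original source; `parse_foo` is a hypothetical helper):
    ///
    /// ```ignore (illustrative)
    /// let attrs = self.parse_outer_attributes()?;
    /// self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
    ///     let foo = this.parse_foo(attrs)?;
    ///     // Report the trailing `;` so it is captured along with `foo`'s tokens.
    ///     Ok((foo, TrailingToken::Semi))
    /// })
    /// ```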
    pub fn collect_tokens_trailing_token<R: AstLike>(
        &mut self,
        attrs: AttrWrapper,
        force_collect: ForceCollect,
        f: impl FnOnce(&mut Self, Vec<ast::Attribute>) -> PResult<'a, (R, TrailingToken)>,
    ) -> PResult<'a, R> {
        // Fast path: if collection isn't forced and none of the attributes
        // can require tokens, invoke the callback without any bookkeeping.
        if matches!(force_collect, ForceCollect::No) && !attr::maybe_needs_tokens(&attrs.attrs) {
            return Ok(f(self, attrs.attrs)?.0);
        }
        let start_token = (self.token.clone(), self.token_spacing);
        let cursor_snapshot = self.token_cursor.clone();

        let (mut ret, trailing_token) = f(self, attrs.attrs)?;
        // Only proceed if the AST node supports tokens (`tokens_mut` returns
        // `Some`) and no tokens have been captured for it yet.
        let tokens = match ret.tokens_mut() {
            Some(tokens) if tokens.is_none() => tokens,
            _ => return Ok(ret),
        };

        // Produces a `TokenStream` on-demand. Using `cursor_snapshot`
        // and `num_calls`, we can reconstruct the `TokenStream` seen
        // by the callback. This allows us to avoid producing a `TokenStream`
        // if it is never needed - for example, a captured `macro_rules!`
        // argument that is never passed to a proc macro.
        // In practice, token stream creation happens rarely compared to
        // calls to `collect_tokens` (see some statistics in #78736),
        // so we are doing as little up-front work as possible.
        //
        // This also makes `Parser` very cheap to clone, since
        // there is no intermediate collection buffer to clone.
        #[derive(Clone)]
        struct LazyTokenStreamImpl {
            start_token: (Token, Spacing),
            cursor_snapshot: TokenCursor,
            num_calls: usize,
            desugar_doc_comments: bool,
            append_unglued_token: Option<TreeAndSpacing>,
        }
        impl CreateTokenStream for LazyTokenStreamImpl {
            fn create_token_stream(&self) -> TokenStream {
                // The token produced by the final call to `next` or `next_desugared`
                // was not actually consumed by the callback. The combination
                // of chaining the initial token and using `take` produces the desired
                // result - we produce an empty `TokenStream` if no calls were made,
                // and omit the final token otherwise.
                let mut cursor_snapshot = self.cursor_snapshot.clone();
                let tokens = std::iter::once(self.start_token.clone())
                    .chain((0..self.num_calls).map(|_| {
                        if self.desugar_doc_comments {
                            cursor_snapshot.next_desugared()
                        } else {
                            cursor_snapshot.next()
                        }
                    }))
                    .take(self.num_calls);

                make_token_stream(tokens, self.append_unglued_token.clone())
            }
        }

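        // Illustrative trace (not from the original source): if the callback
        // consumed the three tokens `a b c`, it advanced the cursor three
        // times, so `num_calls` starts at 3 and `create_token_stream` above
        // replays `once(a).chain([b, c, d]).take(3)`, i.e. exactly `a b c`;
        // the lookahead `d`, returned by the final cursor call but never
        // consumed, is dropped by the `take`.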
        let mut num_calls = self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls;
        // Extend the captured range to cover a trailing token that the
        // callback reported but did not itself consume.
        match trailing_token {
            TrailingToken::None => {}
            TrailingToken::Semi => {
                assert_eq!(self.token.kind, token::Semi);
                num_calls += 1;
            }
            TrailingToken::MaybeComma => {
                if self.token.kind == token::Comma {
                    num_calls += 1;
                }
            }
        }

        *tokens = Some(LazyTokenStream::new(LazyTokenStreamImpl {
            start_token,
            num_calls,
            cursor_snapshot,
            desugar_doc_comments: self.desugar_doc_comments,
            append_unglued_token: self.token_cursor.append_unglued_token.clone(),
        }));

        Ok(ret)
    }
}

/// Converts a flattened iterator of tokens (including open and close delimiter tokens)
/// into a `TokenStream`, creating a `TokenTree::Delimited` for each matching pair
/// of open and close delims.
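///
/// For example (illustrative), the flattened sequence `(` `a` `)` becomes a
/// single `TokenTree::Delimited` whose `DelimSpan` pairs the two delimiter
/// spans and whose inner stream contains just `a`.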
fn make_token_stream(
    tokens: impl Iterator<Item = (Token, Spacing)>,
    append_unglued_token: Option<TreeAndSpacing>,
) -> TokenStream {
    #[derive(Debug)]
    struct FrameData {
        open: Span,
        inner: Vec<(TokenTree, Spacing)>,
    }
    let mut stack = vec![FrameData { open: DUMMY_SP, inner: vec![] }];
    for (token, spacing) in tokens {
        match token {
            Token { kind: TokenKind::OpenDelim(_), span } => {
                stack.push(FrameData { open: span, inner: vec![] });
            }
            Token { kind: TokenKind::CloseDelim(delim), span } => {
                let frame_data = stack.pop().expect("Token stack was empty!");
                let dspan = DelimSpan::from_pair(frame_data.open, span);
                let stream = TokenStream::new(frame_data.inner);
                let delimited = TokenTree::Delimited(dspan, delim, stream);
                stack
                    .last_mut()
                    .unwrap_or_else(|| panic!("Bottom token frame is missing for tokens!"))
                    .inner
                    .push((delimited, Spacing::Alone));
            }
            token => {
                stack
                    .last_mut()
                    .expect("Bottom token frame is missing!")
                    .inner
                    .push((TokenTree::Token(token), spacing));
            }
        }
    }
    let mut final_buf = stack.pop().expect("Missing final buf!");
    final_buf.inner.extend(append_unglued_token);
    assert!(stack.is_empty(), "Stack should be empty: final_buf={:?} stack={:?}", final_buf, stack);
    TokenStream::new(final_buf.inner)
}