1 //! The main parser interface.
3 #![feature(array_windows)]
4 #![feature(box_patterns)]
5 #![feature(crate_visibility_modifier)]
6 #![feature(if_let_guard)]
7 #![feature(let_chains)]
9 #![feature(never_type)]
10 #![recursion_limit = "256"]
16 use rustc_ast
::token
::{self, Nonterminal, Token, TokenKind}
;
17 use rustc_ast
::tokenstream
::{self, AttributesData, CanSynthesizeMissingTokens, LazyTokenStream}
;
18 use rustc_ast
::tokenstream
::{AttrAnnotatedTokenStream, AttrAnnotatedTokenTree}
;
19 use rustc_ast
::tokenstream
::{Spacing, TokenStream}
;
20 use rustc_ast
::AstLike
;
21 use rustc_ast
::Attribute
;
22 use rustc_ast
::{AttrItem, MetaItem}
;
23 use rustc_ast_pretty
::pprust
;
24 use rustc_data_structures
::sync
::Lrc
;
25 use rustc_errors
::{Applicability, Diagnostic, FatalError, Level, PResult}
;
26 use rustc_session
::parse
::ParseSess
;
27 use rustc_span
::{FileName, SourceFile, Span}
;
/// Name given to the subparser used for macro arguments (passed where a
/// `subparser_name: Option<&'static str>` is expected, e.g. `stream_to_parser`).
pub const MACRO_ARGUMENTS: Option<&str> = Some("macro arguments");
36 use parser
::{emit_unclosed_delims, make_unclosed_delims_error, Parser}
;
38 pub mod validate_attr
;
// A bunch of utility functions of the form `parse_<thing>_from_<source>`
// where <thing> includes crate, expr, item, stmt, tts, and one that
// uses a HOF to parse anything, and <source> includes file and
// `source_str`.
/// A variant of 'panictry!' that works on a Vec<Diagnostic> instead of a single DiagnosticBuilder.
///
/// On `Ok`, yields the value; on `Err`, emits every buffered diagnostic through
/// `$handler` and then raises a `FatalError`.
macro_rules! panictry_buffer {
    ($handler:expr, $e:expr) => {{
        use rustc_errors::FatalError;
        use std::result::Result::{Err, Ok};
        match $e {
            Ok(e) => e,
            Err(errs) => {
                for mut e in errs {
                    $handler.emit_diagnostic(&mut e);
                }
                FatalError.raise()
            }
        }
    }};
}
62 pub fn parse_crate_from_file
<'a
>(input
: &Path
, sess
: &'a ParseSess
) -> PResult
<'a
, ast
::Crate
> {
63 let mut parser
= new_parser_from_file(sess
, input
, None
);
64 parser
.parse_crate_mod()
67 pub fn parse_crate_attrs_from_file
<'a
>(
70 ) -> PResult
<'a
, Vec
<ast
::Attribute
>> {
71 let mut parser
= new_parser_from_file(sess
, input
, None
);
72 parser
.parse_inner_attributes()
75 pub fn parse_crate_from_source_str(
79 ) -> PResult
<'_
, ast
::Crate
> {
80 new_parser_from_source_str(sess
, name
, source
).parse_crate_mod()
83 pub fn parse_crate_attrs_from_source_str(
87 ) -> PResult
<'_
, Vec
<ast
::Attribute
>> {
88 new_parser_from_source_str(sess
, name
, source
).parse_inner_attributes()
91 pub fn parse_stream_from_source_str(
95 override_span
: Option
<Span
>,
97 let (stream
, mut errors
) =
98 source_file_to_stream(sess
, sess
.source_map().new_source_file(name
, source
), override_span
);
99 emit_unclosed_delims(&mut errors
, &sess
);
103 /// Creates a new parser from a source string.
104 pub fn new_parser_from_source_str(sess
: &ParseSess
, name
: FileName
, source
: String
) -> Parser
<'_
> {
105 panictry_buffer
!(&sess
.span_diagnostic
, maybe_new_parser_from_source_str(sess
, name
, source
))
108 /// Creates a new parser from a source string. Returns any buffered errors from lexing the initial
110 pub fn maybe_new_parser_from_source_str(
114 ) -> Result
<Parser
<'_
>, Vec
<Diagnostic
>> {
115 maybe_source_file_to_parser(sess
, sess
.source_map().new_source_file(name
, source
))
118 /// Creates a new parser, handling errors as appropriate if the file doesn't exist.
119 /// If a span is given, that is used on an error as the source of the problem.
120 pub fn new_parser_from_file
<'a
>(sess
: &'a ParseSess
, path
: &Path
, sp
: Option
<Span
>) -> Parser
<'a
> {
121 source_file_to_parser(sess
, file_to_source_file(sess
, path
, sp
))
124 /// Given a `source_file` and config, returns a parser.
125 fn source_file_to_parser(sess
: &ParseSess
, source_file
: Lrc
<SourceFile
>) -> Parser
<'_
> {
126 panictry_buffer
!(&sess
.span_diagnostic
, maybe_source_file_to_parser(sess
, source_file
))
129 /// Given a `source_file` and config, return a parser. Returns any buffered errors from lexing the
130 /// initial token stream.
131 fn maybe_source_file_to_parser(
133 source_file
: Lrc
<SourceFile
>,
134 ) -> Result
<Parser
<'_
>, Vec
<Diagnostic
>> {
135 let end_pos
= source_file
.end_pos
;
136 let (stream
, unclosed_delims
) = maybe_file_to_stream(sess
, source_file
, None
)?
;
137 let mut parser
= stream_to_parser(sess
, stream
, None
);
138 parser
.unclosed_delims
= unclosed_delims
;
139 if parser
.token
== token
::Eof
{
140 parser
.token
.span
= Span
::new(end_pos
, end_pos
, parser
.token
.span
.ctxt(), None
);
148 /// Given a session and a path and an optional span (for error reporting),
149 /// add the path to the session's source_map and return the new source_file or
150 /// error when a file can't be read.
151 fn try_file_to_source_file(
154 spanopt
: Option
<Span
>,
155 ) -> Result
<Lrc
<SourceFile
>, Diagnostic
> {
156 sess
.source_map().load_file(path
).map_err(|e
| {
157 let msg
= format
!("couldn't read {}: {}", path
.display(), e
);
158 let mut diag
= Diagnostic
::new(Level
::Fatal
, &msg
);
159 if let Some(sp
) = spanopt
{
166 /// Given a session and a path and an optional span (for error reporting),
167 /// adds the path to the session's `source_map` and returns the new `source_file`.
168 fn file_to_source_file(sess
: &ParseSess
, path
: &Path
, spanopt
: Option
<Span
>) -> Lrc
<SourceFile
> {
169 match try_file_to_source_file(sess
, path
, spanopt
) {
170 Ok(source_file
) => source_file
,
172 sess
.span_diagnostic
.emit_diagnostic(&mut d
);
178 /// Given a `source_file`, produces a sequence of token trees.
179 pub fn source_file_to_stream(
181 source_file
: Lrc
<SourceFile
>,
182 override_span
: Option
<Span
>,
183 ) -> (TokenStream
, Vec
<lexer
::UnmatchedBrace
>) {
184 panictry_buffer
!(&sess
.span_diagnostic
, maybe_file_to_stream(sess
, source_file
, override_span
))
187 /// Given a source file, produces a sequence of token trees. Returns any buffered errors from
188 /// parsing the token stream.
189 pub fn maybe_file_to_stream(
191 source_file
: Lrc
<SourceFile
>,
192 override_span
: Option
<Span
>,
193 ) -> Result
<(TokenStream
, Vec
<lexer
::UnmatchedBrace
>), Vec
<Diagnostic
>> {
194 let src
= source_file
.src
.as_ref().unwrap_or_else(|| {
195 sess
.span_diagnostic
.bug(&format
!(
196 "cannot lex `source_file` without source: {}",
197 sess
.source_map().filename_for_diagnostics(&source_file
.name
)
201 let (token_trees
, unmatched_braces
) =
202 lexer
::parse_token_trees(sess
, src
.as_str(), source_file
.start_pos
, override_span
);
205 Ok(stream
) => Ok((stream
, unmatched_braces
)),
207 let mut buffer
= Vec
::with_capacity(1);
208 err
.buffer(&mut buffer
);
209 // Not using `emit_unclosed_delims` to use `db.buffer`
210 for unmatched
in unmatched_braces
{
211 if let Some(err
) = make_unclosed_delims_error(unmatched
, &sess
) {
212 err
.buffer(&mut buffer
);
220 /// Given a stream and the `ParseSess`, produces a parser.
221 pub fn stream_to_parser
<'a
>(
224 subparser_name
: Option
<&'
static str>,
226 Parser
::new(sess
, stream
, false, subparser_name
)
229 /// Runs the given subparser `f` on the tokens of the given `attr`'s item.
230 pub fn parse_in
<'a
, T
>(
234 mut f
: impl FnMut(&mut Parser
<'a
>) -> PResult
<'a
, T
>,
235 ) -> PResult
<'a
, T
> {
236 let mut parser
= Parser
::new(sess
, tts
, false, Some(name
));
237 let result
= f(&mut parser
)?
;
238 if parser
.token
!= token
::Eof
{
239 parser
.unexpected()?
;
244 // NOTE(Centril): The following probably shouldn't be here but it acknowledges the
245 // fact that architecturally, we are using parsing (read on below to understand why).
247 pub fn nt_to_tokenstream(
250 synthesize_tokens
: CanSynthesizeMissingTokens
,
252 // A `Nonterminal` is often a parsed AST item. At this point we now
253 // need to convert the parsed AST to an actual token stream, e.g.
254 // un-parse it basically.
256 // Unfortunately there's not really a great way to do that in a
257 // guaranteed lossless fashion right now. The fallback here is to just
258 // stringify the AST node and reparse it, but this loses all span
261 // As a result, some AST nodes are annotated with the token stream they
262 // came from. Here we attempt to extract these lossless token streams
263 // before we fall back to the stringification.
266 |tokens
: Option
<&LazyTokenStream
>| Some(tokens?
.create_token_stream().to_tokenstream());
268 let tokens
= match *nt
{
269 Nonterminal
::NtItem(ref item
) => prepend_attrs(&item
.attrs
, item
.tokens
.as_ref()),
270 Nonterminal
::NtBlock(ref block
) => convert_tokens(block
.tokens
.as_ref()),
271 Nonterminal
::NtStmt(ref stmt
) if let ast
::StmtKind
::Empty
= stmt
.kind
=> {
272 let tokens
= AttrAnnotatedTokenStream
::new(vec
![(
273 tokenstream
::AttrAnnotatedTokenTree
::Token(Token
::new(
279 prepend_attrs(&stmt
.attrs(), Some(&LazyTokenStream
::new(tokens
)))
281 Nonterminal
::NtStmt(ref stmt
) => prepend_attrs(&stmt
.attrs(), stmt
.tokens()),
282 Nonterminal
::NtPat(ref pat
) => convert_tokens(pat
.tokens
.as_ref()),
283 Nonterminal
::NtTy(ref ty
) => convert_tokens(ty
.tokens
.as_ref()),
284 Nonterminal
::NtIdent(ident
, is_raw
) => {
285 Some(tokenstream
::TokenTree
::token(token
::Ident(ident
.name
, is_raw
), ident
.span
).into())
287 Nonterminal
::NtLifetime(ident
) => {
288 Some(tokenstream
::TokenTree
::token(token
::Lifetime(ident
.name
), ident
.span
).into())
290 Nonterminal
::NtMeta(ref attr
) => convert_tokens(attr
.tokens
.as_ref()),
291 Nonterminal
::NtPath(ref path
) => convert_tokens(path
.tokens
.as_ref()),
292 Nonterminal
::NtVis(ref vis
) => convert_tokens(vis
.tokens
.as_ref()),
293 Nonterminal
::NtExpr(ref expr
) | Nonterminal
::NtLiteral(ref expr
) => {
294 prepend_attrs(&expr
.attrs
, expr
.tokens
.as_ref())
298 if let Some(tokens
) = tokens
{
300 } else if matches
!(synthesize_tokens
, CanSynthesizeMissingTokens
::Yes
) {
301 return fake_token_stream(sess
, nt
);
304 "Missing tokens for nt {:?} at {:?}: {:?}",
307 pprust
::nonterminal_to_string(nt
)
312 fn prepend_attrs(attrs
: &[Attribute
], tokens
: Option
<&LazyTokenStream
>) -> Option
<TokenStream
> {
313 let tokens
= tokens?
;
314 if attrs
.is_empty() {
315 return Some(tokens
.create_token_stream().to_tokenstream());
317 let attr_data
= AttributesData { attrs: attrs.to_vec().into(), tokens: tokens.clone() }
;
318 let wrapped
= AttrAnnotatedTokenStream
::new(vec
![(
319 AttrAnnotatedTokenTree
::Attributes(attr_data
),
322 Some(wrapped
.to_tokenstream())
325 pub fn fake_token_stream(sess
: &ParseSess
, nt
: &Nonterminal
) -> TokenStream
{
326 let source
= pprust
::nonterminal_to_string(nt
);
327 let filename
= FileName
::macro_expansion_source_code(&source
);
328 parse_stream_from_source_str(filename
, source
, sess
, Some(nt
.span()))
331 pub fn fake_token_stream_for_crate(sess
: &ParseSess
, krate
: &ast
::Crate
) -> TokenStream
{
332 let source
= pprust
::crate_to_string_for_macros(krate
);
333 let filename
= FileName
::macro_expansion_source_code(&source
);
334 parse_stream_from_source_str(filename
, source
, sess
, Some(krate
.spans
.inner_span
))
337 pub fn parse_cfg_attr(
339 parse_sess
: &ParseSess
,
340 ) -> Option
<(MetaItem
, Vec
<(AttrItem
, Span
)>)> {
341 match attr
.get_normal_item().args
{
342 ast
::MacArgs
::Delimited(dspan
, delim
, ref tts
) if !tts
.is_empty() => {
343 let msg
= "wrong `cfg_attr` delimiters";
344 crate::validate_attr
::check_meta_bad_delim(parse_sess
, dspan
, delim
, msg
);
345 match parse_in(parse_sess
, tts
.clone(), "`cfg_attr` input", |p
| p
.parse_cfg_attr()) {
346 Ok(r
) => return Some(r
),
348 e
.help(&format
!("the valid syntax is `{}`", CFG_ATTR_GRAMMAR_HELP
))
349 .note(CFG_ATTR_NOTE_REF
)
354 _
=> error_malformed_cfg_attr_missing(attr
.span
, parse_sess
),
// Help/note text shared by the `cfg_attr` diagnostics above.
const CFG_ATTR_GRAMMAR_HELP: &str = "#[cfg_attr(condition, attribute, other_attribute, ...)]";
const CFG_ATTR_NOTE_REF: &str = "for more information, visit \
                                 <https://doc.rust-lang.org/reference/conditional-compilation.html\
                                 #the-cfg_attr-attribute>";
364 fn error_malformed_cfg_attr_missing(span
: Span
, parse_sess
: &ParseSess
) {
367 .struct_span_err(span
, "malformed `cfg_attr` attribute input")
370 "missing condition and attribute",
371 CFG_ATTR_GRAMMAR_HELP
.to_string(),
372 Applicability
::HasPlaceholders
,
374 .note(CFG_ATTR_NOTE_REF
)