1 //! The main parser interface.
3 #![feature(crate_visibility_modifier)]
4 #![feature(bindings_after_at)]
5 #![feature(iter_order_by)]
6 #![feature(or_patterns)]
7 #![feature(box_syntax)]
8 #![feature(box_patterns)]
9 #![recursion_limit = "256"]
use rustc_ast::token::{self, Nonterminal};
use rustc_ast::tokenstream::{self, CanSynthesizeMissingTokens, LazyTokenStream, TokenStream};
use rustc_ast::AstLike;
use rustc_ast_pretty::pprust;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{Diagnostic, FatalError, Level, PResult};
use rustc_session::parse::ParseSess;
use rustc_span::{FileName, SourceFile, Span};

use std::path::Path;

pub mod lexer;
pub mod parser;
/// Label describing what is being parsed, shaped to match the
/// `subparser_name: Option<&'static str>` parameter of `stream_to_parser`
/// (presumably passed there by macro-argument parsing entry points —
/// confirm at call sites outside this file).
pub const MACRO_ARGUMENTS: Option<&str> = Some("macro arguments");
30 use parser
::{emit_unclosed_delims, make_unclosed_delims_error, Parser}
;
32 pub mod validate_attr
;
// A bunch of utility functions of the form `parse_<thing>_from_<source>`
// where <thing> includes crate, expr, item, stmt, tts, and one that
// uses a HOF to parse anything, and <source> includes file and
// source string.
/// A variant of 'panictry!' that works on a Vec<Diagnostic> instead of a single DiagnosticBuilder.
///
/// Evaluates `$e`; on `Ok` the macro yields the value, on `Err` it emits
/// every buffered diagnostic through `$handler` and raises a fatal error.
macro_rules! panictry_buffer {
    ($handler:expr, $e:expr) => {{
        use rustc_errors::FatalError;
        use std::result::Result::{Err, Ok};
        match $e {
            Ok(e) => e,
            Err(errs) => {
                // Emit each buffered diagnostic before aborting compilation.
                for e in errs {
                    $handler.emit_diagnostic(&e);
                }
                FatalError.raise()
            }
        }
    }};
}
56 pub fn parse_crate_from_file
<'a
>(input
: &Path
, sess
: &'a ParseSess
) -> PResult
<'a
, ast
::Crate
> {
57 let mut parser
= new_parser_from_file(sess
, input
, None
);
58 parser
.parse_crate_mod()
61 pub fn parse_crate_attrs_from_file
<'a
>(
64 ) -> PResult
<'a
, Vec
<ast
::Attribute
>> {
65 let mut parser
= new_parser_from_file(sess
, input
, None
);
66 parser
.parse_inner_attributes()
69 pub fn parse_crate_from_source_str(
73 ) -> PResult
<'_
, ast
::Crate
> {
74 new_parser_from_source_str(sess
, name
, source
).parse_crate_mod()
77 pub fn parse_crate_attrs_from_source_str(
81 ) -> PResult
<'_
, Vec
<ast
::Attribute
>> {
82 new_parser_from_source_str(sess
, name
, source
).parse_inner_attributes()
85 pub fn parse_stream_from_source_str(
89 override_span
: Option
<Span
>,
91 let (stream
, mut errors
) =
92 source_file_to_stream(sess
, sess
.source_map().new_source_file(name
, source
), override_span
);
93 emit_unclosed_delims(&mut errors
, &sess
);
97 /// Creates a new parser from a source string.
98 pub fn new_parser_from_source_str(sess
: &ParseSess
, name
: FileName
, source
: String
) -> Parser
<'_
> {
99 panictry_buffer
!(&sess
.span_diagnostic
, maybe_new_parser_from_source_str(sess
, name
, source
))
102 /// Creates a new parser from a source string. Returns any buffered errors from lexing the initial
104 pub fn maybe_new_parser_from_source_str(
108 ) -> Result
<Parser
<'_
>, Vec
<Diagnostic
>> {
109 maybe_source_file_to_parser(sess
, sess
.source_map().new_source_file(name
, source
))
112 /// Creates a new parser, handling errors as appropriate if the file doesn't exist.
113 /// If a span is given, that is used on an error as the source of the problem.
114 pub fn new_parser_from_file
<'a
>(sess
: &'a ParseSess
, path
: &Path
, sp
: Option
<Span
>) -> Parser
<'a
> {
115 source_file_to_parser(sess
, file_to_source_file(sess
, path
, sp
))
118 /// Given a `source_file` and config, returns a parser.
119 fn source_file_to_parser(sess
: &ParseSess
, source_file
: Lrc
<SourceFile
>) -> Parser
<'_
> {
120 panictry_buffer
!(&sess
.span_diagnostic
, maybe_source_file_to_parser(sess
, source_file
))
123 /// Given a `source_file` and config, return a parser. Returns any buffered errors from lexing the
124 /// initial token stream.
125 fn maybe_source_file_to_parser(
127 source_file
: Lrc
<SourceFile
>,
128 ) -> Result
<Parser
<'_
>, Vec
<Diagnostic
>> {
129 let end_pos
= source_file
.end_pos
;
130 let (stream
, unclosed_delims
) = maybe_file_to_stream(sess
, source_file
, None
)?
;
131 let mut parser
= stream_to_parser(sess
, stream
, None
);
132 parser
.unclosed_delims
= unclosed_delims
;
133 if parser
.token
== token
::Eof
{
134 parser
.token
.span
= Span
::new(end_pos
, end_pos
, parser
.token
.span
.ctxt());
142 /// Given a session and a path and an optional span (for error reporting),
143 /// add the path to the session's source_map and return the new source_file or
144 /// error when a file can't be read.
145 fn try_file_to_source_file(
148 spanopt
: Option
<Span
>,
149 ) -> Result
<Lrc
<SourceFile
>, Diagnostic
> {
150 sess
.source_map().load_file(path
).map_err(|e
| {
151 let msg
= format
!("couldn't read {}: {}", path
.display(), e
);
152 let mut diag
= Diagnostic
::new(Level
::Fatal
, &msg
);
153 if let Some(sp
) = spanopt
{
160 /// Given a session and a path and an optional span (for error reporting),
161 /// adds the path to the session's `source_map` and returns the new `source_file`.
162 fn file_to_source_file(sess
: &ParseSess
, path
: &Path
, spanopt
: Option
<Span
>) -> Lrc
<SourceFile
> {
163 match try_file_to_source_file(sess
, path
, spanopt
) {
164 Ok(source_file
) => source_file
,
166 sess
.span_diagnostic
.emit_diagnostic(&d
);
172 /// Given a `source_file`, produces a sequence of token trees.
173 pub fn source_file_to_stream(
175 source_file
: Lrc
<SourceFile
>,
176 override_span
: Option
<Span
>,
177 ) -> (TokenStream
, Vec
<lexer
::UnmatchedBrace
>) {
178 panictry_buffer
!(&sess
.span_diagnostic
, maybe_file_to_stream(sess
, source_file
, override_span
))
181 /// Given a source file, produces a sequence of token trees. Returns any buffered errors from
182 /// parsing the token stream.
183 pub fn maybe_file_to_stream(
185 source_file
: Lrc
<SourceFile
>,
186 override_span
: Option
<Span
>,
187 ) -> Result
<(TokenStream
, Vec
<lexer
::UnmatchedBrace
>), Vec
<Diagnostic
>> {
188 let src
= source_file
.src
.as_ref().unwrap_or_else(|| {
190 .bug(&format
!("cannot lex `source_file` without source: {}", source_file
.name
));
193 let (token_trees
, unmatched_braces
) =
194 lexer
::parse_token_trees(sess
, src
.as_str(), source_file
.start_pos
, override_span
);
197 Ok(stream
) => Ok((stream
, unmatched_braces
)),
199 let mut buffer
= Vec
::with_capacity(1);
200 err
.buffer(&mut buffer
);
201 // Not using `emit_unclosed_delims` to use `db.buffer`
202 for unmatched
in unmatched_braces
{
203 if let Some(err
) = make_unclosed_delims_error(unmatched
, &sess
) {
204 err
.buffer(&mut buffer
);
212 /// Given a stream and the `ParseSess`, produces a parser.
213 pub fn stream_to_parser
<'a
>(
216 subparser_name
: Option
<&'
static str>,
218 Parser
::new(sess
, stream
, false, subparser_name
)
221 /// Runs the given subparser `f` on the tokens of the given `attr`'s item.
222 pub fn parse_in
<'a
, T
>(
226 mut f
: impl FnMut(&mut Parser
<'a
>) -> PResult
<'a
, T
>,
227 ) -> PResult
<'a
, T
> {
228 let mut parser
= Parser
::new(sess
, tts
, false, Some(name
));
229 let result
= f(&mut parser
)?
;
230 if parser
.token
!= token
::Eof
{
231 parser
.unexpected()?
;
236 // NOTE(Centril): The following probably shouldn't be here but it acknowledges the
237 // fact that architecturally, we are using parsing (read on below to understand why).
239 pub fn nt_to_tokenstream(
242 synthesize_tokens
: CanSynthesizeMissingTokens
,
244 // A `Nonterminal` is often a parsed AST item. At this point we now
245 // need to convert the parsed AST to an actual token stream, e.g.
246 // un-parse it basically.
248 // Unfortunately there's not really a great way to do that in a
249 // guaranteed lossless fashion right now. The fallback here is to just
250 // stringify the AST node and reparse it, but this loses all span
253 // As a result, some AST nodes are annotated with the token stream they
254 // came from. Here we attempt to extract these lossless token streams
255 // before we fall back to the stringification.
258 |tokens
: Option
<&LazyTokenStream
>| tokens
.as_ref().map(|t
| t
.create_token_stream());
260 let tokens
= match *nt
{
261 Nonterminal
::NtItem(ref item
) => prepend_attrs(sess
, &item
.attrs
, nt
, item
.tokens
.as_ref()),
262 Nonterminal
::NtBlock(ref block
) => convert_tokens(block
.tokens
.as_ref()),
263 Nonterminal
::NtStmt(ref stmt
) => {
264 let do_prepend
= |tokens
| prepend_attrs(sess
, stmt
.attrs(), nt
, tokens
);
265 if let ast
::StmtKind
::Empty
= stmt
.kind
{
266 let tokens
: TokenStream
=
267 tokenstream
::TokenTree
::token(token
::Semi
, stmt
.span
).into();
268 do_prepend(Some(&LazyTokenStream
::new(tokens
)))
270 do_prepend(stmt
.tokens())
273 Nonterminal
::NtPat(ref pat
) => convert_tokens(pat
.tokens
.as_ref()),
274 Nonterminal
::NtTy(ref ty
) => convert_tokens(ty
.tokens
.as_ref()),
275 Nonterminal
::NtIdent(ident
, is_raw
) => {
276 Some(tokenstream
::TokenTree
::token(token
::Ident(ident
.name
, is_raw
), ident
.span
).into())
278 Nonterminal
::NtLifetime(ident
) => {
279 Some(tokenstream
::TokenTree
::token(token
::Lifetime(ident
.name
), ident
.span
).into())
281 Nonterminal
::NtMeta(ref attr
) => convert_tokens(attr
.tokens
.as_ref()),
282 Nonterminal
::NtPath(ref path
) => convert_tokens(path
.tokens
.as_ref()),
283 Nonterminal
::NtVis(ref vis
) => convert_tokens(vis
.tokens
.as_ref()),
284 Nonterminal
::NtTT(ref tt
) => Some(tt
.clone().into()),
285 Nonterminal
::NtExpr(ref expr
) | Nonterminal
::NtLiteral(ref expr
) => {
286 if expr
.tokens
.is_none() {
287 debug
!("missing tokens for expr {:?}", expr
);
289 prepend_attrs(sess
, &expr
.attrs
, nt
, expr
.tokens
.as_ref())
293 if let Some(tokens
) = tokens
{
295 } else if matches
!(synthesize_tokens
, CanSynthesizeMissingTokens
::Yes
) {
296 return fake_token_stream(sess
, nt
);
298 panic
!("Missing tokens for nt at {:?}: {:?}", nt
.span(), pprust
::nonterminal_to_string(nt
));
302 pub fn fake_token_stream(sess
: &ParseSess
, nt
: &Nonterminal
) -> TokenStream
{
303 let source
= pprust
::nonterminal_to_string(nt
);
304 let filename
= FileName
::macro_expansion_source_code(&source
);
305 parse_stream_from_source_str(filename
, source
, sess
, Some(nt
.span()))
310 attrs
: &[ast
::Attribute
],
312 tokens
: Option
<&tokenstream
::LazyTokenStream
>,
313 ) -> Option
<tokenstream
::TokenStream
> {
314 if attrs
.is_empty() {
315 return Some(tokens?
.create_token_stream());
317 let mut builder
= tokenstream
::TokenStreamBuilder
::new();
319 // FIXME: Correctly handle tokens for inner attributes.
320 // For now, we fall back to reparsing the original AST node
321 if attr
.style
== ast
::AttrStyle
::Inner
{
322 return Some(fake_token_stream(sess
, nt
));
324 builder
.push(attr
.tokens());
326 builder
.push(tokens?
.create_token_stream());
327 Some(builder
.build())