use crate::tests::{matches_codepattern, string_to_stream, with_error_checking_parse};

use rustc_ast::ast::{self, Name, PatKind};
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Token};
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
use rustc_ast::visit;
use rustc_ast::with_default_globals;
use rustc_ast_pretty::pprust::item_to_string;
use rustc_errors::PResult;
use rustc_parse::new_parser_from_source_str;
use rustc_session::parse::ParseSess;
use rustc_span::source_map::FilePathMapping;
use rustc_span::symbol::{kw, sym, Symbol};
use rustc_span::{BytePos, FileName, Pos, Span};

use std::path::PathBuf;

fn sess() -> ParseSess {
    ParseSess::new(FilePathMapping::empty())
}

/// Parses an item.
///
/// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and `Err`
/// when a syntax error occurred.
fn parse_item_from_source_str(
    name: FileName,
    source: String,
    sess: &ParseSess,
) -> PResult<'_, Option<P<ast::Item>>> {
    new_parser_from_source_str(sess, name, source).parse_item()
}
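
// A minimal illustrative check of the contract documented on
// `parse_item_from_source_str`: a well-formed item comes back as `Ok(Some(_))`,
// and source containing no item at all is expected to come back as `Ok(None)`.
// The test name and the custom file names below are made up for this example.
#[test]
fn parse_item_from_source_str_outcomes() {
    with_default_globals(|| {
        let sess = sess();

        // A complete item parses to `Ok(Some(_))`.
        let item = parse_item_from_source_str(
            FileName::Custom("outcome_some".to_string()),
            "fn f() {}".to_string(),
            &sess,
        )
        .unwrap();
        assert!(item.is_some());

        // Empty input contains no item, so the parser should report `Ok(None)`.
        let item = parse_item_from_source_str(
            FileName::Custom("outcome_none".to_string()),
            String::new(),
            &sess,
        )
        .unwrap();
        assert!(item.is_none());
    })
}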

// Produces a `rustc_span::Span`.
fn sp(a: u32, b: u32) -> Span {
    Span::with_root_ctxt(BytePos(a), BytePos(b))
}

/// Parses a string, returns an expression.
fn string_to_expr(source_str: String) -> P<ast::Expr> {
    with_error_checking_parse(source_str, &sess(), |p| p.parse_expr())
}

/// Parses a string, returns an item.
fn string_to_item(source_str: String) -> Option<P<ast::Item>> {
    with_error_checking_parse(source_str, &sess(), |p| p.parse_item())
}

#[should_panic]
#[test]
fn bad_path_expr_1() {
    with_default_globals(|| {
        string_to_expr("::abc::def::return".to_string());
    })
}

// Checks the token-tree-ization of macros.
#[test]
fn string_to_tts_macro() {
    with_default_globals(|| {
        let tts: Vec<_> =
            string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect();
        let tts: &[TokenTree] = &tts[..];

        match tts {
            [TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. }), TokenTree::Token(Token { kind: token::Not, .. }), TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. }), TokenTree::Delimited(_, macro_delim, macro_tts)]
                if name_macro_rules == &kw::MacroRules && name_zip.as_str() == "zip" =>
            {
                let tts = &macro_tts.trees().collect::<Vec<_>>();
                match &tts[..] {
                    [TokenTree::Delimited(_, first_delim, first_tts), TokenTree::Token(Token { kind: token::FatArrow, .. }), TokenTree::Delimited(_, second_delim, second_tts)]
                        if macro_delim == &token::Paren =>
                    {
                        let tts = &first_tts.trees().collect::<Vec<_>>();
                        match &tts[..] {
                            [TokenTree::Token(Token { kind: token::Dollar, .. }), TokenTree::Token(Token { kind: token::Ident(name, false), .. })]
                                if first_delim == &token::Paren && name.as_str() == "a" => {}
                            _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
                        }
                        let tts = &second_tts.trees().collect::<Vec<_>>();
                        match &tts[..] {
                            [TokenTree::Token(Token { kind: token::Dollar, .. }), TokenTree::Token(Token { kind: token::Ident(name, false), .. })]
                                if second_delim == &token::Paren && name.as_str() == "a" => {}
                            _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
                        }
                    }
                    _ => panic!("value 2: {:?} {:?}", macro_delim, macro_tts),
                }
            }
            _ => panic!("value: {:?}", tts),
        }
    })
}

#[test]
fn string_to_tts_1() {
    with_default_globals(|| {
        let tts = string_to_stream("fn a (b : i32) { b; }".to_string());

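        // Byte offsets within `"fn a (b : i32) { b; }"`, matching the spans below:
        // `fn` = 0..2, `a` = 3..4, `(` = 5..6, `b` = 6..7, `:` = 8..9, `i32` = 10..13,
        // `)` = 13..14, `{` = 15..16, `b` = 17..18, `;` = 18..19, `}` = 20..21.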
        let expected = TokenStream::new(vec![
            TokenTree::token(token::Ident(kw::Fn, false), sp(0, 2)).into(),
            TokenTree::token(token::Ident(Name::intern("a"), false), sp(3, 4)).into(),
            TokenTree::Delimited(
                DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
                token::DelimToken::Paren,
                TokenStream::new(vec![
                    TokenTree::token(token::Ident(Name::intern("b"), false), sp(6, 7)).into(),
                    TokenTree::token(token::Colon, sp(8, 9)).into(),
                    TokenTree::token(token::Ident(sym::i32, false), sp(10, 13)).into(),
                ])
                .into(),
            )
            .into(),
            TokenTree::Delimited(
                DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
                token::DelimToken::Brace,
                TokenStream::new(vec![
                    TokenTree::token(token::Ident(Name::intern("b"), false), sp(17, 18)).into(),
                    TokenTree::token(token::Semi, sp(18, 19)).into(),
                ])
                .into(),
            )
            .into(),
        ]);

        assert_eq!(tts, expected);
    })
}

#[test]
fn parse_use() {
    with_default_globals(|| {
        let use_s = "use foo::bar::baz;";
        let vitem = string_to_item(use_s.to_string()).unwrap();
        let vitem_s = item_to_string(&vitem);
        assert_eq!(&vitem_s[..], use_s);

        let use_s = "use foo::bar as baz;";
        let vitem = string_to_item(use_s.to_string()).unwrap();
        let vitem_s = item_to_string(&vitem);
        assert_eq!(&vitem_s[..], use_s);
    })
}

#[test]
fn parse_extern_crate() {
    with_default_globals(|| {
        let ex_s = "extern crate foo;";
        let vitem = string_to_item(ex_s.to_string()).unwrap();
        let vitem_s = item_to_string(&vitem);
        assert_eq!(&vitem_s[..], ex_s);

        let ex_s = "extern crate foo as bar;";
        let vitem = string_to_item(ex_s.to_string()).unwrap();
        let vitem_s = item_to_string(&vitem);
        assert_eq!(&vitem_s[..], ex_s);
    })
}

fn get_spans_of_pat_idents(src: &str) -> Vec<Span> {
    let item = string_to_item(src.to_string()).unwrap();

    struct PatIdentVisitor {
        spans: Vec<Span>,
    }
    impl<'a> visit::Visitor<'a> for PatIdentVisitor {
        fn visit_pat(&mut self, p: &'a ast::Pat) {
            match p.kind {
                PatKind::Ident(_, ref ident, _) => {
                    self.spans.push(ident.span.clone());
                }
                _ => {
                    visit::walk_pat(self, p);
                }
            }
        }
    }
    let mut v = PatIdentVisitor { spans: Vec::new() };
    visit::walk_item(&mut v, &item);
    return v.spans;
}

#[test]
fn span_of_self_arg_pat_idents_are_correct() {
    with_default_globals(|| {
        let srcs = [
            "impl z { fn a (&self, &myarg: i32) {} }",
            "impl z { fn a (&mut self, &myarg: i32) {} }",
            "impl z { fn a (&'a self, &myarg: i32) {} }",
            "impl z { fn a (self, &myarg: i32) {} }",
            "impl z { fn a (self: Foo, &myarg: i32) {} }",
        ];

        for &src in &srcs {
            let spans = get_spans_of_pat_idents(src);
            let (lo, hi) = (spans[0].lo(), spans[0].hi());
            assert!(
                "self" == &src[lo.to_usize()..hi.to_usize()],
                "\"{}\" != \"self\". src=\"{}\"",
                &src[lo.to_usize()..hi.to_usize()],
                src
            )
        }
    })
}

#[test]
fn parse_exprs() {
    with_default_globals(|| {
        // just make sure that they parse....
        string_to_expr("3 + 4".to_string());
        string_to_expr("a::z.froob(b,&(987+3))".to_string());
    })
}

#[test]
fn attrs_fix_bug() {
    with_default_globals(|| {
        string_to_item(
            "pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
                -> Result<Box<Writer>, String> {
                #[cfg(windows)]
                fn wb() -> c_int {
                    (O_WRONLY | libc::consts::os::extra::O_BINARY) as c_int
                }

                #[cfg(unix)]
                fn wb() -> c_int { O_WRONLY as c_int }

                let mut fflags: c_int = wb();
            }"
            .to_string(),
        );
    })
}

#[test]
fn crlf_doc_comments() {
    with_default_globals(|| {
        let sess = sess();

        let name_1 = FileName::Custom("crlf_source_1".to_string());
        let source = "/// doc comment\r\nfn foo() {}".to_string();
        let item = parse_item_from_source_str(name_1, source, &sess).unwrap().unwrap();
        let doc = item.attrs.iter().filter_map(|at| at.doc_str()).next().unwrap();
        assert_eq!(doc.as_str(), "/// doc comment");

        let name_2 = FileName::Custom("crlf_source_2".to_string());
        let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
        let item = parse_item_from_source_str(name_2, source, &sess).unwrap().unwrap();
        let docs = item.attrs.iter().filter_map(|at| at.doc_str()).collect::<Vec<_>>();
        let b: &[_] = &[Symbol::intern("/// doc comment"), Symbol::intern("/// line 2")];
        assert_eq!(&docs[..], b);

        let name_3 = FileName::Custom("crlf_source_3".to_string());
        let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string();
        let item = parse_item_from_source_str(name_3, source, &sess).unwrap().unwrap();
        let doc = item.attrs.iter().filter_map(|at| at.doc_str()).next().unwrap();
        assert_eq!(doc.as_str(), "/** doc comment\n * with CRLF */");
    });
}

#[test]
fn ttdelim_span() {
    fn parse_expr_from_source_str(
        name: FileName,
        source: String,
        sess: &ParseSess,
    ) -> PResult<'_, P<ast::Expr>> {
        new_parser_from_source_str(sess, name, source).parse_expr()
    }

    with_default_globals(|| {
        let sess = sess();
        let expr = parse_expr_from_source_str(
            PathBuf::from("foo").into(),
            "foo!( fn main() { body } )".to_string(),
            &sess,
        )
        .unwrap();

        let tts: Vec<_> = match expr.kind {
            ast::ExprKind::MacCall(ref mac) => mac.args.inner_tokens().trees().collect(),
            _ => panic!("not a macro"),
        };

        let span = tts.iter().rev().next().unwrap().span();

        match sess.source_map().span_to_snippet(span) {
            Ok(s) => assert_eq!(&s[..], "{ body }"),
            Err(_) => panic!("could not get snippet"),
        }
    });
}

// This tests that when parsing a string (rather than a file) we don't try
// to read in a file for a module declaration and just parse a stub.
// See `recurse_into_file_modules` in the parser.
#[test]
fn out_of_line_mod() {
    with_default_globals(|| {
        let item = parse_item_from_source_str(
            PathBuf::from("foo").into(),
            "mod foo { struct S; mod this_does_not_exist; }".to_owned(),
            &sess(),
        )
        .unwrap()
        .unwrap();

        if let ast::ItemKind::Mod(ref m) = item.kind {
            assert!(m.items.len() == 2);
        } else {
            panic!();
        }
    });
}

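// As exercised by the assertions in `eqmodws` below: whitespace in the first
// argument of `matches_codepattern` matches any run of whitespace (including
// none) in the second argument, while whitespace in the second argument must
// be matched by whitespace in the first.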
#[test]
fn eqmodws() {
    assert_eq!(matches_codepattern("", ""), true);
    assert_eq!(matches_codepattern("", "a"), false);
    assert_eq!(matches_codepattern("a", ""), false);
    assert_eq!(matches_codepattern("a", "a"), true);
    assert_eq!(matches_codepattern("a b", "a \n\t\r b"), true);
    assert_eq!(matches_codepattern("a b ", "a \n\t\r b"), true);
    assert_eq!(matches_codepattern("a b", "a \n\t\r b "), false);
    assert_eq!(matches_codepattern("a b", "a b"), true);
    assert_eq!(matches_codepattern("ab", "a b"), false);
    assert_eq!(matches_codepattern("a b", "ab"), true);
    assert_eq!(matches_codepattern(" a b", "ab"), true);
}

#[test]
fn pattern_whitespace() {
    assert_eq!(matches_codepattern("", "\x0C"), false);
    assert_eq!(matches_codepattern("a b ", "a \u{0085}\n\t\r b"), true);
    assert_eq!(matches_codepattern("a b", "a \u{0085}\n\t\r b "), false);
}

#[test]
fn non_pattern_whitespace() {
    // These have the property 'White_Space' but not 'Pattern_White_Space'
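    // (here: U+2002 EN SPACE, U+205F MEDIUM MATHEMATICAL SPACE, U+3000 IDEOGRAPHIC SPACE).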
    assert_eq!(matches_codepattern("a b", "a\u{2002}b"), false);
    assert_eq!(matches_codepattern("a b", "a\u{2002}b"), false);
    assert_eq!(matches_codepattern("\u{205F}a b", "ab"), false);
    assert_eq!(matches_codepattern("a \u{3000}b", "ab"), false);
}