// src/libsyntax_expand/parse/tests.rs
// (vendored from upstream rustc 1.41.1 sources)
1 use crate::tests::{matches_codepattern, string_to_stream, with_error_checking_parse};
2
3 use rustc_parse::new_parser_from_source_str;
4 use syntax::ast::{self, Name, PatKind};
5 use syntax::attr::first_attr_value_str_by_name;
6 use syntax::sess::ParseSess;
7 use syntax::token::{self, Token};
8 use syntax::print::pprust::item_to_string;
9 use syntax::ptr::P;
10 use syntax::source_map::FilePathMapping;
11 use syntax::symbol::{kw, sym};
12 use syntax::tokenstream::{DelimSpan, TokenTree, TokenStream};
13 use syntax::visit;
14 use syntax::with_default_globals;
15 use syntax_pos::{Span, BytePos, Pos, FileName};
16 use errors::PResult;
17
18 use std::path::PathBuf;
19
/// Creates a fresh `ParseSess` with an empty file-path mapping.
///
/// Each test builds its own session so parses are fully isolated.
fn sess() -> ParseSess {
    ParseSess::new(FilePathMapping::empty())
}
23
24 /// Parses an item.
25 ///
26 /// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and `Err`
27 /// when a syntax error occurred.
28 fn parse_item_from_source_str(name: FileName, source: String, sess: &ParseSess)
29 -> PResult<'_, Option<P<ast::Item>>> {
30 new_parser_from_source_str(sess, name, source).parse_item()
31 }
32
33 // Produces a `syntax_pos::span`.
34 fn sp(a: u32, b: u32) -> Span {
35 Span::with_root_ctxt(BytePos(a), BytePos(b))
36 }
37
38 /// Parses a string, return an expression.
39 fn string_to_expr(source_str : String) -> P<ast::Expr> {
40 with_error_checking_parse(source_str, &sess(), |p| p.parse_expr())
41 }
42
43 /// Parses a string, returns an item.
44 fn string_to_item(source_str : String) -> Option<P<ast::Item>> {
45 with_error_checking_parse(source_str, &sess(), |p| p.parse_item())
46 }
47
// A path whose final segment is the keyword `return` must be rejected;
// the error-checking parse panics, satisfying `#[should_panic]`.
#[should_panic]
#[test] fn bad_path_expr_1() {
    with_default_globals(|| {
        string_to_expr("::abc::def::return".to_string());
    })
}
54
// Checks the token-tree-ization of macros: lexing a `macro_rules!`
// definition must produce the expected nested token-tree shape.
#[test]
fn string_to_tts_macro () {
    with_default_globals(|| {
        let tts: Vec<_> =
            string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect();
        let tts: &[TokenTree] = &tts[..];

        // Top level must be: `macro_rules` `!` `zip` followed by one
        // delimited group holding the rule.
        match tts {
            [
                TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. }),
                TokenTree::Token(Token { kind: token::Not, .. }),
                TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. }),
                TokenTree::Delimited(_, macro_delim, macro_tts)
            ]
            if name_macro_rules == &sym::macro_rules && name_zip.as_str() == "zip" => {
                // Inside the macro body: one rule, `(matcher) => (transcriber)`.
                let tts = &macro_tts.trees().collect::<Vec<_>>();
                match &tts[..] {
                    [
                        TokenTree::Delimited(_, first_delim, first_tts),
                        TokenTree::Token(Token { kind: token::FatArrow, .. }),
                        TokenTree::Delimited(_, second_delim, second_tts),
                    ]
                    if macro_delim == &token::Paren => {
                        // LHS matcher: exactly `$` `a`, parenthesized.
                        let tts = &first_tts.trees().collect::<Vec<_>>();
                        match &tts[..] {
                            [
                                TokenTree::Token(Token { kind: token::Dollar, .. }),
                                TokenTree::Token(Token { kind: token::Ident(name, false), .. }),
                            ]
                            if first_delim == &token::Paren && name.as_str() == "a" => {},
                            _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
                        }
                        // RHS transcriber: also exactly `$` `a`, parenthesized.
                        let tts = &second_tts.trees().collect::<Vec<_>>();
                        match &tts[..] {
                            [
                                TokenTree::Token(Token { kind: token::Dollar, .. }),
                                TokenTree::Token(Token { kind: token::Ident(name, false), .. }),
                            ]
                            if second_delim == &token::Paren && name.as_str() == "a" => {},
                            _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
                        }
                    },
                    _ => panic!("value 2: {:?} {:?}", macro_delim, macro_tts),
                }
            },
            _ => panic!("value: {:?}",tts),
        }
    })
}
105
// Checks token-tree-ization of a small function, including the exact
// byte-offset spans assigned to each token of `fn a (b : i32) { b; }`.
#[test]
fn string_to_tts_1() {
    with_default_globals(|| {
        let tts = string_to_stream("fn a (b : i32) { b; }".to_string());

        // Spans below are byte offsets into the source string above,
        // e.g. `fn` occupies bytes 0..2 and `a` bytes 3..4.
        let expected = TokenStream::new(vec![
            TokenTree::token(token::Ident(kw::Fn, false), sp(0, 2)).into(),
            TokenTree::token(token::Ident(Name::intern("a"), false), sp(3, 4)).into(),
            // `(b : i32)` — delimiter spans cover the `(` and `)` bytes.
            TokenTree::Delimited(
                DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
                token::DelimToken::Paren,
                TokenStream::new(vec![
                    TokenTree::token(token::Ident(Name::intern("b"), false), sp(6, 7)).into(),
                    TokenTree::token(token::Colon, sp(8, 9)).into(),
                    TokenTree::token(token::Ident(sym::i32, false), sp(10, 13)).into(),
                ]).into(),
            ).into(),
            // `{ b; }` — the function body block.
            TokenTree::Delimited(
                DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
                token::DelimToken::Brace,
                TokenStream::new(vec![
                    TokenTree::token(token::Ident(Name::intern("b"), false), sp(17, 18)).into(),
                    TokenTree::token(token::Semi, sp(18, 19)).into(),
                ]).into(),
            ).into()
        ]);

        assert_eq!(tts, expected);
    })
}
136
// Round-trips `use` items: parsing then pretty-printing must reproduce
// the input exactly, for both plain and renaming (`as`) imports.
#[test] fn parse_use() {
    with_default_globals(|| {
        let sources = ["use foo::bar::baz;", "use foo::bar as baz;"];
        for &use_s in &sources {
            let vitem = string_to_item(use_s.to_string()).unwrap();
            let vitem_s = item_to_string(&vitem);
            assert_eq!(&vitem_s[..], use_s);
        }
    })
}
150
// Round-trips `extern crate` items: parse, pretty-print, and compare
// with the original source, with and without an `as` rename.
#[test] fn parse_extern_crate() {
    with_default_globals(|| {
        let sources = ["extern crate foo;", "extern crate foo as bar;"];
        for &ex_s in &sources {
            let vitem = string_to_item(ex_s.to_string()).unwrap();
            let vitem_s = item_to_string(&vitem);
            assert_eq!(&vitem_s[..], ex_s);
        }
    })
}
164
165 fn get_spans_of_pat_idents(src: &str) -> Vec<Span> {
166 let item = string_to_item(src.to_string()).unwrap();
167
168 struct PatIdentVisitor {
169 spans: Vec<Span>
170 }
171 impl<'a> visit::Visitor<'a> for PatIdentVisitor {
172 fn visit_pat(&mut self, p: &'a ast::Pat) {
173 match p.kind {
174 PatKind::Ident(_ , ref ident, _) => {
175 self.spans.push(ident.span.clone());
176 }
177 _ => {
178 visit::walk_pat(self, p);
179 }
180 }
181 }
182 }
183 let mut v = PatIdentVisitor { spans: Vec::new() };
184 visit::walk_item(&mut v, &item);
185 return v.spans;
186 }
187
// Verifies that the span recorded for the `self` receiver's pattern ident
// maps back to exactly the `self` token in the source, across all receiver
// forms (`&self`, `&mut self`, `&'a self`, `self`, `self: Foo`).
#[test] fn span_of_self_arg_pat_idents_are_correct() {
    with_default_globals(|| {

        let srcs = ["impl z { fn a (&self, &myarg: i32) {} }",
                    "impl z { fn a (&mut self, &myarg: i32) {} }",
                    "impl z { fn a (&'a self, &myarg: i32) {} }",
                    "impl z { fn a (self, &myarg: i32) {} }",
                    "impl z { fn a (self: Foo, &myarg: i32) {} }",
                    ];

        for &src in &srcs {
            let spans = get_spans_of_pat_idents(src);
            // The first ident-pattern span in each method is the receiver;
            // slicing the source with it must yield exactly "self".
            let (lo, hi) = (spans[0].lo(), spans[0].hi());
            assert!("self" == &src[lo.to_usize()..hi.to_usize()],
                    "\"{}\" != \"self\". src=\"{}\"",
                    &src[lo.to_usize()..hi.to_usize()], src)
        }
    })
}
207
// Smoke-tests expression parsing: these inputs only need to parse cleanly.
#[test] fn parse_exprs () {
    with_default_globals(|| {
        for src in &["3 + 4", "a::z.froob(b,&(987+3))"] {
            string_to_expr(src.to_string());
        }
    })
}
215
// Regression test: a function body containing `#[cfg(...)]`-attributed
// nested functions must parse without error. Only successful parsing is
// checked; the embedded code is never compiled or run.
#[test] fn attrs_fix_bug () {
    with_default_globals(|| {
        string_to_item("pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
                   -> Result<Box<Writer>, String> {
    #[cfg(windows)]
    fn wb() -> c_int {
      (O_WRONLY | libc::consts::os::extra::O_BINARY) as c_int
    }

    #[cfg(unix)]
    fn wb() -> c_int { O_WRONLY as c_int }

    let mut fflags: c_int = wb();
}".to_string());
    })
}
232
// Checks that CRLF line endings in the source are normalized when doc
// comments are turned into `doc` attributes: the stored attribute values
// below contain no `\r`.
#[test] fn crlf_doc_comments() {
    with_default_globals(|| {
        let sess = sess();

        // Single line doc comment followed by CRLF: the `\r` must not
        // leak into the attribute value.
        let name_1 = FileName::Custom("crlf_source_1".to_string());
        let source = "/// doc comment\r\nfn foo() {}".to_string();
        let item = parse_item_from_source_str(name_1, source, &sess)
            .unwrap().unwrap();
        let doc = first_attr_value_str_by_name(&item.attrs, sym::doc).unwrap();
        assert_eq!(doc.as_str(), "/// doc comment");

        // Two consecutive doc-comment lines become two separate `doc`
        // attributes, each without the CRLF.
        let name_2 = FileName::Custom("crlf_source_2".to_string());
        let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
        let item = parse_item_from_source_str(name_2, source, &sess)
            .unwrap().unwrap();
        let docs = item.attrs.iter().filter(|a| a.has_name(sym::doc))
            .map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>();
        let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
        assert_eq!(&docs[..], b);

        // Block doc comment spanning a CRLF: the interior `\r\n` is
        // normalized to a bare `\n` in the stored value.
        let name_3 = FileName::Custom("clrf_source_3".to_string());
        let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string();
        let item = parse_item_from_source_str(name_3, source, &sess).unwrap().unwrap();
        let doc = first_attr_value_str_by_name(&item.attrs, sym::doc).unwrap();
        assert_eq!(doc.as_str(), "/** doc comment\n * with CRLF */");
    });
}
260
/// Checks that the span of the last token tree inside a macro invocation's
/// arguments maps back to the correct snippet of the original source.
#[test]
fn ttdelim_span() {
    // Parses `source` as an expression using a fresh parser.
    fn parse_expr_from_source_str(
        name: FileName, source: String, sess: &ParseSess
    ) -> PResult<'_, P<ast::Expr>> {
        new_parser_from_source_str(sess, name, source).parse_expr()
    }

    with_default_globals(|| {
        let sess = sess();
        let expr = parse_expr_from_source_str(PathBuf::from("foo").into(),
            "foo!( fn main() { body } )".to_string(), &sess).unwrap();

        let tts: Vec<_> = match expr.kind {
            ast::ExprKind::Mac(ref mac) => mac.args.inner_tokens().trees().collect(),
            _ => panic!("not a macro"),
        };

        // `iter().rev().next()` is just `last()`; the final token tree is
        // the `{ body }` block of the macro arguments.
        let span = tts.last().unwrap().span();

        match sess.source_map().span_to_snippet(span) {
            Ok(s) => assert_eq!(&s[..], "{ body }"),
            Err(_) => panic!("could not get snippet"),
        }
    });
}
287
// This tests that when parsing a string (rather than a file) we don't try
// and read in a file for a module declaration and just parse a stub.
// See `recurse_into_file_modules` in the parser.
#[test]
fn out_of_line_mod() {
    with_default_globals(|| {
        let item = parse_item_from_source_str(
            PathBuf::from("foo").into(),
            "mod foo { struct S; mod this_does_not_exist; }".to_owned(),
            &sess(),
        ).unwrap().unwrap();

        // `mod foo` should contain two items: `struct S` and the stub for
        // `mod this_does_not_exist`. `assert_eq!` reports both values on
        // failure, unlike the former `assert!(a == b)`.
        if let ast::ItemKind::Mod(ref m) = item.kind {
            assert_eq!(m.items.len(), 2);
        } else {
            // Former bare `panic!()` carried no diagnostic at all.
            panic!("expected an out-of-line module item");
        }
    });
}
307
// Checks `matches_codepattern`'s whitespace handling: whitespace in the
// pattern matches any run of whitespace in the input, but the pattern may
// not demand trailing whitespace the input lacks.
#[test]
fn eqmodws() {
    // `assert!` / `assert!(!..)` instead of comparing a bool against a
    // literal with `assert_eq!` (clippy: `bool_assert_comparison`).
    assert!(matches_codepattern("",""));
    assert!(!matches_codepattern("","a"));
    assert!(!matches_codepattern("a",""));
    assert!(matches_codepattern("a","a"));
    assert!(matches_codepattern("a b","a \n\t\r b"));
    assert!(matches_codepattern("a b ","a \n\t\r b"));
    assert!(!matches_codepattern("a b","a \n\t\r b "));
    assert!(matches_codepattern("a b","a b"));
    assert!(!matches_codepattern("ab","a b"));
    assert!(matches_codepattern("a b","ab"));
    assert!(matches_codepattern(" a b","ab"));
}
322
// Checks characters with the 'Pattern_White_Space' property (e.g. U+0085)
// are treated as whitespace by `matches_codepattern`.
#[test]
fn pattern_whitespace() {
    // `assert!` forms instead of `assert_eq!(.., true/false)`
    // (clippy: `bool_assert_comparison`).
    assert!(!matches_codepattern("","\x0C"));
    assert!(matches_codepattern("a b ","a \u{0085}\n\t\r b"));
    assert!(!matches_codepattern("a b","a \u{0085}\n\t\r b "));
}
329
// Checks characters that have the 'White_Space' property but NOT
// 'Pattern_White_Space' (U+2002, U+205F, U+3000) are not treated as
// whitespace by `matches_codepattern`.
#[test]
fn non_pattern_whitespace() {
    // These have the property 'White_Space' but not 'Pattern_White_Space'
    assert!(!matches_codepattern("a b","a\u{2002}b"));
    // NOTE(review): this assertion duplicated the previous line verbatim;
    // restored the two-space pattern variant it presumably intended —
    // confirm against the upstream source.
    assert!(!matches_codepattern("a  b","a\u{2002}b"));
    assert!(!matches_codepattern("\u{205F}a b","ab"));
    assert!(!matches_codepattern("a \u{3000}b","ab"));
}