use rustc_data_structures::sync::Lrc;
use rustc_parse::lexer::StringReader;
use syntax::token::{self, Token, TokenKind};
use syntax::sess::ParseSess;
use syntax::source_map::{SourceMap, FilePathMapping};
use syntax::util::comments::is_doc_comment;
use syntax::with_default_globals;
use syntax_pos::symbol::Symbol;
use syntax_pos::{BytePos, Span};

use errors::{Handler, emitter::EmitterWriter};
use std::io;
use std::path::PathBuf;

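// Creates a `ParseSess` whose diagnostics are written to `io::sink()`, so
// tests that provoke lexer errors don't pollute the test harness output.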
fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess {
    let emitter = EmitterWriter::new(
        Box::new(io::sink()),
        Some(sm.clone()),
        false,
        false,
        false,
        None,
        false,
    );
    ParseSess::with_span_handler(
        Handler::with_emitter(true, None, Box::new(emitter)),
        sm,
    )
}

// Creates a string reader for the given string.
fn setup<'a>(sm: &SourceMap,
             sess: &'a ParseSess,
             teststr: String)
             -> StringReader<'a> {
    let sf = sm.new_source_file(PathBuf::from(teststr.clone()).into(), teststr);
    StringReader::new(sess, sf, None)
}

#[test]
fn t1() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        let mut string_reader = setup(
            &sm,
            &sh,
            "/* my source file */ fn main() { println!(\"zebra\"); }\n".to_string(),
        );
        assert_eq!(string_reader.next_token(), token::Comment);
        assert_eq!(string_reader.next_token(), token::Whitespace);
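        // `/* my source file */` fills bytes 0..20 and the space is byte 20,
        // so `fn` spans bytes 21..23 and `main` spans bytes 24..28; `pos`
        // always points just past the most recently read token.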
        let tok1 = string_reader.next_token();
        let tok2 = Token::new(
            mk_ident("fn"),
            Span::with_root_ctxt(BytePos(21), BytePos(23)),
        );
        assert_eq!(tok1.kind, tok2.kind);
        assert_eq!(tok1.span, tok2.span);
        assert_eq!(string_reader.next_token(), token::Whitespace);
        // Read another token.
        let tok3 = string_reader.next_token();
        assert_eq!(string_reader.pos.clone(), BytePos(28));
        let tok4 = Token::new(
            mk_ident("main"),
            Span::with_root_ctxt(BytePos(24), BytePos(28)),
        );
        assert_eq!(tok3.kind, tok4.kind);
        assert_eq!(tok3.span, tok4.span);

        assert_eq!(string_reader.next_token(), token::OpenDelim(token::Paren));
        assert_eq!(string_reader.pos.clone(), BytePos(29))
    })
}

// Checks that the given reader produces the desired stream
// of tokens (stop checking after exhausting `expected`).
fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<TokenKind>) {
    for expected_tok in &expected {
        assert_eq!(&string_reader.next_token(), expected_tok);
    }
}

// Makes the identifier by looking up the string in the interner.
fn mk_ident(id: &str) -> TokenKind {
    token::Ident(Symbol::intern(id), false)
}

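// Makes a literal token kind with the given contents and optional suffix.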
fn mk_lit(kind: token::LitKind, symbol: &str, suffix: Option<&str>) -> TokenKind {
    TokenKind::lit(kind, Symbol::intern(symbol), suffix.map(Symbol::intern))
}

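// The raw lexer emits `::` as two adjacent `Colon` tokens; gluing them into a
// single path-separator token is left to a later stage, when the token stream
// is built.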
#[test]
fn doublecolon_parsing() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        check_tokenization(
            setup(&sm, &sh, "a b".to_string()),
            vec![mk_ident("a"), token::Whitespace, mk_ident("b")],
        );
    })
}

#[test]
fn doublecolon_parsing_2() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        check_tokenization(
            setup(&sm, &sh, "a::b".to_string()),
            vec![mk_ident("a"), token::Colon, token::Colon, mk_ident("b")],
        );
    })
}

#[test]
fn doublecolon_parsing_3() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        check_tokenization(
            setup(&sm, &sh, "a ::b".to_string()),
            vec![mk_ident("a"), token::Whitespace, token::Colon, token::Colon, mk_ident("b")],
        );
    })
}

#[test]
fn doublecolon_parsing_4() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        check_tokenization(
            setup(&sm, &sh, "a:: b".to_string()),
            vec![mk_ident("a"), token::Colon, token::Colon, token::Whitespace, mk_ident("b")],
        );
    })
}

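// Char literal tokens store the source text between the quotes; escape
// sequences are kept verbatim rather than being resolved by the lexer.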
#[test]
fn character_a() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        assert_eq!(
            setup(&sm, &sh, "'a'".to_string()).next_token(),
            mk_lit(token::Char, "a", None),
        );
    })
}

#[test]
fn character_space() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        assert_eq!(
            setup(&sm, &sh, "' '".to_string()).next_token(),
            mk_lit(token::Char, " ", None),
        );
    })
}

#[test]
fn character_escaped() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        assert_eq!(
            setup(&sm, &sh, "'\\n'".to_string()).next_token(),
            mk_lit(token::Char, "\\n", None),
        );
    })
}

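// A single quote followed by identifier characters and no closing quote
// lexes as a lifetime token, not an unterminated char literal.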
#[test]
fn lifetime_name() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        assert_eq!(
            setup(&sm, &sh, "'abc".to_string()).next_token(),
            token::Lifetime(Symbol::intern("'abc")),
        );
    })
}

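// The `\x00` below is a literal NUL byte in the test input; a raw string
// keeps everything between the `r###"` and `"###` delimiters untouched.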
#[test]
fn raw_string() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        assert_eq!(
            setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token(),
            mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None),
        );
    })
}

#[test]
fn literal_suffixes() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
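        // `test!` asserts that a suffix written directly after a literal is
        // attached to the literal token, while a whitespace-separated suffix
        // leaves the literal itself unsuffixed.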
        macro_rules! test {
            ($input: expr, $tok_type: ident, $tok_contents: expr) => {{
                assert_eq!(
                    setup(&sm, &sh, format!("{}suffix", $input)).next_token(),
                    mk_lit(token::$tok_type, $tok_contents, Some("suffix")),
                );
                // with a whitespace separator
                assert_eq!(
                    setup(&sm, &sh, format!("{} suffix", $input)).next_token(),
                    mk_lit(token::$tok_type, $tok_contents, None),
                );
            }}
        }

        test!("'a'", Char, "a");
        test!("b'a'", Byte, "a");
        test!("\"a\"", Str, "a");
        test!("b\"a\"", ByteStr, "a");
        test!("1234", Integer, "1234");
        test!("0b101", Integer, "0b101");
        test!("0xABC", Integer, "0xABC");
        test!("1.0", Float, "1.0");
        test!("1.0e10", Float, "1.0e10");

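        // `us` is no longer a valid suffix, but the lexer attaches any
        // identifier-like suffix and leaves rejecting it to later stages.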
        assert_eq!(
            setup(&sm, &sh, "2us".to_string()).next_token(),
            mk_lit(token::Integer, "2", Some("us")),
        );
        assert_eq!(
            setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token(),
            mk_lit(token::StrRaw(3), "raw", Some("suffix")),
        );
        assert_eq!(
            setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token(),
            mk_lit(token::ByteStrRaw(3), "raw", Some("suffix")),
        );
    })
}

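// Exactly three slashes start a line doc comment; four or more make an
// ordinary comment.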
#[test]
fn line_doc_comments() {
    assert!(is_doc_comment("///"));
    assert!(is_doc_comment("/// blah"));
    assert!(!is_doc_comment("////"));
}

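// Block comments nest, so the whole of `/* /* */ */` lexes as one `Comment`.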
#[test]
fn nested_block_comments() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        let mut lexer = setup(&sm, &sh, "/* /* */ */'a'".to_string());
        assert_eq!(lexer.next_token(), token::Comment);
        assert_eq!(lexer.next_token(), mk_lit(token::Char, "a", None));
    })
}

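// With CRLF line endings, the comment token ends before the `\r` (bytes 0..7
// for `// test`), and the doc-comment symbol likewise excludes it.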
#[test]
fn crlf_comments() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        let mut lexer = setup(&sm, &sh, "// test\r\n/// test\r\n".to_string());
        let comment = lexer.next_token();
        assert_eq!(comment.kind, token::Comment);
        assert_eq!((comment.span.lo(), comment.span.hi()), (BytePos(0), BytePos(7)));
        assert_eq!(lexer.next_token(), token::Whitespace);
        assert_eq!(lexer.next_token(), token::DocComment(Symbol::intern("/// test")));
    })
}