]> git.proxmox.com Git - rustc.git/blob - src/libsyntax/parse/lexer/tests.rs
New upstream version 1.40.0+dfsg1
[rustc.git] / src / libsyntax / parse / lexer / tests.rs
1 use super::*;
2
3 use crate::symbol::Symbol;
4 use crate::source_map::{SourceMap, FilePathMapping};
5 use crate::parse::token;
6 use crate::with_default_globals;
7
8 use errors::{Handler, emitter::EmitterWriter};
9 use std::io;
10 use std::path::PathBuf;
11 use syntax_pos::{BytePos, Span};
12
13 fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess {
14 let emitter = EmitterWriter::new(
15 Box::new(io::sink()),
16 Some(sm.clone()),
17 false,
18 false,
19 false,
20 None,
21 false,
22 );
23 ParseSess::with_span_handler(Handler::with_emitter(true, None, Box::new(emitter)), sm)
24 }
25
26 // Creates a string reader for the given string.
27 fn setup<'a>(sm: &SourceMap,
28 sess: &'a ParseSess,
29 teststr: String)
30 -> StringReader<'a> {
31 let sf = sm.new_source_file(PathBuf::from(teststr.clone()).into(), teststr);
32 StringReader::new(sess, sf, None)
33 }
34
// Lexes a small source file and checks token kinds, spans, and the reader's
// byte position as it advances past `/* comment */ fn main() { ... }`.
#[test]
fn t1() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        let mut string_reader = setup(
            &sm,
            &sh,
            "/* my source file */ fn main() { println!(\"zebra\"); }\n".to_string(),
        );
        // Leading block comment and the whitespace after it.
        assert_eq!(string_reader.next_token(), token::Comment);
        assert_eq!(string_reader.next_token(), token::Whitespace);
        // `fn` keyword occupies bytes 21..23.
        let tok1 = string_reader.next_token();
        let tok2 = Token::new(
            mk_ident("fn"),
            Span::with_root_ctxt(BytePos(21), BytePos(23)),
        );
        assert_eq!(tok1.kind, tok2.kind);
        assert_eq!(tok1.span, tok2.span);
        assert_eq!(string_reader.next_token(), token::Whitespace);
        // `main` identifier at bytes 24..28; after reading it the reader's
        // position sits just past the identifier.
        let tok3 = string_reader.next_token();
        // `BytePos` is `Copy`; no `.clone()` needed (was `pos.clone()`).
        assert_eq!(string_reader.pos, BytePos(28));
        let tok4 = Token::new(
            mk_ident("main"),
            Span::with_root_ctxt(BytePos(24), BytePos(28)),
        );
        assert_eq!(tok3.kind, tok4.kind);
        assert_eq!(tok3.span, tok4.span);

        assert_eq!(string_reader.next_token(), token::OpenDelim(token::Paren));
        assert_eq!(string_reader.pos, BytePos(29))
    })
}
69
70 // Checks that the given reader produces the desired stream
71 // of tokens (stop checking after exhausting `expected`).
72 fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<TokenKind>) {
73 for expected_tok in &expected {
74 assert_eq!(&string_reader.next_token(), expected_tok);
75 }
76 }
77
78 // Makes the identifier by looking up the string in the interner.
79 fn mk_ident(id: &str) -> TokenKind {
80 token::Ident(Symbol::intern(id), false)
81 }
82
83 fn mk_lit(kind: token::LitKind, symbol: &str, suffix: Option<&str>) -> TokenKind {
84 TokenKind::lit(kind, Symbol::intern(symbol), suffix.map(Symbol::intern))
85 }
86
// `a b`: two identifiers separated by a single whitespace token.
#[test]
fn doublecolon_parsing() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sess = mk_sess(sm.clone());
        let reader = setup(&sm, &sess, "a b".to_string());
        let want = vec![mk_ident("a"), token::Whitespace, mk_ident("b")];
        check_tokenization(reader, want);
    })
}
98
// `a::b`: the `::` lexes as two separate `Colon` tokens.
#[test]
fn doublecolon_parsing_2() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sess = mk_sess(sm.clone());
        let reader = setup(&sm, &sess, "a::b".to_string());
        let want = vec![mk_ident("a"), token::Colon, token::Colon, mk_ident("b")];
        check_tokenization(reader, want);
    })
}
110
// `a ::b`: whitespace before the `::` yields an explicit whitespace token.
#[test]
fn doublecolon_parsing_3() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sess = mk_sess(sm.clone());
        let reader = setup(&sm, &sess, "a ::b".to_string());
        let want = vec![
            mk_ident("a"),
            token::Whitespace,
            token::Colon,
            token::Colon,
            mk_ident("b"),
        ];
        check_tokenization(reader, want);
    })
}
122
// `a:: b`: whitespace after the `::` yields an explicit whitespace token.
#[test]
fn doublecolon_parsing_4() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sess = mk_sess(sm.clone());
        let reader = setup(&sm, &sess, "a:: b".to_string());
        let want = vec![
            mk_ident("a"),
            token::Colon,
            token::Colon,
            token::Whitespace,
            mk_ident("b"),
        ];
        check_tokenization(reader, want);
    })
}
134
// A plain char literal lexes as a `Char` literal with no suffix.
#[test]
fn character_a() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sess = mk_sess(sm.clone());
        let tok = setup(&sm, &sess, "'a'".to_string()).next_token();
        assert_eq!(tok, mk_lit(token::Char, "a", None));
    })
}
146
// A space inside a char literal is preserved in the literal's symbol.
#[test]
fn character_space() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sess = mk_sess(sm.clone());
        let tok = setup(&sm, &sess, "' '".to_string()).next_token();
        assert_eq!(tok, mk_lit(token::Char, " ", None));
    })
}
158
// An escape sequence in a char literal stays unescaped in the symbol
// (the lexer records the source text `\n`, not the newline byte).
#[test]
fn character_escaped() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sess = mk_sess(sm.clone());
        let tok = setup(&sm, &sess, "'\\n'".to_string()).next_token();
        assert_eq!(tok, mk_lit(token::Char, "\\n", None));
    })
}
170
// An unterminated quote followed by an identifier lexes as a lifetime;
// the symbol includes the leading `'`.
#[test]
fn lifetime_name() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sess = mk_sess(sm.clone());
        let tok = setup(&sm, &sess, "'abc".to_string()).next_token();
        assert_eq!(tok, token::Lifetime(Symbol::intern("'abc")));
    })
}
182
// A raw string with a 3-`#` fence: the contents (including quotes,
// backslashes, and a NUL byte) are kept verbatim, and the fence count
// is recorded in `StrRaw(3)`.
#[test]
fn raw_string() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sess = mk_sess(sm.clone());
        let tok = setup(&sm, &sess, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token();
        assert_eq!(tok, mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None));
    })
}
194
// A suffix glued to a literal is lexed as part of the literal token,
// while a whitespace-separated "suffix" is left for the next token.
#[test]
fn literal_suffixes() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sess = mk_sess(sm.clone());
        macro_rules! test {
            ($lit: expr, $kind: ident, $contents: expr) => {{
                // Suffix attached directly.
                assert_eq!(
                    setup(&sm, &sess, format!("{}suffix", $lit)).next_token(),
                    mk_lit(token::$kind, $contents, Some("suffix")),
                );
                // Whitespace-separated: the literal carries no suffix.
                assert_eq!(
                    setup(&sm, &sess, format!("{} suffix", $lit)).next_token(),
                    mk_lit(token::$kind, $contents, None),
                );
            }}
        }

        test!("'a'", Char, "a");
        test!("b'a'", Byte, "a");
        test!("\"a\"", Str, "a");
        test!("b\"a\"", ByteStr, "a");
        test!("1234", Integer, "1234");
        test!("0b101", Integer, "0b101");
        test!("0xABC", Integer, "0xABC");
        test!("1.0", Float, "1.0");
        test!("1.0e10", Float, "1.0e10");

        // A non-standard suffix such as `us` still lexes as a suffix.
        assert_eq!(
            setup(&sm, &sess, "2us".to_string()).next_token(),
            mk_lit(token::Integer, "2", Some("us")),
        );
        // Raw strings and raw byte strings also carry suffixes.
        assert_eq!(
            setup(&sm, &sess, "r###\"raw\"###suffix".to_string()).next_token(),
            mk_lit(token::StrRaw(3), "raw", Some("suffix")),
        );
        assert_eq!(
            setup(&sm, &sess, "br###\"raw\"###suffix".to_string()).next_token(),
            mk_lit(token::ByteStrRaw(3), "raw", Some("suffix")),
        );
    })
}
238
// Exactly three leading slashes mark a line doc comment; four or more
// make it an ordinary comment.
#[test]
fn line_doc_comments() {
    let cases = [("///", true), ("/// blah", true), ("////", false)];
    for &(text, expected) in &cases {
        assert_eq!(is_doc_comment(text), expected);
    }
}
245
// A nested block comment (`/* /* */ */`) is consumed as a single
// `Comment` token, not terminated at the inner `*/`.
#[test]
fn nested_block_comments() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sess = mk_sess(sm.clone());
        let mut lexer = setup(&sm, &sess, "/* /* */ */'a'".to_string());
        assert_eq!(lexer.next_token(), token::Comment);
        assert_eq!(lexer.next_token(), mk_lit(token::Char, "a", None));
    })
}
256
// CRLF line endings: a line comment's span stops before the `\r`
// (bytes 0..7 for `// test`), and a `///` doc comment's symbol
// excludes the line terminator entirely.
#[test]
fn crlf_comments() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sess = mk_sess(sm.clone());
        let mut lexer = setup(&sm, &sess, "// test\r\n/// test\r\n".to_string());

        let comment = lexer.next_token();
        assert_eq!(comment.kind, token::Comment);
        let bounds = (comment.span.lo(), comment.span.hi());
        assert_eq!(bounds, (BytePos(0), BytePos(7)));

        assert_eq!(lexer.next_token(), token::Whitespace);
        assert_eq!(lexer.next_token(), token::DocComment(Symbol::intern("/// test")));
    })
}