// src/librustc_expand/parse/lexer/tests.rs (rustc.git, upstream version 1.43.0+dfsg1)

use rustc_ast::token::{self, Token, TokenKind};
use rustc_ast::util::comments::is_doc_comment;
use rustc_ast::with_default_globals;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{emitter::EmitterWriter, Handler};
use rustc_parse::lexer::StringReader;
use rustc_session::parse::ParseSess;
use rustc_span::source_map::{FilePathMapping, SourceMap};
use rustc_span::symbol::Symbol;
use rustc_span::{BytePos, Span};

use std::io;
use std::path::PathBuf;

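// Creates a `ParseSess` whose diagnostics go to `io::sink()`, so the lexer
// tests below run silently.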
fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess {
    let emitter = EmitterWriter::new(
        Box::new(io::sink()),
        Some(sm.clone()),
        false,
        false,
        false,
        None,
        false,
    );
    ParseSess::with_span_handler(Handler::with_emitter(true, None, Box::new(emitter)), sm)
}

// Creates a string reader for the given string.
fn setup<'a>(sm: &SourceMap, sess: &'a ParseSess, teststr: String) -> StringReader<'a> {
    let sf = sm.new_source_file(PathBuf::from(teststr.clone()).into(), teststr);
    StringReader::new(sess, sf, None)
}

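// Lexes a small source string and checks the kind, span, and reader byte
// position of each token, including the `fn` and `main` identifiers.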
#[test]
fn t1() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        let mut string_reader = setup(
            &sm,
            &sh,
            "/* my source file */ fn main() { println!(\"zebra\"); }\n".to_string(),
        );
        assert_eq!(string_reader.next_token(), token::Comment);
        assert_eq!(string_reader.next_token(), token::Whitespace);
        let tok1 = string_reader.next_token();
        let tok2 = Token::new(mk_ident("fn"), Span::with_root_ctxt(BytePos(21), BytePos(23)));
        assert_eq!(tok1.kind, tok2.kind);
        assert_eq!(tok1.span, tok2.span);
        assert_eq!(string_reader.next_token(), token::Whitespace);
        // Read another token.
        let tok3 = string_reader.next_token();
        assert_eq!(string_reader.pos.clone(), BytePos(28));
        let tok4 = Token::new(mk_ident("main"), Span::with_root_ctxt(BytePos(24), BytePos(28)));
        assert_eq!(tok3.kind, tok4.kind);
        assert_eq!(tok3.span, tok4.span);

        assert_eq!(string_reader.next_token(), token::OpenDelim(token::Paren));
        assert_eq!(string_reader.pos.clone(), BytePos(29))
    })
}

// Checks that the given reader produces the desired stream
// of tokens (stop checking after exhausting `expected`).
fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<TokenKind>) {
    for expected_tok in &expected {
        assert_eq!(&string_reader.next_token(), expected_tok);
    }
}

// Makes the identifier by looking up the string in the interner.
fn mk_ident(id: &str) -> TokenKind {
    token::Ident(Symbol::intern(id), false)
}

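// Makes a literal token of the given kind and contents, with an optional suffix.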
fn mk_lit(kind: token::LitKind, symbol: &str, suffix: Option<&str>) -> TokenKind {
    TokenKind::lit(kind, Symbol::intern(symbol), suffix.map(Symbol::intern))
}

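// The `doublecolon_parsing*` tests check that `::` is lexed as two separate
// `Colon` tokens, with or without surrounding whitespace.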
#[test]
fn doublecolon_parsing() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        check_tokenization(
            setup(&sm, &sh, "a b".to_string()),
            vec![mk_ident("a"), token::Whitespace, mk_ident("b")],
        );
    })
}

#[test]
fn doublecolon_parsing_2() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        check_tokenization(
            setup(&sm, &sh, "a::b".to_string()),
            vec![mk_ident("a"), token::Colon, token::Colon, mk_ident("b")],
        );
    })
}

#[test]
fn doublecolon_parsing_3() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        check_tokenization(
            setup(&sm, &sh, "a ::b".to_string()),
            vec![mk_ident("a"), token::Whitespace, token::Colon, token::Colon, mk_ident("b")],
        );
    })
}

#[test]
fn doublecolon_parsing_4() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        check_tokenization(
            setup(&sm, &sh, "a:: b".to_string()),
            vec![mk_ident("a"), token::Colon, token::Colon, token::Whitespace, mk_ident("b")],
        );
    })
}

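// Literal tests: the lexer reports the literal kind together with its source
// contents, quotes stripped and escapes kept verbatim (e.g. `'\n'` lexes to the
// two characters `\` and `n`).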
#[test]
fn character_a() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token(), mk_lit(token::Char, "a", None),);
    })
}

#[test]
fn character_space() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token(), mk_lit(token::Char, " ", None),);
    })
}

#[test]
fn character_escaped() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        assert_eq!(
            setup(&sm, &sh, "'\\n'".to_string()).next_token(),
            mk_lit(token::Char, "\\n", None),
        );
    })
}

#[test]
fn lifetime_name() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        assert_eq!(
            setup(&sm, &sh, "'abc".to_string()).next_token(),
            token::Lifetime(Symbol::intern("'abc")),
        );
    })
}

#[test]
fn raw_string() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        assert_eq!(
            setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token(),
            mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None),
        );
    })
}

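// A suffix written directly after a literal (`1234suffix`) is part of the
// literal token; with a whitespace separator the literal carries no suffix.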
#[test]
fn literal_suffixes() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        macro_rules! test {
            ($input: expr, $tok_type: ident, $tok_contents: expr) => {{
                assert_eq!(
                    setup(&sm, &sh, format!("{}suffix", $input)).next_token(),
                    mk_lit(token::$tok_type, $tok_contents, Some("suffix")),
                );
                // with a whitespace separator
                assert_eq!(
                    setup(&sm, &sh, format!("{} suffix", $input)).next_token(),
                    mk_lit(token::$tok_type, $tok_contents, None),
                );
            }};
        }

        test!("'a'", Char, "a");
        test!("b'a'", Byte, "a");
        test!("\"a\"", Str, "a");
        test!("b\"a\"", ByteStr, "a");
        test!("1234", Integer, "1234");
        test!("0b101", Integer, "0b101");
        test!("0xABC", Integer, "0xABC");
        test!("1.0", Float, "1.0");
        test!("1.0e10", Float, "1.0e10");

        assert_eq!(
            setup(&sm, &sh, "2us".to_string()).next_token(),
            mk_lit(token::Integer, "2", Some("us")),
        );
        assert_eq!(
            setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token(),
            mk_lit(token::StrRaw(3), "raw", Some("suffix")),
        );
        assert_eq!(
            setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token(),
            mk_lit(token::ByteStrRaw(3), "raw", Some("suffix")),
        );
    })
}

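// `///` and `/// ...` count as doc comments; `////` does not.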
#[test]
fn line_doc_comments() {
    assert!(is_doc_comment("///"));
    assert!(is_doc_comment("/// blah"));
    assert!(!is_doc_comment("////"));
}

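// A nested block comment is consumed as a single `Comment` token before the
// char literal that follows it.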
#[test]
fn nested_block_comments() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        let mut lexer = setup(&sm, &sh, "/* /* */ */'a'".to_string());
        assert_eq!(lexer.next_token(), token::Comment);
        assert_eq!(lexer.next_token(), mk_lit(token::Char, "a", None));
    })
}

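// With CRLF line endings, the comment token's span stops before the `\r\n`
// (bytes 0..7 for `// test`) and the doc comment's text has no trailing `\r`.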
#[test]
fn crlf_comments() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        let mut lexer = setup(&sm, &sh, "// test\r\n/// test\r\n".to_string());
        let comment = lexer.next_token();
        assert_eq!(comment.kind, token::Comment);
        assert_eq!((comment.span.lo(), comment.span.hi()), (BytePos(0), BytePos(7)));
        assert_eq!(lexer.next_token(), token::Whitespace);
        assert_eq!(lexer.next_token(), token::DocComment(Symbol::intern("/// test")));
    })
}