use rustc_data_structures::sync::Lrc;
use rustc_parse::lexer::StringReader;
use syntax::sess::ParseSess;
use syntax::source_map::{FilePathMapping, SourceMap};
use syntax::token::{self, Token, TokenKind};
use syntax::util::comments::is_doc_comment;
use syntax::with_default_globals;
use syntax_pos::symbol::Symbol;
use syntax_pos::{BytePos, Span};

use errors::{emitter::EmitterWriter, Handler};

use std::io;
use std::path::PathBuf;
15 fn mk_sess(sm
: Lrc
<SourceMap
>) -> ParseSess
{
16 let emitter
= EmitterWriter
::new(
25 ParseSess
::with_span_handler(
26 Handler
::with_emitter(true, None
, Box
::new(emitter
)),
31 // Creates a string reader for the given string.
32 fn setup
<'a
>(sm
: &SourceMap
,
36 let sf
= sm
.new_source_file(PathBuf
::from(teststr
.clone()).into(), teststr
);
37 StringReader
::new(sess
, sf
, None
)
#[test]
fn t1() {
    // NOTE(review): the `#[test] fn t1()` header was lost in extraction and is
    // reconstructed here; the body below matches the visible assertions.
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        let mut string_reader = setup(
            &sm,
            &sh,
            "/* my source file */ fn main() { println!(\"zebra\"); }\n".to_string(),
        );
        // Leading block comment and the space after it.
        assert_eq!(string_reader.next_token(), token::Comment);
        assert_eq!(string_reader.next_token(), token::Whitespace);
        // `fn` occupies bytes 21..23 of the input, so the expected token is the
        // identifier "fn" at that span (the expected-kind line was lost in
        // extraction; "fn" is pinned by the span).
        let tok1 = string_reader.next_token();
        let tok2 = Token::new(
            mk_ident("fn"),
            Span::with_root_ctxt(BytePos(21), BytePos(23)),
        );
        assert_eq!(tok1.kind, tok2.kind);
        assert_eq!(tok1.span, tok2.span);
        assert_eq!(string_reader.next_token(), token::Whitespace);
        // Read another token: `main` at bytes 24..28.
        let tok3 = string_reader.next_token();
        assert_eq!(string_reader.pos.clone(), BytePos(28));
        let tok4 = Token::new(
            mk_ident("main"),
            Span::with_root_ctxt(BytePos(24), BytePos(28)),
        );
        assert_eq!(tok3.kind, tok4.kind);
        assert_eq!(tok3.span, tok4.span);

        // The opening paren of `main()` ends at byte 29.
        assert_eq!(string_reader.next_token(), token::OpenDelim(token::Paren));
        assert_eq!(string_reader.pos.clone(), BytePos(29));
    })
}
75 // Checks that the given reader produces the desired stream
76 // of tokens (stop checking after exhausting `expected`).
77 fn check_tokenization(mut string_reader
: StringReader
<'_
>, expected
: Vec
<TokenKind
>) {
78 for expected_tok
in &expected
{
79 assert_eq
!(&string_reader
.next_token(), expected_tok
);
83 // Makes the identifier by looking up the string in the interner.
84 fn mk_ident(id
: &str) -> TokenKind
{
85 token
::Ident(Symbol
::intern(id
), false)
88 fn mk_lit(kind
: token
::LitKind
, symbol
: &str, suffix
: Option
<&str>) -> TokenKind
{
89 TokenKind
::lit(kind
, Symbol
::intern(symbol
), suffix
.map(Symbol
::intern
))
#[test]
fn doublecolon_parsing() {
    // `a b`: two identifiers separated by whitespace — no colon tokens.
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        check_tokenization(
            setup(&sm, &sh, "a b".to_string()),
            vec![mk_ident("a"), token::Whitespace, mk_ident("b")],
        );
    })
}
#[test]
fn doublecolon_parsing_2() {
    // `a::b`: the lexer emits two separate `Colon` tokens for `::`.
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        check_tokenization(
            setup(&sm, &sh, "a::b".to_string()),
            vec![mk_ident("a"), token::Colon, token::Colon, mk_ident("b")],
        );
    })
}
#[test]
fn doublecolon_parsing_3() {
    // `a ::b`: whitespace before the `::` is preserved as its own token.
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        check_tokenization(
            setup(&sm, &sh, "a ::b".to_string()),
            vec![mk_ident("a"), token::Whitespace, token::Colon, token::Colon, mk_ident("b")],
        );
    })
}
#[test]
fn doublecolon_parsing_4() {
    // `a:: b`: whitespace after the `::` is preserved as its own token.
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        check_tokenization(
            setup(&sm, &sh, "a:: b".to_string()),
            vec![mk_ident("a"), token::Colon, token::Colon, token::Whitespace, mk_ident("b")],
        );
    })
}
#[test]
fn character_a() {
    // NOTE(review): the `#[test] fn character_a()` header was lost in
    // extraction; the name is reconstructed from the sibling character tests.
    // A plain character literal lexes to a `Char` literal with no suffix.
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        assert_eq!(
            setup(&sm, &sh, "'a'".to_string()).next_token(),
            mk_lit(token::Char, "a", None),
        );
    })
}
#[test]
fn character_space() {
    // A space character literal `' '` lexes to a `Char` literal.
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        assert_eq!(
            setup(&sm, &sh, "' '".to_string()).next_token(),
            mk_lit(token::Char, " ", None),
        );
    })
}
#[test]
fn character_escaped() {
    // The escape sequence is kept unprocessed in the token's symbol:
    // the literal `'\n'` carries the two characters `\` and `n`.
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        assert_eq!(
            setup(&sm, &sh, "'\\n'".to_string()).next_token(),
            mk_lit(token::Char, "\\n", None),
        );
    })
}
#[test]
fn lifetime_name() {
    // NOTE(review): the `#[test] fn lifetime_name()` header was lost in
    // extraction; the name is reconstructed from the token being checked.
    // An unterminated quote after an identifier lexes as a lifetime.
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        assert_eq!(
            setup(&sm, &sh, "'abc".to_string()).next_token(),
            token::Lifetime(Symbol::intern("'abc")),
        );
    })
}
#[test]
fn raw_string() {
    // NOTE(review): the `#[test] fn raw_string()` header was lost in
    // extraction; the name is reconstructed from the token being checked.
    // `r###"…"###` lexes to `StrRaw(3)` (three `#` delimiters); the content —
    // including the embedded quote, backslash, and NUL — is kept verbatim.
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        assert_eq!(
            setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token(),
            mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None),
        );
    })
}
#[test]
fn literal_suffixes() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        // NOTE(review): the `macro_rules! test {` header line was lost in
        // extraction and is reconstructed here.
        macro_rules! test {
            ($input: expr, $tok_type: ident, $tok_contents: expr) => {{
                // A suffix glued to the literal is lexed as part of the token.
                assert_eq!(
                    setup(&sm, &sh, format!("{}suffix", $input)).next_token(),
                    mk_lit(token::$tok_type, $tok_contents, Some("suffix")),
                );
                // with a whitespace separator
                assert_eq!(
                    setup(&sm, &sh, format!("{} suffix", $input)).next_token(),
                    mk_lit(token::$tok_type, $tok_contents, None),
                );
            }}
        }

        test!("'a'", Char, "a");
        test!("b'a'", Byte, "a");
        test!("\"a\"", Str, "a");
        test!("b\"a\"", ByteStr, "a");
        test!("1234", Integer, "1234");
        test!("0b101", Integer, "0b101");
        test!("0xABC", Integer, "0xABC");
        test!("1.0", Float, "1.0");
        test!("1.0e10", Float, "1.0e10");

        // Even a nonsense suffix like `us` is attached to the literal token;
        // validation happens later, not in the lexer.
        assert_eq!(
            setup(&sm, &sh, "2us".to_string()).next_token(),
            mk_lit(token::Integer, "2", Some("us")),
        );
        assert_eq!(
            setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token(),
            mk_lit(token::StrRaw(3), "raw", Some("suffix")),
        );
        assert_eq!(
            setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token(),
            mk_lit(token::ByteStrRaw(3), "raw", Some("suffix")),
        );
    })
}
#[test]
fn line_doc_comments() {
    assert!(is_doc_comment("///"));
    assert!(is_doc_comment("/// blah"));
    // Four or more slashes is a plain comment, not a doc comment.
    assert!(!is_doc_comment("////"));
}
#[test]
fn nested_block_comments() {
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        let mut lexer = setup(&sm, &sh, "/* /* */ */'a'".to_string());
        // The whole nested `/* /* */ */` is consumed as a single `Comment`
        // token, then lexing resumes at the char literal.
        assert_eq!(lexer.next_token(), token::Comment);
        assert_eq!(lexer.next_token(), mk_lit(token::Char, "a", None));
    })
}
#[test]
fn crlf_comments() {
    // NOTE(review): the `#[test] fn crlf_comments()` header and the closing
    // braces after the last assertion were lost in extraction; both are
    // reconstructed here.
    with_default_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        let mut lexer = setup(&sm, &sh, "// test\r\n/// test\r\n".to_string());
        let comment = lexer.next_token();
        assert_eq!(comment.kind, token::Comment);
        // The comment span covers `// test` (bytes 0..7) but not the `\r\n`.
        assert_eq!((comment.span.lo(), comment.span.hi()), (BytePos(0), BytePos(7)));
        assert_eq!(lexer.next_token(), token::Whitespace);
        // The doc comment's symbol likewise excludes the trailing `\r`.
        assert_eq!(lexer.next_token(), token::DocComment(Symbol::intern("/// test")));
    })
}