3 use crate::symbol
::Symbol
;
4 use crate::source_map
::{SourceMap, FilePathMapping}
;
5 use crate::parse
::token
;
6 use crate::with_default_globals
;
8 use errors
::{Handler, emitter::EmitterWriter}
;
10 use std
::path
::PathBuf
;
11 use syntax_pos
::{BytePos, Span}
;
// Builds the `ParseSess` used by the lexer tests below: an
// `EmitterWriter`-backed `Handler` attached to the given `SourceMap`.
// NOTE(review): this paste is line-mangled and the original lines between
// `EmitterWriter::new(` and `ParseSess::with_span_handler` (the emitter's
// constructor arguments plus the end of the `let`) are missing — restore
// them from version control before editing this function.
13 fn mk_sess(sm
: Lrc
<SourceMap
>) -> ParseSess
{
// The emitter that will receive any diagnostics produced while lexing.
14 let emitter
= EmitterWriter
::new(
// NOTE(review): the `true`/`None` arguments are presumably
// `can_emit_warnings` and the treat-err-as-bug limit — confirm against
// `errors::Handler::with_emitter` before relying on this.
23 ParseSess
::with_span_handler(Handler
::with_emitter(true, None
, Box
::new(emitter
)), sm
)
26 // Creates a string reader for the given string.
// NOTE(review): the parameter list is truncated in this paste — the
// `sess` and `teststr` parameters and the `StringReader<'a>` return type
// (both used below) sit on lines missing from this chunk.
27 fn setup
<'a
>(sm
: &SourceMap
,
// Register the test string as a new source file; the string itself also
// serves as the file "path", hence the `clone()`.
31 let sf
= sm
.new_source_file(PathBuf
::from(teststr
.clone()).into(), teststr
);
// Hand the fresh source file to a lexer tied to the session.
32 StringReader
::new(sess
, sf
, None
)
// End-to-end lexing check over `/* my source file */ fn main() { ... }`:
// walks the token stream and pins token kinds, spans, and reader positions.
// NOTE(review): the enclosing `fn` header (and its `#[test]` attribute) is
// on a line missing from this paste, as are the first arguments to both
// `Token::new(` calls (the expected `TokenKind`s) and the closing braces.
37 with_default_globals(|| {
// Fresh source map + parse session for an isolated lexer.
38 let sm
= Lrc
::new(SourceMap
::new(FilePathMapping
::empty()));
39 let sh
= mk_sess(sm
.clone());
40 let mut string_reader
= setup(
43 "/* my source file */ fn main() { println!(\"zebra\"); }\n".to_string(),
// Leading block comment, then the whitespace after it.
45 assert_eq
!(string_reader
.next_token(), token
::Comment
);
46 assert_eq
!(string_reader
.next_token(), token
::Whitespace
);
// Next token should span bytes 21..23 of the input (`fn`, by offset).
47 let tok1
= string_reader
.next_token();
48 let tok2
= Token
::new(
50 Span
::with_root_ctxt(BytePos(21), BytePos(23)),
52 assert_eq
!(tok1
.kind
, tok2
.kind
);
53 assert_eq
!(tok1
.span
, tok2
.span
);
54 assert_eq
!(string_reader
.next_token(), token
::Whitespace
);
55 // Read another token.
56 let tok3
= string_reader
.next_token();
// After that token the reader should sit at byte 28.
57 assert_eq
!(string_reader
.pos
.clone(), BytePos(28));
58 let tok4
= Token
::new(
60 Span
::with_root_ctxt(BytePos(24), BytePos(28)),
62 assert_eq
!(tok3
.kind
, tok4
.kind
);
63 assert_eq
!(tok3
.span
, tok4
.span
);
// The `(` of `main()` follows, leaving the reader at byte 29.
65 assert_eq
!(string_reader
.next_token(), token
::OpenDelim(token
::Paren
));
66 assert_eq
!(string_reader
.pos
.clone(), BytePos(29))
70 // Checks that the given reader produces the desired stream
71 // of tokens (stop checking after exhausting `expected`).
72 fn check_tokenization(mut string_reader
: StringReader
<'_
>, expected
: Vec
<TokenKind
>) {
73 for expected_tok
in &expected
{
74 assert_eq
!(&string_reader
.next_token(), expected_tok
);
78 // Makes the identifier by looking up the string in the interner.
79 fn mk_ident(id
: &str) -> TokenKind
{
80 token
::Ident(Symbol
::intern(id
), false)
83 fn mk_lit(kind
: token
::LitKind
, symbol
: &str, suffix
: Option
<&str>) -> TokenKind
{
84 TokenKind
::lit(kind
, Symbol
::intern(symbol
), suffix
.map(Symbol
::intern
))
// `a b` lexes as ident, whitespace, ident — baseline for the `::` tests below.
// NOTE(review): the `check_tokenization(` wrapper call and the closing
// lines of this function are missing from this paste.
88 fn doublecolon_parsing() {
89 with_default_globals(|| {
90 let sm
= Lrc
::new(SourceMap
::new(FilePathMapping
::empty()));
91 let sh
= mk_sess(sm
.clone());
93 setup(&sm
, &sh
, "a b".to_string()),
// Expected token stream for `a b`.
94 vec
![mk_ident("a"), token
::Whitespace
, mk_ident("b")],
// `a::b` lexes the path separator as two `Colon` tokens, not one token.
// NOTE(review): the `check_tokenization(` wrapper and closing lines are
// missing from this paste.
100 fn doublecolon_parsing_2() {
101 with_default_globals(|| {
102 let sm
= Lrc
::new(SourceMap
::new(FilePathMapping
::empty()));
103 let sh
= mk_sess(sm
.clone());
105 setup(&sm
, &sh
, "a::b".to_string()),
// Expected token stream for `a::b`.
106 vec
![mk_ident("a"), token
::Colon
, token
::Colon
, mk_ident("b")],
// `a ::b` — whitespace before the separator still yields two `Colon`s.
// NOTE(review): the `check_tokenization(` wrapper and closing lines are
// missing from this paste.
112 fn doublecolon_parsing_3() {
113 with_default_globals(|| {
114 let sm
= Lrc
::new(SourceMap
::new(FilePathMapping
::empty()));
115 let sh
= mk_sess(sm
.clone());
117 setup(&sm
, &sh
, "a ::b".to_string()),
// Expected token stream for `a ::b`.
118 vec
![mk_ident("a"), token
::Whitespace
, token
::Colon
, token
::Colon
, mk_ident("b")],
// `a:: b` — whitespace after the separator; two `Colon`s then whitespace.
// NOTE(review): the `check_tokenization(` wrapper and closing lines are
// missing from this paste.
124 fn doublecolon_parsing_4() {
125 with_default_globals(|| {
126 let sm
= Lrc
::new(SourceMap
::new(FilePathMapping
::empty()));
127 let sh
= mk_sess(sm
.clone());
129 setup(&sm
, &sh
, "a:: b".to_string()),
// Expected token stream for `a:: b`.
130 vec
![mk_ident("a"), token
::Colon
, token
::Colon
, token
::Whitespace
, mk_ident("b")],
// A plain char literal `'a'` lexes as a `Char` literal token.
// NOTE(review): the enclosing `fn` header (and `#[test]`), the `assert_eq!(`
// wrapper, and the closing lines are missing from this paste.
137 with_default_globals(|| {
138 let sm
= Lrc
::new(SourceMap
::new(FilePathMapping
::empty()));
139 let sh
= mk_sess(sm
.clone());
141 setup(&sm
, &sh
, "'a'".to_string()).next_token(),
// Expected: a Char literal with contents `a` and no suffix.
142 mk_lit(token
::Char
, "a", None
),
// A space char literal `' '` lexes as a `Char` literal token.
// NOTE(review): the `assert_eq!(` wrapper and closing lines are missing
// from this paste.
148 fn character_space() {
149 with_default_globals(|| {
150 let sm
= Lrc
::new(SourceMap
::new(FilePathMapping
::empty()));
151 let sh
= mk_sess(sm
.clone());
153 setup(&sm
, &sh
, "' '".to_string()).next_token(),
// Expected: a Char literal whose contents are a single space, no suffix.
154 mk_lit(token
::Char
, " ", None
),
// An escaped char literal `'\n'` keeps the escape sequence (`\n`, two
// chars) as the literal's contents — the lexer does not unescape it.
// NOTE(review): the `assert_eq!(` wrapper and closing lines are missing
// from this paste.
160 fn character_escaped() {
161 with_default_globals(|| {
162 let sm
= Lrc
::new(SourceMap
::new(FilePathMapping
::empty()));
163 let sh
= mk_sess(sm
.clone());
165 setup(&sm
, &sh
, "'\\n'".to_string()).next_token(),
166 mk_lit(token
::Char
, "\\n", None
),
// An unterminated quote followed by an identifier (`'abc`) lexes as a
// lifetime token, keeping the leading apostrophe in the symbol.
// NOTE(review): the enclosing `fn` header (and `#[test]`), the `assert_eq!(`
// wrapper, and the closing lines are missing from this paste.
173 with_default_globals(|| {
174 let sm
= Lrc
::new(SourceMap
::new(FilePathMapping
::empty()));
175 let sh
= mk_sess(sm
.clone());
177 setup(&sm
, &sh
, "'abc".to_string()).next_token(),
178 token
::Lifetime(Symbol
::intern("'abc")),
// A raw string with three hashes lexes as `StrRaw(3)`; the contents
// (including quotes, backslash, and the NUL byte) are taken verbatim.
// NOTE(review): the enclosing `fn` header (and `#[test]`), the `assert_eq!(`
// wrapper, and the closing lines are missing from this paste.
185 with_default_globals(|| {
186 let sm
= Lrc
::new(SourceMap
::new(FilePathMapping
::empty()));
187 let sh
= mk_sess(sm
.clone());
189 setup(&sm
, &sh
, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token(),
190 mk_lit(token
::StrRaw(3), "\"#a\\b\x00c\"", None
),
// Exercises literal suffix handling for every literal kind: `<lit>suffix`
// attaches the suffix to the token, while `<lit> suffix` (whitespace
// separated) leaves the literal unsuffixed.
// NOTE(review): the `macro_rules! test` header, the `assert_eq!(` wrapper
// lines inside it, and the closing lines are all missing from this paste.
196 fn literal_suffixes() {
197 with_default_globals(|| {
198 let sm
= Lrc
::new(SourceMap
::new(FilePathMapping
::empty()));
199 let sh
= mk_sess(sm
.clone());
// Macro matcher: input literal text, expected LitKind variant, contents.
201 ($input
: expr
, $tok_type
: ident
, $tok_contents
: expr
) => {{
// Directly attached suffix becomes part of the literal token.
203 setup(&sm
, &sh
, format
!("{}suffix", $input
)).next_token(),
204 mk_lit(token
::$tok_type
, $tok_contents
, Some("suffix")),
206 // with a whitespace separator
208 setup(&sm
, &sh
, format
!("{} suffix", $input
)).next_token(),
209 mk_lit(token
::$tok_type
, $tok_contents
, None
),
// One invocation per literal kind.
214 test
!("'a'", Char
, "a");
215 test
!("b'a'", Byte
, "a");
216 test
!("\"a\"", Str
, "a");
217 test
!("b\"a\"", ByteStr
, "a");
218 test
!("1234", Integer
, "1234");
219 test
!("0b101", Integer
, "0b101");
220 test
!("0xABC", Integer
, "0xABC");
221 test
!("1.0", Float
, "1.0");
222 test
!("1.0e10", Float
, "1.0e10");
// `2us` is not a valid suffix form the macro covers; checked explicitly.
225 setup(&sm
, &sh
, "2us".to_string()).next_token(),
226 mk_lit(token
::Integer
, "2", Some("us")),
// Raw string and raw byte string literals also accept suffixes.
229 setup(&sm
, &sh
, "r###\"raw\"###suffix".to_string()).next_token(),
230 mk_lit(token
::StrRaw(3), "raw", Some("suffix")),
233 setup(&sm
, &sh
, "br###\"raw\"###suffix".to_string()).next_token(),
234 mk_lit(token
::ByteStrRaw(3), "raw", Some("suffix")),
240 fn line_doc_comments() {
241 assert
!(is_doc_comment("///"));
242 assert
!(is_doc_comment("/// blah"));
243 assert
!(!is_doc_comment("////"));
// A nested block comment `/* /* */ */` is consumed as ONE `Comment` token;
// the following `'a'` then lexes as a Char literal.
// NOTE(review): the closing lines of this function are missing from this
// paste.
247 fn nested_block_comments() {
248 with_default_globals(|| {
249 let sm
= Lrc
::new(SourceMap
::new(FilePathMapping
::empty()));
250 let sh
= mk_sess(sm
.clone());
251 let mut lexer
= setup(&sm
, &sh
, "/* /* */ */'a'".to_string());
252 assert_eq
!(lexer
.next_token(), token
::Comment
);
253 assert_eq
!(lexer
.next_token(), mk_lit(token
::Char
, "a", None
));
// CRLF handling: `// test\r\n` lexes as a `Comment` whose span covers
// bytes 0..7 (i.e. it excludes the `\r`), and the doc comment on the next
// line is interned WITHOUT the trailing `\r`.
// NOTE(review): the enclosing `fn` header (and `#[test]`) and the end of
// this function are missing from this paste (it is truncated here).
259 with_default_globals(|| {
260 let sm
= Lrc
::new(SourceMap
::new(FilePathMapping
::empty()));
261 let sh
= mk_sess(sm
.clone());
262 let mut lexer
= setup(&sm
, &sh
, "// test\r\n/// test\r\n".to_string());
263 let comment
= lexer
.next_token();
264 assert_eq
!(comment
.kind
, token
::Comment
);
// `// test` is 7 bytes; the span must stop before the `\r`.
265 assert_eq
!((comment
.span
.lo(), comment
.span
.hi()), (BytePos(0), BytePos(7)));
266 assert_eq
!(lexer
.next_token(), token
::Whitespace
);
267 assert_eq
!(lexer
.next_token(), token
::DocComment(Symbol
::intern("/// test")));