1 //! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
3 use rustc_hash
::FxHashMap
;
4 use stdx
::{always, non_empty_vec::NonEmptyVec}
;
6 ast
::{self, make::tokens::doc_comment}
,
7 AstToken
, Parse
, PreorderWithTokens
, SmolStr
, SyntaxElement
, SyntaxKind
,
9 SyntaxNode
, SyntaxToken
, SyntaxTreeBuilder
, TextRange
, TextSize
, WalkEvent
, T
,
11 use tt
::buffer
::{Cursor, TokenBuffer}
;
13 use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, TokenMap}
;
/// Convert the syntax node to a `TokenTree` (what macro will consume).
20 pub fn syntax_node_to_token_tree(node
: &SyntaxNode
) -> (tt
::Subtree
, TokenMap
) {
21 let (subtree
, token_map
, _
) = syntax_node_to_token_tree_with_modifications(
31 /// Convert the syntax node to a `TokenTree` (what macro will consume)
32 /// with the censored range excluded.
33 pub fn syntax_node_to_token_tree_with_modifications(
35 existing_token_map
: TokenMap
,
37 replace
: FxHashMap
<SyntaxElement
, Vec
<SyntheticToken
>>,
38 append
: FxHashMap
<SyntaxElement
, Vec
<SyntheticToken
>>,
39 ) -> (tt
::Subtree
, TokenMap
, u32) {
40 let global_offset
= node
.text_range().start();
41 let mut c
= Converter
::new(node
, global_offset
, existing_token_map
, next_id
, replace
, append
);
42 let subtree
= convert_tokens(&mut c
);
43 c
.id_alloc
.map
.shrink_to_fit();
44 always
!(c
.replace
.is_empty(), "replace: {:?}", c
.replace
);
45 always
!(c
.append
.is_empty(), "append: {:?}", c
.append
);
46 (subtree
, c
.id_alloc
.map
, c
.id_alloc
.next_id
)
/// Identifier carried by a `SyntheticToken` so a synthetic (procedurally
/// inserted) token can be tracked through conversion; `TokenIdAlloc` records
/// it in the token map (`insert_synthetic`) alongside the allocated
/// `tt::TokenId`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct SyntheticTokenId(pub u32);
52 #[derive(Debug, Clone)]
53 pub struct SyntheticToken
{
57 pub id
: SyntheticTokenId
,
// The following items are what a `rustc` macro can be parsed into:
61 // link: https://github.com/rust-lang/rust/blob/9ebf47851a357faa4cd97f4b1dc7835f6376e639/src/libsyntax/ext/expand.rs#L141
62 // * Expr(P<ast::Expr>) -> token_tree_to_expr
63 // * Pat(P<ast::Pat>) -> token_tree_to_pat
64 // * Ty(P<ast::Ty>) -> token_tree_to_ty
65 // * Stmts(SmallVec<[ast::Stmt; 1]>) -> token_tree_to_stmts
66 // * Items(SmallVec<[P<ast::Item>; 1]>) -> token_tree_to_items
68 // * TraitItems(SmallVec<[ast::TraitItem; 1]>)
69 // * AssocItems(SmallVec<[ast::AssocItem; 1]>)
70 // * ForeignItems(SmallVec<[ast::ForeignItem; 1]>
72 pub fn token_tree_to_syntax_node(
74 entry_point
: parser
::TopEntryPoint
,
75 ) -> (Parse
<SyntaxNode
>, TokenMap
) {
76 let buffer
= match tt
{
77 tt
::Subtree { delimiter: None, token_trees }
=> {
78 TokenBuffer
::from_tokens(token_trees
.as_slice())
80 _
=> TokenBuffer
::from_subtree(tt
),
82 let parser_input
= to_parser_input(&buffer
);
83 let parser_output
= entry_point
.parse(&parser_input
);
84 let mut tree_sink
= TtTreeSink
::new(buffer
.begin());
85 for event
in parser_output
.iter() {
87 parser
::Step
::Token { kind, n_input_tokens: n_raw_tokens }
=> {
88 tree_sink
.token(kind
, n_raw_tokens
)
90 parser
::Step
::Enter { kind }
=> tree_sink
.start_node(kind
),
91 parser
::Step
::Exit
=> tree_sink
.finish_node(),
92 parser
::Step
::Error { msg }
=> tree_sink
.error(msg
.to_string()),
95 let (parse
, range_map
) = tree_sink
.finish();
99 /// Convert a string to a `TokenTree`
100 pub fn parse_to_token_tree(text
: &str) -> Option
<(tt
::Subtree
, TokenMap
)> {
101 let lexed
= parser
::LexedStr
::new(text
);
102 if lexed
.errors().next().is_some() {
106 let mut conv
= RawConverter
{
109 id_alloc
: TokenIdAlloc
{
110 map
: Default
::default(),
111 global_offset
: TextSize
::default(),
116 let subtree
= convert_tokens(&mut conv
);
117 Some((subtree
, conv
.id_alloc
.map
))
/// Split a token tree of separator-delimited expressions (`$($e:expr)SEP*`) into the individual expression subtrees.
121 pub fn parse_exprs_with_sep(tt
: &tt
::Subtree
, sep
: char) -> Vec
<tt
::Subtree
> {
122 if tt
.token_trees
.is_empty() {
126 let mut iter
= TtIter
::new(tt
);
127 let mut res
= Vec
::new();
129 while iter
.peek_n(0).is_some() {
130 let expanded
= iter
.expect_fragment(parser
::PrefixEntryPoint
::Expr
);
132 res
.push(match expanded
.value
{
134 Some(tt @ tt
::TokenTree
::Leaf(_
)) => {
135 tt
::Subtree { delimiter: None, token_trees: vec![tt] }
137 Some(tt
::TokenTree
::Subtree(tt
)) => tt
,
140 let mut fork
= iter
.clone();
141 if fork
.expect_char(sep
).is_err() {
147 if iter
.peek_n(0).is_some() {
148 res
.push(tt
::Subtree { delimiter: None, token_trees: iter.into_iter().cloned().collect() }
);
154 fn convert_tokens
<C
: TokenConverter
>(conv
: &mut C
) -> tt
::Subtree
{
156 subtree
: tt
::Subtree
,
158 open_range
: TextRange
,
161 let entry
= StackEntry
{
162 subtree
: tt
::Subtree { delimiter: None, ..Default::default() }
,
163 // never used (delimiter is `None`)
165 open_range
: TextRange
::empty(TextSize
::of('
.'
)),
167 let mut stack
= NonEmptyVec
::new(entry
);
170 let StackEntry { subtree, .. }
= stack
.last_mut();
171 let result
= &mut subtree
.token_trees
;
172 let (token
, range
) = match conv
.bump() {
176 let synth_id
= token
.synthetic_id(conv
);
178 let kind
= token
.kind(conv
);
180 if let Some(tokens
) = conv
.convert_doc_comment(&token
) {
181 // FIXME: There has to be a better way to do this
182 // Add the comments token id to the converted doc string
183 let id
= conv
.id_alloc().alloc(range
, synth_id
);
184 result
.extend(tokens
.into_iter().map(|mut tt
| {
185 if let tt
::TokenTree
::Subtree(sub
) = &mut tt
{
186 if let Some(tt
::TokenTree
::Leaf(tt
::Leaf
::Literal(lit
))) =
187 sub
.token_trees
.get_mut(2)
197 let tt
= if kind
.is_punct() && kind
!= UNDERSCORE
{
198 if synth_id
.is_none() {
199 assert_eq
!(range
.len(), TextSize
::of('
.'
));
202 if let Some(delim
) = subtree
.delimiter
{
203 let expected
= match delim
.kind
{
204 tt
::DelimiterKind
::Parenthesis
=> T
!['
)'
],
205 tt
::DelimiterKind
::Brace
=> T
!['
}'
],
206 tt
::DelimiterKind
::Bracket
=> T
!['
]'
],
209 if kind
== expected
{
210 if let Some(entry
) = stack
.pop() {
211 conv
.id_alloc().close_delim(entry
.idx
, Some(range
));
212 stack
.last_mut().subtree
.token_trees
.push(entry
.subtree
.into());
218 let delim
= match kind
{
219 T
!['
('
] => Some(tt
::DelimiterKind
::Parenthesis
),
220 T
!['
{'
] => Some(tt
::DelimiterKind
::Brace
),
221 T
!['
['
] => Some(tt
::DelimiterKind
::Bracket
),
225 if let Some(kind
) = delim
{
226 let mut subtree
= tt
::Subtree
::default();
227 let (id
, idx
) = conv
.id_alloc().open_delim(range
, synth_id
);
228 subtree
.delimiter
= Some(tt
::Delimiter { id, kind }
);
229 stack
.push(StackEntry { subtree, idx, open_range: range }
);
233 let spacing
= match conv
.peek().map(|next
| next
.kind(conv
)) {
234 Some(kind
) if is_single_token_op(kind
) => tt
::Spacing
::Joint
,
235 _
=> tt
::Spacing
::Alone
,
237 let char = match token
.to_char(conv
) {
240 panic
!("Token from lexer must be single char: token = {:#?}", token
);
243 tt
::Leaf
::from(tt
::Punct { char, spacing, id: conv.id_alloc().alloc(range, synth_id) }
)
246 macro_rules
! make_leaf
{
248 tt
::$i { id: conv.id_alloc().alloc(range, synth_id), text: token.to_text(conv) }
252 let leaf
: tt
::Leaf
= match kind
{
253 T
![true] | T
![false] => make_leaf
!(Ident
),
254 IDENT
=> make_leaf
!(Ident
),
255 UNDERSCORE
=> make_leaf
!(Ident
),
256 k
if k
.is_keyword() => make_leaf
!(Ident
),
257 k
if k
.is_literal() => make_leaf
!(Literal
),
259 let char_unit
= TextSize
::of('
\''
);
260 let r
= TextRange
::at(range
.start(), char_unit
);
261 let apostrophe
= tt
::Leaf
::from(tt
::Punct
{
263 spacing
: tt
::Spacing
::Joint
,
264 id
: conv
.id_alloc().alloc(r
, synth_id
),
266 result
.push(apostrophe
.into());
268 let r
= TextRange
::at(range
.start() + char_unit
, range
.len() - char_unit
);
269 let ident
= tt
::Leaf
::from(tt
::Ident
{
270 text
: SmolStr
::new(&token
.to_text(conv
)[1..]),
271 id
: conv
.id_alloc().alloc(r
, synth_id
),
273 result
.push(ident
.into());
284 // If we get here, we've consumed all input tokens.
285 // We might have more than one subtree in the stack, if the delimiters are improperly balanced.
286 // Merge them so we're left with one.
287 while let Some(entry
) = stack
.pop() {
288 let parent
= stack
.last_mut();
290 conv
.id_alloc().close_delim(entry
.idx
, None
);
291 let leaf
: tt
::Leaf
= tt
::Punct
{
292 id
: conv
.id_alloc().alloc(entry
.open_range
, None
),
293 char: match entry
.subtree
.delimiter
.unwrap().kind
{
294 tt
::DelimiterKind
::Parenthesis
=> '
('
,
295 tt
::DelimiterKind
::Brace
=> '
{'
,
296 tt
::DelimiterKind
::Bracket
=> '
['
,
298 spacing
: tt
::Spacing
::Alone
,
301 parent
.subtree
.token_trees
.push(leaf
.into());
302 parent
.subtree
.token_trees
.extend(entry
.subtree
.token_trees
);
305 let subtree
= stack
.into_last().subtree
;
306 if let [tt
::TokenTree
::Subtree(first
)] = &*subtree
.token_trees
{
313 fn is_single_token_op(kind
: SyntaxKind
) -> bool
{
// LIFETIME_IDENT will be split into a sequence of `'` (a single quote) and an identifier token.
342 /// Returns the textual content of a doc comment block as a quoted string
343 /// That is, strips leading `///` (or `/**`, etc)
344 /// and strips the ending `*/`
345 /// And then quote the string, which is needed to convert to `tt::Literal`
346 fn doc_comment_text(comment
: &ast
::Comment
) -> SmolStr
{
347 let prefix_len
= comment
.prefix().len();
348 let mut text
= &comment
.text()[prefix_len
..];
350 // Remove ending "*/"
351 if comment
.kind().shape
== ast
::CommentShape
::Block
{
352 text
= &text
[0..text
.len() - 2];
// Note that `tt::Literal` expects an escaped string
357 let text
= format
!("\"{}\"", text
.escape_debug());
361 fn convert_doc_comment(token
: &syntax
::SyntaxToken
) -> Option
<Vec
<tt
::TokenTree
>> {
362 cov_mark
::hit
!(test_meta_doc_comments
);
363 let comment
= ast
::Comment
::cast(token
.clone())?
;
364 let doc
= comment
.kind().doc?
;
// Make the `doc = "\" Comments\""` meta tokens
367 let meta_tkns
= vec
![mk_ident("doc"), mk_punct('
='
), mk_doc_literal(&comment
)];
370 let mut token_trees
= Vec
::with_capacity(3);
371 token_trees
.push(mk_punct('
#'));
372 if let ast
::CommentPlacement
::Inner
= doc
{
373 token_trees
.push(mk_punct('
!'
));
375 token_trees
.push(tt
::TokenTree
::from(tt
::Subtree
{
376 delimiter
: Some(tt
::Delimiter
{
377 kind
: tt
::DelimiterKind
::Bracket
,
378 id
: tt
::TokenId
::unspecified(),
380 token_trees
: meta_tkns
,
383 return Some(token_trees
);
386 fn mk_ident(s
: &str) -> tt
::TokenTree
{
387 tt
::TokenTree
::from(tt
::Leaf
::from(tt
::Ident
{
389 id
: tt
::TokenId
::unspecified(),
393 fn mk_punct(c
: char) -> tt
::TokenTree
{
394 tt
::TokenTree
::from(tt
::Leaf
::from(tt
::Punct
{
396 spacing
: tt
::Spacing
::Alone
,
397 id
: tt
::TokenId
::unspecified(),
401 fn mk_doc_literal(comment
: &ast
::Comment
) -> tt
::TokenTree
{
402 let lit
= tt
::Literal { text: doc_comment_text(comment), id: tt::TokenId::unspecified() }
;
404 tt
::TokenTree
::from(tt
::Leaf
::from(lit
))
408 struct TokenIdAlloc
{
410 global_offset
: TextSize
,
417 absolute_range
: TextRange
,
418 synthetic_id
: Option
<SyntheticTokenId
>,
420 let relative_range
= absolute_range
- self.global_offset
;
421 let token_id
= tt
::TokenId(self.next_id
);
423 self.map
.insert(token_id
, relative_range
);
424 if let Some(id
) = synthetic_id
{
425 self.map
.insert_synthetic(token_id
, id
);
432 open_abs_range
: TextRange
,
433 synthetic_id
: Option
<SyntheticTokenId
>,
434 ) -> (tt
::TokenId
, usize) {
435 let token_id
= tt
::TokenId(self.next_id
);
437 let idx
= self.map
.insert_delim(
439 open_abs_range
- self.global_offset
,
440 open_abs_range
- self.global_offset
,
442 if let Some(id
) = synthetic_id
{
443 self.map
.insert_synthetic(token_id
, id
);
448 fn close_delim(&mut self, idx
: usize, close_abs_range
: Option
<TextRange
>) {
449 match close_abs_range
{
451 self.map
.remove_delim(idx
);
454 self.map
.update_close_delim(idx
, close
- self.global_offset
);
460 /// A raw token (straight from lexer) converter
461 struct RawConverter
<'a
> {
462 lexed
: parser
::LexedStr
<'a
>,
464 id_alloc
: TokenIdAlloc
,
467 trait SrcToken
<Ctx
>: std
::fmt
::Debug
{
468 fn kind(&self, ctx
: &Ctx
) -> SyntaxKind
;
470 fn to_char(&self, ctx
: &Ctx
) -> Option
<char>;
472 fn to_text(&self, ctx
: &Ctx
) -> SmolStr
;
474 fn synthetic_id(&self, ctx
: &Ctx
) -> Option
<SyntheticTokenId
>;
477 trait TokenConverter
: Sized
{
478 type Token
: SrcToken
<Self>;
480 fn convert_doc_comment(&self, token
: &Self::Token
) -> Option
<Vec
<tt
::TokenTree
>>;
482 fn bump(&mut self) -> Option
<(Self::Token
, TextRange
)>;
484 fn peek(&self) -> Option
<Self::Token
>;
486 fn id_alloc(&mut self) -> &mut TokenIdAlloc
;
489 impl<'a
> SrcToken
<RawConverter
<'a
>> for usize {
490 fn kind(&self, ctx
: &RawConverter
<'a
>) -> SyntaxKind
{
491 ctx
.lexed
.kind(*self)
494 fn to_char(&self, ctx
: &RawConverter
<'a
>) -> Option
<char> {
495 ctx
.lexed
.text(*self).chars().next()
498 fn to_text(&self, ctx
: &RawConverter
<'_
>) -> SmolStr
{
499 ctx
.lexed
.text(*self).into()
502 fn synthetic_id(&self, _ctx
: &RawConverter
<'a
>) -> Option
<SyntheticTokenId
> {
507 impl<'a
> TokenConverter
for RawConverter
<'a
> {
510 fn convert_doc_comment(&self, &token
: &usize) -> Option
<Vec
<tt
::TokenTree
>> {
511 let text
= self.lexed
.text(token
);
512 convert_doc_comment(&doc_comment(text
))
515 fn bump(&mut self) -> Option
<(Self::Token
, TextRange
)> {
516 if self.pos
== self.lexed
.len() {
519 let token
= self.pos
;
521 let range
= self.lexed
.text_range(token
);
522 let range
= TextRange
::new(range
.start
.try_into().unwrap(), range
.end
.try_into().unwrap());
527 fn peek(&self) -> Option
<Self::Token
> {
528 if self.pos
== self.lexed
.len() {
534 fn id_alloc(&mut self) -> &mut TokenIdAlloc
{
540 id_alloc
: TokenIdAlloc
,
541 current
: Option
<SyntaxToken
>,
542 current_synthetic
: Vec
<SyntheticToken
>,
543 preorder
: PreorderWithTokens
,
544 replace
: FxHashMap
<SyntaxElement
, Vec
<SyntheticToken
>>,
545 append
: FxHashMap
<SyntaxElement
, Vec
<SyntheticToken
>>,
547 punct_offset
: Option
<(SyntaxToken
, TextSize
)>,
553 global_offset
: TextSize
,
554 existing_token_map
: TokenMap
,
556 mut replace
: FxHashMap
<SyntaxElement
, Vec
<SyntheticToken
>>,
557 mut append
: FxHashMap
<SyntaxElement
, Vec
<SyntheticToken
>>,
559 let range
= node
.text_range();
560 let mut preorder
= node
.preorder_with_tokens();
561 let (first
, synthetic
) = Self::next_token(&mut preorder
, &mut replace
, &mut append
);
563 id_alloc
: { TokenIdAlloc { map: existing_token_map, global_offset, next_id }
},
565 current_synthetic
: synthetic
,
575 preorder
: &mut PreorderWithTokens
,
576 replace
: &mut FxHashMap
<SyntaxElement
, Vec
<SyntheticToken
>>,
577 append
: &mut FxHashMap
<SyntaxElement
, Vec
<SyntheticToken
>>,
578 ) -> (Option
<SyntaxToken
>, Vec
<SyntheticToken
>) {
579 while let Some(ev
) = preorder
.next() {
581 WalkEvent
::Enter(ele
) => ele
,
582 WalkEvent
::Leave(ele
) => {
583 if let Some(mut v
) = append
.remove(&ele
) {
592 if let Some(mut v
) = replace
.remove(&ele
) {
593 preorder
.skip_subtree();
600 SyntaxElement
::Token(t
) => return (Some(t
), Vec
::new()),
610 Ordinary(SyntaxToken
),
611 // FIXME is this supposed to be `Punct`?
612 Punch(SyntaxToken
, TextSize
),
613 Synthetic(SyntheticToken
),
617 fn token(&self) -> Option
<&SyntaxToken
> {
619 SynToken
::Ordinary(it
) | SynToken
::Punch(it
, _
) => Some(it
),
620 SynToken
::Synthetic(_
) => None
,
625 impl SrcToken
<Converter
> for SynToken
{
626 fn kind(&self, ctx
: &Converter
) -> SyntaxKind
{
628 SynToken
::Ordinary(token
) => token
.kind(),
629 SynToken
::Punch(..) => SyntaxKind
::from_char(self.to_char(ctx
).unwrap()).unwrap(),
630 SynToken
::Synthetic(token
) => token
.kind
,
633 fn to_char(&self, _ctx
: &Converter
) -> Option
<char> {
635 SynToken
::Ordinary(_
) => None
,
636 SynToken
::Punch(it
, i
) => it
.text().chars().nth((*i
).into()),
637 SynToken
::Synthetic(token
) if token
.text
.len() == 1 => token
.text
.chars().next(),
638 SynToken
::Synthetic(_
) => None
,
641 fn to_text(&self, _ctx
: &Converter
) -> SmolStr
{
643 SynToken
::Ordinary(token
) => token
.text().into(),
644 SynToken
::Punch(token
, _
) => token
.text().into(),
645 SynToken
::Synthetic(token
) => token
.text
.clone(),
649 fn synthetic_id(&self, _ctx
: &Converter
) -> Option
<SyntheticTokenId
> {
651 SynToken
::Synthetic(token
) => Some(token
.id
),
657 impl TokenConverter
for Converter
{
658 type Token
= SynToken
;
659 fn convert_doc_comment(&self, token
: &Self::Token
) -> Option
<Vec
<tt
::TokenTree
>> {
660 convert_doc_comment(token
.token()?
)
663 fn bump(&mut self) -> Option
<(Self::Token
, TextRange
)> {
664 if let Some((punct
, offset
)) = self.punct_offset
.clone() {
665 if usize::from(offset
) + 1 < punct
.text().len() {
666 let offset
= offset
+ TextSize
::of('
.'
);
667 let range
= punct
.text_range();
668 self.punct_offset
= Some((punct
.clone(), offset
));
669 let range
= TextRange
::at(range
.start() + offset
, TextSize
::of('
.'
));
670 return Some((SynToken
::Punch(punct
, offset
), range
));
674 if let Some(synth_token
) = self.current_synthetic
.pop() {
675 if self.current_synthetic
.is_empty() {
676 let (new_current
, new_synth
) =
677 Self::next_token(&mut self.preorder
, &mut self.replace
, &mut self.append
);
678 self.current
= new_current
;
679 self.current_synthetic
= new_synth
;
681 let range
= synth_token
.range
;
682 return Some((SynToken
::Synthetic(synth_token
), range
));
685 let curr
= self.current
.clone()?
;
686 if !self.range
.contains_range(curr
.text_range()) {
689 let (new_current
, new_synth
) =
690 Self::next_token(&mut self.preorder
, &mut self.replace
, &mut self.append
);
691 self.current
= new_current
;
692 self.current_synthetic
= new_synth
;
693 let token
= if curr
.kind().is_punct() {
694 self.punct_offset
= Some((curr
.clone(), 0.into
()));
695 let range
= curr
.text_range();
696 let range
= TextRange
::at(range
.start(), TextSize
::of('
.'
));
697 (SynToken
::Punch(curr
, 0.into
()), range
)
699 self.punct_offset
= None
;
700 let range
= curr
.text_range();
701 (SynToken
::Ordinary(curr
), range
)
707 fn peek(&self) -> Option
<Self::Token
> {
708 if let Some((punct
, mut offset
)) = self.punct_offset
.clone() {
709 offset
+= TextSize
::of('
.'
);
710 if usize::from(offset
) < punct
.text().len() {
711 return Some(SynToken
::Punch(punct
, offset
));
715 if let Some(synth_token
) = self.current_synthetic
.last() {
716 return Some(SynToken
::Synthetic(synth_token
.clone()));
719 let curr
= self.current
.clone()?
;
720 if !self.range
.contains_range(curr
.text_range()) {
724 let token
= if curr
.kind().is_punct() {
725 SynToken
::Punch(curr
, 0.into
())
727 SynToken
::Ordinary(curr
)
732 fn id_alloc(&mut self) -> &mut TokenIdAlloc
{
737 struct TtTreeSink
<'a
> {
740 open_delims
: FxHashMap
<tt
::TokenId
, TextSize
>,
742 inner
: SyntaxTreeBuilder
,
746 impl<'a
> TtTreeSink
<'a
> {
747 fn new(cursor
: Cursor
<'a
>) -> Self {
751 open_delims
: FxHashMap
::default(),
753 inner
: SyntaxTreeBuilder
::default(),
754 token_map
: TokenMap
::default(),
758 fn finish(mut self) -> (Parse
<SyntaxNode
>, TokenMap
) {
759 self.token_map
.shrink_to_fit();
760 (self.inner
.finish(), self.token_map
)
764 fn delim_to_str(d
: tt
::DelimiterKind
, closing
: bool
) -> &'
static str {
765 let texts
= match d
{
766 tt
::DelimiterKind
::Parenthesis
=> "()",
767 tt
::DelimiterKind
::Brace
=> "{}",
768 tt
::DelimiterKind
::Bracket
=> "[]",
771 let idx
= closing
as usize;
772 &texts
[idx
..texts
.len() - (1 - idx
)]
775 impl<'a
> TtTreeSink
<'a
> {
776 fn token(&mut self, kind
: SyntaxKind
, mut n_tokens
: u8) {
777 if kind
== LIFETIME_IDENT
{
781 let mut last
= self.cursor
;
782 for _
in 0..n_tokens
{
784 if self.cursor
.eof() {
788 let text
: &str = loop {
789 break match self.cursor
.token_tree() {
790 Some(tt
::buffer
::TokenTreeRef
::Leaf(leaf
, _
)) => {
791 // Mark the range if needed
792 let (text
, id
) = match leaf
{
793 tt
::Leaf
::Ident(ident
) => (ident
.text
.as_str(), ident
.id
),
794 tt
::Leaf
::Punct(punct
) => {
795 assert
!(punct
.char.is_ascii());
796 tmp
= punct
.char as u8;
797 (std
::str::from_utf8(std
::slice
::from_ref(&tmp
)).unwrap(), punct
.id
)
799 tt
::Leaf
::Literal(lit
) => (lit
.text
.as_str(), lit
.id
),
801 let range
= TextRange
::at(self.text_pos
, TextSize
::of(text
));
802 self.token_map
.insert(id
, range
);
803 self.cursor
= self.cursor
.bump();
806 Some(tt
::buffer
::TokenTreeRef
::Subtree(subtree
, _
)) => {
807 self.cursor
= self.cursor
.subtree().unwrap();
808 match subtree
.delimiter
{
810 self.open_delims
.insert(d
.id
, self.text_pos
);
811 delim_to_str(d
.kind
, false)
817 let parent
= self.cursor
.end().unwrap();
818 self.cursor
= self.cursor
.bump();
819 match parent
.delimiter
{
821 if let Some(open_delim
) = self.open_delims
.get(&d
.id
) {
822 let open_range
= TextRange
::at(*open_delim
, TextSize
::of('
('
));
824 TextRange
::at(self.text_pos
, TextSize
::of('
('
));
825 self.token_map
.insert_delim(d
.id
, open_range
, close_range
);
827 delim_to_str(d
.kind
, true)
835 self.text_pos
+= TextSize
::of(text
);
838 self.inner
.token(kind
, self.buf
.as_str());
840 // Add whitespace between adjoint puncts
841 let next
= last
.bump();
843 Some(tt
::buffer
::TokenTreeRef
::Leaf(tt
::Leaf
::Punct(curr
), _
)),
844 Some(tt
::buffer
::TokenTreeRef
::Leaf(tt
::Leaf
::Punct(next
), _
)),
845 ) = (last
.token_tree(), next
.token_tree())
847 // Note: We always assume the semi-colon would be the last token in
848 // other parts of RA such that we don't add whitespace here.
850 // When `next` is a `Punct` of `'`, that's a part of a lifetime identifier so we don't
851 // need to add whitespace either.
852 if curr
.spacing
== tt
::Spacing
::Alone
&& curr
.char != '
;'
&& next
.char != '
\''
{
853 self.inner
.token(WHITESPACE
, " ");
854 self.text_pos
+= TextSize
::of(' '
);
859 fn start_node(&mut self, kind
: SyntaxKind
) {
860 self.inner
.start_node(kind
);
863 fn finish_node(&mut self) {
864 self.inner
.finish_node();
867 fn error(&mut self, error
: String
) {
868 self.inner
.error(error
, self.text_pos
)