//! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].

-use rustc_hash::{FxHashMap, FxHashSet};
+use rustc_hash::FxHashMap;
use stdx::{always, non_empty_vec::NonEmptyVec};
use syntax::{
    ast::{self, make::tokens::doc_comment},
@@ -35,7 +35,16 @@ pub fn syntax_node_to_token_tree_censored(
    (subtree, c.id_alloc.map)
}

-pub type SyntheticToken = (SyntaxKind, SmolStr);
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct SyntheticTokenId(pub u32);
+
+#[derive(Debug, Clone)]
+pub struct SyntheticToken {
+    pub kind: SyntaxKind,
+    pub text: SmolStr,
+    pub range: TextRange,
+    pub id: SyntheticTokenId,
+}

// The following items are what `rustc` macro can be parsed into :
// link: https://github.com/rust-lang/rust/blob/9ebf47851a357faa4cd97f4b1dc7835f6376e639/src/libsyntax/ext/expand.rs#L141
@@ -153,13 +162,14 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
            Some(it) => it,
            None => break,
        };
+        let synth_id = token.synthetic_id(&conv);

        let kind = token.kind(&conv);
        if kind == COMMENT {
            if let Some(tokens) = conv.convert_doc_comment(&token) {
                // FIXME: There has to be a better way to do this
                // Add the comments token id to the converted doc string
-                let id = conv.id_alloc().alloc(range);
+                let id = conv.id_alloc().alloc(range, synth_id);
                result.extend(tokens.into_iter().map(|mut tt| {
                    if let tt::TokenTree::Subtree(sub) = &mut tt {
                        if let Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) =
@@ -174,7 +184,7 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
            continue;
        }
        let tt = if kind.is_punct() && kind != UNDERSCORE {
-            assert_eq!(range.len(), TextSize::of('.'));
+            // assert_eq!(range.len(), TextSize::of('.'));

            if let Some(delim) = subtree.delimiter {
                let expected = match delim.kind {
@@ -226,11 +236,13 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
                    panic!("Token from lexer must be single char: token = {:#?}", token);
                }
            };
-            tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range) }).into()
+            tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range, synth_id) })
+                .into()
        } else {
            macro_rules! make_leaf {
                ($i:ident) => {
-                    tt::$i { id: conv.id_alloc().alloc(range), text: token.to_text(conv) }.into()
+                    tt::$i { id: conv.id_alloc().alloc(range, synth_id), text: token.to_text(conv) }
+                        .into()
                };
            }
            let leaf: tt::Leaf = match kind {
@@ -245,14 +257,14 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
                    let apostrophe = tt::Leaf::from(tt::Punct {
                        char: '\'',
                        spacing: tt::Spacing::Joint,
-                        id: conv.id_alloc().alloc(r),
+                        id: conv.id_alloc().alloc(r, synth_id),
                    });
                    result.push(apostrophe.into());

                    let r = TextRange::at(range.start() + char_unit, range.len() - char_unit);
                    let ident = tt::Leaf::from(tt::Ident {
                        text: SmolStr::new(&token.to_text(conv)[1..]),
-                        id: conv.id_alloc().alloc(r),
+                        id: conv.id_alloc().alloc(r, synth_id),
                    });
                    result.push(ident.into());
                    continue;
@@ -273,7 +285,7 @@ fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {

        conv.id_alloc().close_delim(entry.idx, None);
        let leaf: tt::Leaf = tt::Punct {
-            id: conv.id_alloc().alloc(entry.open_range),
+            id: conv.id_alloc().alloc(entry.open_range, None),
            char: match entry.subtree.delimiter.unwrap().kind {
                tt::DelimiterKind::Parenthesis => '(',
                tt::DelimiterKind::Brace => '{',
@@ -367,11 +379,18 @@ struct TokenIdAlloc {
}

impl TokenIdAlloc {
-    fn alloc(&mut self, absolute_range: TextRange) -> tt::TokenId {
+    fn alloc(
+        &mut self,
+        absolute_range: TextRange,
+        synthetic_id: Option<SyntheticTokenId>,
+    ) -> tt::TokenId {
        let relative_range = absolute_range - self.global_offset;
        let token_id = tt::TokenId(self.next_id);
        self.next_id += 1;
        self.map.insert(token_id, relative_range);
+        if let Some(id) = synthetic_id {
+            self.map.insert_synthetic(token_id, id);
+        }
        token_id
    }

@@ -411,6 +430,8 @@ trait SrcToken<Ctx>: std::fmt::Debug {
    fn to_char(&self, ctx: &Ctx) -> Option<char>;

    fn to_text(&self, ctx: &Ctx) -> SmolStr;
+
+    fn synthetic_id(&self, ctx: &Ctx) -> Option<SyntheticTokenId>;
}

trait TokenConvertor: Sized {
@@ -437,6 +458,10 @@ impl<'a> SrcToken<RawConvertor<'a>> for usize {
    fn to_text(&self, ctx: &RawConvertor<'_>) -> SmolStr {
        ctx.lexed.text(*self).into()
    }
+
+    fn synthetic_id(&self, _ctx: &RawConvertor<'a>) -> Option<SyntheticTokenId> {
+        None
+    }
}

impl<'a> TokenConvertor for RawConvertor<'a> {
@@ -564,21 +589,29 @@ impl SrcToken<Convertor> for SynToken {
        match self {
            SynToken::Ordinary(token) => token.kind(),
            SynToken::Punch(token, _) => token.kind(),
-            SynToken::Synthetic((kind, _)) => *kind,
+            SynToken::Synthetic(token) => token.kind,
        }
    }
    fn to_char(&self, _ctx: &Convertor) -> Option<char> {
        match self {
            SynToken::Ordinary(_) => None,
            SynToken::Punch(it, i) => it.text().chars().nth((*i).into()),
+            SynToken::Synthetic(token) if token.text.len() == 1 => token.text.chars().next(),
            SynToken::Synthetic(_) => None,
        }
    }
    fn to_text(&self, _ctx: &Convertor) -> SmolStr {
        match self {
            SynToken::Ordinary(token) => token.text().into(),
            SynToken::Punch(token, _) => token.text().into(),
-            SynToken::Synthetic((_, text)) => text.clone(),
+            SynToken::Synthetic(token) => token.text.clone(),
+        }
+    }
+
+    fn synthetic_id(&self, _ctx: &Convertor) -> Option<SyntheticTokenId> {
+        match self {
+            SynToken::Synthetic(token) => Some(token.id),
+            _ => None,
        }
    }
}
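
Taken together, the diff replaces the `(SyntaxKind, SmolStr)` tuple with a dedicated `SyntheticToken` that carries its own `range` and `SyntheticTokenId`, and threads that id through `TokenIdAlloc::alloc` so the `TokenMap` can remember which token ids came from synthetic (fixup-inserted) tokens rather than from real source text. The standalone sketch below mirrors that allocation path with simplified stand-in types; only `insert_synthetic` appears in the diff, so the lookup method name (`synthetic_token_id`) and the internal maps are illustrative assumptions, not the crate's actual API.

use std::collections::HashMap;

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
struct SyntheticTokenId(u32);

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
struct TokenId(u32);

/// Simplified stand-in for the real `TokenMap`: alongside the usual
/// token-id -> text-range entries it records which token ids came from
/// synthetic tokens (names of the maps and the lookup are assumptions).
#[derive(Default, Debug)]
struct TokenMap {
    ranges: HashMap<TokenId, (u32, u32)>,
    synthetic: HashMap<TokenId, SyntheticTokenId>,
}

impl TokenMap {
    fn insert(&mut self, id: TokenId, range: (u32, u32)) {
        self.ranges.insert(id, range);
    }
    fn insert_synthetic(&mut self, id: TokenId, synth: SyntheticTokenId) {
        self.synthetic.insert(id, synth);
    }
    fn synthetic_token_id(&self, id: TokenId) -> Option<SyntheticTokenId> {
        self.synthetic.get(&id).copied()
    }
}

/// Mirrors `TokenIdAlloc::alloc` from the diff: every token gets a fresh
/// `TokenId`; synthetic tokens additionally record their synthetic id.
struct TokenIdAlloc {
    map: TokenMap,
    next_id: u32,
}

impl TokenIdAlloc {
    fn alloc(&mut self, range: (u32, u32), synthetic_id: Option<SyntheticTokenId>) -> TokenId {
        let token_id = TokenId(self.next_id);
        self.next_id += 1;
        self.map.insert(token_id, range);
        if let Some(id) = synthetic_id {
            self.map.insert_synthetic(token_id, id);
        }
        token_id
    }
}

fn main() {
    let mut alloc = TokenIdAlloc { map: TokenMap::default(), next_id: 0 };

    // An ordinary token taken from the source text: no synthetic id.
    let real = alloc.alloc((0, 3), None);
    // A token inserted by fixup/recovery: carries a synthetic id.
    let synthetic = alloc.alloc((3, 3), Some(SyntheticTokenId(0)));

    assert_eq!(alloc.map.synthetic_token_id(real), None);
    assert_eq!(alloc.map.synthetic_token_id(synthetic), Some(SyntheticTokenId(0)));
    println!("ok");
}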