Skip to content

Commit 98819d8

Browse files
Merge #4029
4029: Fix various proc-macro bugs r=matklad a=edwin0cheng This PR does the following things: 1. Fixed #4001 by splitting the `LIFETIME` lexer token into two mbe tokens. This is because the rustc token stream expects `LIFETIME` to be a combination of a punct and an ident, whereas RA's `tt::TokenTree` previously treated it as a single `Ident`. 2. Fixed #4003 by skipping `proc-macro`s during completion. This is because we currently don't have an `AstNode` for `proc-macro`; we would need to redesign how `HasSource` is implemented for `proc-macro`. 3. Fixed a bug in how empty `TokenStream`s are merged in `proc-macro-srv`, so that no L_DOLLAR or R_DOLLAR tokens are emitted accidentally. Co-authored-by: Edwin Cheng <[email protected]>
2 parents 84e3304 + 72bba98 commit 98819d8

File tree

7 files changed

+117
-17
lines changed

7 files changed

+117
-17
lines changed

crates/ra_hir/src/code_model.rs

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -759,6 +759,17 @@ impl MacroDef {
759759
pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
760760
self.source(db).value.name().map(|it| it.as_name())
761761
}
762+
763+
/// Indicate it is a proc-macro
764+
pub fn is_proc_macro(&self) -> bool {
765+
match self.id.kind {
766+
hir_expand::MacroDefKind::Declarative => false,
767+
hir_expand::MacroDefKind::BuiltIn(_) => false,
768+
hir_expand::MacroDefKind::BuiltInDerive(_) => false,
769+
hir_expand::MacroDefKind::BuiltInEager(_) => false,
770+
hir_expand::MacroDefKind::CustomDerive(_) => true,
771+
}
772+
}
762773
}
763774

764775
/// Invariant: `inner.as_assoc_item(db).is_some()`

crates/ra_ide/src/completion/presentation.rs

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -156,6 +156,12 @@ impl Completions {
156156
name: Option<String>,
157157
macro_: hir::MacroDef,
158158
) {
159+
// FIXME: Currently proc-macro do not have ast-node,
160+
// such that it does not have source
161+
if macro_.is_proc_macro() {
162+
return;
163+
}
164+
159165
let name = match name {
160166
Some(it) => it,
161167
None => return,

crates/ra_mbe/src/mbe_expander/matcher.rs

Lines changed: 26 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -202,6 +202,13 @@ impl<'a> TtIter<'a> {
202202
}
203203

204204
pub(crate) fn expect_tt(&mut self) -> Result<tt::TokenTree, ()> {
205+
match self.peek_n(0) {
206+
Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) if punct.char == '\'' => {
207+
return self.expect_lifetime();
208+
}
209+
_ => (),
210+
}
211+
205212
let tt = self.next().ok_or_else(|| ())?.clone();
206213
let punct = match tt {
207214
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if punct.spacing == tt::Spacing::Joint => {
@@ -255,13 +262,21 @@ impl<'a> TtIter<'a> {
255262
}
256263
}
257264

258-
pub(crate) fn expect_lifetime(&mut self) -> Result<&tt::Ident, ()> {
259-
let ident = self.expect_ident()?;
260-
// check if it start from "`"
261-
if !ident.text.starts_with('\'') {
265+
pub(crate) fn expect_lifetime(&mut self) -> Result<tt::TokenTree, ()> {
266+
let punct = self.expect_punct()?;
267+
if punct.char != '\'' {
262268
return Err(());
263269
}
264-
Ok(ident)
270+
let ident = self.expect_ident()?;
271+
272+
Ok(tt::Subtree {
273+
delimiter: None,
274+
token_trees: vec![
275+
tt::Leaf::Punct(punct.clone()).into(),
276+
tt::Leaf::Ident(ident.clone()).into(),
277+
],
278+
}
279+
.into())
265280
}
266281

267282
pub(crate) fn expect_fragment(
@@ -274,7 +289,10 @@ impl<'a> TtIter<'a> {
274289
}
275290

276291
impl<'a> TreeSink for OffsetTokenSink<'a> {
277-
fn token(&mut self, _kind: SyntaxKind, n_tokens: u8) {
292+
fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
293+
if kind == SyntaxKind::LIFETIME {
294+
n_tokens = 2;
295+
}
278296
for _ in 0..n_tokens {
279297
self.cursor = self.cursor.bump_subtree();
280298
}
@@ -286,7 +304,7 @@ impl<'a> TtIter<'a> {
286304
}
287305
}
288306

289-
let buffer = TokenBuffer::new(self.inner.as_slice());
307+
let buffer = TokenBuffer::new(&self.inner.as_slice());
290308
let mut src = SubtreeTokenSource::new(&buffer);
291309
let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false };
292310

@@ -422,7 +440,7 @@ fn match_meta_var(kind: &str, input: &mut TtIter) -> ExpandResult<Option<Fragmen
422440
"tt" => input.expect_tt().map(Some).map_err(|()| err!()),
423441
"lifetime" => input
424442
.expect_lifetime()
425-
.map(|ident| Some(tt::Leaf::Ident(ident.clone()).into()))
443+
.map(|tt| Some(tt))
426444
.map_err(|()| err!("expected lifetime")),
427445
"literal" => input
428446
.expect_literal()

crates/ra_mbe/src/subtree_source.rs

Lines changed: 31 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -50,6 +50,26 @@ impl<'a> SubtreeTokenSource<'a> {
5050
}
5151

5252
fn get(&self, pos: usize) -> Ref<Option<TtToken>> {
53+
fn is_lifetime(c: Cursor) -> Option<(Cursor, SmolStr)> {
54+
let tkn = c.token_tree();
55+
56+
if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = tkn {
57+
if punct.char == '\'' {
58+
let next = c.bump();
59+
if let Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) = next.token_tree() {
60+
let res_cursor = next.bump();
61+
let text = SmolStr::new("'".to_string() + &ident.to_string());
62+
63+
return Some((res_cursor, text));
64+
} else {
65+
panic!("Next token must be ident : {:#?}", next.token_tree());
66+
}
67+
}
68+
}
69+
70+
None
71+
}
72+
5373
if pos < self.cached.borrow().len() {
5474
return Ref::map(self.cached.borrow(), |c| &c[pos]);
5575
}
@@ -63,6 +83,12 @@ impl<'a> SubtreeTokenSource<'a> {
6383
continue;
6484
}
6585

86+
if let Some((curr, text)) = is_lifetime(cursor) {
87+
cached.push(Some(TtToken { kind: LIFETIME, is_joint_to_next: false, text }));
88+
self.cached_cursor.set(curr);
89+
continue;
90+
}
91+
6692
match cursor.token_tree() {
6793
Some(tt::TokenTree::Leaf(leaf)) => {
6894
cached.push(Some(convert_leaf(&leaf)));
@@ -152,7 +178,11 @@ fn convert_ident(ident: &tt::Ident) -> TtToken {
152178
}
153179

154180
fn convert_punct(p: tt::Punct) -> TtToken {
155-
let kind = SyntaxKind::from_char(p.char).unwrap();
181+
let kind = match SyntaxKind::from_char(p.char) {
182+
None => panic!("{:#?} is not a valid punct", p),
183+
Some(kind) => kind,
184+
};
185+
156186
let text = {
157187
let mut buf = [0u8; 4];
158188
let s: &str = p.char.encode_utf8(&mut buf);

crates/ra_mbe/src/syntax_bridge.rs

Lines changed: 32 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -271,7 +271,7 @@ struct RawConvertor<'a> {
271271
inner: std::slice::Iter<'a, RawToken>,
272272
}
273273

274-
trait SrcToken {
274+
trait SrcToken: std::fmt::Debug {
275275
fn kind(&self) -> SyntaxKind;
276276

277277
fn to_char(&self) -> Option<char>;
@@ -361,8 +361,12 @@ trait TokenConvertor {
361361
Some(next) if next.kind().is_punct() => tt::Spacing::Joint,
362362
_ => tt::Spacing::Alone,
363363
};
364-
let char = token.to_char().expect("Token from lexer must be single char");
365-
364+
let char = match token.to_char() {
365+
Some(c) => c,
366+
None => {
367+
panic!("Token from lexer must be single char: token = {:#?}", token);
368+
}
369+
};
366370
tt::Leaf::from(tt::Punct { char, spacing, id: self.id_alloc().alloc(range) }).into()
367371
}
368372
} else {
@@ -373,9 +377,28 @@ trait TokenConvertor {
373377
}
374378
let leaf: tt::Leaf = match k {
375379
T![true] | T![false] => make_leaf!(Literal),
376-
IDENT | LIFETIME => make_leaf!(Ident),
380+
IDENT => make_leaf!(Ident),
377381
k if k.is_keyword() => make_leaf!(Ident),
378382
k if k.is_literal() => make_leaf!(Literal),
383+
LIFETIME => {
384+
let char_unit = TextUnit::from_usize(1);
385+
let r = TextRange::offset_len(range.start(), char_unit);
386+
let apostrophe = tt::Leaf::from(tt::Punct {
387+
char: '\'',
388+
spacing: tt::Spacing::Joint,
389+
id: self.id_alloc().alloc(r),
390+
});
391+
result.push(apostrophe.into());
392+
393+
let r =
394+
TextRange::offset_len(range.start() + char_unit, range.len() - char_unit);
395+
let ident = tt::Leaf::from(tt::Ident {
396+
text: SmolStr::new(&token.to_text()[1..]),
397+
id: self.id_alloc().alloc(r),
398+
});
399+
result.push(ident.into());
400+
return;
401+
}
379402
_ => return,
380403
};
381404

@@ -455,6 +478,7 @@ impl Convertor {
455478
}
456479
}
457480

481+
#[derive(Debug)]
458482
enum SynToken {
459483
Ordiniary(SyntaxToken),
460484
Punch(SyntaxToken, TextUnit),
@@ -592,11 +616,14 @@ fn delim_to_str(d: Option<tt::DelimiterKind>, closing: bool) -> SmolStr {
592616
}
593617

594618
impl<'a> TreeSink for TtTreeSink<'a> {
595-
fn token(&mut self, kind: SyntaxKind, n_tokens: u8) {
619+
fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
596620
if kind == L_DOLLAR || kind == R_DOLLAR {
597621
self.cursor = self.cursor.bump_subtree();
598622
return;
599623
}
624+
if kind == LIFETIME {
625+
n_tokens = 2;
626+
}
600627

601628
let mut last = self.cursor;
602629
for _ in 0..n_tokens {

crates/ra_proc_macro_srv/src/rustc_server.rs

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -76,7 +76,16 @@ impl Extend<TokenTree> for TokenStream {
7676
impl Extend<TokenStream> for TokenStream {
7777
fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
7878
for item in streams {
79-
self.subtree.token_trees.extend(&mut item.into_iter())
79+
for tkn in item {
80+
match tkn {
81+
tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
82+
self.subtree.token_trees.extend(subtree.token_trees);
83+
}
84+
_ => {
85+
self.subtree.token_trees.push(tkn);
86+
}
87+
}
88+
}
8089
}
8190
}
8291
}

crates/ra_proc_macro_srv/src/tests/fixtures/test_serialize_proc_macro.txt

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -25,8 +25,7 @@ SUBTREE $
2525
SUBTREE () 4294967295
2626
IDENT feature 4294967295
2727
PUNCH = [alone] 4294967295
28-
SUBTREE $
29-
LITERAL "cargo-clippy" 0
28+
LITERAL "cargo-clippy" 0
3029
PUNCH , [alone] 4294967295
3130
IDENT allow 4294967295
3231
SUBTREE () 4294967295

0 commit comments

Comments
 (0)