Skip to content

Commit ffe2347

Browse files
committed
syntax: Keep full Tokens for macro_rules separators
1 parent 5c45343 commit ffe2347

File tree: 5 files changed (+32 additions, −37 deletions)

src/libsyntax/ext/tt/macro_parser.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -199,7 +199,7 @@ struct MatcherPos<'root, 'tt: 'root> {
199199
seq_op: Option<quoted::KleeneOp>,
200200

201201
/// The separator if we are in a repetition.
202-
sep: Option<TokenKind>,
202+
sep: Option<Token>,
203203

204204
/// The "parent" matcher position if we are in a repetition. That is, the matcher position just
205205
/// before we enter the sequence.

src/libsyntax/ext/tt/macro_rules.rs

Lines changed: 15 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ use crate::symbol::{Symbol, kw, sym};
1717
use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};
1818

1919
use errors::FatalError;
20-
use syntax_pos::{Span, DUMMY_SP, symbol::Ident};
20+
use syntax_pos::{Span, symbol::Ident};
2121
use log::debug;
2222

2323
use rustc_data_structures::fx::{FxHashMap};
@@ -266,17 +266,19 @@ pub fn compile(
266266
let argument_gram = vec![
267267
quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
268268
tts: vec![
269-
quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
270-
quoted::TokenTree::token(token::FatArrow, DUMMY_SP),
271-
quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")),
269+
quoted::TokenTree::MetaVarDecl(def.span, lhs_nm, ast::Ident::from_str("tt")),
270+
quoted::TokenTree::token(token::FatArrow, def.span),
271+
quoted::TokenTree::MetaVarDecl(def.span, rhs_nm, ast::Ident::from_str("tt")),
272272
],
273-
separator: Some(if body.legacy { token::Semi } else { token::Comma }),
273+
separator: Some(Token::new(
274+
if body.legacy { token::Semi } else { token::Comma }, def.span
275+
)),
274276
op: quoted::KleeneOp::OneOrMore,
275277
num_captures: 2,
276278
})),
277279
// to phase into semicolon-termination instead of semicolon-separation
278280
quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
279-
tts: vec![quoted::TokenTree::token(token::Semi, DUMMY_SP)],
281+
tts: vec![quoted::TokenTree::token(token::Semi, def.span)],
280282
separator: None,
281283
op: quoted::KleeneOp::ZeroOrMore,
282284
num_captures: 0
@@ -608,9 +610,8 @@ impl FirstSets {
608610
// If the sequence contents can be empty, then the first
609611
// token could be the separator token itself.
610612

611-
if let (Some(ref sep), true) = (seq_rep.separator.clone(),
612-
subfirst.maybe_empty) {
613-
first.add_one_maybe(TokenTree::token(sep.clone(), sp.entire()));
613+
if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
614+
first.add_one_maybe(TokenTree::Token(sep.clone()));
614615
}
615616

616617
// Reverse scan: Sequence comes before `first`.
@@ -658,9 +659,8 @@ impl FirstSets {
658659
// If the sequence contents can be empty, then the first
659660
// token could be the separator token itself.
660661

661-
if let (Some(ref sep), true) = (seq_rep.separator.clone(),
662-
subfirst.maybe_empty) {
663-
first.add_one_maybe(TokenTree::token(sep.clone(), sp.entire()));
662+
if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
663+
first.add_one_maybe(TokenTree::Token(sep.clone()));
664664
}
665665

666666
assert!(first.maybe_empty);
@@ -851,7 +851,7 @@ fn check_matcher_core(sess: &ParseSess,
851851
// against SUFFIX
852852
continue 'each_token;
853853
}
854-
TokenTree::Sequence(sp, ref seq_rep) => {
854+
TokenTree::Sequence(_, ref seq_rep) => {
855855
suffix_first = build_suffix_first();
856856
// The trick here: when we check the interior, we want
857857
// to include the separator (if any) as a potential
@@ -864,9 +864,9 @@ fn check_matcher_core(sess: &ParseSess,
864864
// work of cloning it? But then again, this way I may
865865
// get a "tighter" span?
866866
let mut new;
867-
let my_suffix = if let Some(ref u) = seq_rep.separator {
867+
let my_suffix = if let Some(sep) = &seq_rep.separator {
868868
new = suffix_first.clone();
869-
new.add_one_maybe(TokenTree::token(u.clone(), sp.entire()));
869+
new.add_one_maybe(TokenTree::Token(sep.clone()));
870870
&new
871871
} else {
872872
&suffix_first

src/libsyntax/ext/tt/quoted.rs

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -59,7 +59,7 @@ pub struct SequenceRepetition {
5959
/// The sequence of token trees
6060
pub tts: Vec<TokenTree>,
6161
/// The optional separator
62-
pub separator: Option<TokenKind>,
62+
pub separator: Option<Token>,
6363
/// Whether the sequence can be repeated zero (*), or one or more times (+)
6464
pub op: KleeneOp,
6565
/// The number of `Match`s that appear in the sequence (and subsequences)
@@ -424,7 +424,7 @@ fn parse_sep_and_kleene_op<I>(
424424
attrs: &[ast::Attribute],
425425
edition: Edition,
426426
macro_node_id: NodeId,
427-
) -> (Option<TokenKind>, KleeneOp)
427+
) -> (Option<Token>, KleeneOp)
428428
where
429429
I: Iterator<Item = tokenstream::TokenTree>,
430430
{
@@ -449,7 +449,7 @@ fn parse_sep_and_kleene_op_2015<I>(
449449
_features: &Features,
450450
_attrs: &[ast::Attribute],
451451
macro_node_id: NodeId,
452-
) -> (Option<TokenKind>, KleeneOp)
452+
) -> (Option<Token>, KleeneOp)
453453
where
454454
I: Iterator<Item = tokenstream::TokenTree>,
455455
{
@@ -502,7 +502,7 @@ where
502502
a hard error in an upcoming edition",
503503
);
504504

505-
return (Some(token::Question), op);
505+
return (Some(Token::new(token::Question, op1_span)), op);
506506
}
507507

508508
// #2 is a random token (this is an error) :(
@@ -541,7 +541,7 @@ where
541541
}
542542

543543
// #2 is a KleeneOp :D
544-
Ok(Ok((op, _))) => return (Some(token.kind), op),
544+
Ok(Ok((op, _))) => return (Some(token), op),
545545

546546
// #2 is a random token :(
547547
Ok(Err(token)) => token.span,
@@ -567,7 +567,7 @@ fn parse_sep_and_kleene_op_2018<I>(
567567
sess: &ParseSess,
568568
_features: &Features,
569569
_attrs: &[ast::Attribute],
570-
) -> (Option<TokenKind>, KleeneOp)
570+
) -> (Option<Token>, KleeneOp)
571571
where
572572
I: Iterator<Item = tokenstream::TokenTree>,
573573
{
@@ -596,7 +596,7 @@ where
596596
}
597597

598598
// #2 is a KleeneOp :D
599-
Ok(Ok((op, _))) => return (Some(token.kind), op),
599+
Ok(Ok((op, _))) => return (Some(token), op),
600600

601601
// #2 is a random token :(
602602
Ok(Err(token)) => token.span,

src/libsyntax/ext/tt/transcribe.rs

Lines changed: 7 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -4,11 +4,10 @@ use crate::ext::expand::Marker;
44
use crate::ext::tt::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch};
55
use crate::ext::tt::quoted;
66
use crate::mut_visit::noop_visit_tt;
7-
use crate::parse::token::{self, NtTT, TokenKind};
7+
use crate::parse::token::{self, NtTT, Token, TokenKind};
88
use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
99

1010
use smallvec::{smallvec, SmallVec};
11-
use syntax_pos::DUMMY_SP;
1211

1312
use rustc_data_structures::fx::FxHashMap;
1413
use rustc_data_structures::sync::Lrc;
@@ -18,7 +17,7 @@ use std::rc::Rc;
1817
/// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
1918
enum Frame {
2019
Delimited { forest: Lrc<quoted::Delimited>, idx: usize, span: DelimSpan },
21-
Sequence { forest: Lrc<quoted::SequenceRepetition>, idx: usize, sep: Option<TokenKind> },
20+
Sequence { forest: Lrc<quoted::SequenceRepetition>, idx: usize, sep: Option<Token> },
2221
}
2322

2423
impl Frame {
@@ -109,17 +108,13 @@ pub fn transcribe(
109108
else {
110109
// Otherwise, if we have just reached the end of a sequence and we can keep repeating,
111110
// go back to the beginning of the sequence.
112-
if let Frame::Sequence { ref mut idx, ref sep, .. } = *stack.last_mut().unwrap() {
113-
let (ref mut repeat_idx, repeat_len) = *repeats.last_mut().unwrap();
111+
if let Frame::Sequence { idx, sep, .. } = stack.last_mut().unwrap() {
112+
let (repeat_idx, repeat_len) = repeats.last_mut().unwrap();
114113
*repeat_idx += 1;
115-
if *repeat_idx < repeat_len {
114+
if repeat_idx < repeat_len {
116115
*idx = 0;
117-
if let Some(sep) = sep.clone() {
118-
let prev_span = match result.last() {
119-
Some((tt, _)) => tt.span(),
120-
None => DUMMY_SP,
121-
};
122-
result.push(TokenTree::token(sep, prev_span).into());
116+
if let Some(sep) = sep {
117+
result.push(TokenTree::Token(sep.clone()).into());
123118
}
124119
continue;
125120
}

src/test/ui/macros/macro-input-future-proofing.stderr

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -55,10 +55,10 @@ LL | ($($a:ty, $b:ty)* -) => ();
5555
= note: allowed there are: `{`, `[`, `=>`, `,`, `>`, `=`, `:`, `;`, `|`, `as` or `where`
5656

5757
error: `$ty:ty` is followed by `-`, which is not allowed for `ty` fragments
58-
--> $DIR/macro-input-future-proofing.rs:18:7
58+
--> $DIR/macro-input-future-proofing.rs:18:15
5959
|
6060
LL | ($($ty:ty)-+) => ();
61-
| ^^^^^^^^ not allowed after `ty` fragments
61+
| ^ not allowed after `ty` fragments
6262
|
6363
= note: allowed there are: `{`, `[`, `=>`, `,`, `>`, `=`, `:`, `;`, `|`, `as` or `where`
6464

Comments (0)