@@ -100,20 +100,21 @@ matching, by enforcing the following restrictions on its input:
100
100
use std::collections::{HashSet, HashMap};
101
101
use std::collections::hash_map::Entry::{Occupied, Vacant};
102
102
103
- use syntax::diagnostic::FatalError;
104
- use syntax::ptr::P;
105
- use syntax::codemap::{Span, Spanned, spanned};
106
103
use syntax::ast;
107
- use syntax::parse::parser::{Parser, Restrictions};
108
- use syntax::parse::{token, parser, classify};
109
- use syntax::parse;
104
+ use syntax::codemap::{Span, Spanned, spanned};
105
+ use syntax::errors::DiagnosticBuilder;
110
106
use syntax::ext::base::{ExtCtxt, MacResult, MacEager};
107
+ use syntax::parse;
108
+ use syntax::parse::{token, parser, classify};
109
+ use syntax::parse::parser::{Parser, Restrictions};
110
+ use syntax::ptr::P;
111
+ use syntax::tokenstream::TokenTree;
111
112
112
113
use self::TagKind::{StartTag, EndTag};
113
114
use self::LHS::{Pat, Tags};
114
115
use self::RHS::{Else, Expr};
115
116
116
- type Tokens = Vec<ast::TokenTree>;
117
+ type Tokens = Vec<TokenTree>;
117
118
118
119
// FIXME: duplicated in src/tokenizer/interface.rs
119
120
#[derive(PartialEq, Eq, Hash, Copy, Clone, Debug)]
@@ -170,22 +171,22 @@ fn push_all<T>(lhs: &mut Vec<T>, rhs: Vec<T>) {
170
171
    lhs.extend(rhs.into_iter());
171
172
}
172
173
173
- fn parse_spanned_ident(parser: &mut Parser) -> Result<ast::SpannedIdent, FatalError> {
174
+ fn parse_spanned_ident<'a>(parser: &mut Parser<'a>) -> Result<ast::SpannedIdent, DiagnosticBuilder<'a>> {
174
175
    let lo = parser.span.lo;
175
176
    let ident = try!(parser.parse_ident());
176
177
    let hi = parser.last_span.hi;
177
178
    Ok(spanned(lo, hi, ident))
178
179
}
179
180
180
- fn parse_tag(parser: &mut Parser) -> Result<Spanned<Tag>, FatalError> {
181
+ fn parse_tag<'a>(parser: &mut Parser<'a>) -> Result<Spanned<Tag>, DiagnosticBuilder<'a>> {
181
182
    let lo = parser.span.lo;
182
183
    try!(parser.expect(&token::Lt));
183
184
184
-     let kind = match try!(parser.eat(&token::BinOp(token::Slash))) {
185
+     let kind = match parser.eat(&token::BinOp(token::Slash)) {
185
186
        true => EndTag,
186
187
        false => StartTag,
187
188
    };
188
-     let name = match try!(parser.eat(&token::Underscore)) {
189
+     let name = match parser.eat(&token::Underscore) {
189
190
        true => None,
190
191
        false => Some((*try!(parser.parse_ident()).name.as_str()).to_owned()),
191
192
    };
@@ -198,18 +199,18 @@ fn parse_tag(parser: &mut Parser) -> Result<Spanned<Tag>, FatalError> {
198
199
}
199
200
200
201
/// Parse a `match_token!` invocation into the little AST defined above.
201
- fn parse(cx: &mut ExtCtxt, toks: &[ast::TokenTree]) -> Result<Match, FatalError> {
202
+ fn parse<'a>(cx: &'a mut ExtCtxt, toks: &[TokenTree]) -> Result<Match, DiagnosticBuilder<'a>> {
202
203
    let mut parser = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), toks.to_vec());
203
204
204
-     let discriminant = try!(parser.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL));
205
-     try!(parser.commit_expr_expecting(&*discriminant, token::OpenDelim(token::Brace)));
205
+     let discriminant = try!(parser.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL, None));
206
+     try!(parser.expect(&token::OpenDelim(token::Brace)));
206
207
207
208
    let mut arms: Vec<Arm> = Vec::new();
208
209
    while parser.token != token::CloseDelim(token::Brace) {
209
210
        let mut binding = None;
210
211
        if parser.look_ahead(1, |t| *t == token::At) {
211
212
            binding = Some(try!(parse_spanned_ident(&mut parser)));
212
-             try!(parser.bump()); // Consume the @
213
+             parser.bump(); // Consume the @
213
214
        }
214
215
215
216
        let lhs_lo = parser.span.lo;
@@ -230,22 +231,22 @@ fn parse(cx: &mut ExtCtxt, toks: &[ast::TokenTree]) -> Result<Match, FatalError>
230
231
231
232
        let rhs_lo = parser.span.lo;
232
233
        let mut rhs_hi = parser.span.hi;
233
-         let rhs = if try!(parser.eat_keyword(token::keywords::Else)) {
234
+         let rhs = if parser.eat_keyword(token::keywords::Else) {
234
235
            try!(parser.expect(&token::Comma));
235
236
            Else
236
237
        } else {
237
-             let expr = try!(parser.parse_expr_res(Restrictions::RESTRICTION_STMT_EXPR));
238
+             let expr = try!(parser.parse_expr_res(Restrictions::RESTRICTION_STMT_EXPR, None));
238
239
            rhs_hi = parser.last_span.hi;
239
240
240
241
            let require_comma =
241
242
                !classify::expr_is_simple_block(&*expr)
242
243
                && parser.token != token::CloseDelim(token::Brace);
243
244
244
245
            if require_comma {
245
-                 try!(parser.commit_expr(
246
-                     &*expr, &[token::Comma], &[token::CloseDelim(token::Brace)]));
246
+                 try!(parser.expect_one_of(
247
+                     &[token::Comma], &[token::CloseDelim(token::Brace)]));
247
248
            } else {
248
-                 try!(parser.eat(&token::Comma));
249
+                 parser.eat(&token::Comma);
249
250
            }
250
251
251
252
            Expr(expr)
@@ -259,7 +260,7 @@ fn parse(cx: &mut ExtCtxt, toks: &[ast::TokenTree]) -> Result<Match, FatalError>
259
260
}
260
261
261
262
// Consume the closing brace
262
-     try!(parser.bump());
263
+     parser.bump();
263
264
264
265
    Ok(Match {
265
266
        discriminant: discriminant,
@@ -300,8 +301,8 @@ macro_rules! ext_err_if {
300
301
}
301
302
302
303
/// Expand the `match_token!` macro.
303
- pub fn expand_to_tokens(cx: &mut ExtCtxt, span: Span, toks: &[ast::TokenTree])
304
-     -> Result<Vec<ast::TokenTree>, (Span, &'static str)> {
304
+ pub fn expand_to_tokens(cx: &mut ExtCtxt, span: Span, toks: &[TokenTree])
305
+     -> Result<Vec<TokenTree>, (Span, &'static str)> {
305
306
    let Match { discriminant, mut arms } = panictry!(parse(cx, toks));
306
307
307
308
// Handle the last arm specially at the end.
@@ -436,7 +437,7 @@ pub fn expand_to_tokens(cx: &mut ExtCtxt, span: Span, toks: &[ast::TokenTree])
436
437
        (None, Tags(_), _) => ext_err!(lhs.span, "the last arm cannot have tag patterns"),
437
438
        (None, _, Else) => ext_err!(rhs.span, "the last arm cannot use 'else'"),
438
439
        (None, Pat(p), Expr(e)) => match p.node {
439
-             ast::PatWild | ast::PatIdent(..) => (p, e),
440
+             ast::PatKind::Wild | ast::PatKind::Ident(..) => (p, e),
440
441
            _ => ext_err!(lhs.span, "the last arm must have a wildcard or ident pattern"),
441
442
        },
442
443
    };
0 commit comments