@@ -26,21 +26,21 @@ use std::io::File;
 
 use syntax::parse;
 use syntax::parse::lexer;
-use rustc::driver::{session, config};
+use rustc::session::{mod, config};
 
 use syntax::ast;
 use syntax::ast::Name;
 use syntax::parse::token;
 use syntax::parse::lexer::TokenAndSpan;
 
-fn parse_token_list(file: &str) -> HashMap<String, Token> {
-    fn id() -> Token {
+fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
+    fn id() -> token::Token {
         token::Ident(ast::Ident { name: Name(0), ctxt: 0, }, token::Plain)
     }
 
     let mut res = HashMap::new();
 
-    res.insert("-1".to_string(), EOF);
+    res.insert("-1".to_string(), token::Eof);
 
     for line in file.split('\n') {
         let eq = match line.trim().rfind('=') {
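Note: the import fix above does two things: `session` moved out of `rustc::driver` into `rustc::session`, and the pre-1.0 `mod` keyword in a use list (later renamed to `self`) imports the module itself alongside `config`, which is what lets `main` keep calling `session::build_session(...)` further down. A minimal sketch of the two spellings (the modern one is for comparison only, not part of the commit):

    // Pre-1.0 syntax, as in this commit: import the module plus one item.
    use rustc::session::{mod, config};
    // Later Rust spells the same import with `self`:
    use rustc::session::{self, config};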
@@ -60,7 +60,7 @@ fn parse_token_list(file: &str) -> HashMap<String, Token> {
             "INT_SUFFIX" => id(),
             "SHL" => token::BinOp(token::Shl),
             "LBRACE" => token::OpenDelim(token::Brace),
-            "RARROW" => token::Rarrow,
+            "RARROW" => token::RArrow,
             "LIT_STR" => token::Literal(token::Str_(Name(0))),
             "DOTDOT" => token::DotDot,
             "MOD_SEP" => token::ModSep,
@@ -78,7 +78,7 @@ fn parse_token_list(file: &str) -> HashMap<String, Token> {
             "LIFETIME" => token::Lifetime(ast::Ident { name: Name(0), ctxt: 0 }),
             "CARET" => token::BinOp(token::Caret),
             "TILDE" => token::Tilde,
-            "IDENT" => token::Id(),
+            "IDENT" => id(),
             "PLUS" => token::BinOp(token::Plus),
             "LIT_CHAR" => token::Literal(token::Char(Name(0))),
             "LIT_BYTE" => token::Literal(token::Byte(Name(0))),
@@ -119,7 +119,7 @@ fn parse_token_list(file: &str) -> HashMap<String, Token> {
     res
 }
 
-fn str_to_binop(s: &str) -> BinOpToken {
+fn str_to_binop(s: &str) -> token::BinOpToken {
     match s {
         "+" => token::Plus,
         "/" => token::Slash,
@@ -167,7 +167,7 @@ fn count(lit: &str) -> uint {
     lit.chars().take_while(|c| *c == '#').count()
 }
 
-fn parse_antlr_token(s: &str, tokens: &HashMap<String, Token>) -> TokenAndSpan {
+fn parse_antlr_token(s: &str, tokens: &HashMap<String, token::Token>) -> TokenAndSpan {
     let re = regex!(
       r"\[@(?P<seq>\d+),(?P<start>\d+):(?P<end>\d+)='(?P<content>.+?)',<(?P<toknum>-?\d+)>,\d+:\d+]"
     );
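Note: the `regex!` pattern encodes ANTLR's default token-dump format. A line such as the following (all values illustrative, not taken from the commit) matches the pattern, binding seq=42, start=105, end=109, content='match', and toknum=52; the trailing line:column pair (`6:4`) is matched but unused:

    [@42,105:109='match',<52>,6:4]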
@@ -178,7 +178,7 @@ fn parse_antlr_token(s: &str, tokens: &HashMap<String, Token>) -> TokenAndSpan {
     let toknum = m.name("toknum");
     let content = m.name("content");
 
-    let proto_tok = tokens.get(&toknum).expect(format!("didn't find token {} in the map",
+    let proto_tok = tokens.get(toknum).expect(format!("didn't find token {} in the map",
                                                        toknum).as_slice());
 
     let nm = parse::token::intern(content);
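Note: dropping the `&` in `tokens.get(toknum)` tracks the collections reform: `m.name("toknum")` already yields a `&str`, and `HashMap<String, _>::get` looks keys up through the borrowed form of the key, so `&toknum` would hand it a `&&str`. A minimal sketch of the same lookup against today's std (names illustrative):

    use std::collections::HashMap;

    fn lookup() {
        let mut tokens: HashMap<String, u32> = HashMap::new();
        tokens.insert("-1".to_string(), 0);

        let toknum: &str = "-1";
        let _tok = tokens.get(toknum); // ok: &str lookup via Borrow<str>
        // tokens.get(&toknum) would pass a &&str and fail to type-check
    }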
@@ -206,7 +206,8 @@ fn parse_antlr_token(s: &str, tokens: &HashMap<String, Token>) -> TokenAndSpan {
         ref t => t.clone()
     };
 
-    let offset = if real_tok == EOF {
+    let offset = if real_tok == token::Eof
+    {
         1
     } else {
         0
@@ -224,7 +225,7 @@ fn parse_antlr_token(s: &str, tokens: &HashMap<String, Token>) -> TokenAndSpan {
     }
 }
 
-fn tok_cmp(a: &Token, b: &Token) -> bool {
+fn tok_cmp(a: &token::Token, b: &token::Token) -> bool {
     match a {
         &token::Ident(id, _) => match b {
             &token::Ident(id2, _) => id == id2,
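Note: `tok_cmp` treats two `Ident` tokens as equal whenever their `ast::Ident` payloads match, ignoring the second field (the identifier style, e.g. `token::Plain` as used in the token table above); the signature change here is the same `Token` to `token::Token` qualification as in the earlier hunks.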
@@ -242,25 +243,25 @@ fn main() {
 
     let args = std::os::args();
 
-    let mut token_file = File::open(&Path::new(args.get(2).as_slice()));
+    let mut token_file = File::open(&Path::new(args[2].as_slice()));
     let token_map = parse_token_list(token_file.read_to_string().unwrap().as_slice());
 
     let mut stdin = std::io::stdin();
     let mut antlr_tokens = stdin.lines().map(|l| parse_antlr_token(l.unwrap().as_slice().trim(),
                                                                    &token_map));
 
-    let code = File::open(&Path::new(args.get(1).as_slice())).unwrap().read_to_string().unwrap();
+    let code = File::open(&Path::new(args[1].as_slice())).unwrap().read_to_string().unwrap();
     let options = config::basic_options();
     let session = session::build_session(options, None,
-                                         syntax::diagnostics::registry::Registry::new([]));
+                                         syntax::diagnostics::registry::Registry::new(&[]));
     let filemap = parse::string_to_filemap(&session.parse_sess,
                                            code,
                                            String::from_str("<n/a>"));
     let mut lexer = lexer::StringReader::new(session.diagnostic(), filemap);
 
     for antlr_tok in antlr_tokens {
         let rustc_tok = next(&mut lexer);
-        if rustc_tok.tok == EOF && antlr_tok.tok == EOF {
+        if rustc_tok.tok == token::Eof && antlr_tok.tok == token::Eof {
             continue
         }
 
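Note: the two indexing changes (`args.get(n)` to `args[n]`) presumably adapt to `Vec::get` switching to return `Option<&T>` during the collections reform, and `Registry::new(&[])` to that constructor now borrowing its slice of error descriptions. A sketch of the `Vec` API this hunk adapts to, in modern std terms:

    fn vec_access() {
        let args: Vec<String> = vec!["verify".into(), "code.rs".into(), "tokens.txt".into()];
        let by_index: &String = &args[1];           // panics if out of range
        let checked: Option<&String> = args.get(1); // `get` now returns Option
    }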
@@ -294,11 +295,11 @@ fn main() {
                     token::Literal(token::StrRaw(..)),
                     token::Literal(token::Binary(..)),
                     token::Literal(token::BinaryRaw(..)),
-                    Ident(..),
-                    Lifetime(..),
-                    Interpolated(..),
-                    DocComment(..),
-                    Shebang(..)
+                    token::Ident(..),
+                    token::Lifetime(..),
+                    token::Interpolated(..),
+                    token::DocComment(..),
+                    token::Shebang(..)
         );
     }
 }