Commit dd3a0f9

---
yaml --- r: 160295 b: refs/heads/master c: 9d20a46 h: refs/heads/master i: 160293: a912670 160291: 873972c 160287: fd3b4f0 v: v3
1 parent 8e13442 commit dd3a0f9

File tree

2 files changed: 22 additions & 21 deletions


[refs]

Lines changed: 1 addition & 1 deletion
@@ -1,5 +1,5 @@
 ---
-refs/heads/master: 5b5638f6863803477d56e200d6a9a208015838c1
+refs/heads/master: 9d20a46799178df9d2fb28dfec95ba55cbfb7f9c
 refs/heads/snap-stage1: e33de59e47c5076a89eadeb38f4934f58a3618a6
 refs/heads/snap-stage3: 96c8f2b0c1846756e617f1f1fc1372c506e24248
 refs/heads/try: 225de0d60f8ca8dcc62ab2fd8818ebbda4b58cfe

trunk/src/grammar/verify.rs

Lines changed: 21 additions & 20 deletions
@@ -26,21 +26,21 @@ use std::io::File;
 
 use syntax::parse;
 use syntax::parse::lexer;
-use rustc::driver::{session, config};
+use rustc::session::{mod, config};
 
 use syntax::ast;
 use syntax::ast::Name;
 use syntax::parse::token;
 use syntax::parse::lexer::TokenAndSpan;
 
-fn parse_token_list(file: &str) -> HashMap<String, Token> {
-    fn id() -> Token {
+fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
+    fn id() -> token::Token {
         token::Ident(ast::Ident { name: Name(0), ctxt: 0, }, token::Plain)
     }
 
     let mut res = HashMap::new();
 
-    res.insert("-1".to_string(), EOF);
+    res.insert("-1".to_string(), token::Eof);
 
     for line in file.split('\n') {
         let eq = match line.trim().rfind('=') {
@@ -60,7 +60,7 @@ fn parse_token_list(file: &str) -> HashMap<String, Token> {
             "INT_SUFFIX" => id(),
             "SHL" => token::BinOp(token::Shl),
             "LBRACE" => token::OpenDelim(token::Brace),
-            "RARROW" => token::Rarrow,
+            "RARROW" => token::RArrow,
             "LIT_STR" => token::Literal(token::Str_(Name(0))),
             "DOTDOT" => token::DotDot,
             "MOD_SEP" => token::ModSep,
@@ -78,7 +78,7 @@ fn parse_token_list(file: &str) -> HashMap<String, Token> {
             "LIFETIME" => token::Lifetime(ast::Ident { name: Name(0), ctxt: 0 }),
             "CARET" => token::BinOp(token::Caret),
             "TILDE" => token::Tilde,
-            "IDENT" => token::Id(),
+            "IDENT" => id(),
             "PLUS" => token::BinOp(token::Plus),
             "LIT_CHAR" => token::Literal(token::Char(Name(0))),
             "LIT_BYTE" => token::Literal(token::Byte(Name(0))),
@@ -119,7 +119,7 @@ fn parse_token_list(file: &str) -> HashMap<String, Token> {
     res
 }
 
-fn str_to_binop(s: &str) -> BinOpToken {
+fn str_to_binop(s: &str) -> token::BinOpToken {
     match s {
         "+" => token::Plus,
         "/" => token::Slash,
@@ -167,7 +167,7 @@ fn count(lit: &str) -> uint {
     lit.chars().take_while(|c| *c == '#').count()
 }
 
-fn parse_antlr_token(s: &str, tokens: &HashMap<String, Token>) -> TokenAndSpan {
+fn parse_antlr_token(s: &str, tokens: &HashMap<String, token::Token>) -> TokenAndSpan {
     let re = regex!(
         r"\[@(?P<seq>\d+),(?P<start>\d+):(?P<end>\d+)='(?P<content>.+?)',<(?P<toknum>-?\d+)>,\d+:\d+]"
     );
@@ -178,7 +178,7 @@ fn parse_antlr_token(s: &str, tokens: &HashMap<String, Token>) -> TokenAndSpan {
     let toknum = m.name("toknum");
     let content = m.name("content");
 
-    let proto_tok = tokens.get(&toknum).expect(format!("didn't find token {} in the map",
+    let proto_tok = tokens.get(toknum).expect(format!("didn't find token {} in the map",
                                                        toknum).as_slice());
 
     let nm = parse::token::intern(content);
@@ -206,7 +206,8 @@ fn parse_antlr_token(s: &str, tokens: &HashMap<String, Token>) -> TokenAndSpan {
         ref t => t.clone()
     };
 
-    let offset = if real_tok == EOF {
+    let offset = if real_tok == token::Eof
+    {
         1
     } else {
         0
@@ -224,7 +225,7 @@ fn parse_antlr_token(s: &str, tokens: &HashMap<String, Token>) -> TokenAndSpan {
     }
 }
 
-fn tok_cmp(a: &Token, b: &Token) -> bool {
+fn tok_cmp(a: &token::Token, b: &token::Token) -> bool {
     match a {
         &token::Ident(id, _) => match b {
             &token::Ident(id2, _) => id == id2,
@@ -242,25 +243,25 @@ fn main() {
 
     let args = std::os::args();
 
-    let mut token_file = File::open(&Path::new(args.get(2).as_slice()));
+    let mut token_file = File::open(&Path::new(args[2].as_slice()));
     let token_map = parse_token_list(token_file.read_to_string().unwrap().as_slice());
 
     let mut stdin = std::io::stdin();
     let mut antlr_tokens = stdin.lines().map(|l| parse_antlr_token(l.unwrap().as_slice().trim(),
                                                                    &token_map));
 
-    let code = File::open(&Path::new(args.get(1).as_slice())).unwrap().read_to_string().unwrap();
+    let code = File::open(&Path::new(args[1].as_slice())).unwrap().read_to_string().unwrap();
     let options = config::basic_options();
     let session = session::build_session(options, None,
-                                         syntax::diagnostics::registry::Registry::new([]));
+                                         syntax::diagnostics::registry::Registry::new(&[]));
     let filemap = parse::string_to_filemap(&session.parse_sess,
                                            code,
                                            String::from_str("<n/a>"));
     let mut lexer = lexer::StringReader::new(session.diagnostic(), filemap);
 
     for antlr_tok in antlr_tokens {
         let rustc_tok = next(&mut lexer);
-        if rustc_tok.tok == EOF && antlr_tok.tok == EOF {
+        if rustc_tok.tok == token::Eof && antlr_tok.tok == token::Eof {
             continue
         }
 
@@ -294,11 +295,11 @@ fn main() {
             token::Literal(token::StrRaw(..)),
             token::Literal(token::Binary(..)),
             token::Literal(token::BinaryRaw(..)),
-            Ident(..),
-            Lifetime(..),
-            Interpolated(..),
-            DocComment(..),
-            Shebang(..)
+            token::Ident(..),
+            token::Lifetime(..),
+            token::Interpolated(..),
+            token::DocComment(..),
+            token::Shebang(..)
         );
     }
 }
