
Commit 8a4339b

---
yaml --- r: 13509 b: refs/heads/master c: 32167f5 h: refs/heads/master i: 13507: cd21572 v: v3
1 parent 7ab2581 commit 8a4339b

8 files changed, +286 −248 lines changed

[refs]

Lines changed: 1 addition & 1 deletion
@@ -1,5 +1,5 @@
 ---
-refs/heads/master: 473b1ec0a09db8aee9fde61a55cbe5074422c91f
+refs/heads/master: 32167f52b018e319ac1e62a9713b771566bebe8e
 refs/heads/snap-stage1: e33de59e47c5076a89eadeb38f4934f58a3618a6
 refs/heads/snap-stage3: 4a81779abd786ff22d71434c6d9a5917ea4cdfff
 refs/heads/try: 2898dcc5d97da9427ac367542382b6239d9c0bbf

trunk/src/libsyntax/ast.rs

Lines changed: 3 additions & 6 deletions
@@ -374,13 +374,10 @@ enum blk_sort {
 */
 
 #[auto_serialize]
-type token_tree = spanned<token_tree_>;
-
-#[auto_serialize]
-enum token_tree_ {
+enum token_tree {
     /* for macro invocations; parsing is the macro's job */
-    tt_delim(token::token, [token_tree]),
-    tt_flat(token::token)
+    tt_delim([token_tree]),
+    tt_flat(uint, token::token)
 }
 
 #[auto_serialize]
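
Note on the change above: token_tree loses its spanned<token_tree_> wrapper, tt_delim stops carrying the delimiter token, and tt_flat now records its own offset (a uint) next to the token. Below is a minimal sketch of that shape in modern Rust syntax; the Token type and the usize offset are placeholders for illustration, not the crate's actual token::token or span machinery.

// Illustrative sketch only (modern Rust); `Token` stands in for the old
// `token::token` type, and the `usize` mirrors the `uint` offset that
// `tt_flat` now carries instead of an outer `spanned<_>` wrapper.
#[derive(Clone, Debug)]
enum Token {
    LParen,
    RParen,
    Ident(String),
}

#[derive(Clone, Debug)]
enum TokenTree {
    // A delimited sequence of trees; the delimiter token itself is no
    // longer stored in the variant.
    Delim(Vec<TokenTree>),
    // A single token, tagged with the offset where it starts.
    Flat(usize, Token),
}

fn main() {
    let tt = TokenTree::Delim(vec![
        TokenTree::Flat(0, Token::LParen),
        TokenTree::Flat(1, Token::Ident("x".to_string())),
        TokenTree::Flat(2, Token::RParen),
    ]);
    println!("{:?}", tt);
}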

trunk/src/libsyntax/parse.rs

Lines changed: 51 additions & 34 deletions
@@ -4,6 +4,7 @@ import dvec::extensions;
 export parse_sess;
 export next_node_id;
 export new_parser_from_file;
+export new_parser_etc_from_file;
 export new_parser_from_source_str;
 export parse_crate_from_file;
 export parse_crate_from_crate_file;
@@ -17,7 +18,7 @@ import attr::parser_attr;
 import common::parser_common;
 import ast::node_id;
 import util::interner;
-import lexer::reader;
+import lexer::{string_reader_as_reader, reader, string_reader};
 
 type parse_sess = @{
     cm: codemap::codemap,
@@ -42,14 +43,15 @@ fn parse_crate_from_file(input: str, cfg: ast::crate_cfg, sess: parse_sess) ->
 
 fn parse_crate_from_crate_file(input: str, cfg: ast::crate_cfg,
                                sess: parse_sess) -> @ast::crate {
-    let p = new_parser_from_file(sess, cfg, input, parser::CRATE_FILE);
+    let (p, rdr) = new_parser_etc_from_file(sess, cfg, input,
+                                            parser::CRATE_FILE);
     let lo = p.span.lo;
-    let prefix = path::dirname(p.reader.filemap.name);
+    let prefix = path::dirname(input);
     let leading_attrs = p.parse_inner_attrs_and_next();
     let { inner: crate_attrs, next: first_cdir_attr } = leading_attrs;
     let cdirs = p.parse_crate_directives(token::EOF, first_cdir_attr);
-    sess.chpos = p.reader.chpos;
-    sess.byte_pos = sess.byte_pos + p.reader.pos;
+    sess.chpos = rdr.chpos;
+    sess.byte_pos = sess.byte_pos + rdr.pos;
     let cx = @{sess: sess, cfg: /* FIXME: bad */ copy p.cfg};
     let (companionmod, _) = path::splitext(path::basename(input));
     let (m, attrs) = eval::eval_crate_directives_to_mod(
@@ -65,41 +67,42 @@ fn parse_crate_from_crate_file(input: str, cfg: ast::crate_cfg,
 
 fn parse_crate_from_source_file(input: str, cfg: ast::crate_cfg,
                                 sess: parse_sess) -> @ast::crate {
-    let p = new_parser_from_file(sess, cfg, input, parser::SOURCE_FILE);
+    let (p, rdr) = new_parser_etc_from_file(sess, cfg, input,
+                                            parser::SOURCE_FILE);
     let r = p.parse_crate_mod(cfg);
-    sess.chpos = p.reader.chpos;
-    sess.byte_pos = sess.byte_pos + p.reader.pos;
+    sess.chpos = rdr.chpos;
+    sess.byte_pos = sess.byte_pos + rdr.pos;
     ret r;
 }
 
 fn parse_crate_from_source_str(name: str, source: @str, cfg: ast::crate_cfg,
                                sess: parse_sess) -> @ast::crate {
-    let p = new_parser_from_source_str(
-        sess, cfg, name, codemap::fss_none, source);
+    let (p, rdr) = new_parser_etc_from_source_str(sess, cfg, name,
+                                                  codemap::fss_none, source);
     let r = p.parse_crate_mod(cfg);
-    sess.chpos = p.reader.chpos;
-    sess.byte_pos = sess.byte_pos + p.reader.pos;
+    sess.chpos = rdr.chpos;
+    sess.byte_pos = sess.byte_pos + rdr.pos;
     ret r;
 }
 
 fn parse_expr_from_source_str(name: str, source: @str, cfg: ast::crate_cfg,
                               sess: parse_sess) -> @ast::expr {
-    let p = new_parser_from_source_str(
-        sess, cfg, name, codemap::fss_none, source);
+    let (p, rdr) = new_parser_etc_from_source_str(sess, cfg, name,
+                                                  codemap::fss_none, source);
     let r = p.parse_expr();
-    sess.chpos = p.reader.chpos;
-    sess.byte_pos = sess.byte_pos + p.reader.pos;
+    sess.chpos = rdr.chpos;
+    sess.byte_pos = sess.byte_pos + rdr.pos;
     ret r;
 }
 
 fn parse_item_from_source_str(name: str, source: @str, cfg: ast::crate_cfg,
                               +attrs: [ast::attribute], vis: ast::visibility,
                               sess: parse_sess) -> option<@ast::item> {
-    let p = new_parser_from_source_str(
-        sess, cfg, name, codemap::fss_none, source);
+    let (p, rdr) = new_parser_etc_from_source_str(sess, cfg, name,
+                                                  codemap::fss_none, source);
     let r = p.parse_item(attrs, vis);
-    sess.chpos = p.reader.chpos;
-    sess.byte_pos = sess.byte_pos + p.reader.pos;
+    sess.chpos = rdr.chpos;
+    sess.byte_pos = sess.byte_pos + rdr.pos;
     ret r;
 }
 
@@ -109,13 +112,14 @@ fn parse_from_source_str<T>(f: fn (p: parser) -> T,
                             sess: parse_sess)
     -> T
 {
-    let p = new_parser_from_source_str(sess, cfg, name, ss, source);
+    let (p, rdr) = new_parser_etc_from_source_str(sess, cfg, name, ss,
+                                                  source);
     let r = f(p);
     if !p.reader.is_eof() {
         p.reader.fatal("expected end-of-string");
     }
-    sess.chpos = p.reader.chpos;
-    sess.byte_pos = sess.byte_pos + p.reader.pos;
+    sess.chpos = rdr.chpos;
+    sess.byte_pos = sess.byte_pos + rdr.pos;
     ret r;
 }
 
@@ -127,9 +131,9 @@ fn next_node_id(sess: parse_sess) -> node_id {
     ret rv;
 }
 
-fn new_parser_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
-                              +name: str, +ss: codemap::file_substr,
-                              source: @str) -> parser {
+fn new_parser_etc_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
+                                  +name: str, +ss: codemap::file_substr,
+                                  source: @str) -> (parser, string_reader) {
     let ftype = parser::SOURCE_FILE;
     let filemap = codemap::new_filemap_w_substr
         (name, ss, source, sess.chpos, sess.byte_pos);
@@ -138,14 +142,21 @@ fn new_parser_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
         {|x|str::hash(*x)},
         {|x,y|str::eq(*x, *y)}
     );
-    let rdr = lexer::new_reader(sess.span_diagnostic,
-                                filemap, itr);
-    ret parser(sess, cfg, rdr, ftype);
+    let srdr = lexer::new_string_reader(sess.span_diagnostic, filemap, itr);
+    ret (parser(sess, cfg, srdr as reader, ftype), srdr);
 }
 
-fn new_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg, +path: str,
-                        ftype: parser::file_type) ->
-    parser {
+fn new_parser_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
+                              +name: str, +ss: codemap::file_substr,
+                              source: @str) -> parser {
+    let (p, _) = new_parser_etc_from_source_str(sess, cfg, name, ss, source);
+    ret p;
+}
+
+
+fn new_parser_etc_from_file(sess: parse_sess, cfg: ast::crate_cfg, +path: str,
+                            ftype: parser::file_type) ->
+    (parser, string_reader) {
     let res = io::read_whole_file_str(path);
     alt res {
       result::ok(_) { /* Continue. */ }
@@ -158,6 +169,12 @@ fn new_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg, +path: str,
         {|x|str::hash(*x)},
        {|x,y|str::eq(*x, *y)}
     );
-    let rdr = lexer::new_reader(sess.span_diagnostic, filemap, itr);
-    ret parser(sess, cfg, rdr, ftype);
+    let srdr = lexer::new_string_reader(sess.span_diagnostic, filemap, itr);
+    ret (parser(sess, cfg, srdr as reader, ftype), srdr);
+}
+
+fn new_parser_from_file(sess: parse_sess, cfg: ast::crate_cfg, +path: str,
+                        ftype: parser::file_type) -> parser {
+    let (p, _) = new_parser_etc_from_file(sess, cfg, path, ftype);
+    ret p;
 }
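
Note on the change above: each new_parser_etc_* constructor returns the parser together with the concrete string_reader, so callers can copy chpos and pos back into the session after parsing, while the old single-value constructors survive as thin wrappers that discard the reader. Below is a minimal sketch of that constructor-pair pattern in modern Rust syntax; the types, fields, and signatures are placeholders for illustration, not the libsyntax API.

// Illustrative sketch only (modern Rust) of the "etc" constructor pattern.
// `Parser`, `StringReader`, and their fields are stand-ins, not the actual
// libsyntax types.
struct StringReader {
    chpos: usize,
    pos: usize,
}

struct Parser {
    // In the real code the parser holds the reader behind an abstract
    // interface; elided here to keep the sketch small.
}

// "etc" constructor: hands back the concrete reader alongside the parser.
fn new_parser_etc_from_source_str(source: &str) -> (Parser, StringReader) {
    let reader = StringReader { chpos: 0, pos: source.len() };
    (Parser {}, reader)
}

// Plain constructor: a thin wrapper for callers that never touch the reader.
fn new_parser_from_source_str(source: &str) -> Parser {
    let (p, _) = new_parser_etc_from_source_str(source);
    p
}

fn main() {
    let (_p, rdr) = new_parser_etc_from_source_str("fn main() {}");
    // After parsing, the session records where the reader stopped,
    // mirroring `sess.chpos = rdr.chpos; sess.byte_pos += rdr.pos;`.
    println!("chpos = {}, pos = {}", rdr.chpos, rdr.pos);
}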

trunk/src/libsyntax/parse/comments.rs

Lines changed: 46 additions & 42 deletions
@@ -1,7 +1,8 @@
 import io::reader_util;
 import io::println;//XXXXXXXXxxx
 import util::interner;
-import lexer::{ reader, new_reader, next_token, is_whitespace };
+import lexer::{ string_reader, bump, is_eof, nextch, new_string_reader,
+                is_whitespace, get_str_from, string_reader_as_reader };
 
 export cmnt;
 export lit;
@@ -17,45 +18,46 @@ enum cmnt_style {
 
 type cmnt = {style: cmnt_style, lines: [str], pos: uint};
 
-fn read_to_eol(rdr: reader) -> str {
+fn read_to_eol(rdr: string_reader) -> str {
     let mut val = "";
-    while rdr.curr != '\n' && !rdr.is_eof() {
+    while rdr.curr != '\n' && !is_eof(rdr) {
         str::push_char(val, rdr.curr);
-        rdr.bump();
+        bump(rdr);
     }
-    if rdr.curr == '\n' { rdr.bump(); }
+    if rdr.curr == '\n' { bump(rdr); }
     ret val;
 }
 
-fn read_one_line_comment(rdr: reader) -> str {
+fn read_one_line_comment(rdr: string_reader) -> str {
     let val = read_to_eol(rdr);
     assert ((val[0] == '/' as u8 && val[1] == '/' as u8) ||
            (val[0] == '#' as u8 && val[1] == '!' as u8));
     ret val;
 }
 
-fn consume_non_eol_whitespace(rdr: reader) {
-    while is_whitespace(rdr.curr) && rdr.curr != '\n' && !rdr.is_eof() {
-        rdr.bump();
+fn consume_non_eol_whitespace(rdr: string_reader) {
+    while is_whitespace(rdr.curr) && rdr.curr != '\n' && !is_eof(rdr) {
+        bump(rdr);
     }
 }
 
-fn push_blank_line_comment(rdr: reader, &comments: [cmnt]) {
+fn push_blank_line_comment(rdr: string_reader, &comments: [cmnt]) {
     #debug(">>> blank-line comment");
     let v: [str] = [];
     comments += [{style: blank_line, lines: v, pos: rdr.chpos}];
 }
 
-fn consume_whitespace_counting_blank_lines(rdr: reader, &comments: [cmnt]) {
-    while is_whitespace(rdr.curr) && !rdr.is_eof() {
+fn consume_whitespace_counting_blank_lines(rdr: string_reader,
+                                           &comments: [cmnt]) {
+    while is_whitespace(rdr.curr) && !is_eof(rdr) {
         if rdr.col == 0u && rdr.curr == '\n' {
             push_blank_line_comment(rdr, comments);
         }
-        rdr.bump();
+        bump(rdr);
     }
 }
 
-fn read_shebang_comment(rdr: reader, code_to_the_left: bool) -> cmnt {
+fn read_shebang_comment(rdr: string_reader, code_to_the_left: bool) -> cmnt {
     #debug(">>> shebang comment");
     let p = rdr.chpos;
     #debug("<<< shebang comment");
@@ -64,11 +66,11 @@ fn read_shebang_comment(rdr: reader, code_to_the_left: bool) -> cmnt {
         pos: p};
 }
 
-fn read_line_comments(rdr: reader, code_to_the_left: bool) -> cmnt {
+fn read_line_comments(rdr: string_reader, code_to_the_left: bool) -> cmnt {
    #debug(">>> line comments");
     let p = rdr.chpos;
     let mut lines: [str] = [];
-    while rdr.curr == '/' && rdr.next() == '/' {
+    while rdr.curr == '/' && nextch(rdr) == '/' {
         let line = read_one_line_comment(rdr);
         log(debug, line);
         lines += [line];
@@ -99,36 +101,36 @@ fn trim_whitespace_prefix_and_push_line(&lines: [str],
     lines += [s1];
 }
 
-fn read_block_comment(rdr: reader, code_to_the_left: bool) -> cmnt {
+fn read_block_comment(rdr: string_reader, code_to_the_left: bool) -> cmnt {
     #debug(">>> block comment");
     let p = rdr.chpos;
     let mut lines: [str] = [];
     let mut col: uint = rdr.col;
-    rdr.bump();
-    rdr.bump();
+    bump(rdr);
+    bump(rdr);
     let mut curr_line = "/*";
     let mut level: int = 1;
     while level > 0 {
         #debug("=== block comment level %d", level);
-        if rdr.is_eof() { rdr.fatal("unterminated block comment"); }
+        if is_eof(rdr) {(rdr as reader).fatal("unterminated block comment");}
         if rdr.curr == '\n' {
             trim_whitespace_prefix_and_push_line(lines, curr_line, col);
             curr_line = "";
-            rdr.bump();
+            bump(rdr);
         } else {
             str::push_char(curr_line, rdr.curr);
-            if rdr.curr == '/' && rdr.next() == '*' {
-                rdr.bump();
-                rdr.bump();
+            if rdr.curr == '/' && nextch(rdr) == '*' {
+                bump(rdr);
+                bump(rdr);
                 curr_line += "*";
                 level += 1;
             } else {
-                if rdr.curr == '*' && rdr.next() == '/' {
-                    rdr.bump();
-                    rdr.bump();
+                if rdr.curr == '*' && nextch(rdr) == '/' {
+                    bump(rdr);
+                    bump(rdr);
                     curr_line += "/";
                     level -= 1;
-                } else { rdr.bump(); }
+                } else { bump(rdr); }
             }
         }
     }
@@ -137,26 +139,27 @@ fn read_block_comment(rdr: reader, code_to_the_left: bool) -> cmnt {
     }
     let mut style = if code_to_the_left { trailing } else { isolated };
     consume_non_eol_whitespace(rdr);
-    if !rdr.is_eof() && rdr.curr != '\n' && vec::len(lines) == 1u {
+    if !is_eof(rdr) && rdr.curr != '\n' && vec::len(lines) == 1u {
         style = mixed;
     }
     #debug("<<< block comment");
     ret {style: style, lines: lines, pos: p};
 }
 
-fn peeking_at_comment(rdr: reader) -> bool {
-    ret ((rdr.curr == '/' && rdr.next() == '/') ||
-         (rdr.curr == '/' && rdr.next() == '*')) ||
-        (rdr.curr == '#' && rdr.next() == '!');
+fn peeking_at_comment(rdr: string_reader) -> bool {
+    ret ((rdr.curr == '/' && nextch(rdr) == '/') ||
+         (rdr.curr == '/' && nextch(rdr) == '*')) ||
+        (rdr.curr == '#' && nextch(rdr) == '!');
 }
 
-fn consume_comment(rdr: reader, code_to_the_left: bool, &comments: [cmnt]) {
+fn consume_comment(rdr: string_reader, code_to_the_left: bool,
+                   &comments: [cmnt]) {
     #debug(">>> consume comment");
-    if rdr.curr == '/' && rdr.next() == '/' {
+    if rdr.curr == '/' && nextch(rdr) == '/' {
         comments += [read_line_comments(rdr, code_to_the_left)];
-    } else if rdr.curr == '/' && rdr.next() == '*' {
+    } else if rdr.curr == '/' && nextch(rdr) == '*' {
         comments += [read_block_comment(rdr, code_to_the_left)];
-    } else if rdr.curr == '#' && rdr.next() == '!' {
+    } else if rdr.curr == '#' && nextch(rdr) == '!' {
         comments += [read_shebang_comment(rdr, code_to_the_left)];
     } else { fail; }
     #debug("<<< consume comment");
@@ -173,12 +176,12 @@ fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
         {|x|str::hash(*x)},
         {|x,y|str::eq(*x, *y)}
     );
-    let rdr = new_reader(span_diagnostic,
-                         codemap::new_filemap(path, src, 0u, 0u), itr);
+    let rdr = new_string_reader(span_diagnostic,
                                codemap::new_filemap(path, src, 0u, 0u), itr);
     let mut comments: [cmnt] = [];
     let mut literals: [lit] = [];
     let mut first_read: bool = true;
-    while !rdr.is_eof() {
+    while !is_eof(rdr) {
        loop {
            let mut code_to_the_left = !first_read;
            consume_non_eol_whitespace(rdr);
@@ -192,9 +195,10 @@ fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
             }
             break;
         }
-        let tok = next_token(rdr);
+        let bpos = rdr.pos;
+        let tok = rdr.next_token();
         if token::is_lit(tok.tok) {
-            let s = rdr.get_str_from(tok.bpos);
+            let s = get_str_from(rdr, bpos);
             literals += [{lit: s, pos: tok.chpos}];
             log(debug, "tok lit: " + s);
         } else {
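
Note on the change above: comments.rs switches from the generic reader methods (rdr.bump(), rdr.is_eof(), rdr.next()) to free functions over the concrete string_reader (bump(rdr), is_eof(rdr), nextch(rdr)), casting to the reader interface only where a fatal error is reported. Below is a minimal sketch of the same free-function cursor style in modern Rust; the struct and its fields are illustrative only, not the lexer's actual types.

// Illustrative sketch only (modern Rust) of driving a character cursor with
// free functions instead of trait methods; not the actual lexer API.
struct StringReader {
    src: Vec<char>,
    idx: usize,
    curr: char,
}

// True once the cursor has run past the end of the source.
fn is_eof(rdr: &StringReader) -> bool {
    rdr.idx >= rdr.src.len()
}

// Advance the cursor one character, updating `curr`.
fn bump(rdr: &mut StringReader) {
    rdr.idx += 1;
    rdr.curr = *rdr.src.get(rdr.idx).unwrap_or(&'\0');
}

// Peek at the character after the current one without advancing.
fn nextch(rdr: &StringReader) -> char {
    *rdr.src.get(rdr.idx + 1).unwrap_or(&'\0')
}

fn main() {
    let src: Vec<char> = "// hi".chars().collect();
    let curr = src[0];
    let mut rdr = StringReader { src, idx: 0, curr };
    // Peeking at a line comment, as `peeking_at_comment` does above.
    let at_comment = rdr.curr == '/' && nextch(&rdr) == '/';
    // Consume to end of line, as `read_to_eol` does above.
    while !is_eof(&rdr) && rdr.curr != '\n' {
        bump(&mut rdr);
    }
    println!("line comment: {}", at_comment);
}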
