Skip to content

Commit 62dfacb

Browse files
author
bors-servo
authored
Auto merge of #218 - nox:syntex, r=SimonSapin
Update to latest libsyntax <!-- Reviewable:start --> This change is [<img src="https://reviewable.io/review_button.svg" height="34" align="absmiddle" alt="Reviewable"/>](https://reviewable.io/reviews/servo/html5ever/218) <!-- Reviewable:end -->
2 parents 8905aac + 1bb3903 commit 62dfacb

File tree

5 files changed

+50
-41
lines changed

5 files changed

+50
-41
lines changed

Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
[package]
22

33
name = "html5ever"
4-
version = "0.5.4"
4+
version = "0.5.5"
55
authors = [ "The html5ever Project Developers" ]
66
license = "MIT / Apache-2.0"
77
repository = "https://github.com/servo/html5ever"

macros/Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
[package]
22

33
name = "html5ever_macros"
4-
version = "0.2.6"
4+
version = "0.2.7"
55
authors = [ "The html5ever Project Developers" ]
66
license = "MIT / Apache-2.0"
77
repository = "https://github.com/servo/html5ever"

macros/src/lib.rs

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,10 +18,13 @@ extern crate mac;
1818
// See https://github.com/rust-lang/rust/pull/23857
1919
macro_rules! panictry {
2020
($e:expr) => ({
21-
use syntax::diagnostic::FatalError;
21+
use syntax::errors::FatalError;
2222
match $e {
2323
Ok(e) => e,
24-
Err(FatalError) => panic!(FatalError)
24+
Err(mut e) => {
25+
e.emit();
26+
panic!(FatalError);
27+
}
2528
}
2629
})
2730
}

macros/src/match_token.rs

Lines changed: 25 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -100,20 +100,21 @@ matching, by enforcing the following restrictions on its input:
100100
use std::collections::{HashSet, HashMap};
101101
use std::collections::hash_map::Entry::{Occupied, Vacant};
102102

103-
use syntax::diagnostic::FatalError;
104-
use syntax::ptr::P;
105-
use syntax::codemap::{Span, Spanned, spanned};
106103
use syntax::ast;
107-
use syntax::parse::parser::{Parser, Restrictions};
108-
use syntax::parse::{token, parser, classify};
109-
use syntax::parse;
104+
use syntax::codemap::{Span, Spanned, spanned};
105+
use syntax::errors::DiagnosticBuilder;
110106
use syntax::ext::base::{ExtCtxt, MacResult, MacEager};
107+
use syntax::parse;
108+
use syntax::parse::{token, parser, classify};
109+
use syntax::parse::parser::{Parser, Restrictions};
110+
use syntax::ptr::P;
111+
use syntax::tokenstream::TokenTree;
111112

112113
use self::TagKind::{StartTag, EndTag};
113114
use self::LHS::{Pat, Tags};
114115
use self::RHS::{Else, Expr};
115116

116-
type Tokens = Vec<ast::TokenTree>;
117+
type Tokens = Vec<TokenTree>;
117118

118119
// FIXME: duplicated in src/tokenizer/interface.rs
119120
#[derive(PartialEq, Eq, Hash, Copy, Clone, Debug)]
@@ -170,22 +171,22 @@ fn push_all<T>(lhs: &mut Vec<T>, rhs: Vec<T>) {
170171
lhs.extend(rhs.into_iter());
171172
}
172173

173-
fn parse_spanned_ident(parser: &mut Parser) -> Result<ast::SpannedIdent, FatalError> {
174+
fn parse_spanned_ident<'a>(parser: &mut Parser<'a>) -> Result<ast::SpannedIdent, DiagnosticBuilder<'a>> {
174175
let lo = parser.span.lo;
175176
let ident = try!(parser.parse_ident());
176177
let hi = parser.last_span.hi;
177178
Ok(spanned(lo, hi, ident))
178179
}
179180

180-
fn parse_tag(parser: &mut Parser) -> Result<Spanned<Tag>, FatalError> {
181+
fn parse_tag<'a>(parser: &mut Parser<'a>) -> Result<Spanned<Tag>, DiagnosticBuilder<'a>> {
181182
let lo = parser.span.lo;
182183
try!(parser.expect(&token::Lt));
183184

184-
let kind = match try!(parser.eat(&token::BinOp(token::Slash))) {
185+
let kind = match parser.eat(&token::BinOp(token::Slash)) {
185186
true => EndTag,
186187
false => StartTag,
187188
};
188-
let name = match try!(parser.eat(&token::Underscore)) {
189+
let name = match parser.eat(&token::Underscore) {
189190
true => None,
190191
false => Some((*try!(parser.parse_ident()).name.as_str()).to_owned()),
191192
};
@@ -198,18 +199,18 @@ fn parse_tag(parser: &mut Parser) -> Result<Spanned<Tag>, FatalError> {
198199
}
199200

200201
/// Parse a `match_token!` invocation into the little AST defined above.
201-
fn parse(cx: &mut ExtCtxt, toks: &[ast::TokenTree]) -> Result<Match, FatalError> {
202+
fn parse<'a>(cx: &'a mut ExtCtxt, toks: &[TokenTree]) -> Result<Match, DiagnosticBuilder<'a>> {
202203
let mut parser = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), toks.to_vec());
203204

204-
let discriminant = try!(parser.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL));
205-
try!(parser.commit_expr_expecting(&*discriminant, token::OpenDelim(token::Brace)));
205+
let discriminant = try!(parser.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL, None));
206+
try!(parser.expect(&token::OpenDelim(token::Brace)));
206207

207208
let mut arms: Vec<Arm> = Vec::new();
208209
while parser.token != token::CloseDelim(token::Brace) {
209210
let mut binding = None;
210211
if parser.look_ahead(1, |t| *t == token::At) {
211212
binding = Some(try!(parse_spanned_ident(&mut parser)));
212-
try!(parser.bump()); // Consume the @
213+
parser.bump(); // Consume the @
213214
}
214215

215216
let lhs_lo = parser.span.lo;
@@ -230,22 +231,22 @@ fn parse(cx: &mut ExtCtxt, toks: &[ast::TokenTree]) -> Result<Match, FatalError>
230231

231232
let rhs_lo = parser.span.lo;
232233
let mut rhs_hi = parser.span.hi;
233-
let rhs = if try!(parser.eat_keyword(token::keywords::Else)) {
234+
let rhs = if parser.eat_keyword(token::keywords::Else) {
234235
try!(parser.expect(&token::Comma));
235236
Else
236237
} else {
237-
let expr = try!(parser.parse_expr_res(Restrictions::RESTRICTION_STMT_EXPR));
238+
let expr = try!(parser.parse_expr_res(Restrictions::RESTRICTION_STMT_EXPR, None));
238239
rhs_hi = parser.last_span.hi;
239240

240241
let require_comma =
241242
!classify::expr_is_simple_block(&*expr)
242243
&& parser.token != token::CloseDelim(token::Brace);
243244

244245
if require_comma {
245-
try!(parser.commit_expr(
246-
&*expr, &[token::Comma], &[token::CloseDelim(token::Brace)]));
246+
try!(parser.expect_one_of(
247+
&[token::Comma], &[token::CloseDelim(token::Brace)]));
247248
} else {
248-
try!(parser.eat(&token::Comma));
249+
parser.eat(&token::Comma);
249250
}
250251

251252
Expr(expr)
@@ -259,7 +260,7 @@ fn parse(cx: &mut ExtCtxt, toks: &[ast::TokenTree]) -> Result<Match, FatalError>
259260
}
260261

261262
// Consume the closing brace
262-
try!(parser.bump());
263+
parser.bump();
263264

264265
Ok(Match {
265266
discriminant: discriminant,
@@ -300,8 +301,8 @@ macro_rules! ext_err_if {
300301
}
301302

302303
/// Expand the `match_token!` macro.
303-
pub fn expand_to_tokens(cx: &mut ExtCtxt, span: Span, toks: &[ast::TokenTree])
304-
-> Result<Vec<ast::TokenTree>, (Span, &'static str)> {
304+
pub fn expand_to_tokens(cx: &mut ExtCtxt, span: Span, toks: &[TokenTree])
305+
-> Result<Vec<TokenTree>, (Span, &'static str)> {
305306
let Match { discriminant, mut arms } = panictry!(parse(cx, toks));
306307

307308
// Handle the last arm specially at the end.
@@ -436,7 +437,7 @@ pub fn expand_to_tokens(cx: &mut ExtCtxt, span: Span, toks: &[ast::TokenTree])
436437
(None, Tags(_), _) => ext_err!(lhs.span, "the last arm cannot have tag patterns"),
437438
(None, _, Else) => ext_err!(rhs.span, "the last arm cannot use 'else'"),
438439
(None, Pat(p), Expr(e)) => match p.node {
439-
ast::PatWild | ast::PatIdent(..) => (p, e),
440+
ast::PatKind::Wild | ast::PatKind::Ident(..) => (p, e),
440441
_ => ext_err!(lhs.span, "the last arm must have a wildcard or ident pattern"),
441442
},
442443
};

macros/src/pre_expand.rs

Lines changed: 18 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -9,12 +9,14 @@
99

1010
use match_token;
1111
use std::fs::File;
12-
use std::hash::{Hash, Hasher, SipHasher};
12+
use std::hash::{Hash, Hasher};
1313
use std::io::{Read, Write};
1414
use std::path::Path;
1515
use std::rc::Rc;
16-
use syntax::{ast, codemap, ext, parse, print};
16+
use syntax::{codemap, ext, parse, print};
17+
use syntax::ext::base::DummyResolver;
1718
use syntax::parse::token;
19+
use syntax::tokenstream::{Delimited, TokenTree};
1820

1921
pub fn pre_expand(from: &Path, to: &Path) {
2022
let mut source = String::new();
@@ -25,36 +27,36 @@ pub fn pre_expand(from: &Path, to: &Path) {
2527
write_header(&from, &source, &mut file_to);
2628

2729
let sess = parse::ParseSess::new();
28-
let mut feature_gated_cfgs = Vec::new();
30+
let mut resolver = DummyResolver;
2931
let mut cx = ext::base::ExtCtxt::new(&sess, vec![],
3032
ext::expand::ExpansionConfig::default("".to_owned()),
31-
&mut feature_gated_cfgs);
33+
&mut resolver);
3234

3335
let from = from.to_string_lossy().into_owned();
34-
let tts = parse::parse_tts_from_source_str(from, source, vec![], &sess);
36+
let tts = panictry!(parse::parse_tts_from_source_str(from, source, vec![], &sess));
3537
let tts = find_and_expand_match_token(&mut cx, tts);
3638
let tts = pretty(&mut cx, tts);
3739

3840
let expanded = print::pprust::tts_to_string(&tts);
3941
file_to.write_all(expanded.as_bytes()).unwrap();
4042
}
4143

42-
fn find_and_expand_match_token(cx: &mut ext::base::ExtCtxt, tts: Vec<ast::TokenTree>)
43-
-> Vec<ast::TokenTree> {
44+
fn find_and_expand_match_token(cx: &mut ext::base::ExtCtxt, tts: Vec<TokenTree>)
45+
-> Vec<TokenTree> {
4446
let mut expanded = Vec::new();
4547
let mut tts = tts.into_iter().peekable();
4648
while let Some(tt) = tts.next() {
4749
match tt {
48-
ast::TokenTree::Token(span, token::Token::Ident(ident, token::IdentStyle::Plain))
50+
TokenTree::Token(span, token::Token::Ident(ident))
4951
if ident.name.as_str() == "match_token"
5052
=> {
5153
// `!`
52-
if !matches!(tts.next(), Some(ast::TokenTree::Token(_, token::Token::Not))) {
54+
if !matches!(tts.next(), Some(TokenTree::Token(_, token::Token::Not))) {
5355
expanded.push(tt);
5456
continue
5557
}
5658
match tts.next() {
57-
Some(ast::TokenTree::Delimited(_, block)) => {
59+
Some(TokenTree::Delimited(_, block)) => {
5860
cx.bt_push(expn_info(span));
5961
expanded.extend(
6062
match match_token::expand_to_tokens(cx, span, &block.tts) {
@@ -69,10 +71,10 @@ fn find_and_expand_match_token(cx: &mut ext::base::ExtCtxt, tts: Vec<ast::TokenT
6971
_ => panic!("expected a block after {:?}", span)
7072
}
7173
}
72-
ast::TokenTree::Delimited(span, mut block) => {
74+
TokenTree::Delimited(span, mut block) => {
7375
Rc::make_mut(&mut block);
7476
let block = Rc::try_unwrap(block).unwrap();
75-
expanded.push(ast::TokenTree::Delimited(span, Rc::new(ast::Delimited {
77+
expanded.push(TokenTree::Delimited(span, Rc::new(Delimited {
7678
delim: block.delim,
7779
open_span: block.open_span,
7880
tts: find_and_expand_match_token(cx, block.tts),
@@ -97,7 +99,7 @@ fn expn_info(span: codemap::Span) -> codemap::ExpnInfo {
9799
}
98100

99101
/// Somehow, going through a parser and back to tokens gives nicer whitespace.
100-
fn pretty(cx: &mut ext::base::ExtCtxt, tts: Vec<ast::TokenTree>) -> Vec<ast::TokenTree> {
102+
fn pretty(cx: &mut ext::base::ExtCtxt, tts: Vec<TokenTree>) -> Vec<TokenTree> {
101103
let mut parser = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), tts);
102104
let start_span = parser.span;
103105
let mut items = Vec::new();
@@ -109,7 +111,10 @@ fn pretty(cx: &mut ext::base::ExtCtxt, tts: Vec<ast::TokenTree>) -> Vec<ast::Tok
109111
quote_tokens!(&mut *cx, $attrs $items)
110112
}
111113

114+
#[allow(deprecated)]
112115
fn write_header(source_file_name: &Path, source: &str, file: &mut File) {
116+
use std::hash::SipHasher;
117+
113118
let mut hasher = SipHasher::new();
114119
source.hash(&mut hasher);
115120
let source_hash = hasher.finish();

0 commit comments

Comments
 (0)