Update to latest libsyntax #218

Merged
merged 1 commit on Oct 17, 2016
2 changes: 1 addition & 1 deletion Cargo.toml
@@ -1,7 +1,7 @@
 [package]

 name = "html5ever"
-version = "0.5.4"
+version = "0.5.5"
 authors = [ "The html5ever Project Developers" ]
 license = "MIT / Apache-2.0"
 repository = "https://github.com/servo/html5ever"
2 changes: 1 addition & 1 deletion macros/Cargo.toml
@@ -1,7 +1,7 @@
 [package]

 name = "html5ever_macros"
-version = "0.2.6"
+version = "0.2.7"
 authors = [ "The html5ever Project Developers" ]
 license = "MIT / Apache-2.0"
 repository = "https://github.com/servo/html5ever"
7 changes: 5 additions & 2 deletions macros/src/lib.rs
@@ -18,10 +18,13 @@ extern crate mac;
 // See https://github.com/rust-lang/rust/pull/23857
 macro_rules! panictry {
     ($e:expr) => ({
-        use syntax::diagnostic::FatalError;
+        use syntax::errors::FatalError;
         match $e {
             Ok(e) => e,
-            Err(FatalError) => panic!(FatalError)
+            Err(mut e) => {
+                e.emit();
+                panic!(FatalError);
+            }
         }
     })
 }
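Note (not part of the diff): in the libsyntax this PR targets, FatalError moved from syntax::diagnostic to syntax::errors, and fallible parser methods now return a DiagnosticBuilder that must be emitted explicitly before unwinding, otherwise the error text is never printed. A minimal sketch of what a panictry!(parser.parse_ident()) call now expands to:

    // Sketch only; mirrors the updated macro body above.
    let ident = match parser.parse_ident() {
        Ok(ident) => ident,
        Err(mut diag) => {
            diag.emit();        // report the diagnostic through the session
            panic!(FatalError); // then unwind, as rustc's own panictry! does
        }
    };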
49 changes: 25 additions & 24 deletions macros/src/match_token.rs
@@ -100,20 +100,21 @@ matching, by enforcing the following restrictions on its input:
 use std::collections::{HashSet, HashMap};
 use std::collections::hash_map::Entry::{Occupied, Vacant};

-use syntax::diagnostic::FatalError;
-use syntax::ptr::P;
-use syntax::codemap::{Span, Spanned, spanned};
 use syntax::ast;
-use syntax::parse::parser::{Parser, Restrictions};
-use syntax::parse::{token, parser, classify};
-use syntax::parse;
+use syntax::codemap::{Span, Spanned, spanned};
+use syntax::errors::DiagnosticBuilder;
 use syntax::ext::base::{ExtCtxt, MacResult, MacEager};
+use syntax::parse;
+use syntax::parse::{token, parser, classify};
+use syntax::parse::parser::{Parser, Restrictions};
+use syntax::ptr::P;
+use syntax::tokenstream::TokenTree;

 use self::TagKind::{StartTag, EndTag};
 use self::LHS::{Pat, Tags};
 use self::RHS::{Else, Expr};

-type Tokens = Vec<ast::TokenTree>;
+type Tokens = Vec<TokenTree>;

 // FIXME: duplicated in src/tokenizer/interface.rs
 #[derive(PartialEq, Eq, Hash, Copy, Clone, Debug)]
@@ -170,22 +171,22 @@ fn push_all<T>(lhs: &mut Vec<T>, rhs: Vec<T>) {
     lhs.extend(rhs.into_iter());
 }

-fn parse_spanned_ident(parser: &mut Parser) -> Result<ast::SpannedIdent, FatalError> {
+fn parse_spanned_ident<'a>(parser: &mut Parser<'a>) -> Result<ast::SpannedIdent, DiagnosticBuilder<'a>> {
     let lo = parser.span.lo;
     let ident = try!(parser.parse_ident());
     let hi = parser.last_span.hi;
     Ok(spanned(lo, hi, ident))
 }

-fn parse_tag(parser: &mut Parser) -> Result<Spanned<Tag>, FatalError> {
+fn parse_tag<'a>(parser: &mut Parser<'a>) -> Result<Spanned<Tag>, DiagnosticBuilder<'a>> {
     let lo = parser.span.lo;
     try!(parser.expect(&token::Lt));

-    let kind = match try!(parser.eat(&token::BinOp(token::Slash))) {
+    let kind = match parser.eat(&token::BinOp(token::Slash)) {
         true => EndTag,
         false => StartTag,
     };
-    let name = match try!(parser.eat(&token::Underscore)) {
+    let name = match parser.eat(&token::Underscore) {
         true => None,
         false => Some((*try!(parser.parse_ident()).name.as_str()).to_owned()),
     };
@@ -198,18 +199,18 @@ fn parse_tag(parser: &mut Parser) -> Result<Spanned<Tag>, FatalError> {
 }

 /// Parse a `match_token!` invocation into the little AST defined above.
-fn parse(cx: &mut ExtCtxt, toks: &[ast::TokenTree]) -> Result<Match, FatalError> {
+fn parse<'a>(cx: &'a mut ExtCtxt, toks: &[TokenTree]) -> Result<Match, DiagnosticBuilder<'a>> {
     let mut parser = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), toks.to_vec());

-    let discriminant = try!(parser.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL));
-    try!(parser.commit_expr_expecting(&*discriminant, token::OpenDelim(token::Brace)));
+    let discriminant = try!(parser.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL, None));
+    try!(parser.expect(&token::OpenDelim(token::Brace)));

     let mut arms: Vec<Arm> = Vec::new();
     while parser.token != token::CloseDelim(token::Brace) {
         let mut binding = None;
         if parser.look_ahead(1, |t| *t == token::At) {
             binding = Some(try!(parse_spanned_ident(&mut parser)));
-            try!(parser.bump()); // Consume the @
+            parser.bump(); // Consume the @
         }

         let lhs_lo = parser.span.lo;
@@ -230,22 +231,22 @@ fn parse(cx: &mut ExtCtxt, toks: &[ast::TokenTree]) -> Result<Match, FatalError>

         let rhs_lo = parser.span.lo;
         let mut rhs_hi = parser.span.hi;
-        let rhs = if try!(parser.eat_keyword(token::keywords::Else)) {
+        let rhs = if parser.eat_keyword(token::keywords::Else) {
             try!(parser.expect(&token::Comma));
             Else
         } else {
-            let expr = try!(parser.parse_expr_res(Restrictions::RESTRICTION_STMT_EXPR));
+            let expr = try!(parser.parse_expr_res(Restrictions::RESTRICTION_STMT_EXPR, None));
             rhs_hi = parser.last_span.hi;

             let require_comma =
                 !classify::expr_is_simple_block(&*expr)
                 && parser.token != token::CloseDelim(token::Brace);

             if require_comma {
-                try!(parser.commit_expr(
-                    &*expr, &[token::Comma], &[token::CloseDelim(token::Brace)]));
+                try!(parser.expect_one_of(
+                    &[token::Comma], &[token::CloseDelim(token::Brace)]));
             } else {
-                try!(parser.eat(&token::Comma));
+                parser.eat(&token::Comma);
             }

             Expr(expr)
@@ -259,7 +260,7 @@ fn parse(cx: &mut ExtCtxt, toks: &[ast::TokenTree]) -> Result<Match, FatalError>
     }

     // Consume the closing brace
-    try!(parser.bump());
+    parser.bump();

     Ok(Match {
         discriminant: discriminant,
@@ -300,8 +301,8 @@ macro_rules! ext_err_if {
 }

 /// Expand the `match_token!` macro.
-pub fn expand_to_tokens(cx: &mut ExtCtxt, span: Span, toks: &[ast::TokenTree])
-                        -> Result<Vec<ast::TokenTree>, (Span, &'static str)> {
+pub fn expand_to_tokens(cx: &mut ExtCtxt, span: Span, toks: &[TokenTree])
+                        -> Result<Vec<TokenTree>, (Span, &'static str)> {
     let Match { discriminant, mut arms } = panictry!(parse(cx, toks));

     // Handle the last arm specially at the end.
@@ -436,7 +437,7 @@ pub fn expand_to_tokens(cx: &mut ExtCtxt, span: Span, toks: &[ast::TokenTree])
         (None, Tags(_), _) => ext_err!(lhs.span, "the last arm cannot have tag patterns"),
         (None, _, Else) => ext_err!(rhs.span, "the last arm cannot use 'else'"),
         (None, Pat(p), Expr(e)) => match p.node {
-            ast::PatWild | ast::PatIdent(..) => (p, e),
+            ast::PatKind::Wild | ast::PatKind::Ident(..) => (p, e),
             _ => ext_err!(lhs.span, "the last arm must have a wildcard or ident pattern"),
         },
     };
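Note (a summary of the API churn in this file, not part of the diff): TokenTree moved from syntax::ast to syntax::tokenstream; Parser::eat, eat_keyword, and bump became infallible (returning bool or unit), so the surrounding try! calls are dropped; parse_expr_res takes a second argument (already-parsed attributes, None here); the commit_expr* helpers are replaced by expect/expect_one_of; and pattern variants are regrouped under ast::PatKind. Since errors now borrow the parse session, helpers that return them must thread a lifetime from the parser, as in this hypothetical sketch:

    // Sketch: the DiagnosticBuilder<'a> borrows the same session as Parser<'a>,
    // so the lifetime must appear in the helper's signature.
    fn expect_comma<'a>(parser: &mut Parser<'a>) -> Result<(), DiagnosticBuilder<'a>> {
        if parser.eat(&token::Comma) {  // eat() now just reports whether it consumed
            return Ok(());
        }
        parser.expect(&token::Comma)    // expect() stays fallible
    }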
31 changes: 18 additions & 13 deletions macros/src/pre_expand.rs
@@ -9,12 +9,14 @@

 use match_token;
 use std::fs::File;
-use std::hash::{Hash, Hasher, SipHasher};
+use std::hash::{Hash, Hasher};
 use std::io::{Read, Write};
 use std::path::Path;
 use std::rc::Rc;
-use syntax::{ast, codemap, ext, parse, print};
+use syntax::{codemap, ext, parse, print};
+use syntax::ext::base::DummyResolver;
 use syntax::parse::token;
+use syntax::tokenstream::{Delimited, TokenTree};

 pub fn pre_expand(from: &Path, to: &Path) {
     let mut source = String::new();
@@ -25,36 +27,36 @@ pub fn pre_expand(from: &Path, to: &Path) {
     write_header(&from, &source, &mut file_to);

     let sess = parse::ParseSess::new();
-    let mut feature_gated_cfgs = Vec::new();
+    let mut resolver = DummyResolver;
     let mut cx = ext::base::ExtCtxt::new(&sess, vec![],
                                          ext::expand::ExpansionConfig::default("".to_owned()),
-                                         &mut feature_gated_cfgs);
+                                         &mut resolver);

     let from = from.to_string_lossy().into_owned();
-    let tts = parse::parse_tts_from_source_str(from, source, vec![], &sess);
+    let tts = panictry!(parse::parse_tts_from_source_str(from, source, vec![], &sess));
     let tts = find_and_expand_match_token(&mut cx, tts);
     let tts = pretty(&mut cx, tts);

     let expanded = print::pprust::tts_to_string(&tts);
     file_to.write_all(expanded.as_bytes()).unwrap();
 }

-fn find_and_expand_match_token(cx: &mut ext::base::ExtCtxt, tts: Vec<ast::TokenTree>)
-                               -> Vec<ast::TokenTree> {
+fn find_and_expand_match_token(cx: &mut ext::base::ExtCtxt, tts: Vec<TokenTree>)
+                               -> Vec<TokenTree> {
     let mut expanded = Vec::new();
     let mut tts = tts.into_iter().peekable();
     while let Some(tt) = tts.next() {
         match tt {
-            ast::TokenTree::Token(span, token::Token::Ident(ident, token::IdentStyle::Plain))
+            TokenTree::Token(span, token::Token::Ident(ident))
             if ident.name.as_str() == "match_token"
             => {
                 // `!`
-                if !matches!(tts.next(), Some(ast::TokenTree::Token(_, token::Token::Not))) {
+                if !matches!(tts.next(), Some(TokenTree::Token(_, token::Token::Not))) {
                     expanded.push(tt);
                     continue
                 }
                 match tts.next() {
-                    Some(ast::TokenTree::Delimited(_, block)) => {
+                    Some(TokenTree::Delimited(_, block)) => {
                         cx.bt_push(expn_info(span));
                         expanded.extend(
                             match match_token::expand_to_tokens(cx, span, &block.tts) {
@@ -69,10 +71,10 @@ fn find_and_expand_match_token(cx: &mut ext::base::ExtCtxt, tts: Vec<ast::TokenT
                     _ => panic!("expected a block after {:?}", span)
                 }
             }
-            ast::TokenTree::Delimited(span, mut block) => {
+            TokenTree::Delimited(span, mut block) => {
                 Rc::make_mut(&mut block);
                 let block = Rc::try_unwrap(block).unwrap();
-                expanded.push(ast::TokenTree::Delimited(span, Rc::new(ast::Delimited {
+                expanded.push(TokenTree::Delimited(span, Rc::new(Delimited {
                     delim: block.delim,
                     open_span: block.open_span,
                     tts: find_and_expand_match_token(cx, block.tts),
@@ -97,7 +99,7 @@ }
 }

 /// Somehow, going through a parser and back to tokens gives nicer whitespace.
-fn pretty(cx: &mut ext::base::ExtCtxt, tts: Vec<ast::TokenTree>) -> Vec<ast::TokenTree> {
+fn pretty(cx: &mut ext::base::ExtCtxt, tts: Vec<TokenTree>) -> Vec<TokenTree> {
     let mut parser = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), tts);
     let start_span = parser.span;
     let mut items = Vec::new();
@@ -109,7 +111,10 @@ fn pretty(cx: &mut ext::base::ExtCtxt, tts: Vec<ast::TokenTree>) -> Vec<ast::Tok
     quote_tokens!(&mut *cx, $attrs $items)
 }

+#[allow(deprecated)]
 fn write_header(source_file_name: &Path, source: &str, file: &mut File) {
+    use std::hash::SipHasher;
+
     let mut hasher = SipHasher::new();
     source.hash(&mut hasher);
     let source_hash = hasher.finish();
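Note (not part of the diff): three migrations meet in this file. ExtCtxt::new now takes a resolver (satisfied here by DummyResolver) instead of the old feature-gated-cfgs vector; parse_tts_from_source_str became fallible, hence the panictry! wrapper; and std::hash::SipHasher was deprecated upstream in favor of DefaultHasher, so its use is confined to write_header under #[allow(deprecated)]. A minimal sketch of that hashing pattern, assuming the same std APIs:

    // Sketch: SipHasher::new() uses fixed keys, so the hash is deterministic
    // across runs; the deprecated import is scoped to this one function.
    #[allow(deprecated)]
    fn source_hash(source: &str) -> u64 {
        use std::hash::{Hash, Hasher, SipHasher};
        let mut hasher = SipHasher::new();
        source.hash(&mut hasher);
        hasher.finish()
    }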