
Commit f6c0c48

Refactor proc_macro::TokenStream to use syntax::tokenstream::TokenStream; fix tests for changed semantics
1 parent ff591b6 commit f6c0c48

6 files changed, +67 -34 lines


src/libproc_macro/lib.rs

Lines changed: 47 additions & 26 deletions
@@ -39,9 +39,9 @@ extern crate syntax;
 use std::fmt;
 use std::str::FromStr;

-use syntax::ast;
+use syntax::errors::DiagnosticBuilder;
 use syntax::parse;
-use syntax::ptr::P;
+use syntax::tokenstream::TokenStream as TokenStream_;

 /// The main type provided by this crate, representing an abstract stream of
 /// tokens.
@@ -54,7 +54,7 @@ use syntax::ptr::P;
 /// time!
 #[stable(feature = "proc_macro_lib", since = "1.15.0")]
 pub struct TokenStream {
-    inner: Vec<P<ast::Item>>,
+    inner: TokenStream_,
 }

 /// Error returned from `TokenStream::from_str`.
@@ -77,17 +77,41 @@ pub struct LexError {
 #[doc(hidden)]
 pub mod __internal {
     use std::cell::Cell;
+    use std::rc::Rc;

     use syntax::ast;
     use syntax::ptr::P;
-    use syntax::parse::ParseSess;
-    use super::TokenStream;
+    use syntax::parse::{self, token, ParseSess};
+    use syntax::tokenstream::TokenStream as TokenStream_;
+
+    use super::{TokenStream, LexError};

     pub fn new_token_stream(item: P<ast::Item>) -> TokenStream {
-        TokenStream { inner: vec![item] }
+        TokenStream { inner: TokenStream_::from_tokens(vec![
+            token::Interpolated(Rc::new(token::NtItem(item)))
+        ])}
+    }
+
+    pub fn token_stream_wrap(inner: TokenStream_) -> TokenStream {
+        TokenStream {
+            inner: inner
+        }
+    }
+
+    pub fn token_stream_parse_items(stream: TokenStream) -> Result<Vec<P<ast::Item>>, LexError> {
+        with_parse_sess(move |sess| {
+            let mut parser = parse::new_parser_from_ts(sess, stream.inner);
+            let mut items = Vec::new();
+
+            while let Some(item) = try!(parser.parse_item().map_err(super::parse_to_lex_err)) {
+                items.push(item)
+            }
+
+            Ok(items)
+        })
     }

-    pub fn token_stream_items(stream: TokenStream) -> Vec<P<ast::Item>> {
+    pub fn token_stream_inner(stream: TokenStream) -> TokenStream_ {
         stream.inner
     }

@@ -96,6 +120,10 @@ pub mod __internal {
                                   trait_name: &str,
                                   expand: fn(TokenStream) -> TokenStream,
                                   attributes: &[&'static str]);
+
+        fn register_attr_proc_macro(&mut self,
+                                    name: &str,
+                                    expand: fn(TokenStream, TokenStream) -> TokenStream);
     }

     // Emulate scoped_thread_local!() here essentially
@@ -125,11 +153,17 @@ pub mod __internal {
         where F: FnOnce(&ParseSess) -> R
     {
         let p = CURRENT_SESS.with(|p| p.get());
-        assert!(!p.is_null());
+        assert!(!p.is_null(), "proc_macro::__internal::with_parse_sess() called \
+                               before set_parse_sess()!");
         f(unsafe { &*p })
     }
 }

+fn parse_to_lex_err(mut err: DiagnosticBuilder) -> LexError {
+    err.cancel();
+    LexError { _inner: () }
+}
+
 #[stable(feature = "proc_macro_lib", since = "1.15.0")]
 impl FromStr for TokenStream {
     type Err = LexError;
@@ -138,30 +172,17 @@ impl FromStr for TokenStream {
         __internal::with_parse_sess(|sess| {
             let src = src.to_string();
             let name = "<proc-macro source code>".to_string();
-            let mut parser = parse::new_parser_from_source_str(sess, name, src);
-            let mut ret = TokenStream { inner: Vec::new() };
-            loop {
-                match parser.parse_item() {
-                    Ok(Some(item)) => ret.inner.push(item),
-                    Ok(None) => return Ok(ret),
-                    Err(mut err) => {
-                        err.cancel();
-                        return Err(LexError { _inner: () })
-                    }
-                }
-            }
+            let tts = try!(parse::parse_tts_from_source_str(name, src, sess)
+                               .map_err(parse_to_lex_err));
+
+            Ok(__internal::token_stream_wrap(TokenStream_::from_tts(tts)))
         })
     }
 }

 #[stable(feature = "proc_macro_lib", since = "1.15.0")]
 impl fmt::Display for TokenStream {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        for item in self.inner.iter() {
-            let item = syntax::print::pprust::item_to_string(item);
-            try!(f.write_str(&item));
-            try!(f.write_str("\n"));
-        }
-        Ok(())
+        self.inner.fmt(f)
     }
 }
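
Taken together, the lib.rs changes alter what derive authors observe: `TokenStream::from_str` now only lexes its input (via `parse_tts_from_source_str`) instead of eagerly parsing items, and `Display` formats the wrapped `syntax::tokenstream::TokenStream` instead of pretty-printing each parsed item with a trailing newline. A minimal sketch of a derive exercising that surface (a hypothetical crate, not part of this commit; license header and any era-specific feature gates omitted):

    #![crate_type = "proc-macro"]

    extern crate proc_macro;

    use proc_macro::TokenStream;

    #[proc_macro_derive(Probe)]
    pub fn probe(input: TokenStream) -> TokenStream {
        // Display now delegates to the underlying token stream, so this is a
        // token-level rendering of the input item (e.g. "struct A;").
        println!("derive input: {}", input.to_string());

        // FromStr only has to lex this string; the items it contains are
        // parsed later, in the expander, via token_stream_parse_items().
        "struct Probed;".parse().unwrap()
    }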

src/libsyntax_ext/deriving/custom.rs

Lines changed: 15 additions & 2 deletions
@@ -77,8 +77,9 @@ impl MultiItemModifier for CustomDerive {
             let inner = self.inner;
             panic::catch_unwind(panic::AssertUnwindSafe(|| inner(input)))
         });
-        let new_items = match res {
-            Ok(stream) => __internal::token_stream_items(stream),
+
+        let stream = match res {
+            Ok(stream) => stream,
             Err(e) => {
                 let msg = "custom derive attribute panicked";
                 let mut err = ecx.struct_span_fatal(span, msg);
@@ -94,6 +95,18 @@ impl MultiItemModifier for CustomDerive {
             }
         };

+        let new_items = __internal::set_parse_sess(&ecx.parse_sess, || {
+            match __internal::token_stream_parse_items(stream) {
+                Ok(new_items) => new_items,
+                Err(_) => {
+                    // FIXME: handle this better
+                    let msg = "custom derive produced unparseable tokens";
+                    ecx.struct_span_fatal(span, msg).emit();
+                    panic!(FatalError);
+                }
+            }
+        });
+
         let mut res = vec![Annotatable::Item(item)];
         // Reassign spans of all expanded items to the input `item`
         // for better errors here.
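
Because item parsing now happens here in the expander, after the derive has returned, output that lexes but does not parse as items is reported with the new "custom derive produced unparseable tokens" error (the updated derive-bad test below checks exactly this) rather than surfacing as a `LexError` unwrap panic inside the derive itself. A hypothetical derive of that shape, for illustration only (not the actual derive_bad auxiliary; crate attributes abbreviated as above):

    #![crate_type = "proc-macro"]

    extern crate proc_macro;

    use proc_macro::TokenStream;

    #[proc_macro_derive(A)]
    pub fn bad_derive(_input: TokenStream) -> TokenStream {
        // "fn" lexes into a single token, so parse() now succeeds where it
        // previously returned Err(LexError); the failure only shows up when
        // the expander calls token_stream_parse_items() on the returned
        // stream and emits "custom derive produced unparseable tokens".
        "fn".parse().unwrap()
    }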

src/test/compile-fail-fulldeps/proc-macro/derive-bad.rs

Lines changed: 1 addition & 2 deletions
@@ -16,8 +16,7 @@ extern crate derive_bad;
 #[derive(
     A
 )]
-//~^^ ERROR: custom derive attribute panicked
-//~| HELP: called `Result::unwrap()` on an `Err` value: LexError
+//~^^ ERROR: custom derive produced unparseable tokens
 struct A;

 fn main() {}

src/test/run-pass-fulldeps/proc-macro/auxiliary/derive-atob.rs

Lines changed: 1 addition & 1 deletion
@@ -19,6 +19,6 @@ use proc_macro::TokenStream;
 #[proc_macro_derive(AToB)]
 pub fn derive(input: TokenStream) -> TokenStream {
     let input = input.to_string();
-    assert_eq!(input, "#[derive(Copy, Clone)]\nstruct A;\n");
+    assert_eq!(input, "#[derive(Copy, Clone)]\nstruct A;");
     "struct B;".parse().unwrap()
 }

src/test/run-pass-fulldeps/proc-macro/auxiliary/derive-ctod.rs

Lines changed: 1 addition & 1 deletion
@@ -19,6 +19,6 @@ use proc_macro::TokenStream;
 #[proc_macro_derive(CToD)]
 pub fn derive(input: TokenStream) -> TokenStream {
     let input = input.to_string();
-    assert_eq!(input, "struct C;\n");
+    assert_eq!(input, "struct C;");
     "struct D;".parse().unwrap()
 }

src/test/run-pass-fulldeps/proc-macro/auxiliary/derive-same-struct.rs

Lines changed: 2 additions & 2 deletions
@@ -18,12 +18,12 @@ use proc_macro::TokenStream;
 #[proc_macro_derive(AToB)]
 pub fn derive1(input: TokenStream) -> TokenStream {
     println!("input1: {:?}", input.to_string());
-    assert_eq!(input.to_string(), "struct A;\n");
+    assert_eq!(input.to_string(), "struct A;");
     "#[derive(BToC)] struct B;".parse().unwrap()
 }

 #[proc_macro_derive(BToC)]
 pub fn derive2(input: TokenStream) -> TokenStream {
-    assert_eq!(input.to_string(), "struct B;\n");
+    assert_eq!(input.to_string(), "struct B;");
     "struct C;".parse().unwrap()
 }
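
The three auxiliaries above change only their expected strings: with `Display` delegating to the underlying token stream, the stringified input no longer carries the trailing newline that the old `pprust::item_to_string` plus "\n" loop appended. For context, a consumer of one of these auxiliaries looks roughly like this (a hypothetical sketch following the compiletest aux-build pattern, not a file in this diff; license header and any era-specific feature gates omitted):

    // aux-build:derive-ctod.rs

    #[macro_use]
    extern crate derive_ctod;

    #[derive(CToD)]
    struct C;

    fn main() {
        // The CToD derive asserted that its input stringified to "struct C;"
        // (with no trailing newline) and expanded to a new `struct D;` item.
        let _d = D;
    }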
