
Commit f6b5788

Remove TokenStream::from_streams.
It is inlined into its only non-test call site; the one test call site is changed to use `TokenStreamBuilder` instead.
1 parent: 178b746
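
For readers of the diff below, the replacement pattern is the one the test update uses: instead of collecting streams into a `SmallVec` and calling `TokenStream::from_streams`, push each stream into a `TokenStreamBuilder` and call `build()` once. A minimal sketch of that migration, where `concat`, `first`, and `second` are hypothetical names (the builder API itself is taken from the diff):

use rustc_ast::tokenstream::{TokenStream, TokenStreamBuilder};

// Hypothetical helper showing the replacement pattern; `first` and
// `second` stand for arbitrary token streams.
fn concat(first: TokenStream, second: TokenStream) -> TokenStream {
    // Before this commit:
    //     TokenStream::from_streams(smallvec![first, second])
    // After: accumulate in a builder and build once.
    let mut builder = TokenStreamBuilder::new();
    builder.push(first);
    builder.push(second);
    builder.build()
}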

2 files changed: 41 additions and 42 deletions

compiler/rustc_ast/src/tokenstream.rs

Lines changed: 37 additions & 40 deletions

@@ -399,45 +399,6 @@ impl TokenStream {
         self.0.len()
     }
 
-    pub fn from_streams(mut streams: SmallVec<[TokenStream; 2]>) -> TokenStream {
-        match streams.len() {
-            0 => TokenStream::default(),
-            1 => streams.pop().unwrap(),
-            _ => {
-                // We are going to extend the first stream in `streams` with
-                // the elements from the subsequent streams. This requires
-                // using `make_mut()` on the first stream, and in practice this
-                // doesn't cause cloning 99.9% of the time.
-                //
-                // One very common use case is when `streams` has two elements,
-                // where the first stream has any number of elements within
-                // (often 1, but sometimes many more) and the second stream has
-                // a single element within.
-
-                // Determine how much the first stream will be extended.
-                // Needed to avoid quadratic blow up from on-the-fly
-                // reallocations (#57735).
-                let num_appends = streams.iter().skip(1).map(|ts| ts.len()).sum();
-
-                // Get the first stream. If it's `None`, create an empty
-                // stream.
-                let mut iter = streams.drain(..);
-                let mut first_stream_lrc = iter.next().unwrap().0;
-
-                // Append the elements to the first stream, after reserving
-                // space for them.
-                let first_vec_mut = Lrc::make_mut(&mut first_stream_lrc);
-                first_vec_mut.reserve(num_appends);
-                for stream in iter {
-                    first_vec_mut.extend(stream.0.iter().cloned());
-                }
-
-                // Create the final `TokenStream`.
-                TokenStream(first_stream_lrc)
-            }
-        }
-    }
-
     pub fn trees(&self) -> CursorRef<'_> {
         CursorRef::new(self)
     }
@@ -605,7 +566,43 @@ impl TokenStreamBuilder {
     }
 
     pub fn build(self) -> TokenStream {
-        TokenStream::from_streams(self.0)
+        let mut streams = self.0;
+        match streams.len() {
+            0 => TokenStream::default(),
+            1 => streams.pop().unwrap(),
+            _ => {
+                // We are going to extend the first stream in `streams` with
+                // the elements from the subsequent streams. This requires
+                // using `make_mut()` on the first stream, and in practice this
+                // doesn't cause cloning 99.9% of the time.
+                //
+                // One very common use case is when `streams` has two elements,
+                // where the first stream has any number of elements within
+                // (often 1, but sometimes many more) and the second stream has
+                // a single element within.
+
+                // Determine how much the first stream will be extended.
+                // Needed to avoid quadratic blow up from on-the-fly
+                // reallocations (#57735).
+                let num_appends = streams.iter().skip(1).map(|ts| ts.len()).sum();
+
+                // Get the first stream. If it's `None`, create an empty
+                // stream.
+                let mut iter = streams.drain(..);
+                let mut first_stream_lrc = iter.next().unwrap().0;
+
+                // Append the elements to the first stream, after reserving
+                // space for them.
+                let first_vec_mut = Lrc::make_mut(&mut first_stream_lrc);
+                first_vec_mut.reserve(num_appends);
+                for stream in iter {
+                    first_vec_mut.extend(stream.0.iter().cloned());
+                }
+
+                // Create the final `TokenStream`.
+                TokenStream(first_stream_lrc)
+            }
+        }
     }
 }
 
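The comments carried over into `build` reference issue #57735, where appending streams one at a time caused quadratic reallocation. Below is a self-contained sketch of the same reserve-then-extend pattern, using `std::rc::Rc` in place of rustc's `Lrc` and `u32` elements in place of token trees; every name and type in it is illustrative, not a compiler API:

use std::rc::Rc;

// Stand-in for `Lrc<Vec<TokenTree>>`: a shared, cheaply cloned vector.
fn concat_streams(mut streams: Vec<Rc<Vec<u32>>>) -> Rc<Vec<u32>> {
    match streams.len() {
        0 => Rc::new(Vec::new()),
        1 => streams.pop().unwrap(),
        _ => {
            // Reserve the total extra capacity up front so the loop below
            // does at most one reallocation instead of one per appended
            // stream (the quadratic blow-up described in #57735).
            let num_appends: usize = streams.iter().skip(1).map(|s| s.len()).sum();

            let mut iter = streams.drain(..);
            let mut first = iter.next().unwrap();

            // `make_mut` clones the inner Vec only if it is shared;
            // otherwise it mutates in place, mirroring the `Lrc::make_mut`
            // fast path noted in the comments above.
            let first_mut = Rc::make_mut(&mut first);
            first_mut.reserve(num_appends);
            for stream in iter {
                first_mut.extend(stream.iter().cloned());
            }
            first
        }
    }
}

fn main() {
    let a = Rc::new(vec![1, 2, 3]);
    let b = Rc::new(vec![4]);
    assert_eq!(*concat_streams(vec![a, b]), vec![1, 2, 3, 4]);
}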

compiler/rustc_expand/src/tokenstream/tests.rs

Lines changed: 4 additions & 2 deletions

@@ -4,7 +4,6 @@ use rustc_ast::token;
 use rustc_ast::tokenstream::{Spacing, TokenStream, TokenStreamBuilder, TokenTree};
 use rustc_span::create_default_session_globals_then;
 use rustc_span::{BytePos, Span, Symbol};
-use smallvec::smallvec;
 
 fn string_to_ts(string: &str) -> TokenStream {
     string_to_stream(string.to_owned())
@@ -24,7 +23,10 @@ fn test_concat() {
         let test_res = string_to_ts("foo::bar::baz");
         let test_fst = string_to_ts("foo::bar");
         let test_snd = string_to_ts("::baz");
-        let eq_res = TokenStream::from_streams(smallvec![test_fst, test_snd]);
+        let mut builder = TokenStreamBuilder::new();
+        builder.push(test_fst);
+        builder.push(test_snd);
+        let eq_res = builder.build();
         assert_eq!(test_res.trees().count(), 5);
         assert_eq!(eq_res.trees().count(), 5);
         assert_eq!(test_res.eq_unspanned(&eq_res), true);
