
Commit 0d3160c

Rework Arena structure
Implements in-place growth. Removes the use of Rc within Arena. Closes #22847
1 parent d42693a commit 0d3160c
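
In outline, the diff below drops Chunk's shared Rc<RefCell<Vec<u8>>> buffer in favour of a RawVec<u8> that each Chunk owns directly. That removes the reference counting, and it lets the copy-allocation path first ask the allocator to extend the current chunk in place (RawVec::reserve_in_place) before falling back to allocating a replacement chunk.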


src/libarena/lib.rs

Lines changed: 36 additions & 34 deletions
@@ -29,13 +29,11 @@
        test(no_crate_inject, attr(deny(warnings))))]

 #![feature(alloc)]
-#![feature(box_syntax)]
 #![feature(core_intrinsics)]
 #![feature(drop_in_place)]
-#![feature(raw)]
 #![feature(heap_api)]
-#![feature(oom)]
 #![feature(raw)]
+#![feature(heap_api)]
 #![feature(staged_api)]
 #![feature(dropck_parametricity)]
 #![cfg_attr(test, feature(test))]
@@ -48,31 +46,34 @@ use std::intrinsics
 use std::marker::{PhantomData, Send};
 use std::mem;
 use std::ptr;
-use std::raw;
-use std::raw::Repr;
-use std::rc::Rc;
-use std::slice;

 use alloc::heap;
 use alloc::raw_vec::RawVec;

 // The way arena uses arrays is really deeply awful. The arrays are
 // allocated, and have capacities reserved, but the fill for the array
 // will always stay at 0.
-#[derive(Clone, PartialEq)]
 struct Chunk {
-    data: Rc<RefCell<Vec<u8>>>,
+    data: RawVec<u8>,
     fill: Cell<usize>,
     is_copy: Cell<bool>,
 }

 impl Chunk {
+    fn new(size: usize, is_copy: bool) -> Chunk {
+        Chunk {
+            data: RawVec::with_capacity(size),
+            fill: Cell::new(0),
+            is_copy: Cell::new(is_copy),
+        }
+    }
+
     fn capacity(&self) -> usize {
-        self.data.borrow().capacity()
+        self.data.cap()
     }

     unsafe fn as_ptr(&self) -> *const u8 {
-        self.data.borrow().as_ptr()
+        self.data.ptr()
     }
 }

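Note on the hunk above: RawVec<u8> is a uniquely owned buffer and implements neither Clone nor PartialEq, so the old #[derive(Clone, PartialEq)] on Chunk has to go, and a full chunk can no longer be retired by cloning an Rc handle. The grow paths later in the diff instead move the old chunk out of the RefCell with mem::replace. A minimal, self-contained sketch of that swap-and-retire pattern, with Vec<u8> standing in for the unstable alloc::raw_vec::RawVec<u8> and the Arena trimmed down to the two relevant fields (not the crate's full API):

use std::cell::{Cell, RefCell};
use std::mem;

// Trimmed-down Chunk: it uniquely owns its buffer, so it can be moved but not cloned.
struct Chunk {
    data: Vec<u8>, // the real patch stores a RawVec<u8> here
    fill: Cell<usize>,
}

impl Chunk {
    fn new(size: usize) -> Chunk {
        Chunk { data: Vec::with_capacity(size), fill: Cell::new(0) }
    }

    fn capacity(&self) -> usize {
        self.data.capacity()
    }
}

struct Arena {
    copy_head: RefCell<Chunk>,
    chunks: RefCell<Vec<Chunk>>,
}

impl Arena {
    // Same shape as the reworked alloc_copy_grow: build the replacement chunk,
    // swap it in through the RefCell, and push the retired chunk by value.
    fn grow(&self, n_bytes: usize) {
        let new_chunk = Chunk::new((n_bytes.max(self.copy_head.borrow().capacity()) + 1).next_power_of_two());
        let mut copy_head = self.copy_head.borrow_mut();
        let old_chunk = mem::replace(&mut *copy_head, new_chunk);
        self.chunks.borrow_mut().push(old_chunk);
    }
}

fn main() {
    let arena = Arena {
        copy_head: RefCell::new(Chunk::new(32)),
        chunks: RefCell::new(Vec::new()),
    };
    arena.grow(64);
    assert!(arena.copy_head.borrow().capacity() >= 65);
    assert_eq!(arena.chunks.borrow().len(), 1);
}
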
@@ -115,22 +116,14 @@ impl<'a> Arena<'a> {
     /// Allocates a new Arena with `initial_size` bytes preallocated.
     pub fn new_with_size(initial_size: usize) -> Arena<'a> {
         Arena {
-            head: RefCell::new(chunk(initial_size, false)),
-            copy_head: RefCell::new(chunk(initial_size, true)),
+            head: RefCell::new(Chunk::new(initial_size, false)),
+            copy_head: RefCell::new(Chunk::new(initial_size, true)),
             chunks: RefCell::new(Vec::new()),
             _marker: PhantomData,
         }
     }
 }

-fn chunk(size: usize, is_copy: bool) -> Chunk {
-    Chunk {
-        data: Rc::new(RefCell::new(Vec::with_capacity(size))),
-        fill: Cell::new(0),
-        is_copy: Cell::new(is_copy),
-    }
-}
-
 impl<'longer_than_self> Drop for Arena<'longer_than_self> {
     fn drop(&mut self) {
         unsafe {
@@ -165,8 +158,6 @@ unsafe fn destroy_chunk(chunk: &Chunk) {

         let start = round_up(after_tydesc, align);

-        // debug!("freeing object: idx = {}, size = {}, align = {}, done = {}",
-        //        start, size, align, is_done);
         if is_done {
             ((*tydesc).drop_glue)(buf.offset(start as isize) as *const i8);
         }
@@ -215,28 +206,34 @@ unsafe fn get_tydesc<T>() -> *const TyDesc {
 }

 impl<'longer_than_self> Arena<'longer_than_self> {
+    #[inline]
     fn chunk_size(&self) -> usize {
         self.copy_head.borrow().capacity()
     }

     // Functions for the POD part of the arena
+    #[cold]
     fn alloc_copy_grow(&self, n_bytes: usize, align: usize) -> *const u8 {
         // Allocate a new chunk.
         let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
-        self.chunks.borrow_mut().push(self.copy_head.borrow().clone());
-
-        *self.copy_head.borrow_mut() = chunk((new_min_chunk_size + 1).next_power_of_two(), true);
+        let new_chunk = Chunk::new((new_min_chunk_size + 1).next_power_of_two(), true);
+        let mut copy_head = self.copy_head.borrow_mut();
+        let old_chunk = mem::replace(&mut *copy_head, new_chunk);
+        self.chunks.borrow_mut().push(old_chunk);

         self.alloc_copy_inner(n_bytes, align)
     }

     #[inline]
     fn alloc_copy_inner(&self, n_bytes: usize, align: usize) -> *const u8 {
         let start = round_up(self.copy_head.borrow().fill.get(), align);
+        let chunk_size = self.chunk_size();

         let end = start + n_bytes;
-        if end > self.chunk_size() {
-            return self.alloc_copy_grow(n_bytes, align);
+        if end > chunk_size {
+            if !self.copy_head.borrow_mut().data.reserve_in_place(start, n_bytes) {
+                return self.alloc_copy_grow(n_bytes, align);
+            }
         }

         let copy_head = self.copy_head.borrow();
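
This hunk is where the in-place growth from the commit message happens: when the aligned request no longer fits, alloc_copy_inner first asks RawVec::reserve_in_place (which reports whether the allocator could enlarge the buffer without moving it) and only falls back to the now-#[cold] alloc_copy_grow when that fails. A self-contained sketch of that control flow, with a hand-written try_extend_in_place stub standing in for the unstable RawVec::reserve_in_place and plain usize offsets instead of raw pointers:

use std::cell::{Cell, RefCell};
use std::cmp;
use std::mem;

fn round_up(base: usize, align: usize) -> usize {
    (base + align - 1) & !(align - 1)
}

struct Chunk {
    capacity: Cell<usize>, // plays the role of RawVec::cap()
    fill: Cell<usize>,
}

impl Chunk {
    // Stand-in for RawVec::reserve_in_place(used, extra): report whether the
    // current allocation could be enlarged without moving it. Pretending the
    // allocator always succeeds keeps the fast path exercised below.
    fn try_extend_in_place(&self, used: usize, extra: usize) -> bool {
        self.capacity.set(used + extra);
        true
    }
}

struct Arena {
    copy_head: RefCell<Chunk>,
    chunks: RefCell<Vec<Chunk>>,
}

impl Arena {
    #[cold]
    fn alloc_copy_grow(&self, n_bytes: usize, align: usize) -> usize {
        // Cold path: swap in a bigger chunk and retire the old one, as in the
        // sketch after the Chunk hunk above.
        let new_cap = (cmp::max(n_bytes, self.copy_head.borrow().capacity.get()) + 1).next_power_of_two();
        let new_chunk = Chunk { capacity: Cell::new(new_cap), fill: Cell::new(0) };
        let old_chunk = mem::replace(&mut *self.copy_head.borrow_mut(), new_chunk);
        self.chunks.borrow_mut().push(old_chunk);
        self.alloc_copy_inner(n_bytes, align)
    }

    #[inline]
    fn alloc_copy_inner(&self, n_bytes: usize, align: usize) -> usize {
        let start = round_up(self.copy_head.borrow().fill.get(), align);
        let end = start + n_bytes;
        if end > self.copy_head.borrow().capacity.get() {
            // New fast path: try to grow the current chunk in place first ...
            if !self.copy_head.borrow().try_extend_in_place(start, n_bytes) {
                // ... and only allocate a replacement chunk if that fails.
                return self.alloc_copy_grow(n_bytes, align);
            }
        }
        let head = self.copy_head.borrow();
        head.fill.set(end);
        start // byte offset of the new object inside the current chunk
    }
}

fn main() {
    let arena = Arena {
        copy_head: RefCell::new(Chunk { capacity: Cell::new(8), fill: Cell::new(0) }),
        chunks: RefCell::new(Vec::new()),
    };
    let offset = arena.alloc_copy_inner(16, 8); // does not fit, so it is extended in place
    assert_eq!(offset, 0);
    assert!(arena.chunks.borrow().is_empty()); // no replacement chunk was needed
}
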
@@ -261,9 +258,10 @@ impl<'longer_than_self> Arena<'longer_than_self> {
     fn alloc_noncopy_grow(&self, n_bytes: usize, align: usize) -> (*const u8, *const u8) {
         // Allocate a new chunk.
         let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
-        self.chunks.borrow_mut().push(self.head.borrow().clone());
-
-        *self.head.borrow_mut() = chunk((new_min_chunk_size + 1).next_power_of_two(), false);
+        let new_chunk = Chunk::new((new_min_chunk_size + 1).next_power_of_two(), false);
+        let mut head = self.head.borrow_mut();
+        let old_chunk = mem::replace(&mut *head, new_chunk);
+        self.chunks.borrow_mut().push(old_chunk);

         self.alloc_noncopy_inner(n_bytes, align)
     }
@@ -606,7 +604,11 @@ mod tests {
     #[bench]
     pub fn bench_copy_nonarena(b: &mut Bencher) {
         b.iter(|| {
-            let _: Box<_> = box Point { x: 1, y: 2, z: 3 };
+            let _: Box<_> = Box::new(Point {
+                x: 1,
+                y: 2,
+                z: 3
+            });
         })
     }

@@ -647,10 +649,10 @@ mod tests {
     #[bench]
     pub fn bench_noncopy_nonarena(b: &mut Bencher) {
         b.iter(|| {
-            let _: Box<_> = box Noncopy {
+            let _: Box<_> = Box::new(Noncopy {
                 string: "hello world".to_string(),
                 array: vec![1, 2, 3, 4, 5],
-            };
+            });
         })
     }

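The two benchmark hunks above are the counterpart of dropping #![feature(box_syntax)] in the first hunk: box EXPR is unstable syntax, while Box::new(EXPR) heap-allocates the same value on stable Rust, so the benchmarks no longer need the feature gate.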