
Commit 5f1b1ec

Rework Arena code
1 parent: 0d3160c


src/libarena/lib.rs

Lines changed: 82 additions & 85 deletions
@@ -50,12 +50,11 @@ use std::ptr;
 use alloc::heap;
 use alloc::raw_vec::RawVec;
 
-// The way arena uses arrays is really deeply awful. The arrays are
-// allocated, and have capacities reserved, but the fill for the array
-// will always stay at 0.
 struct Chunk {
     data: RawVec<u8>,
+    /// Index of the first unused byte.
     fill: Cell<usize>,
+    /// Indicates whether objects with destructors are stored in this chunk.
     is_copy: Cell<bool>,
 }
 
@@ -75,12 +74,37 @@ impl Chunk {
     unsafe fn as_ptr(&self) -> *const u8 {
         self.data.ptr()
     }
+
+    // Walk down a chunk, running the destructors for any objects stored
+    // in it.
+    unsafe fn destroy(&self) {
+        let mut idx = 0;
+        let buf = self.as_ptr();
+        let fill = self.fill.get();
+
+        while idx < fill {
+            let tydesc_data = buf.offset(idx as isize) as *const usize;
+            let (tydesc, is_done) = un_bitpack_tydesc_ptr(*tydesc_data);
+            let (size, align) = ((*tydesc).size, (*tydesc).align);
+
+            let after_tydesc = idx + mem::size_of::<*const TyDesc>();
+
+            let start = round_up(after_tydesc, align);
+
+            if is_done {
+                ((*tydesc).drop_glue)(buf.offset(start as isize) as *const i8);
+            }
+
+            // Find where the next tydesc lives
+            idx = round_up(start + size, mem::align_of::<*const TyDesc>());
+        }
+    }
 }
 
 /// A slower reflection-based arena that can allocate objects of any type.
 ///
-/// This arena uses `Vec<u8>` as a backing store to allocate objects from. For
-/// each allocated object, the arena stores a pointer to the type descriptor
+/// This arena uses `RawVec<u8>` as a backing store to allocate objects from.
+/// For each allocated object, the arena stores a pointer to the type descriptor
 /// followed by the object (potentially with alignment padding after each
 /// element). When the arena is destroyed, it iterates through all of its
 /// chunks, and uses the tydesc information to trace through the objects,
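
Editorial note: the doc comment above describes how each object is stored behind its tydesc pointer, with padding for alignment. The standalone sketch below (not part of the patch; the `fill`, `size`, and `align` numbers are made-up examples) reproduces that offset arithmetic with the file's own `round_up` helper.

fn round_up(base: usize, align: usize) -> usize {
    (base.checked_add(align - 1)).unwrap() & !(align - 1)
}

fn main() {
    let ptr_size = std::mem::size_of::<*const ()>();       // stand-in for *const TyDesc
    let ptr_align = std::mem::align_of::<*const ()>();
    let (fill, size, align) = (24usize, 12usize, 8usize);  // hypothetical object

    let tydesc_start = fill;                               // tydesc pointer is stored first
    let start = round_up(tydesc_start + ptr_size, align);  // then the aligned object itself
    let end = round_up(start + size, ptr_align);           // next tydesc must be pointer-aligned
    println!("tydesc at {}, object at {}..{}, new fill = {}",
             tydesc_start, start, start + size, end);
}
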
@@ -127,10 +151,10 @@ impl<'a> Arena<'a> {
 impl<'longer_than_self> Drop for Arena<'longer_than_self> {
     fn drop(&mut self) {
         unsafe {
-            destroy_chunk(&*self.head.borrow());
+            self.head.borrow().destroy();
             for chunk in self.chunks.borrow().iter() {
                 if !chunk.is_copy.get() {
-                    destroy_chunk(chunk);
+                    chunk.destroy();
                 }
             }
         }
@@ -142,31 +166,6 @@ fn round_up(base: usize, align: usize) -> usize {
     (base.checked_add(align - 1)).unwrap() & !(align - 1)
 }
 
-// Walk down a chunk, running the destructors for any objects stored
-// in it.
-unsafe fn destroy_chunk(chunk: &Chunk) {
-    let mut idx = 0;
-    let buf = chunk.as_ptr();
-    let fill = chunk.fill.get();
-
-    while idx < fill {
-        let tydesc_data = buf.offset(idx as isize) as *const usize;
-        let (tydesc, is_done) = un_bitpack_tydesc_ptr(*tydesc_data);
-        let (size, align) = ((*tydesc).size, (*tydesc).align);
-
-        let after_tydesc = idx + mem::size_of::<*const TyDesc>();
-
-        let start = round_up(after_tydesc, align);
-
-        if is_done {
-            ((*tydesc).drop_glue)(buf.offset(start as isize) as *const i8);
-        }
-
-        // Find where the next tydesc lives
-        idx = round_up(start + size, mem::align_of::<*const TyDesc>());
-    }
-}
-
 // We encode whether the object a tydesc describes has been
 // initialized in the arena in the low bit of the tydesc pointer. This
 // is necessary in order to properly do cleanup if a panic occurs
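
Editorial note: the context lines above describe the low-bit tagging of the tydesc pointer. Here is a minimal sketch of that scheme (not part of the patch), with a plain usize standing in for the real *const TyDesc; it relies on the pointer being at least 2-byte aligned, so its lowest bit is always free.

// Pack the "object fully initialized" flag into the low bit of the pointer value.
fn bitpack_tydesc_ptr(p: usize, is_done: bool) -> usize {
    p | (is_done as usize)
}

// Recover the original pointer value and the flag.
fn un_bitpack_tydesc_ptr(p: usize) -> (usize, bool) {
    (p & !1, p & 1 == 1)
}

fn main() {
    let fake_tydesc = 0x1000usize; // stand-in for an aligned *const TyDesc
    let packed = bitpack_tydesc_ptr(fake_tydesc, true);
    assert_eq!(un_bitpack_tydesc_ptr(packed), (fake_tydesc, true));
}
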
@@ -183,6 +182,9 @@ fn un_bitpack_tydesc_ptr(p: usize) -> (*const TyDesc, bool) {
 // HACK(eddyb) TyDesc replacement using a trait object vtable.
 // This could be replaced in the future with a custom DST layout,
 // or `&'static (drop_glue, size, align)` created by a `const fn`.
+// Requirements:
+// * rvalue promotion (issue #1056)
+// * mem::{size_of, align_of} must be const fns
 struct TyDesc {
     drop_glue: fn(*const i8),
     size: usize,
@@ -198,45 +200,52 @@ impl<T: ?Sized> AllTypes for T {}
 unsafe fn get_tydesc<T>() -> *const TyDesc {
     use std::raw::TraitObject;
 
-    let ptr = &*(1 as *const T);
+    let ptr = &*(heap::EMPTY as *const T);
 
     // Can use any trait that is implemented for all types.
     let obj = mem::transmute::<&AllTypes, TraitObject>(ptr);
     obj.vtable as *const TyDesc
 }
 
 impl<'longer_than_self> Arena<'longer_than_self> {
-    #[inline]
-    fn chunk_size(&self) -> usize {
-        self.copy_head.borrow().capacity()
-    }
-
-    // Functions for the POD part of the arena
+    // Grows a given chunk and returns `false`, or replaces it with a bigger
+    // chunk and returns `true`.
+    // This method is shared by both parts of the arena.
     #[cold]
-    fn alloc_copy_grow(&self, n_bytes: usize, align: usize) -> *const u8 {
-        // Allocate a new chunk.
-        let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
-        let new_chunk = Chunk::new((new_min_chunk_size + 1).next_power_of_two(), true);
-        let mut copy_head = self.copy_head.borrow_mut();
-        let old_chunk = mem::replace(&mut *copy_head, new_chunk);
-        self.chunks.borrow_mut().push(old_chunk);
-
-        self.alloc_copy_inner(n_bytes, align)
+    fn alloc_grow(&self, head: &mut Chunk, used_cap: usize, n_bytes: usize) -> bool {
+        if head.data.reserve_in_place(used_cap, n_bytes) {
+            // In-place reallocation succeeded.
+            false
+        } else {
+            // Allocate a new chunk.
+            let new_min_chunk_size = cmp::max(n_bytes, head.capacity());
+            let new_chunk = Chunk::new((new_min_chunk_size + 1).next_power_of_two(), false);
+            let old_chunk = mem::replace(head, new_chunk);
+            if old_chunk.fill.get() != 0 {
+                self.chunks.borrow_mut().push(old_chunk);
+            }
+            true
+        }
     }
 
+    // Functions for the copyable part of the arena.
+
     #[inline]
     fn alloc_copy_inner(&self, n_bytes: usize, align: usize) -> *const u8 {
-        let start = round_up(self.copy_head.borrow().fill.get(), align);
-        let chunk_size = self.chunk_size();
-
-        let end = start + n_bytes;
-        if end > chunk_size {
-            if !self.copy_head.borrow_mut().data.reserve_in_place(start, n_bytes) {
-                return self.alloc_copy_grow(n_bytes, align);
+        let mut copy_head = self.copy_head.borrow_mut();
+        let fill = copy_head.fill.get();
+        let mut start = round_up(fill, align);
+        let mut end = start + n_bytes;
+
+        if end > copy_head.capacity() {
+            if self.alloc_grow(&mut *copy_head, fill, end - fill) {
+                // Continuing with a newly allocated chunk
+                start = 0;
+                end = n_bytes;
+                copy_head.is_copy.set(true);
            }
         }
 
-        let copy_head = self.copy_head.borrow();
         copy_head.fill.set(end);
 
         unsafe { copy_head.as_ptr().offset(start as isize) }
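
Editorial note: the hunk above introduces alloc_grow, which either grows the current chunk in place (returning false) or swaps in a bigger chunk (returning true, so the caller restarts its offsets). The simplified, standalone sketch below (not part of the patch) illustrates that contract with a plain Vec<u8> standing in for RawVec.

use std::mem;

struct Chunk {
    data: Vec<u8>,
    fill: usize,
}

fn alloc_grow(head: &mut Chunk, retired: &mut Vec<Chunk>, n_bytes: usize) -> bool {
    if head.data.capacity() - head.fill >= n_bytes {
        // Enough spare capacity already (the real code instead asks RawVec to
        // reserve_in_place past the used portion).
        false
    } else {
        // Replace the head with a bigger chunk and retire the old one,
        // mirroring the mem::replace + chunks.push in the patch; empty old
        // chunks are simply dropped.
        let new_cap = (head.data.capacity().max(n_bytes) + 1).next_power_of_two();
        let old = mem::replace(head, Chunk { data: Vec::with_capacity(new_cap), fill: 0 });
        if old.fill != 0 {
            retired.push(old);
        }
        true
    }
}

fn main() {
    let mut head = Chunk { data: Vec::with_capacity(8), fill: 0 };
    head.fill = head.data.capacity(); // pretend the chunk is completely full
    let mut retired = Vec::new();
    // No spare room, so this call swaps in a larger chunk and returns true.
    assert!(alloc_grow(&mut head, &mut retired, 16));
    assert_eq!(retired.len(), 1);
    assert!(head.data.capacity() >= 16);
}
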
@@ -254,40 +263,28 @@ impl<'longer_than_self> Arena<'longer_than_self> {
         }
     }
 
-    // Functions for the non-POD part of the arena
-    fn alloc_noncopy_grow(&self, n_bytes: usize, align: usize) -> (*const u8, *const u8) {
-        // Allocate a new chunk.
-        let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
-        let new_chunk = Chunk::new((new_min_chunk_size + 1).next_power_of_two(), false);
-        let mut head = self.head.borrow_mut();
-        let old_chunk = mem::replace(&mut *head, new_chunk);
-        self.chunks.borrow_mut().push(old_chunk);
-
-        self.alloc_noncopy_inner(n_bytes, align)
-    }
+    // Functions for the non-copyable part of the arena.
 
     #[inline]
     fn alloc_noncopy_inner(&self, n_bytes: usize, align: usize) -> (*const u8, *const u8) {
-        // Be careful to not maintain any `head` borrows active, because
-        // `alloc_noncopy_grow` borrows it mutably.
-        let (start, end, tydesc_start, head_capacity) = {
-            let head = self.head.borrow();
-            let fill = head.fill.get();
-
-            let tydesc_start = fill;
-            let after_tydesc = fill + mem::size_of::<*const TyDesc>();
-            let start = round_up(after_tydesc, align);
-            let end = start + n_bytes;
-
-            (start, end, tydesc_start, head.capacity())
-        };
-
-        if end > head_capacity {
-            return self.alloc_noncopy_grow(n_bytes, align);
+        let mut head = self.head.borrow_mut();
+        let fill = head.fill.get();
+
+        let mut tydesc_start = fill;
+        let after_tydesc = fill + mem::size_of::<*const TyDesc>();
+        let mut start = round_up(after_tydesc, align);
+        let mut end = round_up(start + n_bytes, mem::align_of::<*const TyDesc>());
+
+        if end > head.capacity() {
+            if self.alloc_grow(&mut *head, tydesc_start, end - tydesc_start) {
+                // Continuing with a newly allocated chunk
+                tydesc_start = 0;
+                start = round_up(mem::size_of::<*const TyDesc>(), align);
+                end = round_up(start + n_bytes, mem::align_of::<*const TyDesc>());
+            }
         }
 
-        let head = self.head.borrow();
-        head.fill.set(round_up(end, mem::align_of::<*const TyDesc>()));
+        head.fill.set(end);
 
         unsafe {
             let buf = head.as_ptr();
