Skip to content

Commit ede19c8

Browse files
committed
Safely keep track of pending extensions to the Heap
This makes Heap::extend correct and fully infallible. To make this work, a slight change to the usable size of the last Hole needed to happen. Significant changes: - pending_extend added to HoleList, tracks partial extension amounts - for odd-sized heaps or extensions, the last Hole will only utilize space up to the highest usize-aligned address - the above change forces top to always be aligned to usize - the correctness of `extend` relies on an aligned top pointer, so a debug assertion is added to catch any future code changes - Heap::size() reports usable size for allocations (updated some tests accordingly) - pub fn Heap::top() reports top-most pointer of memory claimed by the Heap, even if those last bytes aren't usable yet. (private field HoleList.top still is always aligned) - documented best values to give to extend - removed ExtendError and try_extend
1 parent c7e3428 commit ede19c8

File tree

3 files changed

+99
-109
lines changed

3 files changed

+99
-109
lines changed

src/hole.rs

Lines changed: 50 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ use core::mem::{align_of, size_of};
44
use core::ptr::null_mut;
55
use core::ptr::NonNull;
66

7-
use crate::{align_up_size, ExtendError};
7+
use crate::{align_down_size, align_up_size};
88

99
use super::align_up;
1010

@@ -13,6 +13,7 @@ pub struct HoleList {
1313
pub(crate) first: Hole, // dummy
1414
pub(crate) bottom: *mut u8,
1515
pub(crate) top: *mut u8,
16+
pub(crate) pending_extend: u8,
1617
}
1718

1819
pub(crate) struct Cursor {
@@ -278,6 +279,7 @@ impl HoleList {
278279
},
279280
bottom: null_mut(),
280281
top: null_mut(),
282+
pending_extend: 0,
281283
}
282284
}
283285

@@ -328,6 +330,11 @@ impl HoleList {
328330
/// alignment of the `hole_addr` pointer, the minimum size is between
329331
/// `2 * size_of::<usize>` and `3 * size_of::<usize>`.
330332
///
333+
/// The usable size for allocations will be truncated to the nearest
334+
/// alignment of `align_of::<usize>`. Any extra bytes left at the end
335+
/// will be reclaimed once sufficient additional space is given to
336+
/// [`extend`][crate::Heap::extend].
337+
///
331338
/// # Safety
332339
///
333340
/// This function is unsafe because it creates a hole at the given `hole_addr`.
@@ -338,7 +345,8 @@ impl HoleList {
338345
assert!(hole_size >= size_of::<Hole>());
339346

340347
let aligned_hole_addr = align_up(hole_addr, align_of::<Hole>());
341-
let aligned_hole_size = hole_size - ((aligned_hole_addr as usize) - (hole_addr as usize));
348+
let requested_hole_size = hole_size - ((aligned_hole_addr as usize) - (hole_addr as usize));
349+
let aligned_hole_size = align_down_size(requested_hole_size, align_of::<Hole>());
342350
assert!(aligned_hole_size >= size_of::<Hole>());
343351

344352
let ptr = aligned_hole_addr as *mut Hole;
@@ -349,15 +357,17 @@ impl HoleList {
349357

350358
assert_eq!(
351359
hole_addr.wrapping_add(hole_size),
352-
aligned_hole_addr.wrapping_add(aligned_hole_size)
360+
aligned_hole_addr.wrapping_add(requested_hole_size)
353361
);
362+
354363
HoleList {
355364
first: Hole {
356365
size: 0,
357366
next: Some(NonNull::new_unchecked(ptr)),
358367
},
359368
bottom: aligned_hole_addr,
360-
top: hole_addr.wrapping_add(hole_size),
369+
top: aligned_hole_addr.wrapping_add(aligned_hole_size),
370+
pending_extend: (requested_hole_size - aligned_hole_size) as u8,
361371
}
362372
}
363373

@@ -443,51 +453,40 @@ impl HoleList {
443453
})
444454
}
445455

446-
// amount previously extended but not big enough to be claimed by a Hole yet
447-
pub(crate) fn pending_extend(&self) -> usize {
448-
// this field is not used by anything else
449-
self.first.size
450-
}
451-
452-
pub(crate) unsafe fn try_extend(&mut self, by: usize) -> Result<(), ExtendError> {
456+
pub(crate) unsafe fn extend(&mut self, by: usize) {
453457
let top = self.top;
454-
if top.is_null() {
455-
// this is the only case where I don't think will need to make/init a new heap...
456-
// since we can't do that from here, maybe we just add this to the
457-
// documentation to not call .extend() on uninitialized Heaps
458-
unreachable!();
459-
}
460-
461-
// join this extend request with any pending (but not yet acted on) extensions
462-
let by = self.pending_extend() + by;
463458

464459
let dead_space = top.align_offset(align_of::<Hole>());
465-
let minimum_extend = dead_space + Self::min_size();
466-
467-
if by < minimum_extend {
468-
self.first.size = by;
460+
debug_assert_eq!(
461+
0, dead_space,
462+
"dead space detected during extend: {} bytes. This means top was unaligned",
463+
dead_space
464+
);
469465

470-
#[cfg(test)]
471-
println!("pending extend size: {} bytes", self.pending_extend());
466+
debug_assert!(
467+
(self.pending_extend as usize) < Self::min_size(),
468+
"pending extend was larger than expected"
469+
);
472470

473-
return Ok(());
474-
}
471+
// join this extend request with any pending (but not yet acted on) extension
472+
let extend_by = self.pending_extend as usize + by;
475473

476-
#[cfg(test)]
477-
if dead_space > 0 {
478-
println!("dead space created during extend: {} bytes", dead_space);
474+
let minimum_extend = Self::min_size();
475+
if extend_by < minimum_extend {
476+
self.pending_extend = extend_by as u8;
477+
return;
479478
}
480479

481-
let aligned_top = align_up(top, align_of::<Hole>());
482-
let layout = Layout::from_size_align(by - dead_space, 1).unwrap();
483-
484-
self.deallocate(NonNull::new_unchecked(aligned_top as *mut u8), layout);
485-
self.top = top.add(by);
480+
// only extend up to another valid boundary
481+
let new_hole_size = align_down_size(extend_by, align_of::<Hole>());
482+
let layout = Layout::from_size_align(new_hole_size, 1).unwrap();
486483

487-
// reset pending resizes
488-
self.first.size = 0;
484+
// instantiate the hole by forcing a deallocation on the new memory
485+
self.deallocate(NonNull::new_unchecked(top as *mut u8), layout);
486+
self.top = top.add(new_hole_size);
489487

490-
Ok(())
488+
// save extra bytes given to extend that weren't aligned to the hole size
489+
self.pending_extend = (extend_by - new_hole_size) as u8;
491490
}
492491
}
493492

@@ -566,7 +565,10 @@ impl Cursor {
566565
// Does hole overlap node?
567566
assert!(
568567
hole_u8.wrapping_add(hole_size) <= node_u8,
569-
"Freed node aliases existing hole! Bad free?",
568+
"Freed node ({:?}) aliases existing hole ({:?}[{}])! Bad free?",
569+
node_u8,
570+
hole_u8,
571+
hole_size,
570572
);
571573

572574
// All good! Let's insert that after.
@@ -692,7 +694,8 @@ fn deallocate(list: &mut HoleList, addr: *mut u8, size: usize) {
692694
#[cfg(test)]
693695
pub mod test {
694696
use super::HoleList;
695-
use crate::Heap;
697+
use crate::{align_down_size, Heap};
698+
use core::mem::size_of;
696699
use std::{alloc::Layout, convert::TryInto, mem::MaybeUninit, prelude::v1::*, ptr::NonNull};
697700

698701
#[repr(align(128))]
@@ -715,8 +718,8 @@ pub mod test {
715718
let assumed_location = data.as_mut_ptr().cast();
716719

717720
let heap = Heap::from_slice(data);
718-
assert!(heap.bottom() == assumed_location);
719-
assert!(heap.size() == HEAP_SIZE);
721+
assert_eq!(heap.bottom(), assumed_location);
722+
assert_eq!(heap.size(), align_down_size(HEAP_SIZE, size_of::<usize>()));
720723
heap
721724
}
722725

@@ -727,7 +730,10 @@ pub mod test {
727730
// This is the "dummy" node
728731
assert_eq!(curs.previous().size, 0);
729732
// This is the "full" heap
730-
assert_eq!(curs.current().size, 1000);
733+
assert_eq!(
734+
curs.current().size,
735+
align_down_size(1000, size_of::<usize>())
736+
);
731737
// There is no other hole
732738
assert!(curs.next().is_none());
733739
}

src/lib.rs

Lines changed: 36 additions & 42 deletions
Original file line numberDiff line numberDiff line change
@@ -67,6 +67,11 @@ impl Heap {
6767
/// alignment of the `hole_addr` pointer, the minimum size is between
6868
/// `2 * size_of::<usize>` and `3 * size_of::<usize>`.
6969
///
70+
/// The usable size for allocations will be truncated to the nearest
71+
/// alignment of `align_of::<usize>`. Any extra bytes left at the end
72+
/// will be reclaimed once sufficient additional space is given to
73+
/// [`extend`][Heap::extend].
74+
///
7075
/// # Safety
7176
///
7277
/// This function must be called at most once and must only be used on an
@@ -94,6 +99,11 @@ impl Heap {
9499
/// deallocation (e.g. a simple bump allocator). Then the overlaid linked-list-allocator can
95100
/// provide memory reclamation.
96101
///
102+
/// The usable size for allocations will be truncated to the nearest
103+
/// alignment of `align_of::<usize>`. Any extra bytes left at the end
104+
/// will be reclaimed once sufficient additional space is given to
105+
/// [`extend`][Heap::extend].
106+
///
97107
/// # Panics
98108
///
99109
/// This method panics if the heap is already initialized.
@@ -126,6 +136,11 @@ impl Heap {
126136
/// alignment of the `hole_addr` pointer, the minimum size is between
127137
/// `2 * size_of::<usize>` and `3 * size_of::<usize>`.
128138
///
139+
/// The usable size for allocations will be truncated to the nearest
140+
/// alignment of `align_of::<usize>`. Any extra bytes left at the end
141+
/// will be reclaimed once sufficient additional space is given to
142+
/// [`extend`][Heap::extend].
143+
///
129144
/// # Safety
130145
///
131146
/// The bottom address must be valid and the memory in the
@@ -197,13 +212,21 @@ impl Heap {
197212
}
198213

199214
/// Returns the size of the heap.
215+
///
216+
/// This is the size the heap is using for allocations, not necessarily the
217+
/// total amount of bytes given to the heap. To determine the exact memory
218+
/// boundaries, use [`bottom`][Self::bottom] and [`top`][Self::top].
200219
pub fn size(&self) -> usize {
201-
(self.top() as usize) - (self.bottom() as usize)
220+
unsafe { self.holes.top.offset_from(self.holes.bottom) as usize }
202221
}
203222

204-
/// Return the top address of the heap
223+
/// Return the top address of the heap.
224+
///
225+
/// Note: The heap may choose to not use bytes at the end for allocations
226+
/// until there is enough room for metadata, but it still retains ownership
227+
/// over memory from [`bottom`][Self::bottom] to the address returned.
205228
pub fn top(&self) -> *mut u8 {
206-
self.holes.top
229+
unsafe { self.holes.top.add(self.holes.pending_extend as usize) }
207230
}
208231

209232
/// Returns the size of the used part of the heap
@@ -218,53 +241,24 @@ impl Heap {
218241

219242
/// Extends the size of the heap by creating a new hole at the end.
220243
///
221-
/// Panics when the heap extension fails. Use [`try_extend`] to handle potential errors.
244+
/// Small extensions are not guaranteed to grow the usable size of
245+
/// the heap. In order to grow the Heap most effectively, extend by
246+
/// at least `2 * size_of::<usize>`, keeping the amount a multiple of
247+
/// `size_of::<usize>`.
222248
///
223249
/// # Safety
224250
///
225251
/// The amount of data given in `by` MUST exist directly after the original
226252
/// range of data provided when constructing the [Heap]. The additional data
227253
/// must have the same lifetime of the original range of data.
228-
pub unsafe fn extend(&mut self, by: usize) {
229-
self.holes.try_extend(by).expect("heap extension failed");
230-
}
231-
232-
/// Tries to extend the size of the heap by creating a new hole at the end.
233254
///
234-
/// # Safety
255+
/// Even if this operation doesn't increase the [usable size][`Self::size`]
256+
/// by exactly `by` bytes, those bytes are still owned by the Heap for
257+
/// later use.
235258
///
236-
/// The amount of data given in `by` MUST exist directly after the original
237-
/// range of data provided when constructing the [Heap]. The additional data
238-
/// must have the same lifetime of the original range of data.
239-
pub unsafe fn try_extend(&mut self, by: usize) -> Result<(), ExtendError> {
240-
self.holes.try_extend(by)
241-
}
242-
}
243-
244-
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
245-
pub enum ExtendError {
246-
/// The given extension size was not large enough to store the necessary metadata.
247-
SizeTooSmall,
248-
/// The given extension size was must be a multiple of 16.
249-
OddSize,
250-
/// The heap size must be a multiple of 16, otherwise extension is not possible.
251-
OddHeapSize,
252-
/// Extending an empty heap is not possible.
253-
EmptyHeap,
254-
}
255-
256-
impl core::fmt::Display for ExtendError {
257-
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
258-
f.write_str(match self {
259-
ExtendError::SizeTooSmall => {
260-
"heap extension size was not large enough to store the necessary metadata"
261-
}
262-
ExtendError::OddSize => "heap extension size is not a multiple of 16",
263-
ExtendError::OddHeapSize => {
264-
"heap extension not possible because heap size is not a multiple of 16"
265-
}
266-
ExtendError::EmptyHeap => "tried to extend an emtpy heap",
267-
})
259+
/// Calling this method on an uninitialized Heap is undefined behavior.
260+
pub unsafe fn extend(&mut self, by: usize) {
261+
self.holes.extend(by);
268262
}
269263
}
270264

src/test.rs

Lines changed: 13 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -23,8 +23,8 @@ fn new_heap() -> Heap {
2323
let assumed_location = data.as_mut_ptr().cast();
2424

2525
let heap = Heap::from_slice(data);
26-
assert!(heap.bottom() == assumed_location);
27-
assert!(heap.size() == HEAP_SIZE);
26+
assert_eq!(heap.bottom(), assumed_location);
27+
assert_eq!(heap.size(), align_down_size(HEAP_SIZE, size_of::<usize>()));
2828
heap
2929
}
3030

@@ -37,8 +37,8 @@ fn new_max_heap() -> Heap {
3737

3838
// Unsafe so that we have provenance over the whole allocation.
3939
let heap = unsafe { Heap::new(start_ptr, HEAP_SIZE) };
40-
assert!(heap.bottom() == start_ptr);
41-
assert!(heap.size() == HEAP_SIZE);
40+
assert_eq!(heap.bottom(), start_ptr);
41+
assert_eq!(heap.size(), HEAP_SIZE);
4242
heap
4343
}
4444

@@ -196,11 +196,12 @@ fn allocate_many_size_aligns() {
196196
const STRATS: Range<usize> = 0..4;
197197

198198
let mut heap = new_heap();
199-
assert_eq!(heap.size(), 1000);
199+
let aligned_heap_size = align_down_size(1000, size_of::<usize>());
200+
assert_eq!(heap.size(), aligned_heap_size);
200201

201202
heap.holes.debug();
202203

203-
let max_alloc = Layout::from_size_align(1000, 1).unwrap();
204+
let max_alloc = Layout::from_size_align(aligned_heap_size, 1).unwrap();
204205
let full = heap.allocate_first_fit(max_alloc).unwrap();
205206
unsafe {
206207
heap.deallocate(full, max_alloc);
@@ -507,7 +508,7 @@ fn small_heap_extension() {
507508
unsafe {
508509
let mut heap = Heap::new(HEAP.as_mut_ptr().cast(), 32);
509510
heap.extend(1);
510-
assert_eq!(1, heap.holes.pending_extend());
511+
assert_eq!(1, heap.holes.pending_extend);
511512
}
512513
}
513514

@@ -519,20 +520,8 @@ fn oddly_sized_heap_extension() {
519520
unsafe {
520521
let mut heap = Heap::new(HEAP.as_mut_ptr().cast(), 16);
521522
heap.extend(17);
522-
assert_eq!(0, heap.holes.pending_extend());
523-
assert_eq!(16 + 17, heap.size());
524-
}
525-
}
526-
527-
/// Ensures that heap extension fails when trying to extend an empty heap.
528-
///
529-
/// Empty heaps always have a start pointer of 0.
530-
#[test]
531-
#[should_panic]
532-
fn extend_empty() {
533-
unsafe {
534-
let mut heap = Heap::empty();
535-
heap.extend(16);
523+
assert_eq!(1, heap.holes.pending_extend);
524+
assert_eq!(16 + 16, heap.size());
536525
}
537526
}
538527

@@ -546,10 +535,11 @@ fn extend_odd_size() {
546535
static mut HEAP: [u64; 5] = [0; 5];
547536
unsafe {
548537
let mut heap = Heap::new(HEAP.as_mut_ptr().cast(), 17);
538+
assert_eq!(1, heap.holes.pending_extend);
549539
heap.extend(16);
550-
assert_eq!(16, heap.holes.pending_extend());
540+
assert_eq!(1, heap.holes.pending_extend);
551541
heap.extend(15);
552-
assert_eq!(0, heap.holes.pending_extend());
542+
assert_eq!(0, heap.holes.pending_extend);
553543
assert_eq!(17 + 16 + 15, heap.size());
554544
}
555545
}

0 commit comments

Comments
 (0)