Commit 2f12b28

Restore dealloc API
1 parent c6f8253 commit 2f12b28

File tree

3 files changed: +140 -140 lines changed


src/hole.rs

Lines changed: 115 additions & 115 deletions
@@ -317,37 +317,37 @@ impl HoleList {
         }
     }
 
-    // /// Frees the allocation given by `ptr` and `layout`.
-    // ///
-    // /// `ptr` must be a pointer returned by a call to the [`allocate_first_fit`] function with
-    // /// identical layout. Undefined behavior may occur for invalid arguments.
-    // /// The function performs exactly the same layout adjustments as [`allocate_first_fit`] and
-    // /// returns the aligned layout.
-    // ///
-    // /// This function walks the list and inserts the given block at the correct place. If the freed
-    // /// block is adjacent to another free block, the blocks are merged again.
-    // /// This operation is in `O(n)` since the list needs to be sorted by address.
-    // ///
-    // /// [`allocate_first_fit`]: HoleList::allocate_first_fit
-    // pub unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) -> Layout {
-    //     let aligned_layout = Self::align_layout(layout);
-    //     deallocate(&mut self.first, ptr.as_ptr(), aligned_layout.size());
-    //     aligned_layout
-    // }
+    /// Frees the allocation given by `ptr` and `layout`.
+    ///
+    /// `ptr` must be a pointer returned by a call to the [`allocate_first_fit`] function with
+    /// identical layout. Undefined behavior may occur for invalid arguments.
+    /// The function performs exactly the same layout adjustments as [`allocate_first_fit`] and
+    /// returns the aligned layout.
+    ///
+    /// This function walks the list and inserts the given block at the correct place. If the freed
+    /// block is adjacent to another free block, the blocks are merged again.
+    /// This operation is in `O(n)` since the list needs to be sorted by address.
+    ///
+    /// [`allocate_first_fit`]: HoleList::allocate_first_fit
+    pub unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) -> Layout {
+        let aligned_layout = Self::align_layout(layout);
+        deallocate(&mut self.first, ptr.as_ptr(), aligned_layout.size());
+        aligned_layout
+    }
 
     /// Returns the minimal allocation size. Smaller allocations or deallocations are not allowed.
     pub fn min_size() -> usize {
         size_of::<usize>() * 2
     }
 
-    // /// Returns information about the first hole for test purposes.
-    // #[cfg(test)]
-    // pub fn first_hole(&self) -> Option<(*const u8, usize)> {
-    //     self.first
-    //         .next
-    //         .as_ref()
-    //         .map(|hole| ((*hole) as *const Hole as *const u8, hole.size))
-    // }
+    /// Returns information about the first hole for test purposes.
+    #[cfg(test)]
+    pub fn first_hole(&self) -> Option<(*const u8, usize)> {
+        self.first
+            .next
+            .as_ref()
+            .map(|hole| (hole.as_ptr() as *const u8, unsafe { hole.as_ref().size }))
+    }
 }
 
 /// A block containing free memory. It points to the next hole and thus forms a linked list.
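The restored `deallocate` relies on `Self::align_layout` (not shown in this hunk) so that allocation and deallocation round a layout in exactly the same way. Below is a standalone sketch of that kind of adjustment, assuming it rounds the size up to `min_size()` and to the `Hole` header's alignment — `align_layout_model` and the `Hole` stand-in are illustrative, not this crate's actual code:

```rust
use core::alloc::Layout;
use core::mem::{align_of, size_of};
use core::ptr::NonNull;

// Illustrative stand-in for the crate's intrusive free-list header.
struct Hole {
    size: usize,
    next: Option<NonNull<Hole>>,
}

/// Model of the adjustment that `allocate_first_fit` and `deallocate`
/// must agree on: every block must be able to hold a `Hole` header once
/// it is freed, so tiny or weakly aligned layouts are rounded up.
fn align_layout_model(layout: Layout) -> Layout {
    let size = layout.size().max(size_of::<usize>() * 2); // HoleList::min_size()
    let align = layout.align().max(align_of::<Hole>());
    let size = (size + align - 1) / align * align; // round up to a multiple of align
    Layout::from_size_align(size, align).unwrap()
}

fn main() {
    let tiny = Layout::from_size_align(3, 1).unwrap();
    let adjusted = align_layout_model(tiny);
    // A 3-byte request grows to at least two words, so the block can
    // store `Hole { size, next }` again when it returns to the list.
    assert!(adjusted.size() >= size_of::<usize>() * 2);
    println!("{:?} -> {:?}", tiny, adjusted);
}
```

Because `deallocate` returns the adjusted layout, a caller can subtract the size that was actually reserved rather than the size that was requested.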
@@ -381,96 +381,96 @@ struct Allocation {
     back_padding: Option<HoleInfo>,
 }
 
-// /// Frees the allocation given by `(addr, size)`. It starts at the given hole and walks the list to
-// /// find the correct place (the list is sorted by address).
-// fn deallocate(mut hole: &mut Hole, addr: *mut u8, mut size: usize) {
-//     loop {
-//         assert!(size >= HoleList::min_size());
-
-//         let hole_addr = if hole.size == 0 {
-//             // It's the dummy hole, which is the head of the HoleList. It's somewhere on the stack,
-//             // so its address is not the address of the hole. We set the addr to 0 as it's always
-//             // the first hole.
-//             core::ptr::null_mut()
-//         } else {
-//             // it's a real hole in memory and its address is the address of the hole
-//             hole as *mut _ as *mut u8
-//         };
-
-//         // Each freed block must be handled by the previous hole in memory. Thus the freed
-//         // address must always be behind the current hole.
-//         assert!(
-//             hole_addr.wrapping_offset(hole.size.try_into().unwrap()) <= addr,
-//             "invalid deallocation (probably a double free)"
-//         );
-
-//         // get information about the next block
-//         let next_hole_info = hole.next.as_mut().map(|next| next.info());
-
-//         match next_hole_info {
-//             Some(next)
-//                 if hole_addr.wrapping_offset(hole.size.try_into().unwrap()) == addr
-//                     && addr.wrapping_offset(size.try_into().unwrap()) == next.addr =>
-//             {
-//                 // block fills the gap between this hole and the next hole
-//                 // before: ___XXX____YYYYY____ where X is this hole and Y the next hole
-//                 // after:  ___XXXFFFFYYYYY____ where F is the freed block
-
-//                 hole.size += size + next.size; // merge the F and Y blocks to this X block
-//                 hole.next = hole.next.as_mut().unwrap().next.take(); // remove the Y block
-//             }
-//             _ if hole_addr.wrapping_add(hole.size.try_into().unwrap()) == addr => {
-//                 // block is right behind this hole but there is used memory after it
-//                 // before: ___XXX______YYYYY____ where X is this hole and Y the next hole
-//                 // after:  ___XXXFFFF__YYYYY____ where F is the freed block
-
-//                 // or: block is right behind this hole and this is the last hole
-//                 // before: ___XXX_______________ where X is this hole
-//                 // after:  ___XXXFFFF___________ where F is the freed block
-
-//                 hole.size += size; // merge the F block to this X block
-//             }
-//             Some(next) if addr.wrapping_offset(size.try_into().unwrap()) == next.addr => {
-//                 // block is right before the next hole but there is used memory before it
-//                 // before: ___XXX______YYYYY____ where X is this hole and Y the next hole
-//                 // after:  ___XXX__FFFFYYYYY____ where F is the freed block
-
-//                 hole.next = hole.next.as_mut().unwrap().next.take(); // remove the Y block
-//                 size += next.size; // free the merged F/Y block in next iteration
-//                 continue;
-//             }
-//             Some(next) if next.addr <= addr => {
-//                 // block is behind the next hole, so we delegate it to the next hole
-//                 // before: ___XXX__YYYYY________ where X is this hole and Y the next hole
-//                 // after:  ___XXX__YYYYY__FFFF__ where F is the freed block
-
-//                 hole = move_helper(hole).next.as_mut().unwrap(); // start next iteration at next hole
-//                 continue;
-//             }
-//             _ => {
-//                 // block is between this and the next hole
-//                 // before: ___XXX________YYYYY_ where X is this hole and Y the next hole
-//                 // after:  ___XXX__FFFF__YYYYY_ where F is the freed block
-
-//                 // or: this is the last hole
-//                 // before: ___XXX_________ where X is this hole
-//                 // after:  ___XXX__FFFF___ where F is the freed block
-
-//                 let new_hole = Hole {
-//                     size: size,
-//                     next: hole.next.take(), // the reference to the Y block (if it exists)
-//                 };
-//                 // write the new hole to the freed memory
-//                 debug_assert_eq!(addr as usize % align_of::<Hole>(), 0);
-//                 let ptr = addr as *mut Hole;
-//                 unsafe { ptr.write(new_hole) };
-//                 // add the F block as the next block of the X block
-//                 hole.next = Some(unsafe { &mut *ptr });
-//             }
-//         }
-//         break;
-//     }
-// }
+/// Frees the allocation given by `(addr, size)`. It starts at the given hole and walks the list to
+/// find the correct place (the list is sorted by address).
+fn deallocate(mut hole: &mut Hole, addr: *mut u8, mut size: usize) {
+    loop {
+        assert!(size >= HoleList::min_size());
+
+        let hole_addr = if hole.size == 0 {
+            // It's the dummy hole, which is the head of the HoleList. It's somewhere on the stack,
+            // so its address is not the address of the hole. We set the addr to 0 as it's always
+            // the first hole.
+            core::ptr::null_mut()
+        } else {
+            // it's a real hole in memory and its address is the address of the hole
+            hole as *mut _ as *mut u8
+        };
+
+        // Each freed block must be handled by the previous hole in memory. Thus the freed
+        // address must always be behind the current hole.
+        assert!(
+            hole_addr.wrapping_offset(hole.size.try_into().unwrap()) <= addr,
+            "invalid deallocation (probably a double free)"
+        );
+
+        // get information about the next block
+        let next_hole_info = hole.next.as_mut().map(|next| unsafe { next.as_mut().info() });
+
+        match next_hole_info {
+            Some(next)
+                if hole_addr.wrapping_offset(hole.size.try_into().unwrap()) == addr
+                    && addr.wrapping_offset(size.try_into().unwrap()) == next.addr =>
+            {
+                // block fills the gap between this hole and the next hole
+                // before: ___XXX____YYYYY____ where X is this hole and Y the next hole
+                // after:  ___XXXFFFFYYYYY____ where F is the freed block
+
+                hole.size += size + next.size; // merge the F and Y blocks to this X block
+                hole.next = unsafe { hole.next.as_mut().unwrap().as_mut().next.take() }; // remove the Y block
+            }
+            _ if hole_addr.wrapping_add(hole.size.try_into().unwrap()) == addr => {
+                // block is right behind this hole but there is used memory after it
+                // before: ___XXX______YYYYY____ where X is this hole and Y the next hole
+                // after:  ___XXXFFFF__YYYYY____ where F is the freed block
+
+                // or: block is right behind this hole and this is the last hole
+                // before: ___XXX_______________ where X is this hole
+                // after:  ___XXXFFFF___________ where F is the freed block
+
+                hole.size += size; // merge the F block to this X block
+            }
+            Some(next) if addr.wrapping_offset(size.try_into().unwrap()) == next.addr => {
+                // block is right before the next hole but there is used memory before it
+                // before: ___XXX______YYYYY____ where X is this hole and Y the next hole
+                // after:  ___XXX__FFFFYYYYY____ where F is the freed block
+
+                hole.next = unsafe { hole.next.as_mut().unwrap().as_mut().next.take() }; // remove the Y block
+                size += next.size; // free the merged F/Y block in next iteration
+                continue;
+            }
+            Some(next) if next.addr <= addr => {
+                // block is behind the next hole, so we delegate it to the next hole
+                // before: ___XXX__YYYYY________ where X is this hole and Y the next hole
+                // after:  ___XXX__YYYYY__FFFF__ where F is the freed block
+
+                hole = unsafe { move_helper(hole).next.as_mut().unwrap().as_mut() }; // start next iteration at next hole
+                continue;
+            }
+            _ => {
+                // block is between this and the next hole
+                // before: ___XXX________YYYYY_ where X is this hole and Y the next hole
+                // after:  ___XXX__FFFF__YYYYY_ where F is the freed block
+
+                // or: this is the last hole
+                // before: ___XXX_________ where X is this hole
+                // after:  ___XXX__FFFF___ where F is the freed block
+
+                let new_hole = Hole {
+                    size: size,
+                    next: hole.next.take(), // the reference to the Y block (if it exists)
+                };
+                // write the new hole to the freed memory
+                debug_assert_eq!(addr as usize % align_of::<Hole>(), 0);
+                let ptr = addr as *mut Hole;
+                unsafe { ptr.write(new_hole) };
+                // add the F block as the next block of the X block
+                hole.next = Some(unsafe { NonNull::new_unchecked(ptr) });
+            }
+        }
+        break;
+    }
+}
 
 /// Identity function to ease moving of references.
 ///
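The `match` in `deallocate` distinguishes four placements of the freed block F relative to the current hole X and the next hole Y. The same coalescing rules, restated as a self-contained sketch over an address-sorted `Vec<(usize, usize)>` of `(addr, size)` holes — the representation and names are illustrative, and the double-free assertions of the real code are omitted:

```rust
/// Insert a freed block into an address-sorted free list, merging with its
/// neighbors: fill-the-gap, extend-previous, extend-next, or stand-alone.
fn free_block(holes: &mut Vec<(usize, usize)>, addr: usize, size: usize) {
    // index of the first hole that starts at or after the freed block
    let i = holes.partition_point(|&(a, _)| a < addr);

    let merges_prev = i > 0 && holes[i - 1].0 + holes[i - 1].1 == addr;
    let merges_next = i < holes.len() && addr + size == holes[i].0;

    match (merges_prev, merges_next) {
        (true, true) => {
            // ___XXXFFFFYYYYY___: X, F and Y collapse into a single hole
            holes[i - 1].1 += size + holes[i].1;
            holes.remove(i);
        }
        // ___XXXFFFF__YYYYY___: F extends the previous hole
        (true, false) => holes[i - 1].1 += size,
        (false, true) => {
            // ___XXX__FFFFYYYYY___: F grows the next hole downwards
            holes[i].0 = addr;
            holes[i].1 += size;
        }
        // ___XXX__FFFF__YYYYY___: F becomes a new stand-alone hole
        (false, false) => holes.insert(i, (addr, size)),
    }
}

fn main() {
    let mut holes = vec![(0x1000, 0x10), (0x1030, 0x10)];
    // freeing [0x1010, 0x1030) fills the gap between the two holes
    free_block(&mut holes, 0x1010, 0x20);
    assert_eq!(holes, vec![(0x1000, 0x40)]);
    println!("{:x?}", holes);
}
```

The intrusive version above performs the same transitions in place: instead of `Vec::insert`, it writes the `Hole` header directly into the freed memory, which is why every freed block must be at least `min_size()` bytes.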

src/lib.rs

Lines changed: 24 additions & 24 deletions
@@ -32,8 +32,8 @@ use hole::HoleList;
 use spinning_top::Spinlock;
 
 pub mod hole;
-// #[cfg(test)]
-// mod test;
+#[cfg(test)]
+mod test;
 
 /// A fixed size heap backed by a linked list of free memory blocks.
 pub struct Heap {
@@ -151,16 +151,16 @@ impl Heap {
         }
     }
 
-    // /// Frees the given allocation. `ptr` must be a pointer returned
-    // /// by a call to the `allocate_first_fit` function with identical size and alignment. Undefined
-    // /// behavior may occur for invalid arguments, thus this function is unsafe.
-    // ///
-    // /// This function walks the list of free memory blocks and inserts the freed block at the
-    // /// correct place. If the freed block is adjacent to another free block, the blocks are merged
-    // /// again. This operation is in `O(n)` since the list needs to be sorted by address.
-    // pub unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) {
-    //     self.used -= self.holes.deallocate(ptr, layout).size();
-    // }
+    /// Frees the given allocation. `ptr` must be a pointer returned
+    /// by a call to the `allocate_first_fit` function with identical size and alignment. Undefined
+    /// behavior may occur for invalid arguments, thus this function is unsafe.
+    ///
+    /// This function walks the list of free memory blocks and inserts the freed block at the
+    /// correct place. If the freed block is adjacent to another free block, the blocks are merged
+    /// again. This operation is in `O(n)` since the list needs to be sorted by address.
+    pub unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) {
+        self.used -= self.holes.deallocate(ptr, layout).size();
+    }
 
     /// Returns the bottom address of the heap.
     pub fn bottom(&self) -> *mut u8 {
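At the `Heap` level, the restored `deallocate` keeps the `used` counter in sync by subtracting the size of the *aligned* layout returned from `HoleList::deallocate`. A round-trip sketch follows; the `Heap::new(bottom, size)` constructor shape and the `used()` accessor are assumptions about this crate version, not part of this diff:

```rust
use core::alloc::Layout;
use linked_list_allocator::Heap;

fn main() {
    // Leak a buffer so the backing memory outlives the heap.
    let mem: &'static mut [u8; 4096] = Box::leak(Box::new([0u8; 4096]));
    // Constructor shape assumed (older releases took a `usize` bottom);
    // verify against the crate version in use.
    let mut heap = unsafe { Heap::new(mem.as_mut_ptr(), mem.len()) };

    let used_before = heap.used();
    let layout = Layout::from_size_align(16, 8).unwrap();
    let ptr = heap.allocate_first_fit(layout).expect("out of memory");
    assert!(heap.used() > used_before);

    // Allocation and deallocation apply the identical layout adjustment,
    // so the accounting returns exactly to its previous state.
    unsafe { heap.deallocate(ptr, layout) };
    assert_eq!(heap.used(), used_before);
}
```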
@@ -196,18 +196,18 @@ impl Heap {
         &mut self.holes
     }
 
-    // /// Extends the size of the heap by creating a new hole at the end
-    // ///
-    // /// # Unsafety
-    // ///
-    // /// The new extended area must be valid
-    // pub unsafe fn extend(&mut self, by: usize) {
-    //     let top = self.top();
-    //     let layout = Layout::from_size_align(by, 1).unwrap();
-    //     self.holes
-    //         .deallocate(NonNull::new_unchecked(top as *mut u8), layout);
-    //     self.size += by;
-    // }
+    /// Extends the size of the heap by creating a new hole at the end
+    ///
+    /// # Unsafety
+    ///
+    /// The new extended area must be valid
+    pub unsafe fn extend(&mut self, by: usize) {
+        let top = self.top();
+        let layout = Layout::from_size_align(by, 1).unwrap();
+        self.holes
+            .deallocate(NonNull::new_unchecked(top as *mut u8), layout);
+        self.size += by;
+    }
 }
 
 // #[cfg(all(feature = "alloc_ref", feature = "use_spin"))]
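`extend` is deallocation in disguise: it hands the untouched region starting at `top` back to the hole list as a fresh hole, then bumps `size`. A small wrapper sketch — the `size()` accessor is an assumption (the diff only shows the `size` field), and the caller carries the documented safety obligation:

```rust
use linked_list_allocator::Heap;

/// Grow `heap` by `by` bytes.
///
/// # Safety
///
/// The `by` bytes directly above the heap's current top must be valid,
/// unused memory, exactly as `Heap::extend` requires.
unsafe fn grow(heap: &mut Heap, by: usize) {
    let old_size = heap.size(); // accessor assumed for this crate version
    heap.extend(by);
    debug_assert_eq!(heap.size(), old_size + by);
}
```

Because the new hole begins exactly at the old top, it can later coalesce with a freed neighbor through the same merge cases shown in src/hole.rs.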

src/test.rs

Lines changed: 1 addition & 1 deletion
@@ -88,7 +88,7 @@ fn allocate_and_free_double_usize() {
         *(x.as_ptr() as *mut (usize, usize)) = (0xdeafdeadbeafbabe, 0xdeafdeadbeafbabe);
 
         heap.deallocate(x, layout.clone());
-        let real_first = heap.holes().first.next.as_ref().unwrap();
+        let real_first = heap.holes().first.next.as_ref().unwrap().as_ref();
 
         assert_eq!(real_first.size, heap.size);
         assert!(real_first.next.is_none());
