@@ -4,7 +4,7 @@ use core::mem::{align_of, size_of};
 use core::ptr::null_mut;
 use core::ptr::NonNull;
 
-use crate::align_up_size;
+use crate::{align_down_size, align_up_size};
 
 use super::align_up;
 
@@ -13,6 +13,7 @@ pub struct HoleList {
     pub(crate) first: Hole, // dummy
     pub(crate) bottom: *mut u8,
     pub(crate) top: *mut u8,
+    pub(crate) pending_extend: u8,
 }
 
 pub(crate) struct Cursor {
@@ -278,6 +279,7 @@ impl HoleList {
             },
             bottom: null_mut(),
             top: null_mut(),
+            pending_extend: 0,
         }
     }
 
@@ -320,30 +322,52 @@ impl HoleList {
 
     /// Creates a `HoleList` that contains the given hole.
     ///
+    /// The `hole_addr` pointer is automatically aligned, so the `bottom`
+    /// field might be larger than the given `hole_addr`.
+    ///
+    /// The given `hole_size` must be large enough to store the required
+    /// metadata, otherwise this function will panic. Depending on the
+    /// alignment of the `hole_addr` pointer, the minimum size is between
+    /// `2 * size_of::<usize>` and `3 * size_of::<usize>`.
+    ///
+    /// The usable size for allocations will be truncated to the nearest
+    /// alignment of `align_of::<usize>`. Any extra bytes left at the end
+    /// will be reclaimed once sufficient additional space is given to
+    /// [`extend`][crate::Heap::extend].
+    ///
     /// # Safety
     ///
    /// This function is unsafe because it creates a hole at the given `hole_addr`.
     /// This can cause undefined behavior if this address is invalid or if memory from the
     /// `[hole_addr, hole_addr+size)` range is used somewhere else.
-    ///
-    /// The pointer to `hole_addr` is automatically aligned.
     pub unsafe fn new(hole_addr: *mut u8, hole_size: usize) -> HoleList {
         assert_eq!(size_of::<Hole>(), Self::min_size());
+        assert!(hole_size >= size_of::<Hole>());
 
         let aligned_hole_addr = align_up(hole_addr, align_of::<Hole>());
+        let requested_hole_size = hole_size - ((aligned_hole_addr as usize) - (hole_addr as usize));
+        let aligned_hole_size = align_down_size(requested_hole_size, align_of::<Hole>());
+        assert!(aligned_hole_size >= size_of::<Hole>());
+
         let ptr = aligned_hole_addr as *mut Hole;
         ptr.write(Hole {
-            size: hole_size - ((aligned_hole_addr as usize) - (hole_addr as usize)),
+            size: aligned_hole_size,
             next: None,
         });
 
+        assert_eq!(
+            hole_addr.wrapping_add(hole_size),
+            aligned_hole_addr.wrapping_add(requested_hole_size)
+        );
+
         HoleList {
             first: Hole {
                 size: 0,
                 next: Some(NonNull::new_unchecked(ptr)),
             },
             bottom: aligned_hole_addr,
-            top: hole_addr.wrapping_add(hole_size),
+            top: aligned_hole_addr.wrapping_add(aligned_hole_size),
+            pending_extend: (requested_hole_size - aligned_hole_size) as u8,
         }
     }
 
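To make the size arithmetic in `new` easier to check, here is a self-contained sketch of it (not part of the commit; the two helpers are stand-ins with the semantics the crate's `align_up` and `align_down_size` are expected to have, and the concrete numbers are invented):

fn align_up_addr(addr: usize, align: usize) -> usize {
    // round up to the next multiple of `align` (align must be a power of two)
    (addr + align - 1) & !(align - 1)
}

fn align_down_size(size: usize, align: usize) -> usize {
    // round down to a multiple of `align`
    size & !(align - 1)
}

fn main() {
    // e.g. hole_addr = 0x1001 (misaligned by one byte), hole_size = 24, 64-bit target
    let (hole_addr, hole_size, align) = (0x1001usize, 24usize, 8usize);

    let aligned_hole_addr = align_up_addr(hole_addr, align);
    let requested_hole_size = hole_size - (aligned_hole_addr - hole_addr);
    let aligned_hole_size = align_down_size(requested_hole_size, align);

    assert_eq!(aligned_hole_addr, 0x1008); // 7 bytes lost to aligning the start up
    assert_eq!(requested_hole_size, 17);
    assert_eq!(aligned_hole_size, 16); // exactly 2 * size_of::<usize>(), the minimum
    // the leftover byte is what ends up in `pending_extend`
    assert_eq!(requested_hole_size - aligned_hole_size, 1);
}

This is also why the doc comment can bound the minimum `hole_size` between `2 * size_of::<usize>` and `3 * size_of::<usize>`: up to `align - 1` bytes can be lost to aligning `hole_addr` up before the truncation even starts.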
@@ -428,6 +452,44 @@ impl HoleList {
             })
         })
     }
+
+    pub(crate) unsafe fn extend(&mut self, by: usize) {
+        assert!(!self.top.is_null(), "tried to extend an empty heap");
+
+        let top = self.top;
+
+        let dead_space = top.align_offset(align_of::<Hole>());
+        debug_assert_eq!(
+            0, dead_space,
+            "dead space detected during extend: {} bytes. This means top was unaligned",
+            dead_space
+        );
+
+        debug_assert!(
+            (self.pending_extend as usize) < Self::min_size(),
+            "pending extend was larger than expected"
+        );
+
+        // join this extend request with any pending (but not yet acted on) extension
+        let extend_by = self.pending_extend as usize + by;
+
+        let minimum_extend = Self::min_size();
+        if extend_by < minimum_extend {
+            self.pending_extend = extend_by as u8;
+            return;
+        }
+
+        // only extend up to another valid boundary
+        let new_hole_size = align_down_size(extend_by, align_of::<Hole>());
+        let layout = Layout::from_size_align(new_hole_size, 1).unwrap();
+
+        // instantiate the hole by forcing a deallocation on the new memory
+        self.deallocate(NonNull::new_unchecked(top as *mut u8), layout);
+        self.top = top.add(new_hole_size);
+
+        // save extra bytes given to extend that weren't aligned to the hole size
+        self.pending_extend = (extend_by - new_hole_size) as u8;
+    }
 }
 
 unsafe fn make_hole(addr: *mut u8, size: usize) -> NonNull<Hole> {
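The banking behavior of `extend` can be summarized with a small simulation (a sketch of the logic above, not the crate's code; the `min_size` and `align` values assume a 64-bit target):

// Requests smaller than min_size() are banked in pending_extend; once the banked
// bytes plus a new request reach a Hole-aligned amount, the heap grows and only
// the unaligned remainder stays banked.
fn simulate_extend(pending: usize, by: usize, min_size: usize, align: usize) -> (usize, usize) {
    let extend_by = pending + by;
    if extend_by < min_size {
        return (0, extend_by); // too small to hold a Hole: bank everything
    }
    let new_hole_size = extend_by & !(align - 1); // align_down_size(extend_by, align)
    (new_hole_size, extend_by - new_hole_size) // (growth, new pending)
}

fn main() {
    let (min_size, align) = (16, 8); // 2 * size_of::<usize>() on a 64-bit target

    // extend(5): below min_size, so nothing grows and all 5 bytes are banked
    let (grown, pending) = simulate_extend(0, 5, min_size, align);
    assert_eq!((grown, pending), (0, 5));

    // extend(14): 5 + 14 = 19 -> the heap grows by 16, 3 bytes remain pending
    let (grown, pending) = simulate_extend(pending, 14, min_size, align);
    assert_eq!((grown, pending), (16, 3));
}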
@@ -505,7 +567,10 @@ impl Cursor {
         // Does hole overlap node?
         assert!(
             hole_u8.wrapping_add(hole_size) <= node_u8,
-            "Freed node aliases existing hole! Bad free?",
+            "Freed node ({:?}) aliases existing hole ({:?}[{}])! Bad free?",
+            node_u8,
+            hole_u8,
+            hole_size,
         );
 
         // All good! Let's insert that after.
@@ -630,10 +695,10 @@ fn deallocate(list: &mut HoleList, addr: *mut u8, size: usize) {
 
 #[cfg(test)]
 pub mod test {
-    use crate::Heap;
-    use core::alloc::Layout;
-    use std::mem::MaybeUninit;
-    use std::prelude::v1::*;
+    use super::HoleList;
+    use crate::{align_down_size, Heap};
+    use core::mem::size_of;
+    use std::{alloc::Layout, convert::TryInto, mem::MaybeUninit, prelude::v1::*, ptr::NonNull};
 
     #[repr(align(128))]
     struct Chonk<const N: usize> {
@@ -655,8 +720,8 @@ pub mod test {
         let assumed_location = data.as_mut_ptr().cast();
 
         let heap = Heap::from_slice(data);
-        assert!(heap.bottom() == assumed_location);
-        assert!(heap.size() == HEAP_SIZE);
+        assert_eq!(heap.bottom(), assumed_location);
+        assert_eq!(heap.size(), align_down_size(HEAP_SIZE, size_of::<usize>()));
         heap
     }
 
@@ -667,7 +732,10 @@ pub mod test {
         // This is the "dummy" node
         assert_eq!(curs.previous().size, 0);
         // This is the "full" heap
-        assert_eq!(curs.current().size, 1000);
+        assert_eq!(
+            curs.current().size,
+            align_down_size(1000, size_of::<usize>())
+        );
         // There is no other hole
         assert!(curs.next().is_none());
     }
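The adjusted expectation follows from the truncation rule in `new`: the usable size is rounded down to a multiple of `align_of::<usize>()`. A quick worked check, assuming the crate's `align_down_size` rounds down as its name suggests:

fn align_down_size(size: usize, align: usize) -> usize {
    size - (size % align) // round down to a multiple of `align`
}

fn main() {
    // 1000 is already a multiple of 8, so on a 64-bit target the expected value is
    // unchanged; the call documents the rule rather than altering the number
    assert_eq!(align_down_size(1000, 8), 1000);
    // an odd-sized region loses its tail bytes until extend() reclaims them
    assert_eq!(align_down_size(1001, 8), 1000);
    assert_eq!(align_down_size(17, 8), 16);
}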
@@ -678,4 +746,76 @@ pub mod test {
         let reqd = Layout::from_size_align(256, 1).unwrap();
         let _ = heap.allocate_first_fit(reqd).unwrap();
     }
+
+    /// Tests `HoleList::new` with the minimal allowed `hole_size`.
+    #[test]
+    fn hole_list_new_min_size() {
+        // define an array of `u64` instead of `u8` for alignment
+        static mut HEAP: [u64; 2] = [0; 2];
+        let heap =
+            unsafe { HoleList::new(HEAP.as_mut_ptr().cast(), 2 * core::mem::size_of::<usize>()) };
+        assert_eq!(heap.bottom.cast(), unsafe { HEAP.as_mut_ptr() });
+        assert_eq!(heap.top.cast(), unsafe { HEAP.as_mut_ptr().add(2) });
+        assert_eq!(heap.first.size, 0); // dummy
+        assert_eq!(
+            heap.first.next,
+            Some(NonNull::new(heap.bottom.cast())).unwrap()
+        );
+        assert_eq!(
+            unsafe { &*(heap.first.next.unwrap().as_ptr()) }.size,
+            2 * core::mem::size_of::<usize>()
+        );
+        assert_eq!(unsafe { &*(heap.first.next.unwrap().as_ptr()) }.next, None);
+    }
+
+    /// Tests that `HoleList::new` aligns the `hole_addr` correctly and adjusts the size
+    /// accordingly.
+    #[test]
+    fn hole_list_new_align() {
+        // define an array of `u64` instead of `u8` for alignment
+        static mut HEAP: [u64; 3] = [0; 3];
+
+        let heap_start: *mut u8 = unsafe { HEAP.as_mut_ptr().add(1) }.cast();
+        // initialize the HoleList with a hole_addr one byte before `heap_start`
+        // -> the function should align it up to `heap_start`
+        let heap =
+            unsafe { HoleList::new(heap_start.sub(1), 2 * core::mem::size_of::<usize>() + 1) };
+        assert_eq!(heap.bottom, heap_start);
+        assert_eq!(heap.top.cast(), unsafe {
+            // one byte less than the `hole_size` given to `new` because of alignment
+            heap_start.add(2 * core::mem::size_of::<usize>())
+        });
+
+        assert_eq!(heap.first.size, 0); // dummy
+        assert_eq!(
+            heap.first.next,
+            Some(NonNull::new(heap.bottom.cast())).unwrap()
+        );
+        assert_eq!(
+            unsafe { &*(heap.first.next.unwrap().as_ptr()) }.size,
+            unsafe { heap.top.offset_from(heap.bottom) }
+                .try_into()
+                .unwrap()
+        );
+        assert_eq!(unsafe { &*(heap.first.next.unwrap().as_ptr()) }.next, None);
+    }
+
+    #[test]
+    #[should_panic]
+    fn hole_list_new_too_small() {
+        // define an array of `u64` instead of `u8` for alignment
+        static mut HEAP: [u64; 3] = [0; 3];
+
+        let heap_start: *mut u8 = unsafe { HEAP.as_mut_ptr().add(1) }.cast();
+        // initialize the HoleList with a hole_addr one byte before `heap_start`
+        // -> the function should align it up to `heap_start`, but then the
+        // available size is too small to store a hole -> it should panic
+        unsafe { HoleList::new(heap_start.sub(1), 2 * core::mem::size_of::<usize>()) };
+    }
+
+    #[test]
+    #[should_panic]
+    fn extend_empty() {
+        unsafe { HoleList::empty().extend(16) };
+    }
 }