 use core::alloc::Layout;
+use core::convert::{TryFrom, TryInto};
 use core::mem;
 use core::mem::{align_of, size_of};
 use core::ptr::NonNull;
 
+use crate::align_up_size;
+
 use super::align_up;
 
 /// A sorted list of holes. It uses the the holes itself to store its nodes.
@@ -42,13 +45,14 @@ impl HoleList {
     /// is invalid or if memory from the `[hole_addr, hole_addr+size)` range is used somewhere else.
     ///
     /// The pointer to `hole_addr` is automatically aligned.
-    pub unsafe fn new(hole_addr: usize, hole_size: usize) -> HoleList {
+    pub unsafe fn new(hole_addr: *mut u8, hole_size: usize) -> HoleList {
         assert_eq!(size_of::<Hole>(), Self::min_size());
 
         let aligned_hole_addr = align_up(hole_addr, align_of::<Hole>());
         let ptr = aligned_hole_addr as *mut Hole;
         ptr.write(Hole {
-            size: hole_size.saturating_sub(aligned_hole_addr - hole_addr),
+            size: hole_size
+                .saturating_sub(aligned_hole_addr.offset_from(hole_addr).try_into().unwrap()),
             next: None,
         });
 
@@ -73,7 +77,7 @@ impl HoleList {
         if size < Self::min_size() {
             size = Self::min_size();
         }
-        let size = align_up(size, mem::align_of::<Hole>());
+        let size = align_up_size(size, mem::align_of::<Hole>());
         let layout = Layout::from_size_align(size, layout.align()).unwrap();
 
         layout
@@ -114,11 +118,7 @@ impl HoleList {
     /// [`allocate_first_fit`]: HoleList::allocate_first_fit
     pub unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) -> Layout {
         let aligned_layout = Self::align_layout(layout);
-        deallocate(
-            &mut self.first,
-            ptr.as_ptr() as usize,
-            aligned_layout.size(),
-        );
+        deallocate(&mut self.first, ptr.as_ptr(), aligned_layout.size());
         aligned_layout
     }
 
@@ -129,11 +129,11 @@ impl HoleList {
 
     /// Returns information about the first hole for test purposes.
     #[cfg(test)]
-    pub fn first_hole(&self) -> Option<(usize, usize)> {
+    pub fn first_hole(&self) -> Option<(*const u8, usize)> {
         self.first
             .next
             .as_ref()
-            .map(|hole| ((*hole) as *const Hole as usize, hole.size))
+            .map(|hole| ((*hole) as *const Hole as *const u8, hole.size))
     }
 }
 
@@ -152,9 +152,9 @@ pub struct Hole {
 
 impl Hole {
     /// Returns basic information about the hole.
-    fn info(&self) -> HoleInfo {
+    fn info(&mut self) -> HoleInfo {
         HoleInfo {
-            addr: self as *const _ as usize,
+            addr: self as *mut _ as *mut u8,
             size: self.size,
         }
     }
@@ -163,7 +163,7 @@ impl Hole {
 /// Basic information about a hole.
 #[derive(Debug, Clone, Copy)]
 struct HoleInfo {
-    addr: usize,
+    addr: *mut u8,
     size: usize,
 }
 
@@ -189,24 +189,29 @@ fn split_hole(hole: HoleInfo, required_layout: Layout) -> Option<Allocation> {
         (hole.addr, None)
     } else {
         // the required alignment causes some padding before the allocation
-        let aligned_addr = align_up(hole.addr + HoleList::min_size(), required_align);
+        let aligned_addr = align_up(hole.addr.wrapping_add(HoleList::min_size()), required_align);
         (
             aligned_addr,
             Some(HoleInfo {
                 addr: hole.addr,
-                size: aligned_addr - hole.addr,
+                size: unsafe { aligned_addr.offset_from(hole.addr) }
+                    .try_into()
+                    .unwrap(),
             }),
         )
     };
 
     let aligned_hole = {
-        if aligned_addr + required_size > hole.addr + hole.size {
+        if aligned_addr.wrapping_offset(required_size.try_into().unwrap())
+            > hole.addr.wrapping_offset(hole.size.try_into().unwrap())
+        {
             // hole is too small
             return None;
         }
         HoleInfo {
             addr: aligned_addr,
-            size: hole.size - (aligned_addr - hole.addr),
+            size: hole.size
+                - usize::try_from(unsafe { aligned_addr.offset_from(hole.addr) }).unwrap(),
         }
     };
 
@@ -219,7 +224,9 @@ fn split_hole(hole: HoleInfo, required_layout: Layout) -> Option<Allocation> {
     } else {
         // the hole is bigger than necessary, so there is some padding behind the allocation
         Some(HoleInfo {
-            addr: aligned_hole.addr + required_size,
+            addr: aligned_hole
+                .addr
+                .wrapping_offset(required_size.try_into().unwrap()),
             size: aligned_hole.size - required_size,
         })
     };
@@ -291,40 +298,43 @@ fn allocate_first_fit(mut previous: &mut Hole, layout: Layout) -> Result<HoleInf
 
 /// Frees the allocation given by `(addr, size)`. It starts at the given hole and walks the list to
 /// find the correct place (the list is sorted by address).
-fn deallocate(mut hole: &mut Hole, addr: usize, mut size: usize) {
+fn deallocate(mut hole: &mut Hole, addr: *mut u8, mut size: usize) {
     loop {
         assert!(size >= HoleList::min_size());
 
         let hole_addr = if hole.size == 0 {
             // It's the dummy hole, which is the head of the HoleList. It's somewhere on the stack,
             // so it's address is not the address of the hole. We set the addr to 0 as it's always
             // the first hole.
-            0
+            core::ptr::null_mut()
         } else {
             // tt's a real hole in memory and its address is the address of the hole
-            hole as *mut _ as usize
+            hole as *mut _ as *mut u8
         };
 
         // Each freed block must be handled by the previous hole in memory. Thus the freed
         // address must be always behind the current hole.
         assert!(
-            hole_addr + hole.size <= addr,
+            hole_addr.wrapping_offset(hole.size.try_into().unwrap()) <= addr,
             "invalid deallocation (probably a double free)"
         );
 
         // get information about the next block
-        let next_hole_info = hole.next.as_ref().map(|next| next.info());
+        let next_hole_info = hole.next.as_mut().map(|next| next.info());
 
         match next_hole_info {
-            Some(next) if hole_addr + hole.size == addr && addr + size == next.addr => {
+            Some(next)
+                if hole_addr.wrapping_offset(hole.size.try_into().unwrap()) == addr
+                    && addr.wrapping_offset(size.try_into().unwrap()) == next.addr =>
+            {
                 // block fills the gap between this hole and the next hole
                 // before:  ___XXX____YYYYY____    where X is this hole and Y the next hole
                 // after:   ___XXXFFFFYYYYY____    where F is the freed block
 
                 hole.size += size + next.size; // merge the F and Y blocks to this X block
                 hole.next = hole.next.as_mut().unwrap().next.take(); // remove the Y block
             }
-            _ if hole_addr + hole.size == addr => {
+            _ if hole_addr.wrapping_add(hole.size.try_into().unwrap()) == addr => {
                 // block is right behind this hole but there is used memory after it
                 // before:  ___XXX______YYYYY____    where X is this hole and Y the next hole
                 // after:   ___XXXFFFF__YYYYY____    where F is the freed block
@@ -335,7 +345,7 @@ fn deallocate(mut hole: &mut Hole, addr: usize, mut size: usize) {
 
                 hole.size += size; // merge the F block to this X block
             }
-            Some(next) if addr + size == next.addr => {
+            Some(next) if addr.wrapping_offset(size.try_into().unwrap()) == next.addr => {
                 // block is right before the next hole but there is used memory before it
                 // before:  ___XXX______YYYYY____    where X is this hole and Y the next hole
                 // after:   ___XXX__FFFFYYYYY____    where F is the freed block
@@ -366,7 +376,7 @@ fn deallocate(mut hole: &mut Hole, addr: usize, mut size: usize) {
                     next: hole.next.take(), // the reference to the Y block (if it exists)
                 };
                 // write the new hole to the freed memory
-                debug_assert_eq!(addr % align_of::<Hole>(), 0);
+                debug_assert_eq!(addr as usize % align_of::<Hole>(), 0);
                 let ptr = addr as *mut Hole;
                 unsafe { ptr.write(new_hole) };
                 // add the F block as the next block of the X block
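
A note on the recurring pattern in this diff: arithmetic on `usize` addresses is replaced by raw-pointer arithmetic, so byte distances are now computed with `offset_from` (which yields an `isize`) and converted back to `usize` via `TryFrom`/`TryInto`. A minimal sketch of that conversion, with a helper name of our own choosing rather than anything from the crate:

use core::convert::TryInto;

/// Hypothetical helper: byte distance from `start` to `end`.
///
/// Safety: both pointers must lie within (or one past the end of) the same
/// allocation, and `end` must not be lower than `start`.
unsafe fn byte_distance(start: *mut u8, end: *mut u8) -> usize {
    // `offset_from` counts in units of the pointee; for `*mut u8` that is bytes.
    // It returns an `isize`, so the `try_into` panics if the distance were negative.
    end.offset_from(start).try_into().unwrap()
}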
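The diff also splits the alignment helpers: `align_up` now rounds a `*mut u8` up to an alignment boundary, while the new `align_up_size` keeps the integer behaviour for rounding a `Layout` size. Roughly sketched below with assumed signatures, not the crate's exact definitions; `align` is assumed to be a power of two.

/// Assumed shape of `align_up_size`: round `size` up to the next multiple of `align`.
fn align_up_size(size: usize, align: usize) -> usize {
    (size + align - 1) & !(align - 1)
}

/// Assumed shape of `align_up`: advance `addr` to the next `align` boundary.
fn align_up(addr: *mut u8, align: usize) -> *mut u8 {
    // `align_offset` returns how many bytes must be added to reach the boundary.
    addr.wrapping_add(addr.align_offset(align))
}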