
Commit 1f848ba

Try to make miri happy by using *mut u8 instead of usize for addresses
1 parent c8190bd commit 1f848ba
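Why this helps: Miri tracks pointer provenance, and a pointer recreated from a plain usize address no longer carries provenance over the original allocation, so Miri (in particular with -Zmiri-strict-provenance) flags memory accesses through it. Keeping addresses typed as *mut u8 preserves provenance through all the arithmetic. A minimal standalone sketch of the difference (illustrative; not code from this repository):

fn main() {
    let mut buf = [0u8; 32];

    // Round-tripping through usize: `via_int` carries no provenance over
    // `buf`, so Miri rejects the write below when run with
    // MIRIFLAGS=-Zmiri-strict-provenance.
    let addr = buf.as_mut_ptr() as usize;
    let via_int = (addr + 8) as *mut u8;

    // Staying in pointer arithmetic keeps the provenance of `buf`.
    let via_ptr = buf.as_mut_ptr().wrapping_add(8);

    unsafe {
        *via_ptr = 1; // accepted by Miri
        *via_int = 2; // flagged under strict provenance
    }
    assert_eq!(buf[8], 2);
}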

File tree: 3 files changed, 71 additions & 48 deletions


src/hole.rs

Lines changed: 37 additions & 27 deletions
@@ -1,8 +1,11 @@
 use core::alloc::Layout;
+use core::convert::{TryFrom, TryInto};
 use core::mem;
 use core::mem::{align_of, size_of};
 use core::ptr::NonNull;

+use crate::align_up_size;
+
 use super::align_up;

 /// A sorted list of holes. It uses the holes themselves to store its nodes.
@@ -42,13 +45,14 @@ impl HoleList {
     /// is invalid or if memory from the `[hole_addr, hole_addr+size)` range is used somewhere else.
     ///
     /// The pointer to `hole_addr` is automatically aligned.
-    pub unsafe fn new(hole_addr: usize, hole_size: usize) -> HoleList {
+    pub unsafe fn new(hole_addr: *mut u8, hole_size: usize) -> HoleList {
         assert_eq!(size_of::<Hole>(), Self::min_size());

         let aligned_hole_addr = align_up(hole_addr, align_of::<Hole>());
         let ptr = aligned_hole_addr as *mut Hole;
         ptr.write(Hole {
-            size: hole_size.saturating_sub(aligned_hole_addr - hole_addr),
+            size: hole_size
+                .saturating_sub(aligned_hole_addr.offset_from(hole_addr).try_into().unwrap()),
             next: None,
         });

@@ -73,7 +77,7 @@ impl HoleList {
         if size < Self::min_size() {
             size = Self::min_size();
         }
-        let size = align_up(size, mem::align_of::<Hole>());
+        let size = align_up_size(size, mem::align_of::<Hole>());
         let layout = Layout::from_size_align(size, layout.align()).unwrap();

         layout
@@ -114,11 +118,7 @@ impl HoleList {
     /// [`allocate_first_fit`]: HoleList::allocate_first_fit
     pub unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) -> Layout {
         let aligned_layout = Self::align_layout(layout);
-        deallocate(
-            &mut self.first,
-            ptr.as_ptr() as usize,
-            aligned_layout.size(),
-        );
+        deallocate(&mut self.first, ptr.as_ptr(), aligned_layout.size());
         aligned_layout
     }

@@ -129,11 +129,11 @@ impl HoleList {

     /// Returns information about the first hole for test purposes.
     #[cfg(test)]
-    pub fn first_hole(&self) -> Option<(usize, usize)> {
+    pub fn first_hole(&self) -> Option<(*const u8, usize)> {
         self.first
             .next
             .as_ref()
-            .map(|hole| ((*hole) as *const Hole as usize, hole.size))
+            .map(|hole| ((*hole) as *const Hole as *const u8, hole.size))
     }
 }

@@ -152,9 +152,9 @@ pub struct Hole {

 impl Hole {
     /// Returns basic information about the hole.
-    fn info(&self) -> HoleInfo {
+    fn info(&mut self) -> HoleInfo {
         HoleInfo {
-            addr: self as *const _ as usize,
+            addr: self as *mut _ as *mut u8,
             size: self.size,
         }
     }
@@ -163,7 +163,7 @@ impl Hole {
 /// Basic information about a hole.
 #[derive(Debug, Clone, Copy)]
 struct HoleInfo {
-    addr: usize,
+    addr: *mut u8,
     size: usize,
 }

@@ -189,24 +189,29 @@ fn split_hole(hole: HoleInfo, required_layout: Layout) -> Option<Allocation> {
         (hole.addr, None)
     } else {
         // the required alignment causes some padding before the allocation
-        let aligned_addr = align_up(hole.addr + HoleList::min_size(), required_align);
+        let aligned_addr = align_up(hole.addr.wrapping_add(HoleList::min_size()), required_align);
         (
             aligned_addr,
             Some(HoleInfo {
                 addr: hole.addr,
-                size: aligned_addr - hole.addr,
+                size: unsafe { aligned_addr.offset_from(hole.addr) }
+                    .try_into()
+                    .unwrap(),
             }),
         )
     };

     let aligned_hole = {
-        if aligned_addr + required_size > hole.addr + hole.size {
+        if aligned_addr.wrapping_offset(required_size.try_into().unwrap())
+            > hole.addr.wrapping_offset(hole.size.try_into().unwrap())
+        {
             // hole is too small
             return None;
         }
         HoleInfo {
             addr: aligned_addr,
-            size: hole.size - (aligned_addr - hole.addr),
+            size: hole.size
+                - usize::try_from(unsafe { aligned_addr.offset_from(hole.addr) }).unwrap(),
         }
     };

@@ -219,7 +224,9 @@ fn split_hole(hole: HoleInfo, required_layout: Layout) -> Option<Allocation> {
     } else {
         // the hole is bigger than necessary, so there is some padding behind the allocation
         Some(HoleInfo {
-            addr: aligned_hole.addr + required_size,
+            addr: aligned_hole
+                .addr
+                .wrapping_offset(required_size.try_into().unwrap()),
             size: aligned_hole.size - required_size,
         })
     };
@@ -291,40 +298,43 @@ fn allocate_first_fit(mut previous: &mut Hole, layout: Layout) -> Result<HoleInf

 /// Frees the allocation given by `(addr, size)`. It starts at the given hole and walks the list to
 /// find the correct place (the list is sorted by address).
-fn deallocate(mut hole: &mut Hole, addr: usize, mut size: usize) {
+fn deallocate(mut hole: &mut Hole, addr: *mut u8, mut size: usize) {
     loop {
         assert!(size >= HoleList::min_size());

         let hole_addr = if hole.size == 0 {
             // It's the dummy hole, which is the head of the HoleList. It's somewhere on the stack,
             // so its address is not the address of the hole. We set the addr to 0 as it's always
             // the first hole.
-            0
+            core::ptr::null_mut()
         } else {
             // It's a real hole in memory and its address is the address of the hole
-            hole as *mut _ as usize
+            hole as *mut _ as *mut u8
         };

         // Each freed block must be handled by the previous hole in memory. Thus the freed
         // address must always be behind the current hole.
         assert!(
-            hole_addr + hole.size <= addr,
+            hole_addr.wrapping_offset(hole.size.try_into().unwrap()) <= addr,
             "invalid deallocation (probably a double free)"
         );

         // get information about the next block
-        let next_hole_info = hole.next.as_ref().map(|next| next.info());
+        let next_hole_info = hole.next.as_mut().map(|next| next.info());

         match next_hole_info {
-            Some(next) if hole_addr + hole.size == addr && addr + size == next.addr => {
+            Some(next)
+                if hole_addr.wrapping_offset(hole.size.try_into().unwrap()) == addr
+                    && addr.wrapping_offset(size.try_into().unwrap()) == next.addr =>
+            {
                 // block fills the gap between this hole and the next hole
                 // before: ___XXX____YYYYY____ where X is this hole and Y the next hole
                 // after:  ___XXXFFFFYYYYY____ where F is the freed block

                 hole.size += size + next.size; // merge the F and Y blocks to this X block
                 hole.next = hole.next.as_mut().unwrap().next.take(); // remove the Y block
             }
-            _ if hole_addr + hole.size == addr => {
+            _ if hole_addr.wrapping_add(hole.size.try_into().unwrap()) == addr => {
                 // block is right behind this hole but there is used memory after it
                 // before: ___XXX______YYYYY____ where X is this hole and Y the next hole
                 // after:  ___XXXFFFF__YYYYY____ where F is the freed block
@@ -335,7 +345,7 @@ fn deallocate(mut hole: &mut Hole, addr: usize, mut size: usize) {

                 hole.size += size; // merge the F block to this X block
             }
-            Some(next) if addr + size == next.addr => {
+            Some(next) if addr.wrapping_offset(size.try_into().unwrap()) == next.addr => {
                 // block is right before the next hole but there is used memory before it
                 // before: ___XXX______YYYYY____ where X is this hole and Y the next hole
                 // after:  ___XXX__FFFFYYYYY____ where F is the freed block
@@ -366,7 +376,7 @@ fn deallocate(mut hole: &mut Hole, addr: usize, mut size: usize) {
             next: hole.next.take(), // the reference to the Y block (if it exists)
         };
         // write the new hole to the freed memory
-        debug_assert_eq!(addr % align_of::<Hole>(), 0);
+        debug_assert_eq!(addr as usize % align_of::<Hole>(), 0);
         let ptr = addr as *mut Hole;
         unsafe { ptr.write(new_hole) };
         // add the F block as the next block of the X block
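In the hunks above, usize subtraction such as `aligned_addr - hole.addr` becomes `offset_from`, which for `*mut u8` returns a signed byte distance as isize, so converting it back into an unsigned hole size needs a checked `try_from`/`try_into`. A self-contained sketch of that pattern (the `padding_size` helper is hypothetical, named here for illustration):

use core::convert::TryFrom;

// Mirrors the arithmetic in `split_hole` above.
fn padding_size(hole_addr: *mut u8, aligned_addr: *mut u8) -> usize {
    // SAFETY: both pointers must lie within the same allocation and
    // `aligned_addr >= hole_addr`, which `split_hole` guarantees.
    let distance = unsafe { aligned_addr.offset_from(hole_addr) };
    usize::try_from(distance).unwrap()
}

fn main() {
    let mut hole = [0u8; 64];
    let base = hole.as_mut_ptr();
    let aligned = base.wrapping_add(16);
    assert_eq!(padding_size(base, aligned), 16);
}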

src/lib.rs

Lines changed: 26 additions & 16 deletions
@@ -17,6 +17,7 @@ use core::alloc::GlobalAlloc;
 use core::alloc::Layout;
 #[cfg(feature = "alloc_ref")]
 use core::alloc::{AllocError, Allocator};
+use core::convert::{TryFrom, TryInto};
 use core::mem::MaybeUninit;
 #[cfg(feature = "use_spin")]
 use core::ops::Deref;
@@ -33,7 +34,7 @@ mod test;

 /// A fixed size heap backed by a linked list of free memory blocks.
 pub struct Heap {
-    bottom: usize,
+    bottom: *mut u8,
     size: usize,
     used: usize,
     holes: HoleList,
@@ -54,7 +55,7 @@ impl Heap {
     #[cfg(feature = "const_mut_refs")]
     pub const fn empty() -> Heap {
         Heap {
-            bottom: 0,
+            bottom: core::ptr::null_mut(),
             size: 0,
             used: 0,
             holes: HoleList::empty(),
@@ -67,7 +68,7 @@ impl Heap {
     ///
     /// This function must be called at most once and must only be used on an
     /// empty heap.
-    pub unsafe fn init(&mut self, heap_bottom: usize, heap_size: usize) {
+    pub unsafe fn init(&mut self, heap_bottom: *mut u8, heap_size: usize) {
         self.bottom = heap_bottom;
         self.size = heap_size;
         self.used = 0;
@@ -89,9 +90,12 @@ impl Heap {
     ///
     /// This method panics if the heap is already initialized.
     pub fn init_from_slice(&mut self, mem: &'static mut [MaybeUninit<u8>]) {
-        assert!(self.bottom == 0, "The heap has already been initialized.");
+        assert!(
+            self.bottom.is_null(),
+            "The heap has already been initialized."
+        );
         let size = mem.len();
-        let address = mem.as_ptr() as usize;
+        let address = mem.as_mut_ptr().cast();
         // SAFETY: All initialization requires the bottom address to be valid, which implies it
         // must not be 0. Initially the address is 0. The assertion above ensures that no
         // initialization had been called before.
@@ -104,7 +108,7 @@ impl Heap {
     /// and the memory in the `[heap_bottom, heap_bottom + heap_size)` range must not be used for
     /// anything else. This function is unsafe because it can cause undefined behavior if the
     /// given address is invalid.
-    pub unsafe fn new(heap_bottom: usize, heap_size: usize) -> Heap {
+    pub unsafe fn new(heap_bottom: *mut u8, heap_size: usize) -> Heap {
         if heap_size < HoleList::min_size() {
             Self::empty()
         } else {
@@ -123,7 +127,7 @@ impl Heap {
     /// single operation that can not panic.
     pub fn from_slice(mem: &'static mut [MaybeUninit<u8>]) -> Heap {
         let size = mem.len();
-        let address = mem.as_ptr() as usize;
+        let address = mem.as_mut_ptr().cast();
         // SAFETY: The given address and size is valid according to the safety invariants of the
         // mutable reference handed to us by the caller.
         unsafe { Self::new(address, size) }
@@ -156,7 +160,7 @@ impl Heap {
     }

     /// Returns the bottom address of the heap.
-    pub fn bottom(&self) -> usize {
+    pub fn bottom(&self) -> *mut u8 {
         self.bottom
     }

@@ -166,8 +170,9 @@ impl Heap {
     }

     /// Return the top address of the heap
-    pub fn top(&self) -> usize {
-        self.bottom + self.size
+    pub fn top(&self) -> *mut u8 {
+        self.bottom
+            .wrapping_offset(isize::try_from(self.size).unwrap())
     }

     /// Returns the size of the used part of the heap
@@ -234,7 +239,7 @@ impl LockedHeap {
     /// and the memory in the `[heap_bottom, heap_bottom + heap_size)` range must not be used for
     /// anything else. This function is unsafe because it can cause undefined behavior if the
     /// given address is invalid.
-    pub unsafe fn new(heap_bottom: usize, heap_size: usize) -> LockedHeap {
+    pub unsafe fn new(heap_bottom: *mut u8, heap_size: usize) -> LockedHeap {
         LockedHeap(Spinlock::new(Heap {
             bottom: heap_bottom,
             size: heap_size,
@@ -272,18 +277,23 @@ unsafe impl GlobalAlloc for LockedHeap {

 /// Align downwards. Returns the greatest x with alignment `align`
 /// so that x <= addr. The alignment must be a power of 2.
-pub fn align_down(addr: usize, align: usize) -> usize {
+pub fn align_down_size(size: usize, align: usize) -> usize {
     if align.is_power_of_two() {
-        addr & !(align - 1)
+        size & !(align - 1)
     } else if align == 0 {
-        addr
+        size
     } else {
         panic!("`align` must be a power of 2");
     }
 }

+pub fn align_up_size(size: usize, align: usize) -> usize {
+    align_down_size(size + align - 1, align)
+}
+
 /// Align upwards. Returns the smallest x with alignment `align`
 /// so that x >= addr. The alignment must be a power of 2.
-pub fn align_up(addr: usize, align: usize) -> usize {
-    align_down(addr + align - 1, align)
+pub fn align_up(addr: *mut u8, align: usize) -> *mut u8 {
+    let offset = addr.align_offset(align);
+    addr.wrapping_offset(offset.try_into().unwrap())
 }
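The alignment helpers are now split by type: `align_down_size` and the new `align_up_size` keep operating on plain sizes in integer math, while `align_up` takes and returns a pointer and computes the adjustment with `align_offset`, so provenance is never dropped. A runnable sketch (the helpers are inlined from the diff above so it stands alone):

use core::convert::TryInto;

pub fn align_down_size(size: usize, align: usize) -> usize {
    if align.is_power_of_two() {
        size & !(align - 1)
    } else if align == 0 {
        size
    } else {
        panic!("`align` must be a power of 2");
    }
}

pub fn align_up_size(size: usize, align: usize) -> usize {
    align_down_size(size + align - 1, align)
}

pub fn align_up(addr: *mut u8, align: usize) -> *mut u8 {
    let offset = addr.align_offset(align);
    addr.wrapping_offset(offset.try_into().unwrap())
}

fn main() {
    // Sizes stay in integer math.
    assert_eq!(align_up_size(13, 8), 16);
    assert_eq!(align_up_size(16, 8), 16);

    // Addresses stay in pointer math.
    let mut buf = [0u8; 32];
    let unaligned = buf.as_mut_ptr().wrapping_add(1);
    let aligned = align_up(unaligned, 8);
    assert_eq!(aligned as usize % 8, 0);
    assert!(aligned >= unaligned);
}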

src/test.rs

Lines changed: 8 additions & 5 deletions
@@ -6,7 +6,7 @@ use std::prelude::v1::*;
 fn new_heap() -> Heap {
     const HEAP_SIZE: usize = 1000;
     let heap_space = Box::leak(Box::new([MaybeUninit::uninit(); HEAP_SIZE]));
-    let assumed_location = heap_space.as_ptr() as usize;
+    let assumed_location = heap_space.as_mut_ptr().cast();

     let heap = Heap::from_slice(heap_space);
     assert!(heap.bottom == assumed_location);
@@ -18,7 +18,7 @@ fn new_max_heap() -> Heap {
     const HEAP_SIZE: usize = 1024;
     const HEAP_SIZE_MAX: usize = 2048;
     let heap_space = Box::leak(Box::new([MaybeUninit::<u8>::uninit(); HEAP_SIZE_MAX]));
-    let start_ptr = heap_space.as_ptr() as usize;
+    let start_ptr = heap_space.as_mut_ptr().cast();

     // Unsafe so that we have provenance over the whole allocation.
     let heap = unsafe { Heap::new(start_ptr, HEAP_SIZE) };
@@ -49,14 +49,17 @@ fn allocate_double_usize() {
     let layout = Layout::from_size_align(size, align_of::<usize>());
     let addr = heap.allocate_first_fit(layout.unwrap());
     assert!(addr.is_ok());
-    let addr = addr.unwrap().as_ptr() as usize;
+    let addr = addr.unwrap().as_ptr();
     assert!(addr == heap.bottom);
     let (hole_addr, hole_size) = heap.holes.first_hole().expect("ERROR: no hole left");
-    assert!(hole_addr == heap.bottom + size);
+    assert!(hole_addr == heap.bottom.wrapping_add(size));
     assert!(hole_size == heap.size - size);

     unsafe {
-        assert_eq!((*((addr + size) as *const Hole)).size, heap.size - size);
+        assert_eq!(
+            (*((addr.wrapping_offset(size.try_into().unwrap())) as *const Hole)).size,
+            heap.size - size
+        );
     }
 }
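Taken together, addresses now flow through the public API as pointers end to end. A usage sketch in the style of `new_heap` above (assuming this diff is from the linked_list_allocator crate; the backing array is leaked to get a 'static lifetime):

use core::alloc::Layout;
use core::mem::MaybeUninit;
use linked_list_allocator::Heap;

fn main() {
    const HEAP_SIZE: usize = 1000;
    // Leak the backing array so the heap owns it for the rest of the program;
    // the resulting pointer has provenance over the whole allocation.
    let heap_space = Box::leak(Box::new([MaybeUninit::<u8>::uninit(); HEAP_SIZE]));
    let bottom: *mut u8 = heap_space.as_mut_ptr().cast();

    // SAFETY: `[bottom, bottom + HEAP_SIZE)` is valid and used for nothing else.
    let mut heap = unsafe { Heap::new(bottom, HEAP_SIZE) };

    let layout = Layout::from_size_align(16, 8).unwrap();
    let ptr = heap.allocate_first_fit(layout).expect("out of heap memory");
    unsafe { heap.deallocate(ptr, layout) };
}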
