@@ -50,12 +50,11 @@ use std::ptr;
 use alloc::heap;
 use alloc::raw_vec::RawVec;

-// The way arena uses arrays is really deeply awful. The arrays are
-// allocated, and have capacities reserved, but the fill for the array
-// will always stay at 0.
 struct Chunk {
     data: RawVec<u8>,
+    /// Index of the first unused byte.
     fill: Cell<usize>,
+    /// Indicates whether objects with destructors are stored in this chunk.
     is_copy: Cell<bool>,
 }

@@ -75,12 +74,37 @@ impl Chunk {
     unsafe fn as_ptr(&self) -> *const u8 {
         self.data.ptr()
     }
+
+    // Walk down a chunk, running the destructors for any objects stored
+    // in it.
+    unsafe fn destroy(&self) {
+        let mut idx = 0;
+        let buf = self.as_ptr();
+        let fill = self.fill.get();
+
+        while idx < fill {
+            let tydesc_data = buf.offset(idx as isize) as *const usize;
+            let (tydesc, is_done) = un_bitpack_tydesc_ptr(*tydesc_data);
+            let (size, align) = ((*tydesc).size, (*tydesc).align);
+
+            let after_tydesc = idx + mem::size_of::<*const TyDesc>();
+
+            let start = round_up(after_tydesc, align);
+
+            if is_done {
+                ((*tydesc).drop_glue)(buf.offset(start as isize) as *const i8);
+            }
+
+            // Find where the next tydesc lives
+            idx = round_up(start + size, mem::align_of::<*const TyDesc>());
+        }
+    }
 }

 /// A slower reflection-based arena that can allocate objects of any type.
 ///
-/// This arena uses `Vec<u8>` as a backing store to allocate objects from. For
-/// each allocated object, the arena stores a pointer to the type descriptor
+/// This arena uses `RawVec<u8>` as a backing store to allocate objects from.
+/// For each allocated object, the arena stores a pointer to the type descriptor
 /// followed by the object (potentially with alignment padding after each
 /// element). When the arena is destroyed, it iterates through all of its
 /// chunks, and uses the tydesc information to trace through the objects,
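To make the layout described in the doc comment concrete, here is a standalone sketch (not part of this patch): each entry in a chunk is a type-descriptor pointer, padding up to the object's alignment, the object itself, and padding back up to pointer alignment for the next entry. `round_up` mirrors the helper defined later in this file; `*const usize` stands in for `*const TyDesc` so the sketch compiles on its own, and the asserted offsets assume a 64-bit target.

use std::mem;

fn round_up(base: usize, align: usize) -> usize {
    (base.checked_add(align - 1)).unwrap() & !(align - 1)
}

// Given a chunk's current fill index and the (size, align) of the object
// being stored, return where the object starts and where the next entry's
// tydesc pointer will go.
fn entry_layout(fill: usize, size: usize, align: usize) -> (usize, usize) {
    let after_tydesc = fill + mem::size_of::<*const usize>();
    let object_start = round_up(after_tydesc, align);
    let next_fill = round_up(object_start + size, mem::align_of::<*const usize>());
    (object_start, next_fill)
}

fn main() {
    // An 8-byte, 8-aligned object stored at fill == 0: the tydesc pointer
    // occupies bytes 0..8 and the object bytes 8..16 (on a 64-bit target).
    assert_eq!(entry_layout(0, 8, 8), (8, 16));
    // A 1-byte object with 16-byte alignment: bytes 8..16 are padding,
    // the object sits at 16, and the next entry starts at 24.
    assert_eq!(entry_layout(0, 1, 16), (16, 24));
}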
@@ -127,10 +151,10 @@ impl<'a> Arena<'a> {
 impl<'longer_than_self> Drop for Arena<'longer_than_self> {
     fn drop(&mut self) {
         unsafe {
-            destroy_chunk(&*self.head.borrow());
+            self.head.borrow().destroy();
             for chunk in self.chunks.borrow().iter() {
                 if !chunk.is_copy.get() {
-                    destroy_chunk(chunk);
+                    chunk.destroy();
                 }
             }
         }
@@ -142,31 +166,6 @@ fn round_up(base: usize, align: usize) -> usize {
     (base.checked_add(align - 1)).unwrap() & !(align - 1)
 }

-// Walk down a chunk, running the destructors for any objects stored
-// in it.
-unsafe fn destroy_chunk(chunk: &Chunk) {
-    let mut idx = 0;
-    let buf = chunk.as_ptr();
-    let fill = chunk.fill.get();
-
-    while idx < fill {
-        let tydesc_data = buf.offset(idx as isize) as *const usize;
-        let (tydesc, is_done) = un_bitpack_tydesc_ptr(*tydesc_data);
-        let (size, align) = ((*tydesc).size, (*tydesc).align);
-
-        let after_tydesc = idx + mem::size_of::<*const TyDesc>();
-
-        let start = round_up(after_tydesc, align);
-
-        if is_done {
-            ((*tydesc).drop_glue)(buf.offset(start as isize) as *const i8);
-        }
-
-        // Find where the next tydesc lives
-        idx = round_up(start + size, mem::align_of::<*const TyDesc>());
-    }
-}
-
 // We encode whether the object a tydesc describes has been
 // initialized in the arena in the low bit of the tydesc pointer. This
 // is necessary in order to properly do cleanup if a panic occurs
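The low-bit packing this comment describes can be sketched in isolation (not taken from this patch; `*const u8` stands in for `*const TyDesc` so the example is self-contained): the descriptor pointer is at least word-aligned, so its lowest bit is free to record whether the object it describes has been fully initialized.

// Pack the "is initialized" flag into the low bit of an aligned pointer.
fn bitpack_ptr(p: *const u8, is_done: bool) -> usize {
    p as usize | (is_done as usize)
}

// Recover the original pointer and the flag.
fn un_bitpack_ptr(packed: usize) -> (*const u8, bool) {
    ((packed & !1) as *const u8, packed & 1 == 1)
}

fn main() {
    let x: u64 = 7;
    let p = &x as *const u64 as *const u8; // 8-aligned, so bit 0 is free
    let packed = bitpack_ptr(p, true);
    assert_eq!(un_bitpack_ptr(packed), (p, true));
    assert_eq!(un_bitpack_ptr(bitpack_ptr(p, false)), (p, false));
}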
@@ -183,6 +182,9 @@ fn un_bitpack_tydesc_ptr(p: usize) -> (*const TyDesc, bool) {
 // HACK(eddyb) TyDesc replacement using a trait object vtable.
 // This could be replaced in the future with a custom DST layout,
 // or `&'static (drop_glue, size, align)` created by a `const fn`.
+// Requirements:
+// * rvalue promotion (issue #1056)
+// * mem::{size_of, align_of} must be const fns
 struct TyDesc {
     drop_glue: fn(*const i8),
     size: usize,
@@ -198,45 +200,52 @@ impl<T: ?Sized> AllTypes for T {}
 unsafe fn get_tydesc<T>() -> *const TyDesc {
     use std::raw::TraitObject;

-    let ptr = &*(1 as *const T);
+    let ptr = &*(heap::EMPTY as *const T);

     // Can use any trait that is implemented for all types.
     let obj = mem::transmute::<&AllTypes, TraitObject>(ptr);
     obj.vtable as *const TyDesc
 }

 impl<'longer_than_self> Arena<'longer_than_self> {
-    #[inline]
-    fn chunk_size(&self) -> usize {
-        self.copy_head.borrow().capacity()
-    }
-
-    // Functions for the POD part of the arena
+    // Grows a given chunk and returns `false`, or replaces it with a bigger
+    // chunk and returns `true`.
+    // This method is shared by both parts of the arena.
     #[cold]
-    fn alloc_copy_grow(&self, n_bytes: usize, align: usize) -> *const u8 {
-        // Allocate a new chunk.
-        let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
-        let new_chunk = Chunk::new((new_min_chunk_size + 1).next_power_of_two(), true);
-        let mut copy_head = self.copy_head.borrow_mut();
-        let old_chunk = mem::replace(&mut *copy_head, new_chunk);
-        self.chunks.borrow_mut().push(old_chunk);
-
-        self.alloc_copy_inner(n_bytes, align)
+    fn alloc_grow(&self, head: &mut Chunk, used_cap: usize, n_bytes: usize) -> bool {
+        if head.data.reserve_in_place(used_cap, n_bytes) {
+            // In-place reallocation succeeded.
+            false
+        } else {
+            // Allocate a new chunk.
+            let new_min_chunk_size = cmp::max(n_bytes, head.capacity());
+            let new_chunk = Chunk::new((new_min_chunk_size + 1).next_power_of_two(), false);
+            let old_chunk = mem::replace(head, new_chunk);
+            if old_chunk.fill.get() != 0 {
+                self.chunks.borrow_mut().push(old_chunk);
+            }
+            true
+        }
     }

+    // Functions for the copyable part of the arena.
+
     #[inline]
     fn alloc_copy_inner(&self, n_bytes: usize, align: usize) -> *const u8 {
-        let start = round_up(self.copy_head.borrow().fill.get(), align);
-        let chunk_size = self.chunk_size();
-
-        let end = start + n_bytes;
-        if end > chunk_size {
-            if !self.copy_head.borrow_mut().data.reserve_in_place(start, n_bytes) {
-                return self.alloc_copy_grow(n_bytes, align);
+        let mut copy_head = self.copy_head.borrow_mut();
+        let fill = copy_head.fill.get();
+        let mut start = round_up(fill, align);
+        let mut end = start + n_bytes;
+
+        if end > copy_head.capacity() {
+            if self.alloc_grow(&mut *copy_head, fill, end - fill) {
+                // Continuing with a newly allocated chunk
+                start = 0;
+                end = n_bytes;
+                copy_head.is_copy.set(true);
             }
         }

-        let copy_head = self.copy_head.borrow();
         copy_head.fill.set(end);

         unsafe { copy_head.as_ptr().offset(start as isize) }
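The chunk-growth policy used by `alloc_grow` above can be illustrated on its own (not part of this patch): when in-place reservation fails, the replacement chunk's capacity is the next power of two strictly greater than the larger of the requested byte count and the current capacity, so a full chunk at least doubles.

fn new_chunk_size(n_bytes: usize, current_capacity: usize) -> usize {
    (std::cmp::max(n_bytes, current_capacity) + 1).next_power_of_two()
}

fn main() {
    // Growing a 4096-byte chunk for a small request at least doubles it.
    assert_eq!(new_chunk_size(16, 4096), 8192);
    // A request larger than the current capacity drives the new size instead.
    assert_eq!(new_chunk_size(10_000, 4096), 16384);
}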
@@ -254,40 +263,28 @@ impl<'longer_than_self> Arena<'longer_than_self> {
         }
     }

-    // Functions for the non-POD part of the arena
-    fn alloc_noncopy_grow(&self, n_bytes: usize, align: usize) -> (*const u8, *const u8) {
-        // Allocate a new chunk.
-        let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
-        let new_chunk = Chunk::new((new_min_chunk_size + 1).next_power_of_two(), false);
-        let mut head = self.head.borrow_mut();
-        let old_chunk = mem::replace(&mut *head, new_chunk);
-        self.chunks.borrow_mut().push(old_chunk);
-
-        self.alloc_noncopy_inner(n_bytes, align)
-    }
+    // Functions for the non-copyable part of the arena.

     #[inline]
     fn alloc_noncopy_inner(&self, n_bytes: usize, align: usize) -> (*const u8, *const u8) {
-        // Be careful to not maintain any `head` borrows active, because
-        // `alloc_noncopy_grow` borrows it mutably.
-        let (start, end, tydesc_start, head_capacity) = {
-            let head = self.head.borrow();
-            let fill = head.fill.get();
-
-            let tydesc_start = fill;
-            let after_tydesc = fill + mem::size_of::<*const TyDesc>();
-            let start = round_up(after_tydesc, align);
-            let end = start + n_bytes;
-
-            (start, end, tydesc_start, head.capacity())
-        };
-
-        if end > head_capacity {
-            return self.alloc_noncopy_grow(n_bytes, align);
+        let mut head = self.head.borrow_mut();
+        let fill = head.fill.get();
+
+        let mut tydesc_start = fill;
+        let after_tydesc = fill + mem::size_of::<*const TyDesc>();
+        let mut start = round_up(after_tydesc, align);
+        let mut end = round_up(start + n_bytes, mem::align_of::<*const TyDesc>());
+
+        if end > head.capacity() {
+            if self.alloc_grow(&mut *head, tydesc_start, end - tydesc_start) {
+                // Continuing with a newly allocated chunk
+                tydesc_start = 0;
+                start = round_up(mem::size_of::<*const TyDesc>(), align);
+                end = round_up(start + n_bytes, mem::align_of::<*const TyDesc>());
+            }
         }

-        let head = self.head.borrow();
-        head.fill.set(round_up(end, mem::align_of::<*const TyDesc>()));
+        head.fill.set(end);

         unsafe {
             let buf = head.as_ptr();
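For context, a minimal usage sketch of the reflection-based arena as a consumer would have used it at the time. The `Arena::new()` / `alloc(|| ...)` API and the nightly feature gate shown are assumptions about the surrounding crate, not part of this patch.

#![feature(rustc_private)] // assumed gate; the in-tree arena crate was unstable

extern crate arena;

use arena::Arena;

fn main() {
    let arena = Arena::new();
    // A value without a destructor is served from the copy_head chunk...
    let n: &mut usize = arena.alloc(|| 42);
    // ...while a value with a destructor goes into the head chunk, and its
    // destructor runs when the arena itself is dropped.
    let s: &mut String = arena.alloc(|| String::from("hello"));
    *n += 1;
    s.push('!');
    println!("{} {}", n, s);
}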