@@ -1,17 +1,63 @@
 // Dynamic arenas.
 
+// Arenas are used to quickly allocate objects that share a
+// lifetime. The arena uses ~[u8] vectors as a backing store to
+// allocate objects from. For each allocated object, the arena stores
+// a pointer to the type descriptor followed by the
+// object. (Potentially with alignment padding after each of them.)
+// When the arena is destroyed, it iterates through all of its chunks,
+// and uses the tydesc information to trace through the objects,
+// calling the destructors on them.
+// One subtle point that needs to be addressed is how to handle
+// failures while running the user-provided initializer function. It
+// is important not to run the destructor on uninitialized objects, but
+// how to detect them is somewhat subtle. Since alloc() can be invoked
+// recursively, it is not sufficient to simply exclude the most recent
+// object. To solve this without requiring extra space, we use the low
+// order bit of the tydesc pointer to encode whether the object it
+// describes has been fully initialized.
+
+// A good extension of this scheme would be to segregate data with and
+// without destructors in order to avoid the overhead in the
+// plain-old-data case.
+
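A sketch of the per-object layout the comment above describes (illustrative only; the actual amount of padding depends on each object's alignment and on the platform's preferred alignment for *TypeDesc):

    chunk.data: | tydesc ptr | pad | object | pad | tydesc ptr | pad | object | ...
                  ^ low bit of each tydesc ptr doubles as the "fully initialized" flag
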
 export arena, arena_with_size;
 
 import list;
 import list::{list, cons, nil};
 import unsafe::reinterpret_cast;
+import sys::TypeDesc;
+import libc::size_t;
 
-type chunk = {data: ~[u8], mut fill: uint};
+#[abi = "rust-intrinsic"]
+extern mod rusti {
+    fn move_val_init<T>(&dst: T, -src: T);
+}
+extern mod rustrt {
+    #[rust_stack]
+    fn rust_call_tydesc_glue(root: *u8, tydesc: *TypeDesc, field: size_t);
+}
+// This probably belongs somewhere else. Needs to be kept in sync with
+// changes to glue...
+const tydesc_drop_glue_index: size_t = 3 as size_t;
 
-type arena_ = {mut chunks: @list<@chunk>};
+// The way arena uses arrays is really deeply awful. The arrays are
+// allocated, and have capacities reserved, but the fill for the array
+// will always stay at 0.
+type chunk = {data: ~[u8], mut fill: uint};
 
-enum arena {
-    arena_(arena_)
+struct arena {
+    // The head is separated out from the list as an unbenchmarked
+    // micro-optimization, to avoid needing to match on the list to
+    // access the head.
+    priv mut head: @chunk;
+    priv mut chunks: @list<@chunk>;
+    drop {
+        unsafe {
+            destroy_chunk(self.head);
+            for list::each(self.chunks) |chunk| { destroy_chunk(chunk); }
+        }
+    }
 }
 
 fn chunk(size: uint) -> @chunk {
@@ -21,58 +67,152 @@ fn chunk(size: uint) -> @chunk {
 }
 
 fn arena_with_size(initial_size: uint) -> arena {
-    return arena_({mut chunks: @cons(chunk(initial_size), @nil)});
+    return arena {mut head: chunk(initial_size),
+                  mut chunks: @nil};
 }
 
 fn arena() -> arena {
     arena_with_size(32u)
 }
 
-#[abi = "rust-intrinsic"]
-extern mod rusti {
-    fn move_val_init<T>(&dst: T, -src: T);
+#[inline(always)]
+fn round_up_to(base: uint, align: uint) -> uint {
+    (base + (align - 1)) & !(align - 1)
+}
+
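Every align passed in here is a type alignment or the preferred alignment of *TypeDesc, and those are powers of two, so the mask expression rounds base up to the next multiple of align. A worked case:

    round_up_to(13, 8) == (13 + 7) & !7 == 20 & !7 == 16
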
+// Walk down a chunk, running the destructors for any objects stored
+// in it.
+unsafe fn destroy_chunk(chunk: @chunk) {
+    let mut idx = 0;
+    let buf = vec::unsafe::to_ptr(chunk.data);
+    let fill = chunk.fill;
+
+    while idx < fill {
+        let tydesc_data: *uint = reinterpret_cast(ptr::offset(buf, idx));
+        let (tydesc, is_done) = un_bitpack_tydesc_ptr(*tydesc_data);
+        let size = (*tydesc).size, align = (*tydesc).align;
+
+        let after_tydesc = idx + sys::size_of::<*TypeDesc>();
+
+        let start = round_up_to(after_tydesc, align);
+
+        //debug!("freeing object: idx = %u, size = %u, align = %u, done = %b",
+        //       start, size, align, is_done);
+        if is_done {
+            rustrt::rust_call_tydesc_glue(
+                ptr::offset(buf, start), tydesc, tydesc_drop_glue_index);
+        }
+
+        // Find where the next tydesc lives
+        idx = round_up_to(start + size, sys::pref_align_of::<*TypeDesc>());
+    }
 }
 
+// We encode whether the object a tydesc describes has been
+// initialized in the arena in the low bit of the tydesc pointer. This
+// is necessary in order to properly do cleanup if a failure occurs
+// during an initializer.
+#[inline(always)]
+unsafe fn bitpack_tydesc_ptr(p: *TypeDesc, is_done: bool) -> uint {
+    let p_bits: uint = reinterpret_cast(p);
+    p_bits | (is_done as uint)
+}
+#[inline(always)]
+unsafe fn un_bitpack_tydesc_ptr(p: uint) -> (*TypeDesc, bool) {
+    (reinterpret_cast(p & !1), p & 1 == 1)
+}
+
+
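The packing is only sound because a real *TypeDesc is more than byte-aligned, so its low bit is always zero and free to steal. With a hypothetical tydesc at address 0x8f30:

    bitpack_tydesc_ptr(tydesc, true)   // 0x8f30 | 1 == 0x8f31
    un_bitpack_tydesc_ptr(0x8f31)      // (0x8f30 as *TypeDesc, true)
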
 impl &arena {
-    fn alloc_grow(n_bytes: uint, align: uint) -> *() {
+    fn alloc_grow(n_bytes: uint, align: uint) -> (*u8, *u8) {
         // Allocate a new chunk.
-        let mut head = list::head(self.chunks);
-        let chunk_size = vec::capacity(head.data);
+        let chunk_size = vec::capacity(self.head.data);
         let new_min_chunk_size = uint::max(n_bytes, chunk_size);
-        head = chunk(uint::next_power_of_two(new_min_chunk_size + 1u));
-        self.chunks = @cons(head, self.chunks);
+        self.chunks = @cons(self.head, self.chunks);
+        self.head = chunk(uint::next_power_of_two(new_min_chunk_size + 1u));
 
         return self.alloc_inner(n_bytes, align);
     }
 
     #[inline(always)]
-    fn alloc_inner(n_bytes: uint, align: uint) -> *() {
-        let alignm1 = align - 1u;
-        let mut head = list::head(self.chunks);
+    fn alloc_inner(n_bytes: uint, align: uint) -> (*u8, *u8) {
+        let head = self.head;
 
-        let mut start = head.fill;
-        start = (start + alignm1) & !alignm1;
+        let after_tydesc = head.fill + sys::size_of::<*TypeDesc>();
+
+        let start = round_up_to(after_tydesc, align);
         let end = start + n_bytes;
 
         if end > vec::capacity(head.data) {
             return self.alloc_grow(n_bytes, align);
         }
 
+        //debug!("idx = %u, size = %u, align = %u, fill = %u",
+        //       start, n_bytes, align, head.fill);
+
         unsafe {
-            let p = ptr::offset(vec::unsafe::to_ptr(head.data), start);
-            head.fill = end;
-            return unsafe::reinterpret_cast(p);
+            let buf = vec::unsafe::to_ptr(head.data);
+            let tydesc_p = ptr::offset(buf, head.fill);
+            let p = ptr::offset(buf, start);
+            head.fill = round_up_to(end, sys::pref_align_of::<*TypeDesc>());
+
+            return (tydesc_p, p);
         }
     }
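To make alloc_inner's bookkeeping concrete, suppose (for illustration only) a 64-bit target where sys::size_of::<*TypeDesc>() and sys::pref_align_of::<*TypeDesc>() are both 8, and a 4-byte, 4-aligned T is allocated into a fresh chunk with fill == 0:

    after_tydesc == 0 + 8 == 8             // bytes 0..8 hold the tydesc pointer
    start == round_up_to(8, 4) == 8        // the object occupies bytes 8..12
    end == 8 + 4 == 12
    head.fill == round_up_to(12, 8) == 16  // keeps the next tydesc slot aligned

destroy_chunk() later replays exactly this arithmetic to recover each tydesc/object pair.
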
 
     #[inline(always)]
     fn alloc<T>(op: fn() -> T) -> &self/T {
         unsafe {
             let tydesc = sys::get_type_desc::<T>();
-            let ptr = self.alloc_inner((*tydesc).size, (*tydesc).align);
+            let (ty_ptr, ptr) =
+                self.alloc_inner((*tydesc).size, (*tydesc).align);
+            let ty_ptr: *mut uint = reinterpret_cast(ty_ptr);
             let ptr: *mut T = reinterpret_cast(ptr);
+            // Write in our tydesc along with a bit indicating that it
+            // has *not* been initialized yet.
+            *ty_ptr = reinterpret_cast(tydesc);
+            // Actually initialize it
             rusti::move_val_init(*ptr, op());
+            // Now that we are done, update the tydesc to indicate that
+            // the object is there.
+            *ty_ptr = bitpack_tydesc_ptr(tydesc, true);
+
             return reinterpret_cast(ptr);
         }
     }
 }
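Callers never see the tydesc slot; they allocate through alloc() with an initializer closure, in the same style the tests below use. A minimal usage sketch:

    let a = arena::arena();
    let x = do a.alloc { @10 };  // x borrows from the arena; the @-box is
                                 // destroyed when `a` is dropped
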
 
+#[test]
+fn test_arena_destructors() {
+    let arena = arena::arena();
+    for uint::range(0, 10) |i| {
+        // Arena allocate something with drop glue to make sure it
+        // doesn't leak.
+        do arena.alloc { @i };
+        // Allocate something with funny size and alignment, to keep
+        // things interesting.
+        do arena.alloc { [0u8, 1u8, 2u8]/3 };
+    }
+}
+
+#[test]
+#[should_fail]
+fn test_arena_destructors_fail() {
+    let arena = arena::arena();
+    // Put some stuff in the arena.
+    for uint::range(0, 10) |i| {
+        // Arena allocate something with drop glue to make sure it
+        // doesn't leak.
+        do arena.alloc { @i };
+        // Allocate something with funny size and alignment, to keep
+        // things interesting.
+        do arena.alloc { [0u8, 1u8, 2u8]/3 };
+    }
+    // Now, fail while allocating
+    do arena.alloc::<@int> {
+        // First, recursively allocate something else; that needs to
+        // get freed too.
+        do arena.alloc { @20 };
+        // Now fail.
+        fail;
+    };
+}