@@ -17,9 +17,10 @@
 // order bit of the tydesc pointer to encode whether the object it
 // describes has been fully initialized.

-// A good extension of this scheme would be to segregate data with and
-// without destructors in order to avoid the overhead in the
-// plain-old-data case.
+// As an optimization, objects with destructors are stored in
+// different chunks than objects without destructors. This reduces
+// overhead when initializing plain-old-data and means we don't need
+// to waste time running the destructors of POD.

 export arena, arena_with_size;

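For reference, the bit-packing trick the first comment describes (and that `un_bitpack_tydesc_ptr` below implements) looks roughly like this in modern Rust. This is a minimal sketch with hypothetical names; it assumes type descriptors are at least word-aligned, so bit 0 of their pointer is always zero and free to reuse:

```rust
// Hypothetical sketch: pack an "initialized" flag into the low bit of
// a word-aligned pointer, mirroring (un_)bitpack_tydesc_ptr.
fn bitpack(tydesc: *const u8, initialized: bool) -> usize {
    // Alignment guarantees bit 0 is zero, so OR-ing the flag is safe.
    tydesc as usize | initialized as usize
}

fn un_bitpack(packed: usize) -> (*const u8, bool) {
    // Mask the flag back out to recover the original pointer.
    ((packed & !1) as *const u8, packed & 1 == 1)
}

fn main() {
    let word: usize = 0;
    let p = &word as *const usize as *const u8;
    let (q, init) = un_bitpack(bitpack(p, true));
    assert_eq!(q, p);
    assert!(init);
}
```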
@@ -32,6 +33,7 @@ import libc::size_t;
 #[abi = "rust-intrinsic"]
 extern mod rusti {
     fn move_val_init<T>(&dst: T, -src: T);
+    fn needs_drop<T>() -> bool;
 }
 extern mod rustrt {
     #[rust_stack]
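The `needs_drop` intrinsic added here is the same query that modern Rust exposes as `std::mem::needs_drop`. A quick illustration of what it reports:

```rust
use std::mem::needs_drop;

fn main() {
    // Plain-old-data types have no drop glue...
    assert!(!needs_drop::<u32>());
    assert!(!needs_drop::<[u8; 16]>());
    // ...while owning types do, and must take the non-POD path.
    assert!(needs_drop::<String>());
    assert!(needs_drop::<Vec<u8>>());
}
```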
@@ -44,30 +46,34 @@ const tydesc_drop_glue_index: size_t = 3 as size_t;
 // The way arena uses arrays is really deeply awful. The arrays are
 // allocated, and have capacities reserved, but the fill for the array
 // will always stay at 0.
-type chunk = {data: ~[u8], mut fill: uint};
+type chunk = {data: ~[u8], mut fill: uint, is_pod: bool};

 struct arena {
     // The head is separated out from the list as an unbenchmarked
     // microoptimization, to avoid needing to case on the list to
     // access the head.
     priv mut head: @chunk;
+    priv mut pod_head: @chunk;
     priv mut chunks: @list<@chunk>;
     drop {
         unsafe {
             destroy_chunk(self.head);
-            for list::each(self.chunks) |chunk| { destroy_chunk(chunk); }
+            for list::each(self.chunks) |chunk| {
+                if !chunk.is_pod { destroy_chunk(chunk); }
+            }
         }
     }
 }

-fn chunk(size: uint) -> @chunk {
+fn chunk(size: uint, is_pod: bool) -> @chunk {
     let mut v = ~[];
     vec::reserve(v, size);
-    @{data: v, mut fill: 0u}
+    @{data: v, mut fill: 0u, is_pod: is_pod}
 }

 fn arena_with_size(initial_size: uint) -> arena {
-    return arena {mut head: chunk(initial_size),
+    return arena {mut head: chunk(initial_size, false),
+                  mut pod_head: chunk(initial_size, true),
                   mut chunks: @nil};
 }

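Sketched in modern Rust (all names hypothetical), the shape this hunk gives the arena is roughly the following: two bump heads, one shared list of retired chunks, and a teardown that only walks the non-POD chunks:

```rust
// Hypothetical modern-Rust sketch of the segregated-chunk layout.
struct Chunk {
    data: Vec<u8>,  // capacity reserved up front; len stays 0
    fill: usize,    // bump cursor into the reserved capacity
    is_pod: bool,   // POD chunks hold no objects needing destruction
}

struct Arena {
    head: Chunk,        // current non-POD chunk
    pod_head: Chunk,    // current POD chunk
    chunks: Vec<Chunk>, // retired chunks of both kinds
}

// Stand-in for the real destroy_chunk, which walks each object's
// tydesc header and runs its drop glue.
fn destroy_chunk(_chunk: &Chunk) {}

impl Drop for Arena {
    fn drop(&mut self) {
        destroy_chunk(&self.head);
        // POD chunks are skipped: freeing their backing memory is all
        // the cleanup they need.
        for chunk in self.chunks.iter().filter(|c| !c.is_pod) {
            destroy_chunk(chunk);
        }
    }
}

fn main() {
    let _arena = Arena {
        head: Chunk { data: Vec::with_capacity(64), fill: 0, is_pod: false },
        pod_head: Chunk { data: Vec::with_capacity(64), fill: 0, is_pod: true },
        chunks: Vec::new(),
    };
}
```

Skipping POD chunks at teardown is what makes the segregation pay off twice: no tydesc headers written at allocation time, and no destructor walk at destruction time.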
@@ -122,49 +128,90 @@ unsafe fn un_bitpack_tydesc_ptr(p: uint) -> (*TypeDesc, bool) {
     (reinterpret_cast(p & !1), p & 1 == 1)
 }

-
+// The duplication between the POD and non-POD functions is annoying.
 impl &arena {
-    fn alloc_grow(n_bytes: uint, align: uint) -> (*u8, *u8) {
+    // Functions for the POD part of the arena
+    fn alloc_pod_grow(n_bytes: uint, align: uint) -> *u8 {
+        // Allocate a new chunk.
+        let chunk_size = vec::capacity(self.pod_head.data);
+        let new_min_chunk_size = uint::max(n_bytes, chunk_size);
+        self.chunks = @cons(self.pod_head, self.chunks);
+        self.pod_head =
+            chunk(uint::next_power_of_two(new_min_chunk_size + 1u), true);
+
+        return self.alloc_pod_inner(n_bytes, align);
+    }
+
+    #[inline(always)]
+    fn alloc_pod_inner(n_bytes: uint, align: uint) -> *u8 {
+        let head = self.pod_head;
+
+        let start = round_up_to(head.fill, align);
+        let end = start + n_bytes;
+        if end > vec::capacity(head.data) {
+            return self.alloc_pod_grow(n_bytes, align);
+        }
+        head.fill = end;
+
+        //debug!("idx = %u, size = %u, align = %u, fill = %u",
+        //       start, n_bytes, align, head.fill);
+
+        unsafe {
+            ptr::offset(vec::unsafe::to_ptr(head.data), start)
+        }
+    }
+
+    #[inline(always)]
+    fn alloc_pod<T>(op: fn() -> T) -> &self/T {
+        unsafe {
+            let tydesc = sys::get_type_desc::<T>();
+            let ptr = self.alloc_pod_inner((*tydesc).size, (*tydesc).align);
+            let ptr: *mut T = reinterpret_cast(ptr);
+            rusti::move_val_init(*ptr, op());
+            return reinterpret_cast(ptr);
+        }
+    }
+
+    // Functions for the non-POD part of the arena
+    fn alloc_nonpod_grow(n_bytes: uint, align: uint) -> (*u8, *u8) {
         // Allocate a new chunk.
         let chunk_size = vec::capacity(self.head.data);
         let new_min_chunk_size = uint::max(n_bytes, chunk_size);
         self.chunks = @cons(self.head, self.chunks);
-        self.head = chunk(uint::next_power_of_two(new_min_chunk_size + 1u));
+        self.head =
+            chunk(uint::next_power_of_two(new_min_chunk_size + 1u), false);

-        return self.alloc_inner(n_bytes, align);
+        return self.alloc_nonpod_inner(n_bytes, align);
     }

     #[inline(always)]
-    fn alloc_inner(n_bytes: uint, align: uint) -> (*u8, *u8) {
+    fn alloc_nonpod_inner(n_bytes: uint, align: uint) -> (*u8, *u8) {
         let head = self.head;

+        let tydesc_start = head.fill;
         let after_tydesc = head.fill + sys::size_of::<*TypeDesc>();
-
         let start = round_up_to(after_tydesc, align);
         let end = start + n_bytes;
         if end > vec::capacity(head.data) {
-            return self.alloc_grow(n_bytes, align);
+            return self.alloc_nonpod_grow(n_bytes, align);
         }
+        head.fill = round_up_to(end, sys::pref_align_of::<*TypeDesc>());

         //debug!("idx = %u, size = %u, align = %u, fill = %u",
         //       start, n_bytes, align, head.fill);

         unsafe {
             let buf = vec::unsafe::to_ptr(head.data);
-            let tydesc_p = ptr::offset(buf, head.fill);
-            let p = ptr::offset(buf, start);
-            head.fill = round_up_to(end, sys::pref_align_of::<*TypeDesc>());
-
-            return (tydesc_p, p);
+            return (ptr::offset(buf, tydesc_start), ptr::offset(buf, start));
         }
     }

     #[inline(always)]
-    fn alloc<T>(op: fn() -> T) -> &self/T {
+    fn alloc_nonpod<T>(op: fn() -> T) -> &self/T {
         unsafe {
             let tydesc = sys::get_type_desc::<T>();
             let (ty_ptr, ptr) =
-                self.alloc_inner((*tydesc).size, (*tydesc).align);
+                self.alloc_nonpod_inner((*tydesc).size, (*tydesc).align);
             let ty_ptr: *mut uint = reinterpret_cast(ty_ptr);
             let ptr: *mut T = reinterpret_cast(ptr);
             // Write in our tydesc along with a bit indicating that it
@@ -179,6 +226,14 @@ impl &arena {
             return reinterpret_cast(ptr);
         }
     }
+
+    // The external interface
+    #[inline(always)]
+    fn alloc<T>(op: fn() -> T) -> &self/T {
+        if !rusti::needs_drop::<T>() {
+            self.alloc_pod(op)
+        } else { self.alloc_nonpod(op) }
+    }
 }

 #[test]
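For a concrete feel of the bump arithmetic both `*_inner` functions perform, here is a worked sketch. It assumes `round_up_to` is the usual power-of-two rounding, and uses a pointer-sized value as a stand-in for the tydesc-pointer header the non-POD path reserves in front of each object:

```rust
// Sketch of the bump-allocation arithmetic; `align` must be a power
// of two, as it is for the type alignments used above.
fn round_up_to(base: usize, align: usize) -> usize {
    (base + align - 1) & !(align - 1)
}

fn main() {
    // POD path: align the fill cursor, then bump it by the size.
    let fill = 13;
    let start = round_up_to(fill, 8);
    assert_eq!(start, 16);
    assert_eq!(start + 24, 40); // next fill after a 24-byte object

    // Non-POD path: leave room for a pointer-sized tydesc header
    // before the object, then align the object itself.
    let header = std::mem::size_of::<*const u8>();
    let obj_start = round_up_to(fill + header, 16);
    assert_eq!(obj_start % 16, 0);
    assert!(obj_start >= fill + header);
}
```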