@@ -46,15 +46,15 @@ const tydesc_drop_glue_index: size_t = 3 as size_t;
 // The way arena uses arrays is really deeply awful. The arrays are
 // allocated, and have capacities reserved, but the fill for the array
 // will always stay at 0.
-type chunk = {data: ~[u8], mut fill: uint, is_pod: bool};
+type chunk = {data: @[u8], mut fill: uint, is_pod: bool};
 
 struct arena {
     // The head is separated out from the list as an unbenchmarked
     // microoptimization, to avoid needing to case on the list to
     // access the head.
-    priv mut head: @chunk;
-    priv mut pod_head: @chunk;
-    priv mut chunks: @list<@chunk>;
+    priv mut head: chunk;
+    priv mut pod_head: chunk;
+    priv mut chunks: @list<chunk>;
     drop {
         unsafe {
             destroy_chunk(self.head);
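For context on the change above: `~[u8]` was old Rust's uniquely-owned vector and `@[u8]` its task-local, managed one. In either case the arena never pushes elements; capacity is reserved up front, the vector's own fill stays at 0, and the record's `fill` field is the real bump offset. A rough modern-Rust analogue of that invariant (hypothetical names, not part of this commit):

    // Sketch only: models the chunk record above in today's syntax.
    struct Chunk {
        data: Vec<u8>, // capacity reserved eagerly; len() never advances
        fill: usize,   // bump offset into the reserved capacity
        is_pod: bool,  // true if nothing in this chunk needs a destructor
    }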
@@ -65,10 +65,10 @@ struct arena {
     }
 }
 
-fn chunk(size: uint, is_pod: bool) -> @chunk {
-    let mut v = ~[];
-    vec::reserve(v, size);
-    @{data: v, mut fill: 0u, is_pod: is_pod}
+fn chunk(size: uint, is_pod: bool) -> chunk {
+    let mut v = @[];
+    unsafe { at_vec::unsafe::reserve(v, size); }
+    {data: v, mut fill: 0u, is_pod: is_pod}
 }
 
 fn arena_with_size(initial_size: uint) -> arena {
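The rewritten `chunk` constructor keeps the same shape: reserve `size` bytes of capacity in a fresh `@[]` vector, never push, and start `fill` at 0; the `unsafe` block appears because reserving on a managed vector went through `at_vec::unsafe` at the time. A minimal sketch of the same effect, reusing the hypothetical `Chunk` above:

    // Sketch only: with_capacity reserves without initializing, so the
    // vector's length stays 0, mirroring "fill ... will always stay at 0".
    fn new_chunk(size: usize, is_pod: bool) -> Chunk {
        let v: Vec<u8> = Vec::with_capacity(size);
        debug_assert_eq!(v.len(), 0);
        Chunk { data: v, fill: 0, is_pod }
    }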
@@ -88,9 +88,9 @@ fn round_up_to(base: uint, align: uint) -> uint {
 
 // Walk down a chunk, running the destructors for any objects stored
 // in it.
-unsafe fn destroy_chunk(chunk: @chunk) {
+unsafe fn destroy_chunk(chunk: chunk) {
     let mut idx = 0;
-    let buf = vec::unsafe::to_ptr(chunk.data);
+    let buf = vec::unsafe::to_ptr_slice(chunk.data);
     let fill = chunk.fill;
 
     while idx < fill {
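The loop that follows (cut off by this hunk) walks `(type descriptor, object)` pairs up to `fill`, running each recorded destructor. A hedged sketch of that walk: `TypeDescSketch` is an invented stand-in for the runtime's `TypeDesc`, and `round_up_to` is the conventional mask trick for the helper named in the hunk header (valid when `align` is a nonzero power of two):

    // Invented stand-in carrying only what the walk needs; the real
    // descriptor comes from the runtime, not this commit.
    struct TypeDescSketch {
        size: usize,
        align: usize,
        drop_glue: unsafe fn(*mut u8),
    }

    // Round base up to the next multiple of align (a power of two).
    fn round_up_to(base: usize, align: usize) -> usize {
        (base + align - 1) & !(align - 1)
    }

    // Each entry is a descriptor pointer followed by its aligned object;
    // idx hops over both per iteration, as the real loop does.
    unsafe fn destroy_chunk_sketch(buf: *const u8, fill: usize) {
        let mut idx = 0;
        while idx < fill {
            let td = *(buf.add(idx) as *const *const TypeDescSketch);
            let obj = round_up_to(
                idx + std::mem::size_of::<*const TypeDescSketch>(),
                (*td).align,
            );
            ((*td).drop_glue)(buf.add(obj) as *mut u8); // run the destructor
            idx = round_up_to(
                obj + (*td).size,
                std::mem::align_of::<*const TypeDescSketch>(),
            );
        }
    }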
@@ -133,9 +133,9 @@ impl &arena {
     // Functions for the POD part of the arena
     fn alloc_pod_grow(n_bytes: uint, align: uint) -> *u8 {
         // Allocate a new chunk.
-        let chunk_size = vec::capacity(self.pod_head.data);
+        let chunk_size = at_vec::capacity(self.pod_head.data);
         let new_min_chunk_size = uint::max(n_bytes, chunk_size);
-        self.chunks = @cons(self.pod_head, self.chunks);
+        self.chunks = @cons(copy self.pod_head, self.chunks);
         self.pod_head =
             chunk(uint::next_power_of_two(new_min_chunk_size + 1u), true);
 
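A worked example of the growth policy above: with a current capacity of 4096 bytes and a 100-byte request, `uint::max` yields 4096 and `next_power_of_two(4097)` yields 8192, so chunks roughly double on each grow. The same computation in today's Rust:

    // Sketch only: mirrors next_power_of_two(max(n_bytes, chunk_size) + 1).
    fn grown_size(n_bytes: usize, cap: usize) -> usize {
        (n_bytes.max(cap) + 1).next_power_of_two()
    }
    // grown_size(100, 4096) == 8192; grown_size(10_000, 4096) == 16384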
@@ -144,11 +144,11 @@ impl &arena {
 
     #[inline(always)]
     fn alloc_pod_inner(n_bytes: uint, align: uint) -> *u8 {
-        let head = self.pod_head;
+        let head = &mut self.pod_head;
 
         let start = round_up_to(head.fill, align);
         let end = start + n_bytes;
-        if end > vec::capacity(head.data) {
+        if end > at_vec::capacity(head.data) {
             return self.alloc_pod_grow(n_bytes, align);
         }
         head.fill = end;
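This fast path is plain bump allocation: round `fill` up to the requested alignment, fall back to the grow path if the chunk cannot fit the request, otherwise advance `fill`. A sketch of its shape, reusing the `Chunk` and `round_up_to` sketches above:

    // Sketch only: returns the byte offset of the allocation, or None
    // where the real code calls alloc_pod_grow and retries.
    fn alloc_pod_sketch(c: &mut Chunk, n_bytes: usize, align: usize) -> Option<usize> {
        let start = round_up_to(c.fill, align);
        let end = start + n_bytes;
        if end > c.data.capacity() {
            return None; // chunk is full; caller grows
        }
        c.fill = end;
        Some(start)
    }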
@@ -157,7 +157,7 @@ impl &arena {
         // start, n_bytes, align, head.fill);
 
         unsafe {
-            ptr::offset(vec::unsafe::to_ptr(head.data), start)
+            ptr::offset(vec::unsafe::to_ptr_slice(head.data), start)
         }
     }
 
@@ -175,9 +175,9 @@ impl &arena {
     // Functions for the non-POD part of the arena
     fn alloc_nonpod_grow(n_bytes: uint, align: uint) -> (*u8, *u8) {
         // Allocate a new chunk.
-        let chunk_size = vec::capacity(self.head.data);
+        let chunk_size = at_vec::capacity(self.head.data);
         let new_min_chunk_size = uint::max(n_bytes, chunk_size);
-        self.chunks = @cons(self.head, self.chunks);
+        self.chunks = @cons(copy self.head, self.chunks);
         self.head =
             chunk(uint::next_power_of_two(new_min_chunk_size + 1u), false);
 
@@ -186,13 +186,13 @@ impl &arena {
 
     #[inline(always)]
     fn alloc_nonpod_inner(n_bytes: uint, align: uint) -> (*u8, *u8) {
-        let head = self.head;
+        let head = &mut self.head;
 
         let tydesc_start = head.fill;
         let after_tydesc = head.fill + sys::size_of::<*TypeDesc>();
         let start = round_up_to(after_tydesc, align);
         let end = start + n_bytes;
-        if end > vec::capacity(head.data) {
+        if end > at_vec::capacity(head.data) {
             return self.alloc_nonpod_grow(n_bytes, align);
         }
         head.fill = round_up_to(end, sys::pref_align_of::<*TypeDesc>());
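The non-POD path stores a header per allocation so the destructor can be found later, and the final `round_up_to` re-aligns `fill` so the next entry's descriptor pointer lands on a valid boundary. The resulting layout of one entry, offsets growing rightward (my rendering, not from the commit):

    tydesc_start         start                 end
    [ *TypeDesc ][ pad ] [ object bytes ... ]  [ pad ] <- new fill (re-aligned)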
@@ -201,7 +201,7 @@ impl &arena {
         // start, n_bytes, align, head.fill);
 
         unsafe {
-            let buf = vec::unsafe::to_ptr(head.data);
+            let buf = vec::unsafe::to_ptr_slice(head.data);
             return (ptr::offset(buf, tydesc_start), ptr::offset(buf, start));
         }
     }