1
1
#include " rust_internal.h"
2
2
#include " rust_shape.h"
3
3
4
// Frees every heap allocation owned by a value, driven by the value's shape
// descriptor.  This is a CRTP visitor over shape::data<annihilator,
// shape::ptr>: the base template walks the shape string and calls back into
// the walk_*2 methods below.  Entry point is the public do_annihilate().
class annihilator : public shape::data<annihilator,shape::ptr> {
    friend class shape::data<annihilator,shape::ptr>;

    // Sub-walker positioned at a different data pointer, inheriting the
    // parent's shape/type-param/table context.
    annihilator(const annihilator &other, const shape::ptr &in_dp)
      : shape::data<annihilator,shape::ptr>(other.task, other.align,
                                            other.sp, other.params,
                                            other.tables, in_dp) {}

    // Sub-walker with a new shape pointer and type params; falls back to
    // the parent's shape tables when in_tables is NULL.
    annihilator(const annihilator &other,
                const uint8_t *in_sp,
                const shape::type_param *in_params,
                const rust_shape_tables *in_tables = NULL)
      : shape::data<annihilator,shape::ptr>(other.task,
                                            other.align,
                                            in_sp,
                                            in_params,
                                            in_tables ? in_tables : other.tables,
                                            other.dp) {}

    // Fully-explicit sub-walker constructor (every field supplied).
    annihilator(const annihilator &other,
                const uint8_t *in_sp,
                const shape::type_param *in_params,
                const rust_shape_tables *in_tables,
                shape::ptr in_dp)
      : shape::data<annihilator,shape::ptr>(other.task,
                                            other.align,
                                            in_sp,
                                            in_params,
                                            in_tables,
                                            in_dp) {}

    // Root constructor; used by do_annihilate() to start a walk at the
    // body of a box.
    annihilator(rust_task *in_task,
                bool in_align,
                const uint8_t *in_sp,
                const shape::type_param *in_params,
                const rust_shape_tables *in_tables,
                uint8_t *in_data)
      : shape::data<annihilator,shape::ptr>(in_task, in_align, in_sp,
                                            in_params, in_tables, in_data) {}

    // Free a vector: first walk its element range (freeing anything the
    // elements own), then free the vector allocation itself.
    // NOTE(review): sp_size is accepted to satisfy the walker interface but
    // is unused here.
    void walk_vec2(bool is_pod, uint16_t sp_size) {
        void *vec = shape::get_dp<void *>(dp);
        walk_vec2(is_pod, get_vec_data_range(dp));
        task->kernel->free(vec);
    }

    // Walk each element in [data_range.first, data_range.second) with a
    // sub-walker so per-element owned data is freed.  is_pod is unused in
    // this overload — elements are walked unconditionally.
    void walk_vec2(bool is_pod,
                   const std::pair<shape::ptr,shape::ptr> &data_range) {
        annihilator sub(*this, data_range.first);
        shape::ptr data_end = sub.end_dp = data_range.second;
        while (sub.dp < data_end) {
            sub.walk_reset();
            // re-enable alignment for each subsequent element
            sub.align = true;
        }
    }

    // Tags: delegate to the base walker's variant handling.
    void walk_tag2(shape::tag_info &tinfo, uint32_t tag_variant) {
        shape::data<annihilator,shape::ptr>::walk_variant1(tinfo, tag_variant);
    }

    // Unique pointer: free the pointee's contents first, then the
    // allocation itself.  Order matters — contents are walked before the
    // backing memory is released.
    void walk_uniq2() {
        void *x = *((void **)dp);
        // free contents first:
        shape::data<annihilator,shape::ptr>::walk_uniq_contents1();
        // now free the ptr:
        task->kernel->free(x);
    }

    void walk_box2() {
        // In annihilator phase, do not walk the box contents. There is an
        // outer loop walking all remaining boxes, and this box may well
        // have been freed already!
    }

    // Closures: what (if anything) to free depends on the closure kind
    // encoded in the shape byte.
    void walk_fn2(char code) {
        switch (code) {
          case shape::SHAPE_UNIQ_FN: {
            fn_env_pair pair = *(fn_env_pair*)dp;

            if (pair.env) {
                // free closed over data:
                shape::data<annihilator,shape::ptr>::walk_fn_contents1();

                // now free the ptr:
                task->kernel->free(pair.env);
            }
            break;
          }
          case shape::SHAPE_BOX_FN: {
            // the box will be visited separately:
            shape::bump_dp<void *>(dp); // skip over the code ptr
            walk_box2();                // walk over the environment ptr
            break;
          }
          case shape::SHAPE_BARE_FN:          // Does not close over data.
          case shape::SHAPE_STACK_FN: break;  // Not reachable from heap.
          default: abort();
        }
    }

    // Objects own nothing to free here.
    void walk_obj2() {
        return;
    }

    // Interface values are box-like; treated the same as walk_box2 (a
    // no-op — the outer box loop handles them).
    void walk_iface2() {
        walk_box2();
    }

    // Type descriptors carry no owned heap data; just validate the kind.
    void walk_tydesc2(char kind) {
        switch (kind) {
          case shape::SHAPE_TYDESC:
          case shape::SHAPE_SEND_TYDESC:
            break;
          default: abort();
        }
    }

    // Arguments marshalled across to the Rust stack for a resource dtor.
    struct run_dtor_args {
        const shape::rust_fn *dtor;
        void *data;
    };

    // Calling convention of a resource destructor: (retptr, env, dataptr).
    typedef void (*dtor)(void **retptr, void *env, void *dptr);

    // Trampoline invoked on the Rust stack; unpacks args and calls the
    // destructor with its environment.
    static void run_dtor(run_dtor_args *args) {
        dtor f = (dtor)args->dtor;
        f(NULL, args->dtor->env, args->data);
    }

    // Resources: run the user destructor (on the Rust stack), then walk the
    // resource's fields up to end_sp so their owned data is freed too.
    // NOTE(review): n_params, params and live are unused here.
    void walk_res2(const shape::rust_fn *dtor, unsigned n_params,
                   const shape::type_param *params, const uint8_t *end_sp,
                   bool live) {
        void *data = (void *)(uintptr_t)dp;
        // Switch back to the Rust stack to run the destructor
        run_dtor_args args = {dtor, data};
        task->call_on_rust_stack((void*)&args, (void*)run_dtor);

        while (this->sp != end_sp) {
            this->walk();
            align = true;
        }
    }

    // Nested contexts simply continue the walk.
    void walk_subcontext2(annihilator &sub) { sub.walk(); }

    void walk_uniq_contents2(annihilator &sub) { sub.walk(); }

    // Structs: walk each field until the shape pointer reaches end_sp.
    void walk_struct2(const uint8_t *end_sp) {
        while (this->sp != end_sp) {
            this->walk();
            align = true;
        }
    }

    // Tag variants: walk the variant's fields with a sub-walker scoped to
    // the variant's shape range and the tag's type params.
    void walk_variant2(shape::tag_info &tinfo, uint32_t variant_id,
                       const std::pair<const uint8_t *,const uint8_t *>
                       variant_ptr_and_end) {
        annihilator sub(*this, variant_ptr_and_end.first, tinfo.params);

        const uint8_t *variant_end = variant_ptr_and_end.second;
        while (sub.sp < variant_end) {
            sub.walk();
            align = true;
        }
    }

    // Scalars own no heap data; nothing to free.
    template<typename T>
    inline void walk_number2() { /* no-op */ }

public:
    static void do_annihilate(rust_task *task, rust_opaque_box *box);
};
176
+
177
+ void
178
+ annihilator::do_annihilate (rust_task *task, rust_opaque_box *box) {
179
+ const type_desc *tydesc = box->td ;
180
+ uint8_t *p = (uint8_t *) box_body (box);
181
+ shape::arena arena;
182
+ shape::type_param *params =
183
+ shape::type_param::from_tydesc_and_data (tydesc, p, arena);
184
+
185
+ annihilator annihilator (task, true , tydesc->shape ,
186
+ params, tydesc->shape_tables , p);
187
+ annihilator.walk ();
188
+ task->boxed .free (box);
189
+ }
190
+
191
// Public entry point: destroy one opaque box and free everything it owns.
void
annihilate_box(rust_task *task, rust_opaque_box *box) {
    annihilator::do_annihilate(task, box);
}
195
+
4
196
void
5
197
annihilate_boxes (rust_task *task) {
6
198
LOG (task, gc, " annihilating boxes for task %p" , task);
@@ -10,6 +202,7 @@ annihilate_boxes(rust_task *task) {
10
202
while (box != NULL ) {
11
203
rust_opaque_box *tmp = box;
12
204
box = box->next ;
13
- boxed-> free ( tmp);
205
+ annihilate_box (task, tmp);
14
206
}
15
207
}
208
+
0 commit comments