]> gitweb.factorcode.org Git - factor.git/blob - vm/compaction.cpp
vm: iterate_callstack_reversed in slot_visitor
[factor.git] / vm / compaction.cpp
1 #include "master.hpp"
2
3 namespace factor {
4
5 struct compaction_fixup {
6         mark_bits<object> *data_forwarding_map;
7         mark_bits<code_block> *code_forwarding_map;
8         const object **data_finger;
9         const code_block **code_finger;
10
11         explicit compaction_fixup(
12                 mark_bits<object> *data_forwarding_map_,
13                 mark_bits<code_block> *code_forwarding_map_,
14                 const object **data_finger_,
15                 const code_block **code_finger_) :
16                 data_forwarding_map(data_forwarding_map_),
17                 code_forwarding_map(code_forwarding_map_),
18                 data_finger(data_finger_),
19                 code_finger(code_finger_) {}
20
21         object *fixup_data(object *obj)
22         {
23                 return data_forwarding_map->forward_block(obj);
24         }
25
26         code_block *fixup_code(code_block *compiled)
27         {
28                 return code_forwarding_map->forward_block(compiled);
29         }
30
31         object *translate_data(const object *obj)
32         {
33                 if(obj < *data_finger)
34                         return fixup_data((object *)obj);
35                 else
36                         return (object *)obj;
37         }
38
39         code_block *translate_code(const code_block *compiled)
40         {
41                 if(compiled < *code_finger)
42                         return fixup_code((code_block *)compiled);
43                 else
44                         return (code_block *)compiled;
45         }
46
47         cell size(object *obj)
48         {
49                 if(data_forwarding_map->marked_p(obj))
50                         return obj->size(*this);
51                 else
52                         return data_forwarding_map->unmarked_block_size(obj);
53         }
54
55         cell size(code_block *compiled)
56         {
57                 if(code_forwarding_map->marked_p(compiled))
58                         return compiled->size(*this);
59                 else
60                         return code_forwarding_map->unmarked_block_size(compiled);
61         }
62 };
63
64 struct object_compaction_updater {
65         factor_vm *parent;
66         compaction_fixup fixup;
67         object_start_map *starts;
68
69         explicit object_compaction_updater(factor_vm *parent_, compaction_fixup fixup_) :
70                 parent(parent_),
71                 fixup(fixup_),
72                 starts(&parent->data->tenured->starts) {}
73
74         void operator()(object *old_address, object *new_address, cell size)
75         {
76                 slot_visitor<compaction_fixup> slot_forwarder(parent,fixup);
77                 slot_forwarder.visit_slots(new_address);
78
79                 code_block_visitor<compaction_fixup> code_forwarder(parent,fixup);
80                 code_forwarder.visit_object_code_block(new_address);
81
82                 starts->record_object_start_offset(new_address);
83         }
84 };
85
/* Rewrites every relocated operand inside a code block that has just been
moved. Operand values are loaded relative to the block's OLD address
(that is where they were originally resolved) and stored at the block's
new location. */
template<typename Fixup>
struct code_block_compaction_relocation_visitor {
	factor_vm *parent;
	code_block *old_address;
	Fixup fixup;

	explicit code_block_compaction_relocation_visitor(factor_vm *parent_,
		code_block *old_address_,
		Fixup fixup_) :
		parent(parent_),
		old_address(old_address_),
		fixup(fixup_) {}

	void operator()(instruction_operand op)
	{
		/* Absolute address the operand occupied before the move; passed
		to load_value() so the old bits can still be decoded */
		cell old_offset = op.rel_offset() + (cell)old_address->entry_point();

		switch(op.rel_type())
		{
		case RT_LITERAL:
			{
				/* Tagged data heap value: immediates need no forwarding,
				pointers are forwarded and re-tagged with their original tag */
				cell value = op.load_value(old_offset);
				if(immediate_p(value))
					op.store_value(value);
				else
					op.store_value(RETAG(fixup.fixup_data(untag<object>(value)),TAG(value)));
				break;
			}
		case RT_ENTRY_POINT:
		case RT_ENTRY_POINT_PIC:
		case RT_ENTRY_POINT_PIC_TAIL:
		case RT_HERE:
			{
				/* Address into a code block: the tag bits carry a small
				offset from the block start, preserved across forwarding */
				cell value = op.load_value(old_offset);
				cell offset = TAG(value);
				code_block *compiled = (code_block *)UNTAG(value);
				op.store_value((cell)fixup.fixup_code(compiled) + offset);
				break;
			}
		case RT_THIS:
		case RT_CARDS_OFFSET:
		case RT_DECKS_OFFSET:
			/* VM-global addresses: recomputed fresh instead of forwarded */
			parent->store_external_address(op);
			break;
		default:
			/* Remaining relocation types: re-store the old value at the
			new location unchanged */
			op.store_value(op.load_value(old_offset));
			break;
		}
	}
};
136
137 template<typename Fixup>
138 struct code_block_compaction_updater {
139         factor_vm *parent;
140         Fixup fixup;
141         slot_visitor<Fixup> data_forwarder;
142         code_block_visitor<Fixup> code_forwarder;
143
144         explicit code_block_compaction_updater(factor_vm *parent_,
145                 Fixup fixup_,
146                 slot_visitor<Fixup> data_forwarder_,
147                 code_block_visitor<Fixup> code_forwarder_) :
148                 parent(parent_),
149                 fixup(fixup_),
150                 data_forwarder(data_forwarder_),
151                 code_forwarder(code_forwarder_) {}
152
153         void operator()(code_block *old_address, code_block *new_address, cell size)
154         {
155                 data_forwarder.visit_code_block_objects(new_address);
156
157                 code_block_compaction_relocation_visitor<Fixup> visitor(parent,old_address,fixup);
158                 new_address->each_instruction_operand(visitor);
159         }
160 };
161
162 /* After a compaction, invalidate any code heap roots which are not
163 marked, and also slide the valid roots up so that call sites can be updated
164 correctly in case an inline cache compilation triggered compaction. */
165 void factor_vm::update_code_roots_for_compaction()
166 {
167         std::vector<code_root *>::const_iterator iter = code_roots.begin();
168         std::vector<code_root *>::const_iterator end = code_roots.end();
169
170         mark_bits<code_block> *state = &code->allocator->state;
171
172         for(; iter < end; iter++)
173         {
174                 code_root *root = *iter;
175                 code_block *block = (code_block *)(root->value & (~data_alignment + 1));
176
177                 /* Offset of return address within 16-byte allocation line */
178                 cell offset = root->value - (cell)block;
179
180                 if(root->valid && state->marked_p(block))
181                 {
182                         block = state->forward_block(block);
183                         root->value = (cell)block + offset;
184                 }
185                 else
186                         root->valid = false;
187         }
188 }
189
190 /* Compact data and code heaps */
191 void factor_vm::collect_compact_impl(bool trace_contexts_p)
192 {
193         gc_event *event = current_gc->event;
194
195         if(event) event->started_compaction();
196
197         tenured_space *tenured = data->tenured;
198         mark_bits<object> *data_forwarding_map = &tenured->state;
199         mark_bits<code_block> *code_forwarding_map = &code->allocator->state;
200
201         /* Figure out where blocks are going to go */
202         data_forwarding_map->compute_forwarding();
203         code_forwarding_map->compute_forwarding();
204
205         const object *data_finger = tenured->first_block();
206         const code_block *code_finger = code->allocator->first_block();
207
208         compaction_fixup fixup(data_forwarding_map,code_forwarding_map,&data_finger,&code_finger);
209         slot_visitor<compaction_fixup> data_forwarder(this,fixup);
210         code_block_visitor<compaction_fixup> code_forwarder(this,fixup);
211
212         code_forwarder.visit_code_roots();
213
214         /* Object start offsets get recomputed by the object_compaction_updater */
215         data->tenured->starts.clear_object_start_offsets();
216
217         /* Slide everything in tenured space up, and update data and code heap
218         pointers inside objects. */
219         object_compaction_updater object_updater(this,fixup);
220         tenured->compact(object_updater,fixup,&data_finger);
221
222         /* Slide everything in the code heap up, and update data and code heap
223         pointers inside code blocks. */
224         code_block_compaction_updater<compaction_fixup> code_block_updater(this,fixup,data_forwarder,code_forwarder);
225         code->allocator->compact(code_block_updater,fixup,&code_finger);
226
227         code->update_all_blocks_set(code_forwarding_map);
228
229         data_forwarder.visit_roots();
230         if(trace_contexts_p)
231         {
232                 data_forwarder.visit_contexts();
233                 code_forwarder.visit_context_code_blocks();
234         }
235
236         update_code_roots_for_compaction();
237         callbacks->update();
238
239         code->initialize_all_blocks_set();
240
241         if(event) event->ended_compaction();
242 }
243
/* Fixup policy for compacting only the code heap; the data heap is not
moving, so data pointers pass through unchanged. */
struct code_compaction_fixup {
	mark_bits<code_block> *code_forwarding_map;
	const code_block **code_finger;

	explicit code_compaction_fixup(mark_bits<code_block> *code_forwarding_map_,
		const code_block **code_finger_) :
		code_forwarding_map(code_forwarding_map_),
		code_finger(code_finger_) {}

	/* Data heap blocks keep their addresses during code-only compaction */
	object *fixup_data(object *obj)
	{
		return obj;
	}

	/* New address of a code heap block after compaction */
	code_block *fixup_code(code_block *compiled)
	{
		return code_forwarding_map->forward_block(compiled);
	}

	object *translate_data(const object *obj)
	{
		return fixup_data((object *)obj);
	}

	/* NOTE(review): this forwards pointers at or ABOVE the code finger,
	whereas compaction_fixup::translate_code forwards pointers BELOW its
	finger -- the two comparisons point in opposite directions. Confirm
	which direction is intended before touching either. */
	code_block *translate_code(const code_block *compiled)
	{
		if(compiled >= *code_finger)
			return fixup_code((code_block *)compiled);
		else
			return (code_block *)compiled;
	}

	/* Data heap blocks are all live from this fixup's point of view */
	cell size(object *obj)
	{
		return obj->size();
	}

	/* Size of a code heap block; unmarked blocks report the size of the
	whole dead gap they start */
	cell size(code_block *compiled)
	{
		if(code_forwarding_map->marked_p(compiled))
			return compiled->size(*this);
		else
			return code_forwarding_map->unmarked_block_size(compiled);
	}
};
289
290 struct object_grow_heap_updater {
291         code_block_visitor<code_compaction_fixup> code_forwarder;
292
293         explicit object_grow_heap_updater(code_block_visitor<code_compaction_fixup> code_forwarder_) :
294                 code_forwarder(code_forwarder_) {}
295
296         void operator()(object *obj)
297         {
298                 code_forwarder.visit_object_code_block(obj);
299         }
300 };
301
302 /* Compact just the code heap, after growing the data heap */
303 void factor_vm::collect_compact_code_impl(bool trace_contexts_p)
304 {
305         /* Figure out where blocks are going to go */
306         mark_bits<code_block> *code_forwarding_map = &code->allocator->state;
307         code_forwarding_map->compute_forwarding();
308
309         const code_block *code_finger = code->allocator->first_block();
310
311         code_compaction_fixup fixup(code_forwarding_map,&code_finger);
312         slot_visitor<code_compaction_fixup> data_forwarder(this,fixup);
313         code_block_visitor<code_compaction_fixup> code_forwarder(this,fixup);
314
315         code_forwarder.visit_code_roots();
316
317         if(trace_contexts_p)
318                 code_forwarder.visit_context_code_blocks();
319
320         /* Update code heap references in data heap */
321         object_grow_heap_updater object_updater(code_forwarder);
322         each_object(object_updater);
323
324         /* Slide everything in the code heap up, and update code heap
325         pointers inside code blocks. */
326         code_block_compaction_updater<code_compaction_fixup> code_block_updater(this,fixup,data_forwarder,code_forwarder);
327         code->allocator->compact(code_block_updater,fixup,&code_finger);
328
329         update_code_roots_for_compaction();
330         callbacks->update();
331 }
332
/* Full mark phase followed by compaction of both heaps; falls back to
growing the data heap if compaction alone leaves it too fragmented. */
void factor_vm::collect_compact(bool trace_contexts_p)
{
	collect_mark_impl(trace_contexts_p);
	collect_compact_impl(trace_contexts_p);

	if(data->high_fragmentation_p())
	{
		/* Compaction did not free up enough memory. Grow the heap. */
		set_current_gc_op(collect_growing_heap_op);
		collect_growing_heap(0,trace_contexts_p);
	}

	/* Code blocks may have moved; the instruction cache must not serve
	stale code */
	code->flush_icache();
}
347
/* Replace the data heap with a larger one and migrate all live objects
into it; requested_size is the extra room the caller needs (0 = just
relieve fragmentation). */
void factor_vm::collect_growing_heap(cell requested_size, bool trace_contexts_p)
{
	/* Grow the data heap and copy all live objects to the new heap. */
	data_heap *old = data;   /* keep the old heap alive until the copy is done */
	set_data_heap(data->grow(requested_size));
	collect_mark_impl(trace_contexts_p);
	collect_compact_code_impl(trace_contexts_p);
	code->flush_icache();
	delete old;
}
358
359 }