/* vm/compaction.cpp: sliding compaction of the Factor VM's data and code
heaps. */
1 #include "master.hpp"
2
3 namespace factor {
4
5 struct compaction_fixup {
6         mark_bits<object> *data_forwarding_map;
7         mark_bits<code_block> *code_forwarding_map;
8         const object **data_finger;
9         const code_block **code_finger;
10
11         explicit compaction_fixup(
12                 mark_bits<object> *data_forwarding_map_,
13                 mark_bits<code_block> *code_forwarding_map_,
14                 const object **data_finger_,
15                 const code_block **code_finger_) :
16                 data_forwarding_map(data_forwarding_map_),
17                 code_forwarding_map(code_forwarding_map_),
18                 data_finger(data_finger_),
19                 code_finger(code_finger_) {}
20
21         object *fixup_data(object *obj)
22         {
23                 return data_forwarding_map->forward_block(obj);
24         }
25
26         code_block *fixup_code(code_block *compiled)
27         {
28                 return code_forwarding_map->forward_block(compiled);
29         }
30
31         object *translate_data(const object *obj)
32         {
33                 if(obj < *data_finger)
34                         return fixup_data((object *)obj);
35                 else
36                         return (object *)obj;
37         }
38
39         code_block *translate_code(const code_block *compiled)
40         {
41                 if(compiled < *code_finger)
42                         return fixup_code((code_block *)compiled);
43                 else
44                         return (code_block *)compiled;
45         }
46
47         cell size(object *obj)
48         {
49                 if(data_forwarding_map->marked_p(obj))
50                         return obj->size(*this);
51                 else
52                         return data_forwarding_map->unmarked_block_size(obj);
53         }
54
55         cell size(code_block *compiled)
56         {
57                 if(code_forwarding_map->marked_p(compiled))
58                         return compiled->size(*this);
59                 else
60                         return code_forwarding_map->unmarked_block_size(compiled);
61         }
62 };
63
64 struct object_compaction_updater {
65         factor_vm *parent;
66         compaction_fixup fixup;
67         object_start_map *starts;
68
69         explicit object_compaction_updater(factor_vm *parent_, compaction_fixup fixup_) :
70                 parent(parent_),
71                 fixup(fixup_),
72                 starts(&parent->data->tenured->starts) {}
73
74         void operator()(object *old_address, object *new_address, cell size)
75         {
76                 slot_visitor<compaction_fixup> slot_forwarder(parent,fixup);
77                 slot_forwarder.visit_slots(new_address);
78
79                 code_block_visitor<compaction_fixup> code_forwarder(parent,fixup);
80                 code_forwarder.visit_object_code_block(new_address);
81
82                 starts->record_object_start_offset(new_address);
83         }
84 };
85
/* Applied to every relocation entry of a code block that has just been
moved: re-resolves the entry's value at the block's new location. Values are
loaded at old_offset -- the operand's offset relative to the block's
pre-move entry point -- so the bits are read as they were laid out in the
old copy, then stored back through the operand at the new site. */
template<typename Fixup>
struct code_block_compaction_relocation_visitor {
	factor_vm *parent;
	code_block *old_address;
	Fixup fixup;

	explicit code_block_compaction_relocation_visitor(factor_vm *parent_,
		code_block *old_address_,
		Fixup fixup_) :
		parent(parent_),
		old_address(old_address_),
		fixup(fixup_) {}

	void operator()(instruction_operand op)
	{
		/* Where this operand's value lived relative to the old copy of
		the block */
		cell old_offset = op.rel_offset() + (cell)old_address->entry_point();

		switch(op.rel_type())
		{
		case RT_LITERAL:
			{
				/* Tagged object literal: immediates are copied through
				unchanged, heap pointers are forwarded with their tag
				re-applied */
				cell value = op.load_value(old_offset);
				if(immediate_p(value))
					op.store_value(value);
				else
					op.store_value(RETAG(fixup.fixup_data(untag<object>(value)),TAG(value)));
				break;
			}
		/* All four cases below hold an address inside a code block; the
		low bits (within data_alignment) carry an offset past the block's
		aligned base, which is preserved across forwarding */
		case RT_ENTRY_POINT:
		case RT_ENTRY_POINT_PIC:
		case RT_ENTRY_POINT_PIC_TAIL:
		case RT_HERE:
			{
				cell value = op.load_value(old_offset);
				cell offset = value & (data_alignment - 1);
				op.store_value((cell)fixup.fixup_code((code_block *)value) + offset);
				break;
			}
		/* Addresses derived from VM state rather than heap blocks; ask
		the VM to recompute them for the new site */
		case RT_THIS:
		case RT_CARDS_OFFSET:
		case RT_DECKS_OFFSET:
			parent->store_external_address(op);
			break;
		default:
			/* Any other relocation type: re-store the old value at the
			new site unchanged */
			op.store_value(op.load_value(old_offset));
			break;
		}
	}
};
135
136 template<typename Fixup>
137 struct code_block_compaction_updater {
138         factor_vm *parent;
139         Fixup fixup;
140         slot_visitor<Fixup> data_forwarder;
141         code_block_visitor<Fixup> code_forwarder;
142
143         explicit code_block_compaction_updater(factor_vm *parent_,
144                 Fixup fixup_,
145                 slot_visitor<Fixup> data_forwarder_,
146                 code_block_visitor<Fixup> code_forwarder_) :
147                 parent(parent_),
148                 fixup(fixup_),
149                 data_forwarder(data_forwarder_),
150                 code_forwarder(code_forwarder_) {}
151
152         void operator()(code_block *old_address, code_block *new_address, cell size)
153         {
154                 data_forwarder.visit_code_block_objects(new_address);
155
156                 code_block_compaction_relocation_visitor<Fixup> visitor(parent,old_address,fixup);
157                 new_address->each_instruction_operand(visitor);
158         }
159 };
160
161 /* After a compaction, invalidate any code heap roots which are not
162 marked, and also slide the valid roots up so that call sites can be updated
163 correctly in case an inline cache compilation triggered compaction. */
164 void factor_vm::update_code_roots_for_compaction()
165 {
166         std::vector<code_root *>::const_iterator iter = code_roots.begin();
167         std::vector<code_root *>::const_iterator end = code_roots.end();
168
169         mark_bits<code_block> *state = &code->allocator->state;
170
171         for(; iter < end; iter++)
172         {
173                 code_root *root = *iter;
174                 code_block *block = (code_block *)(root->value & (~data_alignment + 1));
175
176                 /* Offset of return address within 16-byte allocation line */
177                 cell offset = root->value - (cell)block;
178
179                 if(root->valid && state->marked_p(block))
180                 {
181                         block = state->forward_block(block);
182                         root->value = (cell)block + offset;
183                 }
184                 else
185                         root->valid = false;
186         }
187 }
188
/* Compact data and code heaps */
void factor_vm::collect_compact_impl(bool trace_contexts_p)
{
	current_gc->event->started_compaction();

	tenured_space *tenured = data->tenured;
	mark_bits<object> *data_forwarding_map = &tenured->state;
	mark_bits<code_block> *code_forwarding_map = &code->allocator->state;

	/* Figure out where blocks are going to go */
	data_forwarding_map->compute_forwarding();
	code_forwarding_map->compute_forwarding();

	/* Fingers start at the bottom of each heap; the compact() calls below
	receive pointers to them, and the fixup reads them through the same
	pointers (presumably advanced as compaction proceeds -- see
	compaction_fixup::translate_data/translate_code) */
	const object *data_finger = tenured->first_block();
	const code_block *code_finger = code->allocator->first_block();

	compaction_fixup fixup(data_forwarding_map,code_forwarding_map,&data_finger,&code_finger);
	slot_visitor<compaction_fixup> data_forwarder(this,fixup);
	code_block_visitor<compaction_fixup> code_forwarder(this,fixup);

	code_forwarder.visit_uninitialized_code_blocks();

	/* Object start offsets get recomputed by the object_compaction_updater */
	data->tenured->starts.clear_object_start_offsets();

	/* Slide everything in tenured space up, and update data and code heap
	pointers inside objects. */
	object_compaction_updater object_updater(this,fixup);
	tenured->compact(object_updater,fixup,&data_finger);

	/* Slide everything in the code heap up, and update data and code heap
	pointers inside code blocks. */
	code_block_compaction_updater<compaction_fixup> code_block_updater(this,fixup,data_forwarder,code_forwarder);
	code->allocator->compact(code_block_updater,fixup,&code_finger);

	/* Finally, forward pointers held outside the heaps: GC roots, and
	(when requested) execution contexts */
	data_forwarder.visit_roots();
	if(trace_contexts_p)
	{
		data_forwarder.visit_contexts();
		code_forwarder.visit_context_code_blocks();
	}

	update_code_roots_for_compaction();
	callbacks->update();

	current_gc->event->ended_compaction();
}
236
/* Fixup policy used when only the code heap is compacted (after growing the
data heap): data pointers are passed through untouched, code block pointers
are forwarded. */
struct code_compaction_fixup {
	mark_bits<code_block> *code_forwarding_map;
	const code_block **code_finger;

	explicit code_compaction_fixup(mark_bits<code_block> *code_forwarding_map_,
		const code_block **code_finger_) :
		code_forwarding_map(code_forwarding_map_),
		code_finger(code_finger_) {}

	/* Data heap is not moving, so data fixup is the identity */
	object *fixup_data(object *obj)
	{
		return obj;
	}

	/* Destination address of a code block, per the forwarding map */
	code_block *fixup_code(code_block *compiled)
	{
		return code_forwarding_map->forward_block(compiled);
	}

	object *translate_data(const object *obj)
	{
		return fixup_data((object *)obj);
	}

	/* NOTE(review): this comparison is the opposite of
	compaction_fixup::translate_code, which forwards blocks *below* the
	finger -- confirm the asymmetry is intentional */
	code_block *translate_code(const code_block *compiled)
	{
		if(compiled >= *code_finger)
			return fixup_code((code_block *)compiled);
		else
			return (code_block *)compiled;
	}

	/* Data heap objects report their own size unconditionally */
	cell size(object *obj)
	{
		return obj->size();
	}

	/* Size of a code heap block: ask the block itself when marked,
	otherwise ask the map for the size of the unmarked hole */
	cell size(code_block *compiled)
	{
		if(code_forwarding_map->marked_p(compiled))
			return compiled->size(*this);
		else
			return code_forwarding_map->unmarked_block_size(compiled);
	}
};
282
/* Applied to every data heap object when only the code heap has moved:
forwards the object's code block reference through the supplied visitor. */
struct object_grow_heap_updater {
	code_block_visitor<code_compaction_fixup> code_forwarder;

	explicit object_grow_heap_updater(code_block_visitor<code_compaction_fixup> code_forwarder_) :
		code_forwarder(code_forwarder_) {}

	void operator()(object *obj)
	{
		code_forwarder.visit_object_code_block(obj);
	}
};
294
/* Compact just the code heap, after growing the data heap */
void factor_vm::collect_compact_code_impl(bool trace_contexts_p)
{
	/* Figure out where blocks are going to go */
	mark_bits<code_block> *code_forwarding_map = &code->allocator->state;
	code_forwarding_map->compute_forwarding();

	/* Finger starts at the bottom of the code heap; compact() below gets
	a pointer to it, and the fixup reads it through the same pointer */
	const code_block *code_finger = code->allocator->first_block();

	code_compaction_fixup fixup(code_forwarding_map,&code_finger);
	slot_visitor<code_compaction_fixup> data_forwarder(this,fixup);
	code_block_visitor<code_compaction_fixup> code_forwarder(this,fixup);

	code_forwarder.visit_uninitialized_code_blocks();

	/* Note: contexts are visited before compaction here, unlike
	collect_compact_impl where they are visited afterwards */
	if(trace_contexts_p)
		code_forwarder.visit_context_code_blocks();

	/* Update code heap references in data heap */
	object_grow_heap_updater object_updater(code_forwarder);
	each_object(object_updater);

	/* Slide everything in the code heap up, and update code heap
	pointers inside code blocks. */
	code_block_compaction_updater<code_compaction_fixup> code_block_updater(this,fixup,data_forwarder,code_forwarder);
	code->allocator->compact(code_block_updater,fixup,&code_finger);

	update_code_roots_for_compaction();
	callbacks->update();
}
325
/* Full compacting collection: mark live data, slide both heaps, then flush
the instruction cache since code blocks may have moved. */
void factor_vm::collect_compact(bool trace_contexts_p)
{
	collect_mark_impl(trace_contexts_p);
	collect_compact_impl(trace_contexts_p);
	/* Compiled code was relocated; stale instructions may still sit in
	the CPU's instruction cache at the old addresses */
	code->flush_icache();
}
332
/* Replace the data heap with a larger one sized for requested_bytes, then
mark and compact so all references are consistent with the new layout. */
void factor_vm::collect_growing_heap(cell requested_bytes, bool trace_contexts_p)
{
	/* Grow the data heap and copy all live objects to the new heap. */
	data_heap *old = data;
	set_data_heap(data->grow(requested_bytes));
	collect_mark_impl(trace_contexts_p);
	collect_compact_code_impl(trace_contexts_p);
	code->flush_icache();
	/* The old heap can only be freed once everything live has been
	evacuated by the mark/compact passes above */
	delete old;
}
343
344 }