/* vm/compaction.cpp */
#include "master.hpp"

namespace factor {

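/* Fixup object for a full compaction. It maps data and code heap pointers
to their post-compaction addresses using the forwarding information in the
mark bitmaps; conceptually, once compute_forwarding() has run,

        forward_block(b) == heap start + total size of marked blocks below b

The fingers record how far each heap's compaction has progressed:
translate_data() and translate_code() forward a pointer only if it lies
below the finger, i.e. if the block it refers to has already been moved. */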
struct compaction_fixup {
        mark_bits<object> *data_forwarding_map;
        mark_bits<code_block> *code_forwarding_map;
        const object **data_finger;
        const code_block **code_finger;

        explicit compaction_fixup(
                mark_bits<object> *data_forwarding_map_,
                mark_bits<code_block> *code_forwarding_map_,
                const object **data_finger_,
                const code_block **code_finger_) :
                data_forwarding_map(data_forwarding_map_),
                code_forwarding_map(code_forwarding_map_),
                data_finger(data_finger_),
                code_finger(code_finger_) {}

        object *fixup_data(object *obj)
        {
                return data_forwarding_map->forward_block(obj);
        }

        code_block *fixup_code(code_block *compiled)
        {
                return code_forwarding_map->forward_block(compiled);
        }

        object *translate_data(const object *obj)
        {
                if(obj < *data_finger)
                        return fixup_data((object *)obj);
                else
                        return (object *)obj;
        }

        code_block *translate_code(const code_block *compiled)
        {
                if(compiled < *code_finger)
                        return fixup_code((code_block *)compiled);
                else
                        return (code_block *)compiled;
        }

        cell size(object *obj)
        {
                if(data_forwarding_map->marked_p(obj))
                        return obj->size(*this);
                else
                        return data_forwarding_map->unmarked_block_size(obj);
        }

        cell size(code_block *compiled)
        {
                if(code_forwarding_map->marked_p(compiled))
                        return compiled->size(*this);
                else
                        return code_forwarding_map->unmarked_block_size(compiled);
        }
};

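/* Called by tenured_space::compact() for each live object after it has been
moved to new_address: forwards the object's slots and embedded code block
pointer, and records the object's new start offset in the object start map. */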
struct object_compaction_updater {
        factor_vm *parent;
        compaction_fixup fixup;
        object_start_map *starts;

        explicit object_compaction_updater(factor_vm *parent_, compaction_fixup fixup_) :
                parent(parent_),
                fixup(fixup_),
                starts(&parent->data->tenured->starts) {}

        void operator()(object *old_address, object *new_address, cell size)
        {
                slot_visitor<compaction_fixup> slot_forwarder(parent,fixup);
                slot_forwarder.visit_slots(new_address);

                code_block_visitor<compaction_fixup> code_forwarder(parent,fixup);
                code_forwarder.visit_object_code_block(new_address);

                starts->record_object_start_offset(new_address);
        }
};

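/* Relocates the instruction operands of a code block that has just been
moved: each operand's value is loaded from the block's old address and
stored into the new copy, with any data or code heap pointers it contains
forwarded along the way. */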
template<typename Fixup>
struct code_block_compaction_relocation_visitor {
        factor_vm *parent;
        code_block *old_address;
        Fixup fixup;

        explicit code_block_compaction_relocation_visitor(factor_vm *parent_,
                code_block *old_address_,
                Fixup fixup_) :
                parent(parent_),
                old_address(old_address_),
                fixup(fixup_) {}

        void operator()(instruction_operand op)
        {
                cell old_offset = op.rel_offset() + (cell)old_address->entry_point();

                switch(op.rel_type())
                {
                /* Tagged pointer to a data heap object; immediate values are
                not heap pointers and pass through unchanged */
                case RT_LITERAL:
                        {
                                cell value = op.load_value(old_offset);
                                if(immediate_p(value))
                                        op.store_value(value);
                                else
                                        op.store_value(RETAG(fixup.fixup_data(untag<object>(value)),TAG(value)));
                                break;
                        }
                /* Code block address; the low tag bits carry a small offset
                which is preserved across forwarding */
                case RT_ENTRY_POINT:
                case RT_ENTRY_POINT_PIC:
                case RT_ENTRY_POINT_PIC_TAIL:
                case RT_HERE:
                        {
                                cell value = op.load_value(old_offset);
                                cell offset = TAG(value);
                                code_block *compiled = (code_block *)UNTAG(value);
                                op.store_value((cell)fixup.fixup_code(compiled) + offset);
                                break;
                        }
                /* Addresses that depend on the block's new location or on VM
                globals; recomputed rather than forwarded */
                case RT_THIS:
                case RT_CARDS_OFFSET:
                case RT_DECKS_OFFSET:
                        parent->store_external_address(op);
                        break;
                /* Everything else is copied through from the old location */
                default:
                        op.store_value(op.load_value(old_offset));
                        break;
                }
        }
};

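/* Called for each live code block after it has been moved to new_address:
forwards the data heap objects the block references, then relocates its
instruction operands. */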
template<typename Fixup>
struct code_block_compaction_updater {
        factor_vm *parent;
        Fixup fixup;
        slot_visitor<Fixup> data_forwarder;
        code_block_visitor<Fixup> code_forwarder;

        explicit code_block_compaction_updater(factor_vm *parent_,
                Fixup fixup_,
                slot_visitor<Fixup> data_forwarder_,
                code_block_visitor<Fixup> code_forwarder_) :
                parent(parent_),
                fixup(fixup_),
                data_forwarder(data_forwarder_),
                code_forwarder(code_forwarder_) {}

        void operator()(code_block *old_address, code_block *new_address, cell size)
        {
                data_forwarder.visit_code_block_objects(new_address);

                code_block_compaction_relocation_visitor<Fixup> visitor(parent,old_address,fixup);
                new_address->each_instruction_operand(visitor);
        }
};

/* After a compaction, invalidate any code heap roots which are not
marked, and forward the valid roots to their new locations so that call
sites can be updated correctly in case an inline cache compilation
triggered the compaction. */
void factor_vm::update_code_roots_for_compaction()
{
        std::vector<code_root *>::const_iterator iter = code_roots.begin();
        std::vector<code_root *>::const_iterator end = code_roots.end();

        mark_bits<code_block> *state = &code->allocator->state;

        for(; iter < end; iter++)
        {
                code_root *root = *iter;
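                /* ~data_alignment + 1 == -data_alignment; masking with it
                rounds the root's value down to the code block's aligned
                start address */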
                code_block *block = (code_block *)(root->value & (~data_alignment + 1));

                /* Offset of return address within 16-byte allocation line */
                cell offset = root->value - (cell)block;

                if(root->valid && state->marked_p(block))
                {
                        block = state->forward_block(block);
                        root->value = (cell)block + offset;
                }
                else
                        root->valid = false;
        }
}

/* Compact data and code heaps */
void factor_vm::collect_compact_impl(bool trace_contexts_p)
{
        gc_event *event = current_gc->event;

        if(event) event->started_compaction();

        tenured_space *tenured = data->tenured;
        mark_bits<object> *data_forwarding_map = &tenured->state;
        mark_bits<code_block> *code_forwarding_map = &code->allocator->state;

        /* Figure out where blocks are going to go */
        data_forwarding_map->compute_forwarding();
        code_forwarding_map->compute_forwarding();

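        /* The fingers track compaction progress: blocks whose old addresses
        lie below a finger have already been moved, so pointers below it must
        be translated through the forwarding maps before being read */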
        const object *data_finger = tenured->first_block();
        const code_block *code_finger = code->allocator->first_block();

        compaction_fixup fixup(data_forwarding_map,code_forwarding_map,&data_finger,&code_finger);
        slot_visitor<compaction_fixup> data_forwarder(this,fixup);
        code_block_visitor<compaction_fixup> code_forwarder(this,fixup);

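        /* Code blocks that have been allocated but not yet fully initialized
        are tracked on the side and must have their references forwarded too */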
        code_forwarder.visit_uninitialized_code_blocks();

        /* Object start offsets get recomputed by the object_compaction_updater */
        data->tenured->starts.clear_object_start_offsets();

        /* Slide everything in tenured space up, and update data and code heap
        pointers inside objects. */
        object_compaction_updater object_updater(this,fixup);
        tenured->compact(object_updater,fixup,&data_finger);

        /* Slide everything in the code heap up, and update data and code heap
        pointers inside code blocks. */
        code_block_compaction_updater<compaction_fixup> code_block_updater(this,fixup,data_forwarder,code_forwarder);
        code->allocator->compact(code_block_updater,fixup,&code_finger);

        data_forwarder.visit_roots();
        if(trace_contexts_p)
        {
                data_forwarder.visit_contexts();
                code_forwarder.visit_context_code_blocks();
        }

        update_code_roots_for_compaction();
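        /* Callback stubs reference code blocks in the code heap, so they
        must be updated now that code blocks have moved */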
        callbacks->update();

        if(event) event->ended_compaction();
}

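/* Fixup object for compacting the code heap only (used after growing the
data heap): data heap pointers are left untouched, while code heap pointers
are forwarded just as in compaction_fixup. */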
struct code_compaction_fixup {
        mark_bits<code_block> *code_forwarding_map;
        const code_block **code_finger;

        explicit code_compaction_fixup(mark_bits<code_block> *code_forwarding_map_,
                const code_block **code_finger_) :
                code_forwarding_map(code_forwarding_map_),
                code_finger(code_finger_) {}

        object *fixup_data(object *obj)
        {
                return obj;
        }

        code_block *fixup_code(code_block *compiled)
        {
                return code_forwarding_map->forward_block(compiled);
        }

        object *translate_data(const object *obj)
        {
                return fixup_data((object *)obj);
        }

        code_block *translate_code(const code_block *compiled)
        {
                /* As in compaction_fixup::translate_code(): blocks below the
                finger have already been moved, so only pointers below it
                need forwarding (the original comparison here was flipped) */
                if(compiled < *code_finger)
                        return fixup_code((code_block *)compiled);
                else
                        return (code_block *)compiled;
        }

        cell size(object *obj)
        {
                return obj->size();
        }

        cell size(code_block *compiled)
        {
                if(code_forwarding_map->marked_p(compiled))
                        return compiled->size(*this);
                else
                        return code_forwarding_map->unmarked_block_size(compiled);
        }
};

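/* Visits a data heap object and forwards its embedded code block pointer;
applied to every object when only the code heap is compacted. */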
struct object_grow_heap_updater {
        code_block_visitor<code_compaction_fixup> code_forwarder;

        explicit object_grow_heap_updater(code_block_visitor<code_compaction_fixup> code_forwarder_) :
                code_forwarder(code_forwarder_) {}

        void operator()(object *obj)
        {
                code_forwarder.visit_object_code_block(obj);
        }
};

/* Compact just the code heap, after growing the data heap */
void factor_vm::collect_compact_code_impl(bool trace_contexts_p)
{
        /* Figure out where blocks are going to go */
        mark_bits<code_block> *code_forwarding_map = &code->allocator->state;
        code_forwarding_map->compute_forwarding();

        const code_block *code_finger = code->allocator->first_block();

        code_compaction_fixup fixup(code_forwarding_map,&code_finger);
        slot_visitor<code_compaction_fixup> data_forwarder(this,fixup);
        code_block_visitor<code_compaction_fixup> code_forwarder(this,fixup);

        code_forwarder.visit_uninitialized_code_blocks();

        if(trace_contexts_p)
                code_forwarder.visit_context_code_blocks();

        /* Update code heap references in data heap */
        object_grow_heap_updater object_updater(code_forwarder);
        each_object(object_updater);

        /* Slide everything in the code heap up, and update code heap
        pointers inside code blocks. */
        code_block_compaction_updater<code_compaction_fixup> code_block_updater(this,fixup,data_forwarder,code_forwarder);
        code->allocator->compact(code_block_updater,fixup,&code_finger);

        update_code_roots_for_compaction();
        callbacks->update();
}

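/* Full compacting GC: mark, then compact both heaps in place. If the data
heap is still badly fragmented afterwards, fall back to growing it. */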
void factor_vm::collect_compact(bool trace_contexts_p)
{
        collect_mark_impl(trace_contexts_p);
        collect_compact_impl(trace_contexts_p);

        if(data->high_fragmentation_p())
        {
                /* Compaction did not free up enough memory. Grow the heap. */
                set_current_gc_op(collect_growing_heap_op);
                collect_growing_heap(0,trace_contexts_p);
        }

        code->flush_icache();
}

void factor_vm::collect_growing_heap(cell requested_size, bool trace_contexts_p)
{
        /* Grow the data heap and copy all live objects to the new heap. */
        data_heap *old = data;
        set_data_heap(data->grow(requested_size));
        collect_mark_impl(trace_contexts_p);
        collect_compact_code_impl(trace_contexts_p);
        code->flush_icache();
        delete old;
}

}