/* vm/compaction.cpp */

#include "master.hpp"

namespace factor {

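/* Fixup policy for a full compaction, where both the data heap and the
code heap slide. The two fingers track how far each compaction sweep has
progressed: fixup_* always maps a block to its final forwarded address,
while translate_* forwards only blocks behind the finger, which the sweep
has already moved, so that their contents are read from the new location. */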
struct compaction_fixup {
        mark_bits<object> *data_forwarding_map;
        mark_bits<code_block> *code_forwarding_map;
        const object **data_finger;
        const code_block **code_finger;

        explicit compaction_fixup(
                mark_bits<object> *data_forwarding_map_,
                mark_bits<code_block> *code_forwarding_map_,
                const object **data_finger_,
                const code_block **code_finger_) :
                data_forwarding_map(data_forwarding_map_),
                code_forwarding_map(code_forwarding_map_),
                data_finger(data_finger_),
                code_finger(code_finger_) {}

        object *fixup_data(object *obj)
        {
                return data_forwarding_map->forward_block(obj);
        }

        code_block *fixup_code(code_block *compiled)
        {
                return code_forwarding_map->forward_block(compiled);
        }

        object *translate_data(const object *obj)
        {
                if(obj < *data_finger)
                        return fixup_data((object *)obj);
                else
                        return (object *)obj;
        }

        code_block *translate_code(const code_block *compiled)
        {
                if(compiled < *code_finger)
                        return fixup_code((code_block *)compiled);
                else
                        return (code_block *)compiled;
        }

        cell size(object *obj)
        {
                if(data_forwarding_map->marked_p(obj))
                        return obj->size(*this);
                else
                        return data_forwarding_map->unmarked_block_size(obj);
        }

        cell size(code_block *compiled)
        {
                if(code_forwarding_map->marked_p(compiled))
                        return compiled->size(*this);
                else
                        return code_forwarding_map->unmarked_block_size(compiled);
        }
};

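/* For example, collect_compact_impl() below instantiates a slot_visitor
over this fixup; schematically (a sketch with made-up variable names, not
literal code from this file):

        compaction_fixup fixup(data_map,code_map,&data_finger,&code_finger);
        slot_visitor<compaction_fixup> forwarder(vm,fixup);
        forwarder.visit_slots(obj);

Each non-immediate slot of obj is rewritten via fixup.fixup_data(). */

/* Called on each live object after the tenured space compaction loop has
moved it: rewrites the new copy's slots and code block references through
the fixup, and records the object's start offset, which compaction
invalidates (see clear_object_start_offsets() below). */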
struct object_compaction_updater {
        factor_vm *parent;
        compaction_fixup fixup;
        object_start_map *starts;

        explicit object_compaction_updater(factor_vm *parent_, compaction_fixup fixup_) :
                parent(parent_),
                fixup(fixup_),
                starts(&parent->data->tenured->starts) {}

        void operator()(object *old_address, object *new_address, cell size)
        {
                slot_visitor<compaction_fixup> slot_forwarder(parent,fixup);
                slot_forwarder.visit_slots(new_address);

                code_block_visitor<compaction_fixup> code_forwarder(parent,fixup);
                code_forwarder.visit_object_code_block(new_address);

                starts->record_object_start_offset(new_address);
        }
};

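/* Applied to each instruction operand of a freshly moved code block. The
operand's value is reloaded from its offset within the old copy, forwarded
(literals through fixup_data, entry points through fixup_code, VM-relative
addresses recomputed), and stored back through the operand of the new
copy. */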
template<typename Fixup>
struct code_block_compaction_relocation_visitor {
        factor_vm *parent;
        code_block *old_address;
        Fixup fixup;

        explicit code_block_compaction_relocation_visitor(factor_vm *parent_,
                code_block *old_address_,
                Fixup fixup_) :
                parent(parent_),
                old_address(old_address_),
                fixup(fixup_) {}

        void operator()(instruction_operand op)
        {
                cell old_offset = op.rel_offset() + (cell)old_address->entry_point();

                switch(op.rel_type())
                {
                case RT_LITERAL:
                        {
                                cell value = op.load_value(old_offset);
                                if(immediate_p(value))
                                        op.store_value(value);
                                else
                                        op.store_value(RETAG(fixup.fixup_data(untag<object>(value)),TAG(value)));
                                break;
                        }
                case RT_ENTRY_POINT:
                case RT_ENTRY_POINT_PIC:
                case RT_ENTRY_POINT_PIC_TAIL:
                case RT_HERE:
                        {
                                cell value = op.load_value(old_offset);
                                cell offset = TAG(value);
                                code_block *compiled = (code_block *)UNTAG(value);
                                op.store_value((cell)fixup.fixup_code(compiled) + offset);
                                break;
                        }
                case RT_THIS:
                case RT_CARDS_OFFSET:
                case RT_DECKS_OFFSET:
                        parent->store_external_address(op);
                        break;
                default:
                        op.store_value(op.load_value(old_offset));
                        break;
                }
        }
};

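/* Called on each live code block after it has been moved: forwards the
data heap objects held in the block's header (owner, parameters,
relocation), then fixes up every instruction operand via the relocation
visitor above. */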
template<typename Fixup>
struct code_block_compaction_updater {
        factor_vm *parent;
        Fixup fixup;
        slot_visitor<Fixup> data_forwarder;
        code_block_visitor<Fixup> code_forwarder;

        explicit code_block_compaction_updater(factor_vm *parent_,
                Fixup fixup_,
                slot_visitor<Fixup> data_forwarder_,
                code_block_visitor<Fixup> code_forwarder_) :
                parent(parent_),
                fixup(fixup_),
                data_forwarder(data_forwarder_),
                code_forwarder(code_forwarder_) {}

        void operator()(code_block *old_address, code_block *new_address, cell size)
        {
                data_forwarder.visit_code_block_objects(new_address);

                code_block_compaction_relocation_visitor<Fixup> visitor(parent,old_address,fixup);
                new_address->each_instruction_operand(visitor);
        }
};

/* After a compaction, invalidate any code heap roots which are not
marked, and also slide the valid roots up so that call sites can be updated
correctly in case an inline cache compilation triggered compaction. */
void factor_vm::update_code_roots_for_compaction()
{
        std::vector<code_root *>::const_iterator iter = code_roots.begin();
        std::vector<code_root *>::const_iterator end = code_roots.end();

        mark_bits<code_block> *state = &code->allocator->state;

        for(; iter < end; iter++)
        {
                code_root *root = *iter;

                /* ~data_alignment + 1 is the two's complement negation of
                data_alignment; masking with it rounds the root's address down
                to the start of its allocation line. */
                code_block *block = (code_block *)(root->value & (~data_alignment + 1));

                /* Offset of return address within 16-byte allocation line */
                cell offset = root->value - (cell)block;

                if(root->valid && state->marked_p(block))
                {
                        block = state->forward_block(block);
                        root->value = (cell)block + offset;
                }
                else
                        root->valid = false;
        }

        code->update_all_blocks_map(state);
}

/* Compact data and code heaps */
void factor_vm::collect_compact_impl(bool trace_contexts_p)
{
        gc_event *event = current_gc->event;

        if(event) event->started_compaction();

        tenured_space *tenured = data->tenured;
        mark_bits<object> *data_forwarding_map = &tenured->state;
        mark_bits<code_block> *code_forwarding_map = &code->allocator->state;

        /* Figure out where blocks are going to go */
        data_forwarding_map->compute_forwarding();
        code_forwarding_map->compute_forwarding();

        const object *data_finger = tenured->first_block();
        const code_block *code_finger = code->allocator->first_block();

        compaction_fixup fixup(data_forwarding_map,code_forwarding_map,&data_finger,&code_finger);
        slot_visitor<compaction_fixup> data_forwarder(this,fixup);
        code_block_visitor<compaction_fixup> code_forwarder(this,fixup);

        code_forwarder.visit_code_roots();

        /* Object start offsets get recomputed by the object_compaction_updater */
        data->tenured->starts.clear_object_start_offsets();

        /* Slide everything in tenured space up, and update data and code heap
        pointers inside objects. */
        object_compaction_updater object_updater(this,fixup);
        tenured->compact(object_updater,fixup,&data_finger);

        /* Slide everything in the code heap up, and update data and code heap
        pointers inside code blocks. */
        code_block_compaction_updater<compaction_fixup> code_block_updater(this,fixup,data_forwarder,code_forwarder);
        code->allocator->compact(code_block_updater,fixup,&code_finger);

        data_forwarder.visit_roots();
        if(trace_contexts_p)
        {
                data_forwarder.visit_contexts();
                code_forwarder.visit_context_code_blocks();
        }

        update_code_roots_for_compaction();
        callbacks->update();

        if(event) event->ended_compaction();
}

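/* Fixup policy for compacting the code heap only. Data heap pointers are
left alone (fixup_data is the identity), and only code heap pointers are
forwarded. Note that translate_code forwards blocks at or above the code
finger here, whereas compaction_fixup above forwards blocks below it. */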
struct code_compaction_fixup {
        mark_bits<code_block> *code_forwarding_map;
        const code_block **code_finger;

        explicit code_compaction_fixup(mark_bits<code_block> *code_forwarding_map_,
                const code_block **code_finger_) :
                code_forwarding_map(code_forwarding_map_),
                code_finger(code_finger_) {}

        object *fixup_data(object *obj)
        {
                return obj;
        }

        code_block *fixup_code(code_block *compiled)
        {
                return code_forwarding_map->forward_block(compiled);
        }

        object *translate_data(const object *obj)
        {
                return fixup_data((object *)obj);
        }

        code_block *translate_code(const code_block *compiled)
        {
                if(compiled >= *code_finger)
                        return fixup_code((code_block *)compiled);
                else
                        return (code_block *)compiled;
        }

        cell size(object *obj)
        {
                return obj->size();
        }

        cell size(code_block *compiled)
        {
                if(code_forwarding_map->marked_p(compiled))
                        return compiled->size(*this);
                else
                        return code_forwarding_map->unmarked_block_size(compiled);
        }
};

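/* Visits a single data heap object and forwards its code block
references; each_object() maps this over the whole data heap below to
update object-to-code pointers when only the code heap moves. */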
struct object_grow_heap_updater {
        code_block_visitor<code_compaction_fixup> code_forwarder;

        explicit object_grow_heap_updater(code_block_visitor<code_compaction_fixup> code_forwarder_) :
                code_forwarder(code_forwarder_) {}

        void operator()(object *obj)
        {
                code_forwarder.visit_object_code_block(obj);
        }
};

/* Compact just the code heap, after growing the data heap */
void factor_vm::collect_compact_code_impl(bool trace_contexts_p)
{
        /* Figure out where blocks are going to go */
        mark_bits<code_block> *code_forwarding_map = &code->allocator->state;
        code_forwarding_map->compute_forwarding();

        const code_block *code_finger = code->allocator->first_block();

        code_compaction_fixup fixup(code_forwarding_map,&code_finger);
        slot_visitor<code_compaction_fixup> data_forwarder(this,fixup);
        code_block_visitor<code_compaction_fixup> code_forwarder(this,fixup);

        code_forwarder.visit_code_roots();

        if(trace_contexts_p)
                code_forwarder.visit_context_code_blocks();

        /* Update code heap references in data heap */
        object_grow_heap_updater object_updater(code_forwarder);
        each_object(object_updater);

        /* Slide everything in the code heap up, and update code heap
        pointers inside code blocks. */
        code_block_compaction_updater<code_compaction_fixup> code_block_updater(this,fixup,data_forwarder,code_forwarder);
        code->allocator->compact(code_block_updater,fixup,&code_finger);

        update_code_roots_for_compaction();
        callbacks->update();
}

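/* Full compaction: mark, then slide both heaps. If tenured space is still
badly fragmented afterwards, fall back to growing the data heap. */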
void factor_vm::collect_compact(bool trace_contexts_p)
{
        collect_mark_impl(trace_contexts_p);
        collect_compact_impl(trace_contexts_p);

        if(data->high_fragmentation_p())
        {
                /* Compaction did not free up enough memory. Grow the heap. */
                set_current_gc_op(collect_growing_heap_op);
                collect_growing_heap(0,trace_contexts_p);
        }

        code->flush_icache();
}

void factor_vm::collect_growing_heap(cell requested_size, bool trace_contexts_p)
{
        /* Grow the data heap and copy all live objects to the new heap. */
        data_heap *old = data;
        set_data_heap(data->grow(requested_size));
        collect_mark_impl(trace_contexts_p);
        collect_compact_code_impl(trace_contexts_p);
        code->flush_icache();
        delete old;
}

}