#include "master.hpp"

namespace factor {

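/* Fixup policy used when compacting both the data and code heaps. The
   forwarding maps give the new location of every marked block, and the
   "fingers" track how far compaction has progressed in each heap:
   translate_data() and translate_code() only forward addresses below the
   finger, since blocks above it have not been moved yet. */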
struct compaction_fixup {
  static const bool translated_code_block_map = false;

  mark_bits* data_forwarding_map;
  mark_bits* code_forwarding_map;
  const object** data_finger;
  const code_block** code_finger;

  compaction_fixup(mark_bits* data_forwarding_map,
                   mark_bits* code_forwarding_map,
                   const object** data_finger,
                   const code_block** code_finger)
      : data_forwarding_map(data_forwarding_map),
        code_forwarding_map(code_forwarding_map),
        data_finger(data_finger),
        code_finger(code_finger) {}

  object* fixup_data(object* obj) {
    return (object*)data_forwarding_map->forward_block((cell)obj);
  }

  code_block* fixup_code(code_block* compiled) {
    return (code_block*)code_forwarding_map->forward_block((cell)compiled);
  }

  object* translate_data(const object* obj) {
    if (obj < *data_finger)
      return fixup_data((object*)obj);
    else
      return (object*)obj;
  }

  code_block* translate_code(const code_block* compiled) {
    if (compiled < *code_finger)
      return fixup_code((code_block*)compiled);
    else
      return (code_block*)compiled;
  }

  cell size(object* obj) {
    if (data_forwarding_map->marked_p((cell)obj))
      return obj->size(*this);
    else
      return data_forwarding_map->unmarked_block_size((cell)obj);
  }

  cell size(code_block* compiled) {
    if (code_forwarding_map->marked_p((cell)compiled))
      return compiled->size(*this);
    else
      return code_forwarding_map->unmarked_block_size((cell)compiled);
  }
};

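/* Updates a single tenured object after it has been moved to its new
   address: forwards its data slots, forwards its associated code block
   (if any), and records the object's new start offset in tenured
   space's object start map. */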
struct object_compaction_updater {
  factor_vm* parent;
  compaction_fixup fixup;
  object_start_map* starts;

  object_compaction_updater(factor_vm* parent, compaction_fixup fixup)
      : parent(parent),
        fixup(fixup),
        starts(&parent->data->tenured->starts) {}

  void operator()(object* old_address, object* new_address, cell size) {
    slot_visitor<compaction_fixup> slot_forwarder(parent, fixup);
    slot_forwarder.visit_slots(new_address);

    code_block_visitor<compaction_fixup> code_forwarder(parent, fixup);
    code_forwarder.visit_object_code_block(new_address);

    starts->record_object_start_offset(new_address);
  }
};

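/* Patches every relocation in a moved code block. Literal operands are
   forwarded through the data heap map, entry point operands through the
   code heap map, and VM-relative addresses (RT_THIS, card/deck offsets)
   are recomputed. All other operand types reload their old value and
   re-store it, which re-encodes any relative offset against the block's
   new address. */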
template <typename Fixup> struct code_block_compaction_relocation_visitor {
  factor_vm* parent;
  code_block* old_address;
  Fixup fixup;

  code_block_compaction_relocation_visitor(factor_vm* parent,
                                           code_block* old_address,
                                           Fixup fixup)
      : parent(parent), old_address(old_address), fixup(fixup) {}

  void operator()(instruction_operand op) {
    cell old_offset = op.rel_offset() + old_address->entry_point();

    switch (op.rel_type()) {
      case RT_LITERAL: {
        cell value = op.load_value(old_offset);
        if (immediate_p(value))
          op.store_value(value);
        else
          op.store_value(
              RETAG(fixup.fixup_data(untag<object>(value)), TAG(value)));
        break;
      }
      case RT_ENTRY_POINT:
      case RT_ENTRY_POINT_PIC:
      case RT_ENTRY_POINT_PIC_TAIL:
      case RT_HERE: {
        cell value = op.load_value(old_offset);
        cell offset = TAG(value);
        code_block* compiled = (code_block*)UNTAG(value);
        op.store_value((cell)fixup.fixup_code(compiled) + offset);
        break;
      }
      case RT_THIS:
      case RT_CARDS_OFFSET:
      case RT_DECKS_OFFSET:
        parent->store_external_address(op);
        break;
      default:
        op.store_value(op.load_value(old_offset));
        break;
    }
  }
};

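/* Updates a code block after it has been moved: forwards the data heap
   objects the block references, then patches each instruction operand
   via the relocation visitor above. */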
template <typename Fixup> struct code_block_compaction_updater {
  factor_vm* parent;
  Fixup fixup;
  slot_visitor<Fixup> data_forwarder;
  code_block_visitor<Fixup> code_forwarder;

  code_block_compaction_updater(
      factor_vm* parent, Fixup fixup, slot_visitor<Fixup> data_forwarder,
      code_block_visitor<Fixup> code_forwarder)
      : parent(parent),
        fixup(fixup),
        data_forwarder(data_forwarder),
        code_forwarder(code_forwarder) {}

  void operator()(code_block* old_address, code_block* new_address, cell size) {
    data_forwarder.visit_code_block_objects(new_address);

    code_block_compaction_relocation_visitor<Fixup> visitor(parent, old_address,
                                                            fixup);
    new_address->each_instruction_operand(visitor);
  }
};

/* After a compaction, invalidate any code heap roots which are not
marked, and also slide the valid roots up so that call sites can be updated
correctly in case an inline cache compilation triggered compaction. */
void factor_vm::update_code_roots_for_compaction() {
  std::vector<code_root*>::const_iterator iter = code_roots.begin();
  std::vector<code_root*>::const_iterator end = code_roots.end();

  mark_bits* state = &code->allocator->state;

  for (; iter < end; iter++) {
    code_root* root = *iter;
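    /* ~data_alignment + 1 is the two's complement negation of
       data_alignment, so this masks the root down to the start of its
       allocation line: with 16-byte alignment, a value of 0x1234 gives
       block 0x1230 and offset 4. */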
    cell block = root->value & (~data_alignment + 1);

    /* Offset of return address within 16-byte allocation line */
    cell offset = root->value - block;

    if (root->valid && state->marked_p(block)) {
      block = state->forward_block(block);
      root->value = block + offset;
    } else
      root->valid = false;
  }
}

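/* Full compaction pass: compute forwarding addresses for every marked
   block in both heaps, slide the live data and code up, then retarget
   roots, contexts and callbacks at the new addresses. */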
/* Compact data and code heaps */
void factor_vm::collect_compact_impl(bool trace_contexts_p) {
  gc_event* event = current_gc->event;

#ifdef FACTOR_DEBUG
  code->verify_all_blocks_set();
#endif

  if (event)
    event->started_compaction();

  tenured_space* tenured = data->tenured;
  mark_bits* data_forwarding_map = &tenured->state;
  mark_bits* code_forwarding_map = &code->allocator->state;

  /* Figure out where blocks are going to go */
  data_forwarding_map->compute_forwarding();
  code_forwarding_map->compute_forwarding();

  const object* data_finger = (object*)tenured->start;
  const code_block* code_finger = (code_block*)code->allocator->start;

  {
    compaction_fixup fixup(data_forwarding_map, code_forwarding_map, &data_finger,
                           &code_finger);

    slot_visitor<compaction_fixup> data_forwarder(this, fixup);
    code_block_visitor<compaction_fixup> code_forwarder(this, fixup);

    code_forwarder.visit_uninitialized_code_blocks();

    /* Object start offsets get recomputed by the object_compaction_updater */
    data->tenured->starts.clear_object_start_offsets();

    /* Slide everything in tenured space up, and update data and code heap
       pointers inside objects. */
    {
      object_compaction_updater object_updater(this, fixup);
      tenured->compact(object_updater, fixup, &data_finger);
    }

    /* Slide everything in the code heap up, and update data and code heap
       pointers inside code blocks. */
    {
      code_block_compaction_updater<compaction_fixup> code_block_updater(
          this, fixup, data_forwarder, code_forwarder);
      code->allocator->compact(code_block_updater, fixup, &code_finger);
    }

    data_forwarder.visit_roots();
    if (trace_contexts_p) {
      data_forwarder.visit_contexts();
      code_forwarder.visit_context_code_blocks();
    }
  }

  update_code_roots_for_compaction();
  callbacks->update();

  code->initialize_all_blocks_set();

  if (event)
    event->ended_compaction();
}

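/* Fixup policy used when only the code heap is compacted. Data heap
   addresses are left untouched (fixup_data is the identity), so only
   code block references need forwarding. */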
struct code_compaction_fixup {
  static const bool translated_code_block_map = false;

  mark_bits* code_forwarding_map;
  const code_block** code_finger;

  code_compaction_fixup(mark_bits* code_forwarding_map,
                        const code_block** code_finger)
      : code_forwarding_map(code_forwarding_map), code_finger(code_finger) {}

  object* fixup_data(object* obj) { return obj; }

  code_block* fixup_code(code_block* compiled) {
    return (code_block*)code_forwarding_map->forward_block((cell)compiled);
  }

  object* translate_data(const object* obj) { return fixup_data((object*)obj); }

  code_block* translate_code(const code_block* compiled) {
    if (compiled < *code_finger)
      return fixup_code((code_block*)compiled);
    else
      return (code_block*)compiled;
  }

  cell size(object* obj) { return obj->size(); }

  cell size(code_block* compiled) {
    if (code_forwarding_map->marked_p((cell)compiled))
      return compiled->size(*this);
    else
      return code_forwarding_map->unmarked_block_size((cell)compiled);
  }
};

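/* Visits each data heap object and forwards the code block it points
   to, if any; used when only the code heap moves, so code heap
   references held by data heap objects still need updating. */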
struct object_grow_heap_updater {
  code_block_visitor<code_compaction_fixup> code_forwarder;

  explicit object_grow_heap_updater(
      code_block_visitor<code_compaction_fixup> code_forwarder)
      : code_forwarder(code_forwarder) {}

  void operator()(object* obj) { code_forwarder.visit_object_code_block(obj); }
};

/* Compact just the code heap, after growing the data heap */
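/* The data heap does not move here, but objects in it may reference
   code blocks that do, so each object's associated code block, if any,
   must still be forwarded. */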
void factor_vm::collect_compact_code_impl(bool trace_contexts_p) {
  /* Figure out where blocks are going to go */
  mark_bits* code_forwarding_map = &code->allocator->state;
  code_forwarding_map->compute_forwarding();

  const code_block* code_finger = (code_block*)code->allocator->start;

  code_compaction_fixup fixup(code_forwarding_map, &code_finger);
  slot_visitor<code_compaction_fixup> data_forwarder(this, fixup);
  code_block_visitor<code_compaction_fixup> code_forwarder(this, fixup);

  code_forwarder.visit_uninitialized_code_blocks();

  if (trace_contexts_p)
    code_forwarder.visit_context_code_blocks();

  /* Update code heap references in data heap */
  object_grow_heap_updater object_updater(code_forwarder);
  each_object(object_updater);

  /* Slide everything in the code heap up, and update code heap
     pointers inside code blocks. */
  code_block_compaction_updater<code_compaction_fixup> code_block_updater(
      this, fixup, data_forwarder, code_forwarder);
  code->allocator->compact(code_block_updater, fixup, &code_finger);

  update_code_roots_for_compaction();
  callbacks->update();
  code->initialize_all_blocks_set();
}

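/* Entry point for a full compacting collection: mark, compact both
   heaps, and if the data heap is still badly fragmented, fall back to
   growing it. */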
void factor_vm::collect_compact(bool trace_contexts_p) {
  collect_mark_impl(trace_contexts_p);
  collect_compact_impl(trace_contexts_p);

  if (data->high_fragmentation_p()) {
    /* Compaction did not free up enough memory. Grow the heap. */
    set_current_gc_op(collect_growing_heap_op);
    collect_growing_heap(0, trace_contexts_p);
  }

  code->flush_icache();
}

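/* Used when compaction alone cannot reclaim enough memory: allocate a
   larger data heap, copy live objects into it, then run a mark pass and
   compact only the code heap. */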
void factor_vm::collect_growing_heap(cell requested_size,
                                     bool trace_contexts_p) {
  /* Grow the data heap and copy all live objects to the new heap. */
  data_heap* old = data;
  set_data_heap(data->grow(&nursery, requested_size));
  collect_mark_impl(trace_contexts_p);
  collect_compact_code_impl(trace_contexts_p);
  code->flush_icache();
  delete old;
}

}