#include "master.hpp"

namespace factor {

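/* A compaction fixup knows how to compute the new location of an object
or code block after compaction. The forwarding maps are the mark bitmaps
from which forwarding addresses are computed, and the fingers track how
far each sliding pass has progressed: blocks that lie below a finger have
already been moved, so only pointers to those blocks get translated. */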
struct compaction_fixup {
  static const bool translated_code_block_map = false;

  mark_bits<object>* data_forwarding_map;
  mark_bits<code_block>* code_forwarding_map;
  const object** data_finger;
  const code_block** code_finger;

  compaction_fixup(mark_bits<object>* data_forwarding_map,
                   mark_bits<code_block>* code_forwarding_map,
                   const object** data_finger,
                   const code_block** code_finger)
      : data_forwarding_map(data_forwarding_map),
        code_forwarding_map(code_forwarding_map),
        data_finger(data_finger),
        code_finger(code_finger) {}

  object* fixup_data(object* obj) {
    return data_forwarding_map->forward_block(obj);
  }

  code_block* fixup_code(code_block* compiled) {
    return code_forwarding_map->forward_block(compiled);
  }

  object* translate_data(const object* obj) {
    if (obj < *data_finger)
      return fixup_data((object*)obj);
    else
      return (object*)obj;
  }

  code_block* translate_code(const code_block* compiled) {
    if (compiled < *code_finger)
      return fixup_code((code_block*)compiled);
    else
      return (code_block*)compiled;
  }

  cell size(object* obj) {
    if (data_forwarding_map->marked_p(obj))
      return obj->size(*this);
    else
      return data_forwarding_map->unmarked_block_size(obj);
  }

  cell size(code_block* compiled) {
    if (code_forwarding_map->marked_p(compiled))
      return compiled->size(*this);
    else
      return code_forwarding_map->unmarked_block_size(compiled);
  }
};

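/* Visits each live object at its new location, forwarding the data and
code heap pointers it contains and recording its new start offset in the
object start map. */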
struct object_compaction_updater {
  factor_vm* parent;
  compaction_fixup fixup;
  object_start_map* starts;

  object_compaction_updater(factor_vm* parent, compaction_fixup fixup)
      : parent(parent),
        fixup(fixup),
        starts(&parent->data->tenured->starts) {}

  void operator()(object* old_address, object* new_address, cell size) {
    slot_visitor<compaction_fixup> slot_forwarder(parent, fixup);
    slot_forwarder.visit_slots(new_address);

    code_block_visitor<compaction_fixup> code_forwarder(parent, fixup);
    code_forwarder.visit_object_code_block(new_address);

    starts->record_object_start_offset(new_address);
  }
};

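/* Rewrites the relocation entries of a moved code block so that its
literals, entry points and external addresses refer to their
post-compaction locations. */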
template <typename Fixup> struct code_block_compaction_relocation_visitor {
  factor_vm* parent;
  code_block* old_address;
  Fixup fixup;

  code_block_compaction_relocation_visitor(factor_vm* parent,
                                           code_block* old_address,
                                           Fixup fixup)
      : parent(parent), old_address(old_address), fixup(fixup) {}

  void operator()(instruction_operand op) {
    cell old_offset = op.rel_offset() + (cell) old_address->entry_point();

    switch (op.rel_type()) {
      case RT_LITERAL: {
        cell value = op.load_value(old_offset);
        if (immediate_p(value))
          op.store_value(value);
        else
          op.store_value(
              RETAG(fixup.fixup_data(untag<object>(value)), TAG(value)));
        break;
      }
      case RT_ENTRY_POINT:
      case RT_ENTRY_POINT_PIC:
      case RT_ENTRY_POINT_PIC_TAIL:
      case RT_HERE: {
        cell value = op.load_value(old_offset);
        cell offset = TAG(value);
        code_block* compiled = (code_block*)UNTAG(value);
        op.store_value((cell) fixup.fixup_code(compiled) + offset);
        break;
      }
      case RT_THIS:
      case RT_CARDS_OFFSET:
      case RT_DECKS_OFFSET:
        parent->store_external_address(op);
        break;
      default:
        op.store_value(op.load_value(old_offset));
        break;
    }
  }
};

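/* Visits each live code block at its new location, forwarding the
objects it references and fixing up its instruction operands with the
relocation visitor above. */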
template <typename Fixup> struct code_block_compaction_updater {
  factor_vm* parent;
  Fixup fixup;
  slot_visitor<Fixup> data_forwarder;
  code_block_visitor<Fixup> code_forwarder;

  code_block_compaction_updater(
      factor_vm* parent, Fixup fixup, slot_visitor<Fixup> data_forwarder,
      code_block_visitor<Fixup> code_forwarder)
      : parent(parent),
        fixup(fixup),
        data_forwarder(data_forwarder),
        code_forwarder(code_forwarder) {}

  void operator()(code_block* old_address, code_block* new_address, cell size) {
    data_forwarder.visit_code_block_objects(new_address);

    code_block_compaction_relocation_visitor<Fixup> visitor(parent, old_address,
                                                            fixup);
    new_address->each_instruction_operand(visitor);
  }
};

/* After a compaction, invalidate any code heap roots which are not
marked, and also slide the valid roots up so that call sites can be updated
correctly in case an inline cache compilation triggered compaction. */
void factor_vm::update_code_roots_for_compaction() {
  std::vector<code_root*>::const_iterator iter = code_roots.begin();
  std::vector<code_root*>::const_iterator end = code_roots.end();

  mark_bits<code_block>* state = &code->allocator->state;

  for (; iter < end; iter++) {
    code_root* root = *iter;
    code_block* block = (code_block*)(root->value & (~data_alignment + 1));

    /* Offset of return address within 16-byte allocation line */
    cell offset = root->value - (cell) block;

    if (root->valid && state->marked_p(block)) {
      block = state->forward_block(block);
      root->value = (cell) block + offset;
    } else
      root->valid = false;
  }
}

/* Compact data and code heaps */
void factor_vm::collect_compact_impl(bool trace_contexts_p) {
  gc_event* event = current_gc->event;

#if defined(FACTOR_DEBUG)
  code->verify_all_blocks_set();
#endif

  if (event)
    event->started_compaction();

  tenured_space* tenured = data->tenured;
  mark_bits<object>* data_forwarding_map = &tenured->state;
  mark_bits<code_block>* code_forwarding_map = &code->allocator->state;

  /* Figure out where blocks are going to go */
  data_forwarding_map->compute_forwarding();
  code_forwarding_map->compute_forwarding();

  const object* data_finger = tenured->first_block();
  const code_block* code_finger = code->allocator->first_block();

  compaction_fixup fixup(data_forwarding_map, code_forwarding_map, &data_finger,
                         &code_finger);
  slot_visitor<compaction_fixup> data_forwarder(this, fixup);
  code_block_visitor<compaction_fixup> code_forwarder(this, fixup);

  code_forwarder.visit_code_roots();

  /* Object start offsets get recomputed by the object_compaction_updater */
  data->tenured->starts.clear_object_start_offsets();

  /* Slide everything in tenured space up, and update data and code heap
     pointers inside objects. */
  object_compaction_updater object_updater(this, fixup);
  tenured->compact(object_updater, fixup, &data_finger);

  /* Slide everything in the code heap up, and update data and code heap
     pointers inside code blocks. */
  code_block_compaction_updater<compaction_fixup> code_block_updater(
      this, fixup, data_forwarder, code_forwarder);
  code->allocator->compact(code_block_updater, fixup, &code_finger);

  data_forwarder.visit_roots();
  if (trace_contexts_p) {
    data_forwarder.visit_contexts();
    code_forwarder.visit_context_code_blocks();
  }

  update_code_roots_for_compaction();
  callbacks->update();

  code->initialize_all_blocks_set();

  if (event)
    event->ended_compaction();
}

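/* Like compaction_fixup, but used when only the code heap is compacted:
data heap pointers are returned unchanged, and only code block pointers
are forwarded. */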
struct code_compaction_fixup {
  static const bool translated_code_block_map = false;

  mark_bits<code_block>* code_forwarding_map;
  const code_block** code_finger;

  code_compaction_fixup(mark_bits<code_block>* code_forwarding_map,
                        const code_block** code_finger)
      : code_forwarding_map(code_forwarding_map), code_finger(code_finger) {}

  object* fixup_data(object* obj) { return obj; }

  code_block* fixup_code(code_block* compiled) {
    return code_forwarding_map->forward_block(compiled);
  }

  object* translate_data(const object* obj) { return fixup_data((object*)obj); }

  code_block* translate_code(const code_block* compiled) {
    if (compiled < *code_finger)
      return fixup_code((code_block*)compiled);
    else
      return (code_block*)compiled;
  }

  cell size(object* obj) { return obj->size(); }

  cell size(code_block* compiled) {
    if (code_forwarding_map->marked_p(compiled))
      return compiled->size(*this);
    else
      return code_forwarding_map->unmarked_block_size(compiled);
  }
};

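/* Updates the code block pointer of a data heap object after the code
heap has been compacted. */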
struct object_grow_heap_updater {
  code_block_visitor<code_compaction_fixup> code_forwarder;

  explicit object_grow_heap_updater(
      code_block_visitor<code_compaction_fixup> code_forwarder)
      : code_forwarder(code_forwarder) {}

  void operator()(object* obj) { code_forwarder.visit_object_code_block(obj); }
};

/* Compact just the code heap, after growing the data heap */
void factor_vm::collect_compact_code_impl(bool trace_contexts_p) {
  /* Figure out where blocks are going to go */
  mark_bits<code_block>* code_forwarding_map = &code->allocator->state;
  code_forwarding_map->compute_forwarding();

  const code_block* code_finger = code->allocator->first_block();

  code_compaction_fixup fixup(code_forwarding_map, &code_finger);
  slot_visitor<code_compaction_fixup> data_forwarder(this, fixup);
  code_block_visitor<code_compaction_fixup> code_forwarder(this, fixup);

  code_forwarder.visit_code_roots();

  if (trace_contexts_p)
    code_forwarder.visit_context_code_blocks();

  /* Update code heap references in data heap */
  object_grow_heap_updater object_updater(code_forwarder);
  each_object(object_updater);

  /* Slide everything in the code heap up, and update code heap
     pointers inside code blocks. */
  code_block_compaction_updater<code_compaction_fixup> code_block_updater(
      this, fixup, data_forwarder, code_forwarder);
  code->allocator->compact(code_block_updater, fixup, &code_finger);

  update_code_roots_for_compaction();
  callbacks->update();
  code->initialize_all_blocks_set();
}

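/* A full mark phase followed by compaction of both heaps. If the data
heap is still too fragmented afterwards, fall back to growing it. */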
void factor_vm::collect_compact(bool trace_contexts_p) {
  collect_mark_impl(trace_contexts_p);
  collect_compact_impl(trace_contexts_p);

  if (data->high_fragmentation_p()) {
    /* Compaction did not free up enough memory. Grow the heap. */
    set_current_gc_op(collect_growing_heap_op);
    collect_growing_heap(0, trace_contexts_p);
  }

  code->flush_icache();
}

void factor_vm::collect_growing_heap(cell requested_size,
                                     bool trace_contexts_p) {
  /* Grow the data heap and copy all live objects to the new heap. */
  data_heap* old = data;
  set_data_heap(data->grow(requested_size));
  collect_mark_impl(trace_contexts_p);
  collect_compact_code_impl(trace_contexts_p);
  code->flush_icache();
  delete old;
}

}