/* vm/compaction.cpp */
#include "master.hpp"

namespace factor {

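/* Fixup object for full compaction, where both the data heap and the code
   heap slide. The forwarding maps give each marked block's new address, and
   the two fingers track how far each sliding pass has progressed: an address
   below a finger has already been moved and must be translated through the
   forwarding map, while an address above it still refers to the original
   location. */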
struct compaction_fixup {
  static const bool translated_code_block_map = false;

  mark_bits<object>* data_forwarding_map;
  mark_bits<code_block>* code_forwarding_map;
  const object** data_finger;
  const code_block** code_finger;

  explicit compaction_fixup(mark_bits<object>* data_forwarding_map_,
                            mark_bits<code_block>* code_forwarding_map_,
                            const object** data_finger_,
                            const code_block** code_finger_)
      : data_forwarding_map(data_forwarding_map_),
        code_forwarding_map(code_forwarding_map_),
        data_finger(data_finger_),
        code_finger(code_finger_) {}

  object* fixup_data(object* obj) {
    return data_forwarding_map->forward_block(obj);
  }

  code_block* fixup_code(code_block* compiled) {
    return code_forwarding_map->forward_block(compiled);
  }

  object* translate_data(const object* obj) {
    if (obj < *data_finger)
      return fixup_data((object*)obj);
    else
      return (object*)obj;
  }

  code_block* translate_code(const code_block* compiled) {
    if (compiled < *code_finger)
      return fixup_code((code_block*)compiled);
    else
      return (code_block*)compiled;
  }

  cell size(object* obj) {
    if (data_forwarding_map->marked_p(obj))
      return obj->size(*this);
    else
      return data_forwarding_map->unmarked_block_size(obj);
  }

  cell size(code_block* compiled) {
    if (code_forwarding_map->marked_p(compiled))
      return compiled->size(*this);
    else
      return code_forwarding_map->unmarked_block_size(compiled);
  }
};

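/* Visitor run on each live object after it has been moved: forward the
   pointers in its slots and its code block reference, and record the new
   object start in the object start offset map used for card scanning. */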
struct object_compaction_updater {
  factor_vm* parent;
  compaction_fixup fixup;
  object_start_map* starts;

  explicit object_compaction_updater(factor_vm* parent_,
                                     compaction_fixup fixup_)
      : parent(parent_),
        fixup(fixup_),
        starts(&parent->data->tenured->starts) {}

  void operator()(object* old_address, object* new_address, cell size) {
    slot_visitor<compaction_fixup> slot_forwarder(parent, fixup);
    slot_forwarder.visit_slots(new_address);

    code_block_visitor<compaction_fixup> code_forwarder(parent, fixup);
    code_forwarder.visit_object_code_block(new_address);

    starts->record_object_start_offset(new_address);
  }
};

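/* Visitor run on each relocation entry of a moved code block: forward
   pointers into the data and code heaps, and recompute addresses that are
   relative to the VM or to the block itself. */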
template <typename Fixup> struct code_block_compaction_relocation_visitor {
  factor_vm* parent;
  code_block* old_address;
  Fixup fixup;

  explicit code_block_compaction_relocation_visitor(factor_vm* parent_,
                                                    code_block* old_address_,
                                                    Fixup fixup_)
      : parent(parent_), old_address(old_address_), fixup(fixup_) {}

  void operator()(instruction_operand op) {
    cell old_offset = op.rel_offset() + (cell) old_address->entry_point();

    switch (op.rel_type()) {
      case RT_LITERAL: {
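        /* A tagged literal: immediates are unchanged; for heap pointers,
           forward the untagged pointer and restore the tag. */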
        cell value = op.load_value(old_offset);
        if (immediate_p(value))
          op.store_value(value);
        else
          op.store_value(
              RETAG(fixup.fixup_data(untag<object>(value)), TAG(value)));
        break;
      }
      case RT_ENTRY_POINT:
      case RT_ENTRY_POINT_PIC:
      case RT_ENTRY_POINT_PIC_TAIL:
      case RT_HERE: {
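        /* An address within a code block: UNTAG rounds it down to its
           16-byte allocation line, which forwards along with the block,
           and TAG holds the low-order bits to add back afterwards. */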
        cell value = op.load_value(old_offset);
        cell offset = TAG(value);
        code_block* compiled = (code_block*)UNTAG(value);
        op.store_value((cell) fixup.fixup_code(compiled) + offset);
        break;
      }
      case RT_THIS:
      case RT_CARDS_OFFSET:
      case RT_DECKS_OFFSET:
        parent->store_external_address(op);
        break;
      default:
        op.store_value(op.load_value(old_offset));
        break;
    }
  }
};

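/* Visitor run on each moved code block: forward the objects it references,
   then re-apply its relocation entries at the new address. */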
template <typename Fixup> struct code_block_compaction_updater {
  factor_vm* parent;
  Fixup fixup;
  slot_visitor<Fixup> data_forwarder;
  code_block_visitor<Fixup> code_forwarder;

  explicit code_block_compaction_updater(
      factor_vm* parent_, Fixup fixup_, slot_visitor<Fixup> data_forwarder_,
      code_block_visitor<Fixup> code_forwarder_)
      : parent(parent_),
        fixup(fixup_),
        data_forwarder(data_forwarder_),
        code_forwarder(code_forwarder_) {}

  void operator()(code_block* old_address, code_block* new_address, cell size) {
    data_forwarder.visit_code_block_objects(new_address);

    code_block_compaction_relocation_visitor<Fixup> visitor(parent, old_address,
                                                            fixup);
    new_address->each_instruction_operand(visitor);
  }
};

/* After a compaction, invalidate any code heap roots which are not marked,
   and forward the valid roots to their new addresses so that call sites can
   be updated correctly in case an inline cache compilation triggered the
   compaction. */
void factor_vm::update_code_roots_for_compaction() {
  std::vector<code_root*>::const_iterator iter = code_roots.begin();
  std::vector<code_root*>::const_iterator end = code_roots.end();

  mark_bits<code_block>* state = &code->allocator->state;

  for (; iter < end; iter++) {
    code_root* root = *iter;
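    /* ~data_alignment + 1 == -data_alignment, so the mask rounds the root
       down to its enclosing 16-byte allocation line, which the forwarding
       map moves by the same amount as the block containing it. */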
    code_block* block = (code_block*)(root->value & (~data_alignment + 1));

    /* Offset of return address within 16-byte allocation line */
    cell offset = root->value - (cell) block;

    if (root->valid && state->marked_p(block)) {
      block = state->forward_block(block);
      root->value = (cell) block + offset;
    } else
      root->valid = false;
  }
}

/* Compact data and code heaps */
void factor_vm::collect_compact_impl(bool trace_contexts_p) {
  gc_event* event = current_gc->event;

#if defined(FACTOR_DEBUG)
  code->verify_all_blocks_set();
#endif

  if (event)
    event->started_compaction();

  tenured_space* tenured = data->tenured;
  mark_bits<object>* data_forwarding_map = &tenured->state;
  mark_bits<code_block>* code_forwarding_map = &code->allocator->state;

  /* Figure out where blocks are going to go */
  data_forwarding_map->compute_forwarding();
  code_forwarding_map->compute_forwarding();

  const object* data_finger = tenured->first_block();
  const code_block* code_finger = code->allocator->first_block();

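  /* The fingers start at the first block of each heap and advance as
     compaction proceeds; compaction_fixup compares addresses against them
     to decide whether a reference has already been forwarded. */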
  compaction_fixup fixup(data_forwarding_map, code_forwarding_map, &data_finger,
                         &code_finger);
  slot_visitor<compaction_fixup> data_forwarder(this, fixup);
  code_block_visitor<compaction_fixup> code_forwarder(this, fixup);

  code_forwarder.visit_code_roots();

  /* Object start offsets get recomputed by the object_compaction_updater */
  data->tenured->starts.clear_object_start_offsets();

  /* Slide everything in tenured space up, and update data and code heap
     pointers inside objects. */
  object_compaction_updater object_updater(this, fixup);
  tenured->compact(object_updater, fixup, &data_finger);

  /* Slide everything in the code heap up, and update data and code heap
     pointers inside code blocks. */
  code_block_compaction_updater<compaction_fixup> code_block_updater(
      this, fixup, data_forwarder, code_forwarder);
  code->allocator->compact(code_block_updater, fixup, &code_finger);

  data_forwarder.visit_roots();
  if (trace_contexts_p) {
    data_forwarder.visit_contexts();
    code_forwarder.visit_context_code_blocks();
  }

  update_code_roots_for_compaction();
  callbacks->update();

  code->initialize_all_blocks_set();

  if (event)
    event->ended_compaction();
}

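/* Fixup object for compacting only the code heap, used after growing the
   data heap: data heap objects do not move, so fixup_data and translate_data
   are the identity, and only code block references are forwarded. */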
struct code_compaction_fixup {
  static const bool translated_code_block_map = false;

  mark_bits<code_block>* code_forwarding_map;
  const code_block** code_finger;

  explicit code_compaction_fixup(mark_bits<code_block>* code_forwarding_map_,
                                 const code_block** code_finger_)
      : code_forwarding_map(code_forwarding_map_), code_finger(code_finger_) {}

  object* fixup_data(object* obj) { return obj; }

  code_block* fixup_code(code_block* compiled) {
    return code_forwarding_map->forward_block(compiled);
  }

  object* translate_data(const object* obj) { return fixup_data((object*)obj); }

  code_block* translate_code(const code_block* compiled) {
    if (compiled < *code_finger)
      return fixup_code((code_block*)compiled);
    else
      return (code_block*)compiled;
  }

  cell size(object* obj) { return obj->size(); }

  cell size(code_block* compiled) {
    if (code_forwarding_map->marked_p(compiled))
      return compiled->size(*this);
    else
      return code_forwarding_map->unmarked_block_size(compiled);
  }
};

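/* Visitor run on every data heap object when only the code heap is
   compacted: forward the object's code block pointer in place. */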
struct object_grow_heap_updater {
  code_block_visitor<code_compaction_fixup> code_forwarder;

  explicit object_grow_heap_updater(
      code_block_visitor<code_compaction_fixup> code_forwarder_)
      : code_forwarder(code_forwarder_) {}

  void operator()(object* obj) { code_forwarder.visit_object_code_block(obj); }
};

/* Compact just the code heap, after growing the data heap */
void factor_vm::collect_compact_code_impl(bool trace_contexts_p) {
  /* Figure out where blocks are going to go */
  mark_bits<code_block>* code_forwarding_map = &code->allocator->state;
  code_forwarding_map->compute_forwarding();

  const code_block* code_finger = code->allocator->first_block();

  code_compaction_fixup fixup(code_forwarding_map, &code_finger);
  slot_visitor<code_compaction_fixup> data_forwarder(this, fixup);
  code_block_visitor<code_compaction_fixup> code_forwarder(this, fixup);

  code_forwarder.visit_code_roots();

  if (trace_contexts_p)
    code_forwarder.visit_context_code_blocks();

  /* Update code heap references in data heap */
  object_grow_heap_updater object_updater(code_forwarder);
  each_object(object_updater);

  /* Slide everything in the code heap up, and update code heap
     pointers inside code blocks. */
  code_block_compaction_updater<code_compaction_fixup> code_block_updater(
      this, fixup, data_forwarder, code_forwarder);
  code->allocator->compact(code_block_updater, fixup, &code_finger);

  update_code_roots_for_compaction();
  callbacks->update();
  code->initialize_all_blocks_set();
}

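/* Full mark-compact collection: mark, then compact both heaps; if the data
   heap is still badly fragmented afterwards, fall back to growing it. */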
void factor_vm::collect_compact(bool trace_contexts_p) {
  collect_mark_impl(trace_contexts_p);
  collect_compact_impl(trace_contexts_p);

  if (data->high_fragmentation_p()) {
    /* Compaction did not free up enough memory. Grow the heap. */
    set_current_gc_op(collect_growing_heap_op);
    collect_growing_heap(0, trace_contexts_p);
  }

  code->flush_icache();
}

void factor_vm::collect_growing_heap(cell requested_size,
                                     bool trace_contexts_p) {
  /* Grow the data heap and copy all live objects to the new heap. */
  data_heap* old = data;
  set_data_heap(data->grow(requested_size));
  collect_mark_impl(trace_contexts_p);
  collect_compact_code_impl(trace_contexts_p);
  code->flush_icache();
  delete old;
}

}