/* vm/compaction.cpp
   (from commit: "VM: merge of slot_visitor and code_block_visitor") */
1 #include "master.hpp"
2
3 namespace factor {
4
5 struct compaction_fixup {
6   static const bool translated_code_block_map = false;
7
8   mark_bits* data_forwarding_map;
9   mark_bits* code_forwarding_map;
10   const object** data_finger;
11   const code_block** code_finger;
12
13   compaction_fixup(mark_bits* data_forwarding_map,
14                    mark_bits* code_forwarding_map,
15                    const object** data_finger,
16                    const code_block** code_finger)
17       : data_forwarding_map(data_forwarding_map),
18         code_forwarding_map(code_forwarding_map),
19         data_finger(data_finger),
20         code_finger(code_finger) {}
21
22   object* fixup_data(object* obj) {
23     return (object*)data_forwarding_map->forward_block((cell)obj);
24   }
25
26   code_block* fixup_code(code_block* compiled) {
27     return (code_block*)code_forwarding_map->forward_block((cell)compiled);
28   }
29
30   object* translate_data(const object* obj) {
31     if (obj < *data_finger)
32       return fixup_data((object*)obj);
33     else
34       return (object*)obj;
35   }
36
37   code_block* translate_code(const code_block* compiled) {
38     if (compiled < *code_finger)
39       return fixup_code((code_block*)compiled);
40     else
41       return (code_block*)compiled;
42   }
43
44   cell size(object* obj) {
45     if (data_forwarding_map->marked_p((cell)obj))
46       return obj->size(*this);
47     else
48       return data_forwarding_map->unmarked_block_size((cell)obj);
49   }
50
51   cell size(code_block* compiled) {
52     if (code_forwarding_map->marked_p((cell)compiled))
53       return compiled->size(*this);
54     else
55       return code_forwarding_map->unmarked_block_size((cell)compiled);
56   }
57 };
58
59 struct object_compaction_updater {
60   factor_vm* parent;
61   compaction_fixup fixup;
62   object_start_map* starts;
63
64   object_compaction_updater(factor_vm* parent, compaction_fixup fixup)
65       : parent(parent),
66         fixup(fixup),
67         starts(&parent->data->tenured->starts) {}
68
69   void operator()(object* old_address, object* new_address, cell size) {
70     slot_visitor<compaction_fixup> forwarder(parent, fixup);
71     forwarder.visit_slots(new_address);
72     forwarder.visit_object_code_block(new_address);
73     starts->record_object_start_offset(new_address);
74   }
75 };
76
/* Rewrites each relocated instruction operand of a compacted code block.
   Operand values are loaded relative to the block's OLD entry point
   (the operand object belongs to the block's new location, but the
   stored value was computed against the old one), then re-stored with
   the referenced object/code block forwarded through the fixup. */
template <typename Fixup> struct code_block_compaction_relocation_visitor {
  factor_vm* parent;
  code_block* old_address;
  Fixup fixup;

  code_block_compaction_relocation_visitor(factor_vm* parent,
                                           code_block* old_address,
                                           Fixup fixup)
      : parent(parent), old_address(old_address), fixup(fixup) {}

  void operator()(instruction_operand op) {
    /* Absolute address the operand's stored value was relative to
       before the block moved. */
    cell old_offset = op.rel_offset() + old_address->entry_point();

    switch (op.rel_type()) {
      case RT_LITERAL: {
        /* Tagged data heap value embedded in the code: forward the
           pointer (preserving its tag) unless it is an immediate. */
        cell value = op.load_value(old_offset);
        if (immediate_p(value))
          op.store_value(value);
        else
          op.store_value(
              RETAG(fixup.fixup_data(untag<object>(value)), TAG(value)));
        break;
      }
      case RT_ENTRY_POINT:
      case RT_ENTRY_POINT_PIC:
      case RT_ENTRY_POINT_PIC_TAIL:
      case RT_HERE: {
        /* Pointer into a code block; the low tag bits hold a byte
           offset within that block, which is re-applied after the
           block pointer is forwarded. */
        cell value = op.load_value(old_offset);
        cell offset = TAG(value);
        code_block* compiled = (code_block*)UNTAG(value);
        op.store_value((cell)fixup.fixup_code(compiled) + offset);
        break;
      }
      case RT_THIS:
      case RT_CARDS_OFFSET:
      case RT_DECKS_OFFSET:
        /* VM-owned addresses: recomputed from the VM rather than
           forwarded. */
        parent->store_external_address(op);
        break;
      default:
        /* All other relocation types: copy the old value across
           unchanged. */
        op.store_value(op.load_value(old_offset));
        break;
    }
  }
};
121
122 template <typename Fixup> struct code_block_compaction_updater {
123   factor_vm* parent;
124   Fixup fixup;
125   slot_visitor<Fixup> forwarder;
126
127   code_block_compaction_updater(
128       factor_vm* parent, Fixup fixup, slot_visitor<Fixup> forwarder)
129       : parent(parent),
130         fixup(fixup),
131         forwarder(forwarder) { }
132
133   void operator()(code_block* old_address, code_block* new_address, cell size) {
134     forwarder.visit_code_block_objects(new_address);
135
136     code_block_compaction_relocation_visitor<Fixup> visitor(parent, old_address,
137                                                             fixup);
138     new_address->each_instruction_operand(visitor);
139   }
140 };
141
142 /* After a compaction, invalidate any code heap roots which are not
143 marked, and also slide the valid roots up so that call sites can be updated
144 correctly in case an inline cache compilation triggered compaction. */
145 void factor_vm::update_code_roots_for_compaction() {
146   std::vector<code_root*>::const_iterator iter = code_roots.begin();
147   std::vector<code_root*>::const_iterator end = code_roots.end();
148
149   mark_bits* state = &code->allocator->state;
150
151   for (; iter < end; iter++) {
152     code_root* root = *iter;
153     cell block = root->value & (~data_alignment + 1);
154
155     /* Offset of return address within 16-byte allocation line */
156     cell offset = root->value - block;
157
158     if (root->valid && state->marked_p(block)) {
159       block = state->forward_block(block);
160       root->value = block + offset;
161     } else
162       root->valid = false;
163   }
164 }
165
/* Compact data and code heaps */
void factor_vm::collect_compact_impl(bool trace_contexts_p) {
  gc_event* event = current_gc->event;

#ifdef FACTOR_DEBUG
  code->verify_all_blocks_set();
#endif

  if (event)
    event->started_compaction();

  tenured_space* tenured = data->tenured;
  mark_bits* data_forwarding_map = &tenured->state;
  mark_bits* code_forwarding_map = &code->allocator->state;

  /* Figure out where blocks are going to go */
  data_forwarding_map->compute_forwarding();
  code_forwarding_map->compute_forwarding();

  /* Compaction fingers, one per heap, starting at each heap's base.
     Their addresses are handed to compact() below, which presumably
     advances them as blocks are moved — TODO confirm against
     free_list_allocator::compact. */
  const object* data_finger = (object*)tenured->start;
  const code_block* code_finger = (code_block*)code->allocator->start;

  {
    compaction_fixup fixup(data_forwarding_map, code_forwarding_map, &data_finger,
                           &code_finger);

    slot_visitor<compaction_fixup> forwarder(this, fixup);

    forwarder.visit_uninitialized_code_blocks();

    /* Object start offsets get recomputed by the object_compaction_updater */
    data->tenured->starts.clear_object_start_offsets();

    /* Slide everything in tenured space up, and update data and code heap
       pointers inside objects. */
    {
      object_compaction_updater object_updater(this, fixup);
      tenured->compact(object_updater, fixup, &data_finger);
    }

    /* Slide everything in the code heap up, and update data and code heap
       pointers inside code blocks. */
    {
      code_block_compaction_updater<compaction_fixup> code_block_updater(
          this, fixup, forwarder);
      code->allocator->compact(code_block_updater, fixup, &code_finger);
    }

    /* Forward remaining GC roots to the new locations. */
    forwarder.visit_roots();
    if (trace_contexts_p) {
      forwarder.visit_contexts();
      forwarder.visit_context_code_blocks();
    }
  }

  /* Code roots registered by call sites must be slid/invalidated too. */
  update_code_roots_for_compaction();
  callbacks->update();

  code->initialize_all_blocks_set();

  if (event)
    event->ended_compaction();
}
229
230 struct code_compaction_fixup {
231   static const bool translated_code_block_map = false;
232
233   mark_bits* code_forwarding_map;
234   const code_block** code_finger;
235
236   code_compaction_fixup(mark_bits* code_forwarding_map,
237                         const code_block** code_finger)
238       : code_forwarding_map(code_forwarding_map), code_finger(code_finger) {}
239
240   object* fixup_data(object* obj) { return obj; }
241
242   code_block* fixup_code(code_block* compiled) {
243     return (code_block*)code_forwarding_map->forward_block((cell)compiled);
244   }
245
246   object* translate_data(const object* obj) { return fixup_data((object*)obj); }
247
248   code_block* translate_code(const code_block* compiled) {
249     if (compiled < *code_finger)
250       return fixup_code((code_block*)compiled);
251     else
252       return (code_block*)compiled;
253   }
254
255   cell size(object* obj) { return obj->size(); }
256
257   cell size(code_block* compiled) {
258     if (code_forwarding_map->marked_p((cell)compiled))
259       return compiled->size(*this);
260     else
261       return code_forwarding_map->unmarked_block_size((cell)compiled);
262   }
263 };
264
265 struct object_grow_heap_updater {
266   slot_visitor<code_compaction_fixup> forwarder;
267
268   explicit object_grow_heap_updater(
269       slot_visitor<code_compaction_fixup> forwarder)
270       : forwarder(forwarder) {}
271
272   void operator()(object* obj) { forwarder.visit_object_code_block(obj); }
273 };
274
/* Compact just the code heap, after growing the data heap */
void factor_vm::collect_compact_code_impl(bool trace_contexts_p) {
  /* Figure out where blocks are going to go */
  mark_bits* code_forwarding_map = &code->allocator->state;
  code_forwarding_map->compute_forwarding();

  /* Compaction finger: code blocks below this address have already
     been moved and must be forwarded. */
  const code_block* code_finger = (code_block*)code->allocator->start;

  code_compaction_fixup fixup(code_forwarding_map, &code_finger);
  slot_visitor<code_compaction_fixup> forwarder(this, fixup);

  forwarder.visit_uninitialized_code_blocks();

  if (trace_contexts_p)
    forwarder.visit_context_code_blocks();

  /* Update code heap references in data heap */
  object_grow_heap_updater object_updater(forwarder);
  each_object(object_updater);

  /* Slide everything in the code heap up, and update code heap
        pointers inside code blocks. */
  code_block_compaction_updater<code_compaction_fixup> code_block_updater(
      this, fixup, forwarder);
  code->allocator->compact(code_block_updater, fixup, &code_finger);

  /* Slide/invalidate code roots, then refresh callback stubs and the
     debug set of all code blocks. */
  update_code_roots_for_compaction();
  callbacks->update();
  code->initialize_all_blocks_set();
}
305
/* Full collection: mark both heaps, then compact them. Falls back to
   growing the data heap if compaction leaves it too fragmented. */
void factor_vm::collect_compact(bool trace_contexts_p) {
  collect_mark_impl(trace_contexts_p);
  collect_compact_impl(trace_contexts_p);

  if (data->high_fragmentation_p()) {
    /* Compaction did not free up enough memory. Grow the heap. */
    set_current_gc_op(collect_growing_heap_op);
    collect_growing_heap(0, trace_contexts_p);
  }

  /* Code blocks may have moved; the instruction cache must be flushed
     before any of them are executed. */
  code->flush_icache();
}
318
/* Replace the data heap with a larger one (at least requested_size
   bigger — semantics of the size parameter live in data_heap::grow),
   copy live objects into it, and compact the code heap. */
void factor_vm::collect_growing_heap(cell requested_size,
                                     bool trace_contexts_p) {
  /* Grow the data heap and copy all live objects to the new heap. */
  data_heap* old = data;
  set_data_heap(data->grow(&nursery, requested_size));
  collect_mark_impl(trace_contexts_p);
  collect_compact_code_impl(trace_contexts_p);
  code->flush_icache();
  /* The old heap is released only after everything has been copied. */
  delete old;
}
329
330 }