#include "master.hpp"

namespace factor {

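/* Fixup class used when compacting both the data and code heaps.
   fixup_data()/fixup_code() map an old address to its new location via
   the mark bitmaps, while translate_data()/translate_code() consult the
   compaction fingers so that only pointers into already-moved regions
   get forwarded. */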
struct compaction_fixup {
  static const bool translated_code_block_map = false;

  mark_bits* data_forwarding_map;
  mark_bits* code_forwarding_map;
  const object** data_finger;
  const code_block** code_finger;

  compaction_fixup(mark_bits* data_forwarding_map,
                   mark_bits* code_forwarding_map,
                   const object** data_finger,
                   const code_block** code_finger)
      : data_forwarding_map(data_forwarding_map),
        code_forwarding_map(code_forwarding_map),
        data_finger(data_finger),
        code_finger(code_finger) {}

  object* fixup_data(object* obj) {
    return (object*)data_forwarding_map->forward_block((cell)obj);
  }

  code_block* fixup_code(code_block* compiled) {
    return (code_block*)code_forwarding_map->forward_block((cell)compiled);
  }

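  /* Objects below the finger have already been slid to their new
     locations, so pointers to them must be forwarded; objects at or
     above it still sit at their original addresses. */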
  object* translate_data(const object* obj) {
    if (obj < *data_finger)
      return fixup_data((object*)obj);
    else
      return (object*)obj;
  }

  code_block* translate_code(const code_block* compiled) {
    if (compiled < *code_finger)
      return fixup_code((code_block*)compiled);
    else
      return (code_block*)compiled;
  }

  cell size(object* obj) {
    if (data_forwarding_map->marked_p((cell)obj))
      return obj->size(*this);
    else
      return data_forwarding_map->unmarked_block_size((cell)obj);
  }

  cell size(code_block* compiled) {
    if (code_forwarding_map->marked_p((cell)compiled))
      return compiled->size(*this);
    else
      return code_forwarding_map->unmarked_block_size((cell)compiled);
  }
};

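/* Applied to each live object after tenured_space::compact() has moved
   it: forward the pointers it contains, forward its code block
   reference, and record the object's new start offset. */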
struct object_compaction_updater {
  factor_vm* parent;
  compaction_fixup fixup;
  object_start_map* starts;

  object_compaction_updater(factor_vm* parent, compaction_fixup fixup)
      : parent(parent),
        fixup(fixup),
        starts(&parent->data->tenured->starts) {}

  void operator()(object* old_address, object* new_address, cell size) {
    slot_visitor<compaction_fixup> forwarder(parent, fixup);
    forwarder.visit_slots(new_address);
    forwarder.visit_object_code_block(new_address);
    starts->record_object_start_offset(new_address);
  }
};

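/* Rewrites the relocated operands of a moved code block: literals are
   forwarded through the data heap map, entry points through the code
   heap map, and VM-relative addresses (RT_THIS, RT_CARDS_OFFSET,
   RT_DECKS_OFFSET) are recomputed from scratch. */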
template <typename Fixup> struct code_block_compaction_relocation_visitor {
  factor_vm* parent;
  code_block* old_address;
  Fixup fixup;

  code_block_compaction_relocation_visitor(factor_vm* parent,
                                           code_block* old_address,
                                           Fixup fixup)
      : parent(parent), old_address(old_address), fixup(fixup) {}

  void operator()(instruction_operand op) {
    cell old_offset = op.rel_offset() + old_address->entry_point();

    switch (op.rel_type()) {
      case RT_LITERAL: {
        cell value = op.load_value(old_offset);
        if (immediate_p(value))
          op.store_value(value);
        else
          op.store_value(
              RETAG(fixup.fixup_data(untag<object>(value)), TAG(value)));
        break;
      }
      case RT_ENTRY_POINT:
      case RT_ENTRY_POINT_PIC:
      case RT_ENTRY_POINT_PIC_TAIL:
      case RT_HERE: {
        cell value = op.load_value(old_offset);
        cell offset = TAG(value);
        code_block* compiled = (code_block*)UNTAG(value);
        op.store_value((cell)fixup.fixup_code(compiled) + offset);
        break;
      }
      case RT_THIS:
      case RT_CARDS_OFFSET:
      case RT_DECKS_OFFSET:
        parent->store_external_address(op);
        break;
      default:
        op.store_value(op.load_value(old_offset));
        break;
    }
  }
};

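/* Applied to each live code block after the code heap allocator has
   moved it: forward the objects the block references, then patch its
   instruction operands relative to the old address. */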
template <typename Fixup> struct code_block_compaction_updater {
  factor_vm* parent;
  Fixup fixup;
  slot_visitor<Fixup> forwarder;

  code_block_compaction_updater(
      factor_vm* parent, Fixup fixup, slot_visitor<Fixup> forwarder)
      : parent(parent),
        fixup(fixup),
        forwarder(forwarder) { }

  void operator()(code_block* old_address, code_block* new_address, cell size) {
    forwarder.visit_code_block_objects(new_address);

    code_block_compaction_relocation_visitor<Fixup> visitor(parent, old_address,
                                                            fixup);
    new_address->each_instruction_operand(visitor);
  }
};

/* After a compaction, invalidate any code heap roots which are not
marked, and also slide the valid roots up so that call sites can be updated
correctly in case an inline cache compilation triggered compaction. */
void factor_vm::update_code_roots_for_compaction() {
  std::vector<code_root*>::const_iterator iter = code_roots.begin();
  std::vector<code_root*>::const_iterator end = code_roots.end();

  mark_bits* state = &code->allocator->state;

  for (; iter < end; iter++) {
    code_root* root = *iter;
    cell block = root->value & (~data_alignment + 1);

    /* Offset of return address within 16-byte allocation line */
    cell offset = root->value - block;

    if (root->valid && state->marked_p(block)) {
      block = state->forward_block(block);
      root->value = block + offset;
    } else
      root->valid = false;
  }
}

/* Compact data and code heaps */
void factor_vm::collect_compact_impl() {
  gc_event* event = current_gc->event;

#ifdef FACTOR_DEBUG
  code->verify_all_blocks_set();
#endif

  if (event)
    event->started_compaction();

  tenured_space* tenured = data->tenured;
  mark_bits* data_forwarding_map = &tenured->state;
  mark_bits* code_forwarding_map = &code->allocator->state;

  /* Figure out where blocks are going to go */
  data_forwarding_map->compute_forwarding();
  code_forwarding_map->compute_forwarding();

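  /* The fingers mark how far each compaction pass has progressed; they
     start at the bottom of each heap, so at first no pointer is
     considered moved. */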
  const object* data_finger = (object*)tenured->start;
  const code_block* code_finger = (code_block*)code->allocator->start;

  {
    compaction_fixup fixup(data_forwarding_map, code_forwarding_map, &data_finger,
                           &code_finger);

    slot_visitor<compaction_fixup> forwarder(this, fixup);

    forwarder.visit_uninitialized_code_blocks();

    /* Object start offsets get recomputed by the object_compaction_updater */
    data->tenured->starts.clear_object_start_offsets();

    /* Slide everything in tenured space up, and update data and code heap
       pointers inside objects. */
    {
      object_compaction_updater object_updater(this, fixup);
      tenured->compact(object_updater, fixup, &data_finger);
    }

    /* Slide everything in the code heap up, and update data and code heap
       pointers inside code blocks. */
    {
      code_block_compaction_updater<compaction_fixup> code_block_updater(
          this, fixup, forwarder);
      code->allocator->compact(code_block_updater, fixup, &code_finger);
    }

    forwarder.visit_all_roots();
    forwarder.visit_context_code_blocks();
  }

  update_code_roots_for_compaction();
  callbacks->update();

  code->initialize_all_blocks_set();

  if (event)
    event->ended_compaction();
}

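/* Fixup class used when compacting only the code heap: data heap
   pointers are left untouched, while code heap pointers are forwarded
   exactly as in a full compaction. */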
struct code_compaction_fixup {
  static const bool translated_code_block_map = false;

  mark_bits* code_forwarding_map;
  const code_block** code_finger;

  code_compaction_fixup(mark_bits* code_forwarding_map,
                        const code_block** code_finger)
      : code_forwarding_map(code_forwarding_map), code_finger(code_finger) {}

  object* fixup_data(object* obj) { return obj; }

  code_block* fixup_code(code_block* compiled) {
    return (code_block*)code_forwarding_map->forward_block((cell)compiled);
  }

  object* translate_data(const object* obj) { return fixup_data((object*)obj); }

  code_block* translate_code(const code_block* compiled) {
    if (compiled < *code_finger)
      return fixup_code((code_block*)compiled);
    else
      return (code_block*)compiled;
  }

  cell size(object* obj) { return obj->size(); }

  cell size(code_block* compiled) {
    if (code_forwarding_map->marked_p((cell)compiled))
      return compiled->size(*this);
    else
      return code_forwarding_map->unmarked_block_size((cell)compiled);
  }
};

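/* Forwards each data heap object's reference to its compiled code
   block when only the code heap is being compacted. */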
struct object_grow_heap_updater {
  slot_visitor<code_compaction_fixup> forwarder;

  explicit object_grow_heap_updater(
      slot_visitor<code_compaction_fixup> forwarder)
      : forwarder(forwarder) {}

  void operator()(object* obj) { forwarder.visit_object_code_block(obj); }
};

/* Compact just the code heap, after growing the data heap */
void factor_vm::collect_compact_code_impl() {
  /* Figure out where blocks are going to go */
  mark_bits* code_forwarding_map = &code->allocator->state;
  code_forwarding_map->compute_forwarding();

  const code_block* code_finger = (code_block*)code->allocator->start;

  code_compaction_fixup fixup(code_forwarding_map, &code_finger);
  slot_visitor<code_compaction_fixup> forwarder(this, fixup);

  forwarder.visit_uninitialized_code_blocks();
  forwarder.visit_context_code_blocks();

  /* Update code heap references in data heap */
  object_grow_heap_updater object_updater(forwarder);
  each_object(object_updater);

  /* Slide everything in the code heap up, and update code heap
     pointers inside code blocks. */
  code_block_compaction_updater<code_compaction_fixup> code_block_updater(
      this, fixup, forwarder);
  code->allocator->compact(code_block_updater, fixup, &code_finger);

  update_code_roots_for_compaction();
  callbacks->update();
  code->initialize_all_blocks_set();
}

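/* Full mark and compact of both heaps; if the heap is still badly
   fragmented afterwards, fall back to growing the data heap. */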
void factor_vm::collect_compact() {
  collect_mark_impl();
  collect_compact_impl();

  if (data->high_fragmentation_p()) {
    /* Compaction did not free up enough memory. Grow the heap. */
    set_current_gc_op(collect_growing_heap_op);
    collect_growing_heap(0);
  }

  code->flush_icache();
}

void factor_vm::collect_growing_heap(cell requested_size) {
  /* Grow the data heap and copy all live objects to the new heap. */
  data_heap* old = data;
  set_data_heap(data->grow(&nursery, requested_size));
  collect_mark_impl();
  collect_compact_code_impl();
  code->flush_icache();
  delete old;
}

}