#include "master.hpp"

namespace factor {

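/* Fixup used by the full compaction pass. Data and code addresses are
   forwarded through the two mark bitmaps. The fingers track how far each
   compaction sweep has progressed: only addresses below a finger have
   already been moved and therefore need to be translated. */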
struct compaction_fixup {
  static const bool translated_code_block_map = false;

  mark_bits* data_forwarding_map;
  mark_bits* code_forwarding_map;
  const object** data_finger;
  const code_block** code_finger;

  compaction_fixup(mark_bits* data_forwarding_map,
                   mark_bits* code_forwarding_map,
                   const object** data_finger,
                   const code_block** code_finger)
      : data_forwarding_map(data_forwarding_map),
        code_forwarding_map(code_forwarding_map),
        data_finger(data_finger),
        code_finger(code_finger) {}

  object* fixup_data(object* obj) {
    return (object*)data_forwarding_map->forward_block((cell)obj);
  }

  code_block* fixup_code(code_block* compiled) {
    return (code_block*)code_forwarding_map->forward_block((cell)compiled);
  }

  object* translate_data(const object* obj) {
    if (obj < *data_finger)
      return fixup_data((object*)obj);
    return (object*)obj;
  }

  code_block* translate_code(const code_block* compiled) {
    if (compiled < *code_finger)
      return fixup_code((code_block*)compiled);
    return (code_block*)compiled;
  }

  cell size(object* obj) {
    if (data_forwarding_map->marked_p((cell)obj))
      return obj->size(*this);
    return data_forwarding_map->unmarked_block_size((cell)obj);
  }

  cell size(code_block* compiled) {
    if (code_forwarding_map->marked_p((cell)compiled))
      return compiled->size(*this);
    return code_forwarding_map->unmarked_block_size((cell)compiled);
  }
};

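/* Fixup used when only the code heap is compacted. Data heap addresses are
   left unchanged; code heap addresses below the finger are forwarded through
   the code heap mark bitmap. */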
struct code_compaction_fixup {
  static const bool translated_code_block_map = false;

  mark_bits* code_forwarding_map;
  const code_block** code_finger;

  code_compaction_fixup(mark_bits* code_forwarding_map,
                        const code_block** code_finger)
      : code_forwarding_map(code_forwarding_map), code_finger(code_finger) {}

  object* fixup_data(object* obj) { return obj; }

  code_block* fixup_code(code_block* compiled) {
    return (code_block*)code_forwarding_map->forward_block((cell)compiled);
  }

  object* translate_data(const object* obj) { return fixup_data((object*)obj); }

  code_block* translate_code(const code_block* compiled) {
    if (compiled < *code_finger)
      return fixup_code((code_block*)compiled);
    return (code_block*)compiled;
  }

  cell size(object* obj) { return obj->size(); }

  cell size(code_block* compiled) {
    if (code_forwarding_map->marked_p((cell)compiled))
      return compiled->size(*this);
    return code_forwarding_map->unmarked_block_size((cell)compiled);
  }
};

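/* Applied to each live tenured object after it has been moved to its new
   address: re-visits the object's slots and its code block pointer, and
   records the new address in the object start map. */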
struct object_compaction_updater {
  factor_vm* parent;
  compaction_fixup fixup;
  object_start_map* starts;

  object_compaction_updater(factor_vm* parent, compaction_fixup fixup)
      : parent(parent),
        fixup(fixup),
        starts(&parent->data->tenured->starts) {}

  void operator()(object* old_address, object* new_address, cell size) {
    slot_visitor<compaction_fixup> forwarder(parent, fixup);
    forwarder.visit_slots(new_address);
    forwarder.visit_object_code_block(new_address);
    starts->record_object_start_offset(new_address);
  }
};

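/* Re-apply a single relocation in a code block that has just been moved.
   Literal operands are forwarded through the data heap fixup, entry point
   and RT_HERE operands through the code heap fixup, external addresses are
   recomputed, and all other operands are copied over from the old location
   unchanged. */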
template <typename Fixup>
void update_relocation(factor_vm* parent,
                       cell old_entry_point,
                       Fixup fixup,
                       instruction_operand op) {
  cell old_offset = op.rel_offset() + old_entry_point;

  switch (op.rel_type()) {
    case RT_LITERAL: {
      cell value = op.load_value(old_offset);
      if (immediate_p(value))
        op.store_value(value);
      else
        op.store_value(
            RETAG(fixup.fixup_data(untag<object>(value)), TAG(value)));
      break;
    }
    case RT_ENTRY_POINT:
    case RT_ENTRY_POINT_PIC:
    case RT_ENTRY_POINT_PIC_TAIL:
    case RT_HERE: {
      cell value = op.load_value(old_offset);
      cell offset = TAG(value);
      code_block* compiled = (code_block*)UNTAG(value);
      op.store_value((cell)fixup.fixup_code(compiled) + offset);
      break;
    }
    case RT_THIS:
    case RT_CARDS_OFFSET:
    case RT_DECKS_OFFSET:
      parent->store_external_address(op);
      break;
    default:
      op.store_value(op.load_value(old_offset));
      break;
  }
}

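/* Applied to each live code block after it has been moved to its new
   address: updates the object fields of the code block, then rewrites every
   instruction operand, reading each operand's old value relative to the
   block's old entry point. */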
template <typename Fixup> struct code_block_compaction_updater {
  factor_vm* parent;
  Fixup fixup;
  slot_visitor<Fixup> forwarder;

  code_block_compaction_updater(
      factor_vm* parent, Fixup fixup, slot_visitor<Fixup> forwarder)
      : parent(parent),
        fixup(fixup),
        forwarder(forwarder) { }

  void operator()(code_block* old_address, code_block* new_address, cell size) {
    forwarder.visit_code_block_objects(new_address);

    cell old_entry_point = old_address->entry_point();
    auto update_func = [&](instruction_operand op) {
      update_relocation(parent, old_entry_point, fixup, op);
    };
    new_address->each_instruction_operand(update_func);
  }
};

/* After a compaction, invalidate any code heap roots which are not
marked, and also slide the valid roots up so that call sites can be updated
correctly in case an inline cache compilation triggered compaction. */
void factor_vm::update_code_roots_for_compaction() {

  mark_bits* state = &code->allocator->state;

  FACTOR_FOR_EACH(code_roots) {
    code_root* root = *iter;
    cell block = root->value & (~data_alignment + 1);

    /* Offset of return address within 16-byte allocation line */
    cell offset = root->value - block;

    if (root->valid && state->marked_p(block)) {
      block = state->forward_block(block);
      root->value = block + offset;
    } else
      root->valid = false;
  }
}

/* Compact data and code heaps */
void factor_vm::collect_compact_impl() {
  gc_event* event = current_gc->event;

#ifdef FACTOR_DEBUG
  code->verify_all_blocks_set();
#endif

  if (event)
    event->started_compaction();

  tenured_space* tenured = data->tenured;
  mark_bits* data_forwarding_map = &tenured->state;
  mark_bits* code_forwarding_map = &code->allocator->state;

  /* Figure out where blocks are going to go */
  data_forwarding_map->compute_forwarding();
  code_forwarding_map->compute_forwarding();

  const object* data_finger = (object*)tenured->start;
  const code_block* code_finger = (code_block*)code->allocator->start;

  {
    compaction_fixup fixup(data_forwarding_map, code_forwarding_map, &data_finger,
                           &code_finger);

    slot_visitor<compaction_fixup> forwarder(this, fixup);

    forwarder.visit_uninitialized_code_blocks();

    /* Object start offsets get recomputed by the object_compaction_updater */
    data->tenured->starts.clear_object_start_offsets();

    /* Slide everything in tenured space up, and update data and code heap
       pointers inside objects. */
    {
      object_compaction_updater object_updater(this, fixup);
      tenured->compact(object_updater, fixup, &data_finger);
    }

    /* Slide everything in the code heap up, and update data and code heap
       pointers inside code blocks. */
    {
      code_block_compaction_updater<compaction_fixup> code_block_updater(
          this, fixup, forwarder);
      code->allocator->compact(code_block_updater, fixup, &code_finger);
    }

    forwarder.visit_all_roots();
    forwarder.visit_context_code_blocks();
  }

  update_code_roots_for_compaction();
  callbacks->update();

  code->initialize_all_blocks_set();

  if (event)
    event->ended_compaction();
}

/* Compact just the code heap, after growing the data heap */
void factor_vm::collect_compact_code_impl() {
  /* Figure out where blocks are going to go */
  mark_bits* code_forwarding_map = &code->allocator->state;
  code_forwarding_map->compute_forwarding();

  const code_block* code_finger = (code_block*)code->allocator->start;

  code_compaction_fixup fixup(code_forwarding_map, &code_finger);
  slot_visitor<code_compaction_fixup> forwarder(this, fixup);

  forwarder.visit_uninitialized_code_blocks();
  forwarder.visit_context_code_blocks();

  /* Update code heap references in the data heap */
  auto object_grow_heap_updater = [&](object* obj) {
    forwarder.visit_object_code_block(obj);
  };
  each_object(object_grow_heap_updater);

  /* Slide everything in the code heap up, and update code heap
     pointers inside code blocks. */
  code_block_compaction_updater<code_compaction_fixup> code_block_updater(
      this, fixup, forwarder);
  code->allocator->compact(code_block_updater, fixup, &code_finger);

  update_code_roots_for_compaction();
  callbacks->update();
  code->initialize_all_blocks_set();
}

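/* Full mark-compact collection of both heaps. If compaction still leaves the
   data heap badly fragmented, fall back to growing it. */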
void factor_vm::collect_compact() {
  collect_mark_impl();
  collect_compact_impl();

  if (data->high_fragmentation_p()) {
    /* Compaction did not free up enough memory. Grow the heap. */
    set_current_gc_op(collect_growing_heap_op);
    collect_growing_heap(0);
  }

  code->flush_icache();
}

void factor_vm::collect_growing_heap(cell requested_size) {
  /* Grow the data heap and copy all live objects to the new heap. */
  data_heap* old = data;
  set_data_heap(data->grow(&nursery, requested_size));
  collect_mark_impl();
  collect_compact_code_impl();
  code->flush_icache();
  delete old;
}

}