#include "master.hpp"

namespace factor {

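/* Fixup used when compacting both the data and code heaps. The fingers
track how far compaction has progressed: pointers below a finger refer to
blocks that have already been moved and are translated through the
forwarding map, while pointers above it still refer to blocks at their
original addresses. */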
struct compaction_fixup {
  static const bool translated_code_block_map = false;

  mark_bits* data_forwarding_map;
  mark_bits* code_forwarding_map;
  const object** data_finger;
  const code_block** code_finger;

  compaction_fixup(mark_bits* data_forwarding_map,
                   mark_bits* code_forwarding_map,
                   const object** data_finger,
                   const code_block** code_finger)
      : data_forwarding_map(data_forwarding_map),
        code_forwarding_map(code_forwarding_map),
        data_finger(data_finger),
        code_finger(code_finger) {}

  object* fixup_data(object* obj) {
    return (object*)data_forwarding_map->forward_block((cell)obj);
  }

  code_block* fixup_code(code_block* compiled) {
    return (code_block*)code_forwarding_map->forward_block((cell)compiled);
  }

  object* translate_data(const object* obj) {
    if (obj < *data_finger)
      return fixup_data((object*)obj);
    return (object*)obj;
  }

  code_block* translate_code(const code_block* compiled) {
    if (compiled < *code_finger)
      return fixup_code((code_block*)compiled);
    return (code_block*)compiled;
  }

  cell size(object* obj) {
    if (data_forwarding_map->marked_p((cell)obj))
      return obj->size(*this);
    return data_forwarding_map->unmarked_block_size((cell)obj);
  }

  cell size(code_block* compiled) {
    if (code_forwarding_map->marked_p((cell)compiled))
      return compiled->size(*this);
    return code_forwarding_map->unmarked_block_size((cell)compiled);
  }
};

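/* Fixup used when compacting only the code heap. Data heap pointers are
left untouched; only code block pointers below the finger are forwarded. */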
struct code_compaction_fixup {
  static const bool translated_code_block_map = false;

  mark_bits* code_forwarding_map;
  const code_block** code_finger;

  code_compaction_fixup(mark_bits* code_forwarding_map,
                        const code_block** code_finger)
      : code_forwarding_map(code_forwarding_map), code_finger(code_finger) {}

  object* fixup_data(object* obj) { return obj; }

  code_block* fixup_code(code_block* compiled) {
    return (code_block*)code_forwarding_map->forward_block((cell)compiled);
  }

  object* translate_data(const object* obj) { return fixup_data((object*)obj); }

  code_block* translate_code(const code_block* compiled) {
    if (compiled < *code_finger)
      return fixup_code((code_block*)compiled);
    return (code_block*)compiled;
  }

  cell size(object* obj) { return obj->size(); }

  cell size(code_block* compiled) {
    if (code_forwarding_map->marked_p((cell)compiled))
      return compiled->size(*this);
    return code_forwarding_map->unmarked_block_size((cell)compiled);
  }
};

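/* Rewrite one relocation entry of a moved code block: literal slots are
retagged to point at forwarded objects, entry point references are
redirected to forwarded code blocks, and addresses such as RT_THIS and
the card/deck offsets are recomputed via store_external_address. */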
template <typename Fixup>
void update_relocation(factor_vm* parent,
                       cell old_entry_point,
                       Fixup fixup,
                       instruction_operand op) {
  cell old_offset = op.rel_offset() + old_entry_point;

  switch (op.rel_type()) {
    case RT_LITERAL: {
      cell value = op.load_value(old_offset);
      if (immediate_p(value))
        op.store_value(value);
      else
        op.store_value(
            RETAG(fixup.fixup_data(untag<object>(value)), TAG(value)));
      break;
    }
    case RT_ENTRY_POINT:
    case RT_ENTRY_POINT_PIC:
    case RT_ENTRY_POINT_PIC_TAIL:
    case RT_HERE: {
      cell value = op.load_value(old_offset);
      cell offset = TAG(value);
      code_block* compiled = (code_block*)UNTAG(value);
      op.store_value((cell)fixup.fixup_code(compiled) + offset);
      break;
    }
    case RT_THIS:
    case RT_CARDS_OFFSET:
    case RT_DECKS_OFFSET:
      parent->store_external_address(op);
      break;
    default:
      op.store_value(op.load_value(old_offset));
      break;
  }
}

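/* Called for each code block after it has been moved. Forwards the data
heap objects the block refers to, then updates every instruction operand
relative to the block's old entry point. */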
template <typename Fixup> struct code_block_compaction_updater {
  factor_vm* parent;
  Fixup fixup;
  slot_visitor<Fixup> forwarder;

  code_block_compaction_updater(
      factor_vm* parent, Fixup fixup, slot_visitor<Fixup> forwarder)
      : parent(parent),
        fixup(fixup),
        forwarder(forwarder) { }

  void operator()(code_block* old_address, code_block* new_address, cell size) {
    forwarder.visit_code_block_objects(new_address);

    cell old_entry_point = old_address->entry_point();
    auto update_func = [&](instruction_operand op) {
      update_relocation(parent, old_entry_point, fixup, op);
    };
    new_address->each_instruction_operand(update_func);
  }
};

/* After a compaction, invalidate any code heap roots which are not
marked, and slide the valid roots up so that call sites can be updated
correctly in case an inline cache compilation triggered the compaction. */
void factor_vm::update_code_roots_for_compaction() {
  mark_bits* state = &code->allocator->state;

  FACTOR_FOR_EACH(code_roots) {
    code_root* root = *iter;
    cell block = root->value & (~data_alignment + 1);

    /* Offset of the return address within its 16-byte allocation line */
    cell offset = root->value - block;

    if (root->valid && state->marked_p(block)) {
      block = state->forward_block(block);
      root->value = block + offset;
    } else
      root->valid = false;
  }
}

/* Compact data and code heaps */
void factor_vm::collect_compact_impl() {
  gc_event* event = current_gc->event;

#ifdef FACTOR_DEBUG
  code->verify_all_blocks_set();
#endif

  if (event)
    event->started_compaction();

  tenured_space* tenured = data->tenured;
  mark_bits* data_forwarding_map = &tenured->state;
  mark_bits* code_forwarding_map = &code->allocator->state;

  /* Figure out where blocks are going to go */
  data_forwarding_map->compute_forwarding();
  code_forwarding_map->compute_forwarding();

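  /* Compaction proceeds from the bottom of each heap upwards; the fingers
     record how far it has gotten, so compaction_fixup::translate_data and
     translate_code know which pointers have already been forwarded. */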
  const object* data_finger = (object*)tenured->start;
  const code_block* code_finger = (code_block*)code->allocator->start;

  {
    compaction_fixup fixup(data_forwarding_map, code_forwarding_map, &data_finger,
                           &code_finger);

    slot_visitor<compaction_fixup> forwarder(this, fixup);

    forwarder.visit_uninitialized_code_blocks();

    /* Object start offsets get recomputed by compact_object_func below */
    data->tenured->starts.clear_object_start_offsets();

    /* Slide everything in tenured space up, and update data and code heap
       pointers inside objects. */
    auto compact_object_func = [&](object* old_addr, object* new_addr, cell size) {
      forwarder.visit_slots(new_addr);
      forwarder.visit_object_code_block(new_addr);
      tenured->starts.record_object_start_offset(new_addr);
    };
    tenured->compact(compact_object_func, fixup, &data_finger);

    /* Slide everything in the code heap up, and update data and code heap
       pointers inside code blocks. */
    {
      code_block_compaction_updater<compaction_fixup> code_block_updater(
          this, fixup, forwarder);
      code->allocator->compact(code_block_updater, fixup, &code_finger);
    }

    forwarder.visit_all_roots();
    forwarder.visit_context_code_blocks();
  }

  update_code_roots_for_compaction();
  callbacks->update();

  code->initialize_all_blocks_set();

  if (event)
    event->ended_compaction();
}

/* Compact just the code heap, after growing the data heap */
void factor_vm::collect_compact_code_impl() {
  /* Figure out where blocks are going to go */
  mark_bits* code_forwarding_map = &code->allocator->state;
  code_forwarding_map->compute_forwarding();

  const code_block* code_finger = (code_block*)code->allocator->start;

  code_compaction_fixup fixup(code_forwarding_map, &code_finger);
  slot_visitor<code_compaction_fixup> forwarder(this, fixup);

  forwarder.visit_uninitialized_code_blocks();
  forwarder.visit_context_code_blocks();

  /* Update code heap references in data heap */
  auto object_grow_heap_updater = [&](object* obj) {
    forwarder.visit_object_code_block(obj);
  };
  each_object(object_grow_heap_updater);

  /* Slide everything in the code heap up, and update code heap
     pointers inside code blocks. */
  code_block_compaction_updater<code_compaction_fixup> code_block_updater(
      this, fixup, forwarder);
  code->allocator->compact(code_block_updater, fixup, &code_finger);

  update_code_roots_for_compaction();
  callbacks->update();
  code->initialize_all_blocks_set();
}

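/* Perform a full mark phase followed by compaction of both heaps. If
tenured space is still highly fragmented afterwards, compaction freed
too little memory, so fall back to growing the data heap. */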
void factor_vm::collect_compact() {
  collect_mark_impl();
  collect_compact_impl();

  if (data->high_fragmentation_p()) {
    /* Compaction did not free up enough memory. Grow the heap. */
    set_current_gc_op(collect_growing_heap_op);
    collect_growing_heap(0);
  }

  code->flush_icache();
}

/* Grow the data heap and copy all live objects to the new heap. */
void factor_vm::collect_growing_heap(cell requested_size) {
  data_heap* old = data;
  set_data_heap(data->grow(&nursery, requested_size));
  collect_mark_impl();
  collect_compact_code_impl();
  code->flush_icache();
  delete old;
}

}