#include "master.hpp"

namespace factor {

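/* Forwards data and code pointers to their post-compaction addresses.
The fingers mark how far each compaction pass has progressed: pointers
below a finger have already been moved and must be forwarded, while
pointers above it still live at their original addresses. */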
struct compaction_fixup {
  static const bool translated_code_block_map = false;

  mark_bits<object> *data_forwarding_map;
  mark_bits<code_block> *code_forwarding_map;
  const object **data_finger;
  const code_block **code_finger;

  explicit compaction_fixup(
    mark_bits<object> *data_forwarding_map_,
    mark_bits<code_block> *code_forwarding_map_,
    const object **data_finger_,
    const code_block **code_finger_) :
    data_forwarding_map(data_forwarding_map_),
    code_forwarding_map(code_forwarding_map_),
    data_finger(data_finger_),
    code_finger(code_finger_) {}

  object *fixup_data(object *obj)
  {
    return data_forwarding_map->forward_block(obj);
  }

  code_block *fixup_code(code_block *compiled)
  {
    return code_forwarding_map->forward_block(compiled);
  }

  object *translate_data(const object *obj)
  {
    if(obj < *data_finger)
      return fixup_data((object *)obj);
    else
      return (object *)obj;
  }

  code_block *translate_code(const code_block *compiled)
  {
    if(compiled < *code_finger)
      return fixup_code((code_block *)compiled);
    else
      return (code_block *)compiled;
  }

  cell size(object *obj)
  {
    if(data_forwarding_map->marked_p(obj))
      return obj->size(*this);
    else
      return data_forwarding_map->unmarked_block_size(obj);
  }

  cell size(code_block *compiled)
  {
    if(code_forwarding_map->marked_p(compiled))
      return compiled->size(*this);
    else
      return code_forwarding_map->unmarked_block_size(compiled);
  }
};

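/* Called for each live object as tenured space is compacted: forwards
the slots and code block pointer of the object at its new address, and
records the new object start offset in the object start map. */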
struct object_compaction_updater {
  factor_vm *parent;
  compaction_fixup fixup;
  object_start_map *starts;

  explicit object_compaction_updater(factor_vm *parent_, compaction_fixup fixup_) :
    parent(parent_),
    fixup(fixup_),
    starts(&parent->data->tenured->starts) {}

  void operator()(object *old_address, object *new_address, cell size)
  {
    slot_visitor<compaction_fixup> slot_forwarder(parent,fixup);
    slot_forwarder.visit_slots(new_address);

    code_block_visitor<compaction_fixup> code_forwarder(parent,fixup);
    code_forwarder.visit_object_code_block(new_address);

    starts->record_object_start_offset(new_address);
  }
};

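/* Rewrites the relocated instruction operands of a code block that has
moved. Each value is loaded relative to the block's old entry point,
forwarded through the fixup, and stored back at the new location. */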
template<typename Fixup>
struct code_block_compaction_relocation_visitor {
  factor_vm *parent;
  code_block *old_address;
  Fixup fixup;

  explicit code_block_compaction_relocation_visitor(factor_vm *parent_,
    code_block *old_address_,
    Fixup fixup_) :
    parent(parent_),
    old_address(old_address_),
    fixup(fixup_) {}

  void operator()(instruction_operand op)
  {
    cell old_offset = op.rel_offset() + (cell)old_address->entry_point();

    switch(op.rel_type())
    {
    case RT_LITERAL:
      {
        cell value = op.load_value(old_offset);
        if(immediate_p(value))
          op.store_value(value);
        else
          op.store_value(RETAG(fixup.fixup_data(untag<object>(value)),TAG(value)));
        break;
      }
    case RT_ENTRY_POINT:
    case RT_ENTRY_POINT_PIC:
    case RT_ENTRY_POINT_PIC_TAIL:
    case RT_HERE:
      {
        cell value = op.load_value(old_offset);
        cell offset = TAG(value);
        code_block *compiled = (code_block *)UNTAG(value);
        op.store_value((cell)fixup.fixup_code(compiled) + offset);
        break;
      }
    case RT_THIS:
    case RT_CARDS_OFFSET:
    case RT_DECKS_OFFSET:
      parent->store_external_address(op);
      break;
    default:
      op.store_value(op.load_value(old_offset));
      break;
    }
  }
};

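/* Called for each live code block as the code heap is compacted:
forwards the objects the block references, then relocates its
instruction operands with the visitor above. */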
template<typename Fixup>
struct code_block_compaction_updater {
  factor_vm *parent;
  Fixup fixup;
  slot_visitor<Fixup> data_forwarder;
  code_block_visitor<Fixup> code_forwarder;

  explicit code_block_compaction_updater(factor_vm *parent_,
    Fixup fixup_,
    slot_visitor<Fixup> data_forwarder_,
    code_block_visitor<Fixup> code_forwarder_) :
    parent(parent_),
    fixup(fixup_),
    data_forwarder(data_forwarder_),
    code_forwarder(code_forwarder_) {}

  void operator()(code_block *old_address, code_block *new_address, cell size)
  {
    data_forwarder.visit_code_block_objects(new_address);

    code_block_compaction_relocation_visitor<Fixup> visitor(parent,old_address,fixup);
    new_address->each_instruction_operand(visitor);
  }
};

/* After a compaction, invalidate any code heap roots which are not
marked, and also slide the valid roots up so that call sites can be
updated correctly in case an inline cache compilation triggered
compaction. */
void factor_vm::update_code_roots_for_compaction()
{
  std::vector<code_root *>::const_iterator iter = code_roots.begin();
  std::vector<code_root *>::const_iterator end = code_roots.end();

  mark_bits<code_block> *state = &code->allocator->state;

  for(; iter < end; iter++)
  {
    code_root *root = *iter;
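    /* ~data_alignment + 1 == -data_alignment, so this masks the root's
    value down to the start of its 16-byte allocation line */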
    code_block *block = (code_block *)(root->value & (~data_alignment + 1));

    /* Offset of return address within 16-byte allocation line */
    cell offset = root->value - (cell)block;

    if(root->valid && state->marked_p(block))
    {
      block = state->forward_block(block);
      root->value = (cell)block + offset;
    }
    else
      root->valid = false;
  }
}

/* Compact data and code heaps */
void factor_vm::collect_compact_impl(bool trace_contexts_p)
{
  gc_event *event = current_gc->event;

#if defined(FACTOR_DEBUG)
  code->verify_all_blocks_set();
#endif

  if(event) event->started_compaction();

  tenured_space *tenured = data->tenured;
  mark_bits<object> *data_forwarding_map = &tenured->state;
  mark_bits<code_block> *code_forwarding_map = &code->allocator->state;

  /* Figure out where blocks are going to go */
  data_forwarding_map->compute_forwarding();
  code_forwarding_map->compute_forwarding();

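  /* The fingers track compaction progress: pointers below a finger have
  already been moved, so the fixup forwards them; pointers above it are
  still at their original addresses */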
  const object *data_finger = tenured->first_block();
  const code_block *code_finger = code->allocator->first_block();

  compaction_fixup fixup(data_forwarding_map,code_forwarding_map,&data_finger,&code_finger);
  slot_visitor<compaction_fixup> data_forwarder(this,fixup);
  code_block_visitor<compaction_fixup> code_forwarder(this,fixup);

  code_forwarder.visit_code_roots();

  /* Object start offsets get recomputed by the object_compaction_updater */
  data->tenured->starts.clear_object_start_offsets();

  /* Slide everything in tenured space up, and update data and code heap
  pointers inside objects. */
  object_compaction_updater object_updater(this,fixup);
  tenured->compact(object_updater,fixup,&data_finger);

  /* Slide everything in the code heap up, and update data and code heap
  pointers inside code blocks. */
  code_block_compaction_updater<compaction_fixup> code_block_updater(this,fixup,data_forwarder,code_forwarder);
  code->allocator->compact(code_block_updater,fixup,&code_finger);

  data_forwarder.visit_roots();
  if(trace_contexts_p)
  {
    data_forwarder.visit_contexts();
    code_forwarder.visit_context_code_blocks();
  }

  update_code_roots_for_compaction();
  callbacks->update();

  code->initialize_all_blocks_set();

  if(event) event->ended_compaction();
}

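/* Like compaction_fixup, but used when only the code heap is being
compacted: data pointers are returned unchanged. */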
struct code_compaction_fixup {
  static const bool translated_code_block_map = false;

  mark_bits<code_block> *code_forwarding_map;
  const code_block **code_finger;

  explicit code_compaction_fixup(mark_bits<code_block> *code_forwarding_map_,
    const code_block **code_finger_) :
    code_forwarding_map(code_forwarding_map_),
    code_finger(code_finger_) {}

  object *fixup_data(object *obj)
  {
    return obj;
  }

  code_block *fixup_code(code_block *compiled)
  {
    return code_forwarding_map->forward_block(compiled);
  }

  object *translate_data(const object *obj)
  {
    return fixup_data((object *)obj);
  }

  code_block *translate_code(const code_block *compiled)
  {
    if(compiled < *code_finger)
      return fixup_code((code_block *)compiled);
    else
      return (code_block *)compiled;
  }

  cell size(object *obj)
  {
    return obj->size();
  }

  cell size(code_block *compiled)
  {
    if(code_forwarding_map->marked_p(compiled))
      return compiled->size(*this);
    else
      return code_forwarding_map->unmarked_block_size(compiled);
  }
};

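/* Visits each object in the data heap and forwards its code block
pointer after the code heap has been compacted. */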
struct object_grow_heap_updater {
  code_block_visitor<code_compaction_fixup> code_forwarder;

  explicit object_grow_heap_updater(code_block_visitor<code_compaction_fixup> code_forwarder_) :
    code_forwarder(code_forwarder_) {}

  void operator()(object *obj)
  {
    code_forwarder.visit_object_code_block(obj);
  }
};

/* Compact just the code heap, after growing the data heap */
void factor_vm::collect_compact_code_impl(bool trace_contexts_p)
{
  /* Figure out where blocks are going to go */
  mark_bits<code_block> *code_forwarding_map = &code->allocator->state;
  code_forwarding_map->compute_forwarding();

  const code_block *code_finger = code->allocator->first_block();

  code_compaction_fixup fixup(code_forwarding_map,&code_finger);
  slot_visitor<code_compaction_fixup> data_forwarder(this,fixup);
  code_block_visitor<code_compaction_fixup> code_forwarder(this,fixup);

  code_forwarder.visit_code_roots();

  if(trace_contexts_p)
    code_forwarder.visit_context_code_blocks();

  /* Update code heap references in data heap */
  object_grow_heap_updater object_updater(code_forwarder);
  each_object(object_updater);

  /* Slide everything in the code heap up, and update code heap
  pointers inside code blocks. */
  code_block_compaction_updater<code_compaction_fixup> code_block_updater(this,fixup,data_forwarder,code_forwarder);
  code->allocator->compact(code_block_updater,fixup,&code_finger);

  update_code_roots_for_compaction();
  callbacks->update();
  code->initialize_all_blocks_set();
}

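/* Mark all live objects, then compact both heaps; if compaction did not
free up enough memory, grow the data heap as well. */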
void factor_vm::collect_compact(bool trace_contexts_p)
{
  collect_mark_impl(trace_contexts_p);
  collect_compact_impl(trace_contexts_p);

  if(data->high_fragmentation_p())
  {
    /* Compaction did not free up enough memory. Grow the heap. */
    set_current_gc_op(collect_growing_heap_op);
    collect_growing_heap(0,trace_contexts_p);
  }

  code->flush_icache();
}

/* Grow the data heap and copy all live objects to the new heap. */
void factor_vm::collect_growing_heap(cell requested_size, bool trace_contexts_p)
{
  data_heap *old = data;
  set_data_heap(data->grow(requested_size));
  collect_mark_impl(trace_contexts_p);
  collect_compact_code_impl(trace_contexts_p);
  code->flush_icache();
  delete old;
}

}