/* factor.git: vm/compaction.cpp */
1 #include "master.hpp"
2
3 namespace factor {
4
5 struct compaction_fixup {
6         mark_bits<object> *data_forwarding_map;
7         mark_bits<code_block> *code_forwarding_map;
8         const object **data_finger;
9         const code_block **code_finger;
10
11         explicit compaction_fixup(
12                 mark_bits<object> *data_forwarding_map_,
13                 mark_bits<code_block> *code_forwarding_map_,
14                 const object **data_finger_,
15                 const code_block **code_finger_) :
16                 data_forwarding_map(data_forwarding_map_),
17                 code_forwarding_map(code_forwarding_map_),
18                 data_finger(data_finger_),
19                 code_finger(code_finger_) {}
20
21         object *fixup_data(object *obj)
22         {
23                 return data_forwarding_map->forward_block(obj);
24         }
25
26         code_block *fixup_code(code_block *compiled)
27         {
28                 return code_forwarding_map->forward_block(compiled);
29         }
30
31         object *translate_data(const object *obj)
32         {
33                 if(obj < *data_finger)
34                         return fixup_data((object *)obj);
35                 else
36                         return (object *)obj;
37         }
38
39         code_block *translate_code(const code_block *compiled)
40         {
41                 if(compiled < *code_finger)
42                         return fixup_code((code_block *)compiled);
43                 else
44                         return (code_block *)compiled;
45         }
46
47         cell size(object *obj)
48         {
49                 if(data_forwarding_map->marked_p(obj))
50                         return obj->size(*this);
51                 else
52                         return data_forwarding_map->unmarked_block_size(obj);
53         }
54
55         cell size(code_block *compiled)
56         {
57                 if(code_forwarding_map->marked_p(compiled))
58                         return compiled->size(*this);
59                 else
60                         return code_forwarding_map->unmarked_block_size(compiled);
61         }
62 };
63
64 struct object_compaction_updater {
65         factor_vm *parent;
66         compaction_fixup fixup;
67         object_start_map *starts;
68
69         explicit object_compaction_updater(factor_vm *parent_, compaction_fixup fixup_) :
70                 parent(parent_),
71                 fixup(fixup_),
72                 starts(&parent->data->tenured->starts) {}
73
74         void operator()(object *old_address, object *new_address, cell size)
75         {
76                 slot_visitor<compaction_fixup> slot_forwarder(parent,fixup);
77                 slot_forwarder.visit_slots(new_address);
78
79                 code_block_visitor<compaction_fixup> code_forwarder(parent,fixup);
80                 code_forwarder.visit_object_code_block(new_address);
81
82                 starts->record_object_start_offset(new_address);
83         }
84 };
85
/* Visits each instruction operand of a code block that has just been
moved during compaction and rewrites it so it is correct at the block's
new address. */
template<typename Fixup>
struct code_block_compaction_relocation_visitor {
	factor_vm *parent;
	/* Where the block lived before the move; the pre-move operand values
	must be read from there */
	code_block *old_address;
	Fixup fixup;

	explicit code_block_compaction_relocation_visitor(factor_vm *parent_,
		code_block *old_address_,
		Fixup fixup_) :
		parent(parent_),
		old_address(old_address_),
		fixup(fixup_) {}

	void operator()(instruction_operand op)
	{
		/* Offset of this operand inside the old copy of the block */
		cell old_offset = op.rel_offset() + (cell)old_address->entry_point();

		switch(op.rel_type())
		{
		case RT_LITERAL:
			{
				cell value = op.load_value(old_offset);
				/* Immediate values never move; tagged pointers are
				forwarded to the referent's new address with the tag
				preserved */
				if(immediate_p(value))
					op.store_value(value);
				else
					op.store_value(RETAG(fixup.fixup_data(untag<object>(value)),TAG(value)));
				break;
			}
		case RT_ENTRY_POINT:
		case RT_ENTRY_POINT_PIC:
		case RT_ENTRY_POINT_PIC_TAIL:
		case RT_HERE:
			{
				cell value = op.load_value(old_offset);
				/* The tag bits carry a small offset into the code block;
				strip them, forward the block, then re-apply the offset */
				cell offset = TAG(value);
				code_block *compiled = (code_block *)UNTAG(value);
				op.store_value((cell)fixup.fixup_code(compiled) + offset);
				break;
			}
		case RT_THIS:
		case RT_CARDS_OFFSET:
		case RT_DECKS_OFFSET:
			/* Recomputed from current VM state rather than forwarded */
			parent->store_external_address(op);
			break;
		default:
			/* Re-store the old value at the new location; presumably
			store_value() re-encodes any PC-relative form relative to the
			operand's new address -- TODO confirm in instruction_operand */
			op.store_value(op.load_value(old_offset));
			break;
		}
	}
};
136
137 template<typename Fixup>
138 struct code_block_compaction_updater {
139         factor_vm *parent;
140         Fixup fixup;
141         slot_visitor<Fixup> data_forwarder;
142         code_block_visitor<Fixup> code_forwarder;
143
144         explicit code_block_compaction_updater(factor_vm *parent_,
145                 Fixup fixup_,
146                 slot_visitor<Fixup> data_forwarder_,
147                 code_block_visitor<Fixup> code_forwarder_) :
148                 parent(parent_),
149                 fixup(fixup_),
150                 data_forwarder(data_forwarder_),
151                 code_forwarder(code_forwarder_) {}
152
153         void operator()(code_block *old_address, code_block *new_address, cell size)
154         {
155                 data_forwarder.visit_code_block_objects(new_address);
156
157                 code_block_compaction_relocation_visitor<Fixup> visitor(parent,old_address,fixup);
158                 new_address->each_instruction_operand(visitor);
159         }
160 };
161
162 /* After a compaction, invalidate any code heap roots which are not
163 marked, and also slide the valid roots up so that call sites can be updated
164 correctly in case an inline cache compilation triggered compaction. */
165 void factor_vm::update_code_roots_for_compaction()
166 {
167         std::vector<code_root *>::const_iterator iter = code_roots.begin();
168         std::vector<code_root *>::const_iterator end = code_roots.end();
169
170         mark_bits<code_block> *state = &code->allocator->state;
171
172         for(; iter < end; iter++)
173         {
174                 code_root *root = *iter;
175                 code_block *block = (code_block *)(root->value & (~data_alignment + 1));
176
177                 /* Offset of return address within 16-byte allocation line */
178                 cell offset = root->value - (cell)block;
179
180                 if(root->valid && state->marked_p(block))
181                 {
182                         block = state->forward_block(block);
183                         root->value = (cell)block + offset;
184                 }
185                 else
186                         root->valid = false;
187         }
188 }
189
/* Compact data and code heaps */
void factor_vm::collect_compact_impl(bool trace_contexts_p)
{
	current_gc->event->started_compaction();

	tenured_space *tenured = data->tenured;
	mark_bits<object> *data_forwarding_map = &tenured->state;
	mark_bits<code_block> *code_forwarding_map = &code->allocator->state;

	/* Figure out where blocks are going to go */
	data_forwarding_map->compute_forwarding();
	code_forwarding_map->compute_forwarding();

	/* Compaction fingers; compact() receives their addresses below and
	presumably advances them as blocks are moved, letting the fixup tell
	already-moved blocks from not-yet-moved ones */
	const object *data_finger = tenured->first_block();
	const code_block *code_finger = code->allocator->first_block();

	compaction_fixup fixup(data_forwarding_map,code_forwarding_map,&data_finger,&code_finger);
	slot_visitor<compaction_fixup> data_forwarder(this,fixup);
	code_block_visitor<compaction_fixup> code_forwarder(this,fixup);

	code_forwarder.visit_uninitialized_code_blocks();

	/* Object start offsets get recomputed by the object_compaction_updater */
	data->tenured->starts.clear_object_start_offsets();

	/* Slide everything in tenured space up, and update data and code heap
	pointers inside objects. */
	object_compaction_updater object_updater(this,fixup);
	tenured->compact(object_updater,fixup,&data_finger);

	/* Slide everything in the code heap up, and update data and code heap
	pointers inside code blocks. */
	code_block_compaction_updater<compaction_fixup> code_block_updater(this,fixup,data_forwarder,code_forwarder);
	code->allocator->compact(code_block_updater,fixup,&code_finger);

	/* Forward the GC roots, and optionally the context stacks */
	data_forwarder.visit_roots();
	if(trace_contexts_p)
	{
		data_forwarder.visit_contexts();
		code_forwarder.visit_context_code_blocks();
	}

	update_code_roots_for_compaction();
	callbacks->update();

	current_gc->event->ended_compaction();
}
237
238 struct code_compaction_fixup {
239         mark_bits<code_block> *code_forwarding_map;
240         const code_block **code_finger;
241
242         explicit code_compaction_fixup(mark_bits<code_block> *code_forwarding_map_,
243                 const code_block **code_finger_) :
244                 code_forwarding_map(code_forwarding_map_),
245                 code_finger(code_finger_) {}
246
247         object *fixup_data(object *obj)
248         {
249                 return obj;
250         }
251
252         code_block *fixup_code(code_block *compiled)
253         {
254                 return code_forwarding_map->forward_block(compiled);
255         }
256
257         object *translate_data(const object *obj)
258         {
259                 return fixup_data((object *)obj);
260         }
261
262         code_block *translate_code(const code_block *compiled)
263         {
264                 if(compiled >= *code_finger)
265                         return fixup_code((code_block *)compiled);
266                 else
267                         return (code_block *)compiled;
268         }
269
270         cell size(object *obj)
271         {
272                 return obj->size();
273         }
274
275         cell size(code_block *compiled)
276         {
277                 if(code_forwarding_map->marked_p(compiled))
278                         return compiled->size(*this);
279                 else
280                         return code_forwarding_map->unmarked_block_size(compiled);
281         }
282 };
283
/* Functor applied to every data heap object after the data heap has been
grown; forwards the object's code block reference (if it has one) to the
block's post-compaction address. */
struct object_grow_heap_updater {
	code_block_visitor<code_compaction_fixup> code_forwarder;

	explicit object_grow_heap_updater(code_block_visitor<code_compaction_fixup> code_forwarder_) :
		code_forwarder(code_forwarder_) {}

	void operator()(object *obj)
	{
		code_forwarder.visit_object_code_block(obj);
	}
};
295
/* Compact just the code heap, after growing the data heap */
void factor_vm::collect_compact_code_impl(bool trace_contexts_p)
{
	/* Figure out where blocks are going to go */
	mark_bits<code_block> *code_forwarding_map = &code->allocator->state;
	code_forwarding_map->compute_forwarding();

	/* Compaction finger; compact() receives its address below */
	const code_block *code_finger = code->allocator->first_block();

	code_compaction_fixup fixup(code_forwarding_map,&code_finger);
	slot_visitor<code_compaction_fixup> data_forwarder(this,fixup);
	code_block_visitor<code_compaction_fixup> code_forwarder(this,fixup);

	code_forwarder.visit_uninitialized_code_blocks();

	if(trace_contexts_p)
		code_forwarder.visit_context_code_blocks();

	/* Update code heap references in data heap */
	object_grow_heap_updater object_updater(code_forwarder);
	each_object(object_updater);

	/* Slide everything in the code heap up, and update code heap
	pointers inside code blocks. */
	code_block_compaction_updater<code_compaction_fixup> code_block_updater(this,fixup,data_forwarder,code_forwarder);
	code->allocator->compact(code_block_updater,fixup,&code_finger);

	update_code_roots_for_compaction();
	callbacks->update();
}
326
/* Full collection: mark all live data, then compact both heaps. The
instruction cache is flushed afterwards because code blocks may have
moved. */
void factor_vm::collect_compact(bool trace_contexts_p)
{
	collect_mark_impl(trace_contexts_p);
	collect_compact_impl(trace_contexts_p);
	code->flush_icache();
}
333
/* Collection that enlarges the data heap: install a grown heap, mark,
then compact only the code heap. */
void factor_vm::collect_growing_heap(cell requested_bytes, bool trace_contexts_p)
{
	/* Grow the data heap and copy all live objects to the new heap. */
	data_heap *old = data;
	set_data_heap(data->grow(requested_bytes));
	collect_mark_impl(trace_contexts_p);
	collect_compact_code_impl(trace_contexts_p);
	code->flush_icache();
	/* The old heap is freed only after collection has finished copying
	out of it */
	delete old;
}
344
345 }