/* Forwarding callbacks used while compacting the data heap and the code
heap together. The two mark-bit maps supply each live block's destination
address; the two "fingers" track the compaction frontier in each heap, so
that only pointers to blocks the compactor has already moved (i.e. below
the finger) get translated. */
struct compaction_fixup {
mark_bits<object> *data_forwarding_map;   /* forwarding info for tenured data objects */
mark_bits<code_block> *code_forwarding_map;   /* forwarding info for code blocks */
const object **data_finger;   /* current compaction position in the data heap */
const code_block **code_finger;   /* current compaction position in the code heap */
explicit compaction_fixup(
mark_bits<object> *data_forwarding_map_,
mark_bits<code_block> *code_forwarding_map_,
const object **data_finger_,
const code_block **code_finger_) :
data_forwarding_map(data_forwarding_map_),
code_forwarding_map(code_forwarding_map_),
data_finger(data_finger_),
code_finger(code_finger_) {}
/* Post-compaction address of a live data object. */
object *fixup_data(object *obj)
return data_forwarding_map->forward_block(obj);
/* Post-compaction address of a live code block. */
code_block *fixup_code(code_block *compiled)
return code_forwarding_map->forward_block(compiled);
/* Translate a data pointer: blocks below the finger have already been
moved, so they must be forwarded; blocks at or past it still live at
their old address (the untranslated return path is outside this view). */
object *translate_data(const object *obj)
if(obj < *data_finger)
return fixup_data((object *)obj);
/* Translate a code pointer — same finger discipline as translate_data. */
code_block *translate_code(const code_block *compiled)
if(compiled < *code_finger)
return fixup_code((code_block *)compiled);
return (code_block *)compiled;
/* Size of a data block. Unmarked (dead) blocks get their size from the
mark bitmap — presumably because their headers can no longer be
trusted once neighbors have been slid over them; confirm. */
cell size(object *obj)
if(data_forwarding_map->marked_p(obj))
return obj->size(*this);
return data_forwarding_map->unmarked_block_size(obj);
/* Size of a code block — same marked/unmarked split as above. */
cell size(code_block *compiled)
if(code_forwarding_map->marked_p(compiled))
return compiled->size(*this);
return code_forwarding_map->unmarked_block_size(compiled);
/* Invoked for every object the data-heap compactor moves: re-visits the
object's slots and its code block pointer through the compaction fixup,
and records the object's new start offset in tenured space's object
start map. */
struct object_compaction_updater {
compaction_fixup fixup;
object_start_map *starts;
/* NOTE(review): `parent` is referenced below, but its member declaration
and the init-list entries for parent/fixup fall outside this view. */
explicit object_compaction_updater(factor_vm *parent_, compaction_fixup fixup_) :
starts(&parent->data->tenured->starts) {}
void operator()(object *old_address, object *new_address, cell size)
/* Forward the data-heap pointers stored in the object's slots... */
slot_visitor<compaction_fixup> slot_forwarder(parent,fixup);
slot_forwarder.visit_slots(new_address);
/* ...and the pointer to the object's compiled code block, if any. */
code_block_visitor<compaction_fixup> code_forwarder(parent,fixup);
code_forwarder.visit_object_code_block(new_address);
/* Start offsets were cleared before compaction began; rebuild them at
the object's new location. */
starts->record_object_start_offset(new_address);
/* Applied to every instruction operand of a moved code block: recomputes
the operand's value so it refers to post-compaction addresses. */
template<typename Fixup>
struct code_block_compaction_relocation_visitor {
code_block *old_address;
/* NOTE(review): the parent/fixup member declarations and part of the
constructor init list fall outside this view. */
explicit code_block_compaction_relocation_visitor(factor_vm *parent_,
code_block *old_address_,
old_address(old_address_),
void operator()(instruction_operand op)
/* Operand values must be loaded relative to the block's OLD entry
point, since the instruction bytes were copied verbatim before this
visitor runs. */
cell old_offset = op.rel_offset() + (cell)old_address->entry_point();
switch(op.rel_type())
/* Tagged literal (case label outside this view): immediates pass
through unchanged; heap pointers are forwarded and re-tagged. */
cell value = op.load_value(old_offset);
if(immediate_p(value))
op.store_value(value);
op.store_value(RETAG(fixup.fixup_data(untag<object>(value)),TAG(value)));
case RT_ENTRY_POINT_PIC:
case RT_ENTRY_POINT_PIC_TAIL:
/* Entry point into another code block: the low tag bits carry an
offset, the remainder is the block address, which is forwarded. */
cell value = op.load_value(old_offset);
cell offset = TAG(value);
code_block *compiled = (code_block *)UNTAG(value);
op.store_value((cell)fixup.fixup_code(compiled) + offset);
case RT_CARDS_OFFSET:
case RT_DECKS_OFFSET:
/* Card/deck offsets are re-stored from the VM's current values. */
parent->store_external_address(op);
/* Remaining relocation types: re-store the value unchanged. */
op.store_value(op.load_value(old_offset));
/* Invoked for every code block the code-heap compactor moves: forwards
the data-heap objects the block references, then patches each of its
instruction operands via code_block_compaction_relocation_visitor. */
template<typename Fixup>
struct code_block_compaction_updater {
slot_visitor<Fixup> data_forwarder;
code_block_visitor<Fixup> code_forwarder;
/* NOTE(review): the parent/fixup member declarations and their init-list
entries fall outside this view. */
explicit code_block_compaction_updater(factor_vm *parent_,
slot_visitor<Fixup> data_forwarder_,
code_block_visitor<Fixup> code_forwarder_) :
data_forwarder(data_forwarder_),
code_forwarder(code_forwarder_) {}
void operator()(code_block *old_address, code_block *new_address, cell size)
/* Forward data-heap objects referenced by the block... */
data_forwarder.visit_code_block_objects(new_address);
/* ...then rewrite instruction operands, reading their current values
relative to the block's old address. */
code_block_compaction_relocation_visitor<Fixup> visitor(parent,old_address,fixup);
new_address->each_instruction_operand(visitor);
/* After a compaction, invalidate any code heap roots which are not
marked, and also slide the valid roots up so that call sites can be updated
correctly in case an inline cache compilation triggered compaction. */
void factor_vm::update_code_roots_for_compaction()
std::vector<code_root *>::const_iterator iter = code_roots.begin();
std::vector<code_root *>::const_iterator end = code_roots.end();
mark_bits<code_block> *state = &code->allocator->state;
for(; iter < end; iter++)
code_root *root = *iter;
/* (~data_alignment + 1) == -data_alignment: mask the stored return
address down to its allocation-line boundary to recover the owning
code block's header address. */
code_block *block = (code_block *)(root->value & (~data_alignment + 1));
/* Offset of return address within 16-byte allocation line */
cell offset = root->value - (cell)block;
/* Live, marked block: rebase the root onto the block's new address,
preserving the intra-block offset. NOTE(review): the else branch that
invalidates unmarked roots falls outside this view. */
if(root->valid && state->marked_p(block))
block = state->forward_block(block);
root->value = (cell)block + offset;
/* Compact data and code heaps */
void factor_vm::collect_compact_impl(bool trace_contexts_p)
gc_event *event = current_gc->event;
/* Record timing/bookkeeping for this compaction, if events are on. */
if(event) event->started_compaction();
tenured_space *tenured = data->tenured;
mark_bits<object> *data_forwarding_map = &tenured->state;
mark_bits<code_block> *code_forwarding_map = &code->allocator->state;
/* Figure out where blocks are going to go */
data_forwarding_map->compute_forwarding();
code_forwarding_map->compute_forwarding();
/* Fingers start at the bottom of each heap and advance as compact()
slides blocks up; the fixup uses them to decide which pointers have
already been forwarded. */
const object *data_finger = tenured->first_block();
const code_block *code_finger = code->allocator->first_block();
compaction_fixup fixup(data_forwarding_map,code_forwarding_map,&data_finger,&code_finger);
slot_visitor<compaction_fixup> data_forwarder(this,fixup);
code_block_visitor<compaction_fixup> code_forwarder(this,fixup);
code_forwarder.visit_code_roots();
/* Object start offsets get recomputed by the object_compaction_updater */
data->tenured->starts.clear_object_start_offsets();
/* Slide everything in tenured space up, and update data and code heap
pointers inside objects. */
object_compaction_updater object_updater(this,fixup);
tenured->compact(object_updater,fixup,&data_finger);
/* Slide everything in the code heap up, and update data and code heap
pointers inside code blocks. */
code_block_compaction_updater<compaction_fixup> code_block_updater(this,fixup,data_forwarder,code_forwarder);
code->allocator->compact(code_block_updater,fixup,&code_finger);
code->update_all_blocks_set(code_forwarding_map);
/* Forward GC roots into the relocated heaps. */
data_forwarder.visit_roots();
/* NOTE(review): the visits below are presumably guarded by
trace_contexts_p on a line outside this view — confirm. */
data_forwarder.visit_contexts();
code_forwarder.visit_context_code_blocks();
update_code_roots_for_compaction();
code->initialize_all_blocks_set();
if(event) event->ended_compaction();
244 struct code_compaction_fixup {
245 mark_bits<code_block> *code_forwarding_map;
246 const code_block **code_finger;
248 explicit code_compaction_fixup(mark_bits<code_block> *code_forwarding_map_,
249 const code_block **code_finger_) :
250 code_forwarding_map(code_forwarding_map_),
251 code_finger(code_finger_) {}
253 object *fixup_data(object *obj)
258 code_block *fixup_code(code_block *compiled)
260 return code_forwarding_map->forward_block(compiled);
263 object *translate_data(const object *obj)
265 return fixup_data((object *)obj);
268 code_block *translate_code(const code_block *compiled)
270 if(compiled >= *code_finger)
271 return fixup_code((code_block *)compiled);
273 return (code_block *)compiled;
276 cell size(object *obj)
281 cell size(code_block *compiled)
283 if(code_forwarding_map->marked_p(compiled))
284 return compiled->size(*this);
286 return code_forwarding_map->unmarked_block_size(compiled);
290 struct object_grow_heap_updater {
291 code_block_visitor<code_compaction_fixup> code_forwarder;
293 explicit object_grow_heap_updater(code_block_visitor<code_compaction_fixup> code_forwarder_) :
294 code_forwarder(code_forwarder_) {}
296 void operator()(object *obj)
298 code_forwarder.visit_object_code_block(obj);
/* Compact just the code heap, after growing the data heap */
void factor_vm::collect_compact_code_impl(bool trace_contexts_p)
/* Figure out where blocks are going to go */
mark_bits<code_block> *code_forwarding_map = &code->allocator->state;
code_forwarding_map->compute_forwarding();
/* The finger starts at the bottom of the code heap and advances as
compact() slides blocks up. */
const code_block *code_finger = code->allocator->first_block();
code_compaction_fixup fixup(code_forwarding_map,&code_finger);
slot_visitor<code_compaction_fixup> data_forwarder(this,fixup);
code_block_visitor<code_compaction_fixup> code_forwarder(this,fixup);
code_forwarder.visit_code_roots();
/* NOTE(review): presumably guarded by trace_contexts_p on a line outside
this view — confirm. */
code_forwarder.visit_context_code_blocks();
/* Update code heap references in data heap */
object_grow_heap_updater object_updater(code_forwarder);
each_object(object_updater);
/* Slide everything in the code heap up, and update code heap
pointers inside code blocks. */
code_block_compaction_updater<code_compaction_fixup> code_block_updater(this,fixup,data_forwarder,code_forwarder);
code->allocator->compact(code_block_updater,fixup,&code_finger);
update_code_roots_for_compaction();
333 void factor_vm::collect_compact(bool trace_contexts_p)
335 collect_mark_impl(trace_contexts_p);
336 collect_compact_impl(trace_contexts_p);
338 if(data->high_fragmentation_p())
340 /* Compaction did not free up enough memory. Grow the heap. */
341 set_current_gc_op(collect_growing_heap_op);
342 collect_growing_heap(0,trace_contexts_p);
345 code->flush_icache();
/* Replace the data heap with a larger one, then re-mark and compact the
code heap so code blocks pick up the new object addresses.
requested_size is the additional room the caller needs (0 = just grow). */
void factor_vm::collect_growing_heap(cell requested_size, bool trace_contexts_p)
/* Grow the data heap and copy all live objects to the new heap. */
data_heap *old = data;   /* NOTE(review): `old` looks unused in this view — presumably deleted after the copy on a line outside it; confirm the old heap is freed */
set_data_heap(data->grow(requested_size));
collect_mark_impl(trace_contexts_p);
collect_compact_code_impl(trace_contexts_p);
code->flush_icache();