5 struct compaction_fixup {
6 static const bool translated_code_block_map = false;
8 mark_bits* data_forwarding_map;
9 mark_bits* code_forwarding_map;
10 const object** data_finger;
11 const code_block** code_finger;
13 compaction_fixup(mark_bits* data_forwarding_map,
14 mark_bits* code_forwarding_map,
15 const object** data_finger,
16 const code_block** code_finger)
17 : data_forwarding_map(data_forwarding_map),
18 code_forwarding_map(code_forwarding_map),
19 data_finger(data_finger),
20 code_finger(code_finger) {}
22 object* fixup_data(object* obj) {
23 return (object*)data_forwarding_map->forward_block((cell)obj);
26 code_block* fixup_code(code_block* compiled) {
27 return (code_block*)code_forwarding_map->forward_block((cell)compiled);
30 object* translate_data(const object* obj) {
31 if (obj < *data_finger)
32 return fixup_data((object*)obj);
36 code_block* translate_code(const code_block* compiled) {
37 if (compiled < *code_finger)
38 return fixup_code((code_block*)compiled);
39 return (code_block*)compiled;
42 cell size(object* obj) {
43 if (data_forwarding_map->marked_p((cell)obj))
44 return obj->size(*this);
45 return data_forwarding_map->unmarked_block_size((cell)obj);
48 cell size(code_block* compiled) {
49 if (code_forwarding_map->marked_p((cell)compiled))
50 return compiled->size(*this);
51 return code_forwarding_map->unmarked_block_size((cell)compiled);
55 /* After a compaction, invalidate any code heap roots which are not
56 marked, and also slide the valid roots up so that call sites can be updated
57 correctly in case an inline cache compilation triggered compaction. */
58 void factor_vm::update_code_roots_for_compaction() {
60 mark_bits* state = &code->allocator->state;
62 FACTOR_FOR_EACH(code_roots) {
63 code_root* root = *iter;
64 cell block = root->value & (~data_alignment + 1);
66 /* Offset of return address within 16-byte allocation line */
67 cell offset = root->value - block;
69 if (root->valid && state->marked_p(block)) {
70 block = state->forward_block(block);
71 root->value = block + offset;
77 /* Compact data and code heaps */
78 void factor_vm::collect_compact_impl() {
/* NOTE(review): event is dereferenced below without a null check —
   confirm current_gc->event is guaranteed non-null here, or that a
   guard was lost from this span. */
79 gc_event* event = current_gc->event;
/* Sanity check the code heap's block bookkeeping before moving anything. */
82 code->verify_all_blocks_set();
86 event->started_compaction();
88 tenured_space* tenured = data->tenured;
/* Mark bitmaps double as forwarding tables for both heaps. */
89 mark_bits* data_forwarding_map = &tenured->state;
90 mark_bits* code_forwarding_map = &code->allocator->state;
92 /* Figure out where blocks are going to go */
93 data_forwarding_map->compute_forwarding();
94 code_forwarding_map->compute_forwarding();
/* Fingers start at the bottom of each heap; they advance as blocks are
   slid down, so the fixup can tell moved from not-yet-moved addresses. */
96 const object* data_finger = (object*)tenured->start;
97 const code_block* code_finger = (code_block*)code->allocator->start;
/* NOTE(review): the continuation of this constructor call (passing
   &code_finger) appears to have been lost from this span. */
100 compaction_fixup fixup(data_forwarding_map, code_forwarding_map, &data_finger,
102 slot_visitor<compaction_fixup> forwarder(this, fixup);
104 forwarder.visit_uninitialized_code_blocks();
106 /* Object start offsets get recomputed by the object_compaction_updater */
107 data->tenured->starts.clear_object_start_offsets();
109 /* Slide everything in tenured space up, and update data and code heap
110 pointers inside objects. */
111 auto compact_object_func = [&](object* old_addr, object* new_addr, cell size) {
112 forwarder.visit_slots(new_addr);
113 forwarder.visit_object_code_block(new_addr);
/* Rebuild the start-offset table as each object lands at its new address. */
114 tenured->starts.record_object_start_offset(new_addr);
116 tenured->compact(compact_object_func, fixup, &data_finger);
118 /* Slide everything in the code heap up, and update data and code heap
119 pointers inside code blocks. */
120 auto compact_code_func = [&](code_block* old_addr,
121 code_block* new_addr,
123 forwarder.visit_code_block_objects(new_addr);
/* Instruction operands are rewritten relative to the block's old entry
   point, captured before the move. */
124 cell old_entry_point = old_addr->entry_point();
125 forwarder.visit_instruction_operands(new_addr, old_entry_point);
127 code->allocator->compact(compact_code_func, fixup, &code_finger);
/* With both heaps compacted, forward every remaining root. */
129 forwarder.visit_all_roots();
130 forwarder.visit_context_code_blocks();
133 update_code_roots_for_compaction();
135 /* Each callback has a relocation with a pointer to a code block in
136 the code heap. Since the code heap has now been compacted, those
137 pointers are invalid and we need to update them. */
138 auto callback_updater = [&](code_block* stub, cell size) {
139 callbacks->update(stub);
141 callbacks->allocator->iterate(callback_updater);
/* Rebuild the all-blocks set to reflect the new code heap layout. */
143 code->initialize_all_blocks_set();
146 event->ended_compaction();
/* Entry point for a compacting collection: compact both heaps, then
   grow the data heap if compaction alone did not relieve fragmentation. */
149 void factor_vm::collect_compact() {
151 collect_compact_impl();
153 if (data->high_fragmentation_p()) {
154 /* Compaction did not free up enough memory. Grow the heap. */
155 set_current_gc_op(collect_growing_heap_op);
/* 0 = no specific size request; grow by the heap's own policy. */
156 collect_growing_heap(0);
/* Code blocks moved, so the instruction cache must be flushed before
   any compiled code runs again. */
159 code->flush_icache();
162 void factor_vm::collect_growing_heap(cell requested_size) {
163 /* Grow the data heap and copy all live objects to the new heap. */
/* NOTE(review): `old` is saved but never freed in this span — confirm
   the old heap is deleted after the copy (a `delete old;` may have been
   lost from view); otherwise this leaks the entire previous data heap. */
164 data_heap* old = data;
/* requested_size of 0 lets grow() pick the new size itself. */
165 set_data_heap(data->grow(&nursery, requested_size));
167 collect_compact_impl();
/* Compaction relocated code blocks; invalidate stale instructions. */
168 code->flush_icache();