5 struct compaction_fixup {
/* Fixup policy used during a full compaction pass: both the tenured data
   heap and the code heap are slid down, so pointers of either kind must be
   rewritten through a per-heap forwarding map computed from the mark bits. */
6 static const bool translated_code_block_map = false;
/* Forwarding maps: mark bits plus precomputed destination addresses for
   every marked block in the data heap / code heap respectively. */
8 mark_bits* data_forwarding_map;
9 mark_bits* code_forwarding_map;
/* Compaction "fingers": addresses below which objects / code blocks have
   already been slid to their final position. translate_data/translate_code
   only forward pointers that lie below the finger. Held by pointer so the
   compaction loop can advance them while this fixup is in use. */
10 const object** data_finger;
11 const code_block** code_finger;
/* All four pointers are borrowed; the caller keeps the maps and the finger
   variables alive for the duration of the compaction pass. */
13 compaction_fixup(mark_bits* data_forwarding_map,
14 mark_bits* code_forwarding_map,
15 const object** data_finger,
16 const code_block** code_finger)
17 : data_forwarding_map(data_forwarding_map),
18 code_forwarding_map(code_forwarding_map),
19 data_finger(data_finger),
20 code_finger(code_finger) {}
/* Unconditionally map a data-heap object to its post-compaction address. */
22 object* fixup_data(object* obj) {
23 return (object*)data_forwarding_map->forward_block((cell)obj);
/* Unconditionally map a code block to its post-compaction address. */
26 code_block* fixup_code(code_block* compiled) {
27 return (code_block*)code_forwarding_map->forward_block((cell)compiled);
/* Forward only if the object lies below the data finger, i.e. in the
   region that has already been compacted; pointers above the finger have
   not moved yet and remain valid as-is. */
30 object* translate_data(const object* obj) {
31 if (obj < *data_finger)
32 return fixup_data((object*)obj);
/* Same finger test, applied to code blocks. */
36 code_block* translate_code(const code_block* compiled) {
37 if (compiled < *code_finger)
38 return fixup_code((code_block*)compiled);
39 return (code_block*)compiled;
/* Size of a data-heap block at the given address: a marked (live) object
   reports its own size; an unmarked address denotes a dead/free run whose
   extent is read from the mark bitmap instead. */
42 cell size(object* obj) {
43 if (data_forwarding_map->marked_p((cell)obj))
44 return obj->size(*this);
45 return data_forwarding_map->unmarked_block_size((cell)obj);
/* Same policy for code-heap blocks. */
48 cell size(code_block* compiled) {
49 if (code_forwarding_map->marked_p((cell)compiled))
50 return compiled->size(*this);
51 return code_forwarding_map->unmarked_block_size((cell)compiled);
55 template <typename Fixup>
/* Rewrite one relocation operand inside a moved code block so it refers to
   post-compaction addresses. NOTE(review): the parameter list is only
   partially visible in this chunk; besides `parent` and `op` it evidently
   also includes the block's `old_entry_point` and a `Fixup fixup`, both
   used below — confirm against the full file. */
56 void update_relocation(factor_vm* parent,
59 instruction_operand op) {
/* Address the operand occupied before the block moved; stored values must
   be loaded from there, not from the operand's new location. */
60 cell old_offset = op.rel_offset() + old_entry_point;
/* The case labels of this switch are elided in this chunk; the bodies are
   commented individually below. */
62 switch (op.rel_type()) {
/* Tagged literal: immediates are stored back unchanged; heap pointers are
   untagged, forwarded through the data map, then re-tagged with the same
   tag bits. */
64 cell value = op.load_value(old_offset);
65 if (immediate_p(value))
66 op.store_value(value);
69 RETAG(fixup.fixup_data(untag<object>(value)), TAG(value)));
/* Entry-point relocations: the stored value is a code block address with a
   small offset carried in the tag bits; forward the block and re-apply the
   offset. */
73 case RT_ENTRY_POINT_PIC:
74 case RT_ENTRY_POINT_PIC_TAIL:
76 cell value = op.load_value(old_offset);
77 cell offset = TAG(value);
78 code_block* compiled = (code_block*)UNTAG(value);
79 op.store_value((cell)fixup.fixup_code(compiled) + offset);
/* Addresses outside the managed heaps are recomputed by the VM rather than
   forwarded through a map. */
85 parent->store_external_address(op);
/* Default: reload the old value and store it back. NOTE(review): this
   presumably relies on store_value re-encoding relative operands against
   the operand's new address — confirm in instruction_operand. */
88 op.store_value(op.load_value(old_offset));
93 template <typename Fixup> struct code_block_compaction_updater {
/* Functor invoked for each live code block as the code heap is compacted:
   fixes up the block's embedded object references and relocation operands
   at its new address. NOTE(review): a `factor_vm* parent` member is
   initialized in the ctor below but its declaration line is not visible in
   this chunk. */
95 slot_visitor<Fixup> forwarder;
97 code_block_compaction_updater(
98 factor_vm* parent, slot_visitor<Fixup> forwarder)
99 : parent(parent), forwarder(forwarder) { }
/* Called once per moved block with its old and new addresses. */
101 void operator()(code_block* old_address, code_block* new_address, cell size) {
/* Forward the heap objects referenced from the block's header/metadata. */
102 forwarder.visit_code_block_objects(new_address);
/* Relocation values were written relative to the old entry point, so they
   must be decoded from there while being re-stored at the new address. */
104 cell old_entry_point = old_address->entry_point();
105 auto update_func = [&](instruction_operand op) {
106 update_relocation(parent, old_entry_point, forwarder.fixup, op);
108 new_address->each_instruction_operand(update_func);
112 /* After a compaction, invalidate any code heap roots which are not
113 marked, and also slide the valid roots up so that call sites can be updated
114 correctly in case an inline cache compilation triggered compaction. */
115 void factor_vm::update_code_roots_for_compaction() {
/* Mark/forwarding state of the code heap allocator, freshly computed by
   the compaction pass. */
117 mark_bits* state = &code->allocator->state;
119 FACTOR_FOR_EACH(code_roots) {
120 code_root* root = *iter;
/* Round the root down to its allocation line: mark/forwarding bits are
   tracked per aligned block, not per byte. */
121 cell block = root->value & (~data_alignment + 1);
123 /* Offset of return address within 16-byte allocation line */
124 cell offset = root->value - block;
/* Live root: substitute the block's forwarded address and re-apply the
   in-block offset. (The unmarked case — invalidating the root, per the
   comment above — is elided from this chunk.) */
126 if (root->valid && state->marked_p(block)) {
127 block = state->forward_block(block);
128 root->value = block + offset;
134 /* Compact data and code heaps */
135 void factor_vm::collect_compact_impl() {
/* Event object for GC instrumentation; start/end notifications bracket the
   pass below. */
136 gc_event* event = current_gc->event;
/* Sanity check the code heap bookkeeping before anything moves. */
139 code->verify_all_blocks_set();
143 event->started_compaction();
145 tenured_space* tenured = data->tenured;
146 mark_bits* data_forwarding_map = &tenured->state;
147 mark_bits* code_forwarding_map = &code->allocator->state;
149 /* Figure out where blocks are going to go */
150 data_forwarding_map->compute_forwarding();
151 code_forwarding_map->compute_forwarding();
/* Fingers start at the bottom of each heap: nothing has moved yet, so the
   fixup's translate_data/translate_code initially forward nothing. They are
   advanced by the compact() calls below. */
153 const object* data_finger = (object*)tenured->start;
154 const code_block* code_finger = (code_block*)code->allocator->start;
157 compaction_fixup fixup(data_forwarding_map, code_forwarding_map, &data_finger,
159 slot_visitor<compaction_fixup> forwarder(this, fixup);
161 forwarder.visit_uninitialized_code_blocks();
163 /* Object start offsets get recomputed by the object_compaction_updater */
164 data->tenured->starts.clear_object_start_offsets();
166 /* Slide everything in tenured space up, and update data and code heap
167 pointers inside objects. */
168 auto compact_object_func = [&](object* old_addr, object* new_addr, cell size) {
169 forwarder.visit_slots(new_addr);
170 forwarder.visit_object_code_block(new_addr);
171 tenured->starts.record_object_start_offset(new_addr);
173 tenured->compact(compact_object_func, fixup, &data_finger);
175 /* Slide everything in the code heap up, and update data and code heap
176 pointers inside code blocks. */
178 code_block_compaction_updater<compaction_fixup> code_block_updater(
180 code->allocator->compact(code_block_updater, fixup, &code_finger);
/* Both heaps are now compacted; rewrite every pointer in the root set and
   in contexts. */
183 forwarder.visit_all_roots();
184 forwarder.visit_context_code_blocks();
187 update_code_roots_for_compaction();
/* Rebuild the all-blocks set to reflect the new code block addresses. */
190 code->initialize_all_blocks_set();
193 event->ended_compaction();
196 void factor_vm::collect_compact() {
/* Run a full compaction of both heaps. */
198 collect_compact_impl();
200 if (data->high_fragmentation_p()) {
201 /* Compaction did not free up enough memory. Grow the heap. */
202 set_current_gc_op(collect_growing_heap_op);
203 collect_growing_heap(0);
/* Code blocks moved; flush so the instruction cache cannot serve stale
   lines at the old addresses. */
206 code->flush_icache();
209 void factor_vm::collect_growing_heap(cell requested_size) {
210 /* Grow the data heap and copy all live objects to the new heap. */
/* Keep the old heap reachable while live objects are migrated by the
   compaction below. NOTE(review): its disposal is not visible in this
   chunk — confirm the old heap is freed after the copy. */
211 data_heap* old = data;
212 set_data_heap(data->grow(&nursery, requested_size));
214 collect_compact_impl();
/* Compaction moved code blocks; invalidate stale instruction cache lines. */
215 code->flush_icache();