#include "master.hpp"

namespace factor {

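/* Fixup policy used while compacting the heaps. The forwarding maps
   record where every marked block will slide to, and the fingers track
   how far each compaction pass has progressed, so a pointer can be
   translated differently depending on whether its target has already
   been moved. */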
struct compaction_fixup {
  static const bool translated_code_block_map = false;

  mark_bits* data_forwarding_map;
  mark_bits* code_forwarding_map;
  const object** data_finger;
  const code_block** code_finger;

  compaction_fixup(mark_bits* data_forwarding_map,
                   mark_bits* code_forwarding_map,
                   const object** data_finger,
                   const code_block** code_finger)
      : data_forwarding_map(data_forwarding_map),
        code_forwarding_map(code_forwarding_map),
        data_finger(data_finger),
        code_finger(code_finger) {}

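  /* Forward a pointer through the heap's forwarding map, yielding the
     block's address after compaction. */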
  object* fixup_data(object* obj) {
    return (object*)data_forwarding_map->forward_block((cell)obj);
  }

  code_block* fixup_code(code_block* compiled) {
    return (code_block*)code_forwarding_map->forward_block((cell)compiled);
  }

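  /* Pointers below the finger refer to blocks that have already been
     slid to their final addresses and must be forwarded; pointers at
     or above it still refer to blocks at their original addresses. */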
  object* translate_data(const object* obj) {
    if (obj < *data_finger)
      return fixup_data((object*)obj);
    return (object*)obj;
  }

  code_block* translate_code(const code_block* compiled) {
    if (compiled < *code_finger)
      return fixup_code((code_block*)compiled);
    return (code_block*)compiled;
  }

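  /* Block size during compaction: marked blocks report their own size,
     while the extent of an unmarked (dead) block is read from the mark
     bitmap, since its header may already have been overwritten by
     blocks slid over it. */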
  cell size(object* obj) {
    if (data_forwarding_map->marked_p((cell)obj))
      return obj->size(*this);
    return data_forwarding_map->unmarked_block_size((cell)obj);
  }

  cell size(code_block* compiled) {
    if (code_forwarding_map->marked_p((cell)compiled))
      return compiled->size(*this);
    return code_forwarding_map->unmarked_block_size((cell)compiled);
  }
};

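/* Rewrite a single relocation entry of a code block after compaction,
   forwarding data heap and code heap references through the fixup and
   recomputing addresses that do not survive a move. */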
template <typename Fixup>
void update_relocation(factor_vm* parent,
                       cell old_entry_point,
                       Fixup fixup,
                       instruction_operand op) {
  cell old_offset = op.rel_offset() + old_entry_point;

  switch (op.rel_type()) {
    case RT_LITERAL: {
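      /* A tagged pointer into the data heap: forward the payload
         through the data forwarding map and re-apply the original tag.
         Immediate values are stored back unchanged. */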
      cell value = op.load_value(old_offset);
      if (immediate_p(value))
        op.store_value(value);
      else
        op.store_value(
            RETAG(fixup.fixup_data(untag<object>(value)), TAG(value)));
      break;
    }
    case RT_ENTRY_POINT:
    case RT_ENTRY_POINT_PIC:
    case RT_ENTRY_POINT_PIC_TAIL:
    case RT_HERE: {
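      /* An untagged code block pointer with a small offset packed into
         the tag bits: forward the block and add the offset back on. */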
      cell value = op.load_value(old_offset);
      cell offset = TAG(value);
      code_block* compiled = (code_block*)UNTAG(value);
      op.store_value((cell)fixup.fixup_code(compiled) + offset);
      break;
    }
    case RT_THIS:
    case RT_CARDS_OFFSET:
    case RT_DECKS_OFFSET:
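      /* These values depend on the block's new location or on VM
         globals (card/deck offsets), so they are recomputed rather
         than forwarded. */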
      parent->store_external_address(op);
      break;
    default:
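      /* The remaining relocation types are position-relative;
         re-storing the old absolute value re-encodes it against the
         operand's new location. */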
      op.store_value(op.load_value(old_offset));
      break;
  }
}

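/* Visitor passed to the code heap compactor: for each code block slid
   to its new address, re-visit the data heap objects it references and
   rewrite each relocation entry, using the old entry point to locate
   the operands' previous values. */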
template <typename Fixup> struct code_block_compaction_updater {
  factor_vm* parent;
  slot_visitor<Fixup> forwarder;

  code_block_compaction_updater(
      factor_vm* parent, slot_visitor<Fixup> forwarder)
      : parent(parent), forwarder(forwarder) { }

  void operator()(code_block* old_address, code_block* new_address, cell size) {
    forwarder.visit_code_block_objects(new_address);

    cell old_entry_point = old_address->entry_point();
    auto update_func = [&](instruction_operand op) {
      update_relocation(parent, old_entry_point, forwarder.fixup, op);
    };
    new_address->each_instruction_operand(update_func);
  }
};

/* After a compaction, invalidate any code heap roots which are not
   marked, and also slide the valid roots up so that call sites can be
   updated correctly in case an inline cache compilation triggered
   compaction. */
void factor_vm::update_code_roots_for_compaction() {

  mark_bits* state = &code->allocator->state;

  FACTOR_FOR_EACH(code_roots) {
    code_root* root = *iter;
    cell block = root->value & (~data_alignment + 1);

    /* Offset of return address within 16-byte allocation line */
    cell offset = root->value - block;

    if (root->valid && state->marked_p(block)) {
      block = state->forward_block(block);
      root->value = block + offset;
    } else
      root->valid = false;
  }
}

/* Compact data and code heaps */
void factor_vm::collect_compact_impl() {
  gc_event* event = current_gc->event;

#ifdef FACTOR_DEBUG
  code->verify_all_blocks_set();
#endif

  if (event)
    event->started_compaction();

  tenured_space* tenured = data->tenured;
  mark_bits* data_forwarding_map = &tenured->state;
  mark_bits* code_forwarding_map = &code->allocator->state;

  /* Figure out where blocks are going to go */
  data_forwarding_map->compute_forwarding();
  code_forwarding_map->compute_forwarding();

  const object* data_finger = (object*)tenured->start;
  const code_block* code_finger = (code_block*)code->allocator->start;

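  /* The fingers start at the bottom of each heap and advance as the
     compaction passes sweep upwards; translate_data() and
     translate_code() use them to decide whether a given pointer has
     been forwarded yet. */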
  {
    compaction_fixup fixup(data_forwarding_map, code_forwarding_map,
                           &data_finger, &code_finger);
    slot_visitor<compaction_fixup> forwarder(this, fixup);

    forwarder.visit_uninitialized_code_blocks();

    /* Object start offsets get recomputed by compact_object_func below */
    data->tenured->starts.clear_object_start_offsets();

    /* Slide everything in tenured space up, and update data and code heap
       pointers inside objects. */
    auto compact_object_func = [&](object* old_addr, object* new_addr,
                                   cell size) {
      forwarder.visit_slots(new_addr);
      forwarder.visit_object_code_block(new_addr);
      tenured->starts.record_object_start_offset(new_addr);
    };
    tenured->compact(compact_object_func, fixup, &data_finger);

    /* Slide everything in the code heap up, and update data and code heap
       pointers inside code blocks. */
    {
      code_block_compaction_updater<compaction_fixup> code_block_updater(
          this, forwarder);
      code->allocator->compact(code_block_updater, fixup, &code_finger);
    }

    forwarder.visit_all_roots();
    forwarder.visit_context_code_blocks();
  }

  update_code_roots_for_compaction();
  callbacks->update();

  code->initialize_all_blocks_set();

  if (event)
    event->ended_compaction();
}

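/* Entry point for a compacting collection: run a full mark pass, then
   compact both heaps in place. If the data heap is still badly
   fragmented afterwards, fall back to growing it. */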
void factor_vm::collect_compact() {
  collect_mark_impl();
  collect_compact_impl();

  if (data->high_fragmentation_p()) {
    /* Compaction did not free up enough memory. Grow the heap. */
    set_current_gc_op(collect_growing_heap_op);
    collect_growing_heap(0);
  }

  code->flush_icache();
}

void factor_vm::collect_growing_heap(cell requested_size) {
  /* Grow the data heap and copy all live objects to the new heap. */
  data_heap* old = data;
  set_data_heap(data->grow(&nursery, requested_size));
  collect_mark_impl();
  collect_compact_impl();
  code->flush_icache();
  delete old;
}

}