#include "master.hpp"

namespace factor {

full_collector::full_collector(factor_vm* parent_)
    : collector<tenured_space, full_policy>(parent_, parent_->data->tenured,
                                            full_policy(parent_)),
      code_visitor(parent, workhorse) {}

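/* Mark everything a code block refers to: the objects referenced from its
   header fields, any object literals embedded in its machine code, and the
   other code blocks it points to. */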
void full_collector::trace_code_block(code_block* compiled) {
  data_visitor.visit_code_block_objects(compiled);
  data_visitor.visit_embedded_literals(compiled);
  code_visitor.visit_embedded_code_pointers(compiled);
}

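/* The next three wrappers forward to the code heap visitor: they mark code
   blocks reachable from the active contexts, from registered code roots, and
   from a single object's compiled code, respectively. */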
void full_collector::trace_context_code_blocks() {
  code_visitor.visit_context_code_blocks();
}

void full_collector::trace_code_roots() { code_visitor.visit_code_roots(); }

void full_collector::trace_object_code_block(object* obj) {
  code_visitor.visit_object_code_block(obj);
}

/* After a sweep, invalidate any code heap roots which are not marked,
   so that if a block makes a tail call to a generic word, and the PIC
   compiler triggers a GC, and the caller block gets GCd as a result,
   the PIC code won't try to overwrite the call site */
void factor_vm::update_code_roots_for_sweep() {
  std::vector<code_root*>::const_iterator iter = code_roots.begin();
  std::vector<code_root*>::const_iterator end = code_roots.end();

  mark_bits<code_block>* state = &code->allocator->state;

  for (; iter < end; iter++) {
    code_root* root = *iter;
    code_block* block = (code_block*)(root->value & ~(data_alignment - 1));
    if (root->valid && !state->marked_p(block))
      root->valid = false;
  }
}

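/* Mark phase of a full collection: clear the mark bits of the code heap and
   tenured space, trace the roots (and the contexts, if requested), then drain
   the mark stack until every reachable object and code block has been
   visited. */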
void factor_vm::collect_mark_impl(bool trace_contexts_p) {
  full_collector collector(this);

  mark_stack.clear();

  code->clear_mark_bits();
  data->tenured->clear_mark_bits();

  collector.trace_roots();
  if (trace_contexts_p) {
    collector.trace_contexts();
    collector.trace_context_code_blocks();
    collector.trace_code_roots();
  }

  while (!mark_stack.empty()) {
    cell ptr = mark_stack.back();
    mark_stack.pop_back();

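    /* Entries on the mark stack are distinguished by their low bit: code
       block addresses are pushed with bit zero set, object addresses with it
       clear. */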
    if (ptr & 1) {
      code_block* compiled = (code_block*)(ptr - 1);
      collector.trace_code_block(compiled);
    } else {
      object* obj = (object*)ptr;
      collector.trace_object(obj);
      collector.trace_object_code_block(obj);
    }
  }

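  /* Survivors from the nursery and aging spaces were promoted into tenured
     space during tracing, so the generations are reset and the code heap's
     remembered set cleared. */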
  data->reset_generation(data->tenured);
  data->reset_generation(data->aging);
  data->reset_generation(&nursery);
  code->clear_remembered_set();
}

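/* Sweep phase of a full collection: sweep the tenured data heap, invalidate
   code roots whose blocks did not survive, then sweep the code heap. The
   gc_event calls, when an event is being recorded, note the timing of each
   sweep. */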
void factor_vm::collect_sweep_impl() {
  gc_event* event = current_gc->event;

  if (event)
    event->started_data_sweep();
  data->tenured->sweep();
  if (event)
    event->ended_data_sweep();

  update_code_roots_for_sweep();

  if (event)
    event->started_code_sweep();
  code->sweep();
  if (event)
    event->ended_code_sweep();
}

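/* A full garbage collection is a mark pass followed by a sweep pass. If the
   heap is still low on memory afterwards, it is grown; if there is enough
   free space but it is too fragmented, the tenured space is compacted. The
   instruction cache is then flushed. */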
void factor_vm::collect_full(bool trace_contexts_p) {
  collect_mark_impl(trace_contexts_p);
  collect_sweep_impl();

  if (data->low_memory_p()) {
    /* Full GC did not free up enough memory. Grow the heap. */
    set_current_gc_op(collect_growing_heap_op);
    collect_growing_heap(0, trace_contexts_p);
  } else if (data->high_fragmentation_p()) {
    /* Enough free memory, but it is not contiguous. Perform a
       compaction. */
    set_current_gc_op(collect_compact_op);
    collect_compact_impl(trace_contexts_p);
  }

  code->flush_icache();
}

}