#include "master.hpp"

namespace factor
{

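/* The full collector performs a mark-sweep collection of the tenured
space and the code heap together; the mark and sweep passes are driven
by collect_mark_impl() and collect_sweep_impl() below */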
full_collector::full_collector(factor_vm *parent_) :
        collector<tenured_space,full_policy>(parent_,parent_->data->tenured,full_policy(parent_)),
        code_visitor(parent,workhorse) {}

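/* Trace everything a code block refers to: the object fields in its
header, the literals embedded in its machine code, and the other code
blocks it calls */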
void full_collector::trace_code_block(code_block *compiled)
{
        data_visitor.visit_code_block_objects(compiled);
        data_visitor.visit_embedded_literals(compiled);
        code_visitor.visit_embedded_code_pointers(compiled);
}

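/* Mark the code blocks referenced from the active contexts' callstacks */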
void full_collector::trace_context_code_blocks()
{
        code_visitor.visit_context_code_blocks();
}

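/* Mark code blocks that have been allocated but not yet fully initialized */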
void full_collector::trace_uninitialized_code_blocks()
{
        code_visitor.visit_uninitialized_code_blocks();
}

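/* If the object has an associated compiled code block (words and
quotations do), mark it as well */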
void full_collector::trace_object_code_block(object *obj)
{
        code_visitor.visit_object_code_block(obj);
}

/* After a sweep, invalidate any code heap roots which are not marked,
so that if a block makes a tail call to a generic word, and the PIC
compiler triggers a GC, and the caller block gets GCd as a result,
the PIC code won't try to overwrite the call site */
void factor_vm::update_code_roots_for_sweep()
{
        std::vector<code_root *>::const_iterator iter = code_roots.begin();
        std::vector<code_root *>::const_iterator end = code_roots.end();

        mark_bits<code_block> *state = &code->allocator->state;

        for(; iter < end; iter++)
        {
                code_root *root = *iter;
                code_block *block = (code_block *)(root->value & (~data_alignment + 1));
                if(root->valid && !state->marked_p(block))
                        root->valid = false;
        }
}

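/* Mark phase of a full GC: clear all mark bits, trace the roots (and the
contexts, if requested), then drain the mark stack until it is empty */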
void factor_vm::collect_mark_impl(bool trace_contexts_p)
{
        full_collector collector(this);

        mark_stack.clear();

        code->clear_mark_bits();
        data->tenured->clear_mark_bits();

        collector.trace_roots();
        if(trace_contexts_p)
        {
                collector.trace_contexts();
                collector.trace_context_code_blocks();
                collector.trace_uninitialized_code_blocks();
        }

        while(!mark_stack.empty())
        {
                cell ptr = mark_stack.back();
                mark_stack.pop_back();

                /* Mark stack entries with the low bit set are code block
                pointers; everything else is a data heap object */
                if(ptr & 1)
                {
                        code_block *compiled = (code_block *)(ptr - 1);
                        collector.trace_code_block(compiled);
                }
                else
                {
                        object *obj = (object *)ptr;
                        collector.trace_object(obj);
                        collector.trace_object_code_block(obj);
                }
        }

        data->reset_generation(data->tenured);
        data->reset_generation(data->aging);
        data->reset_generation(&nursery);
        code->clear_remembered_set();
}

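/* Sweep phase of a full GC: sweep the data heap's tenured space, then
invalidate stale code roots, then sweep the code heap */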
void factor_vm::collect_sweep_impl()
{
        current_gc->event->started_data_sweep();
        data->tenured->sweep();
        current_gc->event->ended_data_sweep();

        update_code_roots_for_sweep();

        current_gc->event->started_code_sweep();
        code->allocator->sweep();
        current_gc->event->ended_code_sweep();
}

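/* A full GC is a mark pass followed by a sweep pass. If the heap is still
low on memory afterwards, grow it; if it is badly fragmented, compact it.
Finally, flush the instruction cache, since code blocks may have been freed
or moved */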
void factor_vm::collect_full(bool trace_contexts_p)
{
        collect_mark_impl(trace_contexts_p);
        collect_sweep_impl();

        if(data->low_memory_p())
        {
                current_gc->op = collect_growing_heap_op;
                current_gc->event->op = collect_growing_heap_op;
                collect_growing_heap(0,trace_contexts_p);
        }
        else if(data->high_fragmentation_p())
        {
                current_gc->op = collect_compact_op;
                current_gc->event->op = collect_compact_op;
                collect_compact_impl(trace_contexts_p);
        }

        code->flush_icache();
}

}