vm/full_collector.cpp
#include "master.hpp"

namespace factor
{

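/* Helper for constructing the code block visitor that the full collector
uses to trace references held in the code heap. */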
inline static code_block_visitor<code_workhorse> make_code_visitor(factor_vm *parent)
{
        return code_block_visitor<code_workhorse>(parent,code_workhorse(parent));
}

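/* A full collection operates on the tenured generation, with marking
behavior supplied by full_policy; code_visitor is used to trace code
blocks reachable from data heap objects and contexts. */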
full_collector::full_collector(factor_vm *parent_) :
        collector<tenured_space,full_policy>(
                parent_,
                parent_->data->tenured,
                full_policy(parent_)),
        code_visitor(make_code_visitor(parent_)) {}

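/* Trace everything reachable from a code block: the objects it references
directly, the literals embedded in its machine code, and the other code
blocks it calls. */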
void full_collector::trace_code_block(code_block *compiled)
{
        data_visitor.visit_code_block_objects(compiled);
        data_visitor.visit_embedded_literals(compiled);
        code_visitor.visit_embedded_code_pointers(compiled);
}

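/* Mark the code blocks that active contexts are currently executing, so
that return addresses on their callstacks stay valid. */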
void full_collector::trace_context_code_blocks()
{
        code_visitor.visit_context_code_blocks();
}

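/* If the object owns a compiled code block (as words and quotations do),
mark that block as well. */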
void full_collector::trace_object_code_block(object *obj)
{
        code_visitor.visit_object_code_block(obj);
}

/* After a sweep, invalidate any code heap roots which are not marked,
so that if a block makes a tail call to a generic word, and the PIC
compiler triggers a GC, and the caller block gets GCd as a result,
the PIC code won't try to overwrite the call site */
void factor_vm::update_code_roots_for_sweep()
{
        std::vector<code_root *>::const_iterator iter = code_roots.begin();
        std::vector<code_root *>::const_iterator end = code_roots.end();

        mark_bits<code_block> *state = &code->allocator->state;

        for(; iter < end; iter++)
        {
                code_root *root = *iter;
                code_block *block = (code_block *)(root->value & -data_alignment);
                if(root->valid && !state->marked_p(block))
                        root->valid = false;
        }
}

/* After a compaction, invalidate any code heap roots which are not
marked as above, and also slide the valid roots up so that call sites
can be updated correctly. */
void factor_vm::update_code_roots_for_compaction()
{
        std::vector<code_root *>::const_iterator iter = code_roots.begin();
        std::vector<code_root *>::const_iterator end = code_roots.end();

        mark_bits<code_block> *state = &code->allocator->state;

        for(; iter < end; iter++)
        {
                code_root *root = *iter;
                code_block *block = (code_block *)(root->value & -data_alignment);

                /* Offset of return address within 16-byte allocation line */
                cell offset = root->value - (cell)block;

                if(root->valid && state->marked_p(block))
                {
                        block = state->forward_block(block);
                        root->value = (cell)block + offset;
                }
                else
                        root->valid = false;
        }
}

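/* Mark phase of a full collection: trace the roots (and, if requested,
the active contexts), then drain the mark stack until no unvisited
objects or code blocks remain. */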
void factor_vm::collect_mark_impl(bool trace_contexts_p)
{
        full_collector collector(this);

        mark_stack.clear();

        code->clear_mark_bits();
        data->tenured->clear_mark_bits();

        collector.trace_roots();
        if(trace_contexts_p)
        {
                collector.trace_contexts();
                collector.trace_context_code_blocks();
        }

        while(!mark_stack.empty())
        {
                cell ptr = mark_stack.back();
                mark_stack.pop_back();

                /* Mark stack entries are tagged: an odd value is a code
                block pointer with the low bit set, an even value is a
                data heap object pointer */
                if(ptr & 1)
                {
                        code_block *compiled = (code_block *)(ptr - 1);
                        collector.trace_code_block(compiled);
                }
                else
                {
                        object *obj = (object *)ptr;
                        collector.trace_object(obj);
                        collector.trace_object_code_block(obj);
                }
        }

        data->reset_generation(data->tenured);
        data->reset_generation(data->aging);
        data->reset_generation(&nursery);
        code->clear_remembered_set();
}

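/* Sweep phase: reclaim unmarked objects in the tenured space and unmarked
blocks in the code heap. Code heap roots are updated in between, since the
code sweep frees any block left unmarked by the mark phase. */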
void factor_vm::collect_sweep_impl()
{
        current_gc->event->started_data_sweep();
        data->tenured->sweep();
        current_gc->event->ended_data_sweep();

        update_code_roots_for_sweep();

        current_gc->event->started_code_sweep();
        code->allocator->sweep();
        current_gc->event->ended_code_sweep();
}

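/* A full GC is a mark pass followed by a sweep pass; if the heap is still
low on memory afterwards, the collection is upgraded to a compacting
collection. The instruction cache is flushed at the end because code
blocks may have been freed or moved. */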
void factor_vm::collect_full(bool trace_contexts_p)
{
        collect_mark_impl(trace_contexts_p);
        collect_sweep_impl();
        if(data->low_memory_p())
        {
                current_gc->op = collect_compact_op;
                current_gc->event->op = collect_compact_op;
                collect_compact_impl(trace_contexts_p);
        }
        code->flush_icache();
}

}