]> gitweb.factorcode.org Git - factor.git/blob - vm/full_collector.cpp
vm: mark sweep now traces code block references; rename embedded_pointer to instructi...
[factor.git] / vm / full_collector.cpp
1 #include "master.hpp"
2
3 namespace factor
4 {
5
/* Build the code block visitor used by the full collector. The visitor
walks code blocks with a code_workhorse functor (defined elsewhere;
presumably it performs the per-block marking work — confirm in
code_block_visitor's definition). */
inline static code_block_visitor<code_workhorse> make_code_visitor(factor_vm *parent)
{
        return code_block_visitor<code_workhorse>(parent,code_workhorse(parent));
}
10
/* A full collection copies/marks into tenured space under full_policy,
and additionally traces the code heap via code_visitor. */
full_collector::full_collector(factor_vm *parent_) :
        collector<tenured_space,full_policy>(
                parent_,
                parent_->data->tenured,
                full_policy(parent_)),
        code_visitor(make_code_visitor(parent_)) {}
17
/* Trace one code block: visit the data-heap literals it references,
then the other code blocks it references. */
void full_collector::trace_code_block(code_block *compiled)
{
        data_visitor.visit_referenced_literals(compiled);
        code_visitor.visit_referenced_code_blocks(compiled);
}
23
/* Trace code blocks reachable from active contexts (delegates to the
code visitor). */
void full_collector::trace_context_code_blocks()
{
        code_visitor.visit_context_code_blocks();
}
28
/* Trace code blocks reachable from registered callbacks (delegates to
the code visitor). */
void full_collector::trace_callback_code_blocks()
{
        code_visitor.visit_callback_code_blocks();
}
33
/* Trace the code block associated with a single data-heap object, if
it has one (delegates to the code visitor). */
void full_collector::trace_object_code_block(object *obj)
{
        code_visitor.visit_object_code_block(obj);
}
38
39 /* After a sweep, invalidate any code heap roots which are not marked,
40 so that if a block makes a tail call to a generic word, and the PIC
41 compiler triggers a GC, and the caller block gets gets GCd as a result,
42 the PIC code won't try to overwrite the call site */
43 void factor_vm::update_code_roots_for_sweep()
44 {
45         std::vector<code_root *>::const_iterator iter = code_roots.begin();
46         std::vector<code_root *>::const_iterator end = code_roots.end();
47
48         mark_bits<code_block> *state = &code->allocator->state;
49
50         for(; iter < end; iter++)
51         {
52                 code_root *root = *iter;
53                 code_block *block = (code_block *)(root->value & -block_granularity);
54                 if(root->valid && !state->marked_p(block))
55                         root->valid = false;
56         }
57 }
58
59 /* After a compaction, invalidate any code heap roots which are not
60 marked as above, and also slide the valid roots up so that call sites
61 can be updated correctly. */
62 void factor_vm::update_code_roots_for_compaction()
63 {
64         std::vector<code_root *>::const_iterator iter = code_roots.begin();
65         std::vector<code_root *>::const_iterator end = code_roots.end();
66
67         mark_bits<code_block> *state = &code->allocator->state;
68
69         for(; iter < end; iter++)
70         {
71                 code_root *root = *iter;
72                 code_block *block = (code_block *)(root->value & -block_granularity);
73
74                 /* Offset of return address within 16-byte allocation line */
75                 cell offset = root->value - (cell)block;
76
77                 if(root->valid && state->marked_p((code_block *)root->value))
78                 {
79                         block = state->forward_block(block);
80                         root->value = (cell)block + offset;
81                 }
82                 else
83                         root->valid = false;
84         }
85 }
86
/* Mark phase of a full collection: clear all mark bits, trace roots
(and optionally contexts/callbacks), then drain the mark stack.
Entries on the mark stack are tagged: a cell with the low bit set is a
code_block pointer (stored as address+1), otherwise it is a data-heap
object pointer. Afterwards all generations are reset and the code
heap's remembered set is cleared. */
void factor_vm::collect_mark_impl(bool trace_contexts_p)
{
        full_collector collector(this);

        mark_stack.clear();

        /* Start from a clean slate: nothing in either heap is marked */
        code->clear_mark_bits();
        data->tenured->clear_mark_bits();

        collector.trace_roots();
        if(trace_contexts_p)
        {
                collector.trace_contexts();
                collector.trace_context_code_blocks();
                collector.trace_callback_code_blocks();
        }

        /* Iteratively trace everything reachable from the roots */
        while(!mark_stack.empty())
        {
                cell ptr = mark_stack.back();
                mark_stack.pop_back();

                if(ptr & 1)
                {
                        /* Low tag bit set: this is a code block, with the
                        tag added to the address; strip it off */
                        code_block *compiled = (code_block *)(ptr - 1);
                        collector.trace_code_block(compiled);
                }
                else
                {
                        /* Untagged: a data-heap object */
                        object *obj = (object *)ptr;
                        collector.trace_object(obj);
                        collector.trace_object_code_block(obj);
                }
        }

        /* All live data is now in tenured space; empty the younger
        generations and the code heap's remembered set */
        data->reset_generation(data->tenured);
        data->reset_generation(data->aging);
        data->reset_generation(&nursery);
        code->clear_remembered_set();
}
127
/* Sweep phase of a full collection: reclaim unmarked tenured objects,
invalidate code roots whose blocks died, then reclaim unmarked code
blocks. GC event timestamps bracket each sweep for profiling. */
void factor_vm::collect_sweep_impl()
{
        current_gc->event->started_data_sweep();
        data->tenured->sweep();
        current_gc->event->ended_data_sweep();

        /* Must run between the data and code sweeps: roots are checked
        against mark bits, which the code sweep consumes */
        update_code_roots_for_sweep();

        current_gc->event->started_code_sweep();
        code->allocator->sweep();
        current_gc->event->ended_code_sweep();
}
140
141 void factor_vm::collect_full(bool trace_contexts_p)
142 {
143         collect_mark_impl(trace_contexts_p);
144         collect_sweep_impl();
145         if(data->low_memory_p())
146         {
147                 current_gc->op = collect_compact_op;
148                 current_gc->event->op = collect_compact_op;
149                 collect_compact_impl(trace_contexts_p);
150         }
151 }
152
/* Compacting collection: mark, then compact directly (no separate
sweep pass — compare collect_full, which sweeps first). */
void factor_vm::collect_compact(bool trace_contexts_p)
{
        collect_mark_impl(trace_contexts_p);
        collect_compact_impl(trace_contexts_p);
}
158
/* Collection performed when the data heap must grow.
Grows the data heap by at least requested_bytes (exact sizing is up to
data_heap::grow — confirm there), installs the new heap, then marks and
compacts so live objects end up in the new heap. The old heap is freed
only after the copy completes. */
void factor_vm::collect_growing_heap(cell requested_bytes, bool trace_contexts_p)
{
        /* Grow the data heap and copy all live objects to the new heap. */
        data_heap *old = data;
        set_data_heap(data->grow(requested_bytes));
        collect_mark_impl(trace_contexts_p);
        collect_compact_code_impl(trace_contexts_p);
        delete old;
}
168
169 }