/* vm/full_collector.cpp */
#include "master.hpp"

namespace factor {

/* After a sweep, invalidate any code heap roots which are not marked,
   so that if a block makes a tail call to a generic word, and the PIC
   compiler triggers a GC, and the caller block gets GCd as a result,
   the PIC code won't try to overwrite the call site */
void factor_vm::update_code_roots_for_sweep() {
  std::vector<code_root*>::const_iterator iter = code_roots.begin();
  std::vector<code_root*>::const_iterator end = code_roots.end();

  mark_bits* state = &code->allocator->state;

  for (; iter < end; iter++) {
    code_root* root = *iter;
    /* Round the root's address down to the start of its block before
       checking the mark bit */
    cell block = root->value & ~(data_alignment - 1);
    if (root->valid && !state->marked_p(block))
      root->valid = false;
  }
}

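/* Mark phase of a full collection. The gc_workhorse instantiated with
   full_policy (defined elsewhere) marks objects that already live in
   tenured space and is expected to promote reachable objects from the
   younger generations into tenured; the two visitors below walk data
   slots and code blocks respectively, feeding newly discovered objects
   onto the mark stack. */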
void factor_vm::collect_mark_impl(bool trace_contexts_p) {
  gc_workhorse<tenured_space, full_policy>
      workhorse(this, this->data->tenured, full_policy(this));

  slot_visitor<gc_workhorse<tenured_space, full_policy> >
                data_visitor(this, workhorse);

  code_block_visitor<gc_workhorse<tenured_space, full_policy> >
                code_visitor(this, workhorse);

  mark_stack.clear();

  code->allocator->state.clear_mark_bits();
  data->tenured->state.clear_mark_bits();

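  /* Global roots are always traced; the active contexts and their code
     blocks are traced only when trace_contexts_p is set. */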
  data_visitor.visit_roots();
  if (trace_contexts_p) {
    data_visitor.visit_contexts();
    code_visitor.visit_context_code_blocks();
    code_visitor.visit_uninitialized_code_blocks();
  }

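  /* Drain the mark stack. An entry with its low bit set is a tagged
     code_block pointer (presumably pushed as (cell)compiled + 1, given
     the untagging below); anything else is an ordinary object pointer. */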
  while (!mark_stack.empty()) {
    cell ptr = mark_stack.back();
    mark_stack.pop_back();

    if (ptr & 1) {
      code_block* compiled = (code_block*)(ptr - 1);
      data_visitor.visit_code_block_objects(compiled);
      data_visitor.visit_embedded_literals(compiled);
      code_visitor.visit_embedded_code_pointers(compiled);
    } else {
      object* obj = (object*)ptr;
      data_visitor.visit_slots(obj);
      if (obj->type() == ALIEN_TYPE)
        ((alien*)obj)->update_address();
      code_visitor.visit_object_code_block(obj);
    }
  }
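
  /* Every reachable object should now reside in tenured space (survivors
     from the nursery and aging spaces are expected to have been promoted
     during marking), so the younger generations can simply be reset and
     the code heap's remembered set cleared. */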
  data->reset_tenured();
  data->reset_aging();
  data->reset_nursery();
  code->clear_remembered_set();
}

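/* Sweep phase: reclaim unmarked objects in the tenured data heap,
   invalidate stale code heap roots, then reclaim unmarked code blocks.
   When a gc_event is being recorded it is notified before and after
   each sweep. */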
void factor_vm::collect_sweep_impl() {
  gc_event* event = current_gc->event;

  if (event)
    event->started_data_sweep();
  data->tenured->sweep();
  if (event)
    event->ended_data_sweep();

  update_code_roots_for_sweep();

  if (event)
    event->started_code_sweep();
  code->sweep();
  if (event)
    event->ended_code_sweep();
}

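/* A full collection is a mark pass followed by a sweep. If the sweep
   still leaves the heap low on memory, the heap is grown; if there is
   enough free memory but it is too fragmented, a compaction pass runs
   instead. */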
void factor_vm::collect_full(bool trace_contexts_p) {
  collect_mark_impl(trace_contexts_p);
  collect_sweep_impl();

  if (data->low_memory_p()) {
    /* Full GC did not free up enough memory. Grow the heap. */
    set_current_gc_op(collect_growing_heap_op);
    collect_growing_heap(0, trace_contexts_p);
  } else if (data->high_fragmentation_p()) {
    /* Enough free memory, but it is not contiguous. Perform a
       compaction. */
    set_current_gc_op(collect_compact_op);
    collect_compact_impl(trace_contexts_p);
  }

  code->flush_icache();
}

}