// Visitor used by the full (mark) GC pass. Marks reachable tenured-space
// objects and code blocks, pushing newly marked items onto an explicit
// mark stack (drained later) instead of recursing.
// NOTE(review): this chunk is a sampled extraction — the `code` member
// declaration, the constructor's code-heap parameter, and several closing
// braces fall on lines missing from view. Comments below describe only
// the visible statements.
5 struct full_collection_copier : no_fixup {
6 tenured_space* tenured;
8 std::vector<cell> *mark_stack;
// The initializer list below references `code(code)`, so the struct
// presumably also declares a code-heap pointer member and takes a
// matching constructor parameter on missing lines — TODO confirm.
10 full_collection_copier(tenured_space* tenured,
12 std::vector<cell> *mark_stack)
13 : tenured(tenured), code(code), mark_stack(mark_stack) { }
// Mark a data-heap object. Objects already in tenured space are marked
// in place; otherwise the visible tail of this function copies the
// object into tenured space.
15 object* fixup_data(object* obj) {
16 if (tenured->contains_p(obj)) {
// First visit: set mark bits covering the object's extent and queue
// it so its slots are traced when the mark stack is drained.
17 if (!tenured->state.marked_p((cell)obj)) {
18 tenured->state.set_marked_p((cell)obj, obj->size());
19 mark_stack->push_back((cell)obj);
24 // Is there another forwarding pointer?
25 while (obj->forwarding_pointer_p()) {
26 object* dest = obj->forwarding_pointer();
// The forwarding chain may end at an object already promoted to
// tenured space; if so, mark it there rather than copying again.
30 if (tenured->contains_p(obj)) {
31 if (!tenured->state.marked_p((cell)obj)) {
32 tenured->state.set_marked_p((cell)obj, obj->size());
33 mark_stack->push_back((cell)obj);
// Evacuate: allocate in tenured space, copy the payload, and leave a
// forwarding pointer behind so later references find the new copy.
38 cell size = obj->size();
39 object* newpointer = tenured->allot(size);
// Presumably guarded by an allot-failure check on a missing line:
// tenured space is exhausted, so abandon this cycle and restart GC.
41 throw must_start_gc_again();
42 memcpy(newpointer, obj, size);
43 obj->forward_to(newpointer);
// Mark the fresh copy and queue it for tracing.
45 tenured->state.set_marked_p((cell)newpointer, newpointer->size());
46 mark_stack->push_back((cell)newpointer);
// Mark a code block on first visit. The value pushed is offset by +1 —
// presumably a tag distinguishing code-heap entries from data-heap
// objects on the shared mark stack; confirm against visit_mark_stack.
50 code_block* fixup_code(code_block* compiled) {
51 if (!code->allocator->state.marked_p((cell)compiled)) {
52 code->allocator->state.set_marked_p((cell)compiled, compiled->size());
53 mark_stack->push_back((cell)compiled + 1);
59 // After a sweep, invalidate any code heap roots which are not marked,
60 // so that if a block makes a tail call to a generic word, and the PIC
61 // compiler triggers a GC, and the caller block gets GCd as a result,
62 // the PIC code won't try to overwrite the call site
// Walk all registered code roots and compute, for each, the start of
// the code-heap block containing it, so unmarked blocks can be
// invalidated (the comment above in the original file explains why).
63 void factor_vm::update_code_roots_for_sweep() {
64 mark_bits* state = &code->allocator->state;
66 FACTOR_FOR_EACH(code_roots) {
67 code_root* root = *iter;
// Align the root's address down to its allocation-block boundary.
// NOTE(review): the mask `(~data_alignment - 1)` looks wrong — the
// align-down idiom for a power-of-two boundary is
// `value & ~(data_alignment - 1)`. As written, `~data_alignment - 1`
// evaluates to `-data_alignment - 2`, which is not a valid alignment
// mask. Upstream Factor uses `root->value & (cell)-data_alignment`.
// Cannot fix here: the loop body's tail falls on missing lines.
68 cell block = root->value & (~data_alignment - 1);
// The `if` body (presumably `root->valid = false;`) falls on a line
// missing from this extract.
69 if (root->valid && !state->marked_p(block))
// Mark phase of a full GC: clear all mark bits in both heaps, trace
// every root set through a full_collection_copier, then drain the
// explicit mark stack to a fixpoint.
74 void factor_vm::collect_mark_impl() {
75 slot_visitor<full_collection_copier>
76 visitor(this, full_collection_copier(data->tenured, code, &mark_stack));
// Start from a clean slate: nothing is considered live yet.
80 code->allocator->state.clear_mark_bits();
81 data->tenured->state.clear_mark_bits();
// Trace VM roots, per-context code blocks, and code blocks still
// under construction.
83 visitor.visit_all_roots();
84 visitor.visit_context_code_blocks();
85 visitor.visit_uninitialized_code_blocks();
// Process queued objects/code blocks until the mark stack is empty.
87 visitor.visit_mark_stack(&mark_stack);
// All live data has been accounted for in tenured space, so the
// younger generations and the code heap's remembered set can be
// reset wholesale.
89 data->reset_tenured();
91 data->reset_nursery();
92 code->clear_remembered_set();
// Sweep phase of a full GC: reclaim unmarked tenured-space storage,
// then invalidate code roots into dead code blocks. Timer bookkeeping
// is recorded on the current GC event.
95 void factor_vm::collect_sweep_impl() {
96 gc_event* event = current_gc->event;
// (Missing lines here presumably start the data-sweep timer —
// TODO confirm against the full file.)
100 data->tenured->sweep();
102 event->ended_data_sweep();
// Must run after the data sweep so unmarked blocks are known.
104 update_code_roots_for_sweep();
107 event->reset_timer();
// The code-heap sweep call itself falls on lines missing from this
// extract; only its event bookkeeping is visible.
110 event->ended_code_sweep();
// Top-level full collection: mark then sweep, afterwards either grow
// the heap (not enough memory freed) or compact it (enough memory but
// fragmented), and finally flush the instruction cache.
113 void factor_vm::collect_full() {
// (The mark-phase call, presumably collect_mark_impl(), falls on a
// line missing from this extract.)
115 collect_sweep_impl();
117 if (data->low_memory_p()) {
118 // Full GC did not free up enough memory. Grow the heap.
119 set_current_gc_op(collect_growing_heap_op);
120 collect_growing_heap(0);
121 } else if (data->high_fragmentation_p()) {
122 // Enough free memory, but it is not contiguous. Perform a
124 set_current_gc_op(collect_compact_op);
125 collect_compact_impl();
// Code blocks may have moved or been freed above; flush the icache
// so stale instructions are not executed.
128 code->flush_icache();