}
};
-// After a sweep, invalidate any code heap roots which are not marked,
-// so that if a block makes a tail call to a generic word, and the PIC
-// compiler triggers a GC, and the caller block gets GCd as a result,
-// the PIC code won't try to overwrite the call site
-void factor_vm::update_code_roots_for_sweep() {
- mark_bits* state = &code->allocator->state;
-
- FACTOR_FOR_EACH(code_roots) {
- code_root* root = *iter;
- cell block = root->value & (~data_alignment - 1);
- if (root->valid && !state->marked_p(block))
- root->valid = false;
- }
-}
-
void factor_vm::collect_mark_impl() {
+ gc_event* event = current_gc->event;
+ if (event)
+ event->reset_timer();
+
slot_visitor<full_collection_copier>
visitor(this, full_collection_copier(data->tenured, code, &mark_stack));
data->reset_aging();
data->reset_nursery();
code->clear_remembered_set();
+
+ if (event)
+ event->ended_phase(PHASE_MARKING);
}
// Sweep phase of a full collection: sweep the tenured data heap, then
// invalidate dead code-heap roots, then sweep the code heap. Each phase
// is timed on the current GC event when event recording is enabled.
void factor_vm::collect_sweep_impl() {
  gc_event* event = current_gc->event;
  if (event)
    event->reset_timer();
  data->tenured->sweep();
  if (event)
    event->ended_phase(PHASE_DATA_SWEEP);

  // After a sweep, invalidate any code heap roots which are not
  // marked, so that if a block makes a tail call to a generic word,
  // and the PIC compiler triggers a GC, and the caller block gets GCd
  // as a result, the PIC code won't try to overwrite the call site
  mark_bits* state = &code->allocator->state;
  FACTOR_FOR_EACH(code_roots) {
    code_root* root = *iter;
    // Align the root's address down to its block start. The mask must be
    // ~(data_alignment - 1); the previous expression (~data_alignment - 1)
    // parsed as (~data_alignment) - 1 because unary ~ binds tighter than
    // binary -, which cleared the wrong bits and tested mark bits for a
    // bogus address.
    cell block = root->value & ~(data_alignment - 1);
    if (root->valid && !state->marked_p(block))
      root->valid = false;
  }

  if (event)
    event->reset_timer();
  code->sweep();
  if (event)
    event->ended_phase(PHASE_CODE_SWEEP);
}
void factor_vm::collect_full() {
if (data->low_memory_p()) {
// Full GC did not free up enough memory. Grow the heap.
- set_current_gc_op(collect_growing_heap_op);
- collect_growing_heap(0);
+ set_current_gc_op(COLLECT_GROWING_DATA_HEAP_OP);
+ collect_growing_data_heap(0);
} else if (data->high_fragmentation_p()) {
// Enough free memory, but it is not contiguous. Perform a
// compaction.
- set_current_gc_op(collect_compact_op);
+ set_current_gc_op(COLLECT_COMPACT_OP);
collect_compact_impl();
}