/* gc_state describes one garbage-collection cycle: which generation is the
   collection target, whether the data heap must be grown, and a start
   timestamp (microseconds, from current_micros()) used by the GC statistics. */
/* NOTE(review): this listing has a gap (embedded line numbers jump 6 -> 8);
   the data_ parameter is not used by any visible initializer -- presumably the
   missing line 7 stores it (e.g. data(data_),). Confirm against the full file. */
6 gc_state::gc_state(data_heap *data_, bool growing_data_heap_, cell collecting_gen_) :
8 growing_data_heap(growing_data_heap_),
9 collecting_gen(collecting_gen_),
10 collecting_aging_again(false),
11 start_time(current_micros()) { }
/* Trivial destructor: in the visible code gc_state owns no resources that
   need explicit release. */
13 gc_state::~gc_state() { }
/* Code-heap iterator functor: for each code block it is applied to, updates
   the block's references to data-heap literals and to other words, since the
   referenced objects may have been moved by the collector. */
15 struct literal_and_word_reference_updater {
/* NOTE(review): listing gap between lines 15 and 18 -- the member holding the
   VM pointer (presumably 'factor_vm *myvm;') is not visible here, though the
   constructor's myvm(myvm_) initializer implies it exists. */
18 literal_and_word_reference_updater(factor_vm *myvm_) : myvm(myvm_) {}
20 void operator()(heap_block *block)
/* The cast assumes every block handed to this functor is a code_block. */
22 code_block *compiled = (code_block *)block;
23 myvm->update_literal_references(compiled);
24 myvm->update_word_references(compiled);
/* Sweep the code heap after a collection: free unmarked code blocks and, via
   the updater functor, fix up literal/word references in every surviving
   block. The points_to_nursery / points_to_aging remembered sets are cleared
   because the young objects they tracked have been promoted or freed. */
28 void factor_vm::free_unmarked_code_blocks()
30 literal_and_word_reference_updater updater(this);
31 code->free_unmarked(updater);
32 code->points_to_nursery.clear();
33 code->points_to_aging.clear();
/* Walk the remembered set of "dirty" code blocks and patch each block's
   literal references to the moved objects' new locations. */
/* NOTE(review): the comment at original line 38 below reads like it belongs
   to a declaration that is missing from this listing -- verify in full file. */
36 void factor_vm::update_dirty_code_blocks(std::set<code_block *> *remembered_set)
38 /* The youngest generation that any code block can now reference */
39 std::set<code_block *>::const_iterator iter = remembered_set->begin();
40 std::set<code_block *>::const_iterator end = remembered_set->end();
42 for(; iter != end; iter++) update_literal_references(*iter);
/* Accumulate timing statistics for the GC cycle that just finished, into the
   slot for the generation that was collected. Times are in microseconds. */
45 void factor_vm::record_gc_stats()
47 generation_statistics *s = &gc_stats.generations[current_gc->collecting_gen];
49 cell gc_elapsed = (current_micros() - current_gc->start_time);
/* NOTE(review): listing gap at original line 48 -- presumably the
   per-generation collections counter is incremented there. */
51 s->gc_time += gc_elapsed;
/* Track the worst-case pause for this generation alongside the total. */
52 if(s->max_gc_time < gc_elapsed)
53 s->max_gc_time = gc_elapsed;
56 /* Collect gen and all younger generations.
57 If growing_data_heap_ is true, we must grow the data heap to such a size that
58 an allocation of requested_bytes won't fail */
59 void factor_vm::garbage_collection(cell collecting_gen_, bool growing_data_heap_, bool trace_contexts_p, cell requested_bytes)
/* NOTE(review): this listing is missing many lines of this function (60-65,
   69, 71, 73, 77, 79, 83, 85, 88, 90-93, 95-97, 99, 101, 104-106, 109+),
   including braces, the 'else' arms, and the epilogue; the comments below
   annotate only what is visible. */
66 current_gc = new gc_state(data,growing_data_heap_,collecting_gen_);
68 /* Keep trying to GC higher and higher generations until we don't run out
/* setjmp establishes the retry point: if a target generation fills up during
   copying, the collector longjmps back here and the branches below escalate
   the strategy (grow heap / retry aging / collect next-older generation). */
70 if(setjmp(current_gc->gc_unwind))
72 /* We come back here if a generation is full */
74 /* We have no older generations we can try collecting, so we
75 resort to growing the data heap */
76 if(current_gc->collecting_tenured_p())
78 current_gc->growing_data_heap = true;
80 /* Since we start tracing again, any previously
81 marked code blocks must be re-marked and re-traced */
82 code->clear_mark_bits();
84 /* we try collecting aging space twice before going on to
86 else if(current_gc->collecting_aging_p()
87 && !current_gc->collecting_aging_again)
89 current_gc->collecting_aging_again = true;
91 /* Collect the next oldest generation */
94 current_gc->collecting_gen++;
/* Dispatch on the (possibly escalated) target generation. The nursery and
   plain-aging calls are in the missing lines (99, 104-106). */
98 if(current_gc->collecting_nursery_p())
100 else if(current_gc->collecting_aging_p())
102 if(current_gc->collecting_aging_again)
103 collect_to_tenured();
107 else if(current_gc->collecting_tenured_p())
108 collect_full(requested_bytes,trace_contexts_p);
/* NOTE(review): fragments -- the enclosing function header for line 118 and
   the body of primitive_gc (lines 122+) are missing from this listing.
   Line 118 is a full collection request: target the tenured generation
   (which collects all younger generations too), without pre-growing the heap,
   tracing contexts, with no pending allocation size. */
118 garbage_collection(tenured_gen,false,true,0);
121 void factor_vm::primitive_gc()
/* Build and push onto the data stack an array of GC statistics: six entries
   per generation (collections, total GC time, max GC time, average GC time,
   object count, bytes copied) followed by aggregate counters. Times are in
   microseconds; 64-bit counters are boxed as bignums since they can exceed
   fixnum range. */
/* NOTE(review): listing gaps at lines 127, 129-130, 132, 134, 142, 144-145,
   151-152, 154 -- braces and the loop variable declaration ('cell i;',
   presumably) are among the missing lines. */
126 void factor_vm::primitive_gc_stats()
128 growable_array result(this);
131 u64 total_gc_time = 0;
133 for(i = 0; i < gen_count; i++)
135 generation_statistics *s = &gc_stats.generations[i];
136 result.add(allot_cell(s->collections));
137 result.add(tag<bignum>(long_long_to_bignum(s->gc_time)));
138 result.add(tag<bignum>(long_long_to_bignum(s->max_gc_time)));
/* Guard against division by zero for generations never collected. */
139 result.add(allot_cell(s->collections == 0 ? 0 : s->gc_time / s->collections));
140 result.add(allot_cell(s->object_count));
141 result.add(tag<bignum>(long_long_to_bignum(s->bytes_copied)));
143 total_gc_time += s->gc_time;
146 result.add(tag<bignum>(ulong_long_to_bignum(total_gc_time)));
147 result.add(tag<bignum>(ulong_long_to_bignum(gc_stats.cards_scanned)));
148 result.add(tag<bignum>(ulong_long_to_bignum(gc_stats.decks_scanned)));
149 result.add(tag<bignum>(ulong_long_to_bignum(gc_stats.card_scan_time)));
150 result.add(allot_cell(gc_stats.code_blocks_scanned));
153 dpush(result.elements.value());
/* Zero every accumulated GC statistic, per-generation and aggregate. */
156 void factor_vm::clear_gc_stats()
158 memset(&gc_stats,0,sizeof(gc_statistics));
/* Primitive wrapper exposing clear_gc_stats to Factor code; its body
   (lines 162+) is not visible in this listing. */
161 void factor_vm::primitive_clear_gc_stats()
166 /* classes.tuple uses this to reshape tuples; tools.deploy.shaker uses this
167 to coalesce equal but distinct quotations and wrappers. */
/* Stack effect: ( old-objects new-objects -- ). The two arrays must be
   parallel; every reference to old[i] is redirected to new[i]. */
/* NOTE(review): listing gaps (169, 172, 176-178, 180, 183, 186-189, 194+) --
   braces, the loop variable declaration, and the epilogue (presumably the GC
   that performs the forwarding, per the trailing comment) are missing. */
168 void factor_vm::primitive_become()
170 array *new_objects = untag_check<array>(dpop());
171 array *old_objects = untag_check<array>(dpop());
173 cell capacity = array_capacity(new_objects);
/* Mismatched lengths would leave some objects unforwarded; fail hard. */
174 if(capacity != array_capacity(old_objects))
175 critical_error("bad parameters to become",0);
179 for(i = 0; i < capacity; i++)
181 tagged<object> old_obj(array_nth(old_objects,i));
182 tagged<object> new_obj(array_nth(new_objects,i));
/* Install a forwarding pointer in the old object's header so collection
   redirects every reference from the old object to the new one. */
184 if(old_obj != new_obj)
185 old_obj->h.forward_to(new_obj.untagged());
190 /* If a word's definition quotation was in old_objects and the
191 quotation in new_objects is not compiled, we might leak memory
192 by referencing the old quotation unless we recompile all
193 unoptimized words. */
/* GC entry point invoked when compiled code needs a collection mid-word:
   registers the caller's spilled roots in gc_locals so the collector can
   trace and update them, runs a nursery collection, then unregisters the
   same number of roots. */
197 void factor_vm::inline_gc(cell *gc_roots_base, cell gc_roots_size)
199 for(cell i = 0; i < gc_roots_size; i++)
200 gc_locals.push_back((cell)&gc_roots_base[i]);
202 garbage_collection(nursery_gen,false,true,0);
204 for(cell i = 0; i < gc_roots_size; i++)
205 gc_locals.pop_back();
/* C-linkage trampoline called from generated machine code; forwards to the
   VM instance's inline_gc method via the VM_PTR accessor. */
208 VM_C_API void inline_gc(cell *gc_roots_base, cell gc_roots_size, factor_vm *myvm)
211 VM_PTR->inline_gc(gc_roots_base,gc_roots_size);
/* (fragment of the original header comment; surrounding lines 213-217 are
   mostly missing from this listing) */
215 * It is up to the caller to fill in the object's fields in a meaningful
/* Allocate 'size' bytes for an object with the given header. Fast path:
   bump-allocate in the nursery; objects too large for the nursery go straight
   to tenured space. The 'obj' declaration and the function's epilogue/return
   are among the missing lines. */
218 object *factor_vm::allot_object(header header, cell size)
/* Only objects strictly smaller than the whole nursery can live there. */
227 if(nursery.size > size)
229 /* If there is insufficient room, collect the nursery */
230 if(nursery.here + size > nursery.end)
231 garbage_collection(nursery_gen,false,true,0);
233 obj = nursery.allot(size);
235 /* If the object is bigger than the nursery, allocate it in
239 /* If tenured space does not have enough room, collect */
240 if(data->tenured->here + size > data->tenured->end)
/* NOTE(review): listing gap at lines 241-242 -- presumably a full collection
   runs there before the re-check below. */
243 /* If it still won't fit, grow the heap */
244 if(data->tenured->here + size > data->tenured->end)
/* growing_data_heap=true with requested_bytes=size guarantees the grown heap
   can satisfy this allocation. */
245 garbage_collection(tenured_gen,true,true,size);
247 obj = data->tenured->allot(size);
249 /* Allows initialization code to store old->new pointers
250 without hitting the write barrier in the common case of
251 a nursery allocation */