]> gitweb.factorcode.org Git - factor.git/blob - vm/gc.cpp
Merge branch 'master' of git://github.com/slavapestov/factor
[factor.git] / vm / gc.cpp
1 #include "master.hpp"
2
3 namespace factor
4 {
5
6 gc_event::gc_event(gc_op op_, factor_vm *parent) :
7         op(op_),
8         cards_scanned(0),
9         decks_scanned(0),
10         code_blocks_scanned(0),
11         start_time(nano_count()),
12         card_scan_time(0),
13         code_scan_time(0),
14         data_sweep_time(0),
15         code_sweep_time(0),
16         compaction_time(0)
17 {
18         data_heap_before = parent->data_room();
19         code_heap_before = parent->code_room();
20         start_time = nano_count();
21 }
22
/* Mark the start of the card scanning phase; temp_time is shared by all
phase timers, so phases must not nest. */
void gc_event::started_card_scan()
{
        temp_time = nano_count();
}
27
/* End the card scanning phase: accumulate the number of cards and decks
scanned, and record the elapsed time since started_card_scan(). */
void gc_event::ended_card_scan(cell cards_scanned_, cell decks_scanned_)
{
        cards_scanned += cards_scanned_;
        decks_scanned += decks_scanned_;
        card_scan_time = (cell)(nano_count() - temp_time);
}
34
/* Mark the start of the code heap scanning phase. */
void gc_event::started_code_scan()
{
        temp_time = nano_count();
}
39
/* End the code heap scanning phase: accumulate the number of code
blocks scanned and record the elapsed time since started_code_scan(). */
void gc_event::ended_code_scan(cell code_blocks_scanned_)
{
        code_blocks_scanned += code_blocks_scanned_;
        code_scan_time = (cell)(nano_count() - temp_time);
}
45
/* Mark the start of the data heap sweep phase. */
void gc_event::started_data_sweep()
{
        temp_time = nano_count();
}
50
/* End the data heap sweep phase, recording its elapsed time. */
void gc_event::ended_data_sweep()
{
        data_sweep_time = (cell)(nano_count() - temp_time);
}
55
/* Mark the start of the code heap sweep phase. */
void gc_event::started_code_sweep()
{
        temp_time = nano_count();
}
60
/* End the code heap sweep phase, recording its elapsed time. */
void gc_event::ended_code_sweep()
{
        code_sweep_time = (cell)(nano_count() - temp_time);
}
65
/* Mark the start of the compaction phase. */
void gc_event::started_compaction()
{
        temp_time = nano_count();
}
70
/* End the compaction phase, recording its elapsed time. */
void gc_event::ended_compaction()
{
        compaction_time = (cell)(nano_count() - temp_time);
}
75
/* Finish recording the collection: snapshot the heap sizes after the
collection and compute the total elapsed time since construction. */
void gc_event::ended_gc(factor_vm *parent)
{
        data_heap_after = parent->data_room();
        code_heap_after = parent->code_room();
        total_time = (cell)(nano_count() - start_time);
}
82
83 gc_state::gc_state(gc_op op_, factor_vm *parent) : op(op_)
84 {
85         if(parent->gc_events)
86         {
87                 event = new gc_event(op,parent);
88                 start_time = nano_count();
89         }
90         else
91                 event = NULL;
92 }
93
94 gc_state::~gc_state()
95 {
96         if(event)
97         {
98                 delete event;
99                 event = NULL;
100         }
101 }
102
103 void factor_vm::end_gc()
104 {
105         if(gc_events)
106         {
107                 current_gc->event->ended_gc(this);
108                 gc_events->push_back(*current_gc->event);
109         }
110 }
111
112 void factor_vm::start_gc_again()
113 {
114         end_gc();
115
116         switch(current_gc->op)
117         {
118         case collect_nursery_op:
119                 current_gc->op = collect_aging_op;
120                 break;
121         case collect_aging_op:
122                 current_gc->op = collect_to_tenured_op;
123                 break;
124         case collect_to_tenured_op:
125                 current_gc->op = collect_full_op;
126                 break;
127         case collect_full_op:
128         case collect_compact_op:
129                 current_gc->op = collect_growing_heap_op;
130                 break;
131         default:
132                 critical_error("Bad GC op",current_gc->op);
133                 break;
134         }
135
136         if(gc_events)
137                 current_gc->event = new gc_event(current_gc->op,this);
138 }
139
140 void factor_vm::set_current_gc_op(gc_op op)
141 {
142         current_gc->op = op;
143         if(gc_events) current_gc->event->op = op;
144 }
145
/* Run one garbage collection of the given kind. requested_bytes is only
consulted by collect_growing_heap_op; trace_contexts_p controls whether
context stacks are traced as roots. Must not be called with GC disabled
or re-entrantly (asserted below). */
void factor_vm::gc(gc_op op, cell requested_bytes, bool trace_contexts_p)
{
        assert(!gc_off);
        assert(!current_gc);

        current_gc = new gc_state(op,this);

        /* Keep trying to GC higher and higher generations until we don't run out
        of space */
        for(;;)
        {
                try
                {
                        /* start_gc_again() may have escalated the op on a
                        previous iteration; keep the event's op in sync. */
                        if(gc_events) current_gc->event->op = current_gc->op;

                        switch(current_gc->op)
                        {
                        case collect_nursery_op:
                                collect_nursery();
                                break;
                        case collect_aging_op:
                                collect_aging();
                                /* Upgrade to a full collection right away if
                                the data heap is badly fragmented afterwards. */
                                if(data->high_fragmentation_p())
                                {
                                        set_current_gc_op(collect_full_op);
                                        collect_full(trace_contexts_p);
                                }
                                break;
                        case collect_to_tenured_op:
                                collect_to_tenured();
                                if(data->high_fragmentation_p())
                                {
                                        set_current_gc_op(collect_full_op);
                                        collect_full(trace_contexts_p);
                                }
                                break;
                        case collect_full_op:
                                collect_full(trace_contexts_p);
                                break;
                        case collect_compact_op:
                                collect_compact(trace_contexts_p);
                                break;
                        case collect_growing_heap_op:
                                collect_growing_heap(requested_bytes,trace_contexts_p);
                                break;
                        default:
                                critical_error("Bad GC op",current_gc->op);
                                break;
                        }

                        /* Collection succeeded; leave the retry loop. */
                        break;
                }
                catch(const must_start_gc_again &)
                {
                        /* We come back here if a generation is full */
                        start_gc_again();
                        continue;
                }
        }

        end_gc();

        delete current_gc;
        current_gc = NULL;
}
211
212 /* primitive_minor_gc() is invoked by inline GC checks, and it needs to fill in
213 uninitialized stack locations before actually calling the GC. See the comment
214 in compiler.cfg.stacks.uninitialized for details. */
215
216 struct call_frame_scrubber {
217         factor_vm *parent;
218         context *ctx;
219
220         explicit call_frame_scrubber(factor_vm *parent_, context *ctx_) :
221                 parent(parent_), ctx(ctx_) {}
222
223         void operator()(stack_frame *frame)
224         {
225                 cell return_address = parent->frame_offset(frame);
226                 if(return_address == (cell)-1)
227                         return;
228
229                 code_block *compiled = parent->frame_code(frame);
230                 gc_info *info = compiled->block_gc_info();
231
232                 assert(return_address < compiled->size());
233                 cell index = info->return_address_index(return_address);
234                 if(index != (cell)-1)
235                         ctx->scrub_stacks(info,index);
236         }
237 };
238
/* Scrub uninitialized stack locations in every call frame of one
context's callstack. */
void factor_vm::scrub_context(context *ctx)
{
        call_frame_scrubber scrubber(this,ctx);
        iterate_callstack(ctx,scrubber);
}
244
245 void factor_vm::scrub_contexts()
246 {
247         std::set<context *>::const_iterator begin = active_contexts.begin();
248         std::set<context *>::const_iterator end = active_contexts.end();
249         while(begin != end)
250         {
251                 scrub_context(*begin);
252                 begin++;
253         }
254 }
255
256 void factor_vm::primitive_minor_gc()
257 {
258         scrub_contexts();
259
260         gc(collect_nursery_op,
261                 0, /* requested size */
262                 true /* trace contexts? */);
263 }
264
265 void factor_vm::primitive_full_gc()
266 {
267         gc(collect_full_op,
268                 0, /* requested size */
269                 true /* trace contexts? */);
270 }
271
272 void factor_vm::primitive_compact_gc()
273 {
274         gc(collect_compact_op,
275                 0, /* requested size */
276                 true /* trace contexts? */);
277 }
278
279 /*
280  * It is up to the caller to fill in the object's fields in a meaningful
281  * fashion!
282  */
283 object *factor_vm::allot_large_object(cell type, cell size)
284 {
285         /* If tenured space does not have enough room, collect and compact */
286         if(!data->tenured->can_allot_p(size))
287         {
288                 primitive_compact_gc();
289
290                 /* If it still won't fit, grow the heap */
291                 if(!data->tenured->can_allot_p(size))
292                 {
293                         gc(collect_growing_heap_op,
294                                 size, /* requested size */
295                                 true /* trace contexts? */);
296                 }
297         }
298
299         object *obj = data->tenured->allot(size);
300
301         /* Allows initialization code to store old->new pointers
302         without hitting the write barrier in the common case of
303         a nursery allocation */
304         write_barrier(obj,size);
305
306         obj->initialize(type);
307         return obj;
308 }
309
310 void factor_vm::primitive_enable_gc_events()
311 {
312         gc_events = new std::vector<gc_event>();
313 }
314
315 void factor_vm::primitive_disable_gc_events()
316 {
317         if(gc_events)
318         {
319                 growable_array result(this);
320
321                 std::vector<gc_event> *gc_events = this->gc_events;
322                 this->gc_events = NULL;
323
324                 std::vector<gc_event>::const_iterator iter = gc_events->begin();
325                 std::vector<gc_event>::const_iterator end = gc_events->end();
326
327                 for(; iter != end; iter++)
328                 {
329                         gc_event event = *iter;
330                         byte_array *obj = byte_array_from_value(&event);
331                         result.add(tag<byte_array>(obj));
332                 }
333
334                 result.trim();
335                 ctx->push(result.elements.value());
336
337                 delete this->gc_events;
338         }
339         else
340                 ctx->push(false_object);
341 }
342
343 }