#include "master.h"

/* This malloc-style heap code is reasonably generic. Maybe in the future, it
will be used for the data heap too, if we ever get incremental
mark/sweep/compact GC. */
void new_heap(F_HEAP *heap, CELL size)
{
        heap->segment = alloc_segment(align_page(size));
        if(!heap->segment)
                fatal_error("Out of memory in new_heap",size);
        heap->free_list = NULL;
}

/* Allocate a code heap during startup */
void init_code_heap(CELL size)
{
        new_heap(&code_heap,size);
}

bool in_code_heap_p(CELL ptr)
{
        return (ptr >= code_heap.segment->start
                && ptr <= code_heap.segment->end);
}

/* If there is no previous block, next_free becomes the head of the free list;
otherwise it is linked in after the previous block */
INLINE void update_free_list(F_HEAP *heap, F_BLOCK *prev, F_BLOCK *next_free)
{
        if(prev)
                prev->next_free = next_free;
        else
                heap->free_list = next_free;
}

/* Called after reading the code heap from the image file, and after code GC.

In the former case, we must add a large free block covering the unused space
from heap->segment->start + size up to heap->segment->end. */
void build_free_list(F_HEAP *heap, CELL size)
{
        F_BLOCK *prev = NULL;
        F_BLOCK *prev_free = NULL;
        F_BLOCK *scan = first_block(heap);
        F_BLOCK *end = (F_BLOCK *)(heap->segment->start + size);

        /* Add all free blocks to the free list */
        while(scan && scan < end)
        {
                switch(scan->status)
                {
                case B_FREE:
                        update_free_list(heap,prev_free,scan);
                        prev_free = scan;
                        break;
                case B_ALLOCATED:
                        break;
                default:
                        critical_error("Invalid scan->status",(CELL)scan);
                        break;
                }

                prev = scan;
                scan = next_block(heap,scan);
        }

        /* If there is room at the end of the heap, add a free block. This
        branch is only taken after loading a new image, not after code GC */
        if((CELL)(end + 1) <= heap->segment->end)
        {
                end->status = B_FREE;
                end->next_free = NULL;
                end->size = heap->segment->end - (CELL)end;

                /* add final free block */
                update_free_list(heap,prev_free,end);
        }
        /* This branch is taken if the newly loaded image fits exactly, or
        after code GC */
        else
        {
                /* even if there is no room at the end of the heap for a new
                free block, we may have to grow the last block by a few bytes
                so that prev + prev->size lines up with the end of the heap */
                if(prev)
                        prev->size = heap->segment->end - (CELL)prev;

                /* this is the last free block */
                update_free_list(heap,prev_free,NULL);
        }
}
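
/* Illustrative sketch, not part of the VM: after build_free_list() runs, every
B_FREE block is reachable by following next_free links from heap->free_list.
The helper below exists only for this sketch and is not built. */
#if 0
static CELL count_free_bytes(F_HEAP *heap)
{
        CELL free_bytes = 0;
        F_BLOCK *scan = heap->free_list;

        while(scan)
        {
                /* build_free_list() only links B_FREE blocks */
                free_bytes += scan->size;
                scan = scan->next_free;
        }

        return free_bytes;
}
#endif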

/* Allocate a block of memory from the mark and sweep GC heap */
CELL heap_allot(F_HEAP *heap, CELL size)
{
        F_BLOCK *prev = NULL;
        F_BLOCK *scan = heap->free_list;

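        /* Round the requested size up to a multiple of 32 bytes; for
        example, a 33 byte request becomes 64 bytes */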
        size = (size + 31) & ~31;

        while(scan)
        {
                CELL this_size = scan->size - sizeof(F_BLOCK);

                if(scan->status != B_FREE)
                        critical_error("Invalid block in free list",(CELL)scan);

                if(this_size < size)
                {
                        prev = scan;
                        scan = scan->next_free;
                        continue;
                }

                /* we found a candidate block */
                F_BLOCK *next_free;

                if(this_size - size <= sizeof(F_BLOCK))
                {
                        /* too small to be split */
                        next_free = scan->next_free;
                }
                else
                {
                        /* split the block in two */
                        CELL new_size = size + sizeof(F_BLOCK);
                        F_BLOCK *split = (F_BLOCK *)((CELL)scan + new_size);
                        split->status = B_FREE;
                        split->size = scan->size - new_size;
                        split->next_free = scan->next_free;
                        scan->size = new_size;
                        next_free = split;
                }

                /* update the free list */
                update_free_list(heap,prev,next_free);

                /* this is our new block */
                scan->status = B_ALLOCATED;

                return (CELL)(scan + 1);
        }

        return 0;
}
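
/* Illustrative sketch, not part of the VM: heap_allot() returns 0 when no free
block is large enough, and leaves growing the heap or triggering a code GC to
the caller. The hypothetical wrapper below, which is not built, only shows the
calling convention: the returned address points just past the F_BLOCK header. */
#if 0
static CELL heap_allot_or_die(F_HEAP *heap, CELL size)
{
        CELL payload = heap_allot(heap,size);
        if(payload == 0)
                fatal_error("Out of memory in heap_allot_or_die",size);
        return payload;
}
#endif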

/* If we have to grow the heap in the middle of a code GC, the GC restarts from
scratch, so we have to unmark any marked blocks. */
void unmark_marked(F_HEAP *heap)
{
        F_BLOCK *scan = first_block(heap);

        while(scan)
        {
                if(scan->status == B_MARKED)
                        scan->status = B_ALLOCATED;

                scan = next_block(heap,scan);
        }
}

/* After code GC, all referenced code blocks have status set to B_MARKED, so any
which are allocated and not marked can be reclaimed. */
void free_unmarked(F_HEAP *heap)
{
        F_BLOCK *prev = NULL;
        F_BLOCK *scan = first_block(heap);

        while(scan)
        {
                switch(scan->status)
                {
                case B_ALLOCATED:
                        if(prev && prev->status == B_FREE)
                                prev->size += scan->size;
                        else
                        {
                                scan->status = B_FREE;
                                prev = scan;
                        }
                        break;
                case B_FREE:
                        if(prev && prev->status == B_FREE)
                                prev->size += scan->size;
                        break;
                case B_MARKED:
                        scan->status = B_ALLOCATED;
                        prev = scan;
                        break;
                default:
                        critical_error("Invalid scan->status",(CELL)scan);
                }

                scan = next_block(heap,scan);
        }

        build_free_list(heap,heap->segment->size);
}
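
/* Worked example (added for illustration, not in the original sources): given
the block sequence

   [B_MARKED 96] [B_ALLOCATED 32, dead] [B_FREE 64] [B_ALLOCATED 32, dead]

the sweep above leaves

   [B_ALLOCATED 96] [B_FREE 128]

because the dead and free blocks are absorbed into the size of the preceding
free block, and build_free_list() then relinks the surviving free blocks. */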

/* Compute the used space, the total size of all free blocks, and the size of
the largest free block */
void heap_usage(F_HEAP *heap, CELL *used, CELL *total_free, CELL *max_free)
{
        *used = 0;
        *total_free = 0;
        *max_free = 0;

        F_BLOCK *scan = first_block(heap);

        while(scan)
        {
                switch(scan->status)
                {
                case B_ALLOCATED:
                        *used += scan->size;
                        break;
                case B_FREE:
                        *total_free += scan->size;
                        if(scan->size > *max_free)
                                *max_free = scan->size;
                        break;
                default:
                        critical_error("Invalid scan->status",(CELL)scan);
                }

                scan = next_block(heap,scan);
        }
}
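
/* Illustrative sketch, not part of the VM: total_free can be much larger than
max_free when the heap is fragmented. A hypothetical report built on top of
heap_usage() could look like the following (not compiled). */
#if 0
static void print_code_heap_usage(void)
{
        CELL used, total_free, max_free;
        heap_usage(&code_heap,&used,&total_free,&max_free);

        print_string("used bytes:         "); print_cell(used); print_string("\n");
        print_string("free bytes:         "); print_cell(total_free); print_string("\n");
        print_string("largest free block: "); print_cell(max_free); print_string("\n");
}
#endif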

/* The size of the heap, not including the last block if it's free */
CELL heap_size(F_HEAP *heap)
{
        F_BLOCK *scan = first_block(heap);

        while(next_block(heap,scan) != NULL)
                scan = next_block(heap,scan);

        /* this is the last block in the heap, and it is free */
        if(scan->status == B_FREE)
                return (CELL)scan - heap->segment->start;
        /* otherwise the last block is allocated */
        else
                return heap->segment->size;
}

/* Apply a function to every code block */
void iterate_code_heap(CODE_HEAP_ITERATOR iter)
{
        F_BLOCK *scan = first_block(&code_heap);

        while(scan)
        {
                if(scan->status != B_FREE)
                        iterate_code_heap_step(block_to_compiled(scan),iter);
                scan = next_block(&code_heap,scan);
        }
}
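
/* Illustrative sketch, not part of the VM: judging by collect_literals_step()
below, a CODE_HEAP_ITERATOR receives the F_COMPILED header plus the start
addresses of the block's code and literal areas. A hypothetical pass that just
counts compiled blocks would look like this (not compiled). */
#if 0
static CELL compiled_block_count;

static void count_code_block(F_COMPILED *compiled, CELL code_start, CELL literals_start)
{
        compiled_block_count++;
}

static void count_code_blocks(void)
{
        compiled_block_count = 0;
        iterate_code_heap(count_code_block);
}
#endif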

/* Copy all literals referenced from a code block to newspace */
void collect_literals_step(F_COMPILED *compiled, CELL code_start, CELL literals_start)
{
        if(collecting_gen >= compiled->last_scan)
        {
                CELL scan;
                CELL literal_end = literals_start + compiled->literals_length;

                if(collecting_accumulation_gen_p())
                        compiled->last_scan = collecting_gen;
                else
                        compiled->last_scan = collecting_gen + 1;

                for(scan = literals_start; scan < literal_end; scan += CELLS)
                        copy_handle((CELL*)scan);

                if(compiled->relocation != F)
                {
                        copy_handle(&compiled->relocation);

                        F_BYTE_ARRAY *relocation = untag_object(compiled->relocation);

                        F_REL *rel = (F_REL *)(relocation + 1);
                        F_REL *rel_end = (F_REL *)((char *)rel + byte_array_capacity(relocation));

                        while(rel < rel_end)
                        {
                                if(REL_TYPE(rel) == RT_IMMEDIATE)
                                {
                                        CELL offset = rel->offset + code_start;
                                        F_FIXNUM absolute_value = get(CREF(literals_start,REL_ARGUMENT(rel)));
                                        apply_relocation(REL_CLASS(rel),offset,absolute_value);
                                }

                                rel++;
                        }
                }

                flush_icache(code_start,literals_start - code_start);
        }
}

/* Copy literals referenced from all code blocks to newspace */
void collect_literals(void)
{
        iterate_code_heap(collect_literals_step);
}

/* Mark all XTs and literals referenced from a word XT */
void recursive_mark(F_BLOCK *block)
{
        /* If already marked, do nothing */
        switch(block->status)
        {
        case B_MARKED:
                return;
        case B_ALLOCATED:
                block->status = B_MARKED;
                break;
        default:
                critical_error("Marking the wrong block",(CELL)block);
                break;
        }

        F_COMPILED *compiled = block_to_compiled(block);
        iterate_code_heap_step(compiled,collect_literals_step);
}

/* Push the total size, used space, total free space and largest free block of
the code heap, all in kilobytes */
void primitive_code_room(void)
{
        CELL used, total_free, max_free;
        heap_usage(&code_heap,&used,&total_free,&max_free);
        dpush(tag_fixnum((code_heap.segment->size) / 1024));
        dpush(tag_fixnum(used / 1024));
        dpush(tag_fixnum(total_free / 1024));
        dpush(tag_fixnum(max_free / 1024));
}

/* Dump all code blocks for debugging */
void dump_heap(F_HEAP *heap)
{
        CELL size = 0;

        F_BLOCK *scan = first_block(heap);

        while(scan)
        {
                char *status;
                switch(scan->status)
                {
                case B_FREE:
                        status = "free";
                        break;
                case B_ALLOCATED:
                        size += object_size(block_to_compiled(scan)->relocation);
                        status = "allocated";
                        break;
                case B_MARKED:
                        size += object_size(block_to_compiled(scan)->relocation);
                        status = "marked";
                        break;
                default:
                        status = "invalid";
                        break;
                }

                print_cell_hex((CELL)scan); print_string(" ");
                print_cell_hex(scan->size); print_string(" ");
                print_string(status); print_string("\n");

                scan = next_block(heap,scan);
        }

        print_cell(size); print_string(" bytes of relocation data\n");
}

/* Compute where each block is going to go, after compaction */
CELL compute_heap_forwarding(F_HEAP *heap)
{
        F_BLOCK *scan = first_block(heap);
        CELL address = (CELL)first_block(heap);

        while(scan)
        {
                if(scan->status == B_ALLOCATED)
                {
                        scan->forwarding = (F_BLOCK *)address;
                        address += scan->size;
                }
                else if(scan->status == B_MARKED)
                        critical_error("Why is the block marked?",0);

                scan = next_block(heap,scan);
        }

        return address - heap->segment->start;
}
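
/* Worked example (added for illustration, not in the original sources):
assuming first_block() sits at the start of the segment, the block sequence

   [B_ALLOCATED 64] [B_FREE 32] [B_ALLOCATED 64]

gets forwarding addresses of start+0 and start+64 for the two allocated
blocks, and the function returns 128, the size of the compacted heap. */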

F_COMPILED *forward_xt(F_COMPILED *compiled)
{
        return block_to_compiled(compiled_to_block(compiled)->forwarding);
}

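/* Repoint a callstack frame at the forwarded copy of its code block, keeping
the return address at the same offset within the block */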
void forward_frame_xt(F_STACK_FRAME *frame)
{
        CELL offset = (CELL)FRAME_RETURN_ADDRESS(frame) - (CELL)frame_code(frame);
        F_COMPILED *forwarded = forward_xt(frame_code(frame));
        frame->xt = (XT)(forwarded + 1);
        FRAME_RETURN_ADDRESS(frame) = (XT)((CELL)forwarded + offset);
}

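/* Update word, quotation and callstack code pointers to the forwarding
addresses computed by compute_heap_forwarding() */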
void forward_object_xts(void)
{
        begin_scan();

        CELL obj;

        while((obj = next_object()) != F)
        {
                if(type_of(obj) == WORD_TYPE)
                {
                        F_WORD *word = untag_object(obj);

                        word->code = forward_xt(word->code);
                        if(word->profiling)
                                word->profiling = forward_xt(word->profiling);
                }
                else if(type_of(obj) == QUOTATION_TYPE)
                {
                        F_QUOTATION *quot = untag_object(obj);

                        if(quot->compiledp != F)
                                quot->code = forward_xt(quot->code);
                }
                else if(type_of(obj) == CALLSTACK_TYPE)
                {
                        F_CALLSTACK *stack = untag_object(obj);
                        iterate_callstack_object(stack,forward_frame_xt);
                }
        }

        /* End the heap scan */
        gc_off = false;
}

/* Set the XT fields now that the heap has been compacted */
void fixup_object_xts(void)
{
        begin_scan();

        CELL obj;

        while((obj = next_object()) != F)
        {
                if(type_of(obj) == WORD_TYPE)
                {
                        F_WORD *word = untag_object(obj);
                        update_word_xt(word);
                }
                else if(type_of(obj) == QUOTATION_TYPE)
                {
                        F_QUOTATION *quot = untag_object(obj);

                        if(quot->compiledp != F)
                                set_quot_xt(quot,quot->code);
                }
        }

        /* End the heap scan */
        gc_off = false;
}

void compact_heap(F_HEAP *heap)
{
        F_BLOCK *scan = first_block(heap);

        while(scan)
        {
                F_BLOCK *next = next_block(heap,scan);

                /* blocks only slide toward lower addresses, but the old and
                new locations can overlap, so use memmove rather than memcpy */
                if(scan->status == B_ALLOCATED && scan != scan->forwarding)
                        memmove(scan->forwarding,scan,scan->size);
                scan = next;
        }
}

/* Move all free space to the end of the code heap. This is not very efficient,
since it makes several passes over the code and data heaps, but we only ever
do this before saving a deployed image and exiting, so performance is not
critical here */
void compact_code_heap(void)
{
        /* Free all unreachable code blocks */
        gc();

        /* Figure out where the code heap blocks are going to end up */
        CELL size = compute_heap_forwarding(&code_heap);

        /* Update word and quotation code pointers */
        forward_object_xts();

        /* Actually perform the compaction */
        compact_heap(&code_heap);

        /* Update word and quotation XTs */
        fixup_object_xts();

        /* Now update the free list; there will be a single free block at
        the end */
        build_free_list(&code_heap,size);
}