/* Number of size-segregated free lists for small blocks. */
4 static const cell free_list_count = 16;
/* Bucket granularity for the small free lists — presumably bytes, so the
   small lists cover blocks up to free_list_count * block_size_increment
   (512) bytes; confirm against add_to_free_list / find_free_block. */
5 static const cell block_size_increment = 32;
/* Segregated free list: small free blocks are kept in size-indexed bins
   (presumably indexed by size / block_size_increment — confirm against
   add_to_free_list), and anything too large for the bins goes on the
   single large_blocks chain.
   NOTE(review): the closing brace of this struct (original lines 10-15)
   is outside the visible excerpt. */
7 struct heap_free_list {
8 free_heap_block *small_blocks[free_list_count];
9 free_heap_block *large_blocks;
/* Live block -> destination address map used by compaction; filled in by
   compute_heap_forwarding() (declared below). NOTE(review): this is a
   member of an enclosing struct whose header is outside this excerpt. */
16 unordered_map<heap_block *, char *> forwarding;
/* Constructor: creates a heap of `size` bytes. secure_gc_ presumably
   enables scrubbing of freed memory — confirm against the definition. */
18 explicit heap(bool secure_gc_, cell size);
/* Address-ordered successor of `block`: the heap is a contiguous run of
   variable-sized blocks, so the next block starts size() bytes further on.
   NOTE(review): original lines 21 and 23-25 are elided here — they
   presumably open the body and return NULL once `next` reaches the end of
   the segment; confirm against the full source. */
20 inline heap_block *next_block(heap_block *block)
22 cell next = ((cell)block + block->size());
26 return (heap_block *)next;
/* First block in the heap: blocks start at the beginning of the segment.
   NOTE(review): the surrounding braces (original lines 30/32) are elided
   from this excerpt. */
29 inline heap_block *first_block()
31 return (heap_block *)seg->start;
/* One-past-the-end sentinel: seg->end is not a valid block, it marks where
   iteration with next_block() must stop. NOTE(review): the surrounding
   braces (original lines 35/37) are elided from this excerpt. */
34 inline heap_block *last_block()
36 return (heap_block *)seg->end;
/* --- Free-list management --- */
39 void clear_free_list();
/* Allocates/initializes the backing segment of `size` bytes — confirm. */
40 void new_heap(cell size);
/* Links a free block into small_blocks[] or large_blocks by size. */
41 void add_to_free_list(free_heap_block *block);
/* Rebuilds the free list, presumably turning the unused tail of the heap
   into one big free block — confirm against the definition. */
42 void build_free_list(cell size);
/* Debug check that `block` really is on a free list — confirm intent. */
43 void assert_free_block(free_heap_block *block);
/* Finds a free block of at least `size` bytes, or NULL-equivalent
   failure — exact failure behavior not visible here. */
44 free_heap_block *find_free_block(cell size);
/* Carves `size` bytes off `block`, returning the allocated part; the
   remainder presumably goes back on the free list — confirm. */
45 free_heap_block *split_free_block(free_heap_block *block, cell size);
/* --- Allocation / deallocation entry points --- */
46 heap_block *heap_allot(cell size, cell type);
47 void heap_free(heap_block *block);
/* --- GC mark-bit management --- */
48 void mark_block(heap_block *block);
49 void clear_mark_bits();
/* Reports usage statistics through the three out-parameters. */
50 void heap_usage(cell *used, cell *total_free, cell *max_free);
/* Compaction: fills the `forwarding` map with destination addresses;
   meaning of the returned cell is not visible in this excerpt. */
52 cell compute_heap_forwarding();
/* Sweep helper used by free_unmarked() below: reclaims the allocated,
   unmarked block `scan` and returns the new `prev` for coalescing. */
55 heap_block *free_allocated(heap_block *prev, heap_block *scan);
57 /* After code GC, all referenced code blocks have status set to B_MARKED, so any
58 which are allocated and not marked can be reclaimed. */
/* Sweep phase: one linear pass over the heap that (a) coalesces adjacent
   free blocks by growing `prev` instead of listing each one separately,
   (b) clears the mark bit on surviving blocks, and (c) reclaims unmarked
   allocated blocks via free_allocated(). `prev` trails `scan` and, while
   it is a free block, accumulates the current free run; the run is pushed
   onto the free list only when a live block ends it (or at loop exit).
   NOTE(review): original lines 60-62, 65-67, 69, 72-74, 76, 80-83, 85 and
   87-88 are elided from this excerpt — the enclosing while-loop, several
   brace lines, the `prev = scan` updates, and the role of `iter` are not
   visible; confirm the control flow against the full source. */
59 template<typename Iterator> void free_unmarked(Iterator &iter)
63 heap_block *prev = NULL;
64 heap_block *scan = first_block();
/* Case 1: scan is already free — merge it into a preceding free block by
   extending prev's size (coalescing), rather than listing it twice. */
68 if(scan->type() == FREE_BLOCK_TYPE)
70 if(prev && prev->type() == FREE_BLOCK_TYPE)
71 prev->set_size(prev->size() + scan->size());
/* Case 2: scan is live (marked) — first flush the free run that just
   ended, then clear scan's mark bit for the next collection. */
75 else if(scan->marked_p())
77 if(prev && prev->type() == FREE_BLOCK_TYPE)
78 add_to_free_list((free_heap_block *)prev);
79 scan->set_marked_p(false);
/* Case 3 (elided else-branch): scan is allocated but unmarked — reclaim
   it; free_allocated() returns the updated trailing pointer. */
84 prev = free_allocated(prev,scan);
86 scan = next_block(scan);
/* After the loop: flush a free run that reaches the end of the heap. */
89 if(prev && prev->type() == FREE_BLOCK_TYPE)
90 add_to_free_list((free_heap_block *)prev);