/* Number of size-segregated small free lists; blocks too large for these
   go on heap_free_list::large_blocks -- presumably indexed by
   size / block_size_increment; TODO confirm against add_to_free_list */
4 static const cell free_list_count = 32;
/* Block size granularity in bytes; also the granularity of the heap's
   mark_bits bitmap (see the mark_bits<heap_block,block_size_increment>
   member below) */
5 static const cell block_size_increment = 16;
/* Free-list bookkeeping for the heap: an array of exact-size bins for
   small blocks plus a single overflow list for everything larger.
   NOTE(review): the closing brace of this struct is outside this excerpt. */
7 struct heap_free_list {
/* Size-segregated bins; bin i presumably holds blocks of size
   (i+1) * block_size_increment -- TODO confirm */
8 free_heap_block *small_blocks[free_list_count];
/* Chain of free blocks too large for any small bin */
9 free_heap_block *large_blocks;
/* Mark bitmap covering the heap at block_size_increment granularity;
   queried via state->is_marked_p() during sweeping (see free_unmarked) */
16 mark_bits<heap_block,block_size_increment> *state;
/* Maps a block's old address to its new location -- presumably built
   during compaction so references can be updated; verify against callers */
17 unordered_map<heap_block *, char *> forwarding;
/* Construct a heap of the given byte size; executable_p selects an
   executable mapping for code blocks -- assumption, confirm in .cpp */
19 explicit heap(bool secure_gc_, cell size, bool executable_p);
/* Return the block immediately following `block`, computed by advancing
   the address by the block's own size. NOTE(review): intervening lines
   (likely braces and an end-of-heap check) are elided from this excerpt. */
22 inline heap_block *next_block(heap_block *block)
24 cell next = ((cell)block + block->size());
28 return (heap_block *)next;
/* First block in the heap: blocks are laid out contiguously starting at
   the beginning of the underlying segment */
31 inline heap_block *first_block()
33 return (heap_block *)seg->start;
/* One-past-the-end limit of the heap (the segment's end address) --
   presumably a sentinel for iteration, not a dereferenceable block */
36 inline heap_block *last_block()
38 return (heap_block *)seg->end;
/* Reset all free-list bins to empty */
41 void clear_free_list();
/* Allocate and initialize the backing segment for a heap of `size` bytes */
42 void new_heap(cell size);
/* Push `block` onto the bin matching its size (or large_blocks) */
43 void add_to_free_list(free_heap_block *block);
/* Rebuild the free list over the heap -- exact semantics of `size`
   not visible here; TODO confirm in the .cpp */
44 void build_free_list(cell size);
/* Debug check that `block` really is on a free list -- assumption
   based on the name; verify */
45 void assert_free_block(free_heap_block *block);
/* Find a free block of at least `size` bytes, or NULL on failure --
   return-on-failure convention assumed, confirm */
46 free_heap_block *find_free_block(cell size);
/* Carve `size` bytes off `block`, returning the remainder to the free
   list -- assumption; verify which piece is returned */
47 free_heap_block *split_free_block(free_heap_block *block, cell size);
/* Allocate a block of `size` bytes with the given type tag */
48 heap_block *heap_allot(cell size, cell type);
/* Return `block` to the free list */
49 void heap_free(heap_block *block);
/* Set the mark bit(s) for `block` in `state` */
50 void mark_block(heap_block *block);
/* Report usage statistics: bytes in use, total free, and largest free block */
51 void heap_usage(cell *used, cell *total_free, cell *max_free);
/* Sweep helper: called on an allocated-but-unmarked block; presumably
   frees `scan` and returns the updated `prev` -- see free_unmarked */
55 heap_block *free_allocated(heap_block *prev, heap_block *scan);
57 /* After code GC, all referenced code blocks have their mark bits set in
58 `state`, so any block which is allocated but not marked can be reclaimed. */
/* Sweep the heap: walk every block, coalescing runs of free/unmarked
   blocks and returning them to the free list; marked blocks survive.
   NOTE(review): the loop header, braces, and several statements (including
   any use of `iter` -- presumably applied to surviving marked blocks) are
   elided from this excerpt; comments below describe only the visible arms. */
59 template<typename Iterator> void free_unmarked(Iterator &iter)
/* `prev` tracks the block preceding `scan`; when it is a free block it
   accumulates (coalesces) adjacent reclaimed space */
63 heap_block *prev = NULL;
64 heap_block *scan = first_block();
/* Already-free block: merge it into a preceding free block if one is
   pending, extending prev's size to swallow scan */
68 if(scan->type() == FREE_BLOCK_TYPE)
70 if(prev && prev->type() == FREE_BLOCK_TYPE)
71 prev->set_size(prev->size() + scan->size());
/* Live (marked) block: flush any accumulated free run onto the free
   list before moving past it */
75 else if(state->is_marked_p(scan))
77 if(prev && prev->type() == FREE_BLOCK_TYPE)
78 add_to_free_list((free_heap_block *)prev);
/* Allocated but unmarked: reclaim via free_allocated, which yields the
   new `prev` -- exact contract not visible here; see its declaration */
83 prev = free_allocated(prev,scan);
/* Advance to the next contiguous block */
85 scan = next_block(scan);
/* Loop done: flush a trailing free run that was never followed by a
   marked block */
88 if(prev && prev->type() == FREE_BLOCK_TYPE)
89 add_to_free_list((free_heap_block *)prev);