/* Each forwarding-table entry summarizes one 64-line group of mark bits
(one u64 word's worth of lines). */
const int forwarding_granularity = 64;
6 template<typename Block, int Granularity> struct mark_bits {
14 void clear_mark_bits()
16 memset(marked,0,bits_size * sizeof(u64));
19 void clear_allocated_bits()
21 memset(allocated,0,bits_size * sizeof(u64));
24 void clear_forwarding()
26 memset(forwarding,0,bits_size * sizeof(cell));
29 explicit mark_bits(cell start_, cell size_) :
32 bits_size(size / Granularity / forwarding_granularity),
33 marked(new u64[bits_size]),
34 allocated(new u64[bits_size]),
35 forwarding(new cell[bits_size])
38 clear_allocated_bits();
52 cell block_line(Block *address)
54 return (((cell)address - start) / Granularity);
57 Block *line_block(cell line)
59 return (Block *)(line * Granularity + start);
62 std::pair<cell,cell> bitmap_deref(Block *address)
64 cell line_number = block_line(address);
65 cell word_index = (line_number >> 6);
66 cell word_shift = (line_number & 63);
69 assert(word_index < bits_size);
72 return std::make_pair(word_index,word_shift);
75 bool bitmap_elt(u64 *bits, Block *address)
77 std::pair<cell,cell> pair = bitmap_deref(address);
78 return (bits[pair.first] & ((u64)1 << pair.second)) != 0;
81 void set_bitmap_range(u64 *bits, Block *address)
83 std::pair<cell,cell> start = bitmap_deref(address);
84 std::pair<cell,cell> end = bitmap_deref(address->next());
86 u64 start_mask = ((u64)1 << start.second) - 1;
87 u64 end_mask = ((u64)1 << end.second) - 1;
89 if(start.first == end.first)
90 bits[start.first] |= start_mask ^ end_mask;
93 bits[start.first] |= ~start_mask;
95 for(cell index = start.first + 1; index < end.first; index++)
96 bits[index] = (u64)-1;
98 bits[end.first] |= end_mask;
102 bool is_marked_p(Block *address)
104 return bitmap_elt(marked,address);
107 void set_marked_p(Block *address)
109 set_bitmap_range(marked,address);
112 bool is_allocated_p(Block *address)
114 return bitmap_elt(allocated,address);
117 void set_allocated_p(Block *address)
119 set_bitmap_range(allocated,address);
122 /* From http://chessprogramming.wikispaces.com/Population+Count */
125 u64 k1 = 0x5555555555555555ll;
126 u64 k2 = 0x3333333333333333ll;
127 u64 k4 = 0x0f0f0f0f0f0f0f0fll;
128 u64 kf = 0x0101010101010101ll;
129 x = x - ((x >> 1) & k1); // put count of each 2 bits into those 2 bits
130 x = (x & k2) + ((x >> 2) & k2); // put count of each 4 bits into those 4 bits
131 x = (x + (x >> 4)) & k4 ; // put count of each 8 bits into those 8 bits
132 x = (x * kf) >> 56; // returns 8 most significant bits of x + (x<<8) + (x<<16) + (x<<24) + ...
137 /* The eventual destination of a block after compaction is just the number
138 of marked blocks before it. Live blocks must be marked on entry. */
139 void compute_forwarding()
142 for(cell index = 0; index < bits_size; index++)
144 forwarding[index] = accum;
145 accum += popcount(marked[index]);
149 /* We have the popcount for every 64 entries; look up and compute the rest */
150 Block *forward_block(Block *original)
152 std::pair<cell,cell> pair = bitmap_deref(original);
154 cell approx_popcount = forwarding[pair.first];
155 u64 mask = ((u64)1 << pair.second) - 1;
157 cell new_line_number = approx_popcount + popcount(marked[pair.first] & mask);
158 return line_block(new_line_number);
162 template<typename Block, int Granularity, typename Iterator> struct heap_compacter {
163 mark_bits<Block,Granularity> *state;
167 explicit heap_compacter(mark_bits<Block,Granularity> *state_, Block *address_, Iterator &iter_) :
168 state(state_), address((char *)address_), iter(iter_) {}
170 void operator()(Block *block, cell size)
172 if(this->state->is_marked_p(block))
174 memmove(address,block,size);
175 iter((Block *)address,size);