method returns, except when compiling words with the non-optimizing
compiler at the beginning of bootstrap */
this->code->uninitialized_blocks.insert(std::make_pair(compiled,literals.value()));
+ this->code->all_blocks.insert(compiled);
/* next time we do a minor GC, we have to trace this code block, since
the fields of the code_block struct might point into nursery or aging */
assert(!uninitialized_p(compiled));
points_to_nursery.erase(compiled);
points_to_aging.erase(compiled);
+ all_blocks.erase(compiled);
allocator->free(compiled);
}
factor::flush_icache(seg->start,seg->size);
}
-struct address_finder {
- cell address;
- code_block *found_code_block;
-
- address_finder(cell address)
- : address(address), found_code_block(NULL) {}
+code_block *code_heap::code_block_for_address(cell address)
+{
+ std::set<code_block*>::const_iterator blocki =
+ all_blocks.upper_bound((code_block*)address);
+ assert(blocki != all_blocks.begin());
+ --blocki;
+ code_block* found_block = *blocki;
+ assert((cell)found_block->entry_point() <= address
+ && address - (cell)found_block->entry_point() < found_block->size());
+ return found_block;
+}
- void operator()(code_block *block, cell size)
+void code_heap::update_all_blocks_map(mark_bits<code_block> *code_forwarding_map)
+{
+ std::cout << "updating block map" << std::endl;
+ std::set<code_block *> new_all_blocks;
+ for (std::set<code_block *>::const_iterator oldi = all_blocks.begin();
+ oldi != all_blocks.end();
+ ++oldi)
{
- if ((cell)block->entry_point() <= address
- && address - (cell)block->entry_point() < block->size())
- {
- assert(found_code_block == NULL);
- found_code_block = block;
- }
+ code_block *new_block = code_forwarding_map->forward_block(*oldi);
+ std::cout << "compact " << (void*)*oldi << " -> " << (void*)new_block << std::endl;
+ new_all_blocks.insert(new_block);
}
-};
-
-code_block *code_heap::code_block_for_address(cell address)
-{
- address_finder finder(address);
- allocator->iterate(finder);
- return finder.found_code_block;
+ std::cout << "updated" << std::endl;
+ all_blocks.swap(new_all_blocks);
}
/* Allocate a code heap during startup */
const cell seh_area_size = 0;
#endif
+struct compaction_fixup;
+
/* NOTE(review): this declaration appears to be diff residue — the `+`
   markers are patch prefixes and some member-function interior lines
   have been spliced in below. Confirm against the full file. */
struct code_heap {
/* The actual memory area */
segment *seg;
/* Memory allocator */
free_list_allocator<code_block> *allocator;
/* Ordered set of every live code block, keyed by start address;
   consulted by code_block_for_address() for O(log n) lookup and
   rebuilt by update_all_blocks_map() after compaction. */
+ std::set<code_block *> all_blocks;
+
/* Keys are blocks which need to be initialized by initialize_code_block().
Values are literal tables. Literal table arrays are GC roots until the
time the block is initialized, after which point they are discarded. */
void flush_icache();
void guard_safepoint();
void unguard_safepoint();
/* Remaps all_blocks through the compactor's forwarding map */
+ void update_all_blocks_map(mark_bits<code_block> *code_forwarding_map);
code_block *code_block_for_address(cell address);
/* NOTE(review): the two lines below look like the interior of an
   inline member function (a safepoint-address check) whose signature
   is missing from this chunk — verify against the original file. */
cell page_mask = ~(getpagesize() - 1);
return (addr & page_mask) == (cell)safepoint_page;
}
+
};
struct code_heap_room {