cell factor_vm::frame_type(stack_frame *frame)
{
- return frame_code(frame)->type;
+ return frame_code(frame)->type();
}
cell factor_vm::frame_executing(stack_frame *frame)
void factor_vm::flush_icache_for(code_block *block)
{
- flush_icache((cell)block,block->size);
+ flush_icache((cell)block,block->size());
}
int factor_vm::number_of_parameters(relocation_type type)
/* Update pointers to literals from compiled code. */
void factor_vm::update_literal_references(code_block *compiled)
{
- if(!compiled->needs_fixup)
+ if(!code->needs_fixup_p(compiled))
{
literal_references_updater updater(this);
iterate_relocations(compiled,updater);
or dlsyms. */
void factor_vm::update_word_references(code_block *compiled)
{
- if(compiled->needs_fixup)
+ if(code->needs_fixup_p(compiled))
relocate_code_block(compiled);
/* update_word_references() is always applied to every block in
the code heap. Since it resets all call sites to point to
are referenced after this is done. So instead of polluting
the code heap with dead PICs that will be freed on the next
GC, we add them to the free list immediately. */
- else if(compiled->type == PIC_TYPE)
+ else if(compiled->type() == PIC_TYPE)
code->code_heap_free(compiled);
else
{
/* Perform all fixups on a code block */
void factor_vm::relocate_code_block(code_block *compiled)
{
- compiled->needs_fixup = false;
+ code->needs_fixup.erase(compiled);
code_block_relocator relocator(this);
iterate_relocations(compiled,relocator);
flush_icache_for(compiled);
}
/* Might GC */
-code_block *factor_vm::allot_code_block(cell size)
+code_block *factor_vm::allot_code_block(cell size, cell type)
{
- heap_block *block = code->heap_allot(size + sizeof(code_block));
+ heap_block *block = code->heap_allot(size + sizeof(code_block),type);
/* If allocation failed, do a code GC */
if(block == NULL)
{
gc();
- block = code->heap_allot(size + sizeof(code_block));
+ block = code->heap_allot(size + sizeof(code_block),type);
/* Insufficient room even after code GC, give up */
if(block == NULL)
gc_root<array> literals(literals_,this);
cell code_length = align8(array_capacity(code.untagged()));
- code_block *compiled = allot_code_block(code_length);
-
- /* compiled header */
- compiled->type = type;
- compiled->needs_fixup = true;
+ code_block *compiled = allot_code_block(code_length,type);
/* slight space optimization */
if(relocation.type() == BYTE_ARRAY_TYPE && array_capacity(relocation.untagged()) == 0)
/* next time we do a minor GC, we have to scan the code heap for
literals */
this->code->write_barrier(compiled);
+ this->code->needs_fixup.insert(compiled);
return compiled;
}
youngest_referenced_generation = myvm->data->nursery();
}
+bool code_heap::needs_fixup_p(code_block *compiled)
+{
+ return needs_fixup.count(compiled) > 0;
+}
+
void code_heap::code_heap_free(code_block *compiled)
{
remembered_set.erase(compiled);
+ needs_fixup.erase(compiled);
heap_free(compiled);
}
{
struct code_heap : heap {
+ /* Set of just-compiled code blocks that still need full relocation.
+ Blocks are inserted by add_code_block and erased by relocate_code_block
+ or code_heap_free. */
+ unordered_set<code_block *> needs_fixup;
+
/* Maps code blocks to the youngest generation containing
one of their literals. If this is tenured (0), the code block
is not part of the remembered set. */
unordered_map<code_block *, cell> remembered_set;
-
+
/* Minimum value in the above map. */
cell youngest_referenced_generation;
explicit code_heap(factor_vm *myvm, cell size);
void write_barrier(code_block *compiled);
+ bool needs_fixup_p(code_block *compiled);
void code_heap_free(code_block *compiled);
};
while(scan)
{
const char *status;
- switch(scan->status)
- {
- case B_FREE:
+ if(scan->type() == FREE_BLOCK_TYPE)
status = "free";
- break;
- case B_ALLOCATED:
+ else if(scan->marked_p())
+ {
reloc_size += object_size(((code_block *)scan)->relocation);
literal_size += object_size(((code_block *)scan)->literals);
- status = "allocated";
- break;
- case B_MARKED:
+ status = "marked";
+ }
+ else
+ {
reloc_size += object_size(((code_block *)scan)->relocation);
literal_size += object_size(((code_block *)scan)->literals);
- status = "marked";
- break;
- default:
- status = "invalid";
- break;
+ status = "allocated";
}
print_cell_hex((cell)scan); print_string(" ");
- print_cell_hex(scan->size); print_string(" ");
+ print_cell_hex(scan->size()); print_string(" ");
print_string(status); print_string("\n");
scan = code->next_block(scan);
void heap::add_to_free_list(free_heap_block *block)
{
- if(block->size < free_list_count * block_size_increment)
+ if(block->size() < free_list_count * block_size_increment)
{
- int index = block->size / block_size_increment;
+ int index = block->size() / block_size_increment;
block->next_free = free.small_blocks[index];
free.small_blocks[index] = block;
}
/* Add all free blocks to the free list */
while(scan && scan < (heap_block *)end)
{
- switch(scan->status)
- {
- case B_FREE:
+ if(scan->type() == FREE_BLOCK_TYPE)
add_to_free_list((free_heap_block *)scan);
- break;
- case B_ALLOCATED:
- break;
- default:
- myvm->critical_error("Invalid scan->status",(cell)scan);
- break;
- }
prev = scan;
scan = next_block(scan);
branch is only taken after loading a new image, not after code GC */
if((cell)(end + 1) <= seg->end)
{
- end->status = B_FREE;
- end->size = seg->end - (cell)end;
+ end->set_marked_p(false);
+ end->set_type(FREE_BLOCK_TYPE);
+ end->set_size(seg->end - (cell)end);
/* add final free block */
add_to_free_list(end);
/* even if there's no room at the end of the heap for a new
free block, we might have to jigger it up by a few bytes in
case prev + prev->size */
- if(prev) prev->size = seg->end - (cell)prev;
+ if(prev) prev->set_size(seg->end - (cell)prev);
}
}
void heap::assert_free_block(free_heap_block *block)
{
- if(block->status != B_FREE)
+ if(block->type() != FREE_BLOCK_TYPE)
myvm->critical_error("Invalid block in free list",(cell)block);
}
while(block)
{
assert_free_block(block);
- if(block->size >= size)
+ if(block->size() >= size)
{
if(prev)
prev->next_free = block->next_free;
free_heap_block *heap::split_free_block(free_heap_block *block, cell size)
{
- if(block->size != size )
+ if(block->size() != size )
{
/* split the block in two */
free_heap_block *split = (free_heap_block *)((cell)block + size);
- split->status = B_FREE;
- split->size = block->size - size;
+ split->set_type(FREE_BLOCK_TYPE);
+ split->set_size(block->size() - size);
split->next_free = block->next_free;
- block->size = size;
+ block->set_size(size);
add_to_free_list(split);
}
}
/* Allocate a block of memory from the mark and sweep GC heap */
-heap_block *heap::heap_allot(cell size)
+heap_block *heap::heap_allot(cell size, cell type)
{
size = (size + block_size_increment - 1) & ~(block_size_increment - 1);
if(block)
{
block = split_free_block(block,size);
-
- block->status = B_ALLOCATED;
+ block->set_type(type);
+ block->set_marked_p(false);
return block;
}
else
/* Deallocates a block manually */
void heap::heap_free(heap_block *block)
{
- block->status = B_FREE;
+ block->set_type(FREE_BLOCK_TYPE);
add_to_free_list((free_heap_block *)block);
}
void heap::mark_block(heap_block *block)
{
- /* If already marked, do nothing */
- switch(block->status)
- {
- case B_MARKED:
- return;
- case B_ALLOCATED:
- block->status = B_MARKED;
- break;
- default:
- myvm->critical_error("Marking the wrong block",(cell)block);
- break;
- }
+ block->set_marked_p(true);
}
/* If in the middle of code GC, we have to grow the heap, data GC restarts from
while(scan)
{
- if(scan->status == B_MARKED)
- scan->status = B_ALLOCATED;
-
+ scan->set_marked_p(false);
scan = next_block(scan);
}
}
while(scan)
{
- switch(scan->status)
+ cell size = scan->size();
+
+ if(scan->type() == FREE_BLOCK_TYPE)
{
- case B_ALLOCATED:
- *used += scan->size;
- break;
- case B_FREE:
- *total_free += scan->size;
- if(scan->size > *max_free)
- *max_free = scan->size;
- break;
- default:
- myvm->critical_error("Invalid scan->status",(cell)scan);
+ *total_free += size;
+ if(size > *max_free)
+ *max_free = size;
}
+ else
+ *used += size;
scan = next_block(scan);
}
scan = next_block(scan);
/* this is the last block in the heap, and it is free */
- if(scan->status == B_FREE)
+ if(scan->type() == FREE_BLOCK_TYPE)
return (cell)scan - seg->start;
/* otherwise the last block is allocated */
else
while(scan)
{
- if(scan->status == B_ALLOCATED)
+ if(scan->type() != FREE_BLOCK_TYPE)
{
forwarding[scan] = address;
- address += scan->size;
+ address += scan->size();
}
- else if(scan->status == B_MARKED)
- myvm->critical_error("Why is the block marked?",0);
-
scan = next_block(scan);
}
{
heap_block *next = next_block(scan);
- if(scan->status == B_ALLOCATED)
- memmove(forwarding[scan],scan,scan->size);
+ if(scan->type() != FREE_BLOCK_TYPE)
+ memmove(forwarding[scan],scan,scan->size());
scan = next;
}
}
heap_block *heap::free_allocated(heap_block *prev, heap_block *scan)
{
if(myvm->secure_gc)
- memset(scan + 1,0,scan->size - sizeof(heap_block));
+ memset(scan + 1,0,scan->size() - sizeof(heap_block));
- if(prev && prev->status == B_FREE)
+ if(prev && prev->type() == FREE_BLOCK_TYPE)
{
- prev->size += scan->size;
+ prev->set_size(prev->size() + scan->size());
return prev;
}
else
{
- scan->status = B_FREE;
+ scan->set_type(FREE_BLOCK_TYPE);
return scan;
}
}
inline heap_block *next_block(heap_block *block)
{
- cell next = ((cell)block + block->size);
+ cell next = ((cell)block + block->size());
if(next == seg->end)
return NULL;
else
void assert_free_block(free_heap_block *block);
free_heap_block *find_free_block(cell size);
free_heap_block *split_free_block(free_heap_block *block, cell size);
- heap_block *heap_allot(cell size);
+ heap_block *heap_allot(cell size, cell type);
void heap_free(heap_block *block);
void mark_block(heap_block *block);
void unmark_marked();
while(scan)
{
- switch(scan->status)
+ if(scan->type() == FREE_BLOCK_TYPE)
{
- case B_ALLOCATED:
- prev = free_allocated(prev,scan);
- break;
- case B_FREE:
- if(prev && prev->status == B_FREE)
- prev->size += scan->size;
+ if(prev && prev->type() == FREE_BLOCK_TYPE)
+ prev->set_size(prev->size() + scan->size());
else
prev = scan;
- break;
- case B_MARKED:
- if(prev && prev->status == B_FREE)
+ }
+ else if(scan->marked_p())
+ {
+ if(prev && prev->type() == FREE_BLOCK_TYPE)
add_to_free_list((free_heap_block *)prev);
- scan->status = B_ALLOCATED;
+ scan->set_marked_p(false);
prev = scan;
iter(scan);
- break;
}
-
+ else
+ prev = free_allocated(prev,scan);
+
scan = next_block(scan);
}
-
- if(prev && prev->status == B_FREE)
+
+ if(prev && prev->type() == FREE_BLOCK_TYPE)
add_to_free_list((free_heap_block *)prev);
}
};
check_code_pointer((cell)old_xt);
code_block *old_block = (code_block *)old_xt - 1;
- cell old_type = old_block->type;
+ cell old_type = old_block->type();
#ifdef FACTOR_DEBUG
/* The call target was either another PIC,
#define TYPE_COUNT 15
/* Not a real type, but code_block's type field can be set to this */
-#define PIC_TYPE 69
+#define PIC_TYPE 42
+#define FREE_BLOCK_TYPE 69
/* Constants used when floating-point trap exceptions are thrown */
enum
};
/* The compiled code heap is structured into blocks. */
-enum block_status
-{
- B_FREE,
- B_ALLOCATED,
- B_MARKED
-};
-
struct heap_block
{
- unsigned char status; /* free or allocated? */
- unsigned char type; /* this is WORD_TYPE or QUOTATION_TYPE */
- unsigned char unused;
- unsigned char needs_fixup; /* is this a new block that needs full fixup? */
+ /* Bit 0: mark
+ Bits 1-7: type (WORD_TYPE, QUOTATION_TYPE, PIC_TYPE, or FREE_BLOCK_TYPE)
+ Bits 8 and up: size in bytes, including this header */
+ cell header;
- /* In bytes, includes this header */
- cell size;
+ bool marked_p() { return header & 1; }
+ void set_marked_p(bool marked)
+ {
+ if(marked)
+ header |= 1;
+ else
+ header &= ~1;
+ }
+
+ cell type() { return (header >> 1) & 0x7f; }
+ void set_type(cell type)
+ {
+ header = ((header & ~(0x7f << 1)) | (type << 1));
+ }
+
+ cell size() { return (header >> 8); }
+ void set_size(cell size)
+ {
+ header = (header & 0xff) | (size << 8);
+ }
};
struct free_heap_block : public heap_block
{
- free_heap_block *next_free;
+ free_heap_block *next_free;
};
struct code_block : public heap_block
{
+ cell unused;
cell literals; /* # bytes */
cell relocation; /* tagged pointer to byte-array or f */
-
+
void *xt() { return (void *)(this + 1); }
};
#if __GNUC__ == 4
#include <tr1/unordered_map>
+ #include <tr1/unordered_set>
namespace factor
{
using std::tr1::unordered_map;
+ using std::tr1::unordered_set;
}
#elif __GNUC__ == 3
#include <boost/unordered_map.hpp>
+ #include <boost/unordered_set.hpp>
namespace factor
{
using boost::unordered_map;
+ using boost::unordered_set;
}
#else
#error Factor requires GCC 3.x or later
void factor_vm::set_quot_xt(quotation *quot, code_block *code)
{
- if(code->type != QUOTATION_TYPE)
+ if(code->type() != QUOTATION_TYPE)
critical_error("Bad param to set_quot_xt",(cell)code);
quot->code = code;
void check_code_address(cell address);
void relocate_code_block(code_block *compiled);
void fixup_labels(array *labels, code_block *compiled);
- code_block *allot_code_block(cell size);
- code_block *add_code_block(cell type,cell code_,cell labels_,cell relocation_,cell literals_);
+ code_block *allot_code_block(cell size, cell type);
+ code_block *add_code_block(cell type, cell code_, cell labels_, cell relocation_, cell literals_);
inline bool stack_traces_p()
{
return userenv[STACK_TRACES_ENV] != F;
while(scan)
{
- if(scan->status != B_FREE)
+ if(scan->type() != FREE_BLOCK_TYPE)
iter((code_block *)scan);
scan = code->next_block(scan);
}
if(profiling_p)
{
dpush(allot_cell((cell)w->profiling->xt()));
- dpush(allot_cell((cell)w->profiling + w->profiling->size));
+ dpush(allot_cell((cell)w->profiling + w->profiling->size()));
}
else
{
dpush(allot_cell((cell)w->code->xt()));
- dpush(allot_cell((cell)w->code + w->code->size));
+ dpush(allot_cell((cell)w->code + w->code->size()));
}
}
inline bool word_optimized_p(word *word)
{
- return word->code->type == WORD_TYPE;
+ return word->code->type() == WORD_TYPE;
}
}