*/
inline object *factor_vm::allot_object(cell type, cell size)
{
+#ifdef FACTOR_DEBUG
+ assert(!current_gc);
+#endif
+
/* If the object is smaller than the nursery, allocate it in the nursery,
after a GC if needed */
if(nursery.size > size)
VM_C_API bool to_boolean(cell value, factor_vm *parent)
{
- return parent->to_boolean(value);
+ return to_boolean(value);
}
}
VM_C_API void box_boolean(bool value, factor_vm *vm);
VM_C_API bool to_boolean(cell value, factor_vm *vm);
-inline cell factor_vm::tag_boolean(cell untagged)
-{
- return (untagged ? true_object : false_object);
-}
-
-inline bool factor_vm::to_boolean(cell value)
+inline static bool to_boolean(cell value)
{
return value != false_object;
}
callbacks = new callback_heap(size,this);
}
-void callback_heap::update(callback *stub)
+void callback_heap::update(code_block *stub)
{
tagged<array> code_template(parent->special_objects[CALLBACK_STUB]);
cell rel_class = untag_fixnum(array_nth(code_template.untagged(),1));
+ cell rel_type = untag_fixnum(array_nth(code_template.untagged(),2));
cell offset = untag_fixnum(array_nth(code_template.untagged(),3));
- instruction_operand op(rel_class,offset + (cell)(stub + 1));
- op.store_value((cell)(stub->compiled + 1));
+ relocation_entry rel(
+ (relocation_type)rel_type,
+ (relocation_class)rel_class,
+ offset);
- flush_icache((cell)stub,stub->size);
+ instruction_operand op(rel,stub,0);
+ op.store_value((cell)callback_xt(stub));
+
+ stub->flush_icache();
}
-callback *callback_heap::add(code_block *compiled)
+code_block *callback_heap::add(cell owner)
{
tagged<array> code_template(parent->special_objects[CALLBACK_STUB]);
tagged<byte_array> insns(array_nth(code_template.untagged(),0));
cell size = array_capacity(insns.untagged());
- cell bump = align(size,sizeof(cell)) + sizeof(callback);
+ cell bump = align(size + sizeof(code_block),data_alignment);
if(here + bump > seg->end) fatal_error("Out of callback space",0);
- callback *stub = (callback *)here;
- stub->compiled = compiled;
- memcpy(stub + 1,insns->data<void>(),size);
-
- stub->size = align(size,sizeof(cell));
+ free_heap_block *free_block = (free_heap_block *)here;
+ free_block->make_free(bump);
here += bump;
+ code_block *stub = (code_block *)free_block;
+ stub->owner = owner;
+ stub->literals = false_object;
+ stub->relocation = false_object;
+
+ memcpy(stub->xt(),insns->data<void>(),size);
update(stub);
return stub;
{
tagged<word> w(dpop());
w.untag_check(this);
-
- callback *stub = callbacks->add(w->code);
- box_alien(stub + 1);
+ box_alien(callbacks->add(w.value())->xt());
}
}
namespace factor
{
-struct callback {
- cell size;
- code_block *compiled;
- void *code() { return (void *)(this + 1); }
-};
+/* The callback heap is used to store the machine code that alien-callbacks
+actually jump to when C code invokes them.
+
+The callback heap has entries that look like code_blocks from the code heap,
+but callback heap entries are allocated contiguously, never deallocated, and all
+fields but the owner are set to false_object. The owner points to the callback
+bottom word, whose XT is the callback body itself, generated by the optimizing
+compiler. The machine code that follows a callback stub consists of a single
+CALLBACK_STUB machine code template, which performs a jump to a "far" address
+(on PowerPC and x86-64, it's loaded into a register first).
+
+GC updates the CALLBACK_STUB code if the code block of the callback bottom word
+is ever moved. The callback stub itself won't move, though, and is never
+deallocated. This means that the callback stub itself is a stable function
+pointer that C code can hold on to until the associated Factor VM exits.
+
+Since callback stubs are GC roots, and are never deallocated, the associated
+callback code in the code heap is also never deallocated.
+
+The callback heap is not saved in the image. Running GC in a new session after
+saving the image will deallocate any code heap entries that were only reachable
+from the callback heap in the previous session when the image was saved. */
struct callback_heap {
segment *seg;
explicit callback_heap(cell size, factor_vm *parent);
~callback_heap();
- callback *add(code_block *compiled);
- void update(callback *stub);
+ void *callback_xt(code_block *stub)
+ {
+ word *w = (word *)UNTAG(stub->owner);
+ return w->xt;
+ }
+
+ void update(code_block *stub);
+ code_block *add(cell owner);
- callback *next(callback *stub)
+ code_block *next(code_block *stub)
{
- return (callback *)((cell)stub + stub->size + sizeof(callback));
+ return (code_block *)((cell)stub + stub->size());
}
- template<typename Iterator> void iterate(Iterator &iter)
+ template<typename Iterator> void each_callback(Iterator &iter)
{
- callback *scan = (callback *)seg->start;
- callback *end = (callback *)here;
+ code_block *scan = (code_block *)seg->start;
+ code_block *end = (code_block *)here;
while(scan < end)
{
iter(scan);
namespace factor
{
+/* Code block visitors iterate over sets of code blocks, applying a functor to
+each one. The functor returns a new code_block pointer, which may or may not
+equal the old one. This is stored back to the original location.
+
+This is used by GC's sweep and compact phases, and the implementation of the
+modify-code-heap primitive.
+
+Iteration is driven by visit_*() methods. Some of them define GC roots:
+- visit_context_code_blocks()
+- visit_callback_code_blocks() */
+
template<typename Visitor> struct code_block_visitor {
factor_vm *parent;
Visitor visitor;
void visit_object_code_block(object *obj);
void visit_embedded_code_pointers(code_block *compiled);
void visit_context_code_blocks();
- void visit_callback_code_blocks();
};
template<typename Visitor>
explicit embedded_code_pointers_visitor(Visitor visitor_) : visitor(visitor_) {}
- void operator()(relocation_entry rel, cell index, code_block *compiled)
+ void operator()(instruction_operand op)
{
- relocation_type type = rel.rel_type();
+ relocation_type type = op.rel_type();
if(type == RT_XT || type == RT_XT_PIC || type == RT_XT_PIC_TAIL)
- {
- instruction_operand op(rel.rel_class(),rel.rel_offset() + (cell)compiled->xt());
op.store_code_block(visitor(op.load_code_block()));
- }
}
};
if(!parent->code->needs_fixup_p(compiled))
{
embedded_code_pointers_visitor<Visitor> visitor(this->visitor);
- parent->iterate_relocations(compiled,visitor);
+ compiled->each_instruction_operand(visitor);
}
}
parent->iterate_active_frames(call_frame_visitor);
}
-template<typename Visitor>
-struct callback_code_block_visitor {
- callback_heap *callbacks;
- Visitor visitor;
-
- explicit callback_code_block_visitor(callback_heap *callbacks_, Visitor visitor_) :
- callbacks(callbacks_), visitor(visitor_) {}
-
- void operator()(callback *stub)
- {
- stub->compiled = visitor(stub->compiled);
- callbacks->update(stub);
- }
-};
-
-template<typename Visitor>
-void code_block_visitor<Visitor>::visit_callback_code_blocks()
-{
- callback_code_block_visitor<Visitor> callback_visitor(parent->callbacks,visitor);
- parent->callbacks->iterate(callback_visitor);
-}
-
}
explicit update_word_references_relocation_visitor(factor_vm *parent_) : parent(parent_) {}
- void operator()(relocation_entry rel, cell index, code_block *compiled)
+ void operator()(instruction_operand op)
{
- relocation_type type = rel.rel_type();
- instruction_operand op(rel.rel_class(),rel.rel_offset() + (cell)compiled->xt());
-
- switch(type)
+ switch(op.rel_type())
{
case RT_XT:
{
else
{
update_word_references_relocation_visitor visitor(this);
- iterate_relocations(compiled,visitor);
+ compiled->each_instruction_operand(visitor);
compiled->flush_icache();
}
}
explicit relocate_code_block_relocation_visitor(factor_vm *parent_) : parent(parent_) {}
- void operator()(relocation_entry rel, cell index, code_block *compiled)
+ void operator()(instruction_operand op)
{
- instruction_operand op(rel.rel_class(),rel.rel_offset() + (cell)compiled->xt());
- array *literals = (parent->to_boolean(compiled->literals)
- ? untag<array>(compiled->literals) : NULL);
+ code_block *compiled = op.parent_code_block();
+ array *literals = (to_boolean(compiled->literals) ? untag<array>(compiled->literals) : NULL);
+ cell index = op.parameter_index();
- switch(rel.rel_type())
+ switch(op.rel_type())
{
case RT_PRIMITIVE:
op.store_value(parent->compute_primitive_relocation(array_nth(literals,index)));
op.store_value(parent->compute_xt_pic_tail_relocation(array_nth(literals,index)));
break;
case RT_HERE:
- op.store_value(parent->compute_here_relocation(array_nth(literals,index),rel.rel_offset(),compiled));
+ op.store_value(parent->compute_here_relocation(array_nth(literals,index),op.rel_offset(),compiled));
break;
case RT_THIS:
op.store_value((cell)compiled->xt());
op.store_value(parent->decks_offset);
break;
default:
- critical_error("Bad rel type",rel.rel_type());
+ critical_error("Bad rel type",op.rel_type());
break;
}
}
{
code->needs_fixup.erase(compiled);
relocate_code_block_relocation_visitor visitor(this);
- iterate_relocations(compiled,visitor);
+ compiled->each_instruction_operand(visitor);
compiled->flush_icache();
}
/* Fixup labels. This is done at compile time, not image load time */
void factor_vm::fixup_labels(array *labels, code_block *compiled)
{
- cell i;
cell size = array_capacity(labels);
- for(i = 0; i < size; i += 3)
+ for(cell i = 0; i < size; i += 3)
{
- cell rel_class = untag_fixnum(array_nth(labels,i));
+ relocation_class rel_class = (relocation_class)untag_fixnum(array_nth(labels,i));
cell offset = untag_fixnum(array_nth(labels,i + 1));
cell target = untag_fixnum(array_nth(labels,i + 2));
- instruction_operand op(rel_class,offset + (cell)compiled->xt());
+ relocation_entry new_entry(RT_HERE,rel_class,offset);
+
+ instruction_operand op(new_entry,compiled,0);
op.store_value(target + (cell)compiled->xt());
}
}
{
factor::flush_icache((cell)this,size());
}
+
+ template<typename Iterator> void each_instruction_operand(Iterator &iter)
+ {
+ if(to_boolean(relocation))
+ {
+ byte_array *rels = (byte_array *)UNTAG(relocation);
+
+ cell index = 0;
+ cell length = (rels->capacity >> TAG_BITS) / sizeof(relocation_entry);
+
+ for(cell i = 0; i < length; i++)
+ {
+ relocation_entry rel = rels->data<relocation_entry>()[i];
+ iter(instruction_operand(rel,this,index));
+ index += rel.number_of_parameters();
+ }
+ }
+ }
};
}
allocator->free(compiled);
}
+void code_heap::flush_icache()
+{
+ factor::flush_icache(seg->start,seg->size);
+}
+
/* Allocate a code heap during startup */
void factor_vm::init_code_heap(cell size)
{
void factor_vm::update_code_heap_words()
{
word_updater updater(this);
- iterate_code_heap(updater);
+ each_code_block(updater);
}
void factor_vm::primitive_modify_code_heap()
void factor_vm::primitive_strip_stack_traces()
{
stack_trace_stripper stripper;
- iterate_code_heap(stripper);
+ each_code_block(stripper);
}
}
void set_marked_p(code_block *compiled);
void clear_mark_bits();
void code_heap_free(code_block *compiled);
+ void flush_icache();
};
struct code_heap_room {
slot_forwarder(slot_forwarder_),
code_forwarder(code_forwarder_) {}
- void operator()(relocation_entry rel, cell index, code_block *compiled)
+ void operator()(instruction_operand op)
{
- relocation_type type = rel.rel_type();
- instruction_operand op(rel.rel_class(),rel.rel_offset() + (cell)compiled->xt());
+ code_block *compiled = op.parent_code_block();
+ array *literals = (to_boolean(compiled->literals) ? untag<array>(compiled->literals) : NULL);
+ cell index = op.parameter_index();
- array *literals = (parent->to_boolean(compiled->literals)
- ? untag<array>(compiled->literals) : NULL);
+ cell old_offset = op.rel_offset() + (cell)old_address->xt();
- cell old_offset = rel.rel_offset() + (cell)old_address->xt();
-
- switch(type)
+ switch(op.rel_type())
{
case RT_IMMEDIATE:
op.store_value(slot_forwarder.visit_pointer(op.load_value(old_offset)));
op.store_code_block(code_forwarder.visit_code_block(op.load_code_block(old_offset)));
break;
case RT_HERE:
- op.store_value(parent->compute_here_relocation(array_nth(literals,index),rel.rel_offset(),compiled));
+ op.store_value(parent->compute_here_relocation(array_nth(literals,index),op.rel_offset(),compiled));
break;
case RT_THIS:
op.store_value((cell)compiled->xt());
slot_forwarder.visit_code_block_objects(new_address);
code_block_compaction_relocation_visitor<SlotForwarder> visitor(parent,old_address,slot_forwarder,code_forwarder);
- parent->iterate_relocations(new_address,visitor);
+ new_address->each_instruction_operand(visitor);
}
};
{
slot_forwarder.visit_contexts();
code_forwarder.visit_context_code_blocks();
- code_forwarder.visit_callback_code_blocks();
}
update_code_roots_for_compaction();
code_block_visitor<forwarder<code_block> > code_forwarder(this,forwarder<code_block>(code_forwarding_map));
if(trace_contexts_p)
- {
code_forwarder.visit_context_code_blocks();
- code_forwarder.visit_callback_code_blocks();
- }
/* Update code heap references in data heap */
object_grow_heap_updater updater(code_forwarder);
update_code_roots_for_compaction();
}
+void factor_vm::collect_compact(bool trace_contexts_p)
+{
+ collect_mark_impl(trace_contexts_p);
+ collect_compact_impl(trace_contexts_p);
+ code->flush_icache();
+}
+
+void factor_vm::collect_growing_heap(cell requested_bytes, bool trace_contexts_p)
+{
+ /* Grow the data heap and copy all live objects to the new heap. */
+ data_heap *old = data;
+ set_data_heap(data->grow(requested_bytes));
+ collect_mark_impl(trace_contexts_p);
+ collect_compact_code_impl(trace_contexts_p);
+ code->flush_icache();
+ delete old;
+}
+
}
free_block_count++;
free_space += size;
- if(size < free_list_count * block_granularity)
- small_blocks[size / block_granularity].push_back(block);
+ if(size < free_list_count * data_alignment)
+ small_blocks[size / data_alignment].push_back(block);
else
large_blocks.insert(block);
}
free_heap_block *free_list::find_free_block(cell size)
{
/* Check small free lists */
- if(size / block_granularity < free_list_count)
+ if(size / data_alignment < free_list_count)
{
- std::vector<free_heap_block *> &blocks = small_blocks[size / block_granularity];
+ std::vector<free_heap_block *> &blocks = small_blocks[size / data_alignment];
if(blocks.size() == 0)
{
/* Round up to a multiple of 'size' */
template<typename Block> Block *free_list_allocator<Block>::allot(cell size)
{
- size = align(size,block_granularity);
+ size = align(size,data_alignment);
free_heap_block *block = free_blocks.find_free_block(size);
if(block)
code_visitor.visit_context_code_blocks();
}
-void full_collector::trace_callback_code_blocks()
-{
- code_visitor.visit_callback_code_blocks();
-}
-
void full_collector::trace_object_code_block(object *obj)
{
code_visitor.visit_object_code_block(obj);
for(; iter < end; iter++)
{
code_root *root = *iter;
- code_block *block = (code_block *)(root->value & -block_granularity);
+ code_block *block = (code_block *)(root->value & -data_alignment);
if(root->valid && !state->marked_p(block))
root->valid = false;
}
for(; iter < end; iter++)
{
code_root *root = *iter;
- code_block *block = (code_block *)(root->value & -block_granularity);
+ code_block *block = (code_block *)(root->value & -data_alignment);
/* Offset of return address within 16-byte allocation line */
cell offset = root->value - (cell)block;
{
collector.trace_contexts();
collector.trace_context_code_blocks();
- collector.trace_callback_code_blocks();
}
while(!mark_stack.empty())
current_gc->event->op = collect_compact_op;
collect_compact_impl(trace_contexts_p);
}
- flush_icache(code->seg->start,code->seg->size);
-}
-
-void factor_vm::collect_compact(bool trace_contexts_p)
-{
- collect_mark_impl(trace_contexts_p);
- collect_compact_impl(trace_contexts_p);
- flush_icache(code->seg->start,code->seg->size);
-}
-
-void factor_vm::collect_growing_heap(cell requested_bytes, bool trace_contexts_p)
-{
- /* Grow the data heap and copy all live objects to the new heap. */
- data_heap *old = data;
- set_data_heap(data->grow(requested_bytes));
- collect_mark_impl(trace_contexts_p);
- collect_compact_code_impl(trace_contexts_p);
- flush_icache(code->seg->start,code->seg->size);
- delete old;
+ code->flush_icache();
}
}
explicit full_collector(factor_vm *parent_);
void trace_code_block(code_block *compiled);
void trace_context_code_blocks();
- void trace_callback_code_blocks();
void trace_object_code_block(object *obj);
};
alien *ptr = (alien *)obj;
- if(!parent->to_boolean(ptr->base))
- ptr->expired = parent->true_object;
- else
+ if(to_boolean(ptr->base))
ptr->update_address();
+ else
+ ptr->expired = parent->true_object;
break;
}
case DLL_TYPE:
data_visitor(slot_visitor<data_fixupper>(parent_,data_fixupper(data_offset_))),
code_visitor(code_fixupper(code_offset_)) {}
- void operator()(relocation_entry rel, cell index, code_block *compiled)
+ void operator()(instruction_operand op)
{
- relocation_type type = rel.rel_type();
- instruction_operand op(rel.rel_class(),rel.rel_offset() + (cell)compiled->xt());
-
- array *literals = (parent->to_boolean(compiled->literals)
- ? untag<array>(compiled->literals) : NULL);
+ code_block *compiled = op.parent_code_block();
+ array *literals = (to_boolean(compiled->literals) ? untag<array>(compiled->literals) : NULL);
+ cell index = op.parameter_index();
- cell old_offset = (cell)rel.rel_offset() + (cell)compiled->xt() - code_offset;
+ cell old_offset = op.rel_offset() + (cell)compiled->xt() - code_offset;
- switch(type)
+ switch(op.rel_type())
{
case RT_IMMEDIATE:
op.store_value(data_visitor.visit_pointer(op.load_value(old_offset)));
op.store_value(parent->compute_dlsym_relocation(literals,index));
break;
case RT_HERE:
- op.store_value(parent->compute_here_relocation(array_nth(literals,index),rel.rel_offset(),compiled));
+ op.store_value(parent->compute_here_relocation(array_nth(literals,index),op.rel_offset(),compiled));
break;
case RT_THIS:
op.store_value((cell)compiled->xt());
op.store_value(parent->decks_offset);
break;
default:
- critical_error("Bad rel type",rel.rel_type());
+ critical_error("Bad rel type",op.rel_type());
break;
}
}
data_visitor.visit_code_block_objects(compiled);
code_block_fixup_relocation_visitor code_visitor(parent,data_offset,code_offset);
- parent->iterate_relocations(compiled,code_visitor);
+ compiled->each_instruction_operand(code_visitor);
}
};
namespace factor
{
+instruction_operand::instruction_operand(relocation_entry rel_, code_block *compiled_, cell index_) :
+ rel(rel_), compiled(compiled_), index(index_), pointer((cell)compiled_->xt() + rel_.rel_offset()) {}
+
/* Load a 32-bit value from a PowerPC LIS/ORI sequence */
fixnum instruction_operand::load_value_2_2()
{
fixnum instruction_operand::load_value(cell relative_to)
{
- switch(rel_class)
+ switch(rel.rel_class())
{
case RC_ABSOLUTE_CELL:
return *(cell *)pointer;
case RC_INDIRECT_ARM_PC:
return load_value_masked(rel_indirect_arm_mask,0) + relative_to + sizeof(cell) * 2;
default:
- critical_error("Bad rel class",rel_class);
+ critical_error("Bad rel class",rel.rel_class());
return 0;
}
}
{
fixnum relative_value = absolute_value - pointer;
- switch(rel_class)
+ switch(rel.rel_class())
{
case RC_ABSOLUTE_CELL:
*(cell *)pointer = absolute_value;
store_value_masked(relative_value - sizeof(cell) * 2,rel_indirect_arm_mask,0);
break;
default:
- critical_error("Bad rel class",rel_class);
+ critical_error("Bad rel class",rel.rel_class());
break;
}
}
struct relocation_entry {
u32 value;
- relocation_entry(u32 value_) : value(value_) {}
+ explicit relocation_entry(u32 value_) : value(value_) {}
relocation_entry(relocation_type rel_type,
relocation_class rel_class,
};
struct instruction_operand {
- cell rel_class;
+ relocation_entry rel;
+ code_block *compiled;
+ cell index;
cell pointer;
- instruction_operand(cell rel_class_, cell pointer_) :
- rel_class(rel_class_), pointer(pointer_) {}
+ instruction_operand(relocation_entry rel_, code_block *compiled_, cell index_);
+
+ relocation_type rel_type()
+ {
+ return rel.rel_type();
+ }
+
+ cell rel_offset()
+ {
+ return rel.rel_offset();
+ }
+
+ cell parameter_index()
+ {
+ return index;
+ }
+
+ code_block *parent_code_block()
+ {
+ return compiled;
+ }
fixnum load_value_2_2();
fixnum load_value_masked(cell mask, fixnum shift);
cell capacity = array_capacity(code_template.untagged());
for(cell i = 1; i < capacity; i += 3)
{
- cell rel_class = array_nth(code_template.untagged(),i);
- cell rel_type = array_nth(code_template.untagged(),i + 1);
+ relocation_class rel_class = (relocation_class)untag_fixnum(array_nth(code_template.untagged(),i));
+ relocation_type rel_type = (relocation_type)untag_fixnum(array_nth(code_template.untagged(),i + 1));
cell offset = array_nth(code_template.untagged(),i + 2);
- relocation_entry new_entry(
- (relocation_type)untag_fixnum(rel_type),
- (relocation_class)untag_fixnum(rel_class),
- code.count + untag_fixnum(offset));
+ relocation_entry new_entry(rel_type,rel_class,code.count + untag_fixnum(offset));
relocation.append_bytes(&new_entry,sizeof(relocation_entry));
}
}
namespace factor
{
-const int block_granularity = 16;
const int mark_bits_granularity = sizeof(cell) * 8;
const int mark_bits_mask = sizeof(cell) * 8 - 1;
explicit mark_bits(cell size_, cell start_) :
size(size_),
start(start_),
- bits_size(size / block_granularity / mark_bits_granularity),
+ bits_size(size / data_alignment / mark_bits_granularity),
marked(new cell[bits_size]),
forwarding(new cell[bits_size])
{
cell block_line(Block *address)
{
- return (((cell)address - start) / block_granularity);
+ return (((cell)address - start) / data_alignment);
}
Block *line_block(cell line)
{
- return (Block *)(line * block_granularity + start);
+ return (Block *)(line * data_alignment + start);
}
std::pair<cell,cell> bitmap_deref(Block *address)
#include "errors.hpp"
#include "bignumint.hpp"
#include "bignum.hpp"
+#include "booleans.hpp"
#include "instruction_operands.hpp"
#include "code_blocks.hpp"
#include "bump_allocator.hpp"
#include "callstack.hpp"
#include "arrays.hpp"
#include "math.hpp"
-#include "booleans.hpp"
#include "byte_arrays.hpp"
#include "jit.hpp"
#include "quotations.hpp"
cell offset = object_start_offsets[index];
if(offset != card_starts_inside_object)
{
- mask >>= (offset / block_granularity);
+ mask >>= (offset / data_alignment);
if(mask == 0)
{
else
{
/* Move the object start forward if necessary */
- object_start_offsets[index] = offset + (rightmost_set_bit(mask) * block_granularity);
+ object_start_offsets[index] = offset + (rightmost_set_bit(mask) * data_alignment);
}
}
}
switch(tagged<object>(obj).type())
{
case WORD_TYPE:
- if(!parent->to_boolean(untag<word>(obj)->subprimitive))
+ if(!to_boolean(untag<word>(obj)->subprimitive))
return true;
break;
case QUOTATION_TYPE:
{
case WORD_TYPE:
/* Intrinsics */
- if(parent->to_boolean(obj.as<word>()->subprimitive))
+ if(to_boolean(obj.as<word>()->subprimitive))
emit_subprimitive(obj.value());
/* The (execute) primitive is special-cased */
else if(obj.value() == parent->special_objects[JIT_EXECUTE_WORD])
namespace factor
{
+/* Slot visitors iterate over the slots of an object, applying a functor to
+each one that is a non-immediate slot. The pointer is untagged first. The
+functor returns a new untagged object pointer, which may or may not equal the
+old one; the new pointer receives the same tag before being stored back to the
+original location.
+
+Slots storing immediate values are left unchanged and the visitor does not
+inspect them.
+
+This is used by GC's copying, sweep and compact phases, and the implementation
+of the become primitive.
+
+Iteration is driven by visit_*() methods. Some of them define GC roots:
+- visit_roots()
+- visit_contexts() */
+
template<typename Visitor> struct slot_visitor {
factor_vm *parent;
Visitor visitor;
void visit_stack_elements(segment *region, cell *top);
void visit_data_roots();
void visit_bignum_roots();
+ void visit_callback_roots();
void visit_roots();
void visit_contexts();
void visit_code_block_objects(code_block *compiled);
}
}
+template<typename Visitor>
+struct callback_slot_visitor {
+ callback_heap *callbacks;
+ slot_visitor<Visitor> *visitor;
+
+ explicit callback_slot_visitor(callback_heap *callbacks_, slot_visitor<Visitor> *visitor_) :
+ callbacks(callbacks_), visitor(visitor_) {}
+
+ void operator()(code_block *stub)
+ {
+ visitor->visit_handle(&stub->owner);
+ callbacks->update(stub);
+ }
+};
+
+template<typename Visitor>
+void slot_visitor<Visitor>::visit_callback_roots()
+{
+ callback_slot_visitor<Visitor> callback_visitor(parent->callbacks,this);
+ parent->callbacks->each_callback(callback_visitor);
+}
+
template<typename Visitor>
void slot_visitor<Visitor>::visit_roots()
{
visit_data_roots();
visit_bignum_roots();
+ visit_callback_roots();
for(cell i = 0; i < special_object_count; i++)
visit_handle(&parent->special_objects[i]);
explicit literal_references_visitor(slot_visitor<Visitor> *visitor_) : visitor(visitor_) {}
- void operator()(relocation_entry rel, cell index, code_block *compiled)
+ void operator()(instruction_operand op)
{
- if(rel.rel_type() == RT_IMMEDIATE)
- {
- instruction_operand op(rel.rel_class(),rel.rel_offset() + (cell)compiled->xt());
+ if(op.rel_type() == RT_IMMEDIATE)
op.store_value(visitor->visit_pointer(op.load_value()));
- }
}
};
if(!parent->code->needs_fixup_p(compiled))
{
literal_references_visitor<Visitor> visitor(this);
- parent->iterate_relocations(compiled,visitor);
+ compiled->each_instruction_operand(visitor);
}
}
//booleans
void box_boolean(bool value);
- bool to_boolean(cell value);
- inline cell tag_boolean(cell untagged);
+
+ inline cell tag_boolean(cell untagged)
+ {
+ return (untagged ? true_object : false_object);
+ }
//byte arrays
byte_array *allot_byte_array(cell size);
cell compute_context_relocation();
cell compute_vm_relocation(cell arg);
cell code_block_owner(code_block *compiled);
-
- template<typename Iterator> void iterate_relocations(code_block *compiled, Iterator &iter)
- {
- if(to_boolean(compiled->relocation))
- {
- byte_array *relocation = (byte_array *)UNTAG(compiled->relocation);
-
- cell index = 0;
- cell length = (relocation->capacity >> TAG_BITS) / sizeof(relocation_entry);
-
- for(cell i = 0; i < length; i++)
- {
- relocation_entry rel = relocation->data<relocation_entry>()[i];
- iter(rel,index,compiled);
- index += rel.number_of_parameters();
- }
- }
- }
-
void update_word_references(code_block *compiled);
void check_code_address(cell address);
void relocate_code_block(code_block *compiled);
void primitive_code_room();
void primitive_strip_stack_traces();
- /* Apply a function to every code block */
- template<typename Iterator> void iterate_code_heap(Iterator &iter)
+ template<typename Iterator> void each_code_block(Iterator &iter)
{
code->allocator->iterate(iter);
}