4 // I've had to copy inline implementations here to make dependencies work. Am hoping to move this code back into include files
5 // once the rest of the reentrant changes are done. -PD
9 inline cell factorvm::align_page(cell a)
11 return align(a,getpagesize());
16 inline card *factorvm::addr_to_card(cell a)
18 return (card*)(((cell)(a) >> card_bits) + cards_offset);
22 inline cell factorvm::card_to_addr(card *c)
24 return ((cell)c - cards_offset) << card_bits;
28 inline cell factorvm::card_offset(card *c)
30 return *(c - (cell)data->cards + (cell)data->allot_markers);
33 inline card_deck *factorvm::addr_to_deck(cell a)
35 return (card_deck *)(((cell)a >> deck_bits) + decks_offset);
38 inline cell factorvm::deck_to_addr(card_deck *c)
40 return ((cell)c - decks_offset) << deck_bits;
43 inline card *factorvm::deck_to_card(card_deck *d)
45 return (card *)((((cell)d - decks_offset) << (deck_bits - card_bits)) + cards_offset);
48 inline card *factorvm::addr_to_allot_marker(object *a)
50 return (card *)(((cell)a >> card_bits) + allot_markers_offset);
53 /* the write barrier must be called any time we are potentially storing a
54 pointer from an older generation to a younger one */
55 inline void factorvm::write_barrier(object *obj)
57 *addr_to_card((cell)obj) = card_mark_mask;
58 *addr_to_deck((cell)obj) = card_mark_mask;
61 /* we need to remember the first object allocated in the card */
62 inline void factorvm::allot_barrier(object *address)
64 card *ptr = addr_to_allot_marker(address);
65 if(*ptr == invalid_allot_marker)
66 *ptr = ((cell)address & addr_card_mask);
71 inline bool factorvm::collecting_accumulation_gen_p()
73 return ((data->have_aging_p()
74 && collecting_gen == data->aging()
75 && !collecting_aging_again)
76 || collecting_gen == data->tenured());
79 inline object *factorvm::allot_zone(zone *z, cell a)
82 z->here = h + align8(a);
83 object *obj = (object *)h;
/* Allocate a 'size'-byte object stamped with 'header': from the nursery
when it fits (fast path), otherwise from tenured space, collecting or
growing the heap as needed.
NOTE(review): this excerpt is gapped — braces, the else branch marker, the
store of the header and the final return are not visible here; the code
below is the fragment as found. */
89  * It is up to the caller to fill in the object's fields in a meaningful
92 inline object *factorvm::allot_object(header header, cell size)
/* Fast path: object fits in the nursery, leaving allot_buffer_zone slack. */
101 if(nursery.size - allot_buffer_zone > size)
103 /* If there is insufficient room, collect the nursery */
104 if(nursery.here + allot_buffer_zone + size > nursery.end)
105 garbage_collection(data->nursery(),false,0);
/* Bump-allocate from the nursery. */
107 cell h = nursery.here;
108 nursery.here = h + align8(size);
111 /* If the object is bigger than the nursery, allocate it in
115 zone *tenured = &data->generations[data->tenured()];
117 /* If tenured space does not have enough room, collect */
118 if(tenured->here + size > tenured->end)
/* Re-fetch: collection may have replaced the tenured zone. */
121 tenured = &data->generations[data->tenured()];
124 /* If it still won't fit, grow the heap */
125 if(tenured->here + size > tenured->end)
/* growing_data_heap=true with the requested size forces heap growth. */
127 garbage_collection(data->tenured(),true,size);
128 tenured = &data->generations[data->tenured()];
131 obj = allot_zone(tenured,size);
133 /* Allows initialization code to store old->new pointers
134 without hitting the write barrier in the common case of
135 a nursery allocation */
143 template<typename TYPE> TYPE *factorvm::allot(cell size)
145 return (TYPE *)allot_object(header(TYPE::type_number),size);
148 inline void factorvm::check_data_pointer(object *pointer)
151 if(!growing_data_heap)
153 assert((cell)pointer >= data->seg->start
154 && (cell)pointer < data->seg->end);
159 inline void factorvm::check_tagged_pointer(cell tagged)
162 if(!immediate_p(tagged))
164 object *obj = untag<object>(tagged);
165 check_data_pointer(obj);
/* GC root smart pointer: registers its own address in myvm->gc_locals on
construction and removes it on destruction (LIFO), so the collector can
locate live tagged values held in C++ locals.
NOTE(review): this excerpt is gapped — the 'factorvm *myvm' member, the
destructor's signature and the closing brace are not visible here. */
172 template <typename TYPE>
173 struct gc_root : public tagged<TYPE>
/* Validate the wrapped value, then record this root's address. */
177 void push() { myvm->check_tagged_pointer(tagged<TYPE>::value()); myvm->gc_locals.push_back((cell)this); }
/* Superseded form kept for reference (initializer order was wrong). */
179 //explicit gc_root(cell value_, factorvm *vm) : myvm(vm),tagged<TYPE>(value_) { push(); }
180 explicit gc_root(cell value_,factorvm *vm) : tagged<TYPE>(value_),myvm(vm) { push(); }
181 explicit gc_root(TYPE *value_, factorvm *vm) : tagged<TYPE>(value_),myvm(vm) { push(); }
/* Assignment updates only the wrapped value; registration is unchanged. */
183 const gc_root<TYPE>& operator=(const TYPE *x) { tagged<TYPE>::operator=(x); return *this; }
184 const gc_root<TYPE>& operator=(const cell &x) { tagged<TYPE>::operator=(x); return *this; }
/* Roots must be released in strict LIFO order — presumably these two
lines belong to the destructor; confirm against the full file. */
188 assert(myvm->gc_locals.back() == (cell)this);
190 myvm->gc_locals.pop_back();
194 /* A similar hack for the bignum implementation */
/* Registers the address of a local bignum* in myvm->gc_bignums for the
duration of a scope; use via the GC_BIGNUM macro below.
NOTE(review): this excerpt is gapped — the 'struct gc_bignum {' line, the
member declarations and the destructor signature are not visible here. */
199 gc_bignum(bignum **addr_, factorvm *vm) : addr(addr_), myvm(vm) {
201 myvm->check_data_pointer(*addr_);
202 myvm->gc_bignums.push_back((cell)addr);
/* LIFO discipline: the most recently pushed root is popped first —
presumably these lines belong to the destructor. */
207 assert(myvm->gc_bignums.back() == (cell)addr);
209 myvm->gc_bignums.pop_back();
/* Declares a scoped root named x__gc_root for local bignum* variable x. */
213 #define GC_BIGNUM(x,vm) gc_bignum x##__gc_root(&x,vm)
216 template <typename TYPE> TYPE *factorvm::allot_array_internal(cell capacity)
218 TYPE *array = allot<TYPE>(array_size<TYPE>(capacity));
219 array->capacity = tag_fixnum(capacity);
223 template <typename TYPE> bool factorvm::reallot_array_in_place_p(TYPE *array, cell capacity)
225 return in_zone(&nursery,array) && capacity <= array_capacity(array);
228 template <typename TYPE> TYPE *factorvm::reallot_array(TYPE *array_, cell capacity)
230 gc_root<TYPE> array(array_,this);
232 if(reallot_array_in_place_p(array.untagged(),capacity))
234 array->capacity = tag_fixnum(capacity);
235 return array.untagged();
239 cell to_copy = array_capacity(array.untagged());
240 if(capacity < to_copy)
243 TYPE *new_array = allot_array_internal<TYPE>(capacity);
245 memcpy(new_array + 1,array.untagged() + 1,to_copy * TYPE::element_size);
246 memset((char *)(new_array + 1) + to_copy * TYPE::element_size,
247 0,(capacity - to_copy) * TYPE::element_size);
254 inline void factorvm::set_array_nth(array *array, cell slot, cell value)
257 assert(slot < array_capacity(array));
258 assert(array->h.hi_tag() == ARRAY_TYPE);
259 check_tagged_pointer(value);
261 array->data()[slot] = value;
262 write_barrier(array);
/* Incrementally-built array whose backing store is held in a gc_root so
it survives collections while being filled.
NOTE(review): this excerpt is gapped — the 'cell count' member (which the
ctor initializes to 0), any add/trim methods and the closing brace are not
visible here. */
265 struct growable_array {
267 gc_root<array> elements;
/* Default capacity 10; second allot_array argument F is presumably the
fill value — confirm against allot_array's definition. */
269 growable_array(factorvm *myvm, cell capacity = 10) : count(0), elements(myvm->allot_array(capacity,F),myvm) {}
/* Incrementally-built byte array, GC-rooted like growable_array above.
NOTE(review): this excerpt is gapped — the 'cell count' member and the
closing brace are not visible here. */
276 struct growable_byte_array {
278 gc_root<byte_array> elements;
280 growable_byte_array(factorvm *myvm,cell capacity = 40) : count(0), elements(myvm->allot_byte_array(capacity),myvm) { }
/* Out-of-line: append raw bytes / the contents of a tagged byte array. */
282 void append_bytes(void *elts, cell len);
283 void append_byte_array(cell elts);
289 inline cell factorvm::allot_integer(fixnum x)
291 if(x < fixnum_min || x > fixnum_max)
292 return tag<bignum>(fixnum_to_bignum(x));
294 return tag_fixnum(x);
297 inline cell factorvm::allot_cell(cell x)
299 if(x > (cell)fixnum_max)
300 return tag<bignum>(cell_to_bignum(x));
302 return tag_fixnum(x);
305 inline cell factorvm::allot_float(double n)
307 boxed_float *flo = allot<boxed_float>(sizeof(boxed_float));
312 inline bignum *factorvm::float_to_bignum(cell tagged)
314 return double_to_bignum(untag_float(tagged));
317 inline double factorvm::bignum_to_float(cell tagged)
319 return bignum_to_double(untag<bignum>(tagged));
322 inline double factorvm::untag_float(cell tagged)
324 return untag<boxed_float>(tagged)->n;
327 inline double factorvm::untag_float_check(cell tagged)
329 return untag_check<boxed_float>(tagged)->n;
332 inline fixnum factorvm::float_to_fixnum(cell tagged)
334 return (fixnum)untag_float(tagged);
337 inline double factorvm::fixnum_to_float(cell tagged)
339 return (double)untag_fixnum(tagged);
343 /* This is a little tricky. The iterator may allocate memory, so we
344 keep the callstack in a GC root and use relative offsets */
345 template<typename TYPE> void factorvm::iterate_callstack_object(callstack *stack_, TYPE &iterator)
347 gc_root<callstack> stack(stack_,vm);
348 fixnum frame_offset = untag_fixnum(stack->length) - sizeof(stack_frame);
350 while(frame_offset >= 0)
352 stack_frame *frame = stack->frame_at(frame_offset);
353 frame_offset -= frame->size;
354 iterator(frame,this);
359 inline cell factorvm::tag_boolean(cell untagged)
361 return (untagged ? T : F);
365 template<typename TYPE> void factorvm::iterate_callstack(cell top, cell bottom, TYPE &iterator)
367 stack_frame *frame = (stack_frame *)bottom - 1;
369 while((cell)frame >= top)
371 iterator(frame,this);
372 frame = frame_successor(frame);
378 /* Every object has a regular representation in the runtime, which makes GC
379 much simpler. Every slot of the object until binary_payload_start is a pointer
380 to some other object. */
382 inline void factorvm::do_slots(cell obj, void (* iter)(cell *,factorvm*))
385 cell payload_start = binary_payload_start((object *)obj);
386 cell end = obj + payload_start;
388 scan += sizeof(cell);
392 iter((cell *)scan,this);
393 scan += sizeof(cell);
399 inline void factorvm::check_code_pointer(cell ptr)
402 assert(in_code_heap_p(ptr));