4 // I've had to copy inline implementations here to make dependencies work. Am hoping to move this code back into include files
5 // once the rest of the reentrant changes are done. -PD
9 inline cell factor_vm::align_page(cell a)
11 return align(a,getpagesize());
16 inline card *factor_vm::addr_to_card(cell a)
18 return (card*)(((cell)(a) >> card_bits) + cards_offset);
21 inline cell factor_vm::card_to_addr(card *c)
23 return ((cell)c - cards_offset) << card_bits;
26 inline cell factor_vm::card_offset(card *c)
28 return *(c - (cell)data->cards + (cell)data->allot_markers);
31 inline card_deck *factor_vm::addr_to_deck(cell a)
33 return (card_deck *)(((cell)a >> deck_bits) + decks_offset);
36 inline cell factor_vm::deck_to_addr(card_deck *c)
38 return ((cell)c - decks_offset) << deck_bits;
41 inline card *factor_vm::deck_to_card(card_deck *d)
43 return (card *)((((cell)d - decks_offset) << (deck_bits - card_bits)) + cards_offset);
46 inline card *factor_vm::addr_to_allot_marker(object *a)
48 return (card *)(((cell)a >> card_bits) + allot_markers_offset);
51 /* the write barrier must be called any time we are potentially storing a
52 pointer from an older generation to a younger one */
53 inline void factor_vm::write_barrier(object *obj)
55 *addr_to_card((cell)obj) = card_mark_mask;
56 *addr_to_deck((cell)obj) = card_mark_mask;
59 /* we need to remember the first object allocated in the card */
60 inline void factor_vm::allot_barrier(object *address)
62 card *ptr = addr_to_allot_marker(address);
63 if(*ptr == invalid_allot_marker)
64 *ptr = ((cell)address & addr_card_mask);
68 inline bool factor_vm::collecting_accumulation_gen_p()
70 return ((data->have_aging_p()
71 && collecting_gen == data->aging()
72 && !collecting_aging_again)
73 || collecting_gen == data->tenured());
76 inline object *factor_vm::allot_zone(zone *z, cell a)
79 z->here = h + align8(a);
80 object *obj = (object *)h;
86 * It is up to the caller to fill in the object's fields in a meaningful
// NOTE(review): this view of the file is truncated — the function's braces,
// the declaration of 'obj', the else-branch header, the tenured-space
// write-barrier loop, the header store and the final return are not visible
// here; do not assume this text is the complete function.
89 inline object *factor_vm::allot_object(header header, cell size)
// Fast path: objects smaller than the nursery are bump-allocated from it.
98 if(nursery.size - allot_buffer_zone > size)
100 /* If there is insufficient room, collect the nursery */
101 if(nursery.here + allot_buffer_zone + size > nursery.end)
102 garbage_collection(data->nursery(),false,0);
// Bump the nursery allocation pointer, keeping 8-byte alignment.
104 cell h = nursery.here;
105 nursery.here = h + align8(size);
108 /* If the object is bigger than the nursery, allocate it in
// Slow path: allocate directly in the tenured generation.
112 zone *tenured = &data->generations[data->tenured()];
114 /* If tenured space does not have enough room, collect */
115 if(tenured->here + size > tenured->end)
// A collection may replace the tenured zone object, so re-fetch it.
118 tenured = &data->generations[data->tenured()];
121 /* If it still won't fit, grow the heap */
// Passing 'true' and the requested size asks the collector to grow the
// data heap enough to satisfy this allocation.
122 garbage_collection(data->tenured(),true,size);
125 tenured = &data->generations[data->tenured()];
128 obj = allot_zone(tenured,size);
130 /* Allows initialization code to store old->new pointers
131 without hitting the write barrier in the common case of
132 a nursery allocation */
140 template<typename TYPE> TYPE *factor_vm::allot(cell size)
142 return (TYPE *)allot_object(header(TYPE::type_number),size);
145 inline void factor_vm::check_data_pointer(object *pointer)
148 if(!growing_data_heap)
150 assert((cell)pointer >= data->seg->start
151 && (cell)pointer < data->seg->end);
156 inline void factor_vm::check_tagged_pointer(cell tagged)
159 if(!immediate_p(tagged))
161 object *obj = untag<object>(tagged);
162 check_data_pointer(obj);
// GC-root smart pointer: registers its own address in myvm->gc_locals so the
// collector can find and update the tagged value it wraps across a GC.
// Roots must be released in strict LIFO order (see the assert below).
// NOTE(review): truncated view — the 'factor_vm *myvm' member, a pop()
// method, the destructor signature and the closing brace are not visible
// here.
169 template <typename TYPE>
170 struct gc_root : public tagged<TYPE>
// Validate the wrapped pointer, then register this root with the VM.
174 void push() { myvm->check_tagged_pointer(tagged<TYPE>::value()); myvm->gc_locals.push_back((cell)this); }
// Both constructors register the root immediately on construction.
176 explicit gc_root(cell value_,factor_vm *vm) : tagged<TYPE>(value_),myvm(vm) { push(); }
177 explicit gc_root(TYPE *value_, factor_vm *vm) : tagged<TYPE>(value_),myvm(vm) { push(); }
// Assignment replaces only the wrapped value; the registration in
// gc_locals is unchanged.
179 const gc_root<TYPE>& operator=(const TYPE *x) { tagged<TYPE>::operator=(x); return *this; }
180 const gc_root<TYPE>& operator=(const cell &x) { tagged<TYPE>::operator=(x); return *this; }
// LIFO discipline: the most recently pushed root must be the one being
// released — presumably runs in the destructor/pop(); confirm upstream.
184 assert(myvm->gc_locals.back() == (cell)this);
186 myvm->gc_locals.pop_back();
190 /* A similar hack for the bignum implementation */
// Registers the address of a bignum* local in myvm->gc_bignums so the GC can
// update the pointer if the bignum moves during a collection.
// NOTE(review): truncated view — the struct header, its 'addr'/'myvm'
// members, the destructor signature and the closing brace are not visible
// here.
195 gc_bignum(bignum **addr_, factor_vm *vm) : addr(addr_), myvm(vm) {
// Validate the current pointee before registering the slot.
197 myvm->check_data_pointer(*addr_);
198 myvm->gc_bignums.push_back((cell)addr);
// LIFO discipline: the most recently registered bignum root must be the one
// being released — presumably in the destructor; confirm upstream.
203 assert(myvm->gc_bignums.back() == (cell)addr);
205 myvm->gc_bignums.pop_back();
// Declares a scoped gc_bignum root for the local bignum* variable 'x'.
209 #define GC_BIGNUM(x) gc_bignum x##__gc_root(&x,this)
212 template <typename TYPE> TYPE *factor_vm::allot_array_internal(cell capacity)
214 TYPE *array = allot<TYPE>(array_size<TYPE>(capacity));
215 array->capacity = tag_fixnum(capacity);
219 template <typename TYPE> bool factor_vm::reallot_array_in_place_p(TYPE *array, cell capacity)
221 return in_zone(&nursery,array) && capacity <= array_capacity(array);
224 template <typename TYPE> TYPE *factor_vm::reallot_array(TYPE *array_, cell capacity)
226 gc_root<TYPE> array(array_,this);
228 if(reallot_array_in_place_p(array.untagged(),capacity))
230 array->capacity = tag_fixnum(capacity);
231 return array.untagged();
235 cell to_copy = array_capacity(array.untagged());
236 if(capacity < to_copy)
239 TYPE *new_array = allot_array_internal<TYPE>(capacity);
241 memcpy(new_array + 1,array.untagged() + 1,to_copy * TYPE::element_size);
242 memset((char *)(new_array + 1) + to_copy * TYPE::element_size,
243 0,(capacity - to_copy) * TYPE::element_size);
250 inline void factor_vm::set_array_nth(array *array, cell slot, cell value)
253 assert(slot < array_capacity(array));
254 assert(array->h.hi_tag() == ARRAY_TYPE);
255 check_tagged_pointer(value);
257 array->data()[slot] = value;
258 write_barrier(array);
// Append-friendly wrapper around a Factor array; 'elements' is GC-rooted so
// the buffer survives collections triggered while appending.
// NOTE(review): truncated view — the 'cell count;' member, the add()/trim()
// methods and the closing brace are not visible here.
261 struct growable_array {
263 gc_root<array> elements;
// Pre-allocates 'capacity' slots initialized to F; count starts at zero.
265 growable_array(factor_vm *myvm, cell capacity = 10) : count(0), elements(myvm->allot_array(capacity,F),myvm) {}
// Append-friendly wrapper around a Factor byte_array; 'elements' is GC-rooted
// so the buffer survives collections triggered while appending.
// NOTE(review): truncated view — the 'cell count;' member, trim() and the
// closing brace are not visible here.
272 struct growable_byte_array {
274 gc_root<byte_array> elements;
276 growable_byte_array(factor_vm *myvm,cell capacity = 40) : count(0), elements(myvm->allot_byte_array(capacity),myvm) { }
// Append 'len' raw bytes / the contents of a tagged byte array.
278 void append_bytes(void *elts, cell len);
279 void append_byte_array(cell elts);
285 inline cell factor_vm::allot_integer(fixnum x)
287 if(x < fixnum_min || x > fixnum_max)
288 return tag<bignum>(fixnum_to_bignum(x));
290 return tag_fixnum(x);
293 inline cell factor_vm::allot_cell(cell x)
295 if(x > (cell)fixnum_max)
296 return tag<bignum>(cell_to_bignum(x));
298 return tag_fixnum(x);
301 inline cell factor_vm::allot_float(double n)
303 boxed_float *flo = allot<boxed_float>(sizeof(boxed_float));
308 inline bignum *factor_vm::float_to_bignum(cell tagged)
310 return double_to_bignum(untag_float(tagged));
313 inline double factor_vm::bignum_to_float(cell tagged)
315 return bignum_to_double(untag<bignum>(tagged));
318 inline double factor_vm::untag_float(cell tagged)
320 return untag<boxed_float>(tagged)->n;
323 inline double factor_vm::untag_float_check(cell tagged)
325 return untag_check<boxed_float>(tagged)->n;
328 inline fixnum factor_vm::float_to_fixnum(cell tagged)
330 return (fixnum)untag_float(tagged);
333 inline double factor_vm::fixnum_to_float(cell tagged)
335 return (double)untag_fixnum(tagged);
339 /* This is a little tricky. The iterator may allocate memory, so we
340 keep the callstack in a GC root and use relative offsets */
341 template<typename TYPE> void factor_vm::iterate_callstack_object(callstack *stack_, TYPE &iterator)
343 gc_root<callstack> stack(stack_,this);
344 fixnum frame_offset = untag_fixnum(stack->length) - sizeof(stack_frame);
346 while(frame_offset >= 0)
348 stack_frame *frame = stack->frame_at(frame_offset);
349 frame_offset -= frame->size;
350 iterator(frame,this);
355 inline cell factor_vm::tag_boolean(cell untagged)
357 return (untagged ? T : F);
361 template<typename TYPE> void factor_vm::iterate_callstack(cell top, cell bottom, TYPE &iterator)
363 stack_frame *frame = (stack_frame *)bottom - 1;
365 while((cell)frame >= top)
367 iterator(frame,this);
368 frame = frame_successor(frame);
373 /* Every object has a regular representation in the runtime, which makes GC
374 much simpler. Every slot of the object until binary_payload_start is a pointer
375 to some other object. */
377 inline void factor_vm::do_slots(cell obj, void (* iter)(cell *,factor_vm*))
380 cell payload_start = binary_payload_start((object *)obj);
381 cell end = obj + payload_start;
383 scan += sizeof(cell);
387 iter((cell *)scan,this);
388 scan += sizeof(cell);
394 inline void factor_vm::check_code_pointer(cell ptr)
397 assert(in_code_heap_p(ptr));