3 /* Size of the object pointed to by an untagged pointer */
4 template <typename Fixup> cell object::size(Fixup fixup) const {
6 return ((free_heap_block*)this)->size();
10 return align(array_size((array*)this), data_alignment);
12 return align(array_size((bignum*)this), data_alignment);
14 return align(array_size((byte_array*)this), data_alignment);
16 return align(string_size(string_capacity((string*)this)), data_alignment);
18 tuple_layout* layout = (tuple_layout*)fixup.translate_data(
19 untag<object>(((tuple*)this)->layout));
20 return align(tuple_size(layout), data_alignment);
23 return align(sizeof(quotation), data_alignment);
25 return align(sizeof(word), data_alignment);
27 return align(sizeof(boxed_float), data_alignment);
29 return align(sizeof(dll), data_alignment);
31 return align(sizeof(alien), data_alignment);
33 return align(sizeof(wrapper), data_alignment);
36 callstack_object_size(untag_fixnum(((callstack*)this)->length)),
39 critical_error("Invalid header in size", (cell)this);
40 return 0; /* can't happen */
44 inline cell object::size() const { return size(no_fixup()); }
46 /* The number of cells from the start of the object which should be scanned by
47 the GC. Some types have a binary payload at the end (string, word, DLL) which
49 template <typename Fixup> cell object::binary_payload_start(Fixup fixup) const {
54 /* these objects do not refer to other objects at all */
60 /* these objects have some binary data at the end */
62 return sizeof(word) - sizeof(cell);
64 return sizeof(cell) * 3;
66 return sizeof(cell) * 2;
68 return sizeof(quotation) - sizeof(cell);
70 return sizeof(string);
71 /* everything else consists entirely of pointers */
73 return array_size<array>(array_capacity((array*)this));
75 tuple_layout* layout = (tuple_layout*)fixup.translate_data(
76 untag<object>(((tuple*)this)->layout));
77 return tuple_size(layout);
80 return sizeof(wrapper);
82 critical_error("Invalid header in binary_payload_start", (cell)this);
83 return 0; /* can't happen */
87 inline cell object::binary_payload_start() const {
88 return binary_payload_start(no_fixup());
/* Slot visitors iterate over the slots of an object, applying a functor to
each one that is a non-immediate slot. The pointer is untagged first. The
functor returns a new untagged object pointer. The return value may or may not
equal the old one, however the new pointer receives the same tag before being
stored back to the original location.

Slots storing immediate values are left unchanged and the visitor does inspect
them.

This is used by GC's copying, sweep and compact phases, and the implementation
of the become primitive.

Iteration is driven by visit_*() methods. Some of them define GC roots:
- visit_roots()
- visit_contexts()

Code block visitors iterate over sets of code blocks, applying a functor to
each one. The functor returns a new code_block pointer, which may or may not
equal the old one. This is stored back to the original location.

This is used by GC's sweep and compact phases, and the implementation of the
modify-code-heap primitive.

Iteration is driven by visit_*() methods. Some of them define GC roots:
- visit_context_code_blocks()
- visit_callback_code_blocks() */
119 template <typename Fixup> struct slot_visitor {
123 slot_visitor<Fixup>(factor_vm* parent, Fixup fixup)
124 : parent(parent), fixup(fixup) {}
126 cell visit_pointer(cell pointer);
127 void visit_handle(cell* handle);
128 void visit_object_array(cell* start, cell* end);
129 void visit_slots(object* ptr, cell payload_start);
130 void visit_slots(object* ptr);
131 void visit_stack_elements(segment* region, cell* top);
132 void visit_data_roots();
133 void visit_callback_roots();
134 void visit_literal_table_roots();
136 void visit_callstack_object(callstack* stack);
137 void visit_callstack(context* ctx);
138 void visit_context(context *ctx);
139 void visit_contexts();
140 void visit_code_block_objects(code_block* compiled);
141 void visit_embedded_literals(code_block* compiled);
142 void visit_sample_callstacks();
143 void visit_sample_threads();
144 void visit_object_code_block(object* obj);
145 void visit_context_code_blocks();
146 void visit_uninitialized_code_blocks();
147 void visit_embedded_code_pointers(code_block* compiled);
150 template <typename Fixup>
151 cell slot_visitor<Fixup>::visit_pointer(cell pointer) {
152 if (immediate_p(pointer))
155 object* untagged = fixup.fixup_data(untag<object>(pointer));
156 return RETAG(untagged, TAG(pointer));
159 template <typename Fixup> void slot_visitor<Fixup>::visit_handle(cell* handle) {
160 *handle = visit_pointer(*handle);
163 template <typename Fixup>
164 void slot_visitor<Fixup>::visit_object_array(cell* start, cell* end) {
166 visit_handle(start++);
169 template <typename Fixup>
170 void slot_visitor<Fixup>::visit_slots(object* ptr, cell payload_start) {
171 cell* slot = (cell*)ptr;
172 cell* end = (cell*)((cell)ptr + payload_start);
176 visit_object_array(slot, end);
180 template <typename Fixup> void slot_visitor<Fixup>::visit_slots(object* obj) {
181 if (obj->type() == CALLSTACK_TYPE)
182 visit_callstack_object((callstack*)obj);
184 visit_slots(obj, obj->binary_payload_start(fixup));
187 template <typename Fixup>
188 void slot_visitor<Fixup>::visit_stack_elements(segment* region, cell* top) {
189 visit_object_array((cell*)region->start, top + 1);
192 template <typename Fixup> void slot_visitor<Fixup>::visit_data_roots() {
193 std::vector<cell*>::const_iterator iter =
194 parent->data_roots.begin();
195 std::vector<cell*>::const_iterator end =
196 parent->data_roots.end();
198 for (; iter < end; iter++) {
203 template <typename Fixup> struct callback_slot_visitor {
204 slot_visitor<Fixup>* visitor;
206 callback_slot_visitor(slot_visitor<Fixup>* visitor) : visitor(visitor) {}
208 void operator()(code_block* stub, cell size) {
209 visitor->visit_handle(&stub->owner);
213 template <typename Fixup> void slot_visitor<Fixup>::visit_callback_roots() {
214 callback_slot_visitor<Fixup> callback_visitor(this);
215 parent->callbacks->allocator->iterate(callback_visitor);
218 template <typename Fixup>
219 void slot_visitor<Fixup>::visit_literal_table_roots() {
220 std::map<code_block*, cell>* uninitialized_blocks =
221 &parent->code->uninitialized_blocks;
222 std::map<code_block*, cell>::iterator iter =
223 uninitialized_blocks->begin();
224 std::map<code_block*, cell>::iterator end = uninitialized_blocks->end();
226 for (; iter != end; iter++) {
227 iter->second = visit_pointer(iter->second);
231 template <typename Fixup> void slot_visitor<Fixup>::visit_sample_callstacks() {
232 for (std::vector<cell>::iterator iter = parent->sample_callstacks.begin();
233 iter != parent->sample_callstacks.end(); ++iter) {
234 visit_handle(&*iter);
238 template <typename Fixup> void slot_visitor<Fixup>::visit_sample_threads() {
239 for (std::vector<profiling_sample>::iterator iter = parent->samples.begin();
240 iter != parent->samples.end(); ++iter) {
241 visit_handle(&iter->thread);
245 template <typename Fixup> void slot_visitor<Fixup>::visit_roots() {
246 visit_handle(&parent->true_object);
247 visit_handle(&parent->bignum_zero);
248 visit_handle(&parent->bignum_pos_one);
249 visit_handle(&parent->bignum_neg_one);
252 visit_callback_roots();
253 visit_literal_table_roots();
254 visit_sample_callstacks();
255 visit_sample_threads();
257 visit_object_array(parent->special_objects,
258 parent->special_objects + special_object_count);
/* primitive_minor_gc() is invoked by inline GC checks, and it needs to fill in
uninitialized stack locations before actually calling the GC. See the
documentation in compiler.cfg.stacks.vacant for details.

So for each call frame:

- scrub some uninitialized locations
- trace some overinitialized locations
- trace roots in spill slots */
271 template <typename Fixup> struct call_frame_slot_visitor {
273 slot_visitor<Fixup>* visitor;
274 /* NULL in case we're a visitor for a callstack object. */
277 void check_stack(cell stack, uint8_t* bitmap, cell base, uint32_t count) {
278 for (uint32_t loc = 0; loc < count; loc++) {
279 if (bitmap_p(bitmap, base + loc)) {
281 std::cout << "checking stack location " << loc << std::endl;
283 cell* value_ptr = ((cell*)stack + loc + 1);
284 visitor->visit_handle(value_ptr);
289 void scrub_stack(cell stack, uint8_t* bitmap, cell base, uint32_t count) {
290 for (cell loc = 0; loc < count; loc++) {
291 if (bitmap_p(bitmap, base + loc)) {
293 std::cout << "scrubbing stack location " << loc << std::endl;
295 *((cell*)stack - loc) = 0;
300 call_frame_slot_visitor(factor_vm* parent,
301 slot_visitor<Fixup>* visitor,
303 : parent(parent), visitor(visitor), ctx(ctx) {}
306 frame top -> [return address]
312 void operator()(cell frame_top, cell size, code_block* owner, cell addr) {
313 cell return_address = owner->offset(addr);
315 code_block* compiled =
316 Fixup::translated_code_block_map ? owner
317 : visitor->fixup.translate_code(owner);
318 gc_info* info = compiled->block_gc_info();
320 FACTOR_ASSERT(return_address < compiled->size());
321 cell callsite = info->return_address_index(return_address);
322 if (callsite == (cell)-1)
326 std::cout << "call frame code block " << compiled << " with offset "
327 << return_address << std::endl;
329 cell* stack_pointer = (cell*)frame_top;
330 uint8_t* bitmap = info->gc_info_bitmap();
333 /* Scrub vacant stack locations. */
334 scrub_stack(ctx->datastack,
336 info->callsite_scrub_d(callsite),
337 info->scrub_d_count);
338 scrub_stack(ctx->retainstack,
340 info->callsite_scrub_r(callsite),
341 info->scrub_r_count);
343 /* Trace overinitialized stack locations. */
344 check_stack(ctx->datastack,
346 info->callsite_check_d(callsite),
347 info->check_d_count);
348 check_stack(ctx->retainstack,
350 info->callsite_check_r(callsite),
351 info->check_r_count);
354 /* Subtract old value of base pointer from every derived pointer. */
355 for (cell spill_slot = 0; spill_slot < info->derived_root_count;
357 uint32_t base_pointer = info->lookup_base_pointer(callsite, spill_slot);
358 if (base_pointer != (uint32_t)-1) {
360 std::cout << "visiting derived root " << spill_slot
361 << " with base pointer " << base_pointer << std::endl;
363 stack_pointer[spill_slot] -= stack_pointer[base_pointer];
367 /* Update all GC roots, including base pointers. */
368 cell callsite_gc_roots = info->callsite_gc_roots(callsite);
370 for (cell spill_slot = 0; spill_slot < info->gc_root_count; spill_slot++) {
371 if (bitmap_p(bitmap, callsite_gc_roots + spill_slot)) {
373 std::cout << "visiting GC root " << spill_slot << std::endl;
375 visitor->visit_handle(stack_pointer + spill_slot);
379 /* Add the base pointers to obtain new derived pointer values. */
380 for (cell spill_slot = 0; spill_slot < info->derived_root_count;
382 uint32_t base_pointer = info->lookup_base_pointer(callsite, spill_slot);
383 if (base_pointer != (uint32_t)-1)
384 stack_pointer[spill_slot] += stack_pointer[base_pointer];
389 template <typename Fixup>
390 void slot_visitor<Fixup>::visit_callstack_object(callstack* stack) {
391 call_frame_slot_visitor<Fixup> call_frame_visitor(parent, this, NULL);
392 parent->iterate_callstack_object(stack, call_frame_visitor, fixup);
395 template <typename Fixup>
396 void slot_visitor<Fixup>::visit_callstack(context* ctx) {
397 call_frame_slot_visitor<Fixup> call_frame_visitor(parent, this, ctx);
398 parent->iterate_callstack(ctx, call_frame_visitor, fixup);
401 template <typename Fixup>
402 void slot_visitor<Fixup>::visit_context(context* ctx) {
403 /* Callstack is visited first because it scrubs the data and retain
405 visit_callstack(ctx);
407 visit_stack_elements(ctx->datastack_seg, (cell*)ctx->datastack);
408 visit_stack_elements(ctx->retainstack_seg, (cell*)ctx->retainstack);
409 visit_object_array(ctx->context_objects,
410 ctx->context_objects + context_object_count);
413 template <typename Fixup> void slot_visitor<Fixup>::visit_contexts() {
414 std::set<context*>::const_iterator begin = parent->active_contexts.begin();
415 std::set<context*>::const_iterator end = parent->active_contexts.end();
416 while (begin != end) {
417 visit_context(*begin);
422 template <typename Fixup> struct literal_references_visitor {
423 slot_visitor<Fixup>* visitor;
425 explicit literal_references_visitor(slot_visitor<Fixup>* visitor)
426 : visitor(visitor) {}
428 void operator()(instruction_operand op) {
429 if (op.rel_type() == RT_LITERAL)
430 op.store_value(visitor->visit_pointer(op.load_value()));
434 template <typename Fixup>
435 void slot_visitor<Fixup>::visit_code_block_objects(code_block* compiled) {
436 visit_handle(&compiled->owner);
437 visit_handle(&compiled->parameters);
438 visit_handle(&compiled->relocation);
441 template <typename Fixup>
442 void slot_visitor<Fixup>::visit_embedded_literals(code_block* compiled) {
443 if (!parent->code->uninitialized_p(compiled)) {
444 literal_references_visitor<Fixup> visitor(this);
445 compiled->each_instruction_operand(visitor);
449 template <typename Fixup> struct call_frame_code_block_visitor {
453 call_frame_code_block_visitor(factor_vm* parent, Fixup fixup)
454 : parent(parent), fixup(fixup) {}
456 void operator()(cell frame_top, cell size, code_block* owner, cell addr) {
457 code_block* compiled =
458 Fixup::translated_code_block_map ? owner : fixup.fixup_code(owner);
459 cell fixed_addr = compiled->address_for_offset(owner->offset(addr));
461 *(cell*)frame_top = fixed_addr;
465 template <typename Fixup>
466 void slot_visitor<Fixup>::visit_object_code_block(object* obj) {
467 switch (obj->type()) {
469 word* w = (word*)obj;
471 w->entry_point = fixup.fixup_code(w->code())->entry_point();
474 case QUOTATION_TYPE: {
475 quotation* q = (quotation*)obj;
477 q->entry_point = fixup.fixup_code(q->code())->entry_point();
480 case CALLSTACK_TYPE: {
481 callstack* stack = (callstack*)obj;
482 call_frame_code_block_visitor<Fixup> call_frame_visitor(parent, fixup);
483 parent->iterate_callstack_object(stack, call_frame_visitor, fixup);
489 template <typename Fixup>
490 void slot_visitor<Fixup>::visit_context_code_blocks() {
491 call_frame_code_block_visitor<Fixup> call_frame_visitor(parent, fixup);
492 std::set<context*>::const_iterator begin = parent->active_contexts.begin();
493 std::set<context*>::const_iterator end = parent->active_contexts.end();
494 while (begin != end) {
495 parent->iterate_callstack(*begin++, call_frame_visitor, fixup);
499 template <typename Fixup>
500 void slot_visitor<Fixup>::visit_uninitialized_code_blocks() {
501 std::map<code_block*, cell>* uninitialized_blocks =
502 &parent->code->uninitialized_blocks;
503 std::map<code_block*, cell>::const_iterator iter =
504 uninitialized_blocks->begin();
505 std::map<code_block*, cell>::const_iterator end = uninitialized_blocks->end();
507 std::map<code_block*, cell> new_uninitialized_blocks;
508 for (; iter != end; iter++) {
509 new_uninitialized_blocks.insert(
510 std::make_pair(fixup.fixup_code(iter->first), iter->second));
513 parent->code->uninitialized_blocks = new_uninitialized_blocks;
516 template <typename Fixup> struct embedded_code_pointers_visitor {
519 explicit embedded_code_pointers_visitor(Fixup fixup) : fixup(fixup) {}
521 void operator()(instruction_operand op) {
522 relocation_type type = op.rel_type();
523 if (type == RT_ENTRY_POINT || type == RT_ENTRY_POINT_PIC ||
524 type == RT_ENTRY_POINT_PIC_TAIL)
525 op.store_code_block(fixup.fixup_code(op.load_code_block()));
529 template <typename Fixup>
530 void slot_visitor<Fixup>::visit_embedded_code_pointers(code_block* compiled) {
531 if (!parent->code->uninitialized_p(compiled)) {
532 embedded_code_pointers_visitor<Fixup> operand_visitor(fixup);
533 compiled->each_instruction_operand(operand_visitor);