3 /* Size sans alignment. */
4 template <typename Fixup>
5 cell object::base_size(Fixup fixup) const {
/* Variable-length types compute their size from a capacity field. */
8 return array_size((array*)this);
10 return array_size((bignum*)this);
12 return array_size((byte_array*)this);
14 return string_size(string_capacity((string*)this));
/* Tuple size comes from its layout object; the layout pointer is run
   through the fixup first (presumably because the layout may have been
   forwarded during GC — the fixup decides). */
16 tuple_layout* layout = (tuple_layout*)fixup.translate_data(
17 untag<object>(((tuple*)this)->layout));
18 return tuple_size(layout);
/* Fixed-size types. */
21 return sizeof(quotation);
25 return sizeof(boxed_float);
31 return sizeof(wrapper);
32 case CALLSTACK_TYPE: {
/* Callstack payload size is derived from the stored length slot. */
33 cell callstack_length = untag_fixnum(((callstack*)this)->length);
34 return callstack_object_size(callstack_length);
/* Any other type tag means the object header is corrupt. */
37 critical_error("Invalid header in base_size", (cell)this);
42 /* Size of the object pointed to by an untagged pointer */
43 template <typename Fixup>
44 cell object::size(Fixup fixup) const {
/* Free heap blocks record their own size; live objects use base_size
   rounded up to the data heap alignment. */
46 return ((free_heap_block*)this)->size();
47 return align(base_size(fixup), data_alignment);
50 inline cell object::size() const { return size(no_fixup()); }
52 /* The number of slots (cells) in an object which should be scanned by
53 the GC. The number can vary in arrays and tuples, in all other
54 types the number is a constant. */
55 template <typename Fixup>
56 inline cell object::slot_count(Fixup fixup) const {
61 if (t == ARRAY_TYPE) {
62 /* capacity + n slots */
63 return 1 + array_capacity((array*)this);
64 } else if (t == TUPLE_TYPE) {
/* The layout pointer may itself need translating before use. */
65 tuple_layout* layout = (tuple_layout*)fixup.translate_data(
66 untag<object>(((tuple*)this)->layout));
67 /* layout + n slots */
68 return 1 + tuple_capacity(layout);
/* Fixed slot counts per type tag. Callstack frames are scanned by a
   dedicated frame walker, so the object itself reports zero slots. */
71 /* these objects do not refer to other objects at all */
75 case CALLSTACK_TYPE: return 0;
76 case WORD_TYPE: return 8;
77 case ALIEN_TYPE: return 2;
78 case DLL_TYPE: return 1;
79 case QUOTATION_TYPE: return 3;
80 case STRING_TYPE: return 3;
81 case WRAPPER_TYPE: return 1;
83 critical_error("Invalid header in slot_count", (cell)this);
84 return 0; /* can't happen */
/* Slot count with no pointer fixup applied. */
89 inline cell object::slot_count() const {
90 return slot_count(no_fixup());
93 /* Slot visitors iterate over the slots of an object, applying a functor to
94 each one that is a non-immediate slot. The pointer is untagged first. The
95 functor returns a new untagged object pointer. The return value may or may not
97 however the new pointer receives the same tag before being stored back to the
100 Slots storing immediate values are left unchanged and the visitor does not inspect them.
103 This is used by GC's copying, sweep and compact phases, and the implementation
104 of the become primitive.
106 Iteration is driven by visit_*() methods. Only one of them define GC roots:
109 Code block visitors iterate over sets of code blocks, applying a functor to
110 each one. The functor returns a new code_block pointer, which may or may not
111 equal the old one. This is stored back to the original location.
113 This is used by GC's sweep and compact phases, and the implementation of the
114 modify-code-heap primitive.
116 Iteration is driven by visit_*() methods. Some of them define GC roots:
117 - visit_context_code_blocks()
118 - visit_callback_code_blocks()
121 template <typename Fixup> struct slot_visitor {
127 slot_visitor<Fixup>(factor_vm* parent, Fixup fixup)
/* Data-slot iteration: apply the fixup to tagged pointers held in
   objects, stacks, contexts and VM roots. */
133 cell visit_pointer(cell pointer);
134 void visit_handle(cell* handle);
135 void visit_object_array(cell* start, cell* end);
136 void visit_partial_objects(cell start, cell card_start, cell card_end);
137 void visit_slots(object* ptr);
138 void visit_stack_elements(segment* region, cell* top);
139 void visit_all_roots();
140 void visit_callstack_object(callstack* stack);
141 void visit_callstack(context* ctx);
142 void visit_context(context *ctx);
/* Code-heap iteration: update references held in and to code blocks. */
143 void visit_code_block_objects(code_block* compiled);
144 void visit_embedded_literals(code_block* compiled);
145 void visit_object_code_block(object* obj);
146 void visit_context_code_blocks();
147 void visit_uninitialized_code_blocks();
148 void visit_embedded_code_pointers(code_block* compiled);
149 void visit_object(object* obj);
150 void visit_mark_stack(std::vector<cell>* mark_stack);
151 void visit_instruction_operands(code_block* block, cell rel_base);
/* Card/deck-based tracing of a source generation (used by minor GC). */
153 template <typename SourceGeneration>
154 cell visit_card(SourceGeneration* gen, cell index, cell start);
155 template <typename SourceGeneration>
156 void visit_cards(SourceGeneration* gen, card mask, card unmask);
157 void visit_code_heap_roots(std::set<code_block*>* remembered_set);
159 template <typename TargetGeneration>
160 void cheneys_algorithm(TargetGeneration* gen, cell scan);
/* Applies the fixup to one tagged pointer and re-tags the result.
   Immediate values carry no heap reference and are not fixed up. */
163 template <typename Fixup>
164 cell slot_visitor<Fixup>::visit_pointer(cell pointer) {
165 if (immediate_p(pointer))
/* Untag, fix up the heap pointer, then restore the original tag. */
168 object* untagged = fixup.fixup_data(untag<object>(pointer));
169 return RETAG(untagged, TAG(pointer));
/* Visits a single slot in place, storing the fixed-up pointer back. */
172 template <typename Fixup> void slot_visitor<Fixup>::visit_handle(cell* handle) {
173 *handle = visit_pointer(*handle);
/* Visits every slot in the half-open range [start, end). */
176 template <typename Fixup>
177 void slot_visitor<Fixup>::visit_object_array(cell* start, cell* end) {
179 visit_handle(start++);
/* Visits the slots of the object at `start`, clamped to the card range
   [card_start, card_end). Scanning begins one cell past the object
   start (skipping the header word). */
182 template <typename Fixup>
183 void slot_visitor<Fixup>::visit_partial_objects(cell start,
186 cell *scan_start = (cell*)start + 1;
187 cell *scan_end = scan_start + ((object*)start)->slot_count();
/* Clamp the slot range to the card boundaries so only the portion of
   the object inside this card is traced. */
189 scan_start = std::max(scan_start, (cell*)card_start);
190 scan_end = std::min(scan_end, (cell*)card_end);
192 visit_object_array(scan_start, scan_end);
/* Visits all data slots of one object. Callstack objects are handled
   by the dedicated frame walker rather than a flat slot scan. */
195 template <typename Fixup> void slot_visitor<Fixup>::visit_slots(object* obj) {
196 if (obj->type() == CALLSTACK_TYPE)
197 visit_callstack_object((callstack*)obj);
/* First slot is one cell past the object header. */
199 cell* start = (cell*)obj + 1;
200 cell* end = start + obj->slot_count(fixup);
201 visit_object_array(start, end);
/* Visits the live elements of a stack segment, from the segment base
   up to and including `top`. */
205 template <typename Fixup>
206 void slot_visitor<Fixup>::visit_stack_elements(segment* region, cell* top) {
207 visit_object_array((cell*)region->start, top + 1);
/* Visits every GC root known to the VM. */
210 template <typename Fixup> void slot_visitor<Fixup>::visit_all_roots() {
/* Explicitly registered data roots. */
211 FACTOR_FOR_EACH(parent->data_roots) {
/* Each callback stub holds a tagged owner slot. */
215 auto callback_slot_visitor = [&](code_block* stub, cell size) {
216 visit_handle(&stub->owner);
218 parent->callbacks->allocator->iterate(callback_slot_visitor);
/* Tagged values associated with not-yet-initialized code blocks. */
220 FACTOR_FOR_EACH(parent->code->uninitialized_blocks) {
221 iter->second = visit_pointer(iter->second);
/* Sampling profiler state. */
224 FACTOR_FOR_EACH(parent->sample_callstacks) {
225 visit_handle(&*iter);
228 FACTOR_FOR_EACH(parent->samples) {
229 visit_handle(&iter->thread);
/* The special-objects table, then every live context. */
232 visit_object_array(parent->special_objects,
233 parent->special_objects + special_object_count);
235 FACTOR_FOR_EACH(parent->active_contexts) {
236 visit_context(*iter);
240 /* primitive_minor_gc() is invoked by inline GC checks, and it needs to fill in
241 uninitialized stack locations before actually calling the GC. See the
242 documentation in compiler.cfg.stacks.vacant for details.
244 So for each call frame:
246 - scrub some uninitialized locations
247 - trace roots in spill slots
249 template <typename Fixup> struct call_frame_slot_visitor {
250 slot_visitor<Fixup>* visitor;
251 /* NULL in case we're a visitor for a callstack object. */
/* Zeroes the stack locations flagged in the GC bitmap so stale
   (uninitialized) values are never traced as roots. */
254 void scrub_stack(cell stack, uint8_t* bitmap, cell base, uint32_t count) {
255 for (cell loc = 0; loc < count; loc++) {
256 if (bitmap_p(bitmap, base + loc)) {
258 FACTOR_PRINT("scrubbing stack location " << loc);
/* Location `loc` lives `loc` cells below `stack`. */
260 *((cell*)stack - loc) = 0;
265 call_frame_slot_visitor(slot_visitor<Fixup>* visitor, context* ctx)
266 : visitor(visitor), ctx(ctx) {}
269 frame top -> [return address]
/* Invoked per frame: look up the frame's GC info by return address,
   scrub vacant stack slots, then visit spill-slot roots, temporarily
   converting derived pointers to offsets around the visit. */
275 void operator()(cell frame_top, cell size, code_block* owner, cell addr) {
276 cell return_address = owner->offset(addr);
/* Translate to where the block's GC info can actually be read,
   unless this Fixup guarantees owner is already translated. */
278 code_block* compiled =
279 Fixup::translated_code_block_map ? owner
280 : visitor->fixup.translate_code(owner);
281 gc_info* info = compiled->block_gc_info();
283 FACTOR_ASSERT(return_address < compiled->size());
/* No callsite entry for this return address: nothing to trace. */
284 cell callsite = info->return_address_index(return_address);
285 if (callsite == (cell)-1)
289 FACTOR_PRINT("call frame code block " << compiled << " with offset "
292 cell* stack_pointer = (cell*)frame_top;
293 uint8_t* bitmap = info->gc_info_bitmap();
296 /* Scrub vacant stack locations. */
297 scrub_stack(ctx->datastack,
299 info->callsite_scrub_d(callsite),
300 info->scrub_d_count);
301 scrub_stack(ctx->retainstack,
303 info->callsite_scrub_r(callsite),
304 info->scrub_r_count);
307 /* Subtract old value of base pointer from every derived pointer. */
308 for (cell spill_slot = 0; spill_slot < info->derived_root_count;
310 uint32_t base_pointer = info->lookup_base_pointer(callsite, spill_slot);
311 if (base_pointer != (uint32_t)-1) {
313 FACTOR_PRINT("visiting derived root " << spill_slot
314 << " with base pointer " << base_pointer);
/* Store the offset so it survives the base pointer moving. */
316 stack_pointer[spill_slot] -= stack_pointer[base_pointer];
320 /* Update all GC roots, including base pointers. */
321 cell callsite_gc_roots = info->callsite_gc_roots(callsite);
323 for (cell spill_slot = 0; spill_slot < info->gc_root_count; spill_slot++) {
324 if (bitmap_p(bitmap, callsite_gc_roots + spill_slot)) {
326 FACTOR_PRINT("visiting GC root " << spill_slot);
328 visitor->visit_handle(stack_pointer + spill_slot);
332 /* Add the base pointers to obtain new derived pointer values. */
333 for (cell spill_slot = 0; spill_slot < info->derived_root_count;
335 uint32_t base_pointer = info->lookup_base_pointer(callsite, spill_slot);
336 if (base_pointer != (uint32_t)-1)
337 stack_pointer[spill_slot] += stack_pointer[base_pointer];
/* Walks the frames of a heap-allocated callstack object. */
342 template <typename Fixup>
343 void slot_visitor<Fixup>::visit_callstack_object(callstack* stack) {
/* NULL context: this is a heap callstack object, not a live stack. */
344 call_frame_slot_visitor<Fixup> call_frame_visitor(this, NULL);
345 parent->iterate_callstack_object(stack, call_frame_visitor, fixup);
/* Walks the frames of a context's live callstack. */
348 template <typename Fixup>
349 void slot_visitor<Fixup>::visit_callstack(context* ctx) {
350 call_frame_slot_visitor<Fixup> call_frame_visitor(this, ctx);
351 parent->iterate_callstack(ctx, call_frame_visitor, fixup);
/* Visits all roots held by one context: its callstack, data and
   retain stacks, and its context-object array. */
354 template <typename Fixup>
355 void slot_visitor<Fixup>::visit_context(context* ctx) {
356 /* Callstack is visited first because it scrubs the data and retain
358 visit_callstack(ctx);
/* Snapshot stack pointers and segments before visiting elements. */
360 cell ds_ptr = ctx->datastack;
361 cell rs_ptr = ctx->retainstack;
362 segment* ds_seg = ctx->datastack_seg;
363 segment* rs_seg = ctx->retainstack_seg;
364 visit_stack_elements(ds_seg, (cell*)ds_ptr);
365 visit_stack_elements(rs_seg, (cell*)rs_ptr);
366 visit_object_array(ctx->context_objects,
367 ctx->context_objects + context_object_count);
369 /* Clear out the space not visited with a known pattern. That makes
370 it easier to see if uninitialized reads are made. */
371 ctx->fill_stack_seg(ds_ptr, ds_seg, 0xbaadbadd);
372 ctx->fill_stack_seg(rs_ptr, rs_seg, 0xdaabdaab);
375 template <typename Fixup>
376 void slot_visitor<Fixup>::visit_code_block_objects(code_block* compiled) {
377 visit_handle(&compiled->owner);
378 visit_handle(&compiled->parameters);
379 visit_handle(&compiled->relocation);
/* Visits RT_LITERAL operands embedded in a code block's machine code.
   Uninitialized blocks are skipped — their operands are not yet
   meaningful. */
382 template <typename Fixup>
383 void slot_visitor<Fixup>::visit_embedded_literals(code_block* compiled) {
384 if (parent->code->uninitialized_p(compiled))
387 auto update_literal_refs = [&](instruction_operand op) {
388 if (op.rel.type() == RT_LITERAL) {
389 fixnum value = op.load_value(op.pointer);
390 op.store_value(visit_pointer(value));
393 compiled->each_instruction_operand(update_literal_refs);
/* Rewrites each frame's return address after its code block moves: the
   old address is converted to an offset within the owner block, then
   recomputed against the (possibly relocated) block. */
396 template <typename Fixup> struct call_frame_code_block_visitor {
399 call_frame_code_block_visitor(Fixup fixup)
402 void operator()(cell frame_top, cell size, code_block* owner, cell addr) {
/* Skip the fixup when this Fixup guarantees blocks are already in
   their final location. */
403 code_block* compiled =
404 Fixup::translated_code_block_map ? owner : fixup.fixup_code(owner);
405 cell fixed_addr = compiled->address_for_offset(owner->offset(addr));
407 *(cell*)frame_top = fixed_addr;
/* Updates the code-heap references held by data objects: words and
   quotations cache an entry point, and callstack objects embed return
   addresses in their frames. */
411 template <typename Fixup>
412 void slot_visitor<Fixup>::visit_object_code_block(object* obj) {
413 switch (obj->type()) {
415 word* w = (word*)obj;
/* Re-derive the cached entry point from the fixed-up code block. */
417 w->entry_point = fixup.fixup_code(w->code())->entry_point();
420 case QUOTATION_TYPE: {
421 quotation* q = (quotation*)obj;
423 q->entry_point = fixup.fixup_code(q->code())->entry_point();
426 case CALLSTACK_TYPE: {
427 callstack* stack = (callstack*)obj;
428 call_frame_code_block_visitor<Fixup> call_frame_visitor(fixup);
429 parent->iterate_callstack_object(stack, call_frame_visitor, fixup);
/* Updates return addresses in every active context's callstack. */
435 template <typename Fixup>
436 void slot_visitor<Fixup>::visit_context_code_blocks() {
437 call_frame_code_block_visitor<Fixup> call_frame_visitor(fixup);
438 FACTOR_FOR_EACH(parent->active_contexts) {
439 parent->iterate_callstack(*iter, call_frame_visitor, fixup);
/* Applies the code fixup to the keys of the uninitialized-blocks map. */
443 template <typename Fixup>
444 void slot_visitor<Fixup>::visit_uninitialized_code_blocks() {
/* Build a fresh map: fixing up a key changes its ordering position,
   so the existing map cannot be updated in place. */
445 std::map<code_block*, cell> new_uninitialized_blocks;
446 FACTOR_FOR_EACH(parent->code->uninitialized_blocks) {
447 new_uninitialized_blocks.insert(
448 std::make_pair(fixup.fixup_code(iter->first), iter->second));
450 parent->code->uninitialized_blocks = new_uninitialized_blocks;
/* Fixes up entry-point operands (calls/jumps into other code blocks)
   embedded in a code block's machine code. Uninitialized blocks are
   skipped. */
453 template <typename Fixup>
454 void slot_visitor<Fixup>::visit_embedded_code_pointers(code_block* compiled) {
455 if (parent->code->uninitialized_p(compiled))
457 auto update_code_block_refs = [&](instruction_operand op){
458 relocation_type type = op.rel.type();
459 if (type == RT_ENTRY_POINT ||
460 type == RT_ENTRY_POINT_PIC ||
461 type == RT_ENTRY_POINT_PIC_TAIL) {
/* Store the target's entry point after fixing up the block. */
462 code_block* block = fixup.fixup_code(op.load_code_block());
463 op.store_value(block->entry_point());
466 compiled->each_instruction_operand(update_code_block_refs);
/* Visits one object. Aliens additionally recompute their cached
   address via update_address() — NOTE(review): presumably because the
   object they point into may have moved; confirm against alien.hpp. */
469 template <typename Fixup>
470 void slot_visitor<Fixup>::visit_object(object *ptr) {
472 if (ptr->type() == ALIEN_TYPE)
473 ((alien*)ptr)->update_address();
476 /* Pops items from the mark stack and visits them until the stack is
477 empty. Used when doing a full collection and when collecting to
479 template <typename Fixup>
480 void slot_visitor<Fixup>::visit_mark_stack(std::vector<cell>* mark_stack) {
481 while (!mark_stack->empty()) {
482 cell ptr = mark_stack->back();
483 mark_stack->pop_back();
/* Code-block entries are tagged; the `- 1` strips the tag.
   NOTE(review): the discriminating test (presumably on the low bit)
   is not visible here — confirm against the pushing side. */
486 code_block* compiled = (code_block*)(ptr - 1);
487 visit_code_block_objects(compiled);
488 visit_embedded_literals(compiled);
489 visit_embedded_code_pointers(compiled);
/* Otherwise the entry is a plain data object pointer. */
491 object* obj = (object*)ptr;
493 visit_object_code_block(obj);
498 /* Visits the instruction operands in a code block. If the operand is
499 a pointer to a code block or data object, then the fixup is applied
500 to it. Otherwise, if it is an external address, that address is
501 recomputed. If it is an untagged number literal (RT_UNTAGGED) or an
502 immediate value, then nothing is done with it. */
503 template <typename Fixup>
504 void slot_visitor<Fixup>::visit_instruction_operands(code_block* block,
/* Operand values are loaded relative to the block's old base. */
506 auto visit_func = [&](instruction_operand op){
507 cell old_offset = rel_base + op.rel.offset();
508 cell value = op.load_value(old_offset);
509 switch (op.rel.type()) {
511 value = visit_pointer(value);
515 case RT_ENTRY_POINT_PIC:
516 case RT_ENTRY_POINT_PIC_TAIL:
/* Entry-point operands keep a small value in the tag bits of the
   code block pointer; preserve it across the fixup. */
518 cell offset = TAG(value);
519 code_block* compiled = (code_block*)UNTAG(value);
520 value = RETAG(fixup.fixup_code(compiled), offset);
526 value = parent->compute_external_address(op);
529 op.store_value(value);
531 block->each_instruction_operand(visit_func);
/* Traces the marked card at `index` within the source generation.
   `start` is the address of the last traced object (or 0); the return
   value is the address to resume from when tracing the next card. */
534 template <typename Fixup>
535 template <typename SourceGeneration>
536 cell slot_visitor<Fixup>::visit_card(SourceGeneration* gen,
537 cell index, cell start) {
538 cell heap_base = parent->data->start;
539 cell start_addr = heap_base + index * card_size;
540 cell end_addr = start_addr + card_size;
542 /* Forward to the next object whose address is in the card. */
543 if (!start || (start + ((object*)start)->size()) < start_addr) {
544 /* Optimization because finding the objects in a memory range is
545 expensive. It helps a lot when tracing consecutive cards. */
546 cell gen_start_card = (gen->start - heap_base) / card_size;
548 .find_object_containing_card(index - gen_start_card);
/* Trace every object that overlaps this card. */
551 while (start && start < end_addr) {
552 visit_partial_objects(start, start_addr, end_addr);
553 if ((start + ((object*)start)->size()) >= end_addr) {
554 /* The object can overlap the card boundary, then the
555 remainder of it will be handled in the next card
556 tracing if that card is marked. */
559 start = gen->next_object_after(start);
/* Scans the generation's decks and cards, tracing each card whose
   `mask` bit is set and clearing the `unmask` bits as it goes. */
564 template <typename Fixup>
565 template <typename SourceGeneration>
566 void slot_visitor<Fixup>::visit_cards(SourceGeneration* gen,
567 card mask, card unmask) {
568 card_deck* decks = parent->data->decks;
569 card_deck* cards = parent->data->cards;
570 cell heap_base = parent->data->start;
572 cell first_deck = (gen->start - heap_base) / deck_size;
573 cell last_deck = (gen->end - heap_base) / deck_size;
575 /* Address of last traced object. */
577 for (cell di = first_deck; di < last_deck; di++) {
/* Only descend into decks that contain a marked card. */
578 if (decks[di] & mask) {
579 decks[di] &= ~unmask;
582 cell first_card = cards_per_deck * di;
583 cell last_card = first_card + cards_per_deck;
585 for (cell ci = first_card; ci < last_card; ci++) {
586 if (cards[ci] & mask) {
587 cards[ci] &= ~unmask;
/* Carry `start` between cards so visit_card can resume cheaply. */
590 start = visit_card(gen, ci, start);
592 /* At end of generation, no need to scan more cards. */
/* Visits the data objects and embedded literals referenced by each
   code block in the remembered set. */
601 template <typename Fixup>
602 void slot_visitor<Fixup>::visit_code_heap_roots(std::set<code_block*>* remembered_set) {
603 FACTOR_FOR_EACH(*remembered_set) {
604 code_block* compiled = *iter;
605 visit_code_block_objects(compiled);
606 visit_embedded_literals(compiled);
/* Literal patching wrote into executable memory. */
607 compiled->flush_icache();
/* Cheney-style scan: visit objects from `scan` up to the target
   generation's allocation pointer. The loop re-reads gen->here each
   iteration, so objects appended to the generation while visiting are
   scanned too. */
611 template <typename Fixup>
612 template <typename TargetGeneration>
613 void slot_visitor<Fixup>::cheneys_algorithm(TargetGeneration* gen, cell scan) {
614 while (scan && scan < gen->here) {
615 visit_object((object*)scan);
616 scan = gen->next_object_after(scan);