3 // Size sans alignment.
// Computes the size in bytes of this object (before data_alignment is
// applied), dispatching on the object's type tag.
// NOTE(review): this listing is elided — the switch header and several case
// labels around the returns below are not visible here.
4 template <typename Fixup>
5 cell object::base_size(Fixup fixup) const {
// Array-like types all use array_size() on the appropriately-cast pointer.
8 return array_size((array*)this);
10 return array_size((bignum*)this);
12 return array_size((byte_array*)this);
// Strings: size computed from the string's capacity.
14 return string_size(string_capacity((string*)this));
// Tuples: the layout object may have moved during GC, so it is routed
// through fixup.translate_data() before tuple_size() reads it.
16 tuple_layout* layout = (tuple_layout*)fixup.translate_data(
17 untag<object>(((tuple*)this)->layout));
18 return tuple_size(layout);
// Fixed-size boxed types.
21 return sizeof(quotation);
25 return sizeof(boxed_float);
31 return sizeof(wrapper);
32 case CALLSTACK_TYPE: {
// Callstack objects: size derives from the stored length fixnum.
33 cell callstack_length = untag_fixnum(((callstack*)this)->length);
34 return callstack_object_size(callstack_length);
// Unrecognized type tag: fatal error, reporting this object's address.
37 critical_error("Invalid header in base_size", (cell)this);
42 // Size of the object pointed to by an untagged pointer
43 template <typename Fixup>
44 cell object::size(Fixup fixup) const {
// Free heap blocks carry their own size; live objects use base_size()
// rounded up to data_alignment. NOTE(review): the guard condition selecting
// between these two returns is elided from this listing.
46 return ((free_heap_block*)this)->size();
47 return align(base_size(fixup), data_alignment);
// Convenience overload: object size with no pointer translation applied.
50 inline cell object::size() const { return size(no_fixup()); }
52 // The number of slots (cells) in an object which should be scanned by
53 // the GC. The number can vary in arrays and tuples, in all other
54 // types the number is a constant.
55 template <typename Fixup>
56 inline cell object::slot_count(Fixup fixup) const {
// NOTE(review): the computation of `t` (the object's type tag) is elided
// from this listing, as is the switch header for the cases below.
61 if (t == ARRAY_TYPE) {
// Arrays: one slot for the capacity header plus one per element.
63 return 1 + array_capacity((array*)this);
64 } else if (t == TUPLE_TYPE) {
// Tuples: the layout may have moved, so translate it before reading
// its capacity; +1 covers the layout slot itself.
65 tuple_layout* layout = (tuple_layout*)fixup.translate_data(
66 untag<object>(((tuple*)this)->layout));
68 return 1 + tuple_capacity(layout);
71 // these objects do not refer to other objects at all
75 case CALLSTACK_TYPE: return 0;
// Fixed slot counts per type.
76 case WORD_TYPE: return 8;
77 case ALIEN_TYPE: return 2;
78 case DLL_TYPE: return 1;
79 case QUOTATION_TYPE: return 3;
80 case STRING_TYPE: return 3;
81 case WRAPPER_TYPE: return 1;
// Unrecognized type tag: fatal error with this object's address.
83 critical_error("Invalid header in slot_count", (cell)this);
84 return 0; // can't happen
// Convenience overload: slot count with no pointer translation applied.
89 inline cell object::slot_count() const {
90 return slot_count(no_fixup());
93 // Slot visitors iterate over the slots of an object, applying a functor to
94 // each one that is a non-immediate slot. The pointer is untagged first.
95 // The functor returns a new untagged object pointer. The return value may
96 // or may not equal the old one, however the new pointer receives the same
97 // tag before being stored back to the original location.
99 // Slots storing immediate values are left unchanged and the visitor does
102 // This is used by GC's copying, sweep and compact phases, and the
103 // implementation of the become primitive.
105 // Iteration is driven by visit_*() methods. Only one of them defines GC
107 // - visit_all_roots()
109 // Code block visitors iterate over sets of code blocks, applying a functor
110 // to each one. The functor returns a new code_block pointer, which may or
111 // may not equal the old one. This is stored back to the original location.
113 // This is used by GC's sweep and compact phases, and the implementation of
114 // the modify-code-heap primitive.
116 // Iteration is driven by visit_*() methods. Some of them define GC roots:
117 // - visit_context_code_blocks()
118 // - visit_callback_code_blocks()
// Visitor that applies a Fixup functor to every object slot and code block
// reference it is pointed at; see the block comment above for the contract.
// NOTE(review): member fields and the constructor body are elided from this
// listing.
120 template <typename Fixup> struct slot_visitor {
126 slot_visitor<Fixup>(factor_vm* parent, Fixup fixup)
// Core slot-visiting primitives.
132 cell visit_pointer(cell pointer);
133 void visit_handle(cell* handle);
134 void visit_object_array(cell* start, cell* end);
135 void visit_partial_objects(cell start, cell card_start, cell card_end);
136 void visit_slots(object* ptr);
137 void visit_stack_elements(segment* region, cell* top);
// Root-set and context traversal.
138 void visit_all_roots();
139 void visit_callstack_object(callstack* stack);
140 void visit_callstack(context* ctx);
141 void visit_context(context *ctx);
142 void visit_object_code_block(object* obj);
143 void visit_context_code_blocks();
144 void visit_uninitialized_code_blocks();
145 void visit_object(object* obj);
146 void visit_mark_stack(std::vector<cell>* mark_stack);
// Card-marking (generational write barrier) traversal.
149 template <typename SourceGeneration>
150 cell visit_card(SourceGeneration* gen, cell index, cell start);
151 template <typename SourceGeneration>
152 void visit_cards(SourceGeneration* gen, card mask, card unmask);
// Copying-collection scan loop over a target generation.
155 template <typename TargetGeneration>
156 void cheneys_algorithm(TargetGeneration* gen, cell scan);
158 // Visits the data pointers in code blocks in the remembered set.
159 void visit_code_heap_roots(std::set<code_block*>* remembered_set);
161 // Visits pointers embedded in instructions in code blocks.
162 void visit_instruction_operands(code_block* block, cell rel_base);
163 void visit_embedded_code_pointers(code_block* compiled);
164 void visit_embedded_literals(code_block* compiled);
166 // Visits data pointers in code blocks.
167 void visit_code_block_objects(code_block* compiled);
// Applies the fixup to a tagged pointer: untag, relocate the object
// pointer, then re-apply the original tag bits to the new address.
170 template <typename Fixup>
171 cell slot_visitor<Fixup>::visit_pointer(cell pointer) {
172 object* untagged = fixup.fixup_data(untag<object>(pointer));
173 return RETAG(untagged, TAG(pointer));
// Visits a single slot in place; immediate values (fixnums etc.) are left
// untouched, only heap pointers are rewritten.
176 template <typename Fixup> void slot_visitor<Fixup>::visit_handle(cell* handle) {
177 if (!immediate_p(*handle)) {
178 *handle = visit_pointer(*handle);
// Visits every slot in the half-open range [start, end).
// NOTE(review): the loop header iterating `start` up to `end` is elided
// from this listing.
182 template <typename Fixup>
183 void slot_visitor<Fixup>::visit_object_array(cell* start, cell* end) {
185 visit_handle(start++);
// Visits all GC-scannable slots of an object. Callstack objects get special
// per-frame treatment; ordinary objects scan slot_count() cells starting
// right after the header word.
188 template <typename Fixup> void slot_visitor<Fixup>::visit_slots(object* obj) {
189 if (obj->type() == CALLSTACK_TYPE)
190 visit_callstack_object((callstack*)obj);
// Slots begin one cell past the object header.
192 cell* start = (cell*)obj + 1;
193 cell* end = start + obj->slot_count(fixup);
194 visit_object_array(start, end);
// Visits every cell on a stack segment from its base up to and including
// the current top-of-stack cell.
198 template <typename Fixup>
199 void slot_visitor<Fixup>::visit_stack_elements(segment* region, cell* top) {
200 visit_object_array((cell*)region->start, top + 1);
// Visits every global GC root held by the VM: pinned data roots, callback
// stub owners, uninitialized code block literals, sampling-profiler data,
// the special-objects table, and all active contexts.
203 template <typename Fixup> void slot_visitor<Fixup>::visit_all_roots() {
204 FACTOR_FOR_EACH(parent->data_roots) {
// Each callback stub's owner word is a root.
208 auto callback_slot_visitor = [&](code_block* stub, cell size) {
209 visit_handle(&stub->owner);
211 parent->callbacks->allocator->iterate(callback_slot_visitor, no_fixup());
// Owner values of not-yet-initialized code blocks are roots too.
213 FACTOR_FOR_EACH(parent->code->uninitialized_blocks) {
214 iter->second = visit_pointer(iter->second);
// Sampling profiler state.
217 FACTOR_FOR_EACH(parent->sample_callstacks) {
218 visit_handle(&*iter);
221 FACTOR_FOR_EACH(parent->samples) {
222 visit_handle(&iter->thread);
// The special-objects table holds well-known VM objects.
225 visit_object_array(parent->special_objects,
226 parent->special_objects + special_object_count);
// Every live context contributes its stacks and context objects.
228 FACTOR_FOR_EACH(parent->active_contexts) {
229 visit_context(*iter);
233 // primitive_minor_gc() is invoked by inline GC checks, and it needs to
234 // visit spill slots which reference objects in the heap.
236 // So for each call frame:
237 // - trace roots in spill slots
// Per-call-frame functor: locates each frame's GC map via the owning code
// block's gc_info, then visits spill slots that hold tagged roots. Derived
// pointers (pointers into the middle of objects) are temporarily converted
// to offsets from their base pointer, the bases are updated, and the
// offsets are re-added afterwards.
239 template <typename Fixup> struct call_frame_slot_visitor {
240 slot_visitor<Fixup>* visitor;
242 call_frame_slot_visitor(slot_visitor<Fixup>* visitor)
243 : visitor(visitor) {}
245 // frame top -> [return address]
251 void operator()(cell frame_top, cell size, code_block* owner, cell addr) {
// Offset of the return address within the owning code block.
252 cell return_address = owner->offset(addr);
// When the fixup keeps a translated code block map, `owner` is already
// usable; otherwise translate it to its current address first.
254 code_block* compiled =
255 Fixup::translated_code_block_map ? owner
256 : visitor->fixup.translate_code(owner);
257 gc_info* info = compiled->block_gc_info();
259 FACTOR_ASSERT(return_address < compiled->size());
// Look up the GC map entry for this return address; (cell)-1 means no
// GC info is recorded for this callsite.
260 cell callsite = info->return_address_index(return_address);
261 if (callsite == (cell)-1)
265 FACTOR_PRINT("call frame code block " << compiled << " with offset "
268 cell* stack_pointer = (cell*)frame_top;
269 uint8_t* bitmap = info->gc_info_bitmap();
271 // Subtract old value of base pointer from every derived pointer.
272 for (cell spill_slot = 0; spill_slot < info->derived_root_count;
274 uint32_t base_pointer = info->lookup_base_pointer(callsite, spill_slot);
275 if (base_pointer != (uint32_t)-1) {
277 FACTOR_PRINT("visiting derived root " << spill_slot
278 << " with base pointer " << base_pointer);
280 stack_pointer[spill_slot] -= stack_pointer[base_pointer];
284 // Update all GC roots, including base pointers.
285 cell callsite_gc_roots = info->callsite_gc_roots(callsite);
287 for (cell spill_slot = 0; spill_slot < info->gc_root_count; spill_slot++) {
288 if (bitmap_p(bitmap, callsite_gc_roots + spill_slot)) {
290 FACTOR_PRINT("visiting GC root " << spill_slot);
292 visitor->visit_handle(stack_pointer + spill_slot);
296 // Add the base pointers to obtain new derived pointer values.
297 for (cell spill_slot = 0; spill_slot < info->derived_root_count;
299 uint32_t base_pointer = info->lookup_base_pointer(callsite, spill_slot);
300 if (base_pointer != (uint32_t)-1)
301 stack_pointer[spill_slot] += stack_pointer[base_pointer];
// Visits the GC roots in every frame of a heap-allocated callstack object.
306 template <typename Fixup>
307 void slot_visitor<Fixup>::visit_callstack_object(callstack* stack) {
308 call_frame_slot_visitor<Fixup> call_frame_visitor(this);
309 parent->iterate_callstack_object(stack, call_frame_visitor, fixup);
// Visits the GC roots in every frame of a context's live callstack.
312 template <typename Fixup>
313 void slot_visitor<Fixup>::visit_callstack(context* ctx) {
314 call_frame_slot_visitor<Fixup> call_frame_visitor(this);
315 parent->iterate_callstack(ctx, call_frame_visitor, fixup);
// Visits every root reachable from a context: its callstack frames, its
// data and retain stacks, and its context-object table. Afterwards the
// unused portions of the stack segments are filled with poison patterns.
318 template <typename Fixup>
319 void slot_visitor<Fixup>::visit_context(context* ctx) {
320 visit_callstack(ctx);
// Snapshot the stack pointers/segments before scanning.
322 cell ds_ptr = ctx->datastack;
323 cell rs_ptr = ctx->retainstack;
324 segment* ds_seg = ctx->datastack_seg;
325 segment* rs_seg = ctx->retainstack_seg;
326 visit_stack_elements(ds_seg, (cell*)ds_ptr);
327 visit_stack_elements(rs_seg, (cell*)rs_ptr);
328 visit_object_array(ctx->context_objects,
329 ctx->context_objects + context_object_count);
331 // Clear out the space not visited with a known pattern. That makes
332 // it easier to see if uninitialized reads are made.
333 ctx->fill_stack_seg(ds_ptr, ds_seg, 0xbaadbadd);
334 ctx->fill_stack_seg(rs_ptr, rs_seg, 0xdaabdabb);
// Visits the three data-heap references a code block carries: its owner,
// its parameters, and its relocation data.
337 template <typename Fixup>
338 void slot_visitor<Fixup>::visit_code_block_objects(code_block* compiled) {
339 visit_handle(&compiled->owner);
340 visit_handle(&compiled->parameters);
341 visit_handle(&compiled->relocation);
// Visits data-heap literals embedded directly in a code block's machine
// code (RT_LITERAL relocations). Uninitialized code blocks are skipped —
// their operands are not yet valid.
344 template <typename Fixup>
345 void slot_visitor<Fixup>::visit_embedded_literals(code_block* compiled) {
346 if (parent->code->uninitialized_p(compiled))
349 auto update_literal_refs = [&](instruction_operand op) {
350 if (op.rel.type() == RT_LITERAL) {
351 cell value = op.load_value(op.pointer);
// Only heap pointers are rewritten; immediates stay as-is.
352 if (!immediate_p(value)) {
353 op.store_value(visit_pointer(value));
357 compiled->each_instruction_operand(update_literal_refs);
// Per-call-frame functor that rewrites the frame's return address to point
// into the (possibly relocated) code block, preserving the original offset.
360 template <typename Fixup> struct call_frame_code_block_visitor {
363 call_frame_code_block_visitor(Fixup fixup) : fixup(fixup) {}
365 void operator()(cell frame_top, cell size, code_block* owner, cell addr) {
// Translate the owning code block unless the fixup already maps it.
366 code_block* compiled =
367 Fixup::translated_code_block_map ? owner : fixup.fixup_code(owner);
// Recompute the return address at the same offset in the new block.
368 cell fixed_addr = compiled->address_for_offset(owner->offset(addr));
370 *(cell*)frame_top = fixed_addr;
// Updates the code-heap references held by an object: words and quotations
// cache an entry point into their compiled code block, and callstack
// objects hold return addresses in every frame.
374 template <typename Fixup>
375 void slot_visitor<Fixup>::visit_object_code_block(object* obj) {
376 switch (obj->type()) {
// NOTE(review): the WORD_TYPE case label is elided from this listing.
378 word* w = (word*)obj;
// Refresh the cached entry point from the relocated code block.
380 w->entry_point = fixup.fixup_code(w->code())->entry_point();
383 case QUOTATION_TYPE: {
384 quotation* q = (quotation*)obj;
386 q->entry_point = fixup.fixup_code(q->code())->entry_point();
389 case CALLSTACK_TYPE: {
390 callstack* stack = (callstack*)obj;
// Rewrite each frame's return address via the code-block visitor.
391 call_frame_code_block_visitor<Fixup> call_frame_visitor(fixup);
392 parent->iterate_callstack_object(stack, call_frame_visitor, fixup);
// Rewrites the return addresses in the callstacks of all active contexts
// so they point into the relocated code blocks.
398 template <typename Fixup>
399 void slot_visitor<Fixup>::visit_context_code_blocks() {
400 call_frame_code_block_visitor<Fixup> call_frame_visitor(fixup);
401 FACTOR_FOR_EACH(parent->active_contexts) {
402 parent->iterate_callstack(*iter, call_frame_visitor, fixup);
// Rebuilds the uninitialized-code-blocks map with each code block key
// translated through the fixup. A fresh map is built because relocating
// a key changes its position in the std::map ordering.
406 template <typename Fixup>
407 void slot_visitor<Fixup>::visit_uninitialized_code_blocks() {
408 std::map<code_block*, cell> new_uninitialized_blocks;
409 FACTOR_FOR_EACH(parent->code->uninitialized_blocks) {
410 new_uninitialized_blocks.insert(
411 std::make_pair(fixup.fixup_code(iter->first), iter->second));
413 parent->code->uninitialized_blocks = new_uninitialized_blocks;
// Rewrites entry-point references embedded in a code block's instructions
// (plain, PIC, and PIC-tail relocations) to the relocated targets.
// Uninitialized code blocks are skipped — their operands are not yet valid.
416 template <typename Fixup>
417 void slot_visitor<Fixup>::visit_embedded_code_pointers(code_block* compiled) {
418 if (parent->code->uninitialized_p(compiled))
420 auto update_code_block_refs = [&](instruction_operand op){
421 relocation_type type = op.rel.type();
422 if (type == RT_ENTRY_POINT ||
423 type == RT_ENTRY_POINT_PIC ||
424 type == RT_ENTRY_POINT_PIC_TAIL) {
// Relocate the referenced block, then store its new entry point.
425 code_block* block = fixup.fixup_code(op.load_code_block());
426 op.store_value(block->entry_point());
429 compiled->each_instruction_operand(update_code_block_refs);
// Visits one object's slots; aliens additionally recompute their cached
// address in case the object they point into has moved.
// NOTE(review): the call that visits the object's slots is elided from
// this listing.
432 template <typename Fixup>
433 void slot_visitor<Fixup>::visit_object(object *ptr) {
435 if (ptr->type() == ALIEN_TYPE)
436 ((alien*)ptr)->update_address();
439 // Pops items from the mark stack and visits them until the stack is
440 // empty. Used when doing a full collection and when collecting to
442 template <typename Fixup>
443 void slot_visitor<Fixup>::visit_mark_stack(std::vector<cell>* mark_stack) {
// Drain the mark stack one entry at a time.
444 while (!mark_stack->empty()) {
445 cell ptr = mark_stack->back();
446 mark_stack->pop_back();
// Entries are tagged: a set low bit marks a code block (the bit is
// cleared to recover the pointer); otherwise it is a data object.
// NOTE(review): the branch testing that low bit is elided from this
// listing.
449 code_block* compiled = (code_block*)(ptr - 1);
450 visit_code_block_objects(compiled);
451 visit_embedded_literals(compiled);
452 visit_embedded_code_pointers(compiled);
454 object* obj = (object*)ptr;
456 visit_object_code_block(obj);
461 // Visits the instruction operands in a code block. If the operand is
462 // a pointer to a code block or data object, then the fixup is applied
463 // to it. Otherwise, if it is an external address, that address is
464 // recomputed. If it is an untagged number literal (RT_UNTAGGED) or an
465 // immediate value, then nothing is done with it.
466 template <typename Fixup>
467 void slot_visitor<Fixup>::visit_instruction_operands(code_block* block,
// Operand values are loaded relative to rel_base (the block's old
// address) so relocated operands can be recomputed correctly.
469 auto visit_func = [&](instruction_operand op){
470 cell old_offset = rel_base + op.rel.offset();
471 cell old_value = op.load_value(old_offset);
472 switch (op.rel.type()) {
// NOTE(review): several case labels in this switch are elided from
// this listing (the literal case appears first).
474 if (!immediate_p(old_value)) {
475 op.store_value(visit_pointer(old_value));
480 case RT_ENTRY_POINT_PIC:
481 case RT_ENTRY_POINT_PIC_TAIL:
// Entry-point operands carry a tag in the low bits; preserve it
// across the code-block relocation.
483 cell offset = TAG(old_value);
484 code_block* compiled = (code_block*)UNTAG(old_value);
485 op.store_value(RETAG(fixup.fixup_code(compiled), offset));
// External addresses are recomputed rather than relocated.
491 op.store_value(parent->compute_external_address(op));
// Skip blocks whose operands have not been initialized yet.
495 if (parent->code->uninitialized_p(block))
497 block->each_instruction_operand(visit_func);
// Visits the slots of the object at `start` that fall inside the card
// range [card_start, card_end) — used when an object straddles card
// boundaries so only the marked card's portion is scanned.
500 template <typename Fixup>
501 void slot_visitor<Fixup>::visit_partial_objects(cell start,
// Slots begin one cell past the object header.
504 cell *scan_start = (cell*)start + 1;
505 cell *scan_end = scan_start + ((object*)start)->slot_count();
// Clamp the slot range to the card's boundaries.
507 scan_start = std::max(scan_start, (cell*)card_start);
508 scan_end = std::min(scan_end, (cell*)card_end);
510 visit_object_array(scan_start, scan_end);
// Scans one marked card: visits the portions of all objects that overlap
// the card's address range. `start` is the address of the last object
// traced by the previous card (or 0); the return value threads that state
// to the next card so consecutive cards avoid repeated object searches.
513 template <typename Fixup>
514 template <typename SourceGeneration>
515 cell slot_visitor<Fixup>::visit_card(SourceGeneration* gen,
516 cell index, cell start) {
// Compute the card's address range from its index.
517 cell heap_base = parent->data->start;
518 cell start_addr = heap_base + index * card_size;
519 cell end_addr = start_addr + card_size;
521 // Forward to the next object whose address is in the card.
522 if (!start || (start + ((object*)start)->size()) < start_addr) {
523 // Optimization because finding the objects in a memory range is
524 // expensive. It helps a lot when tracing consecutive cards.
525 cell gen_start_card = (gen->start - heap_base) / card_size;
527 .find_object_containing_card(index - gen_start_card);
// Trace every object that overlaps this card.
530 while (start && start < end_addr) {
531 visit_partial_objects(start, start_addr, end_addr);
532 if ((start + ((object*)start)->size()) >= end_addr) {
533 // The object can overlap the card boundary, then the
534 // remainder of it will be handled in the next card
535 // tracing if that card is marked.
538 start = gen->next_object_after(start);
// Walks the card table for a source generation, deck by deck. Decks and
// cards matching `mask` are scanned (and their `unmask` bits cleared);
// unmarked decks let whole groups of cards be skipped cheaply.
543 template <typename Fixup>
544 template <typename SourceGeneration>
545 void slot_visitor<Fixup>::visit_cards(SourceGeneration* gen,
546 card mask, card unmask) {
547 card_deck* decks = parent->data->decks;
548 card_deck* cards = parent->data->cards;
549 cell heap_base = parent->data->start;
// Deck index range covering this generation.
551 cell first_deck = (gen->start - heap_base) / deck_size;
552 cell last_deck = (gen->end - heap_base) / deck_size;
554 // Address of last traced object.
556 for (cell di = first_deck; di < last_deck; di++) {
557 if (decks[di] & mask) {
// Clear the deck's bits, then examine its individual cards.
558 decks[di] &= ~unmask;
561 cell first_card = cards_per_deck * di;
562 cell last_card = first_card + cards_per_deck;
564 for (cell ci = first_card; ci < last_card; ci++) {
565 if (cards[ci] & mask) {
566 cards[ci] &= ~unmask;
// Thread the last-traced-object address between cards.
569 start = visit_card(gen, ci, start);
571 // At end of generation, no need to scan more cards.
// Visits the data-heap references (owner/parameters/relocation and
// embedded literals) of every code block in the remembered set, flushing
// the instruction cache for each block after its instructions change.
580 template <typename Fixup>
581 void slot_visitor<Fixup>::visit_code_heap_roots(std::set<code_block*>* remembered_set) {
582 FACTOR_FOR_EACH(*remembered_set) {
583 code_block* compiled = *iter;
584 visit_code_block_objects(compiled);
585 visit_embedded_literals(compiled);
586 compiled->flush_icache();
// Cheney-style scan loop: visits each object in the target generation from
// `scan` up to the allocation point (gen->here); objects copied during
// visiting extend the region and are scanned in turn.
// NOTE(review): this definition runs past the end of the visible listing.
590 template <typename Fixup>
591 template <typename TargetGeneration>
592 void slot_visitor<Fixup>::cheneys_algorithm(TargetGeneration* gen, cell scan) {
593 while (scan && scan < gen->here) {
594 visit_object((object*)scan);
595 scan = gen->next_object_after(scan);