3 /* Size of the object pointed to by an untagged pointer */
4 template <typename Fixup> cell object::size(Fixup fixup) const {
6 return ((free_heap_block*)this)->size();
10 return align(array_size((array*)this), data_alignment);
12 return align(array_size((bignum*)this), data_alignment);
14 return align(array_size((byte_array*)this), data_alignment);
16 return align(string_size(string_capacity((string*)this)), data_alignment);
18 tuple_layout* layout = (tuple_layout*)fixup.translate_data(
19 untag<object>(((tuple*)this)->layout));
20 return align(tuple_size(layout), data_alignment);
23 return align(sizeof(quotation), data_alignment);
25 return align(sizeof(word), data_alignment);
27 return align(sizeof(boxed_float), data_alignment);
29 return align(sizeof(dll), data_alignment);
31 return align(sizeof(alien), data_alignment);
33 return align(sizeof(wrapper), data_alignment);
36 callstack_object_size(untag_fixnum(((callstack*)this)->length)),
39 critical_error("Invalid header in size", (cell)this);
40 return 0; /* can't happen */
44 inline cell object::size() const { return size(no_fixup()); }
46 /* The number of cells from the start of the object which should be scanned by
47 the GC. Some types have a binary payload at the end (string, word, DLL) which
49 template <typename Fixup> cell object::binary_payload_start(Fixup fixup) const {
54 /* these objects do not refer to other objects at all */
60 /* these objects have some binary data at the end */
62 return sizeof(word) - sizeof(cell);
64 return sizeof(cell) * 3;
66 return sizeof(cell) * 2;
68 return sizeof(quotation) - sizeof(cell);
70 return sizeof(string);
71 /* everything else consists entirely of pointers */
73 return array_size<array>(array_capacity((array*)this));
75 tuple_layout* layout = (tuple_layout*)fixup.translate_data(
76 untag<object>(((tuple*)this)->layout));
77 return tuple_size(layout);
80 return sizeof(wrapper);
82 critical_error("Invalid header in binary_payload_start", (cell)this);
83 return 0; /* can't happen */
87 inline cell object::binary_payload_start() const {
88 return binary_payload_start(no_fixup());
/* Slot visitors iterate over the slots of an object, applying a functor to
each one that is a non-immediate slot. The pointer is untagged first. The
functor returns a new untagged object pointer. The return value may or may not
equal the old one, however the new pointer receives the same tag before being
stored back to the original location.

Slots storing immediate values are left unchanged and the visitor does inspect
them.

This is used by GC's copying, sweep and compact phases, and the implementation
of the become primitive.

Iteration is driven by visit_*() methods. Only one of them defines GC roots:
  - visit_all_roots()

Code block visitors iterate over sets of code blocks, applying a functor to
each one. The functor returns a new code_block pointer, which may or may not
equal the old one. This is stored back to the original location.

This is used by GC's sweep and compact phases, and the implementation of the
modify-code-heap primitive.

Iteration is driven by visit_*() methods. Some of them define GC roots:
  - visit_context_code_blocks()
  - visit_callback_code_blocks() */
118 template <typename Fixup> struct slot_visitor {
122 slot_visitor<Fixup>(factor_vm* parent, Fixup fixup)
123 : parent(parent), fixup(fixup) {}
125 cell visit_pointer(cell pointer);
126 void visit_handle(cell* handle);
127 void visit_object_array(cell* start, cell* end);
128 void visit_slots(object* ptr, cell payload_start);
129 void visit_slots(object* ptr);
130 void visit_stack_elements(segment* region, cell* top);
131 void visit_data_roots();
132 void visit_callback_roots();
133 void visit_literal_table_roots();
134 void visit_all_roots();
135 void visit_callstack_object(callstack* stack);
136 void visit_callstack(context* ctx);
137 void visit_context(context *ctx);
138 void visit_contexts();
139 void visit_code_block_objects(code_block* compiled);
140 void visit_embedded_literals(code_block* compiled);
141 void visit_sample_callstacks();
142 void visit_sample_threads();
143 void visit_object_code_block(object* obj);
144 void visit_context_code_blocks();
145 void visit_uninitialized_code_blocks();
146 void visit_embedded_code_pointers(code_block* compiled);
147 void visit_object(object* obj);
148 void visit_mark_stack(std::vector<cell>* mark_stack);
151 template <typename Fixup>
152 cell slot_visitor<Fixup>::visit_pointer(cell pointer) {
153 if (immediate_p(pointer))
156 object* untagged = fixup.fixup_data(untag<object>(pointer));
157 return RETAG(untagged, TAG(pointer));
160 template <typename Fixup> void slot_visitor<Fixup>::visit_handle(cell* handle) {
161 *handle = visit_pointer(*handle);
164 template <typename Fixup>
165 void slot_visitor<Fixup>::visit_object_array(cell* start, cell* end) {
167 visit_handle(start++);
170 template <typename Fixup>
171 void slot_visitor<Fixup>::visit_slots(object* ptr, cell payload_start) {
172 cell* slot = (cell*)ptr;
173 cell* end = (cell*)((cell)ptr + payload_start);
177 visit_object_array(slot, end);
181 template <typename Fixup> void slot_visitor<Fixup>::visit_slots(object* obj) {
182 if (obj->type() == CALLSTACK_TYPE)
183 visit_callstack_object((callstack*)obj);
185 visit_slots(obj, obj->binary_payload_start(fixup));
188 template <typename Fixup>
189 void slot_visitor<Fixup>::visit_stack_elements(segment* region, cell* top) {
190 visit_object_array((cell*)region->start, top + 1);
193 template <typename Fixup> void slot_visitor<Fixup>::visit_data_roots() {
194 std::vector<cell*>::const_iterator iter =
195 parent->data_roots.begin();
196 std::vector<cell*>::const_iterator end =
197 parent->data_roots.end();
199 for (; iter < end; iter++) {
204 template <typename Fixup> struct callback_slot_visitor {
205 slot_visitor<Fixup>* visitor;
207 callback_slot_visitor(slot_visitor<Fixup>* visitor) : visitor(visitor) {}
209 void operator()(code_block* stub, cell size) {
210 visitor->visit_handle(&stub->owner);
214 template <typename Fixup> void slot_visitor<Fixup>::visit_callback_roots() {
215 callback_slot_visitor<Fixup> callback_visitor(this);
216 parent->callbacks->allocator->iterate(callback_visitor);
219 template <typename Fixup>
220 void slot_visitor<Fixup>::visit_literal_table_roots() {
221 std::map<code_block*, cell>* uninitialized_blocks =
222 &parent->code->uninitialized_blocks;
223 std::map<code_block*, cell>::iterator iter =
224 uninitialized_blocks->begin();
225 std::map<code_block*, cell>::iterator end = uninitialized_blocks->end();
227 for (; iter != end; iter++) {
228 iter->second = visit_pointer(iter->second);
232 template <typename Fixup> void slot_visitor<Fixup>::visit_sample_callstacks() {
233 for (std::vector<cell>::iterator iter = parent->sample_callstacks.begin();
234 iter != parent->sample_callstacks.end(); ++iter) {
235 visit_handle(&*iter);
239 template <typename Fixup> void slot_visitor<Fixup>::visit_sample_threads() {
240 for (std::vector<profiling_sample>::iterator iter = parent->samples.begin();
241 iter != parent->samples.end(); ++iter) {
242 visit_handle(&iter->thread);
246 template <typename Fixup> void slot_visitor<Fixup>::visit_all_roots() {
247 visit_handle(&parent->true_object);
248 visit_handle(&parent->bignum_zero);
249 visit_handle(&parent->bignum_pos_one);
250 visit_handle(&parent->bignum_neg_one);
253 visit_callback_roots();
254 visit_literal_table_roots();
255 visit_sample_callstacks();
256 visit_sample_threads();
258 visit_object_array(parent->special_objects,
259 parent->special_objects + special_object_count);
264 /* primitive_minor_gc() is invoked by inline GC checks, and it needs to fill in
265 uninitialized stack locations before actually calling the GC. See the
266 documentation in compiler.cfg.stacks.vacant for details.
268 So for each call frame:
270 - scrub some uninitialized locations
271 - trace roots in spill slots
273 template <typename Fixup> struct call_frame_slot_visitor {
274 slot_visitor<Fixup>* visitor;
275 /* NULL in case we're a visitor for a callstack object. */
278 void scrub_stack(cell stack, uint8_t* bitmap, cell base, uint32_t count) {
279 for (cell loc = 0; loc < count; loc++) {
280 if (bitmap_p(bitmap, base + loc)) {
282 FACTOR_PRINT("scrubbing stack location " << loc);
284 *((cell*)stack - loc) = 0;
289 call_frame_slot_visitor(slot_visitor<Fixup>* visitor, context* ctx)
290 : visitor(visitor), ctx(ctx) {}
293 frame top -> [return address]
299 void operator()(cell frame_top, cell size, code_block* owner, cell addr) {
300 cell return_address = owner->offset(addr);
302 code_block* compiled =
303 Fixup::translated_code_block_map ? owner
304 : visitor->fixup.translate_code(owner);
305 gc_info* info = compiled->block_gc_info();
307 FACTOR_ASSERT(return_address < compiled->size());
308 cell callsite = info->return_address_index(return_address);
309 if (callsite == (cell)-1)
313 FACTOR_PRINT("call frame code block " << compiled << " with offset "
316 cell* stack_pointer = (cell*)frame_top;
317 uint8_t* bitmap = info->gc_info_bitmap();
320 /* Scrub vacant stack locations. */
321 scrub_stack(ctx->datastack,
323 info->callsite_scrub_d(callsite),
324 info->scrub_d_count);
325 scrub_stack(ctx->retainstack,
327 info->callsite_scrub_r(callsite),
328 info->scrub_r_count);
331 /* Subtract old value of base pointer from every derived pointer. */
332 for (cell spill_slot = 0; spill_slot < info->derived_root_count;
334 uint32_t base_pointer = info->lookup_base_pointer(callsite, spill_slot);
335 if (base_pointer != (uint32_t)-1) {
337 FACTOR_PRINT("visiting derived root " << spill_slot
338 << " with base pointer " << base_pointer);
340 stack_pointer[spill_slot] -= stack_pointer[base_pointer];
344 /* Update all GC roots, including base pointers. */
345 cell callsite_gc_roots = info->callsite_gc_roots(callsite);
347 for (cell spill_slot = 0; spill_slot < info->gc_root_count; spill_slot++) {
348 if (bitmap_p(bitmap, callsite_gc_roots + spill_slot)) {
350 FACTOR_PRINT("visiting GC root " << spill_slot);
352 visitor->visit_handle(stack_pointer + spill_slot);
356 /* Add the base pointers to obtain new derived pointer values. */
357 for (cell spill_slot = 0; spill_slot < info->derived_root_count;
359 uint32_t base_pointer = info->lookup_base_pointer(callsite, spill_slot);
360 if (base_pointer != (uint32_t)-1)
361 stack_pointer[spill_slot] += stack_pointer[base_pointer];
366 template <typename Fixup>
367 void slot_visitor<Fixup>::visit_callstack_object(callstack* stack) {
368 call_frame_slot_visitor<Fixup> call_frame_visitor(this, NULL);
369 parent->iterate_callstack_object(stack, call_frame_visitor, fixup);
372 template <typename Fixup>
373 void slot_visitor<Fixup>::visit_callstack(context* ctx) {
374 call_frame_slot_visitor<Fixup> call_frame_visitor(this, ctx);
375 parent->iterate_callstack(ctx, call_frame_visitor, fixup);
378 template <typename Fixup>
379 void slot_visitor<Fixup>::visit_context(context* ctx) {
380 /* Callstack is visited first because it scrubs the data and retain
382 visit_callstack(ctx);
384 cell ds_ptr = ctx->datastack;
385 cell rs_ptr = ctx->retainstack;
386 segment* ds_seg = ctx->datastack_seg;
387 segment* rs_seg = ctx->retainstack_seg;
388 visit_stack_elements(ds_seg, (cell*)ds_ptr);
389 visit_stack_elements(rs_seg, (cell*)rs_ptr);
390 visit_object_array(ctx->context_objects,
391 ctx->context_objects + context_object_count);
393 /* Clear out the space not visited with a known pattern. That makes
394 it easier to see if uninitialized reads are made. */
395 ctx->fill_stack_seg(ds_ptr, ds_seg, 0xbaadbadd);
396 ctx->fill_stack_seg(rs_ptr, rs_seg, 0xdaabdaab);
399 template <typename Fixup> void slot_visitor<Fixup>::visit_contexts() {
400 std::set<context*>::const_iterator begin = parent->active_contexts.begin();
401 std::set<context*>::const_iterator end = parent->active_contexts.end();
402 while (begin != end) {
403 visit_context(*begin);
408 template <typename Fixup> struct literal_references_visitor {
409 slot_visitor<Fixup>* visitor;
411 explicit literal_references_visitor(slot_visitor<Fixup>* visitor)
412 : visitor(visitor) {}
414 void operator()(instruction_operand op) {
415 if (op.rel_type() == RT_LITERAL)
416 op.store_value(visitor->visit_pointer(op.load_value()));
420 template <typename Fixup>
421 void slot_visitor<Fixup>::visit_code_block_objects(code_block* compiled) {
422 visit_handle(&compiled->owner);
423 visit_handle(&compiled->parameters);
424 visit_handle(&compiled->relocation);
427 template <typename Fixup>
428 void slot_visitor<Fixup>::visit_embedded_literals(code_block* compiled) {
429 if (!parent->code->uninitialized_p(compiled)) {
430 literal_references_visitor<Fixup> visitor(this);
431 compiled->each_instruction_operand(visitor);
435 template <typename Fixup> struct call_frame_code_block_visitor {
438 call_frame_code_block_visitor(Fixup fixup)
441 void operator()(cell frame_top, cell size, code_block* owner, cell addr) {
442 code_block* compiled =
443 Fixup::translated_code_block_map ? owner : fixup.fixup_code(owner);
444 cell fixed_addr = compiled->address_for_offset(owner->offset(addr));
446 *(cell*)frame_top = fixed_addr;
450 template <typename Fixup>
451 void slot_visitor<Fixup>::visit_object_code_block(object* obj) {
452 switch (obj->type()) {
454 word* w = (word*)obj;
456 w->entry_point = fixup.fixup_code(w->code())->entry_point();
459 case QUOTATION_TYPE: {
460 quotation* q = (quotation*)obj;
462 q->entry_point = fixup.fixup_code(q->code())->entry_point();
465 case CALLSTACK_TYPE: {
466 callstack* stack = (callstack*)obj;
467 call_frame_code_block_visitor<Fixup> call_frame_visitor(fixup);
468 parent->iterate_callstack_object(stack, call_frame_visitor, fixup);
474 template <typename Fixup>
475 void slot_visitor<Fixup>::visit_context_code_blocks() {
476 call_frame_code_block_visitor<Fixup> call_frame_visitor(fixup);
477 std::set<context*>::const_iterator begin = parent->active_contexts.begin();
478 std::set<context*>::const_iterator end = parent->active_contexts.end();
479 while (begin != end) {
480 parent->iterate_callstack(*begin, call_frame_visitor, fixup);
485 template <typename Fixup>
486 void slot_visitor<Fixup>::visit_uninitialized_code_blocks() {
487 std::map<code_block*, cell>* uninitialized_blocks =
488 &parent->code->uninitialized_blocks;
489 std::map<code_block*, cell>::const_iterator iter =
490 uninitialized_blocks->begin();
491 std::map<code_block*, cell>::const_iterator end = uninitialized_blocks->end();
493 std::map<code_block*, cell> new_uninitialized_blocks;
494 for (; iter != end; iter++) {
495 new_uninitialized_blocks.insert(
496 std::make_pair(fixup.fixup_code(iter->first), iter->second));
499 parent->code->uninitialized_blocks = new_uninitialized_blocks;
502 template <typename Fixup> struct embedded_code_pointers_visitor {
505 explicit embedded_code_pointers_visitor(Fixup fixup) : fixup(fixup) {}
507 void operator()(instruction_operand op) {
508 relocation_type type = op.rel_type();
509 if (type == RT_ENTRY_POINT || type == RT_ENTRY_POINT_PIC ||
510 type == RT_ENTRY_POINT_PIC_TAIL)
511 op.store_code_block(fixup.fixup_code(op.load_code_block()));
515 template <typename Fixup>
516 void slot_visitor<Fixup>::visit_embedded_code_pointers(code_block* compiled) {
517 if (!parent->code->uninitialized_p(compiled)) {
518 embedded_code_pointers_visitor<Fixup> operand_visitor(fixup);
519 compiled->each_instruction_operand(operand_visitor);
523 template <typename Fixup>
524 void slot_visitor<Fixup>::visit_object(object *ptr) {
526 if (ptr->type() == ALIEN_TYPE)
527 ((alien*)ptr)->update_address();
530 /* Pops items from the mark stack and visits them until the stack is
531 empty. Used when doing a full collection and when collecting to
533 template <typename Fixup>
534 void slot_visitor<Fixup>::visit_mark_stack(std::vector<cell>* mark_stack) {
535   while (!mark_stack->empty()) {
536     cell ptr = mark_stack->back();
538     mark_stack->pop_back();
/* NOTE(review): this excerpt is elided -- the branch selecting between the
   two cases below is missing. The (ptr - 1) untagging suggests entries with
   the low bit set denote code blocks and the rest data-heap objects; confirm
   against the full source before relying on this. */
541       code_block* compiled = (code_block*)(ptr - 1);
542       visit_code_block_objects(compiled);
543       visit_embedded_literals(compiled);
544       visit_embedded_code_pointers(compiled);
546       object* obj = (object*)ptr;
548       visit_object_code_block(obj);