/* Returns the quotation that "owns" this code block. For an unoptimized
   block whose owner is a word, follows the word's definition slot to the
   underlying quotation; otherwise returns the owner object unchanged.
   NOTE(review): this file is an elided excerpt (embedded line numbers have
   gaps), so the closing line(s) of this definition are not visible here. */
5 cell code_block::owner_quot() const {
6 tagged<object> executing(owner);
/* Only unoptimized word blocks are redirected to word->def. */
7 if (!optimized_p() && executing->type() == WORD_TYPE)
8 executing = executing.as<word>()->def;
9 return executing.value();
12 /* If the code block is an unoptimized quotation, we can calculate the
13 scan offset. In all other cases -1 is returned. */
14 cell code_block::scan(factor_vm* vm, cell addr) const {
/* Only unoptimized code blocks carry enough structure to map a code
   address back to a quotation scan position. */
15 if (type() != code_block_unoptimized) {
16 return tag_fixnum(-1);
/* Resolve the owner down to a quotation: a word owner points to its
   definition quotation via word->def. */
19 tagged<object> obj(owner);
20 if (obj.type_p(WORD_TYPE))
21 obj = obj.as<word>()->def;
22 if (!obj.type_p(QUOTATION_TYPE))
23 return tag_fixnum(-1);
/* Convert the absolute code address into an offset within this block,
   then let the VM translate that into a scan offset in the quotation. */
25 cell ofs = offset(addr);
26 return tag_fixnum(vm->quot_code_offset_to_scan(obj.value(), ofs));
/* Returns the machine-code entry point for a tagged word or quotation;
   any other type is a fatal error. NOTE(review): the switch's case
   labels (presumably WORD_TYPE / QUOTATION_TYPE) are elided from this
   excerpt — confirm against the full source. */
29 cell factor_vm::compute_entry_point_address(cell obj) {
30 switch (tagged<object>(obj).type()) {
32 return untag<word>(obj)->entry_point;
34 return untag<quotation>(obj)->entry_point;
36 critical_error("Expected word or quotation", obj);
/* Chooses the entry point to use for a (possibly PIC-dispatched) call to
   a word: the PIC quotation's entry point when one exists and is
   compiled, otherwise the word's own entry point. */
41 cell factor_vm::compute_entry_point_pic_address(word* w, cell tagged_quot) {
/* No PIC quotation supplied, or inline caching disabled entirely:
   call the word directly. */
42 if (!to_boolean(tagged_quot) || max_pic_size == 0)
43 return w->entry_point;
45 quotation* quot = untag<quotation>(tagged_quot);
/* Prefer the compiled PIC stub; fall back to the word if the PIC
   quotation has not been compiled yet. */
46 if (quotation_compiled_p(quot))
47 return quot->entry_point;
49 return w->entry_point;
/* Non-tail-call variant: dispatches through the word's pic_def slot.
   NOTE(review): the declaration of `w` (presumably a data_root<word>
   rooting w_, original line 54) is elided from this excerpt. */
53 cell factor_vm::compute_entry_point_pic_address(cell w_) {
55 return compute_entry_point_pic_address(w.untagged(), w->pic_def);
/* Tail-call variant: dispatches through the word's pic_tail_def slot.
   NOTE(review): the declaration of `w` (presumably a data_root<word>
   rooting w_, original line 59) is elided from this excerpt. */
58 cell factor_vm::compute_entry_point_pic_tail_address(cell w_) {
60 return compute_entry_point_pic_address(w.untagged(), w->pic_tail_def);
/* Returns the logical owner of a code block. Normally this is just the
   owner slot, but for inline-cache-miss stub quotations the real owner
   is the generic word stored inside the stub. */
63 cell factor_vm::code_block_owner(code_block* compiled) {
64 tagged<object> owner(compiled->owner);
66 /* Cold generic word call sites point to quotations that call the
67 inline-cache-miss and inline-cache-miss-tail primitives. */
68 if (owner.type_p(QUOTATION_TYPE)) {
69 tagged<quotation> quot(owner.as<quotation>());
70 tagged<array> elements(quot->array);
/* Sanity-check the stub's shape: exactly five elements, the last
   being one of the two inline-cache-miss words. */
72 FACTOR_ASSERT(array_capacity(elements.untagged()) == 5);
73 FACTOR_ASSERT(array_nth(elements.untagged(), 4) ==
74 special_objects[PIC_MISS_WORD] ||
75 array_nth(elements.untagged(), 4) ==
76 special_objects[PIC_MISS_TAIL_WORD]);
/* The owner word is stored wrapped in element 0 of the stub. */
78 tagged<wrapper> word_wrapper(array_nth(elements.untagged(), 0));
79 return word_wrapper->object;
81 return compiled->owner;
/* Relocation visitor that rewrites each call-site operand of a code block
   so it points at the canonical entry point of its target word/quotation.
   When reset_inline_caches is set, PIC call sites are also reset to their
   cold (non-PIC) entry points. NOTE(review): this excerpt elides some
   lines of the struct (e.g. the parent field declaration and the closing
   braces of the switch cases). */
84 struct update_word_references_relocation_visitor {
86 bool reset_inline_caches;
88 update_word_references_relocation_visitor(factor_vm* parent,
89 bool reset_inline_caches)
90 : parent(parent), reset_inline_caches(reset_inline_caches) {}
92 void operator()(instruction_operand op) {
/* The operand references some other code block; dispatch on how the
   reference should be resolved. */
93 code_block* compiled = op.load_code_block();
94 switch (op.rel_type()) {
95 case RT_ENTRY_POINT: {
/* Direct call: re-point at the owner's current entry point. */
96 cell owner = compiled->owner;
97 if (to_boolean(owner))
98 op.store_value(parent->compute_entry_point_address(owner));
101 case RT_ENTRY_POINT_PIC: {
/* PIC call site: only rewrite when resetting caches or when the
   target is not itself a PIC. */
102 if (reset_inline_caches || !compiled->pic_p()) {
103 cell owner = parent->code_block_owner(compiled);
104 if (to_boolean(owner))
105 op.store_value(parent->compute_entry_point_pic_address(owner));
109 case RT_ENTRY_POINT_PIC_TAIL: {
/* Same as above, but for tail-call PIC sites. */
110 if (reset_inline_caches || !compiled->pic_p()) {
111 cell owner = parent->code_block_owner(compiled);
112 if (to_boolean(owner))
113 op.store_value(parent->compute_entry_point_pic_tail_address(owner));
123 /* Relocate new code blocks completely; updating references to literals,
124 dlsyms, and words. For all other words in the code heap, we only need
125 to update references to other words, without worrying about literals
127 void factor_vm::update_word_references(code_block* compiled,
128 bool reset_inline_caches) {
/* A block that was never initialized still needs its full fixup pass. */
129 if (code->uninitialized_p(compiled))
130 initialize_code_block(compiled);
131 /* update_word_references() is always applied to every block in
132 the code heap. Since it resets all call sites to point to
133 their canonical entry point (cold entry point for non-tail calls,
134 standard entry point for tail calls), it means that no PICs
135 are referenced after this is done. So instead of polluting
136 the code heap with dead PICs that will be freed on the next
137 GC, we add them to the free list immediately. */
138 else if (reset_inline_caches && compiled->pic_p())
139 code->free(compiled);
/* Otherwise, rewrite every call-site operand in the block and flush
   the instruction cache so the CPU sees the patched code.
   NOTE(review): an `else {` introducing this branch appears to be
   elided from this excerpt (original line 140). */
141 update_word_references_relocation_visitor visitor(this,
142 reset_inline_caches);
143 compiled->each_instruction_operand(visitor);
144 compiled->flush_icache();
148 /* Look up an external library symbol referenced by a compiled code block */
149 cell factor_vm::compute_dlsym_address(array* parameters, cell index) {
/* The parameters array stores (symbol, library) pairs; library may be
   f, meaning "search the global namespace". */
150 cell symbol = array_nth(parameters, index);
151 cell library = array_nth(parameters, index + 1);
153 dll* d = (to_boolean(library) ? untag<dll>(library) : NULL);
/* Fallback address used whenever resolution fails: a stub that throws
   an undefined-symbol error at call time. */
155 void* undefined_symbol = (void*)factor::undefined_symbol;
156 undefined_symbol = FUNCTION_CODE_POINTER(undefined_symbol);
/* A library object whose handle is null failed to load; every symbol
   in it is undefined. */
157 if (d != NULL && !d->handle)
158 return (cell)undefined_symbol;
/* The symbol spec is either a single name (byte array) or an array of
   candidate names tried in order. NOTE(review): the ARRAY_TYPE case
   label, the successful-return lines, and the default case are elided
   from this excerpt. */
160 switch (tagged<object>(symbol).type()) {
161 case BYTE_ARRAY_TYPE: {
162 symbol_char* name = alien_offset(symbol);
163 void* sym = ffi_dlsym(d, name);
168 return (cell)undefined_symbol;
171 array* names = untag<array>(symbol);
172 for (cell i = 0; i < array_capacity(names); i++) {
173 symbol_char* name = alien_offset(array_nth(names, i));
174 void* sym = ffi_dlsym(d, name);
179 return (cell)undefined_symbol;
/* TOC-pointer variant of compute_dlsym_address, for ABIs (e.g. PPC64)
   where a function reference is a TOC entry rather than a raw code
   pointer. Mirrors the structure of compute_dlsym_address above. */
187 cell factor_vm::compute_dlsym_toc_address(array* parameters, cell index) {
188 cell symbol = array_nth(parameters, index);
189 cell library = array_nth(parameters, index + 1);
191 dll* d = (to_boolean(library) ? untag<dll>(library) : NULL);
/* Fallback TOC address for unresolved symbols. */
193 void* undefined_toc = (void*)factor::undefined_symbol;
194 undefined_toc = FUNCTION_TOC_POINTER(undefined_toc);
195 if (d != NULL && !d->handle)
196 return (cell)undefined_toc;
/* Single name or array of candidate names, as in compute_dlsym_address.
   NOTE(review): the ARRAY_TYPE case label, the successful-return lines,
   and the default case are elided from this excerpt. */
198 switch (tagged<object>(symbol).type()) {
199 case BYTE_ARRAY_TYPE: {
200 symbol_char* name = alien_offset(symbol);
201 void* toc = ffi_dlsym_toc(d, name);
205 return (cell)undefined_toc;
208 array* names = untag<array>(symbol);
209 for (cell i = 0; i < array_capacity(names); i++) {
210 symbol_char* name = alien_offset(array_nth(names, i));
211 void* toc = ffi_dlsym_toc(d, name);
216 return (cell)undefined_toc;
/* Resolves a tagged fixnum offset into an absolute address inside this
   factor_vm instance (used for VM-field relocations). */
224 cell factor_vm::compute_vm_address(cell arg) {
225 return (cell)this + untag_fixnum(arg);
/* Maps a relocation type to the absolute address it should be patched
   with. NOTE(review): many case labels and return statements are elided
   from this excerpt (e.g. the RT_DLSYM, RT_THIS, RT_VM, RT_CARDS_OFFSET,
   RT_DECKS_OFFSET, RT_DLSYM_TOC, RT_SAFEPOINT bodies are only partially
   visible), as is the trailing "unknown type" return path. */
228 cell factor_vm::lookup_external_address(relocation_type rel_type,
229 code_block *compiled,
/* Library symbol referenced by this block's parameters array. */
234 return compute_dlsym_address(parameters, index);
/* The block's own entry point (self-reference). */
236 return compiled->entry_point();
237 case RT_MEGAMORPHIC_CACHE_HITS:
238 return (cell)&dispatch_stats.megamorphic_cache_hits;
/* Address of a field inside this VM instance. */
240 return compute_vm_address(array_nth(parameters, index));
241 case RT_CARDS_OFFSET:
243 case RT_DECKS_OFFSET:
246 case RT_EXCEPTION_HANDLER:
247 return (cell)&factor::exception_handler;
251 return compute_dlsym_toc_address(parameters, index);
253 case RT_INLINE_CACHE_MISS:
254 return (cell)&factor::inline_cache_miss;
256 return (cell)code->safepoint_page;
/* Resolves the external address for one instruction operand, reporting a
   fatal error (with the offending symbol spec when applicable) if lookup
   fails. NOTE(review): the declaration of `idx`/`arg` and the
   std::ostringstream `ss`, plus the final return, are elided from this
   excerpt. */
262 cell factor_vm::compute_external_address(instruction_operand op) {
263 code_block* compiled = op.compiled;
/* parameters may be f when the block has no parameter array. */
264 array* parameters = to_boolean(compiled->parameters)
265 ? untag<array>(compiled->parameters)
268 relocation_type rel_type = op.rel_type();
270 cell ext_addr = lookup_external_address(rel_type, compiled, parameters, idx);
/* (cell)-1 is the sentinel for "lookup failed". */
271 if (ext_addr == (cell)-1) {
273 print_obj(ss, compiled->owner);
/* For dlsym relocations include the bad symbol spec in the error. */
276 if (rel_type == RT_DLSYM || rel_type == RT_DLSYM_TOC) {
277 ss << "Bad symbol specifier in compute_external_address";
278 arg = array_nth(parameters, idx);
280 ss << "Bad rel type in compute_external_address";
283 critical_error(ss.str().c_str(), arg);
/* Patches one instruction operand with its resolved external address. */
288 void factor_vm::store_external_address(instruction_operand op) {
289 op.store_value(compute_external_address(op));
/* Computes the absolute address for an RT_HERE (PC-relative label)
   relocation from a tagged fixnum operand. NOTE(review): the condition
   selecting between the two return forms (original line 295, presumably
   a sign test on n) is elided from this excerpt — verify against the
   full source before relying on the exact formula. */
292 cell factor_vm::compute_here_address(cell arg, cell offset,
293 code_block* compiled) {
294 fixnum n = untag_fixnum(arg);
296 return compiled->entry_point() + offset + n;
298 return compiled->entry_point() - n;
/* Relocation visitor used when a code block is first initialized: walks
   the block's instruction operands and fills each one in from the next
   element of the literals array. NOTE(review): the field declarations
   (parent, literals, literal_index), the switch case labels, `break`s,
   and closing braces are elided from this excerpt. */
301 struct initial_code_block_visitor {
306 initial_code_block_visitor(factor_vm* parent, cell literals)
307 : parent(parent), literals(literals), literal_index(0) {}
/* Consumes and returns the next literal in order; operands and
   literals must therefore be visited in matching order. */
309 cell next_literal() {
310 return array_nth(untag<array>(literals), literal_index++);
313 void operator()(instruction_operand op) {
314 switch (op.rel_type()) {
/* Literal stored directly in the instruction stream. */
316 op.store_value(next_literal());
/* Direct call / PIC call / tail PIC call entry points. */
319 op.store_value(parent->compute_entry_point_address(next_literal()));
321 case RT_ENTRY_POINT_PIC:
322 op.store_value(parent->compute_entry_point_pic_address(next_literal()));
324 case RT_ENTRY_POINT_PIC_TAIL:
326 parent->compute_entry_point_pic_tail_address(next_literal()));
/* PC-relative label inside this block. */
329 op.store_value(parent->compute_here_address(
330 next_literal(), op.rel_offset(), op.compiled));
/* Untagged fixnum stored raw. */
333 op.store_value(untag_fixnum(next_literal()));
/* Everything else resolves to an external (VM/dlsym/etc.) address. */
336 parent->store_external_address(op);
342 /* Perform all fixups on a code block */
343 void factor_vm::initialize_code_block(code_block* compiled, cell literals) {
/* Fill in every instruction operand from the literals array, then
   flush the icache so the patched instructions are visible. */
344 initial_code_block_visitor visitor(this, literals);
345 compiled->each_instruction_operand(visitor);
346 compiled->flush_icache();
348 /* next time we do a minor GC, we have to trace this code block, since
349 the newly-installed instruction operands might point to literals in
351 code->write_barrier(compiled);
/* Initializes a block using the literals recorded for it at allocation
   time in code->uninitialized_blocks, then removes the entry.
   NOTE(review): the lookup result is used without checking iter against
   end(); callers evidently guarantee the block is present (it is inserted
   by add_code_block) — confirm before calling this on arbitrary blocks. */
354 void factor_vm::initialize_code_block(code_block* compiled) {
355 std::map<code_block*, cell>::iterator iter =
356 code->uninitialized_blocks.find(compiled);
357 initialize_code_block(compiled, iter->second);
358 code->uninitialized_blocks.erase(iter);
361 /* Fixup labels. This is done at compile time, not image load time */
362 void factor_vm::fixup_labels(array* labels, code_block* compiled) {
363 cell size = array_capacity(labels);
/* labels is a flat array of (relocation class, offset, target)
   triples, hence the stride of 3. */
365 for (cell i = 0; i < size; i += 3) {
366 relocation_class rel_class =
367 (relocation_class) untag_fixnum(array_nth(labels, i));
368 cell offset = untag_fixnum(array_nth(labels, i + 1));
369 cell target = untag_fixnum(array_nth(labels, i + 2));
/* Synthesize an RT_HERE relocation at `offset` and patch it with the
   label target's absolute address within this block. */
371 relocation_entry new_entry(RT_HERE, rel_class, offset);
373 instruction_operand op(new_entry, compiled, 0);
374 op.store_value(target + compiled->entry_point());
379 /* Allocates memory */
380 code_block* factor_vm::allot_code_block(cell size, code_block_type type) {
/* The allocation includes the code_block header in front of the
   instruction bytes. */
381 code_block* block = code->allocator->allot(size + sizeof(code_block));
383 /* If allocation failed, do a full GC and compact the code heap.
384 A full GC that occurs as a result of the data heap filling up does not
385 trigger a compaction. This setup ensures that most GCs do not compact
386 the code heap, but if the code fills up, it probably means it will be
387 fragmented after GC anyway, so its best to compact. */
/* NOTE(review): the null-checks guarding these retry/failure branches
   (original lines 388 and 391/393) are elided from this excerpt. */
389 primitive_compact_gc();
390 block = code->allocator->allot(size + sizeof(code_block));
392 /* Insufficient room even after code GC, give up */
394 std::cout << "Code heap used: " << code->allocator->occupied_space()
396 std::cout << "Code heap free: " << code->allocator->free_space() << "\n";
397 fatal_error("Out of memory in add-compiled-block", 0);
/* Stamp the block header with its type before returning. */
401 block->set_type(type);
406 /* Allocates memory */
407 code_block* factor_vm::add_code_block(code_block_type type, cell code_,
408 cell labels_, cell owner_,
409 cell relocation_, cell parameters_,
411 cell frame_size_untagged) {
/* Root all incoming tagged values: allot_code_block below may trigger
   a GC that moves them. (The literals_ parameter declaration, original
   line 410, is elided from this excerpt.) */
412 data_root<byte_array> code(code_, this);
413 data_root<object> labels(labels_, this);
414 data_root<object> owner(owner_, this);
415 data_root<byte_array> relocation(relocation_, this);
416 data_root<array> parameters(parameters_, this);
417 data_root<array> literals(literals_, this);
419 cell code_length = array_capacity(code.untagged());
420 code_block* compiled = allot_code_block(code_length, type);
422 compiled->owner = owner.value();
424 /* slight space optimization */
/* Store f instead of an empty byte array / array so empty relocation
   and parameter tables take no heap reference. */
425 if (relocation.type() == BYTE_ARRAY_TYPE &&
426 array_capacity(relocation.untagged()) == 0)
427 compiled->relocation = false_object;
429 compiled->relocation = relocation.value();
431 if (parameters.type() == ARRAY_TYPE &&
432 array_capacity(parameters.untagged()) == 0)
433 compiled->parameters = false_object;
435 compiled->parameters = parameters.value();
/* Copy the instruction bytes in after the code_block header; the +1
   on both pointers skips past the headers. */
438 memcpy(compiled + 1, code.untagged() + 1, code_length);
/* Resolve compile-time labels now; image-load-time relocations are
   deferred (see below). */
441 if (to_boolean(labels.value()))
442 fixup_labels(labels.as<array>().untagged(), compiled);
444 compiled->set_stack_frame_size(frame_size_untagged);
446 /* Once we are ready, fill in literal and word references in this code
447 block's instruction operands. In most cases this is done right after this
448 method returns, except when compiling words with the non-optimizing
449 compiler at the beginning of bootstrap */
450 this->code->uninitialized_blocks.insert(
451 std::make_pair(compiled, literals.value()));
452 this->code->all_blocks.insert((cell)compiled);
454 /* next time we do a minor GC, we have to trace this code block, since
455 the fields of the code_block struct might point into nursery or aging */
456 this->code->write_barrier(compiled);
461 /* References to undefined symbols are patched up to call this function on
462 image load. It finds the symbol and library, and throws an error. */
463 void factor_vm::undefined_symbol() {
/* Recover the call site: the return address sits at the top of the
   current callstack frame. */
464 cell frame = ctx->callstack_top;
465 cell return_address = *(cell*)frame;
466 code_block* compiled = code->code_block_for_address(return_address);
468 /* Find the RT_DLSYM relocation nearest to the given return
470 cell symbol = false_object;
471 cell library = false_object;
/* Visit every operand; the last RT_DLSYM at or before the return
   address wins, yielding the (symbol, library) pair that failed. */
473 auto find_symbol_at_address_visitor = [&](instruction_operand op) {
474 if (op.rel_type() == RT_DLSYM && op.pointer <= return_address) {
475 array* parameters = untag<array>(compiled->parameters);
476 cell index = op.index;
477 symbol = array_nth(parameters, index);
478 library = array_nth(parameters, index + 1);
481 compiled->each_instruction_operand(find_symbol_at_address_visitor);
483 if (!to_boolean(symbol))
484 critical_error("Can't find RT_DLSYM at return address", return_address);
/* Raise a catchable Factor-level error naming the symbol/library. */
486 general_error(ERROR_UNDEFINED_SYMBOL, symbol, library);
/* C-linkage trampoline called from patched code; forwards to the current
   VM's undefined_symbol(). (The `return` on a void call is harmless but
   redundant.) NOTE(review): the definition runs past the end of this
   excerpt. */
489 void undefined_symbol() {
490 return current_vm()->undefined_symbol();