/* Resolve the object whose code this block executes: starts from `owner`,
   and for a non-optimized block owned by a word, descends to the word's
   definition quotation. Returns the tagged cell of the result.
   NOTE(review): the embedded line numbering jumps after this function's
   return statement -- the closing brace is not visible in this excerpt. */
5 cell code_block::owner_quot() const {
6 tagged<object> executing(owner);
7 if (!optimized_p() && executing->type() == WORD_TYPE)
8 executing = executing.as<word>()->def;
9 return executing.value();
12 /* If the code block is an unoptimized quotation, we can calculate the
13 scan offset. In all other cases -1 is returned. */
/* Maps an absolute address inside this code block back to a scan offset
   within the owning quotation, via vm->quot_code_offset_to_scan(). */
14 cell code_block::scan(factor_vm* vm, cell addr) const {
15 if (type() != code_block_unoptimized) {
16 return tag_fixnum(-1);
/* NOTE(review): the closing brace of the guard above is missing from this
   excerpt (embedded numbering jumps 16 -> 19). */
19 tagged<object> obj(owner);
/* A word owner is unwrapped to its definition before the quotation check. */
20 if (obj.type_p(WORD_TYPE))
21 obj = obj.as<word>()->def;
22 if (!obj.type_p(QUOTATION_TYPE))
23 return tag_fixnum(-1);
/* offset(addr) converts the absolute address to a block-relative offset. */
25 cell ofs = offset(addr);
26 return tag_fixnum(vm->quot_code_offset_to_scan(obj.value(), ofs));
/* Entry point address of a word or quotation; anything else triggers
   critical_error(). NOTE(review): the `case` labels of the switch arms
   (presumably WORD_TYPE and QUOTATION_TYPE, matching the untag calls)
   are missing from this excerpt -- embedded numbering jumps
   30 -> 32 -> 34 -> 36. */
29 cell factor_vm::compute_entry_point_address(cell obj) {
30 switch (tagged<object>(obj).type()) {
32 return untag<word>(obj)->entry_point;
34 return untag<quotation>(obj)->entry_point;
36 critical_error("Expected word or quotation", obj);
/* Choose the entry point for an inline-cache call to word `w`: if a PIC
   definition quotation is present, PICs are enabled (max_pic_size != 0),
   and that quotation is already compiled, use the quotation's entry
   point; otherwise fall back to the word's own entry point.
   NOTE(review): the function's closing brace is not visible here. */
41 cell factor_vm::compute_entry_point_pic_address(word* w, cell tagged_quot) {
42 if (!to_boolean(tagged_quot) || max_pic_size == 0)
43 return w->entry_point;
44 quotation* q = untag<quotation>(tagged_quot);
45 if (quotation_compiled_p(q))
46 return q->entry_point;
47 return w->entry_point;
/* Non-tail-call PIC entry point for a tagged word cell; delegates to the
   (word*, cell) overload with the word's pic_def.
   NOTE(review): the declaration of `w` (presumably a data_root<word>
   rooting w_) is missing from this excerpt -- numbering jumps 50 -> 52. */
50 cell factor_vm::compute_entry_point_pic_address(cell w_) {
52 return compute_entry_point_pic_address(w.untagged(), w->pic_def);
/* Tail-call PIC entry point for a tagged word cell; same shape as the
   non-tail variant but uses the word's pic_tail_def.
   NOTE(review): the declaration of `w` is missing from this excerpt --
   embedded numbering jumps 55 -> 57. */
55 cell factor_vm::compute_entry_point_pic_tail_address(cell w_) {
57 return compute_entry_point_pic_address(w.untagged(), w->pic_tail_def);
/* Resolve the logical owner of a compiled code block. A cold generic
   call site is owned by a 5-element quotation whose 5th element is the
   PIC miss (or PIC-miss-tail) word and whose 1st element is a wrapper
   around the real owning word; in that case the wrapped word is
   returned. Otherwise the stored owner is returned unchanged. */
60 cell factor_vm::code_block_owner(code_block* compiled) {
61 tagged<object> owner(compiled->owner);
63 /* Cold generic word call sites point to quotations that call the
64 inline-cache-miss and inline-cache-miss-tail primitives. */
65 if (owner.type_p(QUOTATION_TYPE)) {
66 tagged<quotation> quot(owner.as<quotation>());
67 tagged<array> elements(quot->array);
/* Sanity-check the expected cold call site shape before unwrapping. */
69 FACTOR_ASSERT(array_capacity(elements.untagged()) == 5);
70 FACTOR_ASSERT(array_nth(elements.untagged(), 4) ==
71 special_objects[PIC_MISS_WORD] ||
72 array_nth(elements.untagged(), 4) ==
73 special_objects[PIC_MISS_TAIL_WORD]);
75 tagged<wrapper> word_wrapper(array_nth(elements.untagged(), 0));
76 return word_wrapper->object;
/* NOTE(review): the closing brace of the `if` above is missing from this
   excerpt (numbering jumps 76 -> 78). */
78 return compiled->owner;
/* Relocation visitor used by update_word_references(): for each
   entry-point-class instruction operand, re-store the address of the
   owner's canonical entry point (plain, PIC, or PIC-tail flavor).
   NOTE(review): this excerpt is missing lines -- the `factor_vm* parent`
   member (numbering jumps 81 -> 83), the `break` statements after each
   case, any default case, and the closing braces are not visible. */
81 struct update_word_references_relocation_visitor {
83 bool reset_inline_caches;
85 update_word_references_relocation_visitor(factor_vm* parent,
86 bool reset_inline_caches)
87 : parent(parent), reset_inline_caches(reset_inline_caches) {}
89 void operator()(instruction_operand op) {
90 code_block* compiled = op.load_code_block();
91 switch (op.rel_type()) {
92 case RT_ENTRY_POINT: {
93 cell owner = compiled->owner;
/* Only patch when the target block actually has an owner. */
94 if (to_boolean(owner))
95 op.store_value(parent->compute_entry_point_address(owner));
98 case RT_ENTRY_POINT_PIC: {
/* PICs themselves are skipped unless inline caches are being reset. */
99 if (reset_inline_caches || !compiled->pic_p()) {
100 cell owner = parent->code_block_owner(compiled);
101 if (to_boolean(owner))
102 op.store_value(parent->compute_entry_point_pic_address(owner));
106 case RT_ENTRY_POINT_PIC_TAIL: {
107 if (reset_inline_caches || !compiled->pic_p()) {
108 cell owner = parent->code_block_owner(compiled);
109 if (to_boolean(owner))
110 op.store_value(parent->compute_entry_point_pic_tail_address(owner));
120 /* Relocate new code blocks completely; updating references to literals,
121 dlsyms, and words. For all other words in the code heap, we only need
122 to update references to other words, without worrying about literals
/* NOTE(review): the tail of the comment above (embedded line 123) is
   missing from this excerpt. */
124 void factor_vm::update_word_references(code_block* compiled,
125 bool reset_inline_caches) {
126 if (code->uninitialized_p(compiled))
127 initialize_code_block(compiled);
128 /* update_word_references() is always applied to every block in
129 the code heap. Since it resets all call sites to point to
130 their canonical entry point (cold entry point for non-tail calls,
131 standard entry point for tail calls), it means that no PICs
132 are referenced after this is done. So instead of polluting
133 the code heap with dead PICs that will be freed on the next
134 GC, we add them to the free list immediately. */
135 else if (reset_inline_caches && compiled->pic_p())
136 code->free(compiled);
/* NOTE(review): numbering jumps 136 -> 138; an `else {` (or similar)
   preceding the visitor application appears to be missing, and the
   closing braces are not visible. */
138 update_word_references_relocation_visitor visitor(this,
139 reset_inline_caches);
140 compiled->each_instruction_operand(visitor);
/* Instruction bytes were patched in place; keep I-cache coherent. */
141 compiled->flush_icache();
145 /* Look up an external library symbol referenced by a compiled code block */
/* parameters[index] is the symbol (byte array, or array of candidate
   names) and parameters[index + 1] the library (dll or f). Unresolvable
   symbols yield the address of factor::undefined_symbol so the call is
   patched to raise an error at runtime. */
146 cell factor_vm::compute_dlsym_address(array* parameters, cell index) {
147 cell symbol = array_nth(parameters, index);
148 cell library = array_nth(parameters, index + 1);
/* f library means "search the current process image" (NULL dll). */
150 dll* d = (to_boolean(library) ? untag<dll>(library) : NULL);
152 void* undefined_symbol = (void*)factor::undefined_symbol;
153 undefined_symbol = FUNCTION_CODE_POINTER(undefined_symbol);
/* A dll object with a null handle failed to load; give up immediately. */
154 if (d != NULL && !d->handle)
155 return (cell)undefined_symbol;
157 switch (tagged<object>(symbol).type()) {
158 case BYTE_ARRAY_TYPE: {
159 symbol_char* name = alien_offset(symbol);
160 void* sym = ffi_dlsym(d, name);
/* NOTE(review): the success-path return for `sym` (embedded lines
   161-164) is missing from this excerpt; only the failure fallthrough
   is visible. */
165 return (cell)undefined_symbol;
/* NOTE(review): the case label for the array branch (embedded ~167,
   presumably ARRAY_TYPE) is missing from this excerpt. */
168 array* names = untag<array>(symbol);
169 for (cell i = 0; i < array_capacity(names); i++) {
170 symbol_char* name = alien_offset(array_nth(names, i));
171 void* sym = ffi_dlsym(d, name);
/* NOTE(review): the per-candidate success return (embedded 172-175) is
   missing; the visible return is the all-candidates-failed fallback.
   Default case and closing braces are also not visible. */
176 return (cell)undefined_symbol;
/* TOC-pointer analogue of compute_dlsym_address() (for ABIs with
   function descriptors): resolves parameters[index] / [index + 1] to a
   TOC pointer, falling back to undefined_symbol's TOC pointer. */
184 cell factor_vm::compute_dlsym_toc_address(array* parameters, cell index) {
185 cell symbol = array_nth(parameters, index);
186 cell library = array_nth(parameters, index + 1);
188 dll* d = (to_boolean(library) ? untag<dll>(library) : NULL);
190 void* undefined_toc = (void*)factor::undefined_symbol;
191 undefined_toc = FUNCTION_TOC_POINTER(undefined_toc);
/* A dll object with a null handle failed to load; give up immediately. */
192 if (d != NULL && !d->handle)
193 return (cell)undefined_toc;
195 switch (tagged<object>(symbol).type()) {
196 case BYTE_ARRAY_TYPE: {
197 symbol_char* name = alien_offset(symbol);
198 void* toc = ffi_dlsym_toc(d, name);
/* NOTE(review): the success-path return for `toc` (embedded 199-201) is
   missing from this excerpt. */
202 return (cell)undefined_toc;
/* NOTE(review): the case label for the array branch (embedded ~204) is
   missing from this excerpt. */
205 array* names = untag<array>(symbol);
206 for (cell i = 0; i < array_capacity(names); i++) {
207 symbol_char* name = alien_offset(array_nth(names, i));
208 void* toc = ffi_dlsym_toc(d, name);
/* NOTE(review): per-candidate success return (embedded 209-212), default
   case and closing braces are not visible in this excerpt. */
213 return (cell)undefined_toc;
/* Address of a VM field: `arg` is a tagged fixnum byte offset from the
   start of this factor_vm instance.
   NOTE(review): the closing brace is not visible in this excerpt. */
221 cell factor_vm::compute_vm_address(cell arg) {
222 return (cell)this + untag_fixnum(arg);
/* Dispatch a relocation type to the external address it refers to
   (dlsym, VM field, dispatch-stats counter, exception handler, safepoint
   page, ...). NOTE(review): this excerpt is heavily sampled -- the
   parameter list tail (embedded 227-228), most `case` labels (e.g. the
   ones preceding lines 231, 233, 237, 248, 253), the bodies of the
   RT_CARDS_OFFSET / RT_DECKS_OFFSET cases, any default return, and the
   closing braces are all missing. */
225 cell factor_vm::lookup_external_address(relocation_type rel_type,
226 code_block *compiled,
231 return compute_dlsym_address(parameters, index);
233 return compiled->entry_point();
234 case RT_MEGAMORPHIC_CACHE_HITS:
235 return (cell)&dispatch_stats.megamorphic_cache_hits;
237 return compute_vm_address(array_nth(parameters, index));
238 case RT_CARDS_OFFSET:
240 case RT_DECKS_OFFSET:
243 case RT_EXCEPTION_HANDLER:
244 return (cell)&factor::exception_handler;
248 return compute_dlsym_toc_address(parameters, index);
250 case RT_INLINE_CACHE_MISS:
251 return (cell)&factor::inline_cache_miss;
253 return (cell)code->safepoint_page;
/* Resolve an instruction operand's external address via
   lookup_external_address(); a (cell)-1 result is treated as failure
   and reported through critical_error with a diagnostic message. */
259 cell factor_vm::compute_external_address(instruction_operand op) {
260 code_block* compiled = op.compiled;
261 array* parameters = to_boolean(compiled->parameters)
262 ? untag<array>(compiled->parameters)
/* NOTE(review): the else-arm of the conditional above (embedded 263),
   the declaration of `idx` (~266), the ostringstream `ss` and `arg`
   declarations (~269, ~271), and closing braces/final return are missing
   from this excerpt. */
265 relocation_type rel_type = op.rel_type();
267 cell ext_addr = lookup_external_address(rel_type, compiled, parameters, idx);
268 if (ext_addr == (cell)-1) {
270 print_obj(ss, compiled->owner);
273 if (rel_type == RT_DLSYM || rel_type == RT_DLSYM_TOC) {
274 ss << "Bad symbol specifier in compute_external_address";
275 arg = array_nth(parameters, idx);
277 ss << "Bad rel type in compute_external_address";
280 critical_error(ss.str().c_str(), arg);
/* Compute an RT_HERE relocation target relative to the code block's
   entry point; `arg` is a tagged fixnum displacement.
   NOTE(review): the condition selecting between the two returns
   (embedded line 288, presumably a sign test on `n`) and the closing
   brace are missing from this excerpt. */
285 cell factor_vm::compute_here_address(cell arg, cell offset,
286 code_block* compiled) {
287 fixnum n = untag_fixnum(arg);
289 return compiled->entry_point() + offset + n;
290 return compiled->entry_point() - n;
/* Visitor for the initial fixup pass over a freshly added code block:
   consumes the literals array in operand order and stores the computed
   value for each relocation type.
   NOTE(review): this excerpt is sampled -- the member declarations
   (parent, literals, literal_index; embedded 294-297), several `case`
   labels (before lines 308, 310, 316, 319, 321), any default case, and
   closing braces are not visible. */
293 struct initial_code_block_visitor {
298 initial_code_block_visitor(factor_vm* parent, cell literals)
299 : parent(parent), literals(literals), literal_index(0) {}
/* Each call consumes the next literal; order must match operand order. */
301 cell next_literal() {
302 return array_nth(untag<array>(literals), literal_index++);
305 fixnum compute_operand_value(instruction_operand op) {
306 switch (op.rel_type()) {
308 return next_literal();
310 return parent->compute_entry_point_address(next_literal());
311 case RT_ENTRY_POINT_PIC:
312 return parent->compute_entry_point_pic_address(next_literal());
313 case RT_ENTRY_POINT_PIC_TAIL:
314 return parent->compute_entry_point_pic_tail_address(next_literal());
316 return parent->compute_here_address(
317 next_literal(), op.rel_offset(), op.compiled);
319 return untag_fixnum(next_literal());
321 return parent->compute_external_address(op);
325 void operator()(instruction_operand op) {
326 op.store_value(compute_operand_value(op));
330 /* Perform all fixups on a code block */
/* Applies initial_code_block_visitor to every instruction operand,
   flushes the I-cache for the patched range, and marks the block for
   tracing at the next minor GC. */
331 void factor_vm::initialize_code_block(code_block* compiled, cell literals) {
332 initial_code_block_visitor visitor(this, literals);
333 compiled->each_instruction_operand(visitor);
334 compiled->flush_icache();
336 /* next time we do a minor GC, we have to trace this code block, since
337 the newly-installed instruction operands might point to literals in
/* NOTE(review): the tail of the comment above (embedded 338) and the
   function's closing brace are missing from this excerpt. */
339 code->write_barrier(compiled);
/* Deferred-initialization entry point: looks up the literals recorded
   for `compiled` in code->uninitialized_blocks, performs the fixups,
   and removes the entry.
   NOTE(review): `iter` is dereferenced without checking against end() --
   presumably callers guarantee the block is present (add_code_block
   inserts it); confirm before changing. Closing brace not visible. */
342 void factor_vm::initialize_code_block(code_block* compiled) {
343 std::map<code_block*, cell>::iterator iter =
344 code->uninitialized_blocks.find(compiled);
345 initialize_code_block(compiled, iter->second);
346 code->uninitialized_blocks.erase(iter);
349 /* Fixup labels. This is done at compile time, not image load time */
/* `labels` is a flat array of (relocation class, offset, target)
   triples; each target is rebased onto the block's entry point and
   stored through a synthetic RT_HERE relocation entry. */
350 void factor_vm::fixup_labels(array* labels, code_block* compiled) {
351 cell size = array_capacity(labels);
353 for (cell i = 0; i < size; i += 3) {
354 relocation_class rel_class =
355 (relocation_class) untag_fixnum(array_nth(labels, i));
356 cell offset = untag_fixnum(array_nth(labels, i + 1));
357 cell target = untag_fixnum(array_nth(labels, i + 2));
359 relocation_entry new_entry(RT_HERE, rel_class, offset);
361 instruction_operand op(new_entry, compiled, 0);
362 op.store_value(target + compiled->entry_point());
/* NOTE(review): the loop's and function's closing braces (embedded
   363-364) are missing from this excerpt. */
367 /* Allocates memory */
/* Allot size + sizeof(code_block) bytes from the code heap allocator;
   on failure, run a full compacting GC and retry once, and abort with
   fatal_error if still out of room. */
368 code_block* factor_vm::allot_code_block(cell size, code_block_type type) {
369 code_block* block = code->allocator->allot(size + sizeof(code_block));
371 /* If allocation failed, do a full GC and compact the code heap.
372 A full GC that occurs as a result of the data heap filling up does not
373 trigger a compaction. This setup ensures that most GCs do not compact
374 the code heap, but if the code fills up, it probably means it will be
375 fragmented after GC anyway, so its best to compact. */
/* NOTE(review): the null-checks guarding the retry and the failure path
   (embedded 376, 379, 381) are missing from this excerpt, as are the
   used-space stream tail (~383) and the final return/closing brace. */
377 primitive_compact_gc();
378 block = code->allocator->allot(size + sizeof(code_block));
380 /* Insufficient room even after code GC, give up */
382 std::cout << "Code heap used: " << code->allocator->occupied_space()
384 std::cout << "Code heap free: " << code->allocator->free_space() << "\n";
385 fatal_error("Out of memory in add-compiled-block", 0);
389 block->set_type(type);
394 /* Allocates memory */
/* Build a code_block from compiler output: roots all GC-visible inputs,
   allots heap space, fills in owner/relocation/parameters, copies the
   machine code payload, applies label fixups, and registers the block
   for deferred literal/word fixups (initialize_code_block).
   NOTE(review): this excerpt is sampled -- the literals_ parameter
   (embedded 398), the `else` keywords pairing lines 417/423 with their
   ifs, and the final `return compiled;` / closing brace are missing. */
395 code_block* factor_vm::add_code_block(code_block_type type, cell code_,
396 cell labels_, cell owner_,
397 cell relocation_, cell parameters_,
399 cell frame_size_untagged) {
/* data_root pins each input across the allocations below. */
400 data_root<byte_array> code(code_, this);
401 data_root<object> labels(labels_, this);
402 data_root<object> owner(owner_, this);
403 data_root<byte_array> relocation(relocation_, this);
404 data_root<array> parameters(parameters_, this);
405 data_root<array> literals(literals_, this);
407 cell code_length = array_capacity(code.untagged());
408 code_block* compiled = allot_code_block(code_length, type);
410 compiled->owner = owner.value();
412 /* slight space optimization */
/* Empty relocation/parameter arrays are canonicalized to f. */
413 if (relocation.type() == BYTE_ARRAY_TYPE &&
414 array_capacity(relocation.untagged()) == 0)
415 compiled->relocation = false_object;
417 compiled->relocation = relocation.value();
419 if (parameters.type() == ARRAY_TYPE &&
420 array_capacity(parameters.untagged()) == 0)
421 compiled->parameters = false_object;
423 compiled->parameters = parameters.value();
/* Copy the machine code payload; both skip their object headers. */
426 memcpy(compiled + 1, code.untagged() + 1, code_length);
429 if (to_boolean(labels.value()))
430 fixup_labels(labels.as<array>().untagged(), compiled);
432 compiled->set_stack_frame_size(frame_size_untagged);
434 /* Once we are ready, fill in literal and word references in this code
435 block's instruction operands. In most cases this is done right after this
436 method returns, except when compiling words with the non-optimizing
437 compiler at the beginning of bootstrap */
438 this->code->uninitialized_blocks.insert(
439 std::make_pair(compiled, literals.value()));
440 this->code->all_blocks.insert((cell)compiled);
442 /* next time we do a minor GC, we have to trace this code block, since
443 the fields of the code_block struct might point into nursery or aging */
444 this->code->write_barrier(compiled);
449 /* References to undefined symbols are patched up to call this function on
450 image load. It finds the symbol and library, and throws an error. */
451 void factor_vm::undefined_symbol() {
/* Recover the call site from the top of the callstack, then scan its
   code block's operands for the matching RT_DLSYM relocation. */
452 cell frame = ctx->callstack_top;
453 cell return_address = *(cell*)frame;
454 code_block* compiled = code->code_block_for_address(return_address);
456 /* Find the RT_DLSYM relocation nearest to the given return
/* NOTE(review): the tail of the comment above (embedded 457) is missing
   from this excerpt. */
458 cell symbol = false_object;
459 cell library = false_object;
/* Later matches overwrite earlier ones, so the last RT_DLSYM at or
   before the return address wins. */
461 auto find_symbol_at_address_visitor = [&](instruction_operand op) {
462 if (op.rel_type() == RT_DLSYM && op.pointer <= return_address) {
463 array* parameters = untag<array>(compiled->parameters);
464 cell index = op.index;
465 symbol = array_nth(parameters, index);
466 library = array_nth(parameters, index + 1);
/* NOTE(review): the lambda's closing braces (embedded 467-468) are
   missing from this excerpt. */
469 compiled->each_instruction_operand(find_symbol_at_address_visitor);
471 if (!to_boolean(symbol))
472 critical_error("Can't find RT_DLSYM at return address", return_address);
/* Throws a Factor-level error naming the symbol and library. */
474 general_error(ERROR_UNDEFINED_SYMBOL, symbol, library);
/* C-linkage-style trampoline: forwards to the current VM's member
   implementation. NOTE(review): the closing brace is not visible in
   this excerpt. */
477 void undefined_symbol() {
478 return current_vm()->undefined_symbol();