// Resolve the logical owner of a code block. For a PIC/cold call-site
// stub the owner field holds a quotation built around the real word;
// this unwraps it (via the wrapper stored at element 0 of the
// quotation's array) so callers see the word itself.
// NOTE(review): several lines (returns, closing braces) are elided in
// this view — confirm against the full source.
5 static cell code_block_owner(code_block* compiled) {
6 cell owner = compiled->owner;
8 // Cold generic word call sites point to quotations that call the
9 // inline-cache-miss and inline-cache-miss-tail primitives.
10 if (TAG(owner) != QUOTATION_TYPE)
13 quotation* quot = untag<quotation>(owner);
14 array* elements = untag<array>(quot->array);
// The cold call site quotation has a fixed 5-element shape; assert it
// before reaching into element 0.
16 FACTOR_ASSERT(array_capacity(elements) == 5);
17 wrapper* wrap = untag<wrapper>(array_nth(elements, 0));
// Return the machine-code entry point for a tagged word or quotation.
// Any other object type is a fatal error. NOTE(review): the TAG()
// checks selecting between the two returns are elided in this view.
21 static cell compute_entry_point_address(cell obj) {
24 return untag<word>(obj)->entry_point;
26 return untag<quotation>(obj)->entry_point;
28 critical_error("Expected word or quotation", obj);
// Compute the target of an RT_HERE relocation: an address relative to
// this code block's entry point. NOTE(review): the condition choosing
// between the +offset+n and -n forms is elided in this view —
// presumably it branches on the sign of n; confirm against full source.
33 static cell compute_here_address(cell arg, cell offset, code_block* compiled) {
34 fixnum n = untag_fixnum(arg);
36 return compiled->entry_point() + offset + n;
37 return compiled->entry_point() - n;
// For an unoptimized code block owned by a word, return the word's
// definition quotation. NOTE(review): the fallthrough return for all
// other cases is elided in this view — presumably returns owner as-is.
40 cell code_block::owner_quot() const {
41 if (!optimized_p() && TAG(owner) == WORD_TYPE)
42 return untag<word>(owner)->def;
46 // If the code block is an unoptimized quotation, we can calculate the
47 // scan offset. In all other cases -1 is returned.
48 // Allocates memory (quot_code_offset_to_scan)
49 cell code_block::scan(factor_vm* vm, cell addr) const {
// Optimized and PIC blocks have no scannable quotation structure.
50 if (type() != code_block_unoptimized) {
51 return tag_fixnum(-1);
// Follow a word owner through to its definition quotation.
// NOTE(review): the declaration/initialization of `ptr` is elided in
// this view — presumably `cell ptr = owner;`.
55 if (TAG(ptr) == WORD_TYPE)
56 ptr = untag<word>(ptr)->def;
57 if (TAG(ptr) != QUOTATION_TYPE)
58 return tag_fixnum(-1);
// Translate the code offset of addr within this block into a
// quotation scan offset.
59 cell ofs = offset(addr);
60 return tag_fixnum(vm->quot_code_offset_to_scan(ptr, ofs));
// Entry point for a PIC call site on word w. If the word has a
// compiled PIC definition quotation, jump there; otherwise (no
// quotation, PICs disabled via max_pic_size == 0, or the quotation is
// not yet compiled) fall back to the word's own entry point.
63 cell factor_vm::compute_entry_point_pic_address(word* w, cell tagged_quot) {
64 if (!to_boolean(tagged_quot) || max_pic_size == 0)
65 return w->entry_point;
66 quotation* q = untag<quotation>(tagged_quot);
67 if (quotation_compiled_p(q))
68 return q->entry_point;
69 return w->entry_point;
// Non-tail PIC entry point for a tagged word: dispatch through the
// word's pic_def. NOTE(review): the data_root<word> declaration for
// `w` (binding w_) is elided in this view.
72 cell factor_vm::compute_entry_point_pic_address(cell w_) {
74 return compute_entry_point_pic_address(w.untagged(), w->pic_def);
// Tail-call PIC entry point for a tagged word: same as the non-tail
// variant but dispatches through pic_tail_def. NOTE(review): the
// data_root<word> declaration for `w` is elided in this view.
77 cell factor_vm::compute_entry_point_pic_tail_address(cell w_) {
79 return compute_entry_point_pic_address(w.untagged(), w->pic_tail_def);
82 // Relocate new code blocks completely; updating references to literals,
83 // dlsyms, and words. For all other words in the code heap, we only need
84 // to update references to other words, without worrying about literals
86 void factor_vm::update_word_references(code_block* compiled,
87                                        bool reset_inline_caches) {
// Blocks still awaiting their initial fixup get the full relocation
// pass instead.
88   if (code->uninitialized_p(compiled)) {
89     initialize_code_block(compiled);
90     // update_word_references() is always applied to every block in
91     // the code heap. Since it resets all call sites to point to
92     // their canonical entry point (cold entry point for non-tail calls,
93     // standard entry point for tail calls), it means that no PICs
94     // are referenced after this is done. So instead of polluting
95     // the code heap with dead PICs that will be freed on the next
96     // GC, we add them to the free list immediately.
97   } else if (reset_inline_caches && compiled->pic_p()) {
// Patch each word-call operand in this block. NOTE(review): `break`
// statements and the closing of each case are elided in this view.
100     auto visit_func = [&](instruction_operand op) {
102       switch (op.rel.type()) {
// Direct call: re-resolve the owner's entry point.
103         case RT_ENTRY_POINT: {
104           code_block* dest = op.load_code_block();
105           cell owner = dest->owner;
106           if (to_boolean(owner))
107             op.store_value(compute_entry_point_address(owner));
// Non-tail PIC call site: rewrite to the canonical (cold) entry point
// unless the destination is a PIC we are keeping.
110         case RT_ENTRY_POINT_PIC: {
111           code_block* dest = op.load_code_block();
112           if (reset_inline_caches || !dest->pic_p()) {
113             cell owner = code_block_owner(dest);
114             if (to_boolean(owner))
115               op.store_value(compute_entry_point_pic_address(owner));
// Tail PIC call site: same as above, but via pic_tail_def.
119         case RT_ENTRY_POINT_PIC_TAIL: {
120           code_block* dest = op.load_code_block();
121           if (reset_inline_caches || !dest->pic_p()) {
122             cell owner = code_block_owner(dest);
123             if (to_boolean(owner))
124               op.store_value(compute_entry_point_pic_tail_address(owner));
132     compiled->each_instruction_operand(visit_func);
// The block was patched in place; keep the instruction cache coherent.
133     compiled->flush_icache();
137 // Look up an external library symbol referenced by a compiled code block
// parameters[index] is the symbol name (byte array), parameters[index+1]
// the library (a dll object, or f for the host process). On failure the
// address of factor::undefined_symbol is returned so the call traps
// with a catchable error. `toc` selects the TOC-pointer form of the
// function address on ABIs that use function descriptors.
// NOTE(review): the signature's trailing parameters (index, toc) and
// some early-return lines are elided in this view.
138 cell factor_vm::compute_dlsym_address(array* parameters,
141   cell symbol = array_nth(parameters, index);
142   cell library = array_nth(parameters, index + 1);
143   dll* d = to_boolean(library) ? untag<dll>(library) : NULL;
145   cell undef = (cell)factor::undefined_symbol;
146   undef = toc ? FUNCTION_TOC_POINTER(undef) : FUNCTION_CODE_POINTER(undef);
// A library object whose handle is null failed to load; the symbol
// cannot resolve.
147   if (d != NULL && !d->handle)
150   FACTOR_ASSERT(TAG(symbol) == BYTE_ARRAY_TYPE);
151   symbol_char* name = alien_offset(symbol);
152   cell sym = ffi_dlsym_raw(d, name);
153   sym = toc ? FUNCTION_TOC_POINTER(sym) : FUNCTION_CODE_POINTER(sym);
154   return sym ? sym : undef;
// Resolve a relocation type that refers to an address outside the code
// block being fixed up (VM fields, runtime entry points, dlsym'd
// symbols, ...). Returns (cell)-1 for unhandled types (see caller).
// NOTE(review): many case labels and the trailing parameters of the
// signature are elided in this view.
157 cell factor_vm::lookup_external_address(relocation_type rel_type,
158                                         code_block *compiled,
163       return compute_dlsym_address(parameters, index, false);
// The block's own entry point (self-reference).
165       return compiled->entry_point();
166     case RT_MEGAMORPHIC_CACHE_HITS:
167       return (cell)&dispatch_stats.megamorphic_cache_hits;
// VM-relative field access: offset into this factor_vm instance.
169       return (cell)this + untag_fixnum(array_nth(parameters, index));
170     case RT_CARDS_OFFSET:
172     case RT_DECKS_OFFSET:
// TOC-pointer variant of dlsym lookup (function-descriptor ABIs).
176       return compute_dlsym_address(parameters, index, true);
178     case RT_INLINE_CACHE_MISS:
179       return (cell)&factor::inline_cache_miss;
181       return code->safepoint_page;
// Resolve the external address for one instruction operand, dying with
// a descriptive critical_error if the relocation cannot be resolved.
187 cell factor_vm::compute_external_address(instruction_operand op) {
188   code_block* compiled = op.compiled;
// parameters may be f when the block carries no relocation parameters.
189   array* parameters = to_boolean(compiled->parameters)
190                           ? untag<array>(compiled->parameters)
193   relocation_type rel_type = op.rel.type();
195   cell ext_addr = lookup_external_address(rel_type, compiled, parameters, idx);
// (cell)-1 is the lookup's "unhandled" sentinel: build an error
// message naming the owner and offending argument, then abort.
196   if (ext_addr == (cell)-1) {
198     print_obj(ss, compiled->owner);
201     if (rel_type == RT_DLSYM || rel_type == RT_DLSYM_TOC) {
202       ss << "Bad symbol specifier in compute_external_address";
203       arg = array_nth(parameters, idx);
205       ss << "Bad rel type in compute_external_address";
208     critical_error(ss.str().c_str(), arg);
// Operand visitor used for the initial fixup of a freshly compiled
// code block: consumes the block's literal array in order, computing
// the concrete value to patch into each instruction operand.
// NOTE(review): several case labels and member declarations are elided
// in this view.
213 struct initial_code_block_visitor {
218   initial_code_block_visitor(factor_vm* parent, cell literals)
219       : parent(parent), literals(literals), literal_index(0) {}
// Pop the next literal; operands must be visited in compilation order.
221   cell next_literal() {
222     return array_nth(untag<array>(literals), literal_index++);
225   fixnum compute_operand_value(instruction_operand op) {
226     switch (op.rel.type()) {
228         return next_literal();
230         return compute_entry_point_address(next_literal());
231       case RT_ENTRY_POINT_PIC:
232         return parent->compute_entry_point_pic_address(next_literal());
233       case RT_ENTRY_POINT_PIC_TAIL:
234         return parent->compute_entry_point_pic_tail_address(next_literal());
236         return compute_here_address(
237             next_literal(), op.rel.offset(), op.compiled);
239         return untag_fixnum(next_literal());
241         return parent->compute_external_address(op);
// Patch the computed value into the instruction stream.
245   void operator()(instruction_operand op) {
246     op.store_value(compute_operand_value(op));
250 // Perform all fixups on a code block
251 void factor_vm::initialize_code_block(code_block* compiled, cell literals) {
252   initial_code_block_visitor visitor(this, literals);
253   compiled->each_instruction_operand(visitor);
// Operands were patched in place; keep the instruction cache coherent.
254   compiled->flush_icache();
256   // next time we do a minor GC, we have to trace this code block, since
257   // the newly-installed instruction operands might point to literals in
// the nursery or aging space (continuation elided in this view).
259   code->write_barrier(compiled);
// Fix up a block whose literals were stashed in uninitialized_blocks
// by add_code_block, then remove it from that map. Assumes the block
// is present in the map (no find() == end() check visible here).
262 void factor_vm::initialize_code_block(code_block* compiled) {
263   std::map<code_block*, cell>::iterator iter =
264       code->uninitialized_blocks.find(compiled);
265   initialize_code_block(compiled, iter->second);
266   code->uninitialized_blocks.erase(iter);
269 // Fixup labels. This is done at compile time, not image load time
// labels is a flat array of (rel_class, offset, target) triples; each
// target is an offset from the block's entry point, patched in as an
// RT_HERE relocation.
270 void factor_vm::fixup_labels(array* labels, code_block* compiled) {
271   cell size = array_capacity(labels);
273   for (cell i = 0; i < size; i += 3) {
274     relocation_class rel_class =
275         (relocation_class) untag_fixnum(array_nth(labels, i));
276     cell offset = untag_fixnum(array_nth(labels, i + 1));
277     cell target = untag_fixnum(array_nth(labels, i + 2));
279     relocation_entry new_entry(RT_HERE, rel_class, offset);
281     instruction_operand op(new_entry, compiled, 0);
282     op.store_value(target + compiled->entry_point());
// Allocate room for a code block of `size` payload bytes (plus header)
// in the code heap, retrying once after a full compacting GC. Dies
// with a fatal error if the heap still cannot satisfy the request.
// NOTE(review): the null-checks guarding the retry/fatal paths are
// elided in this view.
288 code_block* factor_vm::allot_code_block(cell size, code_block_type type) {
289   code_block* block = code->allocator->allot(size + sizeof(code_block));
291   // If allocation failed, do a full GC and compact the code heap.
292   // A full GC that occurs as a result of the data heap filling up does not
293   // trigger a compaction. This setup ensures that most GCs do not compact
294   // the code heap, but if the code fills up, it probably means it will be
295   // fragmented after GC anyway, so its best to compact.
297     primitive_compact_gc();
298     block = code->allocator->allot(size + sizeof(code_block));
300     // Insufficient room even after code GC, give up
302       std::cout << "Code heap used: " << code->allocator->occupied_space()
304       std::cout << "Code heap free: " << code->allocator->free_space << "\n";
305       fatal_error("Out of memory in add-compiled-block", 0);
309   block->set_type(type);
// Create a new code block in the code heap from compiler output:
// machine code bytes, label fixups, relocation data, parameters and
// literals. The literal/word fixup itself is deferred (see the comment
// near uninitialized_blocks below). All tagged inputs are pinned in
// data_roots since allot_code_block may GC.
315 code_block* factor_vm::add_code_block(code_block_type type, cell code_,
316                                       cell labels_, cell owner_,
317                                       cell relocation_, cell parameters_,
319                                       cell frame_size_untagged) {
320   data_root<byte_array> code(code_, this);
321   data_root<object> labels(labels_, this);
322   data_root<object> owner(owner_, this);
323   data_root<byte_array> relocation(relocation_, this);
324   data_root<array> parameters(parameters_, this);
325   data_root<array> literals(literals_, this);
327   cell code_length = array_capacity(code.untagged());
328   code_block* compiled = allot_code_block(code_length, type);
330   compiled->owner = owner.value();
332   // slight space optimization
// Store f instead of an empty relocation/parameter array.
333   if (relocation.type() == BYTE_ARRAY_TYPE &&
334       array_capacity(relocation.untagged()) == 0)
335     compiled->relocation = false_object;
337     compiled->relocation = relocation.value();
339   if (parameters.type() == ARRAY_TYPE &&
340       array_capacity(parameters.untagged()) == 0)
341     compiled->parameters = false_object;
343     compiled->parameters = parameters.value();
// Copy the machine code payload past the code_block header; the +1 on
// code.untagged() skips the byte_array header.
346   memcpy(compiled + 1, code.untagged() + 1, code_length);
349   if (to_boolean(labels.value()))
350     fixup_labels(labels.as<array>().untagged(), compiled);
352   compiled->set_stack_frame_size(frame_size_untagged);
354   // Once we are ready, fill in literal and word references in this code
355   // block's instruction operands. In most cases this is done right after this
356   // method returns, except when compiling words with the non-optimizing
357   // compiler at the beginning of bootstrap
358   this->code->uninitialized_blocks.insert(
359       std::make_pair(compiled, literals.value()));
360   this->code->all_blocks.insert((cell)compiled);
362   // next time we do a minor GC, we have to trace this code block, since
363   // the fields of the code_block struct might point into nursery or aging
364   this->code->write_barrier(compiled);
369 // References to undefined symbols are patched up to call this function on
370 // image load. It finds the symbol and library, and throws an error.
371 void factor_vm::undefined_symbol() {
// The return address on top of the callstack locates the code block
// whose RT_DLSYM relocation resolved to undefined_symbol.
372   cell frame = ctx->callstack_top;
373   cell return_address = *(cell*)frame;
374   code_block* compiled = code->code_block_for_address(return_address);
376   // Find the RT_DLSYM relocation nearest to the given return address.
377   cell symbol = false_object;
378   cell library = false_object;
// Each matching operand at or before the return address overwrites the
// previous, so the last (nearest preceding) one wins.
380   auto find_symbol_at_address_visitor = [&](instruction_operand op) {
381     if (op.rel.type() == RT_DLSYM && op.pointer <= return_address) {
382       array* parameters = untag<array>(compiled->parameters);
383       cell index = op.index;
384       symbol = array_nth(parameters, index);
385       library = array_nth(parameters, index + 1);
388   compiled->each_instruction_operand(find_symbol_at_address_visitor);
390   if (!to_boolean(symbol))
391     critical_error("Can't find RT_DLSYM at return address", return_address);
// Throw a catchable Factor-level error naming the symbol and library.
393     general_error(ERROR_UNDEFINED_SYMBOL, symbol, library);
396 void undefined_symbol() {
397 return current_vm()->undefined_symbol();