/* Resolve the Factor object that logically owns a code block.
   For inline-cache (PIC) stubs, compiled->owner points at a 5-element
   caller quotation; the real owner is a word stored (wrapped) as the
   quotation's first array element.
   NOTE(review): this extract is missing several original lines (the
   early return for the non-quotation case and the closing lines) --
   confirm against the full file before editing. */
5 cell code_block_owner(code_block* compiled) {
6 cell owner = compiled->owner;
8 /* Cold generic word call sites point to quotations that call the
9 inline-cache-miss and inline-cache-miss-tail primitives. */
10 if (TAG(owner) != QUOTATION_TYPE)
13 quotation* quot = untag<quotation>(owner);
14 array* elements = untag<array>(quot->array);
/* PIC stub quotations are expected to have exactly 5 elements. */
16 FACTOR_ASSERT(array_capacity(elements) == 5);
/* Element 0 is a wrapper around the owning word. */
17 wrapper* wrap = untag<wrapper>(array_nth(elements, 0));
/* Map a tagged word or quotation to its machine-code entry point.
   Any other object type is a fatal error.
   NOTE(review): the if/else type-dispatch lines are missing from this
   extract -- only the return expressions and the error call are
   visible; verify the branch structure in the full file. */
21 cell compute_entry_point_address(cell obj) {
24 return untag<word>(obj)->entry_point;
26 return untag<quotation>(obj)->entry_point;
28 critical_error("Expected word or quotation", obj);
/* For an unoptimized code block whose owner is a word, return that
   word's definition quotation.
   NOTE(review): the fall-through return for all other cases is not
   visible in this extract. */
33 cell code_block::owner_quot() const {
34 if (!optimized_p() && TAG(owner) == WORD_TYPE)
35 return untag<word>(owner)->def;
39 /* If the code block is an unoptimized quotation, we can calculate the
40 scan offset. In all other cases -1 is returned. */
/* NOTE(review): the lines deriving `ptr` from `owner` are missing from
   this extract (ptr is used below without a visible declaration). */
41 cell code_block::scan(factor_vm* vm, cell addr) const {
/* Only unoptimized code blocks carry a recoverable scan offset. */
42 if (type() != code_block_unoptimized) {
43 return tag_fixnum(-1);
/* If the owner is a word, follow it to its definition quotation. */
47 if (TAG(ptr) == WORD_TYPE)
48 ptr = untag<word>(ptr)->def;
49 if (TAG(ptr) != QUOTATION_TYPE)
50 return tag_fixnum(-1);
/* Translate the code address to an offset within this block, then to
   a quotation scan offset. */
51 cell ofs = offset(addr);
52 return tag_fixnum(vm->quot_code_offset_to_scan(ptr, ofs));
/* Choose the entry point a PIC call site should target: the compiled
   PIC quotation's entry point when one exists, otherwise the word's
   own (cold) entry point. PICs are bypassed entirely when inline
   caching is disabled (max_pic_size == 0) or no PIC quotation is set. */
55 cell factor_vm::compute_entry_point_pic_address(word* w, cell tagged_quot) {
56 if (!to_boolean(tagged_quot) || max_pic_size == 0)
57 return w->entry_point;
58 quotation* q = untag<quotation>(tagged_quot);
/* Fall back to the word's entry point if the PIC quotation has not
   been compiled yet. */
59 if (quotation_compiled_p(q))
60 return q->entry_point;
61 return w->entry_point;
/* Tagged-word convenience overload: dispatch on the word's non-tail
   PIC definition (pic_def).
   NOTE(review): the `data_root<word> w(w_, this);` declaration line is
   missing from this extract -- `w` is used below without a visible
   declaration. */
64 cell factor_vm::compute_entry_point_pic_address(cell w_) {
66 return compute_entry_point_pic_address(w.untagged(), w->pic_def);
/* Same as compute_entry_point_pic_address(cell) but for tail-call
   sites, which use the word's pic_tail_def.
   NOTE(review): the `data_root<word> w(w_, this);` declaration line is
   missing from this extract. */
69 cell factor_vm::compute_entry_point_pic_tail_address(cell w_) {
71 return compute_entry_point_pic_address(w.untagged(), w->pic_tail_def);
/* Instruction-operand visitor that repoints call sites at their
   canonical entry points after word redefinition. Non-PIC entry points
   are recomputed directly; PIC entry points are reset either when
   reset_inline_caches is requested or when the referring block is not
   itself a PIC.
   NOTE(review): this extract is missing several lines of the struct
   (the `factor_vm* parent;` member declaration, `break;` statements,
   and closing braces) -- confirm against the full file. */
74 struct update_word_references_relocation_visitor {
/* When true, all inline-cache call sites are reset to cold entry
   points. */
76 bool reset_inline_caches;
78 update_word_references_relocation_visitor(factor_vm* parent,
79 bool reset_inline_caches)
80 : parent(parent), reset_inline_caches(reset_inline_caches) {}
/* Called once per relocation in the visited code block. */
82 void operator()(instruction_operand op) {
83 code_block* compiled = op.load_code_block();
84 switch (op.rel.type()) {
85 case RT_ENTRY_POINT: {
86 cell owner = compiled->owner;
87 if (to_boolean(owner))
88 op.store_value(compute_entry_point_address(owner));
91 case RT_ENTRY_POINT_PIC: {
/* Skip blocks that are live PICs unless a full reset was asked for. */
92 if (reset_inline_caches || !compiled->pic_p()) {
93 cell owner = code_block_owner(compiled);
94 if (to_boolean(owner))
95 op.store_value(parent->compute_entry_point_pic_address(owner));
99 case RT_ENTRY_POINT_PIC_TAIL: {
100 if (reset_inline_caches || !compiled->pic_p()) {
101 cell owner = code_block_owner(compiled);
102 if (to_boolean(owner))
103 op.store_value(parent->compute_entry_point_pic_tail_address(owner));
/* NOTE(review): the line closing the comment below (original line 116,
   presumably ending in a comment terminator) is missing from this
   extract, so the following function header is swallowed into the
   comment here; confirm against the full file. */
113 /* Relocate new code blocks completely; updating references to literals,
114 dlsyms, and words. For all other words in the code heap, we only need
115 to update references to other words, without worrying about literals
117 void factor_vm::update_word_references(code_block* compiled,
118 bool reset_inline_caches) {
119 if (code->uninitialized_p(compiled))
120 initialize_code_block(compiled);
121 /* update_word_references() is always applied to every block in
122 the code heap. Since it resets all call sites to point to
123 their canonical entry point (cold entry point for non-tail calls,
124 standard entry point for tail calls), it means that no PICs
125 are referenced after this is done. So instead of polluting
126 the code heap with dead PICs that will be freed on the next
127 GC, we add them to the free list immediately. */
128 else if (reset_inline_caches && compiled->pic_p())
129 code->free(compiled);
/* Otherwise, rewrite each call-site operand to its canonical target
   and flush the instruction cache for the modified range. */
131 update_word_references_relocation_visitor visitor(this,
132 reset_inline_caches);
133 compiled->each_instruction_operand(visitor);
134 compiled->flush_icache();
138 /* Look up an external library symbol referenced by a compiled code
/* Reads (symbol, library) from consecutive parameter slots; resolves
   the symbol via ffi_dlsym_raw, falling back to the address of
   factor::undefined_symbol when resolution fails. The `toc` flag
   selects TOC-pointer vs code-pointer form (relevant on PowerPC-style
   ABIs).
   NOTE(review): this extract is missing the remaining parameter
   declarations (index, toc) and the early-return body for the
   unloaded-library case -- confirm against the full file. */
140 cell factor_vm::compute_dlsym_address(array* parameters,
143 cell symbol = array_nth(parameters, index);
144 cell library = array_nth(parameters, index + 1);
/* A false library object means "search the host process" (NULL dll). */
145 dll* d = to_boolean(library) ? untag<dll>(library) : NULL;
147 cell undef = (cell)factor::undefined_symbol;
148 undef = toc ? FUNCTION_TOC_POINTER(undef) : FUNCTION_CODE_POINTER(undef);
/* Library object exists but was never successfully dlopen'ed. */
149 if (d != NULL && !d->handle)
152 FACTOR_ASSERT(TAG(symbol) == BYTE_ARRAY_TYPE);
153 symbol_char* name = alien_offset(symbol);
154 cell sym = ffi_dlsym_raw(d, name);
155 sym = toc ? FUNCTION_TOC_POINTER(sym) : FUNCTION_CODE_POINTER(sym);
/* Unresolvable symbols are patched to call undefined_symbol() at
   runtime instead of crashing at link time. */
156 return sym ? sym : undef;
/* Resolve a relocation type to the absolute address it should be
   patched with: dlsym addresses, VM-internal addresses, dispatch
   counters, the safepoint page, etc.
   NOTE(review): this extract is missing most of the switch's `case`
   labels and several return statements -- the visible returns cannot
   all be attributed to a specific relocation type; confirm against the
   full file before editing. */
159 cell factor_vm::lookup_external_address(relocation_type rel_type,
160 code_block *compiled,
165 return compute_dlsym_address(parameters, index, false);
167 return compiled->entry_point();
168 case RT_MEGAMORPHIC_CACHE_HITS:
169 return (cell)&dispatch_stats.megamorphic_cache_hits;
/* VM-relative offset: parameter is a fixnum offset from `this`. */
171 return (cell)this + untag_fixnum(array_nth(parameters, index));
172 case RT_CARDS_OFFSET:
174 case RT_DECKS_OFFSET:
/* TOC-form dlsym lookup. */
178 return compute_dlsym_address(parameters, index, true);
180 case RT_INLINE_CACHE_MISS:
181 return (cell)&factor::inline_cache_miss;
183 return code->safepoint_page;
/* Compute the external address for one instruction operand, reporting
   a fatal error (with the offending object) when lookup fails.
   NOTE(review): this extract is missing several lines, including the
   declaration of `idx`, the `false`-parameters branch terminator, the
   error-object initialization, and closing braces -- confirm against
   the full file. */
189 cell factor_vm::compute_external_address(instruction_operand op) {
190 code_block* compiled = op.compiled;
191 array* parameters = to_boolean(compiled->parameters)
192 ? untag<array>(compiled->parameters)
195 relocation_type rel_type = op.rel.type();
197 cell ext_addr = lookup_external_address(rel_type, compiled, parameters, idx);
/* (cell)-1 is the lookup's failure sentinel; build a diagnostic
   message naming the code block's owner. */
198 if (ext_addr == (cell)-1) {
200 print_obj(ss, compiled->owner);
203 if (rel_type == RT_DLSYM || rel_type == RT_DLSYM_TOC) {
204 ss << "Bad symbol specifier in compute_external_address";
205 arg = array_nth(parameters, idx);
207 ss << "Bad rel type in compute_external_address";
210 critical_error(ss.str().c_str(), arg);
/* Compute an RT_HERE address: a fixnum argument encodes an offset
   relative to the code block's entry point.
   NOTE(review): the line selecting between the two returns (presumably
   a sign test on `n`, original line 218) is missing from this
   extract -- confirm against the full file. */
215 cell factor_vm::compute_here_address(cell arg, cell offset,
216 code_block* compiled) {
217 fixnum n = untag_fixnum(arg);
219 return compiled->entry_point() + offset + n;
220 return compiled->entry_point() - n;
/* Visitor used when a freshly compiled code block is first fixed up:
   consumes the block's literal array in order, computing each
   instruction operand's value from its relocation type.
   NOTE(review): this extract is missing the member declarations
   (parent, literals, literal_index), most `case` labels in the switch,
   the default branch, and closing braces -- confirm against the full
   file before editing. */
223 struct initial_code_block_visitor {
228 initial_code_block_visitor(factor_vm* parent, cell literals)
229 : parent(parent), literals(literals), literal_index(0) {}
/* Pop the next literal from the block's literal array (consumed in
   relocation order). */
231 cell next_literal() {
232 return array_nth(untag<array>(literals), literal_index++);
235 fixnum compute_operand_value(instruction_operand op) {
236 switch (op.rel.type()) {
238 return next_literal();
240 return compute_entry_point_address(next_literal());
241 case RT_ENTRY_POINT_PIC:
242 return parent->compute_entry_point_pic_address(next_literal());
243 case RT_ENTRY_POINT_PIC_TAIL:
244 return parent->compute_entry_point_pic_tail_address(next_literal());
246 return parent->compute_here_address(
247 next_literal(), op.rel.offset(), op.compiled);
249 return untag_fixnum(next_literal());
251 return parent->compute_external_address(op);
/* Store the computed value into the operand's instruction bytes. */
255 void operator()(instruction_operand op) {
256 op.store_value(compute_operand_value(op));
260 /* Perform all fixups on a code block */
/* Applies the initial relocation pass (literals, words, dlsyms) to a
   newly compiled block, then flushes the instruction cache for the
   patched range. */
261 void factor_vm::initialize_code_block(code_block* compiled, cell literals) {
262 initial_code_block_visitor visitor(this, literals);
263 compiled->each_instruction_operand(visitor);
264 compiled->flush_icache();
266 /* next time we do a minor GC, we have to trace this code block, since
267 the newly-installed instruction operands might point to literals in
269 code->write_barrier(compiled);
/* One-argument form: look up the pending literal array recorded for
   this block at add_code_block() time, apply the fixups, and remove
   the block from the uninitialized set.
   NOTE(review): `iter` is dereferenced without an end() check --
   presumably callers guarantee the block is in uninitialized_blocks
   (e.g. via code->uninitialized_p); confirm in the full file. */
272 void factor_vm::initialize_code_block(code_block* compiled) {
273 std::map<code_block*, cell>::iterator iter =
274 code->uninitialized_blocks.find(compiled);
275 initialize_code_block(compiled, iter->second);
276 code->uninitialized_blocks.erase(iter);
279 /* Fixup labels. This is done at compile time, not image load time */
/* `labels` is a flat array of (relocation class, offset, target)
   triples; each target is patched as an absolute address relative to
   the block's entry point using a synthesized RT_HERE relocation.
   NOTE(review): a few lines (blank separators / closing braces) are
   missing from this extract. */
280 void factor_vm::fixup_labels(array* labels, code_block* compiled) {
281 cell size = array_capacity(labels);
/* Step by 3: each label record occupies three consecutive slots. */
283 for (cell i = 0; i < size; i += 3) {
284 relocation_class rel_class =
285 (relocation_class) untag_fixnum(array_nth(labels, i));
286 cell offset = untag_fixnum(array_nth(labels, i + 1));
287 cell target = untag_fixnum(array_nth(labels, i + 2));
289 relocation_entry new_entry(RT_HERE, rel_class, offset);
291 instruction_operand op(new_entry, compiled, 0);
292 op.store_value(target + compiled->entry_point());
297 /* Allocates memory */
/* Allocate room for a code block (header + `size` bytes of machine
   code) from the code heap, retrying once after a compacting GC; fatal
   error if still out of room.
   NOTE(review): this extract is missing the null-checks around the
   retry (`if (!block)`), the final `return block;`, and closing
   braces -- confirm against the full file. */
298 code_block* factor_vm::allot_code_block(cell size, code_block_type type) {
299 code_block* block = code->allocator->allot(size + sizeof(code_block));
301 /* If allocation failed, do a full GC and compact the code heap.
302 A full GC that occurs as a result of the data heap filling up does not
303 trigger a compaction. This setup ensures that most GCs do not compact
304 the code heap, but if the code fills up, it probably means it will be
305 fragmented after GC anyway, so its best to compact. */
307 primitive_compact_gc();
308 block = code->allocator->allot(size + sizeof(code_block));
310 /* Insufficient room even after code GC, give up */
312 std::cout << "Code heap used: " << code->allocator->occupied_space()
314 std::cout << "Code heap free: " << code->allocator->free_space() << "\n";
315 fatal_error("Out of memory in add-compiled-block", 0);
319 block->set_type(type);
324 /* Allocates memory */
/* Build a complete code block from compiler output: allocate it,
   copy in the machine code, record owner/relocation/parameter fields,
   fix up labels, and defer literal/word fixups by registering the
   block in code->uninitialized_blocks. All object arguments are
   data_root-pinned because allot_code_block can trigger GC.
   NOTE(review): this extract is missing a number of lines (the
   `literals_` parameter declaration line, blank separators, `else`
   keywords pairing the two field assignments, and the final
   `return compiled;`) -- confirm against the full file. */
325 code_block* factor_vm::add_code_block(code_block_type type, cell code_,
326 cell labels_, cell owner_,
327 cell relocation_, cell parameters_,
329 cell frame_size_untagged) {
330 data_root<byte_array> code(code_, this);
331 data_root<object> labels(labels_, this);
332 data_root<object> owner(owner_, this);
333 data_root<byte_array> relocation(relocation_, this);
334 data_root<array> parameters(parameters_, this);
335 data_root<array> literals(literals_, this);
337 cell code_length = array_capacity(code.untagged());
338 code_block* compiled = allot_code_block(code_length, type);
340 compiled->owner = owner.value();
342 /* slight space optimization */
/* Empty relocation/parameter arrays are stored as `f` to save space. */
343 if (relocation.type() == BYTE_ARRAY_TYPE &&
344 array_capacity(relocation.untagged()) == 0)
345 compiled->relocation = false_object;
347 compiled->relocation = relocation.value();
349 if (parameters.type() == ARRAY_TYPE &&
350 array_capacity(parameters.untagged()) == 0)
351 compiled->parameters = false_object;
353 compiled->parameters = parameters.value();
/* Copy the machine code payload; `+ 1` skips the object headers on
   both sides. */
356 memcpy(compiled + 1, code.untagged() + 1, code_length);
359 if (to_boolean(labels.value()))
360 fixup_labels(labels.as<array>().untagged(), compiled);
362 compiled->set_stack_frame_size(frame_size_untagged);
364 /* Once we are ready, fill in literal and word references in this code
365 block's instruction operands. In most cases this is done right after this
366 method returns, except when compiling words with the non-optimizing
367 compiler at the beginning of bootstrap */
368 this->code->uninitialized_blocks.insert(
369 std::make_pair(compiled, literals.value()));
370 this->code->all_blocks.insert((cell)compiled);
372 /* next time we do a minor GC, we have to trace this code block, since
373 the fields of the code_block struct might point into nursery or aging */
374 this->code->write_barrier(compiled);
379 /* References to undefined symbols are patched up to call this function on
380 image load. It finds the symbol and library, and throws an error. */
/* Walks the relocations of the code block containing the caller's
   return address to locate the nearest preceding RT_DLSYM record,
   then raises ERROR_UNDEFINED_SYMBOL with that symbol and library.
   NOTE(review): this extract is missing the line terminating the
   comment below (original line 387) and several other lines (null
   check on `compiled`, closing braces) -- confirm against the full
   file before editing. */
381 void factor_vm::undefined_symbol() {
382 cell frame = ctx->callstack_top;
383 cell return_address = *(cell*)frame;
384 code_block* compiled = code->code_block_for_address(return_address);
386 /* Find the RT_DLSYM relocation nearest to the given return
388 cell symbol = false_object;
389 cell library = false_object;
391 auto find_symbol_at_address_visitor = [&](instruction_operand op) {
392 if (op.rel.type() == RT_DLSYM && op.pointer <= return_address) {
393 array* parameters = untag<array>(compiled->parameters);
394 cell index = op.index;
395 symbol = array_nth(parameters, index);
396 library = array_nth(parameters, index + 1);
399 compiled->each_instruction_operand(find_symbol_at_address_visitor);
401 if (!to_boolean(symbol))
402 critical_error("Can't find RT_DLSYM at return address", return_address);
404 general_error(ERROR_UNDEFINED_SYMBOL, symbol, library);
/* C-linkage trampoline: forwards to the current VM instance's
   undefined_symbol(). (Definition continues past the end of this
   extract -- the closing brace is not visible.) */
407 void undefined_symbol() {
408 return current_vm()->undefined_symbol();