/* Return (as a tagged cell) the quotation this code block executes:
   an unoptimized block owned by a word delegates to that word's
   definition quotation; otherwise the owner object itself is used. */
5 cell code_block::owner_quot() const {
6 tagged<object> executing(owner);
/* Only unoptimized word blocks are redirected to the word's def. */
7 if (!optimized_p() && executing->type() == WORD_TYPE)
8 executing = executing.as<word>()->def;
9 return executing.value();
/* Map a machine address inside this code block back to a scan
   position in the owner quotation.
   NOTE(review): several interior lines (the switch header on type(),
   default returns, closing braces) are elided from this view —
   comments below describe only the visible statements. */
12 cell code_block::scan(factor_vm* vm, void* addr) const {
14 case code_block_unoptimized: {
15 tagged<object> obj(owner);
/* An unoptimized word block executes its definition quotation. */
16 if (obj.type_p(WORD_TYPE))
17 obj = obj.as<word>()->def;
/* Only quotations carry a code-offset -> scan-position mapping. */
19 if (obj.type_p(QUOTATION_TYPE))
21 vm->quot_code_offset_to_scan(obj.value(), offset(addr)));
25 case code_block_optimized:
29 critical_error("Bad frame type", type());
/* Resolve a tagged word or quotation to its machine-code entry point.
   Anything else is a fatal error.
   NOTE(review): the case labels of this switch are elided from this
   view; the two returns below correspond to the word and quotation
   cases respectively. */
34 cell factor_vm::compute_entry_point_address(cell obj) {
35 switch (tagged<object>(obj).type()) {
37 return (cell) untag<word>(obj)->entry_point;
39 return (cell) untag<quotation>(obj)->entry_point;
41 critical_error("Expected word or quotation", obj);
/* Entry point used by polymorphic-inline-cache call sites for word w.
   Prefers the compiled PIC quotation (tagged_quot) when one exists and
   PICs are enabled; otherwise falls back to the word's own entry point. */
46 cell factor_vm::compute_entry_point_pic_address(word* w, cell tagged_quot) {
/* No PIC quotation, or PICs disabled (max_pic_size == 0): call the
   word directly. */
47 if (!to_boolean(tagged_quot) || max_pic_size == 0)
48 return (cell) w->entry_point;
50 quotation* quot = untag<quotation>(tagged_quot);
/* Use the PIC stub only if it has already been compiled. */
51 if (quot_compiled_p(quot))
52 return (cell) quot->entry_point;
54 return (cell) w->entry_point;
/* Non-tail-call PIC entry point for a tagged word: dispatch through
   the word's pic_def quotation.
   NOTE(review): the declaration of the data_root `w` (line 59) is
   elided from this view. */
58 cell factor_vm::compute_entry_point_pic_address(cell w_) {
60 return compute_entry_point_pic_address(w.untagged(), w->pic_def);
/* Tail-call PIC entry point for a tagged word: same as above but
   dispatches through pic_tail_def.
   NOTE(review): the declaration of the data_root `w` (line 64) is
   elided from this view. */
63 cell factor_vm::compute_entry_point_pic_tail_address(cell w_) {
65 return compute_entry_point_pic_address(w.untagged(), w->pic_tail_def);
/* Return the logical owner of a code block. Usually this is just
   compiled->owner, but cold generic-word call sites are owned by a
   5-element inline-cache-miss quotation; in that case the real owner
   is the word wrapped in the quotation's first array element. */
68 cell factor_vm::code_block_owner(code_block* compiled) {
69 tagged<object> owner(compiled->owner);
71 /* Cold generic word call sites point to quotations that call the
72 inline-cache-miss and inline-cache-miss-tail primitives. */
73 if (owner.type_p(QUOTATION_TYPE)) {
74 tagged<quotation> quot(owner.as<quotation>());
75 tagged<array> elements(quot->array);
/* Sanity-check the expected shape: 5 elements, last being one of the
   two PIC-miss primitive words. */
77 FACTOR_ASSERT(array_capacity(elements.untagged()) == 5);
78 FACTOR_ASSERT(array_nth(elements.untagged(), 4) ==
79 special_objects[PIC_MISS_WORD] ||
80 array_nth(elements.untagged(), 4) ==
81 special_objects[PIC_MISS_TAIL_WORD]);
/* Element 0 is a wrapper around the owning word. */
83 tagged<wrapper> word_wrapper(array_nth(elements.untagged(), 0));
84 return word_wrapper->object;
86 return compiled->owner;
/* Instruction-operand visitor used by update_word_references(): resets
   each word-call relocation back to the callee's canonical entry
   point. When reset_inline_caches is set, PIC call sites are also
   rewritten to the cold (inline-cache-miss) entry points.
   NOTE(review): the `parent` member declaration, `break` statements
   and closing braces of this switch are elided from this view. */
89 struct update_word_references_relocation_visitor {
91 bool reset_inline_caches;
93 update_word_references_relocation_visitor(factor_vm* parent_,
94 bool reset_inline_caches_)
95 : parent(parent_), reset_inline_caches(reset_inline_caches_) {}
97 void operator()(instruction_operand op) {
98 switch (op.rel_type()) {
/* Plain word call: repoint at the owner's standard entry point. */
99 case RT_ENTRY_POINT: {
100 code_block* compiled = op.load_code_block();
101 cell owner = compiled->owner;
102 if (to_boolean(owner))
103 op.store_value(parent->compute_entry_point_address(owner));
/* Non-tail PIC call site: only touch it when resetting caches or when
   the target block is not itself a PIC. */
106 case RT_ENTRY_POINT_PIC: {
107 code_block* compiled = op.load_code_block();
108 if (reset_inline_caches || !compiled->pic_p()) {
109 cell owner = parent->code_block_owner(compiled);
110 if (to_boolean(owner))
111 op.store_value(parent->compute_entry_point_pic_address(owner));
/* Tail PIC call site: same policy, tail-call entry point. */
115 case RT_ENTRY_POINT_PIC_TAIL: {
116 code_block* compiled = op.load_code_block();
117 if (reset_inline_caches || !compiled->pic_p()) {
118 cell owner = parent->code_block_owner(compiled);
119 if (to_boolean(owner))
120 op.store_value(parent->compute_entry_point_pic_tail_address(owner));
130 /* Relocate new code blocks completely; updating references to literals,
131 dlsyms, and words. For all other words in the code heap, we only need
132 to update references to other words, without worrying about literals
/* Re-resolve all word-call relocations in one code block (applied to
   every block in the code heap). Uninitialized blocks are fully
   initialized first; dead PICs are freed eagerly instead.
   NOTE(review): the `else` branch structure around the visitor lines
   below is partially elided from this view. */
134 void factor_vm::update_word_references(code_block* compiled,
135 bool reset_inline_caches) {
136 if (code->uninitialized_p(compiled))
137 initialize_code_block(compiled);
138 /* update_word_references() is always applied to every block in
139 the code heap. Since it resets all call sites to point to
140 their canonical entry point (cold entry point for non-tail calls,
141 standard entry point for tail calls), it means that no PICs
142 are referenced after this is done. So instead of polluting
143 the code heap with dead PICs that will be freed on the next
144 GC, we add them to the free list immediately. */
145 else if (reset_inline_caches && compiled->pic_p())
146 code->free(compiled);
148 update_word_references_relocation_visitor visitor(this,
149 reset_inline_caches);
150 compiled->each_instruction_operand(visitor);
/* Instruction bytes changed: keep the CPU instruction cache coherent. */
151 compiled->flush_icache();
155 /* Look up an external library symbol referenced by a compiled code block */
/* Resolve the RT_DLSYM relocation at `index`: parameters[index] is the
   symbol (byte-array name or array of candidate names) and
   parameters[index+1] the library (or f for the host process). On any
   failure the address of the undefined_symbol trampoline is returned.
   NOTE(review): the success-path return lines after each ffi_dlsym
   call are elided from this view. */
156 cell factor_vm::compute_dlsym_address(array* parameters, cell index) {
157 cell symbol = array_nth(parameters, index);
158 cell library = array_nth(parameters, index + 1);
/* f library means "look up in the current executable". */
160 dll* d = (to_boolean(library) ? untag<dll>(library) : NULL);
162 void* undefined_symbol = (void*)factor::undefined_symbol;
163 undefined_symbol = FUNCTION_CODE_POINTER(undefined_symbol);
/* A dll object whose handle is null failed to load. */
164 if (d != NULL && !d->handle)
165 return (cell) undefined_symbol;
167 switch (tagged<object>(symbol).type()) {
/* Single symbol name. */
168 case BYTE_ARRAY_TYPE: {
169 symbol_char* name = alien_offset(symbol);
170 void* sym = ffi_dlsym(d, name);
175 return (cell) undefined_symbol;
/* Array of alternative names: first one that resolves wins. */
178 array* names = untag<array>(symbol);
179 for (cell i = 0; i < array_capacity(names); i++) {
180 symbol_char* name = alien_offset(array_nth(names, i));
181 void* sym = ffi_dlsym(d, name);
186 return (cell) undefined_symbol;
189 critical_error("Bad symbol specifier", symbol);
190 return (cell) undefined_symbol;
/* TOC-pointer variant of compute_dlsym_address() for ABIs (e.g.
   PPC64) where a function value is a code pointer plus a TOC pointer.
   Same lookup logic, but resolves via ffi_dlsym_toc and falls back to
   the undefined_symbol trampoline's TOC pointer.
   NOTE(review): the success-path return lines after each
   ffi_dlsym_toc call are elided from this view. */
195 cell factor_vm::compute_dlsym_toc_address(array* parameters, cell index) {
196 cell symbol = array_nth(parameters, index);
197 cell library = array_nth(parameters, index + 1);
199 dll* d = (to_boolean(library) ? untag<dll>(library) : NULL);
201 void* undefined_toc = (void*)factor::undefined_symbol;
202 undefined_toc = FUNCTION_TOC_POINTER(undefined_toc);
203 if (d != NULL && !d->handle)
204 return (cell) undefined_toc;
206 switch (tagged<object>(symbol).type()) {
/* Single symbol name. */
207 case BYTE_ARRAY_TYPE: {
208 symbol_char* name = alien_offset(symbol);
209 void* toc = ffi_dlsym_toc(d, name);
213 return (cell) undefined_toc;
/* Array of alternative names: first one that resolves wins. */
216 array* names = untag<array>(symbol);
217 for (cell i = 0; i < array_capacity(names); i++) {
218 symbol_char* name = alien_offset(array_nth(names, i));
219 void* toc = ffi_dlsym_toc(d, name);
224 return (cell) undefined_toc;
227 critical_error("Bad symbol specifier", symbol);
228 return (cell) undefined_toc;
/* RT_VM relocation target: the address of a field inside this VM
   instance, given its byte offset as a tagged fixnum. */
233 cell factor_vm::compute_vm_address(cell arg) {
234 return (cell) this + untag_fixnum(arg);
/* Patch one instruction operand whose relocation refers to something
   outside the data heap (dlsym, VM field, runtime entry points, GC
   card/deck offsets, safepoint page, ...).
   NOTE(review): most case labels and all break statements of this
   switch are elided from this view; the comments below attach to the
   visible store_value calls only. */
237 void factor_vm::store_external_address(instruction_operand op) {
238 code_block* compiled = op.compiled;
/* Blocks without parameters store f; treat that as "no array". */
240 (to_boolean(compiled->parameters) ? untag<array>(compiled->parameters)
242 cell index = op.index;
244 switch (op.rel_type()) {
/* RT_DLSYM (label elided): resolve external symbol. */
246 op.store_value(compute_dlsym_address(parameters, index));
/* Self entry point (label elided). */
249 op.store_value((cell) compiled->entry_point());
251 case RT_MEGAMORPHIC_CACHE_HITS:
252 op.store_value((cell) & dispatch_stats.megamorphic_cache_hits);
/* RT_VM (label elided): address of a VM field. */
255 op.store_value(compute_vm_address(array_nth(parameters, index)));
257 case RT_CARDS_OFFSET:
258 op.store_value(cards_offset);
260 case RT_DECKS_OFFSET:
261 op.store_value(decks_offset);
264 case RT_EXCEPTION_HANDLER:
265 op.store_value((cell) & factor::exception_handler);
/* RT_DLSYM_TOC (label elided). */
270 op.store_value(compute_dlsym_toc_address(parameters, index));
273 case RT_INLINE_CACHE_MISS:
274 op.store_value((cell) & factor::inline_cache_miss);
/* Safepoint-page case (label elided). */
277 op.store_value((cell) code->safepoint_page);
280 critical_error("Bad rel type in store_external_address()", op.rel_type());
/* RT_HERE relocation: compute a PC-relative address inside `compiled`
   from a tagged-fixnum displacement `arg`.
   NOTE(review): the conditional selecting between the two returns
   (which tests the sign of n) is elided from this view. */
285 cell factor_vm::compute_here_address(cell arg, cell offset,
286 code_block* compiled) {
287 fixnum n = untag_fixnum(arg);
289 return (cell) compiled->entry_point() + offset + n;
291 return (cell) compiled->entry_point() - n;
/* Instruction-operand visitor that performs the initial fixup of a
   freshly compiled code block, consuming one literal from the
   compiler-supplied literal array per relocation that needs one.
   NOTE(review): member declarations, case labels and breaks of this
   switch are elided from this view. */
294 struct initial_code_block_visitor {
299 explicit initial_code_block_visitor(factor_vm* parent_, cell literals_)
300 : parent(parent_), literals(literals_), literal_index(0) {}
/* Pop the next literal supplied by the compiler for this block. */
302 cell next_literal() {
303 return array_nth(untag<array>(literals), literal_index++);
306 void operator()(instruction_operand op) {
307 switch (op.rel_type()) {
/* Literal relocation (label elided): store the object itself. */
309 op.store_value(next_literal());
/* Word/quotation call (label elided): store its entry point. */
312 op.store_value(parent->compute_entry_point_address(next_literal()));
314 case RT_ENTRY_POINT_PIC:
315 op.store_value(parent->compute_entry_point_pic_address(next_literal()));
317 case RT_ENTRY_POINT_PIC_TAIL:
319 parent->compute_entry_point_pic_tail_address(next_literal()));
/* RT_HERE (label elided): PC-relative address within this block. */
322 op.store_value(parent->compute_here_address(
323 next_literal(), op.rel_offset(), op.compiled));
/* Untagged-fixnum relocation (label elided). */
326 op.store_value(untag_fixnum(next_literal()));
/* Everything else is an external address. */
329 parent->store_external_address(op);
335 /* Perform all fixups on a code block */
/* Apply all relocation fixups to `compiled`, drawing literals from the
   given literal array, then flush the instruction cache and remember
   the block for the GC write barrier. */
336 void factor_vm::initialize_code_block(code_block* compiled, cell literals) {
337 initial_code_block_visitor visitor(this, literals);
338 compiled->each_instruction_operand(visitor);
339 compiled->flush_icache();
341 /* next time we do a minor GC, we have to trace this code block, since
342 the newly-installed instruction operands might point to literals in
344 code->write_barrier(compiled);
/* Initialize a block whose literal array was stashed in
   code->uninitialized_blocks by add_code_block().
   NOTE(review): no visible guard against `find` returning end() —
   callers are expected to pass only blocks known to be uninitialized
   (see uninitialized_p check in update_word_references); confirm. */
347 void factor_vm::initialize_code_block(code_block* compiled) {
348 std::map<code_block*, cell>::iterator iter =
349 code->uninitialized_blocks.find(compiled);
350 initialize_code_block(compiled, iter->second);
/* The block is now fully fixed up; drop the stashed literals. */
351 code->uninitialized_blocks.erase(iter);
354 /* Fixup labels. This is done at compile time, not image load time */
/* Resolve compiler-generated label references inside a new code block.
   `labels` is a flat array of (relocation class, offset, target)
   triples of tagged fixnums; each becomes a synthetic RT_HERE
   relocation patched to entry_point + target. */
355 void factor_vm::fixup_labels(array* labels, code_block* compiled) {
356 cell size = array_capacity(labels);
358 for (cell i = 0; i < size; i += 3) {
359 relocation_class rel_class =
360 (relocation_class) untag_fixnum(array_nth(labels, i));
361 cell offset = untag_fixnum(array_nth(labels, i + 1));
362 cell target = untag_fixnum(array_nth(labels, i + 2));
364 relocation_entry new_entry(RT_HERE, rel_class, offset);
366 instruction_operand op(new_entry, compiled, 0);
367 op.store_value(target + (cell) compiled->entry_point());
372 /* Allocates memory */
/* Allocates memory: carve a code_block of `size` payload bytes out of
   the code heap, retrying once after a compacting GC; fatal if the
   second attempt also fails.
   NOTE(review): the null-checks guarding the retry/failure branches
   and the return statement are elided from this view. */
373 code_block* factor_vm::allot_code_block(cell size, code_block_type type) {
374 code_block* block = code->allocator->allot(size + sizeof(code_block));
376 /* If allocation failed, do a full GC and compact the code heap.
377 A full GC that occurs as a result of the data heap filling up does not
378 trigger a compaction. This setup ensures that most GCs do not compact
379 the code heap, but if the code fills up, it probably means it will be
380 fragmented after GC anyway, so its best to compact. */
382 primitive_compact_gc();
383 block = code->allocator->allot(size + sizeof(code_block));
385 /* Insufficient room even after code GC, give up */
387 std::cout << "Code heap used: " << code->allocator->occupied_space()
389 std::cout << "Code heap free: " << code->allocator->free_space() << "\n";
390 fatal_error("Out of memory in add-compiled-block", 0);
394 block->set_type(type);
399 /* Allocates memory */
/* Allocates memory: build a complete code_block from compiler output —
   machine code bytes, label fixups, owner object, relocation table,
   parameters and literals — and register it with the code heap. The
   literal/word operand fixups themselves are deferred (stashed in
   uninitialized_blocks) until initialize_code_block() runs.
   NOTE(review): the declaration of the literals_ parameter and the
   final return are elided from this view. */
400 code_block* factor_vm::add_code_block(code_block_type type, cell code_,
401 cell labels_, cell owner_,
402 cell relocation_, cell parameters_,
404 cell frame_size_untagged) {
/* data_roots pin every argument across the GC that allot_code_block
   may trigger. */
405 data_root<byte_array> code(code_, this);
406 data_root<object> labels(labels_, this);
407 data_root<object> owner(owner_, this);
408 data_root<byte_array> relocation(relocation_, this);
409 data_root<array> parameters(parameters_, this);
410 data_root<array> literals(literals_, this);
412 cell code_length = array_capacity(code.untagged());
413 code_block* compiled = allot_code_block(code_length, type);
415 compiled->owner = owner.value();
417 /* slight space optimization */
418 if (relocation.type() == BYTE_ARRAY_TYPE &&
419 array_capacity(relocation.untagged()) == 0)
420 compiled->relocation = false_object;
422 compiled->relocation = relocation.value();
424 if (parameters.type() == ARRAY_TYPE &&
425 array_capacity(parameters.untagged()) == 0)
426 compiled->parameters = false_object;
428 compiled->parameters = parameters.value();
/* Copy the machine code payload; +1 skips the object headers. */
431 memcpy(compiled + 1, code.untagged() + 1, code_length);
434 if (to_boolean(labels.value()))
435 fixup_labels(labels.as<array>().untagged(), compiled);
437 compiled->set_stack_frame_size(frame_size_untagged);
439 /* Once we are ready, fill in literal and word references in this code
440 block's instruction operands. In most cases this is done right after this
441 method returns, except when compiling words with the non-optimizing
442 compiler at the beginning of bootstrap */
443 this->code->uninitialized_blocks
444 .insert(std::make_pair(compiled, literals.value()));
445 this->code->all_blocks.insert((cell) compiled);
447 /* next time we do a minor GC, we have to trace this code block, since
448 the fields of the code_block struct might point into nursery or aging */
449 this->code->write_barrier(compiled);
454 /* Find the RT_DLSYM relocation nearest to the given return address. */
/* Visitor that records the symbol/library pair of the RT_DLSYM
   relocation nearest to (at or before) a given return address. Fields
   start as false_object so the caller can detect "not found".
   NOTE(review): the member declarations and the parent_ initializer
   are elided from this view. */
455 struct find_symbol_at_address_visitor {
461 find_symbol_at_address_visitor(factor_vm* parent_, cell return_address_)
463 return_address(return_address_),
464 symbol(false_object),
465 library(false_object) {}
467 void operator()(instruction_operand op) {
/* Later matching operands overwrite earlier ones, so the last DLSYM
   at or before the return address wins. */
468 if (op.rel_type() == RT_DLSYM && op.pointer <= return_address) {
469 code_block* compiled = op.compiled;
470 array* parameters = untag<array>(compiled->parameters);
471 cell index = op.index;
472 symbol = array_nth(parameters, index);
473 library = array_nth(parameters, index + 1);
478 /* References to undefined symbols are patched up to call this function on
479 image load. It finds the symbol and library, and throws an error. */
/* Called (via the trampoline patched in by compute_dlsym_address) when
   code invokes a symbol that failed to resolve at load time: find the
   offending RT_DLSYM at the caller's return address and raise a Factor
   undefined-symbol error with the symbol and library. */
480 void factor_vm::undefined_symbol() {
481 void* frame = ctx->callstack_top;
482 void* return_address = frame_return_address(frame);
483 code_block* compiled = code->code_block_for_address((cell) return_address);
484 find_symbol_at_address_visitor visitor(this, (cell) return_address);
485 compiled->each_instruction_operand(visitor);
486 if (!to_boolean(visitor.symbol))
487 critical_error("Can't find RT_DLSYM at return address",
488 (cell) return_address);
/* Throws into Factor; does not return here. */
490 general_error(ERROR_UNDEFINED_SYMBOL, visitor.symbol, visitor.library);
/* Free-function trampoline whose address is patched into code in place
   of unresolved symbols; forwards to the current VM instance. */
493 void undefined_symbol() { return current_vm()->undefined_symbol(); }