/* CPU identification string and linkage macro for the PowerPC port */
#define FACTOR_CPU_STRING "ppc"
#define VM_ASM_API VM_C_API
7 register cell ds asm("r13");
8 register cell rs asm("r14");
10 /* In the instruction sequence:
15 the offset from the immediate operand to LOAD32 to the instruction after
16 the branch is two instructions. */
17 static const fixnum xt_tail_pic_offset = 4 * 2;
19 inline static void check_call_site(cell return_address)
22 cell insn = *(cell *)return_address;
23 /* Check that absolute bit is 0 */
24 assert((insn & 0x2) == 0x0);
25 /* Check that instruction is branch */
26 assert((insn >> 26) == 0x12);
30 #define B_MASK 0x3fffffc
32 inline static void *get_call_target(cell return_address)
34 return_address -= sizeof(cell);
35 check_call_site(return_address);
37 cell insn = *(cell *)return_address;
38 cell unsigned_addr = (insn & B_MASK);
39 fixnum signed_addr = (fixnum)(unsigned_addr << 6) >> 6;
40 return (void *)(signed_addr + return_address);
43 inline static void set_call_target(cell return_address, void *target)
45 return_address -= sizeof(cell);
46 check_call_site(return_address);
48 cell insn = *(cell *)return_address;
50 fixnum relative_address = ((cell)target - return_address);
51 insn = ((insn & ~B_MASK) | (relative_address & B_MASK));
52 *(cell *)return_address = insn;
54 /* Flush the cache line containing the call we just patched */
55 __asm__ __volatile__ ("icbi 0, %0\n" "sync\n"::"r" (return_address):);
58 inline static bool tail_call_site_p(cell return_address)
60 return_address -= sizeof(cell);
61 cell insn = *(cell *)return_address;
62 return (insn & 0x1) == 0;
65 /* Defined in assembly */
66 VM_ASM_API void c_to_factor(cell quot);
67 VM_ASM_API void throw_impl(cell quot, stack_frame *rewind);
68 VM_ASM_API void lazy_jit_compile(cell quot);
69 VM_ASM_API void flush_icache(cell start, cell len);
71 VM_ASM_API void set_callstack(stack_frame *to,
74 void *(*memcpy)(void*,const void*, size_t));