/* CPU identification string: selected by word size. The extraction lost
   the conditional — without it FACTOR_CPU_STRING is defined twice with
   conflicting bodies. FACTOR_64 is the VM's 64-bit build flag. */
#ifdef FACTOR_64
#define FACTOR_CPU_STRING "ppc.64"
#else
#define FACTOR_CPU_STRING "ppc.32"
#endif

/* Usable bottom of a context's callstack segment; the 32-byte gap keeps
   clear of the PowerPC ABI linkage/red-zone area at the segment end. */
#define CALLSTACK_BOTTOM(ctx) (void*)(ctx->callstack_seg->end - 32)
/* In the instruction sequence:

     LOAD32 rN,<immediate>
     B <target>

   the offset from the immediate operand to LOAD32 to the instruction after
   the branch is one instruction, i.e. 4 bytes.
   NOTE(review): the middle of this comment (the example sequence) was
   truncated in extraction and has been reconstructed — confirm against
   the PPC code generator that emits these call sites. */
static const fixnum xt_tail_pic_offset = 4;
20 inline static void check_call_site(cell return_address) {
21 u32 insn = *(u32*)return_address;
22 /* Check that absolute bit is 0 */
23 FACTOR_ASSERT((insn & 0x2) == 0x0);
24 /* Check that instruction is branch */
25 FACTOR_ASSERT((insn >> 26) == 0x12);
/* Mask selecting the LI (branch displacement) field of a PPC I-form
   branch: bits 6..29 of the instruction word; the low two bits are
   always zero because instructions are 4-byte aligned. */
static const u32 b_mask = 0x3fffffc;
30 inline static void* get_call_target(cell return_address) {
32 check_call_site(return_address);
34 u32 insn = *(u32*)return_address;
35 u32 unsigned_addr = (insn & b_mask);
36 s32 signed_addr = (s32)(unsigned_addr << 6) >> 6;
37 return (void*)(signed_addr + return_address);
40 inline static void set_call_target(cell return_address, void* target) {
42 check_call_site(return_address);
44 u32 insn = *(u32*)return_address;
46 fixnum relative_address = ((cell) target - return_address);
47 insn = ((insn & ~b_mask) | (relative_address & b_mask));
48 *(u32*)return_address = insn;
50 /* Flush the cache line containing the call we just patched */
51 __asm__ __volatile__("icbi 0, %0\n"
52 "sync\n" ::"r"(return_address)
56 inline static bool tail_call_site_p(cell return_address) {
58 u32 insn = *(u32*)return_address;
59 return (insn & 0x1) == 0;
62 inline static unsigned int fpu_status(unsigned int status) {
65 if (status & 0x20000000)
66 r |= FP_TRAP_INVALID_OPERATION;
67 if (status & 0x10000000)
68 r |= FP_TRAP_OVERFLOW;
69 if (status & 0x08000000)
70 r |= FP_TRAP_UNDERFLOW;
71 if (status & 0x04000000)
72 r |= FP_TRAP_ZERO_DIVIDE;
73 if (status & 0x02000000)
/* Defined in assembly */
/* Invalidates the instruction cache over [start, start+len) so freshly
   written or patched machine code is fetched rather than stale lines. */
VM_C_API void flush_icache(cell start, cell len);