1 |
dpavlin |
7 |
/* |
2 |
|
|
* Cisco router simulation platform. |
3 |
|
|
* Copyright (c) 2005,2006 Christophe Fillot (cf@utc.fr) |
4 |
|
|
*/ |
5 |
|
|
|
6 |
|
|
#include <stdio.h> |
7 |
|
|
#include <stdlib.h> |
8 |
|
|
#include <unistd.h> |
9 |
|
|
#include <string.h> |
10 |
|
|
#include <sys/types.h> |
11 |
|
|
#include <sys/stat.h> |
12 |
|
|
#include <sys/mman.h> |
13 |
|
|
#include <fcntl.h> |
14 |
|
|
|
15 |
|
|
#include "cpu.h" |
16 |
|
|
#include "ppc32_jit.h" |
17 |
|
|
#include "ppc32_amd64_trans.h" |
18 |
|
|
#include "memory.h" |
19 |
|
|
|
20 |
|
|
/* Macros for CPU structure access */
/* Byte offset of GPR[reg] inside cpu_ppc_t (for membase addressing) */
#define REG_OFFSET(reg)   (OFFSET(cpu_ppc_t,gpr[(reg)]))
/* Byte offset of the memory-op handler function pointer for opcode 'op' */
#define MEMOP_OFFSET(op)  (OFFSET(cpu_ppc_t,mem_op_fn[(op)]))

/* Declare the signature of a per-instruction JIT emitter */
#define DECLARE_INSN(name) \
   static int ppc32_emit_##name(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b, \
                                ppc_insn_t insn)
27 |
|
|
|
28 |
dpavlin |
8 |
/* EFLAGS to Condition Register (CR) field - signed */
/*
 * Indexed by the low byte of x86 EFLAGS after a signed compare.
 * Relevant EFLAGS bits: ZF = bit 6, SF = bit 7 (for equal operands after
 * CMP, SF tracks the sign of the difference).
 * Resulting CR field bits used here: 0x08 = LT, 0x04 = GT, 0x02 = EQ.
 *   indices 0x00-0x3f (SF=0,ZF=0) -> 0x04 (GT)
 *   indices 0x40-0x7f (SF=0,ZF=1) -> 0x02 (EQ)
 *   indices 0x80-0xbf (SF=1,ZF=0) -> 0x08 (LT)
 *   indices 0xc0-0xff (SF=1,ZF=1) -> 0x0a (SF&ZF cannot both result from CMP)
 */
static m_uint32_t eflags_to_cr_signed[256] = {
   0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
   0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
   0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
   0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
   0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
   0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
   0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
   0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
   0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
   0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
   0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
   0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
   0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
   0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
   0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
   0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
   0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
   0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
   0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
   0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
   0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
   0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
   0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
   0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
   0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a,
   0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a,
   0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a,
   0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a,
   0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a,
   0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a,
   0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a,
   0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a,
};
63 |
|
|
|
64 |
|
|
/* EFLAGS to Condition Register (CR) field - unsigned */
/*
 * Indexed by the low byte of x86 EFLAGS after an unsigned compare.
 * Relevant EFLAGS bits: CF = bit 0 (below), ZF = bit 6 (equal).
 * CR field bits: 0x08 = LT, 0x04 = GT, 0x02 = EQ.
 *   CF=0,ZF=0 -> 0x04 (GT/above)    CF=1,ZF=0 -> 0x08 (LT/below)
 *   CF=0,ZF=1 -> 0x02 (EQ)          CF=1,ZF=1 -> 0x0a (cannot occur after CMP)
 */
static m_uint32_t eflags_to_cr_unsigned[256] = {
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
};
99 |
|
|
|
100 |
dpavlin |
7 |
/* Load a 32 bit immediate value */ |
101 |
|
|
static inline void ppc32_load_imm(ppc32_jit_tcb_t *b,u_int reg,m_uint32_t val) |
102 |
|
|
{ |
103 |
|
|
if (val) |
104 |
|
|
amd64_mov_reg_imm_size(b->jit_ptr,reg,val,4); |
105 |
|
|
else |
106 |
|
|
amd64_alu_reg_reg_size(b->jit_ptr,X86_XOR,reg,reg,4); |
107 |
|
|
} |
108 |
|
|
|
109 |
|
|
/* Set the Instruction Address (IA) register */
/* Emits a 32-bit store of 'new_ia' into cpu->ia (cpu pointer lives in %r15) */
void ppc32_set_ia(ppc32_jit_tcb_t *b,m_uint32_t new_ia)
{
   amd64_mov_membase_imm(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ia),new_ia,4);
}
114 |
|
|
|
115 |
|
|
/* Set the Link Register (LR) */
/* Emits a 32-bit store of 'new_lr' into cpu->lr (cpu pointer lives in %r15) */
void ppc32_set_lr(ppc32_jit_tcb_t *b,m_uint32_t new_lr)
{
   amd64_mov_membase_imm(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,lr),new_lr,4);
}
120 |
|
|
|
121 |
dpavlin |
8 |
/*
 * Try to branch directly to the specified JIT block without returning to
 * main loop.
 *
 * Emits code that looks up the translated block for 'new_ia' in the CPU's
 * exec_blk_map hash, verifies it covers the right page, and jumps straight
 * into the translated instruction if available.  On any miss, it falls back
 * to setting IA and returning to the main loop via the epilog.
 */
static void ppc32_try_direct_far_jump(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b,
                                      m_uint32_t new_ia)
{
   m_uint32_t new_page,ia_hash,ia_offset;
   u_char *test1,*test2,*test3;

   new_page = new_ia & PPC32_MIN_PAGE_MASK;
   ia_offset = (new_ia & PPC32_MIN_PAGE_IMASK) >> 2;   /* insn index in page */
   ia_hash = ppc32_jit_get_ia_hash(new_ia);

   /* RBX = exec_blk_map, then RDX = JIT block for this hash bucket */
   amd64_mov_reg_membase(b->jit_ptr,AMD64_RBX,
                         AMD64_R15,OFFSET(cpu_ppc_t,exec_blk_map),8);
   amd64_mov_reg_membase(b->jit_ptr,AMD64_RDX,
                         AMD64_RBX,ia_hash*sizeof(void *),8);

   /* no JIT block found ? */
   amd64_test_reg_reg(b->jit_ptr,AMD64_RDX,AMD64_RDX);
   test1 = b->jit_ptr;
   amd64_branch8(b->jit_ptr, X86_CC_Z, 0, 1);

   /* Check block IA: does this block really start at new_page ? */
   ppc32_load_imm(b,AMD64_RAX,new_page);
   amd64_alu_reg_membase_size(b->jit_ptr,X86_CMP,X86_EAX,AMD64_RDX,
                              OFFSET(ppc32_jit_tcb_t,start_ia),4);
   test2 = b->jit_ptr;
   amd64_branch8(b->jit_ptr, X86_CC_NE, 0, 1);

   /* Jump to the code: RBX = jit_insn_ptr[ia_offset] */
   amd64_mov_reg_membase(b->jit_ptr,AMD64_RSI,
                         AMD64_RDX,OFFSET(ppc32_jit_tcb_t,jit_insn_ptr),8);
   amd64_mov_reg_membase(b->jit_ptr,AMD64_RBX,
                         AMD64_RSI,ia_offset * sizeof(void *),8);

   /* NULL insn pointer means not translated yet: fall through */
   amd64_test_reg_reg(b->jit_ptr,AMD64_RBX,AMD64_RBX);
   test3 = b->jit_ptr;
   amd64_branch8(b->jit_ptr, X86_CC_Z, 0, 1);
   amd64_jump_reg(b->jit_ptr,AMD64_RBX);

   /* Returns to caller... (all three miss branches land here) */
   amd64_patch(test1,b->jit_ptr);
   amd64_patch(test2,b->jit_ptr);
   amd64_patch(test3,b->jit_ptr);

   ppc32_set_ia(b,new_ia);
   ppc32_jit_tcb_push_epilog(b);
}
172 |
|
|
|
173 |
dpavlin |
7 |
/* Set Jump */
/*
 * Emit a jump to 'new_ia'.  If the target lies inside the current block a
 * direct (possibly patched-later) jump is emitted; otherwise either the
 * direct far-jump optimization is used or control returns to the main loop.
 */
static void ppc32_set_jump(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b,
                           m_uint32_t new_ia,int local_jump)
{
   int return_to_caller = FALSE;
   u_char *jump_ptr;

#if 0
   if (cpu->sym_trace && !local_jump)
      return_to_caller = TRUE;
#endif

   if (!return_to_caller && ppc32_jit_tcb_local_addr(b,new_ia,&jump_ptr)) {
      if (jump_ptr) {
         /* target already translated: jump straight to it */
         amd64_jump_code(b->jit_ptr,jump_ptr);
      } else {
         /* target not yet emitted: record a patch site and emit a stub */
         ppc32_jit_tcb_record_patch(b,b->jit_ptr,new_ia);
         amd64_jump32(b->jit_ptr,0);
      }
   } else {
      if (cpu->exec_blk_direct_jump) {
         /* Block lookup optimization */
         ppc32_try_direct_far_jump(cpu,b,new_ia);
      } else {
         ppc32_set_ia(b,new_ia);
         ppc32_jit_tcb_push_epilog(b);
      }
   }
}
202 |
|
|
|
203 |
|
|
/* Load a GPR into the specified host register */
/* 32-bit load from cpu->gpr[ppc_reg] (cpu pointer in %r15) */
static forced_inline void ppc32_load_gpr(ppc32_jit_tcb_t *b,u_int host_reg,
                                         u_int ppc_reg)
{
   amd64_mov_reg_membase(b->jit_ptr,host_reg,AMD64_R15,REG_OFFSET(ppc_reg),4);
}
209 |
|
|
|
210 |
|
|
/* Store contents for a host register into a GPR register */
/* 32-bit store into cpu->gpr[ppc_reg] (cpu pointer in %r15) */
static forced_inline void ppc32_store_gpr(ppc32_jit_tcb_t *b,u_int ppc_reg,
                                          u_int host_reg)
{
   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,REG_OFFSET(ppc_reg),host_reg,4);
}
216 |
|
|
|
217 |
|
|
/* Apply an ALU operation on a GPR register and a host register */
/* host_reg = host_reg <op> cpu->gpr[ppc_reg]; sets x86 EFLAGS accordingly */
static forced_inline void ppc32_alu_gpr(ppc32_jit_tcb_t *b,u_int op,
                                        u_int host_reg,u_int ppc_reg)
{
   amd64_alu_reg_membase_size(b->jit_ptr,op,host_reg,
                              AMD64_R15,REG_OFFSET(ppc_reg),4);
}
224 |
|
|
|
225 |
|
|
/*
 * Update CR from %eflags
 * %rax, %rdx, %rsi are modified.
 *
 * Must be emitted immediately after the instruction whose EFLAGS are to be
 * captured (PUSHF is the very first emitted insn).  Translates the EFLAGS
 * low byte to a CR field value through one of the 256-entry lookup tables.
 */
static void ppc32_update_cr(ppc32_jit_tcb_t *b,int field,int is_signed)
{
   /* Get status bits from EFLAGS */
   amd64_pushfd_size(b->jit_ptr,8);
   amd64_pop_reg(b->jit_ptr,AMD64_RAX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,0xFF);

   /* RDX = address of the proper EFLAGS->CR translation table */
   if (is_signed)
      amd64_mov_reg_imm_size(b->jit_ptr,AMD64_RDX,eflags_to_cr_signed,8);
   else
      amd64_mov_reg_imm_size(b->jit_ptr,AMD64_RDX,eflags_to_cr_unsigned,8);

   /* RAX = table[RAX] (scale 2^2 = 4-byte entries) */
   amd64_mov_reg_memindex(b->jit_ptr,AMD64_RAX,AMD64_RDX,0,AMD64_RAX,2,4);

#if 0
   /* Check XER Summary of Overflow and report it */
   amd64_mov_reg_membase(b->jit_ptr,AMD64_RCX,
                         AMD64_R15,OFFSET(cpu_ppc_t,xer),4);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RCX,PPC32_XER_SO);
   amd64_shift_reg_imm(b->jit_ptr,X86_SHR,AMD64_RCX,(field << 2) + 3);
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RDX,AMD64_RCX);
#endif

   /* Store modified CR field */
   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,PPC32_CR_FIELD_OFFSET(field),
                         AMD64_RAX,4);
}
256 |
|
|
|
257 |
|
|
/*
 * Update CR0 from %eflags
 * %eax, %ecx, %edx, %esi are modified.
 *
 * Convenience wrapper: CR0 is always updated with signed semantics (used by
 * the 'Rc=1' forms of the arithmetic/logical instructions).
 */
static void ppc32_update_cr0(ppc32_jit_tcb_t *b)
{
   ppc32_update_cr(b,0,TRUE);
}
265 |
|
|
|
266 |
|
|
/* Basic C call */
/* Emits an indirect call to C function 'f' through %rcx (clobbers %rcx) */
static forced_inline void ppc32_emit_basic_c_call(ppc32_jit_tcb_t *b,void *f)
{
   amd64_mov_reg_imm(b->jit_ptr,AMD64_RCX,f);
   amd64_call_reg(b->jit_ptr,AMD64_RCX);
}
272 |
|
|
|
273 |
|
|
/* Emit a simple call to a C function without any parameter */
/* Synchronizes cpu->ia to the current instruction first, so the callee
   sees an accurate PC (ppc_trans_pos has already been advanced, hence -1) */
static void ppc32_emit_c_call(ppc32_jit_tcb_t *b,void *f)
{
   ppc32_set_ia(b,b->start_ia+((b->ppc_trans_pos-1)<<2));
   ppc32_emit_basic_c_call(b,f);
}
279 |
|
|
|
280 |
|
|
/* Memory operation */
/*
 * Emit a generic load/store through the CPU's mem_op_fn[op] handler.
 *   op     : memory operation index (selects the handler)
 *   base   : base GPR number (0 means no base register, unless update)
 *   offset : 16-bit displacement, sign-extended
 *   target : GPR to load into / store from (passed to the handler)
 *   update : non-zero for the "with update" forms (base := EA afterwards)
 * Calling convention of the handler: RDI=cpu, RSI=vaddr, RDX=target reg.
 * A non-zero return value from the handler signals an exception.
 */
static void ppc32_emit_memop(ppc32_jit_tcb_t *b,int op,int base,int offset,
                             int target,int update)
{
   m_uint32_t val = sign_extend(offset,16);
   u_char *test1;

   /* Save PC for exception handling */
   ppc32_set_ia(b,b->start_ia+((b->ppc_trans_pos-1)<<2));

   /* RSI = sign-extended offset */
   ppc32_load_imm(b,AMD64_RSI,val);

   /* RSI = GPR[base] + sign-extended offset */
   if (update || (base != 0))
      ppc32_alu_gpr(b,X86_ADD,AMD64_RSI,base);

   /* keep the effective address in R14 so it survives the C call */
   if (update)
      amd64_mov_reg_reg(b->jit_ptr,AMD64_R14,AMD64_RSI,4);

   /* RDX = target register */
   amd64_mov_reg_imm(b->jit_ptr,AMD64_RDX,target);

   /* RDI = CPU instance pointer */
   amd64_mov_reg_reg(b->jit_ptr,AMD64_RDI,AMD64_R15,8);

   /* Call memory function */
   amd64_call_membase(b->jit_ptr,AMD64_R15,MEMOP_OFFSET(op));

   /* Exception ? return to the main loop via the epilog */
   amd64_test_reg_reg_size(b->jit_ptr,AMD64_RAX,AMD64_RAX,4);
   test1 = b->jit_ptr;
   amd64_branch8(b->jit_ptr, X86_CC_Z, 0, 1);
   ppc32_jit_tcb_push_epilog(b);
   amd64_patch(test1,b->jit_ptr);

   /* update form: write the effective address back to the base GPR */
   if (update)
      ppc32_store_gpr(b,base,AMD64_R14);
}
319 |
|
|
|
320 |
|
|
/* Memory operation (indexed) */
/*
 * Same as ppc32_emit_memop() but the effective address is GPR[ra]+GPR[rb]
 * (the X-form instructions) instead of base+displacement.
 */
static void ppc32_emit_memop_idx(ppc32_jit_tcb_t *b,int op,int ra,int rb,
                                 int target,int update)
{
   u_char *test1;

   /* Save PC for exception handling */
   ppc32_set_ia(b,b->start_ia+((b->ppc_trans_pos-1)<<2));

   /* RSI = $rb */
   ppc32_load_gpr(b,AMD64_RSI,rb);

   /* RSI = GPR[ra] + GPR[rb] (ra==0 means literal zero unless update form) */
   if (update || (ra != 0))
      ppc32_alu_gpr(b,X86_ADD,AMD64_RSI,ra);

   /* keep the effective address in R14 so it survives the C call */
   if (update)
      amd64_mov_reg_reg(b->jit_ptr,AMD64_R14,AMD64_RSI,4);

   /* RDX = target register */
   amd64_mov_reg_imm(b->jit_ptr,AMD64_RDX,target);

   /* RDI = CPU instance pointer */
   amd64_mov_reg_reg(b->jit_ptr,AMD64_RDI,AMD64_R15,8);

   /* Call memory function */
   amd64_call_membase(b->jit_ptr,AMD64_R15,MEMOP_OFFSET(op));

   /* Exception ? return to the main loop via the epilog */
   amd64_test_reg_reg_size(b->jit_ptr,AMD64_RAX,AMD64_RAX,4);
   test1 = b->jit_ptr;
   amd64_branch8(b->jit_ptr, X86_CC_Z, 0, 1);
   ppc32_jit_tcb_push_epilog(b);
   amd64_patch(test1,b->jit_ptr);

   /* update form: write the effective address back to $ra */
   if (update)
      ppc32_store_gpr(b,ra,AMD64_R14);
}
358 |
|
|
|
359 |
|
|
/* Fast-path access generator: emits the actual load/store once the host
   page address is in RBX and the page offset in RSI */
typedef void (*memop_fast_access)(ppc32_jit_tcb_t *b,int target);

/* Fast LBZ */
/* Load byte and zero-extend: GPR[target] = *(u8 *)(RBX + RSI) */
static void ppc32_memop_fast_lbz(ppc32_jit_tcb_t *b,int target)
{
   amd64_clear_reg(b->jit_ptr,AMD64_RCX);
   amd64_mov_reg_memindex(b->jit_ptr,AMD64_RCX,AMD64_RBX,0,AMD64_RSI,0,1);
   ppc32_store_gpr(b,target,AMD64_RCX);
}
368 |
|
|
|
369 |
|
|
/* Fast STB */
/* Store byte: *(u8 *)(RBX + RSI) = low byte of GPR[target] */
static void ppc32_memop_fast_stb(ppc32_jit_tcb_t *b,int target)
{
   ppc32_load_gpr(b,AMD64_RDX,target);
   amd64_mov_memindex_reg(b->jit_ptr,AMD64_RBX,0,AMD64_RSI,0,AMD64_RDX,1);
}
375 |
|
|
|
376 |
|
|
/* Fast LWZ */
/* Load word with byte-swap (PPC is big-endian, host is little-endian) */
static void ppc32_memop_fast_lwz(ppc32_jit_tcb_t *b,int target)
{
   amd64_mov_reg_memindex(b->jit_ptr,AMD64_RAX,AMD64_RBX,0,AMD64_RSI,0,4);
   amd64_bswap32(b->jit_ptr,AMD64_RAX);
   ppc32_store_gpr(b,target,AMD64_RAX);
}
383 |
|
|
|
384 |
|
|
/* Fast STW */
/* Store word with byte-swap (PPC is big-endian, host is little-endian) */
static void ppc32_memop_fast_stw(ppc32_jit_tcb_t *b,int target)
{
   ppc32_load_gpr(b,AMD64_RDX,target);
   amd64_bswap32(b->jit_ptr,AMD64_RDX);
   amd64_mov_memindex_reg(b->jit_ptr,AMD64_RBX,0,AMD64_RSI,0,AMD64_RDX,4);
}
391 |
|
|
|
392 |
|
|
/* Fast memory operation */
/*
 * Emit an inlined MTS (memory translation system) fast path:
 * hash the virtual address into the data MTS cache, compare the cached
 * virtual page, and perform the access directly on the host page via
 * 'op_handler'.  On cache miss (or COW/exec-protected page for writes),
 * fall back to the generic C handler mem_op_fn[opcode].
 */
static void ppc32_emit_memop_fast(ppc32_jit_tcb_t *b,int write_op,
                                  int opcode,int base,int offset,int target,
                                  memop_fast_access op_handler)
{
   m_uint32_t val = sign_extend(offset,16);
   u_char *test1,*test2,*p_exception,*p_exit;

   test2 = NULL;

   /* RSI = GPR[base] + sign-extended offset */
   ppc32_load_imm(b,AMD64_RSI,val);
   if (base != 0)
      ppc32_alu_gpr(b,X86_ADD,AMD64_RSI,base);

   /* RBX = mts32_entry index */
   amd64_mov_reg_reg_size(b->jit_ptr,X86_EBX,X86_ESI,4);
   amd64_shift_reg_imm_size(b->jit_ptr,X86_SHR,X86_EBX,MTS32_HASH_SHIFT,4);
   amd64_alu_reg_imm_size(b->jit_ptr,X86_AND,X86_EBX,MTS32_HASH_MASK,4);

   /* RCX = mts32 entry */
   amd64_mov_reg_membase(b->jit_ptr,AMD64_RCX,
                         AMD64_R15,
                         OFFSET(cpu_ppc_t,mts_cache[PPC32_MTS_DCACHE]),8);
   amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,5); /* TO FIX */
   amd64_alu_reg_reg(b->jit_ptr,X86_ADD,AMD64_RCX,AMD64_RBX);

   /* Compare virtual page address (EAX = vpage) */
   amd64_mov_reg_reg(b->jit_ptr,X86_EAX,X86_ESI,4);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,X86_EAX,PPC32_MIN_PAGE_MASK);

   amd64_alu_reg_membase_size(b->jit_ptr,X86_CMP,X86_EAX,AMD64_RCX,
                              OFFSET(mts32_entry_t,gvpa),4);
   test1 = b->jit_ptr;
   /* NOTE(review): x86_branch8 used here while the rest of the file uses
      amd64_branch8 — presumably equivalent for a short jcc; confirm against
      the amd64 codegen macros before changing */
   x86_branch8(b->jit_ptr, X86_CC_NZ, 0, 1);

   /* Test if we are writing to a COW page */
   if (write_op) {
      amd64_test_membase_imm_size(b->jit_ptr,
                                  AMD64_RCX,OFFSET(mts32_entry_t,flags),
                                  MTS_FLAG_COW|MTS_FLAG_EXEC,4);
      test2 = b->jit_ptr;
      amd64_branch8(b->jit_ptr, X86_CC_NZ, 0, 1);
   }

   /* ESI = offset in page, RBX = Host Page Address */
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,X86_ESI,PPC32_MIN_PAGE_IMASK);
   amd64_mov_reg_membase(b->jit_ptr,AMD64_RBX,
                         AMD64_RCX,OFFSET(mts32_entry_t,hpa),8);

   /* Memory access */
   op_handler(b,target);

   p_exit = b->jit_ptr;
   amd64_jump8(b->jit_ptr,0);

   /* === Slow lookup === */
   amd64_patch(test1,b->jit_ptr);
   if (test2)
      amd64_patch(test2,b->jit_ptr);

   /* Save IA for exception handling */
   ppc32_set_ia(b,b->start_ia+((b->ppc_trans_pos-1)<<2));

   /* RDX = target register */
   amd64_mov_reg_imm(b->jit_ptr,AMD64_RDX,target);

   /* RDI = CPU instance */
   amd64_mov_reg_reg(b->jit_ptr,AMD64_RDI,AMD64_R15,8);

   /* Call memory access function */
   amd64_call_membase(b->jit_ptr,AMD64_R15,MEMOP_OFFSET(opcode));

   /* Exception ? */
   amd64_test_reg_reg_size(b->jit_ptr,AMD64_RAX,AMD64_RAX,4);
   p_exception = b->jit_ptr;
   amd64_branch8(b->jit_ptr, X86_CC_Z, 0, 1);
   ppc32_jit_tcb_push_epilog(b);

   amd64_patch(p_exit,b->jit_ptr);
   amd64_patch(p_exception,b->jit_ptr);
}
474 |
|
|
|
475 |
dpavlin |
8 |
/* Virtual Breakpoint */
/* Calls ppc32_run_breakpoint(cpu) with the current IA synchronized */
void ppc32_emit_breakpoint(ppc32_jit_tcb_t *b)
{
   amd64_mov_reg_reg(b->jit_ptr,AMD64_RDI,AMD64_R15,8);
   ppc32_emit_c_call(b,ppc32_run_breakpoint);
}
481 |
|
|
|
482 |
dpavlin |
7 |
/* Emit unhandled instruction code */
/*
 * Emits a call into the interpreter (ppc32_exec_single_insn_ext) for an
 * opcode the JIT does not translate; a non-zero return from the interpreter
 * (e.g. a branch/exception) exits the translated block via the epilog.
 */
static int ppc32_emit_unknown(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b,
                              ppc_insn_t opcode)
{
   u_char *test1;

#if 0
   x86_mov_reg_imm(b->jit_ptr,X86_EAX,opcode);
   x86_alu_reg_imm(b->jit_ptr,X86_SUB,X86_ESP,4);
   x86_push_reg(b->jit_ptr,X86_EAX);
   x86_push_reg(b->jit_ptr,X86_EDI);
   ppc32_emit_c_call(b,ppc32_unknown_opcode);
   x86_alu_reg_imm(b->jit_ptr,X86_ADD,X86_ESP,12);
#endif

   /* Fallback to non-JIT mode: RDI=cpu, RSI=raw opcode */
   amd64_mov_reg_reg(b->jit_ptr,AMD64_RDI,AMD64_R15,8);
   amd64_mov_reg_imm(b->jit_ptr,AMD64_RSI,opcode);

   ppc32_emit_c_call(b,ppc32_exec_single_insn_ext);
   amd64_test_reg_reg_size(b->jit_ptr,AMD64_RAX,AMD64_RAX,4);
   test1 = b->jit_ptr;
   amd64_branch8(b->jit_ptr, X86_CC_Z, 0, 1);
   ppc32_jit_tcb_push_epilog(b);

   amd64_patch(test1,b->jit_ptr);
   return(0);
}
510 |
|
|
|
511 |
|
|
/* Increment the number of executed instructions (performance debugging) */
void ppc32_inc_perf_counter(ppc32_jit_tcb_t *b)
{
   amd64_inc_membase(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,perf_counter));
}
516 |
|
|
|
517 |
|
|
/* ======================================================================== */ |
518 |
|
|
|
519 |
|
|
/* BLR - Branch to Link Register */
DECLARE_INSN(BLR)
{
   /* ia = lr */
   amd64_mov_reg_membase(b->jit_ptr,AMD64_RDX,
                         AMD64_R15,OFFSET(cpu_ppc_t,lr),4);
   amd64_mov_membase_reg(b->jit_ptr,
                         AMD64_R15,OFFSET(cpu_ppc_t,ia),AMD64_RDX,4);

   /* set the return address (BLRL form: LK bit set) */
   if (insn & 1)
      ppc32_set_lr(b,b->start_ia + (b->ppc_trans_pos << 2));

   /* indirect target: always exit to the main loop */
   ppc32_jit_tcb_push_epilog(b);
   return(0);
}
534 |
|
|
|
535 |
|
|
/* BCTR - Branch to Count Register */
DECLARE_INSN(BCTR)
{
   /* ia = ctr */
   amd64_mov_reg_membase(b->jit_ptr,AMD64_RDX,
                         AMD64_R15,OFFSET(cpu_ppc_t,ctr),4);
   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ia),
                         AMD64_RDX,4);

   /* set the return address (BCTRL form: LK bit set) */
   if (insn & 1)
      ppc32_set_lr(b,b->start_ia + (b->ppc_trans_pos << 2));

   /* indirect target: always exit to the main loop */
   ppc32_jit_tcb_push_epilog(b);
   return(0);
}
550 |
|
|
|
551 |
|
|
/* MFLR - Move From Link Register */ |
552 |
|
|
DECLARE_INSN(MFLR) |
553 |
|
|
{ |
554 |
|
|
int rd = bits(insn,21,25); |
555 |
|
|
|
556 |
|
|
amd64_mov_reg_membase(b->jit_ptr,AMD64_RDX, |
557 |
|
|
AMD64_R15,OFFSET(cpu_ppc_t,lr),4); |
558 |
|
|
ppc32_store_gpr(b,rd,X86_EDX); |
559 |
|
|
return(0); |
560 |
|
|
} |
561 |
|
|
|
562 |
|
|
/* MTLR - Move To Link Register */ |
563 |
|
|
DECLARE_INSN(MTLR) |
564 |
|
|
{ |
565 |
|
|
int rs = bits(insn,21,25); |
566 |
|
|
|
567 |
|
|
ppc32_load_gpr(b,X86_EDX,rs); |
568 |
|
|
amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,lr), |
569 |
|
|
AMD64_RDX,4); |
570 |
|
|
return(0); |
571 |
|
|
} |
572 |
|
|
|
573 |
|
|
/* MFCTR - Move From Counter Register */
/* GPR[rd] = CTR */
DECLARE_INSN(MFCTR)
{
   int rd = bits(insn,21,25);

   amd64_mov_reg_membase(b->jit_ptr,AMD64_RDX,
                         AMD64_R15,OFFSET(cpu_ppc_t,ctr),4);
   ppc32_store_gpr(b,rd,AMD64_RDX);
   return(0);
}
583 |
|
|
|
584 |
|
|
/* MTCTR - Move To Counter Register */
/* CTR = GPR[rs] */
DECLARE_INSN(MTCTR)
{
   int rs = bits(insn,21,25);

   ppc32_load_gpr(b,AMD64_RDX,rs);
   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ctr),
                         AMD64_RDX,4);
   return(0);
}
594 |
|
|
|
595 |
|
|
/* MFTBU - Move from Time Base (Up) */
/* GPR[rd] = upper 32 bits of the 64-bit time base (tb is stored
   little-endian on the host, so the high word is at offset +4) */
DECLARE_INSN(MFTBU)
{
   int rd = bits(insn,21,25);

   amd64_mov_reg_membase(b->jit_ptr,AMD64_RDX,
                         AMD64_R15,OFFSET(cpu_ppc_t,tb)+4,4);
   ppc32_store_gpr(b,rd,AMD64_RDX);
   return(0);
}
605 |
|
|
|
606 |
|
|
/* Amount added to the time base on each MFTBL (coarse approximation) */
#define PPC32_TB_INCREMENT  50

/* MFTBL - Move from Time Base (Lo) */
/* GPR[rd] = low 32 bits of the time base; the time base is advanced here
   rather than on every instruction */
DECLARE_INSN(MFTBL)
{
   int rd = bits(insn,21,25);

   /* Increment the time base register */
   amd64_mov_reg_membase(b->jit_ptr,AMD64_RDX,
                         AMD64_R15,OFFSET(cpu_ppc_t,tb),8);
   amd64_alu_reg_imm(b->jit_ptr,X86_ADD,AMD64_RDX,PPC32_TB_INCREMENT);
   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,tb),
                         AMD64_RDX,8);

   /* store_gpr writes only the low 32 bits of RDX */
   ppc32_store_gpr(b,rd,AMD64_RDX);
   return(0);
}
623 |
|
|
|
624 |
|
|
/* ADD */
DECLARE_INSN(ADD)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* $rd = $ra + $rb */
   ppc32_load_gpr(b,AMD64_RBX,ra);
   ppc32_alu_gpr(b,X86_ADD,AMD64_RBX,rb);
   ppc32_store_gpr(b,rd,AMD64_RBX);

   /* Rc=1: CR0 from the ADD's EFLAGS (the MOV above preserves flags) */
   if (insn & 1)
      ppc32_update_cr0(b);

   return(0);
}
641 |
|
|
|
642 |
|
|
/* ADDC */
DECLARE_INSN(ADDC)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* $rd = $ra + $rb */
   ppc32_load_gpr(b,AMD64_RBX,ra);
   ppc32_alu_gpr(b,X86_ADD,AMD64_RBX,rb);
   ppc32_store_gpr(b,rd,AMD64_RBX);

   /* store the carry flag into XER[CA] */
   amd64_set_reg(b->jit_ptr,X86_CC_C,AMD64_RAX,FALSE);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,0x1);
   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         AMD64_RAX,4);

   /* Rc=1: re-test the result since the flag manipulation above
      clobbered the ADD's EFLAGS */
   if (insn & 1) {
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RBX,AMD64_RBX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
667 |
|
|
|
668 |
|
|
/* ADDE - Add Extended */
/* $rd = $ra + $rb + XER[CA]; carry out of either addition sets XER[CA] */
DECLARE_INSN(ADDE)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* $ra + carry */
   ppc32_load_gpr(b,AMD64_RSI,ra);
   amd64_alu_reg_membase_size(b->jit_ptr,X86_ADD,AMD64_RSI,
                              AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),4);
   amd64_set_reg(b->jit_ptr,X86_CC_C,AMD64_RAX,FALSE);   /* carry #1 */

   /* add $rb */
   ppc32_alu_gpr(b,X86_ADD,AMD64_RSI,rb);
   amd64_set_reg(b->jit_ptr,X86_CC_C,AMD64_RCX,FALSE);   /* carry #2 */

   ppc32_store_gpr(b,rd,AMD64_RSI);

   /* store the carry flag (carry #1 OR carry #2) */
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RAX,AMD64_RCX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,0x1);

   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         AMD64_RAX,4);

   /* update cr0 */
   if (insn & 1) {
      /* NOTE(review): x86_test_reg_reg used with AMD64_* constants while
         the rest of the file uses amd64_test_reg_reg(_size) — presumably
         equivalent for these low registers; confirm before changing */
      x86_test_reg_reg(b->jit_ptr,AMD64_RSI,AMD64_RSI);
      ppc32_update_cr0(b);
   }

   return(0);
}
702 |
|
|
|
703 |
|
|
/* ADDI - ADD Immediate */
/* $rd = ($ra ? GPR[ra] : 0) + sign_extend(imm); never touches CR */
DECLARE_INSN(ADDI)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int imm = bits(insn,0,15);
   m_uint32_t tmp = sign_extend_32(imm,16);

   ppc32_load_imm(b,AMD64_RBX,tmp);

   /* ra == 0 means literal zero, not GPR[0] (li form) */
   if (ra != 0)
      amd64_alu_reg_membase_size(b->jit_ptr,X86_ADD,AMD64_RBX,
                                 AMD64_R15,REG_OFFSET(ra),4);

   ppc32_store_gpr(b,rd,AMD64_RBX);
   return(0);
}
720 |
|
|
|
721 |
|
|
/* ADDIC - ADD Immediate with Carry */
/* $rd = $ra + sign_extend(imm); XER[CA] = carry out (MOVs preserve flags) */
DECLARE_INSN(ADDIC)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int imm = bits(insn,0,15);
   m_uint32_t tmp = sign_extend_32(imm,16);

   ppc32_load_imm(b,AMD64_RAX,tmp);
   ppc32_alu_gpr(b,X86_ADD,AMD64_RAX,ra);
   ppc32_store_gpr(b,rd,AMD64_RAX);
   amd64_set_membase_size(b->jit_ptr,X86_CC_C,
                          AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                          FALSE,4);
   return(0);
}
737 |
|
|
|
738 |
|
|
/* ADDIC. */
/* Same as ADDIC but always updates CR0 (the '.' form) */
DECLARE_INSN(ADDIC_dot)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int imm = bits(insn,0,15);
   m_uint32_t tmp = sign_extend_32(imm,16);

   ppc32_load_imm(b,AMD64_RAX,tmp);
   ppc32_alu_gpr(b,X86_ADD,AMD64_RAX,ra);
   ppc32_store_gpr(b,rd,AMD64_RAX);
   amd64_set_membase_size(b->jit_ptr,X86_CC_C,
                          AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                          FALSE,4);

   /* re-test the result: SETcc above clobbered the ADD's EFLAGS */
   amd64_test_reg_reg_size(b->jit_ptr,AMD64_RAX,AMD64_RAX,4);
   ppc32_update_cr0(b);
   return(0);
}
757 |
|
|
|
758 |
|
|
/* ADDIS - ADD Immediate Shifted */
/* $rd = ($ra ? GPR[ra] : 0) + (imm << 16); never touches CR */
DECLARE_INSN(ADDIS)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint32_t imm = bits(insn,0,15);

   ppc32_load_imm(b,AMD64_RBX,imm << 16);

   /* ra == 0 means literal zero, not GPR[0] (lis form) */
   if (ra != 0)
      amd64_alu_reg_membase_size(b->jit_ptr,X86_ADD,AMD64_RBX,
                                 AMD64_R15,REG_OFFSET(ra),4);

   ppc32_store_gpr(b,rd,AMD64_RBX);
   return(0);
}
774 |
|
|
|
775 |
|
|
/* AND */
/* $ra = $rs & $rb (note: rs is destination-field encoded, ra is target) */
DECLARE_INSN(AND)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   ppc32_load_gpr(b,AMD64_RBX,rs);
   ppc32_alu_gpr(b,X86_AND,AMD64_RBX,rb);
   ppc32_store_gpr(b,ra,AMD64_RBX);

   /* Rc=1: CR0 from the AND's EFLAGS (the MOV above preserves flags) */
   if (insn & 1)
      ppc32_update_cr0(b);

   return(0);
}
791 |
|
|
|
792 |
|
|
/* ANDC */
DECLARE_INSN(ANDC)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* $ra = $rs & ~$rb */
   ppc32_load_gpr(b,AMD64_RBX,rb);
   /* NOTE(review): x86_not_reg used with an AMD64_* constant while the rest
      of the file uses amd64_* emitters — presumably equivalent for RBX;
      confirm against the codegen macros before changing */
   x86_not_reg(b->jit_ptr,AMD64_RBX);
   ppc32_alu_gpr(b,X86_AND,AMD64_RBX,rs);
   ppc32_store_gpr(b,ra,AMD64_RBX);

   /* Rc=1: CR0 from the AND's EFLAGS */
   if (insn & 1)
      ppc32_update_cr0(b);

   return(0);
}
810 |
|
|
|
811 |
|
|
/* AND Immediate */
/* andi. always updates CR0 (there is no non-record form) */
DECLARE_INSN(ANDI)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t imm = bits(insn,0,15);

   /* $ra = $rs & imm */
   ppc32_load_imm(b,AMD64_RBX,imm);
   ppc32_alu_gpr(b,X86_AND,AMD64_RBX,rs);
   ppc32_store_gpr(b,ra,AMD64_RBX);

   ppc32_update_cr0(b);
   return(0);
}
826 |
|
|
|
827 |
|
|
/* AND Immediate Shifted */
/* andis. always updates CR0 (there is no non-record form) */
DECLARE_INSN(ANDIS)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint32_t imm = bits(insn,0,15);

   /* $ra = $rs & (imm << 16) */
   ppc32_load_imm(b,AMD64_RBX,imm << 16);
   ppc32_alu_gpr(b,X86_AND,AMD64_RBX,rs);
   ppc32_store_gpr(b,ra,AMD64_RBX);

   ppc32_update_cr0(b);
   return(0);
}
842 |
|
|
|
843 |
|
|
/* B - Branch: unconditional relative branch, no link */
DECLARE_INSN(B)
{
   m_uint32_t offset = bits(insn,2,25);   /* LI field (word displacement) */
   m_uint64_t new_ia;

   /* compute the new ia: address of this instruction plus the
      sign-extended 26-bit byte displacement */
   new_ia = b->start_ia + ((b->ppc_trans_pos-1) << 2);
   new_ia += sign_extend(offset << 2,26);
   ppc32_set_jump(cpu,b,new_ia,1);
   return(0);
}
855 |
|
|
|
856 |
|
|
/* BA - Branch Absolute: target is the sign-extended displacement itself */
DECLARE_INSN(BA)
{
   m_uint32_t offset = bits(insn,2,25);   /* LI field (word displacement) */
   m_uint64_t new_ia;

   /* compute the new ia (absolute, not relative to current ia) */
   new_ia = sign_extend(offset << 2,26);
   ppc32_set_jump(cpu,b,new_ia,1);
   return(0);
}
867 |
|
|
|
868 |
|
|
/* BL - Branch and Link: relative branch, return address stored in LR */
DECLARE_INSN(BL)
{
   m_uint32_t offset = bits(insn,2,25);   /* LI field (word displacement) */
   m_uint64_t new_ia;

   /* compute the new ia: address of this instruction plus the
      sign-extended 26-bit byte displacement */
   new_ia = b->start_ia + ((b->ppc_trans_pos-1) << 2);
   new_ia += sign_extend(offset << 2,26);

   /* set the return address (address of the next instruction) */
   ppc32_set_lr(b,b->start_ia + (b->ppc_trans_pos << 2));

   ppc32_set_jump(cpu,b,new_ia,1);
   return(0);
}
884 |
|
|
|
885 |
|
|
/* BLA - Branch and Link Absolute: absolute target, return address in LR */
DECLARE_INSN(BLA)
{
   m_uint32_t offset = bits(insn,2,25);   /* LI field (word displacement) */
   m_uint64_t new_ia;

   /* compute the new ia (absolute) */
   new_ia = sign_extend(offset << 2,26);

   /* set the return address (address of the next instruction) */
   ppc32_set_lr(b,b->start_ia + (b->ppc_trans_pos << 2));

   ppc32_set_jump(cpu,b,new_ia,1);
   return(0);
}
900 |
|
|
|
901 |
|
|
/* BC - Branch Conditional (Condition Check only)
 *
 * Fast path for BC forms where BO says "test the CR bit only"
 * (no CTR decrement). Emits a TEST on the CR field bit and a
 * conditional jump, optimized for same-page targets.
 */
DECLARE_INSN(BCC)
{
   int bo = bits(insn,21,25);   /* BO: branch options */
   int bi = bits(insn,16,20);   /* BI: CR bit to test */
   int bd = bits(insn,2,15);    /* BD: word displacement */
   u_int cr_field,cr_bit;
   m_uint32_t new_ia;
   u_char *jump_ptr;
   int local_jump;
   int cond;

   /* Get the wanted value for the condition bit (BO[1]: branch if true) */
   cond = (bo >> 3) & 0x1;

   /* Set the return address (LK bit) */
   if (insn & 1)
      ppc32_set_lr(b,b->start_ia + (b->ppc_trans_pos << 2));

   /* Compute the new ia (AA bit clear => relative to this instruction) */
   new_ia = sign_extend_32(bd << 2,16);
   if (!(insn & 0x02))
      new_ia += b->start_ia + ((b->ppc_trans_pos-1) << 2);

   /* Test the condition bit */
   cr_field = ppc32_get_cr_field(bi);
   cr_bit = ppc32_get_cr_bit(bi);

   amd64_test_membase_imm_size(b->jit_ptr,
                               AMD64_R15,PPC32_CR_FIELD_OFFSET(cr_field),
                               (1 << cr_bit),4);

   local_jump = ppc32_jit_tcb_local_addr(b,new_ia,&jump_ptr);

   /*
    * Optimize the jump, depending if the destination is in the same
    * page or not.
    */
   if (local_jump) {
      if (jump_ptr) {
         /* target already translated: jump straight to host code */
         amd64_branch(b->jit_ptr,(cond) ? X86_CC_NZ : X86_CC_Z,jump_ptr,FALSE);
      } else {
         /* target not yet translated: record for later patching */
         ppc32_jit_tcb_record_patch(b,b->jit_ptr,new_ia);
         amd64_branch32(b->jit_ptr,(cond) ? X86_CC_NZ : X86_CC_Z,0,FALSE);
      }
   } else {
      /* out-of-page: invert the condition to skip over the slow jump */
      jump_ptr = b->jit_ptr;
      amd64_branch32(b->jit_ptr,(cond) ? X86_CC_Z : X86_CC_NZ,0,FALSE);
      ppc32_set_jump(cpu,b,new_ia,TRUE);
      amd64_patch(jump_ptr,b->jit_ptr);
   }

   return(0);
}
955 |
|
|
|
956 |
|
|
/* BC - Branch Conditional (general form)
 *
 * Handles both the CTR-decrement condition (BO[2] clear) and the CR-bit
 * condition (BO[0] clear). %rax accumulates the combined take-branch
 * predicate (1 = taken), AND-ed with each enabled sub-condition.
 */
DECLARE_INSN(BC)
{
   int bo = bits(insn,21,25);   /* BO: branch options */
   int bi = bits(insn,16,20);   /* BI: CR bit to test */
   int bd = bits(insn,2,15);    /* BD: word displacement */
   u_int cr_field,cr_bit;
   m_uint32_t new_ia;
   u_char *jump_ptr;
   int local_jump;
   int cond,ctr;

   /* Get the wanted value for the condition bit and CTR value */
   cond = (bo >> 3) & 0x1;
   ctr  = (bo >> 1) & 0x1;

   /* Set the return address (LK bit) */
   if (insn & 1)
      ppc32_set_lr(b,b->start_ia + (b->ppc_trans_pos << 2));

   /* Compute the new ia (AA bit clear => relative to this instruction) */
   new_ia = sign_extend_32(bd << 2,16);
   if (!(insn & 0x02))
      new_ia += b->start_ia + ((b->ppc_trans_pos-1) << 2);

   /* %rax = 1: assume branch taken until a sub-condition clears it */
   ppc32_load_imm(b,AMD64_RAX,1);

   /* Decrement the count register (BO[2] clear) and fold the
      "CTR == 0" / "CTR != 0" test into %rax */
   if (!(bo & 0x04)) {
      amd64_dec_membase_size(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ctr),4);
      amd64_set_reg(b->jit_ptr,(ctr) ? X86_CC_Z : X86_CC_NZ,AMD64_RBX,FALSE);
      amd64_alu_reg_reg(b->jit_ptr,X86_AND,AMD64_RAX,AMD64_RBX);
   }

   /* Test the condition bit (BO[0] clear) and fold into %rax */
   if (!((bo >> 4) & 0x01)) {
      cr_field = ppc32_get_cr_field(bi);
      cr_bit = ppc32_get_cr_bit(bi);

      amd64_test_membase_imm_size(b->jit_ptr,
                                  AMD64_R15,PPC32_CR_FIELD_OFFSET(cr_field),
                                  (1 << cr_bit),4);

      amd64_set_reg(b->jit_ptr,(cond) ? X86_CC_NZ : X86_CC_Z,AMD64_RCX,FALSE);
      amd64_alu_reg_reg(b->jit_ptr,X86_AND,AMD64_RAX,AMD64_RCX);
   }

   /* sets ZF from the final predicate for the branches below */
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,0x01);

   local_jump = ppc32_jit_tcb_local_addr(b,new_ia,&jump_ptr);

   /*
    * Optimize the jump, depending if the destination is in the same
    * page or not.
    */
   if (local_jump) {
      if (jump_ptr) {
         /* target already translated: jump straight to host code */
         amd64_branch(b->jit_ptr,X86_CC_NZ,jump_ptr,FALSE);
      } else {
         /* target not yet translated: record for later patching */
         ppc32_jit_tcb_record_patch(b,b->jit_ptr,new_ia);
         amd64_branch32(b->jit_ptr,X86_CC_NZ,0,FALSE);
      }
   } else {
      /* out-of-page: skip the slow jump when the predicate is 0 */
      jump_ptr = b->jit_ptr;
      amd64_branch32(b->jit_ptr,X86_CC_Z,0,FALSE);
      ppc32_set_jump(cpu,b,new_ia,TRUE);
      amd64_patch(jump_ptr,b->jit_ptr);
   }

   return(0);
}
1027 |
|
|
|
1028 |
|
|
/* BCLR - Branch Conditional to Link Register
 *
 * Same predicate construction as BC (%rax accumulates taken/not-taken),
 * but the target comes from LR (masked to a word boundary) and taking
 * the branch exits the translated block through the epilog.
 */
DECLARE_INSN(BCLR)
{
   int bo = bits(insn,21,25);   /* BO: branch options */
   int bi = bits(insn,16,20);   /* BI: CR bit to test */
   int bd = bits(insn,2,15);    /* BD field */
   u_int cr_field,cr_bit;
   m_uint32_t new_ia;
   u_char *jump_ptr;
   int cond,ctr;

   /* Get the wanted value for the condition bit and CTR value */
   cond = (bo >> 3) & 0x1;
   ctr  = (bo >> 1) & 0x1;

   /* Compute the new ia.
    * NOTE(review): new_ia is computed here but the actual branch target
    * below is taken from LR (%rdx) — new_ia appears unused; confirm
    * against the other translator backends. */
   new_ia = sign_extend_32(bd << 2,16);
   if (!(insn & 0x02))
      new_ia += b->start_ia + ((b->ppc_trans_pos-1) << 2);

   /* %rax = 1: assume branch taken until a sub-condition clears it */
   ppc32_load_imm(b,AMD64_RAX,1);

   /* Decrement the count register (BO[2] clear) and fold the
      "CTR == 0" / "CTR != 0" test into %rax */
   if (!(bo & 0x04)) {
      amd64_dec_membase_size(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ctr),4);
      amd64_set_reg(b->jit_ptr,(ctr) ? X86_CC_Z : X86_CC_NZ,AMD64_RBX,FALSE);
      amd64_alu_reg_reg(b->jit_ptr,X86_AND,AMD64_RAX,AMD64_RBX);
   }

   /* Test the condition bit (BO[0] clear) and fold into %rax */
   if (!((bo >> 4) & 0x01)) {
      cr_field = ppc32_get_cr_field(bi);
      cr_bit = ppc32_get_cr_bit(bi);

      amd64_test_membase_imm_size(b->jit_ptr,
                                  AMD64_R15,PPC32_CR_FIELD_OFFSET(cr_field),
                                  (1 << cr_bit),4);

      amd64_set_reg(b->jit_ptr,(cond) ? X86_CC_NZ : X86_CC_Z,AMD64_RCX,FALSE);
      amd64_alu_reg_reg(b->jit_ptr,X86_AND,AMD64_RAX,AMD64_RCX);
   }

   /* Fetch the branch target from LR *before* LK possibly rewrites it */
   amd64_mov_reg_membase(b->jit_ptr,AMD64_RDX,
                         AMD64_R15,OFFSET(cpu_ppc_t,lr),4);

   /* Set the return address (LK bit) */
   if (insn & 1)
      ppc32_set_lr(b,b->start_ia + (b->ppc_trans_pos << 2));

   /* Branching: sets ZF from the final predicate */
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,0x01);

   /* predicate == 0: fall through to the next instruction */
   jump_ptr = b->jit_ptr;
   amd64_branch32(b->jit_ptr,X86_CC_Z,0,FALSE);

   /* taken: ia = LR & ~3, then leave the translated block */
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RDX,0xFFFFFFFC);
   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ia),
                         AMD64_RDX,4);
   ppc32_jit_tcb_push_epilog(b);

   amd64_patch(jump_ptr,b->jit_ptr);
   return(0);
}
1091 |
|
|
|
1092 |
|
|
/* CMP - Compare (signed): compare $ra with $rb, result into CR field rd */
DECLARE_INSN(CMP)
{
   int rd = bits(insn,23,25);   /* destination CR field */
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   ppc32_load_gpr(b,AMD64_RBX,ra);
   ppc32_alu_gpr(b,X86_CMP,AMD64_RBX,rb);
   /* TRUE => signed comparison */
   ppc32_update_cr(b,rd,TRUE);
   return(0);
}
1104 |
|
|
|
1105 |
|
|
/* CMPI - Compare Immediate (signed): compare $ra with sign-extended imm */
DECLARE_INSN(CMPI)
{
   int rd = bits(insn,23,25);           /* destination CR field */
   int ra = bits(insn,16,20);
   m_uint16_t imm = bits(insn,0,15);
   m_uint32_t tmp = sign_extend_32(imm,16);   /* SIMM sign-extended to 32 bits */

   ppc32_load_imm(b,AMD64_RBX,tmp);
   ppc32_load_gpr(b,AMD64_RSI,ra);
   /* 32-bit compare: $ra - imm */
   amd64_alu_reg_reg_size(b->jit_ptr,X86_CMP,AMD64_RSI,AMD64_RBX,4);

   /* TRUE => signed comparison */
   ppc32_update_cr(b,rd,TRUE);
   return(0);
}
1120 |
|
|
|
1121 |
|
|
/* CMPL - Compare Logical (unsigned): compare $ra with $rb into CR field rd */
DECLARE_INSN(CMPL)
{
   int rd = bits(insn,23,25);   /* destination CR field */
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   ppc32_load_gpr(b,AMD64_RAX,ra);
   ppc32_alu_gpr(b,X86_CMP,AMD64_RAX,rb);
   /* FALSE => unsigned comparison */
   ppc32_update_cr(b,rd,FALSE);
   return(0);
}
1133 |
|
|
|
1134 |
|
|
/* CMPLI - Compare Logical Immediate (unsigned): compare $ra with imm */
DECLARE_INSN(CMPLI)
{
   int rd = bits(insn,23,25);           /* destination CR field */
   int ra = bits(insn,16,20);
   m_uint16_t imm = bits(insn,0,15);    /* UIMM, zero-extended by load below */

   ppc32_load_imm(b,AMD64_RBX,imm);
   ppc32_load_gpr(b,AMD64_RSI,ra);
   /* 32-bit compare: $ra - imm */
   amd64_alu_reg_reg_size(b->jit_ptr,X86_CMP,AMD64_RSI,AMD64_RBX,4);

   /* FALSE => unsigned comparison */
   ppc32_update_cr(b,rd,FALSE);
   return(0);
}
1148 |
|
|
|
1149 |
|
|
/* CRAND - Condition Register AND: CR[bd] = CR[ba] & CR[bb] */
DECLARE_INSN(CRAND)
{
   int bd = bits(insn,21,25);   /* destination CR bit */
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);

   /* test $ba bit -> %rax = (bit set) */
   amd64_test_membase_imm(b->jit_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE);

   /* test $bb bit -> %rbx = (bit set) */
   amd64_test_membase_imm(b->jit_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RBX,FALSE);

   /* result of AND between $ba and $bb, masked to bit 0 */
   amd64_alu_reg_reg(b->jit_ptr,X86_AND,AMD64_RBX,AMD64_RAX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01);

   /* set/clear $bd bit depending on the result: clear it first... */
   amd64_alu_membase_imm_size(b->jit_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   /* ...then OR the result shifted to its position */
   amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(b->jit_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              AMD64_RBX,4);
   return(0);
}
1187 |
|
|
|
1188 |
|
|
/* CRANDC - Condition Register AND with Complement: CR[bd] = CR[ba] & ~CR[bb] */
DECLARE_INSN(CRANDC)
{
   int bd = bits(insn,21,25);   /* destination CR bit */
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);

   /* test $ba bit -> %rax = (bit set) */
   amd64_test_membase_imm(b->jit_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE);

   /* test $bb bit -> %rbx = (bit clear): CC_Z gives the complement */
   amd64_test_membase_imm(b->jit_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(b->jit_ptr,X86_CC_Z,AMD64_RBX,FALSE);

   /* result of AND between $ba and ~$bb, masked to bit 0 */
   amd64_alu_reg_reg(b->jit_ptr,X86_AND,AMD64_RBX,AMD64_RAX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01);

   /* set/clear $bd bit depending on the result: clear it first... */
   amd64_alu_membase_imm_size(b->jit_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   /* ...then OR the result shifted to its position */
   amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(b->jit_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              AMD64_RBX,4);
   return(0);
}
1226 |
|
|
|
1227 |
|
|
/* CREQV - Condition Register Equivalent: CR[bd] = ~(CR[ba] ^ CR[bb]) */
DECLARE_INSN(CREQV)
{
   int bd = bits(insn,21,25);   /* destination CR bit */
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);

   /* test $ba bit -> %rax = (bit set) */
   amd64_test_membase_imm(b->jit_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE);

   /* test $bb bit -> %rbx = (bit set) */
   amd64_test_membase_imm(b->jit_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RBX,FALSE);

   /* result of XNOR between $ba and $bb, masked to bit 0 */
   amd64_alu_reg_reg(b->jit_ptr,X86_XOR,AMD64_RBX,AMD64_RAX);
   amd64_not_reg(b->jit_ptr,AMD64_RBX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01);

   /* set/clear $bd bit depending on the result: clear it first... */
   amd64_alu_membase_imm_size(b->jit_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   /* ...then OR the result shifted to its position */
   amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(b->jit_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              AMD64_RBX,4);
   return(0);
}
1266 |
|
|
|
1267 |
|
|
/* CRNAND - Condition Register NAND: CR[bd] = ~(CR[ba] & CR[bb]) */
DECLARE_INSN(CRNAND)
{
   int bd = bits(insn,21,25);   /* destination CR bit */
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);

   /* test $ba bit -> %rax = (bit set) */
   amd64_test_membase_imm(b->jit_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE);

   /* test $bb bit -> %rbx = (bit set) */
   amd64_test_membase_imm(b->jit_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RBX,FALSE);

   /* result of NAND between $ba and $bb, masked to bit 0 */
   amd64_alu_reg_reg(b->jit_ptr,X86_AND,AMD64_RBX,AMD64_RAX);
   amd64_not_reg(b->jit_ptr,AMD64_RBX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01);

   /* set/clear $bd bit depending on the result: clear it first... */
   amd64_alu_membase_imm_size(b->jit_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   /* ...then OR the result shifted to its position */
   amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(b->jit_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              AMD64_RBX,4);
   return(0);
}
1306 |
|
|
|
1307 |
|
|
/* CRNOR - Condition Register NOR: CR[bd] = ~(CR[ba] | CR[bb]) */
DECLARE_INSN(CRNOR)
{
   int bd = bits(insn,21,25);   /* destination CR bit */
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);

   /* test $ba bit -> %rax = (bit set) */
   amd64_test_membase_imm(b->jit_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE);

   /* test $bb bit -> %rbx = (bit set) */
   amd64_test_membase_imm(b->jit_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RBX,FALSE);

   /* result of NOR between $ba and $bb, masked to bit 0 */
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RBX,AMD64_RAX);
   amd64_not_reg(b->jit_ptr,AMD64_RBX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01);

   /* set/clear $bd bit depending on the result: clear it first... */
   amd64_alu_membase_imm_size(b->jit_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   /* ...then OR the result shifted to its position */
   amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(b->jit_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              AMD64_RBX,4);
   return(0);
}
1346 |
|
|
|
1347 |
|
|
/* CROR - Condition Register OR: CR[bd] = CR[ba] | CR[bb] */
DECLARE_INSN(CROR)
{
   int bd = bits(insn,21,25);   /* destination CR bit */
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);

   /* test $ba bit -> %rax = (bit set) */
   amd64_test_membase_imm(b->jit_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE);

   /* test $bb bit -> %rbx = (bit set) */
   amd64_test_membase_imm(b->jit_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RBX,FALSE);

   /* result of OR between $ba and $bb, masked to bit 0
      (the original comment said "NOR"; the code performs OR) */
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RBX,AMD64_RAX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01);

   /* set/clear $bd bit depending on the result: clear it first... */
   amd64_alu_membase_imm_size(b->jit_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   /* ...then OR the result shifted to its position */
   amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(b->jit_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              AMD64_RBX,4);
   return(0);
}
1385 |
|
|
|
1386 |
|
|
/* CRORC - Condition Register OR with Complement: CR[bd] = CR[ba] | ~CR[bb] */
DECLARE_INSN(CRORC)
{
   int bd = bits(insn,21,25);   /* destination CR bit */
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);

   /* test $ba bit -> %rax = (bit set) */
   amd64_test_membase_imm(b->jit_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE);

   /* test $bb bit -> %rbx = (bit clear): CC_Z gives the complement */
   amd64_test_membase_imm(b->jit_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(b->jit_ptr,X86_CC_Z,AMD64_RBX,FALSE);

   /* result of ORC between $ba and $bb, masked to bit 0 */
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RBX,AMD64_RAX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01);

   /* set/clear $bd bit depending on the result: clear it first... */
   amd64_alu_membase_imm_size(b->jit_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   /* ...then OR the result shifted to its position */
   amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(b->jit_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              AMD64_RBX,4);
   return(0);
}
1424 |
|
|
|
1425 |
|
|
/* CRXOR - Condition Register XOR: CR[bd] = CR[ba] ^ CR[bb] */
DECLARE_INSN(CRXOR)
{
   int bd = bits(insn,21,25);   /* destination CR bit */
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);

   /* test $ba bit -> %rax = (bit set) */
   amd64_test_membase_imm(b->jit_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE);

   /* test $bb bit -> %rbx = (bit set) */
   amd64_test_membase_imm(b->jit_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RBX,FALSE);

   /* result of XOR between $ba and $bb, masked to bit 0 */
   amd64_alu_reg_reg(b->jit_ptr,X86_XOR,AMD64_RBX,AMD64_RAX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01);

   /* set/clear $bd bit depending on the result: clear it first... */
   amd64_alu_membase_imm_size(b->jit_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   /* ...then OR the result shifted to its position */
   amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(b->jit_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              AMD64_RBX,4);
   return(0);
}
1463 |
|
|
|
1464 |
|
|
/* DIVWU - Divide Word Unsigned: $rd = $ra / $rb (unsigned 32-bit) */
DECLARE_INSN(DIVWU)
{
   int rd = bits(insn,21,25);   /* destination register */
   int ra = bits(insn,16,20);   /* dividend */
   int rb = bits(insn,11,15);   /* divisor */

   /* unsigned divide needs %rdx:%rax / %rbx with %rdx zeroed */
   ppc32_load_gpr(b,AMD64_RAX,ra);
   ppc32_load_gpr(b,AMD64_RBX,rb);
   ppc32_load_imm(b,AMD64_RDX,0);

   /* NOTE(review): no guard against $rb == 0 — a zero divisor would
    * raise a host divide fault at run time; PPC divwu with a zero
    * divisor is defined as "undefined result", not a trap. Confirm
    * how the emulator handles the host #DE. */
   amd64_div_reg_size(b->jit_ptr,AMD64_RBX,0,4);
   ppc32_store_gpr(b,rd,AMD64_RAX);

   /* Rc bit set: recompute flags from the quotient and update CR0 */
   if (insn & 1) {
      amd64_test_reg_reg(b->jit_ptr,AMD64_RAX,AMD64_RAX);
      ppc32_update_cr0(b);
   }

   return(0);
}
1485 |
|
|
|
1486 |
|
|
/* EQV - Equivalent: $ra = ~($rs ^ $rb) (eqv. updates CR0) */
DECLARE_INSN(EQV)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* $ra = ~($rs ^ $rb) */
   ppc32_load_gpr(b,AMD64_RBX,rs);
   ppc32_alu_gpr(b,X86_XOR,AMD64_RBX,rb);
   amd64_not_reg(b->jit_ptr,AMD64_RBX);
   ppc32_store_gpr(b,ra,AMD64_RBX);

   /* Rc bit set: recompute flags (NOT doesn't set them) and update CR0 */
   if (insn & 1) {
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RBX,AMD64_RBX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
1506 |
|
|
|
1507 |
|
|
/* EXTSB - Extend Sign Byte: $ra = sign_extend_8_to_32($rs) */
DECLARE_INSN(EXTSB)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);

   /* shift left then arithmetic shift right to sign-extend the low byte */
   ppc32_load_gpr(b,AMD64_RBX,rs);
   amd64_shift_reg_imm_size(b->jit_ptr,X86_SHL,AMD64_RBX,24,4);
   amd64_shift_reg_imm_size(b->jit_ptr,X86_SAR,AMD64_RBX,24,4);
   ppc32_store_gpr(b,ra,AMD64_RBX);

   /* Rc bit set: recompute flags and update CR0 */
   if (insn & 1) {
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RBX,AMD64_RBX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
1525 |
|
|
|
1526 |
|
|
/* EXTSH - Extend Sign Half-Word: $ra = sign_extend_16_to_32($rs) */
DECLARE_INSN(EXTSH)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);

   /* shift left then arithmetic shift right to sign-extend the low half */
   ppc32_load_gpr(b,AMD64_RBX,rs);
   amd64_shift_reg_imm_size(b->jit_ptr,X86_SHL,AMD64_RBX,16,4);
   amd64_shift_reg_imm_size(b->jit_ptr,X86_SAR,AMD64_RBX,16,4);
   ppc32_store_gpr(b,ra,AMD64_RBX);

   /* Rc bit set: recompute flags and update CR0 */
   if (insn & 1) {
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RBX,AMD64_RBX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
1544 |
|
|
|
1545 |
|
|
/* LBZ - Load Byte and Zero: $rs = zero_extend(mem8[$ra + offset]) */
DECLARE_INSN(LBZ)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t offset = bits(insn,0,15);

   //ppc32_emit_memop(b,PPC_MEMOP_LBZ,ra,offset,rs,0);
   /* fast inlined path with fallback helper */
   ppc32_emit_memop_fast(b,0,PPC_MEMOP_LBZ,ra,offset,rs,ppc32_memop_fast_lbz);
   return(0);
}
1556 |
|
|
|
1557 |
|
|
/* LBZU - Load Byte and Zero with Update ($ra receives the effective address) */
DECLARE_INSN(LBZU)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t offset = bits(insn,0,15);

   ppc32_emit_memop(b,PPC_MEMOP_LBZ,ra,offset,rs,1);   /* 1 = update $ra */
   return(0);
}
1567 |
|
|
|
1568 |
|
|
/* LBZUX - Load Byte and Zero with Update Indexed (EA = $ra + $rb) */
DECLARE_INSN(LBZUX)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   ppc32_emit_memop_idx(b,PPC_MEMOP_LBZ,ra,rb,rs,1);   /* 1 = update $ra */
   return(0);
}
1578 |
|
|
|
1579 |
|
|
/* LBZX - Load Byte and Zero Indexed (EA = $ra + $rb, no update) */
DECLARE_INSN(LBZX)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   ppc32_emit_memop_idx(b,PPC_MEMOP_LBZ,ra,rb,rs,0);
   return(0);
}
1589 |
|
|
|
1590 |
|
|
/* LHA - Load Half-Word Algebraic (sign-extending 16-bit load) */
DECLARE_INSN(LHA)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t offset = bits(insn,0,15);

   ppc32_emit_memop(b,PPC_MEMOP_LHA,ra,offset,rs,0);
   return(0);
}
1600 |
|
|
|
1601 |
|
|
/* LHAU - Load Half-Word Algebraic with Update ($ra receives the EA) */
DECLARE_INSN(LHAU)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t offset = bits(insn,0,15);

   ppc32_emit_memop(b,PPC_MEMOP_LHA,ra,offset,rs,1);   /* 1 = update $ra */
   return(0);
}
1611 |
|
|
|
1612 |
|
|
/* LHAUX - Load Half-Word Algebraic with Update Indexed (EA = $ra + $rb) */
DECLARE_INSN(LHAUX)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   ppc32_emit_memop_idx(b,PPC_MEMOP_LHA,ra,rb,rs,1);   /* 1 = update $ra */
   return(0);
}
1622 |
|
|
|
1623 |
|
|
/* LHAX - Load Half-Word Algebraic Indexed (EA = $ra + $rb, no update) */
DECLARE_INSN(LHAX)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   ppc32_emit_memop_idx(b,PPC_MEMOP_LHA,ra,rb,rs,0);
   return(0);
}
1633 |
|
|
|
1634 |
|
|
/* LHZ - Load Half-Word and Zero (zero-extending 16-bit load) */
DECLARE_INSN(LHZ)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t offset = bits(insn,0,15);

   ppc32_emit_memop(b,PPC_MEMOP_LHZ,ra,offset,rs,0);
   return(0);
}
1644 |
|
|
|
1645 |
|
|
/* LHZU - Load Half-Word and Zero with Update ($ra receives the EA) */
DECLARE_INSN(LHZU)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t offset = bits(insn,0,15);

   ppc32_emit_memop(b,PPC_MEMOP_LHZ,ra,offset,rs,1);   /* 1 = update $ra */
   return(0);
}
1655 |
|
|
|
1656 |
|
|
/* LHZUX - Load Half-Word and Zero with Update Indexed (EA = $ra + $rb) */
DECLARE_INSN(LHZUX)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   ppc32_emit_memop_idx(b,PPC_MEMOP_LHZ,ra,rb,rs,1);   /* 1 = update $ra */
   return(0);
}
1666 |
|
|
|
1667 |
|
|
/* LHZX - Load Half-Word and Zero Indexed (EA = $ra + $rb, no update) */
DECLARE_INSN(LHZX)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   ppc32_emit_memop_idx(b,PPC_MEMOP_LHZ,ra,rb,rs,0);
   return(0);
}
1677 |
|
|
|
1678 |
|
|
/* LWZ - Load Word and Zero: $rs = mem32[$ra + offset] */
DECLARE_INSN(LWZ)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t offset = bits(insn,0,15);

   //ppc32_emit_memop(b,PPC_MEMOP_LWZ,ra,offset,rs,0);
   /* fast inlined path with fallback helper */
   ppc32_emit_memop_fast(b,0,PPC_MEMOP_LWZ,ra,offset,rs,ppc32_memop_fast_lwz);
   return(0);
}
1689 |
|
|
|
1690 |
|
|
/* LWZU - Load Word and Zero with Update */ |
1691 |
|
|
DECLARE_INSN(LWZU) |
1692 |
|
|
{ |
1693 |
|
|
int rs = bits(insn,21,25); |
1694 |
|
|
int ra = bits(insn,16,20); |
1695 |
|
|
m_uint16_t offset = bits(insn,0,15); |
1696 |
|
|
|
1697 |
|
|
ppc32_emit_memop(b,PPC_MEMOP_LWZ,ra,offset,rs,1); |
1698 |
|
|
return(0); |
1699 |
|
|
} |
1700 |
|
|
|
1701 |
|
|
/* LWZUX - Load Word and Zero with Update Indexed */ |
1702 |
|
|
DECLARE_INSN(LWZUX) |
1703 |
|
|
{ |
1704 |
|
|
int rs = bits(insn,21,25); |
1705 |
|
|
int ra = bits(insn,16,20); |
1706 |
|
|
int rb = bits(insn,11,15); |
1707 |
|
|
|
1708 |
|
|
ppc32_emit_memop_idx(b,PPC_MEMOP_LWZ,ra,rb,rs,1); |
1709 |
|
|
return(0); |
1710 |
|
|
} |
1711 |
|
|
|
1712 |
|
|
/* LWZX - Load Word and Zero Indexed */ |
1713 |
|
|
DECLARE_INSN(LWZX) |
1714 |
|
|
{ |
1715 |
|
|
int rs = bits(insn,21,25); |
1716 |
|
|
int ra = bits(insn,16,20); |
1717 |
|
|
int rb = bits(insn,11,15); |
1718 |
|
|
|
1719 |
|
|
ppc32_emit_memop_idx(b,PPC_MEMOP_LWZ,ra,rb,rs,0); |
1720 |
|
|
return(0); |
1721 |
|
|
} |
1722 |
|
|
|
1723 |
|
|
/* MCRF - Move Condition Register Field */
DECLARE_INSN(MCRF)
{
   int rd = bits(insn,23,25);   /* destination CR field */
   int rs = bits(insn,18,20);   /* source CR field */

   /* Load "rs" field in %rdx (CR fields are kept as separate 32-bit
      slots in the cpu structure, addressed via PPC32_CR_FIELD_OFFSET) */
   amd64_mov_reg_membase(b->jit_ptr,AMD64_RDX,
                         AMD64_R15,PPC32_CR_FIELD_OFFSET(rs),4);

   /* Store it in "rd" field */
   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,PPC32_CR_FIELD_OFFSET(rd),
                         AMD64_RDX,4);
   return(0);
}
1738 |
|
|
|
1739 |
|
|
/* MFCR - Move from Condition Register */
DECLARE_INSN(MFCR)
{
   int rd = bits(insn,21,25);
   int i;

   /* Rebuild the architectural 32-bit CR from the 8 per-field slots. */
   amd64_alu_reg_reg(b->jit_ptr,X86_XOR,AMD64_RAX,AMD64_RAX);

   for(i=0;i<8;i++) {
      /* load field in %rdx */
      amd64_mov_reg_membase(b->jit_ptr,AMD64_RDX,
                            AMD64_R15,PPC32_CR_FIELD_OFFSET(i),4);
      /* shift accumulator left one nibble and merge the field;
         after 8 rounds CR0 sits in the most-significant nibble */
      amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RAX,4);
      amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RAX,AMD64_RDX);
   }

   ppc32_store_gpr(b,rd,AMD64_RAX);
   return(0);
}
1758 |
|
|
|
1759 |
|
|
/* MFMSR - Move from Machine State Register */ |
1760 |
|
|
DECLARE_INSN(MFMSR) |
1761 |
|
|
{ |
1762 |
|
|
int rd = bits(insn,21,25); |
1763 |
|
|
|
1764 |
|
|
amd64_mov_reg_membase(b->jit_ptr,AMD64_RAX, |
1765 |
|
|
AMD64_R15,OFFSET(cpu_ppc_t,msr),4); |
1766 |
|
|
ppc32_store_gpr(b,rd,AMD64_RAX); |
1767 |
|
|
return(0); |
1768 |
|
|
} |
1769 |
|
|
|
1770 |
|
|
/* MFSR - Move From Segment Register */ |
1771 |
|
|
DECLARE_INSN(MFSR) |
1772 |
|
|
{ |
1773 |
|
|
int rd = bits(insn,21,25); |
1774 |
|
|
int sr = bits(insn,16,19); |
1775 |
|
|
|
1776 |
|
|
amd64_mov_reg_membase(b->jit_ptr,AMD64_RAX, |
1777 |
|
|
AMD64_R15,(OFFSET(cpu_ppc_t,sr) + (sr << 2)),4); |
1778 |
|
|
ppc32_store_gpr(b,rd,AMD64_RAX); |
1779 |
|
|
return(0); |
1780 |
|
|
} |
1781 |
|
|
|
1782 |
|
|
/* MTCRF - Move to Condition Register Fields */
DECLARE_INSN(MTCRF)
{
   int rs = bits(insn,21,25);
   int crm = bits(insn,12,19);   /* bit mask of CR fields to update */
   int i;

   ppc32_load_gpr(b,AMD64_RDX,rs);

   /* For each field selected by crm, extract the matching 4-bit nibble
      of $rs and store it into the per-field CR slot.  Field 0 is the
      most-significant nibble, so field i is shifted down 28 - 4*i bits
      (field 7 needs no shift at all). */
   for(i=0;i<8;i++)
      if (crm & (1 << (7 - i))) {
         amd64_mov_reg_reg(b->jit_ptr,AMD64_RAX,AMD64_RDX,8);

         if (i != 7)
            amd64_shift_reg_imm(b->jit_ptr,X86_SHR,AMD64_RAX,28 - (i << 2));

         amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,0x0F);
         amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,PPC32_CR_FIELD_OFFSET(i),
                               AMD64_RAX,4);
      }

   return(0);
}
1805 |
|
|
|
1806 |
|
|
/* MULHW - Multiply High Word */
DECLARE_INSN(MULHW)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* 32x32 -> 64-bit signed multiply: x86 leaves the high word in %edx */
   ppc32_load_gpr(b,AMD64_RAX,ra);
   ppc32_load_gpr(b,AMD64_RBX,rb);
   amd64_mul_reg_size(b->jit_ptr,AMD64_RBX,1,4);   /* 1 = signed */
   ppc32_store_gpr(b,rd,AMD64_RDX);

   /* Rc bit set: update CR0 from the high-word result */
   if (insn & 1) {
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RDX,AMD64_RDX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
1825 |
|
|
|
1826 |
|
|
/* MULHWU - Multiply High Word Unsigned */
DECLARE_INSN(MULHWU)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* 32x32 -> 64-bit unsigned multiply: high word lands in %edx */
   ppc32_load_gpr(b,AMD64_RAX,ra);
   ppc32_load_gpr(b,AMD64_RBX,rb);
   amd64_mul_reg_size(b->jit_ptr,AMD64_RBX,0,4);   /* 0 = unsigned */
   ppc32_store_gpr(b,rd,AMD64_RDX);

   /* Rc bit set: update CR0 from the high-word result */
   if (insn & 1) {
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RDX,AMD64_RDX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
1845 |
|
|
|
1846 |
|
|
/* MULLI - Multiply Low Immediate */ |
1847 |
|
|
DECLARE_INSN(MULLI) |
1848 |
|
|
{ |
1849 |
|
|
int rd = bits(insn,21,25); |
1850 |
|
|
int ra = bits(insn,16,20); |
1851 |
|
|
m_uint32_t imm = bits(insn,0,15); |
1852 |
|
|
|
1853 |
|
|
ppc32_load_gpr(b,AMD64_RAX,ra); |
1854 |
|
|
ppc32_load_imm(b,AMD64_RBX,sign_extend_32(imm,16)); |
1855 |
|
|
|
1856 |
|
|
amd64_mul_reg_size(b->jit_ptr,AMD64_RBX,1,4); |
1857 |
|
|
ppc32_store_gpr(b,rd,X86_EAX); |
1858 |
|
|
return(0); |
1859 |
|
|
} |
1860 |
|
|
|
1861 |
|
|
/* MULLW - Multiply Low Word */
DECLARE_INSN(MULLW)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* signed multiply; only the low word (%eax) is kept */
   ppc32_load_gpr(b,AMD64_RAX,ra);
   ppc32_load_gpr(b,AMD64_RBX,rb);
   amd64_mul_reg_size(b->jit_ptr,AMD64_RBX,1,4);   /* 1 = signed */
   ppc32_store_gpr(b,rd,AMD64_RAX);

   /* Rc bit set: update CR0 from the low-word result */
   if (insn & 1) {
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RAX,AMD64_RAX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
1880 |
|
|
|
1881 |
|
|
/* NAND */ |
1882 |
|
|
DECLARE_INSN(NAND) |
1883 |
|
|
{ |
1884 |
|
|
int rs = bits(insn,21,25); |
1885 |
|
|
int ra = bits(insn,16,20); |
1886 |
|
|
int rb = bits(insn,11,15); |
1887 |
|
|
|
1888 |
|
|
/* $ra = ~($rs & $rb) */ |
1889 |
|
|
ppc32_load_gpr(b,AMD64_RBX,rs); |
1890 |
|
|
ppc32_alu_gpr(b,X86_AND,AMD64_RBX,rb); |
1891 |
|
|
amd64_not_reg(b->jit_ptr,AMD64_RBX); |
1892 |
|
|
ppc32_store_gpr(b,ra,AMD64_RBX); |
1893 |
|
|
|
1894 |
|
|
if (insn & 1) { |
1895 |
|
|
amd64_test_reg_reg_size(b->jit_ptr,AMD64_RBX,AMD64_RBX,4); |
1896 |
|
|
ppc32_update_cr0(b); |
1897 |
|
|
} |
1898 |
|
|
|
1899 |
|
|
return(0); |
1900 |
|
|
} |
1901 |
|
|
|
1902 |
|
|
/* NEG */ |
1903 |
|
|
DECLARE_INSN(NEG) |
1904 |
|
|
{ |
1905 |
|
|
int rd = bits(insn,21,25); |
1906 |
|
|
int ra = bits(insn,16,20); |
1907 |
|
|
|
1908 |
|
|
ppc32_load_gpr(b,AMD64_RBX,ra); |
1909 |
|
|
amd64_neg_reg(b->jit_ptr,AMD64_RBX); |
1910 |
|
|
ppc32_store_gpr(b,rd,AMD64_RBX); |
1911 |
|
|
|
1912 |
|
|
if (insn & 1) { |
1913 |
|
|
amd64_test_reg_reg_size(b->jit_ptr,AMD64_RBX,AMD64_RBX,4); |
1914 |
|
|
ppc32_update_cr0(b); |
1915 |
|
|
} |
1916 |
|
|
|
1917 |
|
|
return(0); |
1918 |
|
|
} |
1919 |
|
|
|
1920 |
|
|
/* NOR */ |
1921 |
|
|
DECLARE_INSN(NOR) |
1922 |
|
|
{ |
1923 |
|
|
int rs = bits(insn,21,25); |
1924 |
|
|
int ra = bits(insn,16,20); |
1925 |
|
|
int rb = bits(insn,11,15); |
1926 |
|
|
|
1927 |
|
|
/* $ra = ~($rs | $rb) */ |
1928 |
|
|
ppc32_load_gpr(b,AMD64_RBX,rs); |
1929 |
|
|
ppc32_alu_gpr(b,X86_OR,AMD64_RBX,rb); |
1930 |
|
|
amd64_not_reg(b->jit_ptr,AMD64_RBX); |
1931 |
|
|
ppc32_store_gpr(b,ra,AMD64_RBX); |
1932 |
|
|
|
1933 |
|
|
if (insn & 1) { |
1934 |
|
|
amd64_test_reg_reg_size(b->jit_ptr,AMD64_RBX,AMD64_RBX,4); |
1935 |
|
|
ppc32_update_cr0(b); |
1936 |
|
|
} |
1937 |
|
|
|
1938 |
|
|
return(0); |
1939 |
|
|
} |
1940 |
|
|
|
1941 |
|
|
/* OR */
DECLARE_INSN(OR)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   ppc32_load_gpr(b,AMD64_RCX,rs);

   /* rs == rb encodes "mr" (move register): skip the redundant OR */
   if (rs != rb)
      ppc32_alu_gpr(b,X86_OR,AMD64_RCX,rb);

   ppc32_store_gpr(b,ra,AMD64_RCX);

   if (insn & 1) {
      /* the OR above already set the host flags; only the "mr" case
         needs an explicit TEST before updating CR0 */
      if (rs == rb)
         amd64_test_reg_reg_size(b->jit_ptr,AMD64_RCX,AMD64_RCX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
1963 |
|
|
|
1964 |
|
|
/* OR with Complement */ |
1965 |
|
|
DECLARE_INSN(ORC) |
1966 |
|
|
{ |
1967 |
|
|
int rs = bits(insn,21,25); |
1968 |
|
|
int ra = bits(insn,16,20); |
1969 |
|
|
int rb = bits(insn,11,15); |
1970 |
|
|
|
1971 |
|
|
/* $ra = $rs | ~$rb */ |
1972 |
|
|
ppc32_load_gpr(b,AMD64_RBX,rb); |
1973 |
|
|
amd64_not_reg(b->jit_ptr,AMD64_RBX); |
1974 |
|
|
ppc32_alu_gpr(b,X86_OR,AMD64_RBX,rs); |
1975 |
|
|
ppc32_store_gpr(b,ra,AMD64_RBX); |
1976 |
|
|
|
1977 |
|
|
if (insn & 1) |
1978 |
|
|
ppc32_update_cr0(b); |
1979 |
|
|
|
1980 |
|
|
return(0); |
1981 |
|
|
} |
1982 |
|
|
|
1983 |
|
|
/* OR Immediate */ |
1984 |
|
|
DECLARE_INSN(ORI) |
1985 |
|
|
{ |
1986 |
|
|
int rs = bits(insn,21,25); |
1987 |
|
|
int ra = bits(insn,16,20); |
1988 |
|
|
m_uint16_t imm = bits(insn,0,15); |
1989 |
|
|
|
1990 |
|
|
/* $ra = $rs | imm */ |
1991 |
|
|
ppc32_load_imm(b,AMD64_RBX,imm); |
1992 |
|
|
ppc32_alu_gpr(b,X86_OR,AMD64_RBX,rs); |
1993 |
|
|
ppc32_store_gpr(b,ra,AMD64_RBX); |
1994 |
|
|
return(0); |
1995 |
|
|
} |
1996 |
|
|
|
1997 |
|
|
/* OR Immediate Shifted */ |
1998 |
|
|
DECLARE_INSN(ORIS) |
1999 |
|
|
{ |
2000 |
|
|
int rs = bits(insn,21,25); |
2001 |
|
|
int ra = bits(insn,16,20); |
2002 |
|
|
m_uint32_t imm = bits(insn,0,15); |
2003 |
|
|
|
2004 |
|
|
/* $ra = $rs | (imm << 16) */ |
2005 |
|
|
ppc32_load_imm(b,AMD64_RBX,imm << 16); |
2006 |
|
|
ppc32_alu_gpr(b,X86_OR,AMD64_RBX,rs); |
2007 |
|
|
ppc32_store_gpr(b,ra,AMD64_RBX); |
2008 |
|
|
return(0); |
2009 |
|
|
} |
2010 |
|
|
|
2011 |
|
|
/* RLWIMI - Rotate Left Word Immediate then Mask Insert */
DECLARE_INSN(RLWIMI)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int sh = bits(insn,11,15);   /* rotate count */
   int mb = bits(insn,6,10);    /* mask begin bit */
   int me = bits(insn,1,5);     /* mask end bit */
   register m_uint32_t mask;

   mask = ppc32_rotate_mask(mb,me);

   /* $ra = ($ra & ~mask) | (rotl32($rs,sh) & mask) */

   /* Apply inverse mask to %eax "ra" (skipped when mask == 0,
      where ~mask == 0xFFFFFFFF and the AND would be a no-op) */
   ppc32_load_gpr(b,AMD64_RAX,ra);
   if (mask != 0)
      amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,~mask);

   /* Rotate %ebx ("rs") of "sh" bits and apply the mask */
   ppc32_load_gpr(b,AMD64_RBX,rs);

   if (sh != 0)
      amd64_shift_reg_imm_size(b->jit_ptr,X86_ROL,AMD64_RBX,sh,4);

   if (mask != 0xFFFFFFFF)
      amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,mask);

   /* Store the result */
   amd64_alu_reg_reg_size(b->jit_ptr,X86_OR,AMD64_RBX,AMD64_RAX,4);
   ppc32_store_gpr(b,ra,AMD64_RBX);

   /* Rc bit set: update CR0 (host flags come from the OR above) */
   if (insn & 1)
      ppc32_update_cr0(b);

   return(0);
}
2046 |
|
|
|
2047 |
|
|
/* RLWINM - Rotate Left Word Immediate AND with Mask */
DECLARE_INSN(RLWINM)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int sh = bits(insn,11,15);   /* rotate count */
   int mb = bits(insn,6,10);    /* mask begin bit */
   int me = bits(insn,1,5);     /* mask end bit */
   register m_uint32_t mask;

   mask = ppc32_rotate_mask(mb,me);

   /* $ra = rotl32($rs,sh) & mask */

   /* Rotate %ebx ("rs") of "sh" bits and apply the mask
      (both steps are elided when they would be no-ops) */
   ppc32_load_gpr(b,AMD64_RBX,rs);

   if (sh != 0)
      amd64_shift_reg_imm_size(b->jit_ptr,X86_ROL,AMD64_RBX,sh,4);

   if (mask != 0xFFFFFFFF)
      amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,mask);

   ppc32_store_gpr(b,ra,AMD64_RBX);

   /* Rc bit set: update CR0 from the 32-bit result */
   if (insn & 1) {
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RBX,AMD64_RBX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
2077 |
|
|
|
2078 |
|
|
/* RLWNM - Rotate Left Word then AND with Mask */
DECLARE_INSN(RLWNM)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);   /* rotate count comes from a register */
   int mb = bits(insn,6,10);    /* mask begin bit */
   int me = bits(insn,1,5);     /* mask end bit */
   register m_uint32_t mask;

   mask = ppc32_rotate_mask(mb,me);

   /* $ra = rotl32($rs,$rb) & mask */

   /* Load the shift register ("sh") into %cl */
   ppc32_load_gpr(b,AMD64_RCX,rb);

   /* Rotate %ebx ("rs") and apply the mask */
   ppc32_load_gpr(b,AMD64_RBX,rs);
   amd64_shift_reg_size(b->jit_ptr,X86_ROL,AMD64_RBX,4);

   if (mask != 0xFFFFFFFF)
      amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,mask);

   ppc32_store_gpr(b,ra,AMD64_RBX);

   /* Rc bit set: update CR0 from the 32-bit result */
   if (insn & 1) {
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RBX,AMD64_RBX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
2109 |
|
|
|
2110 |
|
|
/* Shift Left Word */
DECLARE_INSN(SLW)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* If count >= 32, then null result: the shift below operates on the
      full 64-bit host register, so counts 32..63 push all 32 source
      bits above the low word, and only the low 32 bits are stored. */
   ppc32_load_gpr(b,AMD64_RCX,rb);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RCX,0x3f);

   ppc32_load_gpr(b,AMD64_RBX,rs);
   amd64_shift_reg(b->jit_ptr,X86_SHL,AMD64_RBX);

   /* Store the result */
   ppc32_store_gpr(b,ra,AMD64_RBX);

   /* Rc bit set: update CR0 from the 32-bit result */
   if (insn & 1) {
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RBX,AMD64_RBX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
2134 |
|
|
|
2135 |
|
|
/* SRAWI - Shift Right Algebraic Word Immediate */
DECLARE_INSN(SRAWI)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int sh = bits(insn,11,15);
   register m_uint32_t mask;

   /* mask covers the bits shifted out (mask == 0 when sh == 0) */
   mask = ~(0xFFFFFFFFU << sh);

   /* $ra = (int32)$rs >> sh */
   ppc32_load_gpr(b,AMD64_RBX,rs);
   amd64_mov_reg_reg(b->jit_ptr,AMD64_RSI,AMD64_RBX,4);   /* keep original in %esi */
   amd64_shift_reg_imm_size(b->jit_ptr,X86_SAR,AMD64_RBX,sh,4);
   ppc32_store_gpr(b,ra,AMD64_RBX);

   /* XER[CA] = 1 iff the source was negative AND at least one 1-bit
      was shifted out (per the PowerPC definition of srawi). */

   /* test the sign-bit of gpr[rs] */
   amd64_test_reg_reg_size(b->jit_ptr,AMD64_RSI,AMD64_RSI,4);
   amd64_set_reg(b->jit_ptr,X86_CC_LT,AMD64_RAX,TRUE);

   /* any 1-bits among those shifted out? */
   amd64_alu_reg_imm_size(b->jit_ptr,X86_AND,AMD64_RSI,mask,4);
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RCX,TRUE);

   /* NOTE(review): this combines the two conditions with OR, not AND;
      presumably compensated elsewhere or a known approximation — verify
      against the ISA definition before changing. */
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RCX,AMD64_RAX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RCX,0x1);
   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         AMD64_RCX,4);

   /* Rc bit set: update CR0 from the 32-bit result */
   if (insn & 1) {
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RBX,AMD64_RBX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
2170 |
|
|
|
2171 |
|
|
/* Shift Right Word */
DECLARE_INSN(SRW)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* If count >= 32, then null result: the 64-bit host shift with a
      count of 32..63 clears the (zero-extended) low word entirely. */
   ppc32_load_gpr(b,AMD64_RCX,rb);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RCX,0x3f);

   ppc32_load_gpr(b,AMD64_RBX,rs);
   amd64_shift_reg(b->jit_ptr,X86_SHR,AMD64_RBX);

   /* Store the result */
   ppc32_store_gpr(b,ra,AMD64_RBX);

   /* Rc bit set: update CR0 from the 32-bit result */
   if (insn & 1) {
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RBX,AMD64_RBX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
2195 |
|
|
|
2196 |
|
|
/* STB - Store Byte */ |
2197 |
|
|
DECLARE_INSN(STB) |
2198 |
|
|
{ |
2199 |
|
|
int rs = bits(insn,21,25); |
2200 |
|
|
int ra = bits(insn,16,20); |
2201 |
|
|
m_uint16_t offset = bits(insn,0,15); |
2202 |
|
|
|
2203 |
|
|
//ppc32_emit_memop(b,PPC_MEMOP_STB,ra,offset,rs,0); |
2204 |
|
|
ppc32_emit_memop_fast(b,1,PPC_MEMOP_STB,ra,offset,rs,ppc32_memop_fast_stb); |
2205 |
|
|
return(0); |
2206 |
|
|
} |
2207 |
|
|
|
2208 |
|
|
/* STBU - Store Byte with Update */ |
2209 |
|
|
DECLARE_INSN(STBU) |
2210 |
|
|
{ |
2211 |
|
|
int rs = bits(insn,21,25); |
2212 |
|
|
int ra = bits(insn,16,20); |
2213 |
|
|
m_uint16_t offset = bits(insn,0,15); |
2214 |
|
|
|
2215 |
|
|
ppc32_emit_memop(b,PPC_MEMOP_STB,ra,offset,rs,1); |
2216 |
|
|
return(0); |
2217 |
|
|
} |
2218 |
|
|
|
2219 |
|
|
/* STBUX - Store Byte with Update Indexed */ |
2220 |
|
|
DECLARE_INSN(STBUX) |
2221 |
|
|
{ |
2222 |
|
|
int rs = bits(insn,21,25); |
2223 |
|
|
int ra = bits(insn,16,20); |
2224 |
|
|
int rb = bits(insn,11,15); |
2225 |
|
|
|
2226 |
|
|
ppc32_emit_memop_idx(b,PPC_MEMOP_STB,ra,rb,rs,1); |
2227 |
|
|
return(0); |
2228 |
|
|
} |
2229 |
|
|
|
2230 |
|
|
/* STBUX - Store Byte Indexed */ |
2231 |
|
|
DECLARE_INSN(STBX) |
2232 |
|
|
{ |
2233 |
|
|
int rs = bits(insn,21,25); |
2234 |
|
|
int ra = bits(insn,16,20); |
2235 |
|
|
int rb = bits(insn,11,15); |
2236 |
|
|
|
2237 |
|
|
ppc32_emit_memop_idx(b,PPC_MEMOP_STB,ra,rb,rs,0); |
2238 |
|
|
return(0); |
2239 |
|
|
} |
2240 |
|
|
|
2241 |
|
|
/* STH - Store Half-Word */ |
2242 |
|
|
DECLARE_INSN(STH) |
2243 |
|
|
{ |
2244 |
|
|
int rs = bits(insn,21,25); |
2245 |
|
|
int ra = bits(insn,16,20); |
2246 |
|
|
m_uint16_t offset = bits(insn,0,15); |
2247 |
|
|
|
2248 |
|
|
ppc32_emit_memop(b,PPC_MEMOP_STH,ra,offset,rs,0); |
2249 |
|
|
return(0); |
2250 |
|
|
} |
2251 |
|
|
|
2252 |
|
|
/* STHU - Store Half-Word with Update */ |
2253 |
|
|
DECLARE_INSN(STHU) |
2254 |
|
|
{ |
2255 |
|
|
int rs = bits(insn,21,25); |
2256 |
|
|
int ra = bits(insn,16,20); |
2257 |
|
|
m_uint16_t offset = bits(insn,0,15); |
2258 |
|
|
|
2259 |
|
|
ppc32_emit_memop(b,PPC_MEMOP_STH,ra,offset,rs,1); |
2260 |
|
|
return(0); |
2261 |
|
|
} |
2262 |
|
|
|
2263 |
|
|
/* STHUX - Store Half-Word with Update Indexed */ |
2264 |
|
|
DECLARE_INSN(STHUX) |
2265 |
|
|
{ |
2266 |
|
|
int rs = bits(insn,21,25); |
2267 |
|
|
int ra = bits(insn,16,20); |
2268 |
|
|
int rb = bits(insn,11,15); |
2269 |
|
|
|
2270 |
|
|
ppc32_emit_memop_idx(b,PPC_MEMOP_STH,ra,rb,rs,1); |
2271 |
|
|
return(0); |
2272 |
|
|
} |
2273 |
|
|
|
2274 |
|
|
/* STHUX - Store Half-Word Indexed */ |
2275 |
|
|
DECLARE_INSN(STHX) |
2276 |
|
|
{ |
2277 |
|
|
int rs = bits(insn,21,25); |
2278 |
|
|
int ra = bits(insn,16,20); |
2279 |
|
|
int rb = bits(insn,11,15); |
2280 |
|
|
|
2281 |
|
|
ppc32_emit_memop_idx(b,PPC_MEMOP_STH,ra,rb,rs,0); |
2282 |
|
|
return(0); |
2283 |
|
|
} |
2284 |
|
|
|
2285 |
|
|
/* STW - Store Word */ |
2286 |
|
|
DECLARE_INSN(STW) |
2287 |
|
|
{ |
2288 |
|
|
int rs = bits(insn,21,25); |
2289 |
|
|
int ra = bits(insn,16,20); |
2290 |
|
|
m_uint16_t offset = bits(insn,0,15); |
2291 |
|
|
|
2292 |
|
|
//ppc32_emit_memop(b,PPC_MEMOP_STW,ra,offset,rs,0); |
2293 |
|
|
ppc32_emit_memop_fast(b,1,PPC_MEMOP_STW,ra,offset,rs,ppc32_memop_fast_stw); |
2294 |
|
|
return(0); |
2295 |
|
|
} |
2296 |
|
|
|
2297 |
|
|
/* STWU - Store Word with Update */ |
2298 |
|
|
DECLARE_INSN(STWU) |
2299 |
|
|
{ |
2300 |
|
|
int rs = bits(insn,21,25); |
2301 |
|
|
int ra = bits(insn,16,20); |
2302 |
|
|
m_uint16_t offset = bits(insn,0,15); |
2303 |
|
|
|
2304 |
|
|
ppc32_emit_memop(b,PPC_MEMOP_STW,ra,offset,rs,1); |
2305 |
|
|
return(0); |
2306 |
|
|
} |
2307 |
|
|
|
2308 |
|
|
/* STWUX - Store Word with Update Indexed */ |
2309 |
|
|
DECLARE_INSN(STWUX) |
2310 |
|
|
{ |
2311 |
|
|
int rs = bits(insn,21,25); |
2312 |
|
|
int ra = bits(insn,16,20); |
2313 |
|
|
int rb = bits(insn,11,15); |
2314 |
|
|
|
2315 |
|
|
ppc32_emit_memop_idx(b,PPC_MEMOP_STW,ra,rb,rs,1); |
2316 |
|
|
return(0); |
2317 |
|
|
} |
2318 |
|
|
|
2319 |
|
|
/* STWUX - Store Word Indexed */ |
2320 |
|
|
DECLARE_INSN(STWX) |
2321 |
|
|
{ |
2322 |
|
|
int rs = bits(insn,21,25); |
2323 |
|
|
int ra = bits(insn,16,20); |
2324 |
|
|
int rb = bits(insn,11,15); |
2325 |
|
|
|
2326 |
|
|
ppc32_emit_memop_idx(b,PPC_MEMOP_STW,ra,rb,rs,0); |
2327 |
|
|
return(0); |
2328 |
|
|
} |
2329 |
|
|
|
2330 |
|
|
/* SUBF - Subtract From */
DECLARE_INSN(SUBF)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* $rd = $rb - $ra (note the operand order: "subtract FROM") */
   ppc32_load_gpr(b,AMD64_RBX,rb);
   ppc32_alu_gpr(b,X86_SUB,AMD64_RBX,ra);
   ppc32_store_gpr(b,rd,AMD64_RBX);

   /* Rc bit set: update CR0 (host flags come from the SUB above) */
   if (insn & 1)
      ppc32_update_cr0(b);

   return(0);
}
2347 |
|
|
|
2348 |
|
|
/* SUBFC - Subtract From Carrying */
DECLARE_INSN(SUBFC)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* $rd = $rb - $ra computed as ~$ra + 1 + $rb; XER[CA] is set if
      either addition produces a carry out. */

   /* ~$ra + 1 */
   ppc32_load_gpr(b,AMD64_RSI,ra);
   amd64_not_reg(b->jit_ptr,AMD64_RSI);
   amd64_alu_reg_imm_size(b->jit_ptr,X86_ADD,AMD64_RSI,1,4);
   amd64_set_reg(b->jit_ptr,X86_CC_C,AMD64_RAX,FALSE);   /* first carry */

   /* add $rb */
   ppc32_alu_gpr(b,X86_ADD,AMD64_RSI,rb);
   amd64_set_reg(b->jit_ptr,X86_CC_C,AMD64_RCX,FALSE);   /* second carry */

   ppc32_store_gpr(b,rd,AMD64_RSI);

   /* store the carry flag (OR of the two partial carries) */
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RAX,AMD64_RCX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,0x1);

   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         AMD64_RAX,4);

   /* update cr0 */
   if (insn & 1) {
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RSI,AMD64_RSI,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
2382 |
|
|
|
2383 |
|
|
/* SUBFE - Subtract From Extended */
DECLARE_INSN(SUBFE)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* $rd = ~$ra + XER[CA] + $rb; XER[CA] is set if either addition
      produces a carry out. */

   /* ~$ra + carry */
   ppc32_load_gpr(b,AMD64_RSI,ra);
   amd64_not_reg(b->jit_ptr,AMD64_RSI);
   amd64_alu_reg_membase_size(b->jit_ptr,X86_ADD,AMD64_RSI,
                              AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),4);
   amd64_set_reg(b->jit_ptr,X86_CC_C,AMD64_RAX,FALSE);   /* first carry */

   /* add $rb */
   ppc32_alu_gpr(b,X86_ADD,AMD64_RSI,rb);
   amd64_set_reg(b->jit_ptr,X86_CC_C,AMD64_RCX,FALSE);   /* second carry */

   ppc32_store_gpr(b,rd,AMD64_RSI);

   /* store the carry flag (OR of the two partial carries) */
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RAX,AMD64_RCX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,0x1);
   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         AMD64_RAX,4);

   /* update cr0 */
   if (insn & 1) {
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RSI,AMD64_RSI,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
2417 |
|
|
|
2418 |
|
|
/* SUBFIC - Subtract From Immediate Carrying */
DECLARE_INSN(SUBFIC)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t imm = bits(insn,0,15);
   m_uint32_t tmp = sign_extend_32(imm,16);

   /* $rd = sign_extend(imm) - $ra computed as ~$ra + 1 + imm;
      XER[CA] is set if either addition produces a carry out.
      (no Rc form exists for subfic) */

   /* ~$ra + 1 */
   ppc32_load_gpr(b,AMD64_RSI,ra);
   amd64_not_reg(b->jit_ptr,AMD64_RSI);
   amd64_alu_reg_imm_size(b->jit_ptr,X86_ADD,AMD64_RSI,1,4);
   amd64_set_reg(b->jit_ptr,X86_CC_C,AMD64_RAX,FALSE);   /* first carry */

   /* add sign-extended $immediate */
   amd64_alu_reg_imm_size(b->jit_ptr,X86_ADD,AMD64_RSI,tmp,4);
   amd64_set_reg(b->jit_ptr,X86_CC_C,AMD64_RCX,FALSE);   /* second carry */

   ppc32_store_gpr(b,rd,AMD64_RSI);

   /* store the carry flag (OR of the two partial carries) */
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RAX,AMD64_RCX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,0x1);

   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         AMD64_RAX,4);
   return(0);
}
2446 |
|
|
|
2447 |
|
|
/* SYNC - Synchronize */
DECLARE_INSN(SYNC)
{
   /* Storage ordering is already sequential in the emulator,
      so nothing needs to be emitted. */
   return(0);
}
2452 |
|
|
|
2453 |
|
|
/* XOR */ |
2454 |
|
|
DECLARE_INSN(XOR) |
2455 |
|
|
{ |
2456 |
|
|
int rs = bits(insn,21,25); |
2457 |
|
|
int ra = bits(insn,16,20); |
2458 |
|
|
int rb = bits(insn,11,15); |
2459 |
|
|
|
2460 |
|
|
ppc32_load_gpr(b,AMD64_RBX,rs); |
2461 |
|
|
ppc32_alu_gpr(b,X86_XOR,AMD64_RBX,rb); |
2462 |
|
|
ppc32_store_gpr(b,ra,AMD64_RBX); |
2463 |
|
|
|
2464 |
|
|
if (insn & 1) |
2465 |
|
|
ppc32_update_cr0(b); |
2466 |
|
|
|
2467 |
|
|
return(0); |
2468 |
|
|
} |
2469 |
|
|
|
2470 |
|
|
/* XORI - XOR Immediate */ |
2471 |
|
|
DECLARE_INSN(XORI) |
2472 |
|
|
{ |
2473 |
|
|
int rs = bits(insn,21,25); |
2474 |
|
|
int ra = bits(insn,16,20); |
2475 |
|
|
m_uint32_t imm = bits(insn,0,15); |
2476 |
|
|
|
2477 |
|
|
ppc32_load_imm(b,AMD64_RBX,imm); |
2478 |
|
|
ppc32_alu_gpr(b,X86_XOR,AMD64_RBX,rs); |
2479 |
|
|
ppc32_store_gpr(b,ra,AMD64_RBX); |
2480 |
|
|
return(0); |
2481 |
|
|
} |
2482 |
|
|
|
2483 |
|
|
/* XORIS - XOR Immediate Shifted */ |
2484 |
|
|
DECLARE_INSN(XORIS) |
2485 |
|
|
{ |
2486 |
|
|
int rs = bits(insn,21,25); |
2487 |
|
|
int ra = bits(insn,16,20); |
2488 |
|
|
m_uint32_t imm = bits(insn,0,15); |
2489 |
|
|
|
2490 |
|
|
ppc32_load_imm(b,AMD64_RBX,imm << 16); |
2491 |
|
|
ppc32_alu_gpr(b,X86_XOR,AMD64_RBX,rs); |
2492 |
|
|
ppc32_store_gpr(b,ra,AMD64_RBX); |
2493 |
|
|
return(0); |
2494 |
|
|
} |
2495 |
|
|
|
2496 |
|
|
/* PPC instruction array */ |
2497 |
|
|
struct ppc32_insn_tag ppc32_insn_tags[] = { |
2498 |
|
|
{ ppc32_emit_BLR , 0xfffffffe , 0x4e800020 }, |
2499 |
|
|
{ ppc32_emit_BCTR , 0xfffffffe , 0x4e800420 }, |
2500 |
|
|
{ ppc32_emit_MFLR , 0xfc1fffff , 0x7c0802a6 }, |
2501 |
|
|
{ ppc32_emit_MTLR , 0xfc1fffff , 0x7c0803a6 }, |
2502 |
|
|
{ ppc32_emit_MFCTR , 0xfc1fffff , 0x7c0902a6 }, |
2503 |
|
|
{ ppc32_emit_MTCTR , 0xfc1fffff , 0x7c0903a6 }, |
2504 |
|
|
{ ppc32_emit_MFTBL , 0xfc1ff7ff , 0x7c0c42e6 }, |
2505 |
|
|
{ ppc32_emit_MFTBU , 0xfc1ff7ff , 0x7c0d42e6 }, |
2506 |
|
|
{ ppc32_emit_ADD , 0xfc0007fe , 0x7c000214 }, |
2507 |
|
|
{ ppc32_emit_ADDC , 0xfc0007fe , 0x7c000014 }, |
2508 |
|
|
{ ppc32_emit_ADDE , 0xfc0007fe , 0x7c000114 }, |
2509 |
|
|
{ ppc32_emit_ADDI , 0xfc000000 , 0x38000000 }, |
2510 |
|
|
{ ppc32_emit_ADDIC , 0xfc000000 , 0x30000000 }, |
2511 |
|
|
{ ppc32_emit_ADDIC_dot , 0xfc000000 , 0x34000000 }, |
2512 |
|
|
{ ppc32_emit_ADDIS , 0xfc000000 , 0x3c000000 }, |
2513 |
|
|
{ ppc32_emit_AND , 0xfc0007fe , 0x7c000038 }, |
2514 |
|
|
{ ppc32_emit_ANDC , 0xfc0007fe , 0x7c000078 }, |
2515 |
|
|
{ ppc32_emit_ANDI , 0xfc000000 , 0x70000000 }, |
2516 |
|
|
{ ppc32_emit_ANDIS , 0xfc000000 , 0x74000000 }, |
2517 |
|
|
{ ppc32_emit_B , 0xfc000003 , 0x48000000 }, |
2518 |
|
|
{ ppc32_emit_BA , 0xfc000003 , 0x48000002 }, |
2519 |
|
|
{ ppc32_emit_BL , 0xfc000003 , 0x48000001 }, |
2520 |
|
|
{ ppc32_emit_BLA , 0xfc000003 , 0x48000003 }, |
2521 |
|
|
{ ppc32_emit_BCC , 0xfe800000 , 0x40800000 }, |
2522 |
|
|
{ ppc32_emit_BC , 0xfc000000 , 0x40000000 }, |
2523 |
|
|
{ ppc32_emit_BCLR , 0xfc00fffe , 0x4c000020 }, |
2524 |
|
|
{ ppc32_emit_CMP , 0xfc6007ff , 0x7c000000 }, |
2525 |
|
|
{ ppc32_emit_CMPI , 0xfc600000 , 0x2c000000 }, |
2526 |
|
|
{ ppc32_emit_CMPL , 0xfc6007ff , 0x7c000040 }, |
2527 |
|
|
{ ppc32_emit_CMPLI , 0xfc600000 , 0x28000000 }, |
2528 |
|
|
{ ppc32_emit_CRAND , 0xfc0007ff , 0x4c000202 }, |
2529 |
|
|
{ ppc32_emit_CRANDC , 0xfc0007ff , 0x4c000102 }, |
2530 |
|
|
{ ppc32_emit_CREQV , 0xfc0007ff , 0x4c000242 }, |
2531 |
|
|
{ ppc32_emit_CRNAND , 0xfc0007ff , 0x4c0001c2 }, |
2532 |
|
|
{ ppc32_emit_CRNOR , 0xfc0007ff , 0x4c000042 }, |
2533 |
|
|
{ ppc32_emit_CROR , 0xfc0007ff , 0x4c000382 }, |
2534 |
|
|
{ ppc32_emit_CRORC , 0xfc0007ff , 0x4c000342 }, |
2535 |
|
|
{ ppc32_emit_CRXOR , 0xfc0007ff , 0x4c000182 }, |
2536 |
|
|
{ ppc32_emit_DIVWU , 0xfc0007fe , 0x7c000396 }, |
2537 |
|
|
{ ppc32_emit_EQV , 0xfc0007fe , 0x7c000238 }, |
2538 |
|
|
{ ppc32_emit_EXTSB , 0xfc00fffe , 0x7c000774 }, |
2539 |
|
|
{ ppc32_emit_EXTSH , 0xfc00fffe , 0x7c000734 }, |
2540 |
|
|
{ ppc32_emit_LBZ , 0xfc000000 , 0x88000000 }, |
2541 |
|
|
{ ppc32_emit_LBZU , 0xfc000000 , 0x8c000000 }, |
2542 |
|
|
{ ppc32_emit_LBZUX , 0xfc0007ff , 0x7c0000ee }, |
2543 |
|
|
{ ppc32_emit_LBZX , 0xfc0007ff , 0x7c0000ae }, |
2544 |
|
|
{ ppc32_emit_LHA , 0xfc000000 , 0xa8000000 }, |
2545 |
|
|
{ ppc32_emit_LHAU , 0xfc000000 , 0xac000000 }, |
2546 |
|
|
{ ppc32_emit_LHAUX , 0xfc0007ff , 0x7c0002ee }, |
2547 |
|
|
{ ppc32_emit_LHAX , 0xfc0007ff , 0x7c0002ae }, |
2548 |
|
|
{ ppc32_emit_LHZ , 0xfc000000 , 0xa0000000 }, |
2549 |
|
|
{ ppc32_emit_LHZU , 0xfc000000 , 0xa4000000 }, |
2550 |
|
|
{ ppc32_emit_LHZUX , 0xfc0007ff , 0x7c00026e }, |
2551 |
|
|
{ ppc32_emit_LHZX , 0xfc0007ff , 0x7c00022e }, |
2552 |
|
|
{ ppc32_emit_LWZ , 0xfc000000 , 0x80000000 }, |
2553 |
|
|
{ ppc32_emit_LWZU , 0xfc000000 , 0x84000000 }, |
2554 |
|
|
{ ppc32_emit_LWZUX , 0xfc0007ff , 0x7c00006e }, |
2555 |
|
|
{ ppc32_emit_LWZX , 0xfc0007ff , 0x7c00002e }, |
2556 |
|
|
{ ppc32_emit_MCRF , 0xfc63ffff , 0x4c000000 }, |
2557 |
|
|
{ ppc32_emit_MFCR , 0xfc1fffff , 0x7c000026 }, |
2558 |
|
|
{ ppc32_emit_MFMSR , 0xfc1fffff , 0x7c0000a6 }, |
2559 |
|
|
{ ppc32_emit_MFSR , 0xfc10ffff , 0x7c0004a6 }, |
2560 |
|
|
{ ppc32_emit_MTCRF , 0xfc100fff , 0x7c000120 }, |
2561 |
|
|
{ ppc32_emit_MULHW , 0xfc0007fe , 0x7c000096 }, |
2562 |
|
|
{ ppc32_emit_MULHWU , 0xfc0007fe , 0x7c000016 }, |
2563 |
|
|
{ ppc32_emit_MULLI , 0xfc000000 , 0x1c000000 }, |
2564 |
|
|
{ ppc32_emit_MULLW , 0xfc0007fe , 0x7c0001d6 }, |
2565 |
|
|
{ ppc32_emit_NAND , 0xfc0007fe , 0x7c0003b8 }, |
2566 |
|
|
{ ppc32_emit_NEG , 0xfc00fffe , 0x7c0000d0 }, |
2567 |
|
|
{ ppc32_emit_NOR , 0xfc0007fe , 0x7c0000f8 }, |
2568 |
|
|
{ ppc32_emit_OR , 0xfc0007fe , 0x7c000378 }, |
2569 |
|
|
{ ppc32_emit_ORC , 0xfc0007fe , 0x7c000338 }, |
2570 |
|
|
{ ppc32_emit_ORI , 0xfc000000 , 0x60000000 }, |
2571 |
|
|
{ ppc32_emit_ORIS , 0xfc000000 , 0x64000000 }, |
2572 |
|
|
{ ppc32_emit_RLWIMI , 0xfc000000 , 0x50000000 }, |
2573 |
|
|
{ ppc32_emit_RLWINM , 0xfc000000 , 0x54000000 }, |
2574 |
|
|
{ ppc32_emit_RLWNM , 0xfc000000 , 0x5c000000 }, |
2575 |
|
|
{ ppc32_emit_SLW , 0xfc0007fe , 0x7c000030 }, |
2576 |
|
|
{ ppc32_emit_SRAWI , 0xfc0007fe , 0x7c000670 }, |
2577 |
|
|
{ ppc32_emit_SRW , 0xfc0007fe , 0x7c000430 }, |
2578 |
|
|
{ ppc32_emit_STB , 0xfc000000 , 0x98000000 }, |
2579 |
|
|
{ ppc32_emit_STBU , 0xfc000000 , 0x9c000000 }, |
2580 |
|
|
{ ppc32_emit_STBUX , 0xfc0007ff , 0x7c0001ee }, |
2581 |
|
|
{ ppc32_emit_STBX , 0xfc0007ff , 0x7c0001ae }, |
2582 |
|
|
{ ppc32_emit_STH , 0xfc000000 , 0xb0000000 }, |
2583 |
|
|
{ ppc32_emit_STHU , 0xfc000000 , 0xb4000000 }, |
2584 |
|
|
{ ppc32_emit_STHUX , 0xfc0007ff , 0x7c00036e }, |
2585 |
|
|
{ ppc32_emit_STHX , 0xfc0007ff , 0x7c00032e }, |
2586 |
|
|
{ ppc32_emit_STW , 0xfc000000 , 0x90000000 }, |
2587 |
|
|
{ ppc32_emit_STWU , 0xfc000000 , 0x94000000 }, |
2588 |
|
|
{ ppc32_emit_STWUX , 0xfc0007ff , 0x7c00016e }, |
2589 |
|
|
{ ppc32_emit_STWX , 0xfc0007ff , 0x7c00012e }, |
2590 |
|
|
{ ppc32_emit_SUBF , 0xfc0007fe , 0x7c000050 }, |
2591 |
|
|
{ ppc32_emit_SUBFC , 0xfc0007fe , 0x7c000010 }, |
2592 |
|
|
{ ppc32_emit_SUBFE , 0xfc0007fe , 0x7c000110 }, |
2593 |
|
|
{ ppc32_emit_SUBFIC , 0xfc000000 , 0x20000000 }, |
2594 |
|
|
{ ppc32_emit_SYNC , 0xffffffff , 0x7c0004ac }, |
2595 |
|
|
{ ppc32_emit_XOR , 0xfc0007fe , 0x7c000278 }, |
2596 |
|
|
{ ppc32_emit_XORI , 0xfc000000 , 0x68000000 }, |
2597 |
|
|
{ ppc32_emit_XORIS , 0xfc000000 , 0x6c000000 }, |
2598 |
|
|
{ ppc32_emit_unknown , 0x00000000 , 0x00000000 }, |
2599 |
dpavlin |
8 |
{ NULL , 0x00000000 , 0x00000000 }, |
2600 |
dpavlin |
7 |
}; |