1 |
/* |
2 |
* Cisco router simulation platform. |
3 |
* Copyright (c) 2005,2006 Christophe Fillot (cf@utc.fr) |
4 |
*/ |
5 |
|
6 |
#include <stdio.h> |
7 |
#include <stdlib.h> |
8 |
#include <unistd.h> |
9 |
#include <string.h> |
10 |
#include <sys/types.h> |
11 |
#include <sys/stat.h> |
12 |
#include <sys/mman.h> |
13 |
#include <fcntl.h> |
14 |
|
15 |
#include "cpu.h" |
16 |
#include "ppc32_jit.h" |
17 |
#include "ppc32_amd64_trans.h" |
18 |
#include "memory.h" |
19 |
|
20 |
/* Macros for CPU structure access */ |
21 |
#define REG_OFFSET(reg) (OFFSET(cpu_ppc_t,gpr[(reg)])) |
22 |
#define MEMOP_OFFSET(op) (OFFSET(cpu_ppc_t,mem_op_fn[(op)])) |
23 |
|
24 |
#define DECLARE_INSN(name) \ |
25 |
static int ppc32_emit_##name(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b, \ |
26 |
ppc_insn_t insn) |
27 |
|
28 |
/* Load a 32 bit immediate value */ |
29 |
static inline void ppc32_load_imm(ppc32_jit_tcb_t *b,u_int reg,m_uint32_t val) |
30 |
{ |
31 |
if (val) |
32 |
amd64_mov_reg_imm_size(b->jit_ptr,reg,val,4); |
33 |
else |
34 |
amd64_alu_reg_reg_size(b->jit_ptr,X86_XOR,reg,reg,4); |
35 |
} |
36 |
|
37 |
/* Set the Instruction Address (IA) register.
 * 32-bit immediate store into the CPU context (base pointer kept in R15). */
void ppc32_set_ia(ppc32_jit_tcb_t *b,m_uint32_t new_ia)
{
   amd64_mov_membase_imm(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ia),new_ia,4);
}
42 |
|
43 |
/* Set the Link Register (LR).
 * 32-bit immediate store into the CPU context (base pointer kept in R15). */
void ppc32_set_lr(ppc32_jit_tcb_t *b,m_uint32_t new_lr)
{
   amd64_mov_membase_imm(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,lr),new_lr,4);
}
48 |
|
49 |
/* Set Jump.
 *
 * Emits a jump to new_ia. If the target lies in the same translated block,
 * a direct host jump is emitted (or recorded for later patching when the
 * target code has not been generated yet). Otherwise IA is saved and the
 * block returns to its caller through the epilog. */
static void ppc32_set_jump(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b,
                           m_uint32_t new_ia,int local_jump)
{
   int return_to_caller = FALSE;
   u_char *jump_ptr;

#if 0
   if (cpu->sym_trace && !local_jump)
      return_to_caller = TRUE;
#endif

   if (!return_to_caller && ppc32_jit_tcb_local_addr(b,new_ia,&jump_ptr)) {
      if (jump_ptr) {
         /* target already translated: jump straight to its host code */
         amd64_jump_code(b->jit_ptr,jump_ptr);
      } else {
         /* target in this block but not yet emitted: record a patch site */
         ppc32_jit_tcb_record_patch(b,b->jit_ptr,new_ia);
         amd64_jump32(b->jit_ptr,0);
      }
   } else {
      /* save PC */
      ppc32_set_ia(b,new_ia);

      /* address is in another block, for now, returns to caller */
      ppc32_jit_tcb_push_epilog(b);
   }
}
76 |
|
77 |
/* Load the Condition Register (CR) into the specified host register */
static forced_inline void ppc32_load_cr(ppc32_jit_tcb_t *b,u_int host_reg)
{
   amd64_mov_reg_membase(b->jit_ptr,host_reg,AMD64_R15,OFFSET(cpu_ppc_t,cr),4);
}
82 |
|
83 |
/* Store the Condition Register (CR) from the specified host register */
static forced_inline void ppc32_store_cr(ppc32_jit_tcb_t *b,u_int host_reg)
{
   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,cr),host_reg,4);
}
88 |
|
89 |
/* Load a GPR into the specified host register (32-bit load) */
static forced_inline void ppc32_load_gpr(ppc32_jit_tcb_t *b,u_int host_reg,
                                         u_int ppc_reg)
{
   amd64_mov_reg_membase(b->jit_ptr,host_reg,AMD64_R15,REG_OFFSET(ppc_reg),4);
}
95 |
|
96 |
/* Store contents for a host register into a GPR register (32-bit store) */
static forced_inline void ppc32_store_gpr(ppc32_jit_tcb_t *b,u_int ppc_reg,
                                          u_int host_reg)
{
   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,REG_OFFSET(ppc_reg),host_reg,4);
}
102 |
|
103 |
/* Apply an ALU operation on a GPR register and a host register.
 * host_reg = host_reg <op> GPR[ppc_reg]; the host flags produced by the
 * operation are left intact for the caller (e.g. ppc32_update_cr). */
static forced_inline void ppc32_alu_gpr(ppc32_jit_tcb_t *b,u_int op,
                                        u_int host_reg,u_int ppc_reg)
{
   amd64_alu_reg_membase_size(b->jit_ptr,op,host_reg,
                              AMD64_R15,REG_OFFSET(ppc_reg),4);
}
110 |
|
111 |
/*
 * Update CR from %eflags
 * %eax, %ecx, %edx, %esi are modified.
 *
 * Must be emitted directly after the instruction that set the host flags:
 * the setcc sequence below consumes them.
 */

/* Bit positions of LT/GT/EQ/SO inside a 4-bit CR field */
#define PPC32_CR_LT_BIT   3
#define PPC32_CR_GT_BIT   2
#define PPC32_CR_EQ_BIT   1
#define PPC32_CR_SO_BIT   0

static void ppc32_update_cr(ppc32_jit_tcb_t *b,int field,int is_signed)
{
   m_uint32_t cr_mask;
   u_int cfb;

   /* 4-bit mask of the target CR field, and shift of its low bit */
   cr_mask = 0xF0000000 >> (field << 2);
   cfb = 28 - (field << 2);

   /* materialize LT/GT/EQ from the host flags (signed or unsigned compare) */
   amd64_set_reg(b->jit_ptr,X86_CC_LT,AMD64_RAX,is_signed);
   amd64_set_reg(b->jit_ptr,X86_CC_GT,AMD64_RCX,is_signed);
   amd64_set_reg(b->jit_ptr,X86_CC_Z,AMD64_RDX,is_signed);

   /* move each 0/1 result into its bit position within the CR field */
   amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RAX,(cfb + PPC32_CR_LT_BIT));
   amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RCX,(cfb + PPC32_CR_GT_BIT));
   amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RDX,(cfb + PPC32_CR_EQ_BIT));

   amd64_alu_reg_reg(b->jit_ptr,X86_OR,X86_EAX,X86_ECX);
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,X86_EAX,X86_EDX);

   /* Load Condition Register, clear the target field, merge the new bits */
   ppc32_load_cr(b,AMD64_RDX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RDX,~cr_mask);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,cr_mask);
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RDX,AMD64_RAX);

   /* Check XER Summary of Overflow and report it
    * (shift the XER SO bit down into this field's SO position) */
   amd64_mov_reg_membase(b->jit_ptr,AMD64_RCX,
                         AMD64_R15,OFFSET(cpu_ppc_t,xer),4);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RCX,PPC32_XER_SO);
   amd64_shift_reg_imm(b->jit_ptr,X86_SHR,AMD64_RCX,(field << 2) + 3);
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RDX,AMD64_RCX);

   /* Store modified CR */
   ppc32_store_cr(b,AMD64_RDX);
}
155 |
|
156 |
/*
 * Update CR0 from %eflags (signed comparison semantics).
 * %eax, %ecx, %edx, %esi are modified.
 */
static void ppc32_update_cr0(ppc32_jit_tcb_t *b)
{
   ppc32_update_cr(b,0,TRUE);
}
164 |
|
165 |
/* Basic C call: load the function address and call through RCX.
 * Arguments must already be in the SysV argument registers. */
static forced_inline void ppc32_emit_basic_c_call(ppc32_jit_tcb_t *b,void *f)
{
   amd64_mov_reg_imm(b->jit_ptr,AMD64_RCX,f);
   amd64_call_reg(b->jit_ptr,AMD64_RCX);
}
171 |
|
172 |
/* Emit a simple call to a C function without any parameter.
 * IA is synchronized to the current instruction first so the callee
 * sees a consistent PC. */
static void ppc32_emit_c_call(ppc32_jit_tcb_t *b,void *f)
{
   ppc32_set_ia(b,b->start_ia+((b->ppc_trans_pos-1)<<2));
   ppc32_emit_basic_c_call(b,f);
}
178 |
|
179 |
/* Memory operation.
 *
 * Calls the memory access function mem_op_fn[op] with:
 *   RDI = cpu, RSI = effective address (GPR[base] + sext(offset)),
 *   RDX = target GPR number.
 * A non-zero return value signals an exception: the block is left via the
 * epilog. For update forms, the effective address is preserved in R14
 * across the call and written back to GPR[base] afterwards. */
static void ppc32_emit_memop(ppc32_jit_tcb_t *b,int op,int base,int offset,
                             int target,int update)
{
   m_uint32_t val = sign_extend(offset,16);
   u_char *test1;

   /* Save PC for exception handling */
   ppc32_set_ia(b,b->start_ia+((b->ppc_trans_pos-1)<<2));

   /* RSI = sign-extended offset */
   ppc32_load_imm(b,AMD64_RSI,val);

   /* RSI = GPR[base] + sign-extended offset */
   if (update || (base != 0))
      ppc32_alu_gpr(b,X86_ADD,AMD64_RSI,base);

   /* keep the effective address for the post-call write-back */
   if (update)
      amd64_mov_reg_reg(b->jit_ptr,AMD64_R14,AMD64_RSI,4);

   /* RDX = target register */
   amd64_mov_reg_imm(b->jit_ptr,AMD64_RDX,target);

   /* RDI = CPU instance pointer */
   amd64_mov_reg_reg(b->jit_ptr,AMD64_RDI,AMD64_R15,8);

   /* Call memory function */
   amd64_call_membase(b->jit_ptr,AMD64_R15,MEMOP_OFFSET(op));

   /* Exception ? */
   amd64_test_reg_reg_size(b->jit_ptr,AMD64_RAX,AMD64_RAX,4);
   test1 = b->jit_ptr;
   amd64_branch8(b->jit_ptr, X86_CC_Z, 0, 1);
   ppc32_jit_tcb_push_epilog(b);
   amd64_patch(test1,b->jit_ptr);

   if (update)
      ppc32_store_gpr(b,base,AMD64_R14);
}
218 |
|
219 |
/* Memory operation (indexed).
 *
 * Same as ppc32_emit_memop but the effective address is GPR[ra] + GPR[rb]
 * instead of GPR[base] + immediate offset. */
static void ppc32_emit_memop_idx(ppc32_jit_tcb_t *b,int op,int ra,int rb,
                                 int target,int update)
{
   u_char *test1;

   /* Save PC for exception handling */
   ppc32_set_ia(b,b->start_ia+((b->ppc_trans_pos-1)<<2));

   /* RSI = $rb */
   ppc32_load_gpr(b,AMD64_RSI,rb);

   /* RSI = GPR[ra] + GPR[rb] */
   if (update || (ra != 0))
      ppc32_alu_gpr(b,X86_ADD,AMD64_RSI,ra);

   /* keep the effective address for the post-call write-back */
   if (update)
      amd64_mov_reg_reg(b->jit_ptr,AMD64_R14,AMD64_RSI,4);

   /* RDX = target register */
   amd64_mov_reg_imm(b->jit_ptr,AMD64_RDX,target);

   /* RDI = CPU instance pointer */
   amd64_mov_reg_reg(b->jit_ptr,AMD64_RDI,AMD64_R15,8);

   /* Call memory function */
   amd64_call_membase(b->jit_ptr,AMD64_R15,MEMOP_OFFSET(op));

   /* Exception ? */
   amd64_test_reg_reg_size(b->jit_ptr,AMD64_RAX,AMD64_RAX,4);
   test1 = b->jit_ptr;
   amd64_branch8(b->jit_ptr, X86_CC_Z, 0, 1);
   ppc32_jit_tcb_push_epilog(b);
   amd64_patch(test1,b->jit_ptr);

   if (update)
      ppc32_store_gpr(b,ra,AMD64_R14);
}
257 |
|
258 |
typedef void (*memop_fast_access)(ppc32_jit_tcb_t *b,int target); |
259 |
|
260 |
/* Fast LBZ: zero-extended byte load.
 * On entry RBX = host page address, RSI = offset in page
 * (set up by ppc32_emit_memop_fast). */
static void ppc32_memop_fast_lbz(ppc32_jit_tcb_t *b,int target)
{
   /* clear RCX first so the byte load is effectively zero-extended */
   amd64_clear_reg(b->jit_ptr,AMD64_RCX);
   amd64_mov_reg_memindex(b->jit_ptr,AMD64_RCX,AMD64_RBX,0,AMD64_RSI,0,1);
   ppc32_store_gpr(b,target,AMD64_RCX);
}
267 |
|
268 |
/* Fast STB: byte store.
 * On entry RBX = host page address, RSI = offset in page. */
static void ppc32_memop_fast_stb(ppc32_jit_tcb_t *b,int target)
{
   ppc32_load_gpr(b,AMD64_RDX,target);
   amd64_mov_memindex_reg(b->jit_ptr,AMD64_RBX,0,AMD64_RSI,0,AMD64_RDX,1);
}
274 |
|
275 |
/* Fast LWZ: 32-bit load with byte swap (guest is big-endian, host little).
 * On entry RBX = host page address, RSI = offset in page. */
static void ppc32_memop_fast_lwz(ppc32_jit_tcb_t *b,int target)
{
   amd64_mov_reg_memindex(b->jit_ptr,AMD64_RAX,AMD64_RBX,0,AMD64_RSI,0,4);
   amd64_bswap32(b->jit_ptr,AMD64_RAX);
   ppc32_store_gpr(b,target,AMD64_RAX);
}
282 |
|
283 |
/* Fast STW: 32-bit store with byte swap (guest is big-endian, host little).
 * On entry RBX = host page address, RSI = offset in page. */
static void ppc32_memop_fast_stw(ppc32_jit_tcb_t *b,int target)
{
   ppc32_load_gpr(b,AMD64_RDX,target);
   amd64_bswap32(b->jit_ptr,AMD64_RDX);
   amd64_mov_memindex_reg(b->jit_ptr,AMD64_RBX,0,AMD64_RSI,0,AMD64_RDX,4);
}
290 |
|
291 |
/* Fast memory operation */ |
292 |
static void ppc32_emit_memop_fast(ppc32_jit_tcb_t *b,int write_op, |
293 |
int opcode,int base,int offset,int target, |
294 |
memop_fast_access op_handler) |
295 |
{ |
296 |
m_uint32_t val = sign_extend(offset,16); |
297 |
u_char *test1,*test2,*p_exception,*p_exit; |
298 |
|
299 |
test2 = NULL; |
300 |
|
301 |
/* RSI = GPR[base] + sign-extended offset */ |
302 |
ppc32_load_imm(b,AMD64_RSI,val); |
303 |
if (base != 0) |
304 |
ppc32_alu_gpr(b,X86_ADD,AMD64_RSI,base); |
305 |
|
306 |
/* RBX = mts32_entry index */ |
307 |
amd64_mov_reg_reg_size(b->jit_ptr,X86_EBX,X86_ESI,4); |
308 |
amd64_shift_reg_imm_size(b->jit_ptr,X86_SHR,X86_EBX,MTS32_HASH_SHIFT,4); |
309 |
amd64_alu_reg_imm_size(b->jit_ptr,X86_AND,X86_EBX,MTS32_HASH_MASK,4); |
310 |
|
311 |
/* RCX = mts32 entry */ |
312 |
amd64_mov_reg_membase(b->jit_ptr,AMD64_RCX, |
313 |
AMD64_R15, |
314 |
OFFSET(cpu_ppc_t,mts_cache[PPC32_MTS_DCACHE]),8); |
315 |
amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,5); /* TO FIX */ |
316 |
amd64_alu_reg_reg(b->jit_ptr,X86_ADD,AMD64_RCX,AMD64_RBX); |
317 |
|
318 |
/* Compare virtual page address (EAX = vpage) */ |
319 |
amd64_mov_reg_reg(b->jit_ptr,X86_EAX,X86_ESI,4); |
320 |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,X86_EAX,PPC32_MIN_PAGE_MASK); |
321 |
|
322 |
amd64_alu_reg_membase_size(b->jit_ptr,X86_CMP,X86_EAX,AMD64_RCX, |
323 |
OFFSET(mts32_entry_t,gvpa),4); |
324 |
test1 = b->jit_ptr; |
325 |
x86_branch8(b->jit_ptr, X86_CC_NZ, 0, 1); |
326 |
|
327 |
/* Test if we are writing to a COW page */ |
328 |
if (write_op) { |
329 |
amd64_test_membase_imm_size(b->jit_ptr, |
330 |
AMD64_RCX,OFFSET(mts32_entry_t,flags), |
331 |
MTS_FLAG_COW,4); |
332 |
test2 = b->jit_ptr; |
333 |
amd64_branch8(b->jit_ptr, X86_CC_NZ, 0, 1); |
334 |
} |
335 |
|
336 |
/* ESI = offset in page, RBX = Host Page Address */ |
337 |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,X86_ESI,PPC32_MIN_PAGE_IMASK); |
338 |
amd64_mov_reg_membase(b->jit_ptr,AMD64_RBX, |
339 |
AMD64_RCX,OFFSET(mts32_entry_t,hpa),8); |
340 |
|
341 |
/* Memory access */ |
342 |
op_handler(b,target); |
343 |
|
344 |
p_exit = b->jit_ptr; |
345 |
amd64_jump8(b->jit_ptr,0); |
346 |
|
347 |
/* === Slow lookup === */ |
348 |
amd64_patch(test1,b->jit_ptr); |
349 |
if (test2) |
350 |
amd64_patch(test2,b->jit_ptr); |
351 |
|
352 |
/* Save IA for exception handling */ |
353 |
ppc32_set_ia(b,b->start_ia+((b->ppc_trans_pos-1)<<2)); |
354 |
|
355 |
/* RDX = target register */ |
356 |
amd64_mov_reg_imm(b->jit_ptr,AMD64_RDX,target); |
357 |
|
358 |
/* RDI = CPU instance */ |
359 |
amd64_mov_reg_reg(b->jit_ptr,AMD64_RDI,AMD64_R15,8); |
360 |
|
361 |
/* Call memory access function */ |
362 |
amd64_call_membase(b->jit_ptr,AMD64_R15,MEMOP_OFFSET(opcode)); |
363 |
|
364 |
/* Exception ? */ |
365 |
amd64_test_reg_reg_size(b->jit_ptr,AMD64_RAX,AMD64_RAX,4); |
366 |
p_exception = b->jit_ptr; |
367 |
amd64_branch8(b->jit_ptr, X86_CC_Z, 0, 1); |
368 |
ppc32_jit_tcb_push_epilog(b); |
369 |
|
370 |
amd64_patch(p_exit,b->jit_ptr); |
371 |
amd64_patch(p_exception,b->jit_ptr); |
372 |
} |
373 |
|
374 |
/* Emit unhandled instruction code */ |
375 |
static int ppc32_emit_unknown(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b, |
376 |
ppc_insn_t opcode) |
377 |
{ |
378 |
u_char *test1; |
379 |
|
380 |
#if 0 |
381 |
x86_mov_reg_imm(b->jit_ptr,X86_EAX,opcode); |
382 |
x86_alu_reg_imm(b->jit_ptr,X86_SUB,X86_ESP,4); |
383 |
x86_push_reg(b->jit_ptr,X86_EAX); |
384 |
x86_push_reg(b->jit_ptr,X86_EDI); |
385 |
ppc32_emit_c_call(b,ppc32_unknown_opcode); |
386 |
x86_alu_reg_imm(b->jit_ptr,X86_ADD,X86_ESP,12); |
387 |
#endif |
388 |
|
389 |
/* Fallback to non-JIT mode */ |
390 |
amd64_mov_reg_reg(b->jit_ptr,AMD64_RDI,AMD64_R15,8); |
391 |
amd64_mov_reg_imm(b->jit_ptr,AMD64_RSI,opcode); |
392 |
|
393 |
ppc32_emit_c_call(b,ppc32_exec_single_insn_ext); |
394 |
amd64_test_reg_reg_size(b->jit_ptr,AMD64_RAX,AMD64_RAX,4); |
395 |
test1 = b->jit_ptr; |
396 |
amd64_branch8(b->jit_ptr, X86_CC_Z, 0, 1); |
397 |
ppc32_jit_tcb_push_epilog(b); |
398 |
|
399 |
amd64_patch(test1,b->jit_ptr); |
400 |
return(0); |
401 |
} |
402 |
|
403 |
/* Increment the number of executed instructions (performance debugging) */
void ppc32_inc_perf_counter(ppc32_jit_tcb_t *b)
{
   amd64_inc_membase(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,perf_counter));
}
408 |
|
409 |
/* ======================================================================== */ |
410 |
|
411 |
/* BLR - Branch to Link Register.
 * Copies LR to IA, optionally records the return address (LK bit),
 * then exits the translated block. */
DECLARE_INSN(BLR)
{
   amd64_mov_reg_membase(b->jit_ptr,AMD64_RDX,
                         AMD64_R15,OFFSET(cpu_ppc_t,lr),4);
   amd64_mov_membase_reg(b->jit_ptr,
                         AMD64_R15,OFFSET(cpu_ppc_t,ia),AMD64_RDX,4);

   /* set the return address (LK bit) */
   if (insn & 1)
      ppc32_set_lr(b,b->start_ia + (b->ppc_trans_pos << 2));

   ppc32_jit_tcb_push_epilog(b);
   return(0);
}
426 |
|
427 |
/* BCTR - Branch to Count Register.
 * Copies CTR to IA, optionally records the return address (LK bit),
 * then exits the translated block. */
DECLARE_INSN(BCTR)
{
   amd64_mov_reg_membase(b->jit_ptr,AMD64_RDX,
                         AMD64_R15,OFFSET(cpu_ppc_t,ctr),4);
   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ia),
                         AMD64_RDX,4);

   /* set the return address (LK bit) */
   if (insn & 1)
      ppc32_set_lr(b,b->start_ia + (b->ppc_trans_pos << 2));

   ppc32_jit_tcb_push_epilog(b);
   return(0);
}
442 |
|
443 |
/* MFLR - Move From Link Register */ |
444 |
DECLARE_INSN(MFLR) |
445 |
{ |
446 |
int rd = bits(insn,21,25); |
447 |
|
448 |
amd64_mov_reg_membase(b->jit_ptr,AMD64_RDX, |
449 |
AMD64_R15,OFFSET(cpu_ppc_t,lr),4); |
450 |
ppc32_store_gpr(b,rd,X86_EDX); |
451 |
return(0); |
452 |
} |
453 |
|
454 |
/* MTLR - Move To Link Register */ |
455 |
DECLARE_INSN(MTLR) |
456 |
{ |
457 |
int rs = bits(insn,21,25); |
458 |
|
459 |
ppc32_load_gpr(b,X86_EDX,rs); |
460 |
amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,lr), |
461 |
AMD64_RDX,4); |
462 |
return(0); |
463 |
} |
464 |
|
465 |
/* MFCTR - Move From Counter Register: $rd = CTR */
DECLARE_INSN(MFCTR)
{
   int rd = bits(insn,21,25);

   amd64_mov_reg_membase(b->jit_ptr,AMD64_RDX,
                         AMD64_R15,OFFSET(cpu_ppc_t,ctr),4);
   ppc32_store_gpr(b,rd,AMD64_RDX);
   return(0);
}
475 |
|
476 |
/* MTCTR - Move To Counter Register: CTR = $rs */
DECLARE_INSN(MTCTR)
{
   int rs = bits(insn,21,25);

   ppc32_load_gpr(b,AMD64_RDX,rs);
   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ctr),
                         AMD64_RDX,4);
   return(0);
}
486 |
|
487 |
/* MFTBU - Move from Time Base (Up): $rd = high 32 bits of TB.
 * The +4 offset selects the upper word of the 64-bit tb field
 * (little-endian host layout). */
DECLARE_INSN(MFTBU)
{
   int rd = bits(insn,21,25);

   amd64_mov_reg_membase(b->jit_ptr,AMD64_RDX,
                         AMD64_R15,OFFSET(cpu_ppc_t,tb)+4,4);
   ppc32_store_gpr(b,rd,AMD64_RDX);
   return(0);
}
497 |
|
498 |
/* Amount added to the time base on each MFTBL, to make TB appear to run */
#define PPC32_TB_INCREMENT  50

/* MFTBL - Move from Time Base (Lo): $rd = low 32 bits of TB.
 * The 64-bit TB is incremented here as a side effect; the low word of the
 * incremented value is stored into $rd. */
DECLARE_INSN(MFTBL)
{
   int rd = bits(insn,21,25);

   /* Increment the time base register (full 64-bit read-modify-write) */
   amd64_mov_reg_membase(b->jit_ptr,AMD64_RDX,
                         AMD64_R15,OFFSET(cpu_ppc_t,tb),8);
   amd64_alu_reg_imm(b->jit_ptr,X86_ADD,AMD64_RDX,PPC32_TB_INCREMENT);
   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,tb),
                         AMD64_RDX,8);

   /* 32-bit store keeps only the low word */
   ppc32_store_gpr(b,rd,AMD64_RDX);
   return(0);
}
515 |
|
516 |
/* ADD: $rd = $ra + $rb (Rc bit optionally updates CR0) */
DECLARE_INSN(ADD)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* $rd = $ra + $rb */
   ppc32_load_gpr(b,AMD64_RBX,ra);
   ppc32_alu_gpr(b,X86_ADD,AMD64_RBX,rb);
   ppc32_store_gpr(b,rd,AMD64_RBX);

   /* Rc: CR0 from the host flags left by the ADD above */
   if (insn & 1)
      ppc32_update_cr0(b);

   return(0);
}
533 |
|
534 |
/* ADDC: $rd = $ra + $rb, recording the carry into XER[CA] */
DECLARE_INSN(ADDC)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* $rd = $ra + $rb */
   ppc32_load_gpr(b,AMD64_RBX,ra);
   ppc32_alu_gpr(b,X86_ADD,AMD64_RBX,rb);
   ppc32_store_gpr(b,rd,AMD64_RBX);

   /* store the carry flag (setcc must come before anything clobbers CF) */
   amd64_set_reg(b->jit_ptr,X86_CC_C,AMD64_RAX,FALSE);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,0x1);
   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         AMD64_RAX,4);

   /* Rc: the AND above destroyed the flags, so re-test the result */
   if (insn & 1) {
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RBX,AMD64_RBX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
559 |
|
560 |
/* ADDE - Add Extended */ |
561 |
DECLARE_INSN(ADDE) |
562 |
{ |
563 |
int rd = bits(insn,21,25); |
564 |
int ra = bits(insn,16,20); |
565 |
int rb = bits(insn,11,15); |
566 |
|
567 |
/* $ra + carry */ |
568 |
ppc32_load_gpr(b,AMD64_RSI,ra); |
569 |
amd64_alu_reg_membase_size(b->jit_ptr,X86_ADD,AMD64_RSI, |
570 |
AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),4); |
571 |
amd64_set_reg(b->jit_ptr,X86_CC_C,AMD64_RAX,FALSE); |
572 |
|
573 |
/* add $rb */ |
574 |
ppc32_alu_gpr(b,X86_ADD,AMD64_RSI,rb); |
575 |
amd64_set_reg(b->jit_ptr,X86_CC_C,AMD64_RCX,FALSE); |
576 |
|
577 |
ppc32_store_gpr(b,rd,AMD64_RSI); |
578 |
|
579 |
/* store the carry flag */ |
580 |
amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RAX,AMD64_RCX); |
581 |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,0x1); |
582 |
|
583 |
amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca), |
584 |
AMD64_RAX,4); |
585 |
|
586 |
/* update cr0 */ |
587 |
if (insn & 1) { |
588 |
x86_test_reg_reg(b->jit_ptr,AMD64_RSI,AMD64_RSI); |
589 |
ppc32_update_cr0(b); |
590 |
} |
591 |
|
592 |
return(0); |
593 |
} |
594 |
|
595 |
/* ADDI - ADD Immediate: $rd = ($ra|0) + sext(imm).
 * ra == 0 means the literal value 0, not GPR0. */
DECLARE_INSN(ADDI)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int imm = bits(insn,0,15);
   m_uint32_t tmp = sign_extend_32(imm,16);

   ppc32_load_imm(b,AMD64_RBX,tmp);

   if (ra != 0)
      amd64_alu_reg_membase_size(b->jit_ptr,X86_ADD,AMD64_RBX,
                                 AMD64_R15,REG_OFFSET(ra),4);

   ppc32_store_gpr(b,rd,AMD64_RBX);
   return(0);
}
612 |
|
613 |
/* ADDIC - ADD Immediate with Carry: $rd = $ra + sext(imm), CA = carry */
DECLARE_INSN(ADDIC)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int imm = bits(insn,0,15);
   m_uint32_t tmp = sign_extend_32(imm,16);

   ppc32_load_imm(b,AMD64_RAX,tmp);
   ppc32_alu_gpr(b,X86_ADD,AMD64_RAX,ra);
   ppc32_store_gpr(b,rd,AMD64_RAX);
   /* store the carry flag straight into xer_ca */
   amd64_set_membase_size(b->jit_ptr,X86_CC_C,
                          AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                          FALSE,4);
   return(0);
}
629 |
|
630 |
/* ADDIC. - like ADDIC but always updates CR0 */
DECLARE_INSN(ADDIC_dot)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int imm = bits(insn,0,15);
   m_uint32_t tmp = sign_extend_32(imm,16);

   ppc32_load_imm(b,AMD64_RAX,tmp);
   ppc32_alu_gpr(b,X86_ADD,AMD64_RAX,ra);
   ppc32_store_gpr(b,rd,AMD64_RAX);
   /* store the carry flag straight into xer_ca */
   amd64_set_membase_size(b->jit_ptr,X86_CC_C,
                          AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                          FALSE,4);

   /* regenerate flags from the result, then update CR0 */
   amd64_test_reg_reg_size(b->jit_ptr,AMD64_RAX,AMD64_RAX,4);
   ppc32_update_cr0(b);
   return(0);
}
649 |
|
650 |
/* ADDIS - ADD Immediate Shifted: $rd = ($ra|0) + (imm << 16) */
DECLARE_INSN(ADDIS)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint32_t imm = bits(insn,0,15);

   ppc32_load_imm(b,AMD64_RBX,imm << 16);

   if (ra != 0)
      amd64_alu_reg_membase_size(b->jit_ptr,X86_ADD,AMD64_RBX,
                                 AMD64_R15,REG_OFFSET(ra),4);

   ppc32_store_gpr(b,rd,AMD64_RBX);
   return(0);
}
666 |
|
667 |
/* AND: $ra = $rs & $rb (Rc bit optionally updates CR0) */
DECLARE_INSN(AND)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   ppc32_load_gpr(b,AMD64_RBX,rs);
   ppc32_alu_gpr(b,X86_AND,AMD64_RBX,rb);
   ppc32_store_gpr(b,ra,AMD64_RBX);

   /* Rc: CR0 from the flags left by the AND above */
   if (insn & 1)
      ppc32_update_cr0(b);

   return(0);
}
683 |
|
684 |
/* ANDC: $ra = $rs & ~$rb (Rc bit optionally updates CR0) */
DECLARE_INSN(ANDC)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* $ra = $rs & ~$rb */
   ppc32_load_gpr(b,AMD64_RBX,rb);
   /* NOTE(review): x86_not_reg is the 32-bit x86 macro (no REX) — it only
    * complements EBX, which is sufficient here since only the low 32 bits
    * are used afterwards; confirm against the amd64 codegen macros. */
   x86_not_reg(b->jit_ptr,AMD64_RBX);
   ppc32_alu_gpr(b,X86_AND,AMD64_RBX,rs);
   ppc32_store_gpr(b,ra,AMD64_RBX);

   /* Rc: CR0 from the flags left by the AND above */
   if (insn & 1)
      ppc32_update_cr0(b);

   return(0);
}
702 |
|
703 |
/* AND Immediate (andi.): $ra = $rs & imm.
 * This form always updates CR0, hence the unconditional call. */
DECLARE_INSN(ANDI)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t imm = bits(insn,0,15);

   /* $ra = $rs & imm */
   ppc32_load_imm(b,AMD64_RBX,imm);
   ppc32_alu_gpr(b,X86_AND,AMD64_RBX,rs);
   ppc32_store_gpr(b,ra,AMD64_RBX);

   ppc32_update_cr0(b);
   return(0);
}
718 |
|
719 |
/* AND Immediate Shifted (andis.): $ra = $rs & (imm << 16).
 * This form always updates CR0, hence the unconditional call. */
DECLARE_INSN(ANDIS)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint32_t imm = bits(insn,0,15);

   /* $ra = $rs & (imm << 16) */
   ppc32_load_imm(b,AMD64_RBX,imm << 16);
   ppc32_alu_gpr(b,X86_AND,AMD64_RBX,rs);
   ppc32_store_gpr(b,ra,AMD64_RBX);

   ppc32_update_cr0(b);
   return(0);
}
734 |
|
735 |
/* B - Branch: IA-relative jump with 26-bit signed displacement */
DECLARE_INSN(B)
{
   m_uint32_t offset = bits(insn,2,25);
   m_uint64_t new_ia;

   /* compute the new ia: current insn address + sext(offset << 2) */
   new_ia = b->start_ia + ((b->ppc_trans_pos-1) << 2);
   new_ia += sign_extend(offset << 2,26);
   ppc32_set_jump(cpu,b,new_ia,1);
   return(0);
}
747 |
|
748 |
/* BA - Branch Absolute: jump to sext(offset << 2) */
DECLARE_INSN(BA)
{
   m_uint32_t offset = bits(insn,2,25);
   m_uint64_t new_ia;

   /* compute the new ia */
   new_ia = sign_extend(offset << 2,26);
   ppc32_set_jump(cpu,b,new_ia,1);
   return(0);
}
759 |
|
760 |
/* BL - Branch and Link: relative branch, LR = address of next insn */
DECLARE_INSN(BL)
{
   m_uint32_t offset = bits(insn,2,25);
   m_uint64_t new_ia;

   /* compute the new ia */
   new_ia = b->start_ia + ((b->ppc_trans_pos-1) << 2);
   new_ia += sign_extend(offset << 2,26);

   /* set the return address */
   ppc32_set_lr(b,b->start_ia + (b->ppc_trans_pos << 2));

   ppc32_set_jump(cpu,b,new_ia,1);
   return(0);
}
776 |
|
777 |
/* BLA - Branch and Link Absolute: absolute branch, LR = next insn */
DECLARE_INSN(BLA)
{
   m_uint32_t offset = bits(insn,2,25);
   m_uint64_t new_ia;

   /* compute the new ia */
   new_ia = sign_extend(offset << 2,26);

   /* set the return address */
   ppc32_set_lr(b,b->start_ia + (b->ppc_trans_pos << 2));

   ppc32_set_jump(cpu,b,new_ia,1);
   return(0);
}
792 |
|
793 |
/* BC - Branch Conditional (Condition Check only).
 * Variant of BC used when the BO field requests only a CR-bit test
 * (no CTR decrement). The CR bit 'bi' is tested directly with TEST and
 * the branch condition is picked from 'cond' (wanted bit value, BO[3]). */
DECLARE_INSN(BCC)
{
   int bo = bits(insn,21,25);
   int bi = bits(insn,16,20);
   int bd = bits(insn,2,15);
   m_uint32_t new_ia;
   u_char *jump_ptr;
   int local_jump;
   int cond;

   /* Get the wanted value for the condition bit */
   cond = (bo >> 3) & 0x1;

   /* Set the return address (LK bit) */
   if (insn & 1)
      ppc32_set_lr(b,b->start_ia + (b->ppc_trans_pos << 2));

   /* Compute the new ia (AA bit selects absolute vs relative) */
   new_ia = sign_extend_32(bd << 2,16);
   if (!(insn & 0x02))
      new_ia += b->start_ia + ((b->ppc_trans_pos-1) << 2);

   /* Test the condition bit */
   amd64_test_membase_imm_size(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,cr),
                               (1 << (31 - bi)),4);

   local_jump = ppc32_jit_tcb_local_addr(b,new_ia,&jump_ptr);

   /*
    * Optimize the jump, depending if the destination is in the same
    * page or not.
    */
   if (local_jump) {
      if (jump_ptr) {
         /* target already emitted: conditional branch straight to it */
         amd64_branch(b->jit_ptr,(cond) ? X86_CC_NZ : X86_CC_Z,jump_ptr,FALSE);
      } else {
         /* target not yet emitted: record a patch site */
         ppc32_jit_tcb_record_patch(b,b->jit_ptr,new_ia);
         amd64_branch32(b->jit_ptr,(cond) ? X86_CC_NZ : X86_CC_Z,0,FALSE);
      }
   } else {
      /* non-local: skip the block exit when the branch is NOT taken */
      jump_ptr = b->jit_ptr;
      amd64_branch32(b->jit_ptr,(cond) ? X86_CC_Z : X86_CC_NZ,0,FALSE);
      ppc32_set_jump(cpu,b,new_ia,TRUE);
      amd64_patch(jump_ptr,b->jit_ptr);
   }

   return(0);
}
842 |
|
843 |
/* BC - Branch Conditional (general form).
 * Builds a 0/1 take-the-branch predicate in RAX by AND-ing:
 *   - the CTR condition (decrement CTR, compare to 0 per BO[1]),
 *     unless BO[2] says to skip the CTR test;
 *   - the CR-bit condition (CR[bi] == BO[3]),
 *     unless BO[4] says the branch is unconditional on CR. */
DECLARE_INSN(BC)
{
   int bo = bits(insn,21,25);
   int bi = bits(insn,16,20);
   int bd = bits(insn,2,15);
   m_uint32_t new_ia;
   u_char *jump_ptr;
   int local_jump;
   int cond,ctr;

   /* Get the wanted value for the condition bit and CTR value */
   cond = (bo >> 3) & 0x1;
   ctr = (bo >> 1) & 0x1;

   /* Set the return address (LK bit) */
   if (insn & 1)
      ppc32_set_lr(b,b->start_ia + (b->ppc_trans_pos << 2));

   /* Compute the new ia (AA bit selects absolute vs relative) */
   new_ia = sign_extend_32(bd << 2,16);
   if (!(insn & 0x02))
      new_ia += b->start_ia + ((b->ppc_trans_pos-1) << 2);

   /* RAX starts as 1 = "take the branch" */
   ppc32_load_imm(b,AMD64_RAX,1);

   /* Decrement the count register and fold its condition into RAX */
   if (!(bo & 0x04)) {
      amd64_dec_membase_size(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ctr),4);
      amd64_set_reg(b->jit_ptr,(ctr) ? X86_CC_Z : X86_CC_NZ,AMD64_RBX,FALSE);
      amd64_alu_reg_reg(b->jit_ptr,X86_AND,AMD64_RAX,AMD64_RBX);
   }

   /* Test the condition bit and fold it into RAX */
   if (!((bo >> 4) & 0x01)) {
      amd64_test_membase_imm_size(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,cr),
                                  (1 << (31 - bi)),4);
      amd64_set_reg(b->jit_ptr,(cond) ? X86_CC_NZ : X86_CC_Z,AMD64_RCX,FALSE);
      amd64_alu_reg_reg(b->jit_ptr,X86_AND,AMD64_RAX,AMD64_RCX);
   }

   /* the AND sets ZF from the final predicate */
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,0x01);

   local_jump = ppc32_jit_tcb_local_addr(b,new_ia,&jump_ptr);

   /*
    * Optimize the jump, depending if the destination is in the same
    * page or not.
    */
   if (local_jump) {
      if (jump_ptr) {
         amd64_branch(b->jit_ptr,X86_CC_NZ,jump_ptr,FALSE);
      } else {
         ppc32_jit_tcb_record_patch(b,b->jit_ptr,new_ia);
         amd64_branch32(b->jit_ptr,X86_CC_NZ,0,FALSE);
      }
   } else {
      /* non-local: skip the block exit when the branch is NOT taken */
      jump_ptr = b->jit_ptr;
      amd64_branch32(b->jit_ptr,X86_CC_Z,0,FALSE);
      ppc32_set_jump(cpu,b,new_ia,TRUE);
      amd64_patch(jump_ptr,b->jit_ptr);
   }

   return(0);
}
908 |
|
909 |
/* BCLR - Branch Conditional to Link register.
 * Same predicate construction as BC (CTR and/or CR-bit conditions AND-ed
 * into RAX), but the branch target is the current LR value. Note the old
 * LR is read into RDX BEFORE the LK bit may overwrite LR with the return
 * address — the branch must use the pre-update LR. */
DECLARE_INSN(BCLR)
{
   int bo = bits(insn,21,25);
   int bi = bits(insn,16,20);
   int bd = bits(insn,2,15);
   m_uint32_t new_ia;
   u_char *jump_ptr;
   int cond,ctr;

   /* Get the wanted value for the condition bit and CTR value */
   cond = (bo >> 3) & 0x1;
   ctr = (bo >> 1) & 0x1;

   /* Compute the new ia (AA bit selects absolute vs relative) */
   new_ia = sign_extend_32(bd << 2,16);
   if (!(insn & 0x02))
      new_ia += b->start_ia + ((b->ppc_trans_pos-1) << 2);

   /* RAX starts as 1 = "take the branch" */
   ppc32_load_imm(b,AMD64_RAX,1);

   /* Decrement the count register and fold its condition into RAX */
   if (!(bo & 0x04)) {
      amd64_dec_membase_size(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ctr),4);
      amd64_set_reg(b->jit_ptr,(ctr) ? X86_CC_Z : X86_CC_NZ,AMD64_RBX,FALSE);
      amd64_alu_reg_reg(b->jit_ptr,X86_AND,AMD64_RAX,AMD64_RBX);
   }

   /* Test the condition bit and fold it into RAX */
   if (!((bo >> 4) & 0x01)) {
      amd64_test_membase_imm_size(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,cr),
                                  (1 << (31 - bi)),4);
      amd64_set_reg(b->jit_ptr,(cond) ? X86_CC_NZ : X86_CC_Z,AMD64_RCX,FALSE);
      amd64_alu_reg_reg(b->jit_ptr,X86_AND,AMD64_RAX,AMD64_RCX);
   }

   /* RDX = old LR (must be read before a possible LR update below) */
   amd64_mov_reg_membase(b->jit_ptr,AMD64_RDX,
                         AMD64_R15,OFFSET(cpu_ppc_t,lr),4);

   if (insn & 1)
      ppc32_set_lr(b,b->start_ia + (b->ppc_trans_pos << 2));

   /* Branching: ZF set from the final predicate */
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,0x01);

   jump_ptr = b->jit_ptr;
   amd64_branch32(b->jit_ptr,X86_CC_Z,0,FALSE);

   /* taken: IA = old LR with the two low bits cleared, then exit block */
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RDX,0xFFFFFFFC);
   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ia),
                         AMD64_RDX,4);
   ppc32_jit_tcb_push_epilog(b);

   amd64_patch(jump_ptr,b->jit_ptr);
   return(0);
}
966 |
|
967 |
/* CMP - Compare (signed): CR[rd] = compare($ra,$rb) */
DECLARE_INSN(CMP)
{
   int rd = bits(insn,23,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   ppc32_load_gpr(b,AMD64_RBX,ra);
   ppc32_alu_gpr(b,X86_CMP,AMD64_RBX,rb);
   ppc32_update_cr(b,rd,TRUE);
   return(0);
}
979 |
|
980 |
/* CMPI - Compare Immediate (signed): CR[rd] = compare($ra,sext(imm)) */
DECLARE_INSN(CMPI)
{
   int rd = bits(insn,23,25);
   int ra = bits(insn,16,20);
   m_uint16_t imm = bits(insn,0,15);
   m_uint32_t tmp = sign_extend_32(imm,16);

   ppc32_load_imm(b,AMD64_RBX,tmp);
   ppc32_load_gpr(b,AMD64_RSI,ra);
   amd64_alu_reg_reg_size(b->jit_ptr,X86_CMP,AMD64_RSI,AMD64_RBX,4);

   ppc32_update_cr(b,rd,TRUE);
   return(0);
}
995 |
|
996 |
/* CMPL - Compare Logical (unsigned): CR[rd] = compare($ra,$rb) */
DECLARE_INSN(CMPL)
{
   int rd = bits(insn,23,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   ppc32_load_gpr(b,AMD64_RAX,ra);
   ppc32_alu_gpr(b,X86_CMP,AMD64_RAX,rb);
   ppc32_update_cr(b,rd,FALSE);
   return(0);
}
1008 |
|
1009 |
/* CMPLI - Compare Logical Immediate */
DECLARE_INSN(CMPLI)
{
   int rd = bits(insn,23,25);   /* crfD: destination CR field */
   int ra = bits(insn,16,20);
   m_uint16_t imm = bits(insn,0,15);   /* zero-extended (logical compare) */

   /* 32-bit unsigned compare: gpr[ra] - uimm */
   ppc32_load_imm(b,AMD64_RBX,imm);
   ppc32_load_gpr(b,AMD64_RSI,ra);
   amd64_alu_reg_reg_size(b->jit_ptr,X86_CMP,AMD64_RSI,AMD64_RBX,4);

   ppc32_update_cr(b,rd,FALSE);
   return(0);
}
1023 |
|
1024 |
/* CRAND - Condition Register AND: CR[bd] = CR[ba] & CR[bb] */
DECLARE_INSN(CRAND)
{
   int bd = bits(insn,21,25);   /* destination CR bit */
   int bb = bits(insn,16,20);   /* source CR bit B */
   int ba = bits(insn,11,15);   /* source CR bit A */

   /* %rsi holds the whole CR value */
   ppc32_load_cr(b,AMD64_RSI);

   /* test $ba bit */
   amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - ba)),4);
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE);

   /* test $bb bit */
   amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - bb)),4);
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RBX,FALSE);

   /* result of AND between $ba and $bb (keep only bit 0) */
   amd64_alu_reg_reg(b->jit_ptr,X86_AND,AMD64_RBX,AMD64_RAX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01);

   /* set/clear $bd bit depending on the result */
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RSI,~(1 << (31 - bd)));
   amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,(31 - bd));
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RSI,AMD64_RBX);

   ppc32_store_cr(b,AMD64_RSI);
   return(0);
}
1053 |
|
1054 |
/* CRANDC - Condition Register AND with Complement:
 * CR[bd] = CR[ba] & ~CR[bb] */
DECLARE_INSN(CRANDC)
{
   int bd = bits(insn,21,25);
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);

   ppc32_load_cr(b,AMD64_RSI);

   /* test $ba bit */
   amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - ba)),4);
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE);

   /* test $bb bit (CC_Z: complement of $bb) */
   amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - bb)),4);
   amd64_set_reg(b->jit_ptr,X86_CC_Z,AMD64_RBX,FALSE);

   /* result of AND between $ba and ~$bb */
   amd64_alu_reg_reg(b->jit_ptr,X86_AND,AMD64_RBX,AMD64_RAX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01);

   /* set/clear $bd bit depending on the result */
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RSI,~(1 << (31 - bd)));
   amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,(31 - bd));
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RSI,AMD64_RBX);

   ppc32_store_cr(b,AMD64_RSI);
   return(0);
}
1083 |
|
1084 |
/* CREQV - Condition Register Equivalent: CR[bd] = ~(CR[ba] ^ CR[bb]) */
DECLARE_INSN(CREQV)
{
   int bd = bits(insn,21,25);
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);

   ppc32_load_cr(b,AMD64_RSI);

   /* test $ba bit */
   amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - ba)),4);
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE);

   /* test $bb bit */
   amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - bb)),4);
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RBX,FALSE);

   /* result of XOR between $ba and $bb, then complemented (EQV) */
   amd64_alu_reg_reg(b->jit_ptr,X86_XOR,AMD64_RBX,AMD64_RAX);
   amd64_not_reg(b->jit_ptr,AMD64_RBX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01);

   /* set/clear $bd bit depending on the result */
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RSI,~(1 << (31 - bd)));
   amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,(31 - bd));
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RSI,AMD64_RBX);

   ppc32_store_cr(b,AMD64_RSI);
   return(0);
}
1114 |
|
1115 |
/* CRNAND - Condition Register NAND: CR[bd] = ~(CR[ba] & CR[bb]) */
DECLARE_INSN(CRNAND)
{
   int bd = bits(insn,21,25);
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);

   ppc32_load_cr(b,AMD64_RSI);

   /* test $ba bit */
   amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - ba)),4);
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE);

   /* test $bb bit */
   amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - bb)),4);
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RBX,FALSE);

   /* result of NAND between $ba and $bb */
   amd64_alu_reg_reg(b->jit_ptr,X86_AND,AMD64_RBX,AMD64_RAX);
   amd64_not_reg(b->jit_ptr,AMD64_RBX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01);

   /* set/clear $bd bit depending on the result */
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RSI,~(1 << (31 - bd)));
   amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,(31 - bd));
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RSI,AMD64_RBX);

   ppc32_store_cr(b,AMD64_RSI);
   return(0);
}
1145 |
|
1146 |
/* CRNOR - Condition Register NOR: CR[bd] = ~(CR[ba] | CR[bb]) */
DECLARE_INSN(CRNOR)
{
   int bd = bits(insn,21,25);
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);

   ppc32_load_cr(b,AMD64_RSI);

   /* test $ba bit */
   amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - ba)),4);
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE);

   /* test $bb bit */
   amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - bb)),4);
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RBX,FALSE);

   /* result of NOR between $ba and $bb */
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RBX,AMD64_RAX);
   amd64_not_reg(b->jit_ptr,AMD64_RBX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01);

   /* set/clear $bd bit depending on the result */
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RSI,~(1 << (31 - bd)));
   amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,(31 - bd));
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RSI,AMD64_RBX);

   ppc32_store_cr(b,AMD64_RSI);
   return(0);
}
1176 |
|
1177 |
/* CROR - Condition Register OR: CR[bd] = CR[ba] | CR[bb] */
DECLARE_INSN(CROR)
{
   int bd = bits(insn,21,25);
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);

   ppc32_load_cr(b,AMD64_RSI);

   /* test $ba bit */
   amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - ba)),4);
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE);

   /* test $bb bit */
   amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - bb)),4);
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RBX,FALSE);

   /* result of OR between $ba and $bb
    * (original comment wrongly said NOR; no complement is done here) */
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RBX,AMD64_RAX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01);

   /* set/clear $bd bit depending on the result */
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RSI,~(1 << (31 - bd)));
   amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,(31 - bd));
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RSI,AMD64_RBX);

   ppc32_store_cr(b,AMD64_RSI);
   return(0);
}
1206 |
|
1207 |
/* CRORC - Condition Register OR with Complement:
 * CR[bd] = CR[ba] | ~CR[bb] */
DECLARE_INSN(CRORC)
{
   int bd = bits(insn,21,25);
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);

   ppc32_load_cr(b,AMD64_RSI);

   /* test $ba bit */
   amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - ba)),4);
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE);

   /* test $bb bit (CC_Z: complement of $bb) */
   amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - bb)),4);
   amd64_set_reg(b->jit_ptr,X86_CC_Z,AMD64_RBX,FALSE);

   /* result of ORC between $ba and $bb */
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RBX,AMD64_RAX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01);

   /* set/clear $bd bit depending on the result */
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RSI,~(1 << (31 - bd)));
   amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,(31 - bd));
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RSI,AMD64_RBX);

   ppc32_store_cr(b,AMD64_RSI);
   return(0);
}
1236 |
|
1237 |
/* CRXOR - Condition Register XOR: CR[bd] = CR[ba] ^ CR[bb] */
DECLARE_INSN(CRXOR)
{
   int bd = bits(insn,21,25);
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);

   ppc32_load_cr(b,AMD64_RSI);

   /* test $ba bit */
   amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - ba)),4);
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RAX,FALSE);

   /* test $bb bit */
   amd64_test_reg_imm_size(b->jit_ptr,AMD64_RSI,(1 << (31 - bb)),4);
   amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RBX,FALSE);

   /* result of XOR between $ba and $bb */
   amd64_alu_reg_reg(b->jit_ptr,X86_XOR,AMD64_RBX,AMD64_RAX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x01);

   /* set/clear $bd bit depending on the result */
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RSI,~(1 << (31 - bd)));
   amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,(31 - bd));
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RSI,AMD64_RBX);

   ppc32_store_cr(b,AMD64_RSI);
   return(0);
}
1266 |
|
1267 |
/* DIVWU - Divide Word Unsigned */ |
1268 |
DECLARE_INSN(DIVWU) |
1269 |
{ |
1270 |
int rd = bits(insn,21,25); |
1271 |
int ra = bits(insn,16,20); |
1272 |
int rb = bits(insn,11,15); |
1273 |
|
1274 |
ppc32_load_gpr(b,AMD64_RAX,ra); |
1275 |
ppc32_load_gpr(b,AMD64_RBX,rb); |
1276 |
ppc32_load_imm(b,AMD64_RDX,0); |
1277 |
|
1278 |
amd64_div_reg_size(b->jit_ptr,AMD64_RBX,0,4); |
1279 |
ppc32_store_gpr(b,rd,AMD64_RAX); |
1280 |
|
1281 |
if (insn & 1) { |
1282 |
amd64_test_reg_reg(b->jit_ptr,AMD64_RAX,AMD64_RAX); |
1283 |
ppc32_update_cr0(b); |
1284 |
} |
1285 |
|
1286 |
return(0); |
1287 |
} |
1288 |
|
1289 |
/* EQV - Equivalent: gpr[ra] = ~(gpr[rs] ^ gpr[rb]) */
DECLARE_INSN(EQV)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* $ra = ~($rs ^ $rb) */
   ppc32_load_gpr(b,AMD64_RBX,rs);
   ppc32_alu_gpr(b,X86_XOR,AMD64_RBX,rb);
   amd64_not_reg(b->jit_ptr,AMD64_RBX);
   ppc32_store_gpr(b,ra,AMD64_RBX);

   if (insn & 1) {
      /* Rc bit: update CR0 from the 32-bit result */
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RBX,AMD64_RBX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
1309 |
|
1310 |
/* EXTSB - Extend Sign Byte: gpr[ra] = sign_extend8(gpr[rs]) */
DECLARE_INSN(EXTSB)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);

   /* sign extension via SHL 24 / SAR 24 on the 32-bit register */
   ppc32_load_gpr(b,AMD64_RBX,rs);
   amd64_shift_reg_imm_size(b->jit_ptr,X86_SHL,AMD64_RBX,24,4);
   amd64_shift_reg_imm_size(b->jit_ptr,X86_SAR,AMD64_RBX,24,4);
   ppc32_store_gpr(b,ra,AMD64_RBX);

   if (insn & 1) {
      /* Rc bit: update CR0 */
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RBX,AMD64_RBX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
1328 |
|
1329 |
/* EXTSH - Extend Sign Half-Word: gpr[ra] = sign_extend16(gpr[rs])
 * (original comment said "Word") */
DECLARE_INSN(EXTSH)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);

   /* sign extension via SHL 16 / SAR 16 on the 32-bit register */
   ppc32_load_gpr(b,AMD64_RBX,rs);
   amd64_shift_reg_imm_size(b->jit_ptr,X86_SHL,AMD64_RBX,16,4);
   amd64_shift_reg_imm_size(b->jit_ptr,X86_SAR,AMD64_RBX,16,4);
   ppc32_store_gpr(b,ra,AMD64_RBX);

   if (insn & 1) {
      /* Rc bit: update CR0 */
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RBX,AMD64_RBX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
1347 |
|
1348 |
/* LBZ - Load Byte and Zero */
DECLARE_INSN(LBZ)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t offset = bits(insn,0,15);

   /* fast inline path; slow generic memop kept for reference */
   //ppc32_emit_memop(b,PPC_MEMOP_LBZ,ra,offset,rs,0);
   ppc32_emit_memop_fast(b,0,PPC_MEMOP_LBZ,ra,offset,rs,ppc32_memop_fast_lbz);
   return(0);
}
1359 |
|
1360 |
/* LBZU - Load Byte and Zero with Update */
DECLARE_INSN(LBZU)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t offset = bits(insn,0,15);

   /* last arg 1: update form ($ra receives the effective address) */
   ppc32_emit_memop(b,PPC_MEMOP_LBZ,ra,offset,rs,1);
   return(0);
}
1370 |
|
1371 |
/* LBZUX - Load Byte and Zero with Update Indexed */
DECLARE_INSN(LBZUX)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* indexed form (EA = $ra + $rb), with base-register update */
   ppc32_emit_memop_idx(b,PPC_MEMOP_LBZ,ra,rb,rs,1);
   return(0);
}
1381 |
|
1382 |
/* LBZX - Load Byte and Zero Indexed */
DECLARE_INSN(LBZX)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* indexed form (EA = $ra + $rb), no update */
   ppc32_emit_memop_idx(b,PPC_MEMOP_LBZ,ra,rb,rs,0);
   return(0);
}
1392 |
|
1393 |
/* LHA - Load Half-Word Algebraic (sign-extending) */
DECLARE_INSN(LHA)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t offset = bits(insn,0,15);

   ppc32_emit_memop(b,PPC_MEMOP_LHA,ra,offset,rs,0);
   return(0);
}
1403 |
|
1404 |
/* LHAU - Load Half-Word Algebraic with Update */
DECLARE_INSN(LHAU)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t offset = bits(insn,0,15);

   /* last arg 1: update form ($ra receives the effective address) */
   ppc32_emit_memop(b,PPC_MEMOP_LHA,ra,offset,rs,1);
   return(0);
}
1414 |
|
1415 |
/* LHAUX - Load Half-Word Algebraic with Update Indexed */
DECLARE_INSN(LHAUX)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   ppc32_emit_memop_idx(b,PPC_MEMOP_LHA,ra,rb,rs,1);
   return(0);
}
1425 |
|
1426 |
/* LHAX - Load Half-Word Algebraic Indexed */
DECLARE_INSN(LHAX)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   ppc32_emit_memop_idx(b,PPC_MEMOP_LHA,ra,rb,rs,0);
   return(0);
}
1436 |
|
1437 |
/* LHZ - Load Half-Word and Zero */
DECLARE_INSN(LHZ)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t offset = bits(insn,0,15);

   ppc32_emit_memop(b,PPC_MEMOP_LHZ,ra,offset,rs,0);
   return(0);
}
1447 |
|
1448 |
/* LHZU - Load Half-Word and Zero with Update */
DECLARE_INSN(LHZU)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t offset = bits(insn,0,15);

   /* last arg 1: update form ($ra receives the effective address) */
   ppc32_emit_memop(b,PPC_MEMOP_LHZ,ra,offset,rs,1);
   return(0);
}
1458 |
|
1459 |
/* LHZUX - Load Half-Word and Zero with Update Indexed */
DECLARE_INSN(LHZUX)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   ppc32_emit_memop_idx(b,PPC_MEMOP_LHZ,ra,rb,rs,1);
   return(0);
}
1469 |
|
1470 |
/* LHZX - Load Half-Word and Zero Indexed */
DECLARE_INSN(LHZX)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   ppc32_emit_memop_idx(b,PPC_MEMOP_LHZ,ra,rb,rs,0);
   return(0);
}
1480 |
|
1481 |
/* LWZ - Load Word and Zero */
DECLARE_INSN(LWZ)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t offset = bits(insn,0,15);

   /* fast inline path; slow generic memop kept for reference */
   //ppc32_emit_memop(b,PPC_MEMOP_LWZ,ra,offset,rs,0);
   ppc32_emit_memop_fast(b,0,PPC_MEMOP_LWZ,ra,offset,rs,ppc32_memop_fast_lwz);
   return(0);
}
1492 |
|
1493 |
/* LWZU - Load Word and Zero with Update */
DECLARE_INSN(LWZU)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t offset = bits(insn,0,15);

   /* last arg 1: update form ($ra receives the effective address) */
   ppc32_emit_memop(b,PPC_MEMOP_LWZ,ra,offset,rs,1);
   return(0);
}
1503 |
|
1504 |
/* LWZUX - Load Word and Zero with Update Indexed */
DECLARE_INSN(LWZUX)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   ppc32_emit_memop_idx(b,PPC_MEMOP_LWZ,ra,rb,rs,1);
   return(0);
}
1514 |
|
1515 |
/* LWZX - Load Word and Zero Indexed */
DECLARE_INSN(LWZX)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   ppc32_emit_memop_idx(b,PPC_MEMOP_LWZ,ra,rb,rs,0);
   return(0);
}
1525 |
|
1526 |
/* MCRF - Move Condition Register Field */ |
1527 |
DECLARE_INSN(MCRF) |
1528 |
{ |
1529 |
int rd = bits(insn,23,25); |
1530 |
int rs = bits(insn,18,20); |
1531 |
m_uint32_t dmask; |
1532 |
|
1533 |
/* %rax = %rbx = CR */ |
1534 |
ppc32_load_cr(b,AMD64_RAX); |
1535 |
amd64_mov_reg_reg(b->jit_ptr,X86_EBX,X86_EAX,8); |
1536 |
|
1537 |
amd64_shift_reg_imm(b->jit_ptr,X86_SHR,AMD64_RBX,(28 - (rs << 2))); |
1538 |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,0x0F); |
1539 |
amd64_shift_reg_imm(b->jit_ptr,X86_SHL,AMD64_RBX,(28 - (rd << 2))); |
1540 |
|
1541 |
/* clear the destination bits */ |
1542 |
dmask = (0xF0000000 >> (rd << 2)); |
1543 |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,~dmask); |
1544 |
|
1545 |
/* set the new field value */ |
1546 |
amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RAX,AMD64_RBX); |
1547 |
ppc32_store_cr(b,AMD64_RAX); |
1548 |
return(0); |
1549 |
} |
1550 |
|
1551 |
/* MFCR - Move from Condition Register: gpr[rd] = CR */
DECLARE_INSN(MFCR)
{
   int rd = bits(insn,21,25);

   ppc32_load_cr(b,AMD64_RAX);
   ppc32_store_gpr(b,rd,AMD64_RAX);
   return(0);
}
1560 |
|
1561 |
/* MFMSR - Move from Machine State Register: gpr[rd] = MSR */
DECLARE_INSN(MFMSR)
{
   int rd = bits(insn,21,25);

   amd64_mov_reg_membase(b->jit_ptr,AMD64_RAX,
                         AMD64_R15,OFFSET(cpu_ppc_t,msr),4);
   ppc32_store_gpr(b,rd,AMD64_RAX);
   return(0);
}
1571 |
|
1572 |
/* MFSR - Move From Segment Register: gpr[rd] = SR[sr] */
DECLARE_INSN(MFSR)
{
   int rd = bits(insn,21,25);
   int sr = bits(insn,16,19);   /* 4-bit segment register index */

   /* sr << 2: index into the 32-bit sr[] array */
   amd64_mov_reg_membase(b->jit_ptr,AMD64_RAX,
                         AMD64_R15,(OFFSET(cpu_ppc_t,sr) + (sr << 2)),4);
   ppc32_store_gpr(b,rd,AMD64_RAX);
   return(0);
}
1583 |
|
1584 |
/* MTCRF - Move to Condition Register Fields:
 * copy the CR fields selected by the CRM mask from gpr[rs] into CR. */
DECLARE_INSN(MTCRF)
{
   int rs = bits(insn,21,25);
   int crm = bits(insn,12,19);   /* 8-bit field mask, one bit per CR field */
   m_uint32_t mask = 0;
   int i;

   /* expand CRM into a 32-bit nibble mask */
   for(i=0;i<8;i++)
      if (crm & (1 << i))
         mask |= 0xF << (i << 2);

   /* keep unselected CR bits */
   ppc32_load_cr(b,AMD64_RAX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,~mask);

   /* take selected bits from gpr[rs] */
   ppc32_load_gpr(b,AMD64_RDX,rs);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RDX,mask);

   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RDX,AMD64_RAX);
   ppc32_store_cr(b,AMD64_RDX);
   return(0);
}
1606 |
|
1607 |
/* MULHW - Multiply High Word (signed):
 * gpr[rd] = high 32 bits of gpr[ra] * gpr[rb] */
DECLARE_INSN(MULHW)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* 32-bit signed mul (flag 1): EDX:EAX = EAX * EBX, high half in EDX */
   ppc32_load_gpr(b,AMD64_RAX,ra);
   ppc32_load_gpr(b,AMD64_RBX,rb);
   amd64_mul_reg_size(b->jit_ptr,AMD64_RBX,1,4);
   ppc32_store_gpr(b,rd,AMD64_RDX);

   if (insn & 1) {
      /* Rc bit: update CR0 from the high half */
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RDX,AMD64_RDX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
1626 |
|
1627 |
/* MULHWU - Multiply High Word Unsigned:
 * gpr[rd] = high 32 bits of gpr[ra] * gpr[rb] (unsigned) */
DECLARE_INSN(MULHWU)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* 32-bit unsigned mul (flag 0): EDX:EAX = EAX * EBX */
   ppc32_load_gpr(b,AMD64_RAX,ra);
   ppc32_load_gpr(b,AMD64_RBX,rb);
   amd64_mul_reg_size(b->jit_ptr,AMD64_RBX,0,4);
   ppc32_store_gpr(b,rd,AMD64_RDX);

   if (insn & 1) {
      /* Rc bit: update CR0 from the high half */
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RDX,AMD64_RDX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
1646 |
|
1647 |
/* MULLI - Multiply Low Immediate */ |
1648 |
DECLARE_INSN(MULLI) |
1649 |
{ |
1650 |
int rd = bits(insn,21,25); |
1651 |
int ra = bits(insn,16,20); |
1652 |
m_uint32_t imm = bits(insn,0,15); |
1653 |
|
1654 |
ppc32_load_gpr(b,AMD64_RAX,ra); |
1655 |
ppc32_load_imm(b,AMD64_RBX,sign_extend_32(imm,16)); |
1656 |
|
1657 |
amd64_mul_reg_size(b->jit_ptr,AMD64_RBX,1,4); |
1658 |
ppc32_store_gpr(b,rd,X86_EAX); |
1659 |
return(0); |
1660 |
} |
1661 |
|
1662 |
/* MULLW - Multiply Low Word:
 * gpr[rd] = low 32 bits of gpr[ra] * gpr[rb] */
DECLARE_INSN(MULLW)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* 32-bit signed mul (flag 1); low half in RAX */
   ppc32_load_gpr(b,AMD64_RAX,ra);
   ppc32_load_gpr(b,AMD64_RBX,rb);
   amd64_mul_reg_size(b->jit_ptr,AMD64_RBX,1,4);
   ppc32_store_gpr(b,rd,AMD64_RAX);

   if (insn & 1) {
      /* Rc bit: update CR0 from the low half */
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RAX,AMD64_RAX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
1681 |
|
1682 |
/* NAND: gpr[ra] = ~(gpr[rs] & gpr[rb]) */
DECLARE_INSN(NAND)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* $ra = ~($rs & $rb) */
   ppc32_load_gpr(b,AMD64_RBX,rs);
   ppc32_alu_gpr(b,X86_AND,AMD64_RBX,rb);
   amd64_not_reg(b->jit_ptr,AMD64_RBX);
   ppc32_store_gpr(b,ra,AMD64_RBX);

   if (insn & 1) {
      /* Rc bit: update CR0 (NOT does not set flags, so test explicitly) */
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RBX,AMD64_RBX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
1702 |
|
1703 |
/* NEG: gpr[rd] = -gpr[ra] (two's complement) */
DECLARE_INSN(NEG)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);

   ppc32_load_gpr(b,AMD64_RBX,ra);
   amd64_neg_reg(b->jit_ptr,AMD64_RBX);
   ppc32_store_gpr(b,rd,AMD64_RBX);

   if (insn & 1) {
      /* Rc bit: update CR0 */
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RBX,AMD64_RBX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
1720 |
|
1721 |
/* NOR: gpr[ra] = ~(gpr[rs] | gpr[rb]) */
DECLARE_INSN(NOR)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* $ra = ~($rs | $rb) */
   ppc32_load_gpr(b,AMD64_RBX,rs);
   ppc32_alu_gpr(b,X86_OR,AMD64_RBX,rb);
   amd64_not_reg(b->jit_ptr,AMD64_RBX);
   ppc32_store_gpr(b,ra,AMD64_RBX);

   if (insn & 1) {
      /* Rc bit: update CR0 (NOT does not set flags, so test explicitly) */
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RBX,AMD64_RBX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
1741 |
|
1742 |
/* OR: gpr[ra] = gpr[rs] | gpr[rb].
 * With rs == rb this is the canonical "mr" (move register) form. */
DECLARE_INSN(OR)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   ppc32_load_gpr(b,AMD64_RCX,rs);

   /* skip the OR when rs == rb ("mr" idiom) */
   if (rs != rb)
      ppc32_alu_gpr(b,X86_OR,AMD64_RCX,rb);

   ppc32_store_gpr(b,ra,AMD64_RCX);

   if (insn & 1) {
      /* Rc bit: if the OR was skipped, flags are stale — test explicitly;
       * otherwise the OR already set them */
      if (rs == rb)
         amd64_test_reg_reg_size(b->jit_ptr,AMD64_RCX,AMD64_RCX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
1764 |
|
1765 |
/* ORC - OR with Complement: gpr[ra] = gpr[rs] | ~gpr[rb] */
DECLARE_INSN(ORC)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* $ra = $rs | ~$rb */
   ppc32_load_gpr(b,AMD64_RBX,rb);
   amd64_not_reg(b->jit_ptr,AMD64_RBX);
   ppc32_alu_gpr(b,X86_OR,AMD64_RBX,rs);
   ppc32_store_gpr(b,ra,AMD64_RBX);

   /* Rc bit: CR0 from the flags the final OR left behind */
   if (insn & 1)
      ppc32_update_cr0(b);

   return(0);
}
1783 |
|
1784 |
/* ORI - OR Immediate: gpr[ra] = gpr[rs] | uimm (no Rc form) */
DECLARE_INSN(ORI)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t imm = bits(insn,0,15);

   /* $ra = $rs | imm */
   ppc32_load_imm(b,AMD64_RBX,imm);
   ppc32_alu_gpr(b,X86_OR,AMD64_RBX,rs);
   ppc32_store_gpr(b,ra,AMD64_RBX);
   return(0);
}
1797 |
|
1798 |
/* ORIS - OR Immediate Shifted: gpr[ra] = gpr[rs] | (uimm << 16) */
DECLARE_INSN(ORIS)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint32_t imm = bits(insn,0,15);   /* 32-bit so the << 16 cannot overflow */

   /* $ra = $rs | (imm << 16) */
   ppc32_load_imm(b,AMD64_RBX,imm << 16);
   ppc32_alu_gpr(b,X86_OR,AMD64_RBX,rs);
   ppc32_store_gpr(b,ra,AMD64_RBX);
   return(0);
}
1811 |
|
1812 |
/* RLWIMI - Rotate Left Word Immediate then Mask Insert:
 * gpr[ra] = (rotl32(gpr[rs],sh) & mask) | (gpr[ra] & ~mask) */
DECLARE_INSN(RLWIMI)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int sh = bits(insn,11,15);
   int mb = bits(insn,6,10);   /* mask begin bit */
   int me = bits(insn,1,5);    /* mask end bit */
   register m_uint32_t mask;

   mask = ppc32_rotate_mask(mb,me);

   /* Apply inverse mask to %eax "ra" */
   ppc32_load_gpr(b,AMD64_RAX,ra);
   if (mask != 0)
      amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,~mask);

   /* Rotate %ebx ("rs") of "sh" bits and apply the mask */
   ppc32_load_gpr(b,AMD64_RBX,rs);

   if (sh != 0)
      amd64_shift_reg_imm_size(b->jit_ptr,X86_ROL,AMD64_RBX,sh,4);

   if (mask != 0xFFFFFFFF)
      amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,mask);

   /* Store the result (merge of inserted and preserved bits) */
   amd64_alu_reg_reg_size(b->jit_ptr,X86_OR,AMD64_RBX,AMD64_RAX,4);
   ppc32_store_gpr(b,ra,AMD64_RBX);

   /* Rc bit: CR0 from the flags left by the final OR */
   if (insn & 1)
      ppc32_update_cr0(b);

   return(0);
}
1847 |
|
1848 |
/* RLWINM - Rotate Left Word Immediate then AND with Mask:
 * gpr[ra] = rotl32(gpr[rs],sh) & mask */
DECLARE_INSN(RLWINM)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int sh = bits(insn,11,15);
   int mb = bits(insn,6,10);   /* mask begin bit */
   int me = bits(insn,1,5);    /* mask end bit */
   register m_uint32_t mask;

   mask = ppc32_rotate_mask(mb,me);

   /* Rotate %ebx ("rs") of "sh" bits and apply the mask */
   ppc32_load_gpr(b,AMD64_RBX,rs);

   if (sh != 0)
      amd64_shift_reg_imm_size(b->jit_ptr,X86_ROL,AMD64_RBX,sh,4);

   if (mask != 0xFFFFFFFF)
      amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,mask);

   ppc32_store_gpr(b,ra,AMD64_RBX);

   if (insn & 1) {
      /* Rc bit: update CR0 */
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RBX,AMD64_RBX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
1878 |
|
1879 |
/* RLWNM - Rotate Left Word then AND with Mask:
 * gpr[ra] = rotl32(gpr[rs], gpr[rb]) & mask
 * (original comment wrongly said "then Mask Insert") */
DECLARE_INSN(RLWNM)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int mb = bits(insn,6,10);   /* mask begin bit */
   int me = bits(insn,1,5);    /* mask end bit */
   register m_uint32_t mask;

   mask = ppc32_rotate_mask(mb,me);

   /* Load the shift register ("sh"); variable shift uses %cl */
   ppc32_load_gpr(b,AMD64_RCX,rb);

   /* Rotate %ebx ("rs") and apply the mask */
   ppc32_load_gpr(b,AMD64_RBX,rs);
   amd64_shift_reg_size(b->jit_ptr,X86_ROL,AMD64_RBX,4);

   if (mask != 0xFFFFFFFF)
      amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,mask);

   ppc32_store_gpr(b,ra,AMD64_RBX);

   if (insn & 1) {
      /* Rc bit: update CR0 */
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RBX,AMD64_RBX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
1910 |
|
1911 |
/* SLW - Shift Left Word: gpr[ra] = gpr[rs] << (gpr[rb] & 0x3f) */
DECLARE_INSN(SLW)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* If count >= 32, then null result: keep 6 count bits and do a 64-bit
    * shift, so counts 32..63 push all 32 low bits out */
   ppc32_load_gpr(b,AMD64_RCX,rb);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RCX,0x3f);

   ppc32_load_gpr(b,AMD64_RBX,rs);
   amd64_shift_reg(b->jit_ptr,X86_SHL,AMD64_RBX);

   /* Store the result (low 32 bits) */
   ppc32_store_gpr(b,ra,AMD64_RBX);

   if (insn & 1) {
      /* Rc bit: update CR0 */
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RBX,AMD64_RBX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
1935 |
|
1936 |
/* SRAWI - Shift Right Algebraic Word Immediate */ |
1937 |
DECLARE_INSN(SRAWI) |
1938 |
{ |
1939 |
int rs = bits(insn,21,25); |
1940 |
int ra = bits(insn,16,20); |
1941 |
int sh = bits(insn,11,15); |
1942 |
register m_uint32_t mask; |
1943 |
|
1944 |
mask = ~(0xFFFFFFFFU << sh); |
1945 |
|
1946 |
/* $ra = (int32)$rs >> sh */ |
1947 |
ppc32_load_gpr(b,AMD64_RBX,rs); |
1948 |
amd64_mov_reg_reg(b->jit_ptr,AMD64_RSI,AMD64_RBX,4); |
1949 |
amd64_shift_reg_imm_size(b->jit_ptr,X86_SAR,AMD64_RBX,sh,4); |
1950 |
ppc32_store_gpr(b,ra,AMD64_RBX); |
1951 |
|
1952 |
/* test the sign-bit of gpr[rs] */ |
1953 |
amd64_test_reg_reg_size(b->jit_ptr,AMD64_RSI,AMD64_RSI,4); |
1954 |
amd64_set_reg(b->jit_ptr,X86_CC_LT,AMD64_RAX,TRUE); |
1955 |
|
1956 |
amd64_alu_reg_imm_size(b->jit_ptr,X86_AND,AMD64_RSI,mask,4); |
1957 |
amd64_set_reg(b->jit_ptr,X86_CC_NZ,AMD64_RCX,TRUE); |
1958 |
|
1959 |
amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RCX,AMD64_RAX); |
1960 |
amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RCX,0x1); |
1961 |
amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca), |
1962 |
AMD64_RCX,4); |
1963 |
|
1964 |
if (insn & 1) { |
1965 |
amd64_test_reg_reg_size(b->jit_ptr,AMD64_RBX,AMD64_RBX,4); |
1966 |
ppc32_update_cr0(b); |
1967 |
} |
1968 |
|
1969 |
return(0); |
1970 |
} |
1971 |
|
1972 |
/* SRW - Shift Right Word: gpr[ra] = gpr[rs] >> (gpr[rb] & 0x3f) (logical) */
DECLARE_INSN(SRW)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);

   /* If count >= 32, then null result: keep 6 count bits and do a 64-bit
    * shift on the zero-extended value, so counts 32..63 yield 0 */
   ppc32_load_gpr(b,AMD64_RCX,rb);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RCX,0x3f);

   ppc32_load_gpr(b,AMD64_RBX,rs);
   amd64_shift_reg(b->jit_ptr,X86_SHR,AMD64_RBX);

   /* Store the result */
   ppc32_store_gpr(b,ra,AMD64_RBX);

   if (insn & 1) {
      /* Rc bit: update CR0 */
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RBX,AMD64_RBX,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
1996 |
|
1997 |
/* STB - Store Byte */
DECLARE_INSN(STB)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t offset = bits(insn,0,15);

   /* fast inline path (first arg 1 = store); slow memop kept for reference */
   //ppc32_emit_memop(b,PPC_MEMOP_STB,ra,offset,rs,0);
   ppc32_emit_memop_fast(b,1,PPC_MEMOP_STB,ra,offset,rs,ppc32_memop_fast_stb);
   return(0);
}
2008 |
|
2009 |
/* STBU - Store Byte with Update */
DECLARE_INSN(STBU)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t offset = bits(insn,0,15);

   /* last arg 1: update form ($ra receives the effective address) */
   ppc32_emit_memop(b,PPC_MEMOP_STB,ra,offset,rs,1);
   return(0);
}
2019 |
|
2020 |
/* STBUX - Store Byte with Update Indexed */ |
2021 |
DECLARE_INSN(STBUX) |
2022 |
{ |
2023 |
int rs = bits(insn,21,25); |
2024 |
int ra = bits(insn,16,20); |
2025 |
int rb = bits(insn,11,15); |
2026 |
|
2027 |
ppc32_emit_memop_idx(b,PPC_MEMOP_STB,ra,rb,rs,1); |
2028 |
return(0); |
2029 |
} |
2030 |
|
2031 |
/* STBUX - Store Byte Indexed */ |
2032 |
DECLARE_INSN(STBX) |
2033 |
{ |
2034 |
int rs = bits(insn,21,25); |
2035 |
int ra = bits(insn,16,20); |
2036 |
int rb = bits(insn,11,15); |
2037 |
|
2038 |
ppc32_emit_memop_idx(b,PPC_MEMOP_STB,ra,rb,rs,0); |
2039 |
return(0); |
2040 |
} |
2041 |
|
2042 |
/* STH - Store Half-Word */ |
2043 |
DECLARE_INSN(STH) |
2044 |
{ |
2045 |
int rs = bits(insn,21,25); |
2046 |
int ra = bits(insn,16,20); |
2047 |
m_uint16_t offset = bits(insn,0,15); |
2048 |
|
2049 |
ppc32_emit_memop(b,PPC_MEMOP_STH,ra,offset,rs,0); |
2050 |
return(0); |
2051 |
} |
2052 |
|
2053 |
/* STHU - Store Half-Word with Update */ |
2054 |
DECLARE_INSN(STHU) |
2055 |
{ |
2056 |
int rs = bits(insn,21,25); |
2057 |
int ra = bits(insn,16,20); |
2058 |
m_uint16_t offset = bits(insn,0,15); |
2059 |
|
2060 |
ppc32_emit_memop(b,PPC_MEMOP_STH,ra,offset,rs,1); |
2061 |
return(0); |
2062 |
} |
2063 |
|
2064 |
/* STHUX - Store Half-Word with Update Indexed */ |
2065 |
DECLARE_INSN(STHUX) |
2066 |
{ |
2067 |
int rs = bits(insn,21,25); |
2068 |
int ra = bits(insn,16,20); |
2069 |
int rb = bits(insn,11,15); |
2070 |
|
2071 |
ppc32_emit_memop_idx(b,PPC_MEMOP_STH,ra,rb,rs,1); |
2072 |
return(0); |
2073 |
} |
2074 |
|
2075 |
/* STHUX - Store Half-Word Indexed */ |
2076 |
DECLARE_INSN(STHX) |
2077 |
{ |
2078 |
int rs = bits(insn,21,25); |
2079 |
int ra = bits(insn,16,20); |
2080 |
int rb = bits(insn,11,15); |
2081 |
|
2082 |
ppc32_emit_memop_idx(b,PPC_MEMOP_STH,ra,rb,rs,0); |
2083 |
return(0); |
2084 |
} |
2085 |
|
2086 |
/* STW - Store Word */ |
2087 |
DECLARE_INSN(STW) |
2088 |
{ |
2089 |
int rs = bits(insn,21,25); |
2090 |
int ra = bits(insn,16,20); |
2091 |
m_uint16_t offset = bits(insn,0,15); |
2092 |
|
2093 |
//ppc32_emit_memop(b,PPC_MEMOP_STW,ra,offset,rs,0); |
2094 |
ppc32_emit_memop_fast(b,1,PPC_MEMOP_STW,ra,offset,rs,ppc32_memop_fast_stw); |
2095 |
return(0); |
2096 |
} |
2097 |
|
2098 |
/* STWU - Store Word with Update */ |
2099 |
DECLARE_INSN(STWU) |
2100 |
{ |
2101 |
int rs = bits(insn,21,25); |
2102 |
int ra = bits(insn,16,20); |
2103 |
m_uint16_t offset = bits(insn,0,15); |
2104 |
|
2105 |
ppc32_emit_memop(b,PPC_MEMOP_STW,ra,offset,rs,1); |
2106 |
return(0); |
2107 |
} |
2108 |
|
2109 |
/* STWUX - Store Word with Update Indexed */ |
2110 |
DECLARE_INSN(STWUX) |
2111 |
{ |
2112 |
int rs = bits(insn,21,25); |
2113 |
int ra = bits(insn,16,20); |
2114 |
int rb = bits(insn,11,15); |
2115 |
|
2116 |
ppc32_emit_memop_idx(b,PPC_MEMOP_STW,ra,rb,rs,1); |
2117 |
return(0); |
2118 |
} |
2119 |
|
2120 |
/* STWUX - Store Word Indexed */ |
2121 |
DECLARE_INSN(STWX) |
2122 |
{ |
2123 |
int rs = bits(insn,21,25); |
2124 |
int ra = bits(insn,16,20); |
2125 |
int rb = bits(insn,11,15); |
2126 |
|
2127 |
ppc32_emit_memop_idx(b,PPC_MEMOP_STW,ra,rb,rs,0); |
2128 |
return(0); |
2129 |
} |
2130 |
|
2131 |
/* SUBF - Subtract From: rd = GPR[rb] - GPR[ra].
 * Note the PPC operand order: the *second* operand (rb) is the minuend.
 */
DECLARE_INSN(SUBF)
{
   int rd = bits(insn,21,25);   /* destination GPR */
   int ra = bits(insn,16,20);   /* subtrahend GPR */
   int rb = bits(insn,11,15);   /* minuend GPR */

   /* $rd = $rb - $ra  (original comment wrongly said "$rb - $rb") */
   ppc32_load_gpr(b,AMD64_RBX,rb);
   ppc32_alu_gpr(b,X86_SUB,AMD64_RBX,ra);
   ppc32_store_gpr(b,rd,AMD64_RBX);

   /* Rc bit set: CR0 update consumes the host flags left by the SUB
    * above (ppc32_store_gpr presumably emits only flag-neutral moves) */
   if (insn & 1)
      ppc32_update_cr0(b);

   return(0);
}
2148 |
|
2149 |
/* SUBFC - Subtract From Carrying: rd = ~ra + rb + 1 (i.e. rb - ra),
 * with XER[CA] set when a carry occurs out of either addition step.
 * The carry must be OR-ed across both steps because either the "+1"
 * or the "+rb" addition can produce it.
 */
DECLARE_INSN(SUBFC)
{
   int rd = bits(insn,21,25);   /* destination GPR */
   int ra = bits(insn,16,20);   /* subtrahend GPR */
   int rb = bits(insn,11,15);   /* minuend GPR */

   /* ~$ra + 1 */
   ppc32_load_gpr(b,AMD64_RSI,ra);
   amd64_not_reg(b->jit_ptr,AMD64_RSI);
   amd64_alu_reg_imm_size(b->jit_ptr,X86_ADD,AMD64_RSI,1,4);
   /* capture carry of step 1 in AL before it is clobbered */
   amd64_set_reg(b->jit_ptr,X86_CC_C,AMD64_RAX,FALSE);

   /* add $rb */
   ppc32_alu_gpr(b,X86_ADD,AMD64_RSI,rb);
   /* capture carry of step 2 in CL */
   amd64_set_reg(b->jit_ptr,X86_CC_C,AMD64_RCX,FALSE);

   ppc32_store_gpr(b,rd,AMD64_RSI);

   /* store the carry flag: CA = (carry1 | carry2) & 1 */
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RAX,AMD64_RCX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,0x1);

   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         AMD64_RAX,4);

   /* update cr0: re-test the result since the OR/AND above trashed
    * the flags of the subtraction */
   if (insn & 1) {
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RSI,AMD64_RSI,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
2183 |
|
2184 |
/* SUBFE - Subtract From Extended: rd = ~ra + rb + XER[CA],
 * with XER[CA] rewritten from the carries of the two addition steps.
 * Same two-stage carry capture as SUBFC, but the incoming carry is
 * read from cpu->xer_ca instead of the constant 1.
 */
DECLARE_INSN(SUBFE)
{
   int rd = bits(insn,21,25);   /* destination GPR */
   int ra = bits(insn,16,20);   /* subtrahend GPR */
   int rb = bits(insn,11,15);   /* minuend GPR */

   /* ~$ra + carry */
   ppc32_load_gpr(b,AMD64_RSI,ra);
   amd64_not_reg(b->jit_ptr,AMD64_RSI);
   amd64_alu_reg_membase_size(b->jit_ptr,X86_ADD,AMD64_RSI,
                              AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),4);
   /* capture carry of step 1 before it is clobbered */
   amd64_set_reg(b->jit_ptr,X86_CC_C,AMD64_RAX,FALSE);

   /* add $rb */
   ppc32_alu_gpr(b,X86_ADD,AMD64_RSI,rb);
   /* capture carry of step 2 */
   amd64_set_reg(b->jit_ptr,X86_CC_C,AMD64_RCX,FALSE);

   ppc32_store_gpr(b,rd,AMD64_RSI);

   /* store the carry flag: CA = (carry1 | carry2) & 1 */
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RAX,AMD64_RCX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,0x1);
   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         AMD64_RAX,4);

   /* update cr0: re-test the result since the OR/AND above trashed
    * the flags */
   if (insn & 1) {
      amd64_test_reg_reg_size(b->jit_ptr,AMD64_RSI,AMD64_RSI,4);
      ppc32_update_cr0(b);
   }

   return(0);
}
2218 |
|
2219 |
/* SUBFIC - Subtract From Immediate Carrying:
 * rd = ~ra + 1 + sign_ext(imm), i.e. sign_ext(imm) - ra,
 * with XER[CA] set from the carries of the two addition steps.
 * SUBFIC has no Rc form, hence no CR0 update here.
 */
DECLARE_INSN(SUBFIC)
{
   int rd = bits(insn,21,25);              /* destination GPR */
   int ra = bits(insn,16,20);              /* subtrahend GPR */
   m_uint16_t imm = bits(insn,0,15);       /* raw 16-bit immediate */
   m_uint32_t tmp = sign_extend_32(imm,16);   /* sign-extended immediate */

   /* ~$ra + 1 */
   ppc32_load_gpr(b,AMD64_RSI,ra);
   amd64_not_reg(b->jit_ptr,AMD64_RSI);
   amd64_alu_reg_imm_size(b->jit_ptr,X86_ADD,AMD64_RSI,1,4);
   /* capture carry of step 1 before it is clobbered */
   amd64_set_reg(b->jit_ptr,X86_CC_C,AMD64_RAX,FALSE);

   /* add sign-extended $immediate */
   amd64_alu_reg_imm_size(b->jit_ptr,X86_ADD,AMD64_RSI,tmp,4);
   /* capture carry of step 2 */
   amd64_set_reg(b->jit_ptr,X86_CC_C,AMD64_RCX,FALSE);

   ppc32_store_gpr(b,rd,AMD64_RSI);

   /* store the carry flag: CA = (carry1 | carry2) & 1 */
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RAX,AMD64_RCX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,0x1);

   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         AMD64_RAX,4);
   return(0);
}
2247 |
|
2248 |
/* SYNC - Synchronize.
 * No host code is emitted: the instruction is treated as a no-op by
 * this translator (presumably the simulator needs no explicit memory
 * barrier here — confirm against the slow-path interpreter).
 */
DECLARE_INSN(SYNC)
{
   return(0);
}
2253 |
|
2254 |
/* XOR */ |
2255 |
DECLARE_INSN(XOR) |
2256 |
{ |
2257 |
int rs = bits(insn,21,25); |
2258 |
int ra = bits(insn,16,20); |
2259 |
int rb = bits(insn,11,15); |
2260 |
|
2261 |
ppc32_load_gpr(b,AMD64_RBX,rs); |
2262 |
ppc32_alu_gpr(b,X86_XOR,AMD64_RBX,rb); |
2263 |
ppc32_store_gpr(b,ra,AMD64_RBX); |
2264 |
|
2265 |
if (insn & 1) |
2266 |
ppc32_update_cr0(b); |
2267 |
|
2268 |
return(0); |
2269 |
} |
2270 |
|
2271 |
/* XORI - XOR Immediate */ |
2272 |
DECLARE_INSN(XORI) |
2273 |
{ |
2274 |
int rs = bits(insn,21,25); |
2275 |
int ra = bits(insn,16,20); |
2276 |
m_uint32_t imm = bits(insn,0,15); |
2277 |
|
2278 |
ppc32_load_imm(b,AMD64_RBX,imm); |
2279 |
ppc32_alu_gpr(b,X86_XOR,AMD64_RBX,rs); |
2280 |
ppc32_store_gpr(b,ra,AMD64_RBX); |
2281 |
return(0); |
2282 |
} |
2283 |
|
2284 |
/* XORIS - XOR Immediate Shifted */ |
2285 |
DECLARE_INSN(XORIS) |
2286 |
{ |
2287 |
int rs = bits(insn,21,25); |
2288 |
int ra = bits(insn,16,20); |
2289 |
m_uint32_t imm = bits(insn,0,15); |
2290 |
|
2291 |
ppc32_load_imm(b,AMD64_RBX,imm << 16); |
2292 |
ppc32_alu_gpr(b,X86_XOR,AMD64_RBX,rs); |
2293 |
ppc32_store_gpr(b,ra,AMD64_RBX); |
2294 |
return(0); |
2295 |
} |
2296 |
|
2297 |
/* PPC instruction array: { emitter, mask, value } decode table.
 * NOTE(review): the decoder presumably selects the FIRST entry where
 * (insn & mask) == value, so more specific encodings must precede
 * overlapping general ones (e.g. BCC before BC) — do not reorder.
 * The final all-zero-mask entry matches any word and dispatches to
 * ppc32_emit_unknown as the fallback.
 */
struct ppc32_insn_tag ppc32_insn_tags[] = {
   { ppc32_emit_BLR        , 0xfffffffe , 0x4e800020 },
   { ppc32_emit_BCTR       , 0xfffffffe , 0x4e800420 },
   { ppc32_emit_MFLR       , 0xfc1fffff , 0x7c0802a6 },
   { ppc32_emit_MTLR       , 0xfc1fffff , 0x7c0803a6 },
   { ppc32_emit_MFCTR      , 0xfc1fffff , 0x7c0902a6 },
   { ppc32_emit_MTCTR      , 0xfc1fffff , 0x7c0903a6 },
   { ppc32_emit_MFTBL      , 0xfc1ff7ff , 0x7c0c42e6 },
   { ppc32_emit_MFTBU      , 0xfc1ff7ff , 0x7c0d42e6 },
   { ppc32_emit_ADD        , 0xfc0007fe , 0x7c000214 },
   { ppc32_emit_ADDC       , 0xfc0007fe , 0x7c000014 },
   { ppc32_emit_ADDE       , 0xfc0007fe , 0x7c000114 },
   { ppc32_emit_ADDI       , 0xfc000000 , 0x38000000 },
   { ppc32_emit_ADDIC      , 0xfc000000 , 0x30000000 },
   { ppc32_emit_ADDIC_dot  , 0xfc000000 , 0x34000000 },
   { ppc32_emit_ADDIS      , 0xfc000000 , 0x3c000000 },
   { ppc32_emit_AND        , 0xfc0007fe , 0x7c000038 },
   { ppc32_emit_ANDC       , 0xfc0007fe , 0x7c000078 },
   { ppc32_emit_ANDI       , 0xfc000000 , 0x70000000 },
   { ppc32_emit_ANDIS      , 0xfc000000 , 0x74000000 },
   { ppc32_emit_B          , 0xfc000003 , 0x48000000 },
   { ppc32_emit_BA         , 0xfc000003 , 0x48000002 },
   { ppc32_emit_BL         , 0xfc000003 , 0x48000001 },
   { ppc32_emit_BLA        , 0xfc000003 , 0x48000003 },
   /* BCC must stay before the more general BC entry */
   { ppc32_emit_BCC        , 0xfe800000 , 0x40800000 },
   { ppc32_emit_BC         , 0xfc000000 , 0x40000000 },
   { ppc32_emit_BCLR       , 0xfc00fffe , 0x4c000020 },
   { ppc32_emit_CMP        , 0xfc6007ff , 0x7c000000 },
   { ppc32_emit_CMPI       , 0xfc600000 , 0x2c000000 },
   { ppc32_emit_CMPL       , 0xfc6007ff , 0x7c000040 },
   { ppc32_emit_CMPLI      , 0xfc600000 , 0x28000000 },
   { ppc32_emit_CRAND      , 0xfc0007ff , 0x4c000202 },
   { ppc32_emit_CRANDC     , 0xfc0007ff , 0x4c000102 },
   { ppc32_emit_CREQV      , 0xfc0007ff , 0x4c000242 },
   { ppc32_emit_CRNAND     , 0xfc0007ff , 0x4c0001c2 },
   { ppc32_emit_CRNOR      , 0xfc0007ff , 0x4c000042 },
   { ppc32_emit_CROR       , 0xfc0007ff , 0x4c000382 },
   { ppc32_emit_CRORC      , 0xfc0007ff , 0x4c000342 },
   { ppc32_emit_CRXOR      , 0xfc0007ff , 0x4c000182 },
   { ppc32_emit_DIVWU      , 0xfc0007fe , 0x7c000396 },
   { ppc32_emit_EQV        , 0xfc0007fe , 0x7c000238 },
   { ppc32_emit_EXTSB      , 0xfc00fffe , 0x7c000774 },
   { ppc32_emit_EXTSH      , 0xfc00fffe , 0x7c000734 },
   { ppc32_emit_LBZ        , 0xfc000000 , 0x88000000 },
   { ppc32_emit_LBZU       , 0xfc000000 , 0x8c000000 },
   { ppc32_emit_LBZUX      , 0xfc0007ff , 0x7c0000ee },
   { ppc32_emit_LBZX       , 0xfc0007ff , 0x7c0000ae },
   { ppc32_emit_LHA        , 0xfc000000 , 0xa8000000 },
   { ppc32_emit_LHAU       , 0xfc000000 , 0xac000000 },
   { ppc32_emit_LHAUX      , 0xfc0007ff , 0x7c0002ee },
   { ppc32_emit_LHAX       , 0xfc0007ff , 0x7c0002ae },
   { ppc32_emit_LHZ        , 0xfc000000 , 0xa0000000 },
   { ppc32_emit_LHZU       , 0xfc000000 , 0xa4000000 },
   { ppc32_emit_LHZUX      , 0xfc0007ff , 0x7c00026e },
   { ppc32_emit_LHZX       , 0xfc0007ff , 0x7c00022e },
   { ppc32_emit_LWZ        , 0xfc000000 , 0x80000000 },
   { ppc32_emit_LWZU       , 0xfc000000 , 0x84000000 },
   { ppc32_emit_LWZUX      , 0xfc0007ff , 0x7c00006e },
   { ppc32_emit_LWZX       , 0xfc0007ff , 0x7c00002e },
   { ppc32_emit_MCRF       , 0xfc63ffff , 0x4c000000 },
   { ppc32_emit_MFCR       , 0xfc1fffff , 0x7c000026 },
   { ppc32_emit_MFMSR      , 0xfc1fffff , 0x7c0000a6 },
   { ppc32_emit_MFSR       , 0xfc10ffff , 0x7c0004a6 },
   { ppc32_emit_MTCRF      , 0xfc100fff , 0x7c000120 },
   { ppc32_emit_MULHW      , 0xfc0007fe , 0x7c000096 },
   { ppc32_emit_MULHWU     , 0xfc0007fe , 0x7c000016 },
   { ppc32_emit_MULLI      , 0xfc000000 , 0x1c000000 },
   { ppc32_emit_MULLW      , 0xfc0007fe , 0x7c0001d6 },
   { ppc32_emit_NAND       , 0xfc0007fe , 0x7c0003b8 },
   { ppc32_emit_NEG        , 0xfc00fffe , 0x7c0000d0 },
   { ppc32_emit_NOR        , 0xfc0007fe , 0x7c0000f8 },
   { ppc32_emit_OR         , 0xfc0007fe , 0x7c000378 },
   { ppc32_emit_ORC        , 0xfc0007fe , 0x7c000338 },
   { ppc32_emit_ORI        , 0xfc000000 , 0x60000000 },
   { ppc32_emit_ORIS       , 0xfc000000 , 0x64000000 },
   { ppc32_emit_RLWIMI     , 0xfc000000 , 0x50000000 },
   { ppc32_emit_RLWINM     , 0xfc000000 , 0x54000000 },
   { ppc32_emit_RLWNM      , 0xfc000000 , 0x5c000000 },
   { ppc32_emit_SLW        , 0xfc0007fe , 0x7c000030 },
   { ppc32_emit_SRAWI      , 0xfc0007fe , 0x7c000670 },
   { ppc32_emit_SRW        , 0xfc0007fe , 0x7c000430 },
   { ppc32_emit_STB        , 0xfc000000 , 0x98000000 },
   { ppc32_emit_STBU       , 0xfc000000 , 0x9c000000 },
   { ppc32_emit_STBUX      , 0xfc0007ff , 0x7c0001ee },
   { ppc32_emit_STBX       , 0xfc0007ff , 0x7c0001ae },
   { ppc32_emit_STH        , 0xfc000000 , 0xb0000000 },
   { ppc32_emit_STHU       , 0xfc000000 , 0xb4000000 },
   { ppc32_emit_STHUX      , 0xfc0007ff , 0x7c00036e },
   { ppc32_emit_STHX       , 0xfc0007ff , 0x7c00032e },
   { ppc32_emit_STW        , 0xfc000000 , 0x90000000 },
   { ppc32_emit_STWU       , 0xfc000000 , 0x94000000 },
   { ppc32_emit_STWUX      , 0xfc0007ff , 0x7c00016e },
   { ppc32_emit_STWX       , 0xfc0007ff , 0x7c00012e },
   { ppc32_emit_SUBF       , 0xfc0007fe , 0x7c000050 },
   { ppc32_emit_SUBFC      , 0xfc0007fe , 0x7c000010 },
   { ppc32_emit_SUBFE      , 0xfc0007fe , 0x7c000110 },
   { ppc32_emit_SUBFIC     , 0xfc000000 , 0x20000000 },
   { ppc32_emit_SYNC       , 0xffffffff , 0x7c0004ac },
   { ppc32_emit_XOR        , 0xfc0007fe , 0x7c000278 },
   { ppc32_emit_XORI       , 0xfc000000 , 0x68000000 },
   { ppc32_emit_XORIS      , 0xfc000000 , 0x6c000000 },
   /* catch-all: mask 0 matches everything — must remain last */
   { ppc32_emit_unknown    , 0x00000000 , 0x00000000 },
};