/[dynamips]/trunk/ppc32_amd64_trans.c
This is a repository of my old source code, which isn't updated any more. Go to git.rot13.org for current projects!
ViewVC logotype

Contents of /trunk/ppc32_amd64_trans.c

Parent Directory Parent Directory | Revision Log Revision Log


Revision 12 - (show annotations)
Sat Oct 6 16:45:40 2007 UTC (11 years, 7 months ago) by dpavlin
File MIME type: text/plain
File size: 111604 byte(s)
make working copy

1 /*
2 * Cisco router simulation platform.
3 * Copyright (c) 2005,2006 Christophe Fillot (cf@utc.fr)
4 */
5
6 #include <stdio.h>
7 #include <stdlib.h>
8 #include <unistd.h>
9 #include <string.h>
10 #include <sys/types.h>
11 #include <sys/stat.h>
12 #include <sys/mman.h>
13 #include <fcntl.h>
14
15 #include "cpu.h"
16 #include "jit_op.h"
17 #include "ppc32_jit.h"
18 #include "ppc32_amd64_trans.h"
19 #include "memory.h"
20
21 /* Macros for CPU structure access */
22 #define REG_OFFSET(reg) (OFFSET(cpu_ppc_t,gpr[(reg)]))
23 #define MEMOP_OFFSET(op) (OFFSET(cpu_ppc_t,mem_op_fn[(op)]))
24
25 #define DECLARE_INSN(name) \
26 static int ppc32_emit_##name(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b, \
27 ppc_insn_t insn)
28
29 /* EFLAGS to Condition Register (CR) field - signed */
30 static m_uint32_t eflags_to_cr_signed[256] = {
31 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
32 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
33 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
34 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
35 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
36 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
37 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
38 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
39 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
40 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
41 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
42 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
43 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
44 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
45 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
46 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
47 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
48 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
49 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
50 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
51 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
52 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
53 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
54 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
55 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a,
56 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a,
57 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a,
58 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a,
59 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a,
60 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a,
61 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a,
62 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a,
63 };
64
65 /* EFLAGS to Condition Register (CR) field - unsigned */
66 static m_uint32_t eflags_to_cr_unsigned[256] = {
67 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
68 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
69 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
70 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
71 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
72 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
73 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
74 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
75 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
76 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
77 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
78 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
79 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
80 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
81 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
82 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
83 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
84 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
85 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
86 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
87 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
88 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
89 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
90 0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
91 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
92 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
93 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
94 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
95 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
96 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
97 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
98 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
99 };
100
101 /* Load a 32 bit immediate value */
102 static inline void ppc32_load_imm(u_char **ptr,u_int reg,m_uint32_t val)
103 {
104 if (val)
105 amd64_mov_reg_imm_size(*ptr,reg,val,4);
106 else
107 amd64_alu_reg_reg_size(*ptr,X86_XOR,reg,reg,4);
108 }
109
/* Set the Instruction Address (IA) register */
void ppc32_set_ia(u_char **ptr,m_uint32_t new_ia)
{
   /* 32-bit immediate store into cpu->ia; %r15 holds the cpu_ppc_t pointer */
   amd64_mov_membase_imm(*ptr,AMD64_R15,OFFSET(cpu_ppc_t,ia),new_ia,4);
}
115
/* Set the Link Register (LR) */
static void ppc32_set_lr(jit_op_t *iop,m_uint32_t new_lr)
{
   /* 32-bit immediate store into cpu->lr through the CPU pointer (%r15) */
   amd64_mov_membase_imm(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,lr),new_lr,4);
}
121
122 /*
123 * Try to branch directly to the specified JIT block without returning to
124 * main loop.
125 */
126 static void ppc32_try_direct_far_jump(cpu_ppc_t *cpu,jit_op_t *iop,
127 m_uint32_t new_ia)
128 {
129 m_uint32_t new_page,ia_hash,ia_offset;
130 u_char *test1,*test2,*test3;
131
132 /* Indicate that we throw %rbx, %rdx */
133 ppc32_op_emit_alter_host_reg(cpu,AMD64_RBX);
134 ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);
135 ppc32_op_emit_alter_host_reg(cpu,AMD64_RSI);
136
137 new_page = new_ia & PPC32_MIN_PAGE_MASK;
138 ia_offset = (new_ia & PPC32_MIN_PAGE_IMASK) >> 2;
139 ia_hash = ppc32_jit_get_ia_hash(new_ia);
140
141 /* Get JIT block info in %rdx */
142 amd64_mov_reg_membase(iop->ob_ptr,AMD64_RBX,
143 AMD64_R15,OFFSET(cpu_ppc_t,exec_blk_map),8);
144 amd64_mov_reg_membase(iop->ob_ptr,AMD64_RDX,
145 AMD64_RBX,ia_hash*sizeof(void *),8);
146
147 /* no JIT block found ? */
148 amd64_test_reg_reg(iop->ob_ptr,AMD64_RDX,AMD64_RDX);
149 test1 = iop->ob_ptr;
150 amd64_branch8(iop->ob_ptr, X86_CC_Z, 0, 1);
151
152 /* Check block IA */
153 ppc32_load_imm(&iop->ob_ptr,AMD64_RSI,new_page);
154 amd64_alu_reg_membase_size(iop->ob_ptr,X86_CMP,AMD64_RAX,AMD64_RDX,
155 OFFSET(ppc32_jit_tcb_t,start_ia),4);
156 test2 = iop->ob_ptr;
157 amd64_branch8(iop->ob_ptr, X86_CC_NE, 0, 1);
158
159 /* Jump to the code */
160 amd64_mov_reg_membase(iop->ob_ptr,AMD64_RSI,
161 AMD64_RDX,OFFSET(ppc32_jit_tcb_t,jit_insn_ptr),8);
162 amd64_mov_reg_membase(iop->ob_ptr,AMD64_RBX,
163 AMD64_RSI,ia_offset * sizeof(void *),8);
164
165 amd64_test_reg_reg(iop->ob_ptr,AMD64_RBX,AMD64_RBX);
166 test3 = iop->ob_ptr;
167 amd64_branch8(iop->ob_ptr, X86_CC_Z, 0, 1);
168 amd64_jump_reg(iop->ob_ptr,AMD64_RBX);
169
170 /* Returns to caller... */
171 amd64_patch(test1,iop->ob_ptr);
172 amd64_patch(test2,iop->ob_ptr);
173 amd64_patch(test3,iop->ob_ptr);
174
175 ppc32_set_ia(&iop->ob_ptr,new_ia);
176 ppc32_jit_tcb_push_epilog(&iop->ob_ptr);
177 }
178
/* Set Jump
 *
 * Emit a jump to new_ia.  If the target address resolves inside the current
 * translated block, a relative jump32 is emitted and recorded for later
 * patching.  Otherwise, either the direct far-jump lookup is generated
 * (exec_blk_direct_jump optimization) or IA is set and control returns to
 * the main exec loop via the epilog.
 */
static void ppc32_set_jump(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b,jit_op_t *iop,
                           m_uint32_t new_ia,int local_jump)
{
   int return_to_caller = FALSE;
   u_char *jump_ptr;

#if 0
   if (cpu->sym_trace && !local_jump)
      return_to_caller = TRUE;
#endif

   if (!return_to_caller && ppc32_jit_tcb_local_addr(b,new_ia,&jump_ptr)) {
      /* Local target: emit a placeholder jump32 to be patched later */
      ppc32_jit_tcb_record_patch(b,iop,iop->ob_ptr,new_ia);
      amd64_jump32(iop->ob_ptr,0);
   } else {
      if (cpu->exec_blk_direct_jump) {
         /* Block lookup optimization */
         ppc32_try_direct_far_jump(cpu,iop,new_ia);
      } else {
         ppc32_set_ia(&iop->ob_ptr,new_ia);
         ppc32_jit_tcb_push_epilog(&iop->ob_ptr);
      }
   }
}
204
/* Jump to the next page
 *
 * Emits (on a private, temporary op list) a non-local jump to the first
 * instruction of the page following this block, outputs it immediately into
 * the JIT buffer, then frees the list and detaches it from the generic CPU.
 */
void ppc32_set_page_jump(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b)
{
   jit_op_t *iop,*op_list = NULL;

   /* Divert op emission to a local list for this one synthetic insn */
   cpu->gen->jit_op_current = &op_list;

   iop = ppc32_op_emit_insn_output(cpu,4,"set_page_jump");
   ppc32_set_jump(cpu,b,iop,b->start_ia + PPC32_MIN_PAGE_SIZE,FALSE);
   ppc32_op_insn_output(b,iop);

   jit_op_free_list(cpu->gen,op_list);
   cpu->gen->jit_op_current = NULL;
}
219
/* Load a GPR into the specified host register (32-bit load from cpu->gpr[]) */
static forced_inline void ppc32_load_gpr(u_char **ptr,u_int host_reg,
                                         u_int ppc_reg)
{
   amd64_mov_reg_membase(*ptr,host_reg,AMD64_R15,REG_OFFSET(ppc_reg),4);
}
226
/* Store contents for a host register into a GPR register (32-bit store) */
static forced_inline void ppc32_store_gpr(u_char **ptr,u_int ppc_reg,
                                          u_int host_reg)
{
   amd64_mov_membase_reg(*ptr,AMD64_R15,REG_OFFSET(ppc_reg),host_reg,4);
}
233
/* Apply an ALU operation on a GPR register and a host register:
 * host_reg = host_reg <op> cpu->gpr[ppc_reg] (32-bit, sets EFLAGS) */
static forced_inline void ppc32_alu_gpr(u_char **ptr,u_int op,
                                        u_int host_reg,u_int ppc_reg)
{
   amd64_alu_reg_membase_size(*ptr,op,host_reg,
                              AMD64_R15,REG_OFFSET(ppc_reg),4);
}
241
/*
 * Update CR from %eflags.
 * %rax, %rdx are modified by the generated code.
 * (NOTE(review): the original comment also listed %rsi, but the active code
 * below only touches %rax/%rdx; %rcx appears only in the disabled XER part.)
 */
static void ppc32_update_cr(ppc32_jit_tcb_t *b,int field,int is_signed)
{
   /* Get status bits from EFLAGS: pushf/pop, keep only the low byte */
   amd64_pushfd_size(b->jit_ptr,8);
   amd64_pop_reg(b->jit_ptr,AMD64_RAX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,0xFF);

   /* %rdx = address of the appropriate EFLAGS -> CR translation table */
   if (is_signed)
      amd64_mov_reg_imm_size(b->jit_ptr,AMD64_RDX,eflags_to_cr_signed,8);
   else
      amd64_mov_reg_imm_size(b->jit_ptr,AMD64_RDX,eflags_to_cr_unsigned,8);

   /* %eax = table[%rax] (scale 4 = sizeof(m_uint32_t) entries) */
   amd64_mov_reg_memindex(b->jit_ptr,AMD64_RAX,AMD64_RDX,0,AMD64_RAX,2,4);

#if 0
   /* Check XER Summary of Overflow and report it */
   amd64_mov_reg_membase(b->jit_ptr,AMD64_RCX,
                         AMD64_R15,OFFSET(cpu_ppc_t,xer),4);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RCX,PPC32_XER_SO);
   amd64_shift_reg_imm(b->jit_ptr,X86_SHR,AMD64_RCX,(field << 2) + 3);
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RDX,AMD64_RCX);
#endif

   /* Store modified CR field */
   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,PPC32_CR_FIELD_OFFSET(field),
                         AMD64_RAX,4);
}
273
/*
 * Update CR0 from %eflags (signed comparison semantics).
 * Same host registers as ppc32_update_cr() are modified (%rax, %rdx).
 */
static void ppc32_update_cr0(ppc32_jit_tcb_t *b)
{
   ppc32_update_cr(b,0,TRUE);
}
282
/* Indicate registers modified by ppc32_update_cr() functions, so the
 * register allocator spills any PPC GPRs currently mapped to them. */
void ppc32_update_cr_set_altered_hreg(cpu_ppc_t *cpu)
{
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RAX);
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);
}
289
/* Basic C call: load the function address into %rbx and call it.
 * Clobbers %rbx; arguments must already be in the SysV argument registers. */
static forced_inline void ppc32_emit_basic_c_call(u_char **ptr,void *f)
{
   amd64_mov_reg_imm(*ptr,AMD64_RBX,f);
   amd64_call_reg(*ptr,AMD64_RBX);
}
296
/* Emit a simple call to a C function without any parameter.
 * IA is set to the current instruction first (presumably so the callee
 * sees an accurate PC, e.g. for exceptions -- TODO confirm). */
static void ppc32_emit_c_call(ppc32_jit_tcb_t *b,jit_op_t *iop,void *f)
{
   ppc32_set_ia(&iop->ob_ptr,b->start_ia+(b->ppc_trans_pos << 2));
   ppc32_emit_basic_c_call(&iop->ob_ptr,f);
}
303
304 /* ======================================================================== */
305
306 /* Initialize register mapping */
307 void ppc32_jit_init_hreg_mapping(cpu_ppc_t *cpu)
308 {
309 int avail_hregs[] = { AMD64_RSI, AMD64_RAX, AMD64_RCX, AMD64_RDX,
310 AMD64_R13, AMD64_R14, AMD64_RDI, -1 };
311 struct hreg_map *map;
312 int i,hreg;
313
314 cpu->hreg_map_list = cpu->hreg_lru = NULL;
315
316 /* Add the available registers to the map list */
317 for(i=0;avail_hregs[i]!=-1;i++) {
318 hreg = avail_hregs[i];
319 map = &cpu->hreg_map[hreg];
320
321 /* Initialize mapping. At the beginning, no PPC reg is mapped */
322 map->flags = 0;
323 map->hreg = hreg;
324 map->vreg = -1;
325 ppc32_jit_insert_hreg_mru(cpu,map);
326 }
327
328 /* Clear PPC registers mapping */
329 for(i=0;i<PPC32_GPR_NR;i++)
330 cpu->ppc_reg_map[i] = -1;
331 }
332
333 /* Allocate a specific temp register */
334 static int ppc32_jit_get_tmp_hreg(cpu_ppc_t *cpu)
335 {
336 return(AMD64_RBX);
337 }
338
339 /* ======================================================================== */
340 /* JIT operations (specific to target CPU). */
341 /* ======================================================================== */
342
343 /* INSN_OUTPUT */
344 void ppc32_op_insn_output(ppc32_jit_tcb_t *b,jit_op_t *op)
345 {
346 op->ob_final = b->jit_ptr;
347 memcpy(b->jit_ptr,op->ob_data,op->ob_ptr - op->ob_data);
348 b->jit_ptr += op->ob_ptr - op->ob_data;
349 }
350
351 /* LOAD_GPR: p[0] = %host_reg, p[1] = %ppc_reg */
352 void ppc32_op_load_gpr(ppc32_jit_tcb_t *b,jit_op_t *op)
353 {
354 if (op->param[0] != JIT_OP_INV_REG)
355 ppc32_load_gpr(&b->jit_ptr,op->param[0],op->param[1]);
356 }
357
358 /* STORE_GPR: p[0] = %host_reg, p[1] = %ppc_reg */
359 void ppc32_op_store_gpr(ppc32_jit_tcb_t *b,jit_op_t *op)
360 {
361 if (op->param[0] != JIT_OP_INV_REG)
362 ppc32_store_gpr(&b->jit_ptr,op->param[1],op->param[0]);
363 }
364
365 /* UPDATE_FLAGS: p[0] = cr_field, p[1] = is_signed */
366 void ppc32_op_update_flags(ppc32_jit_tcb_t *b,jit_op_t *op)
367 {
368 if (op->param[0] != JIT_OP_INV_REG)
369 ppc32_update_cr(b,op->param[0],op->param[1]);
370 }
371
372 /* MOVE_HOST_REG: p[0] = %host_dst_reg, p[1] = %host_src_reg */
373 void ppc32_op_move_host_reg(ppc32_jit_tcb_t *b,jit_op_t *op)
374 {
375 if ((op->param[0] != JIT_OP_INV_REG) && (op->param[1] != JIT_OP_INV_REG))
376 amd64_mov_reg_reg(b->jit_ptr,op->param[0],op->param[1],4);
377 }
378
379 /* SET_HOST_REG_IMM32: p[0] = %host_reg, p[1] = imm32 */
380 void ppc32_op_set_host_reg_imm32(ppc32_jit_tcb_t *b,jit_op_t *op)
381 {
382 if (op->param[0] != JIT_OP_INV_REG)
383 ppc32_load_imm(&b->jit_ptr,op->param[0],op->param[1]);
384 }
385
386 /* ======================================================================== */
387
/* Memory operation (displacement form).
 *
 * Emits a call to cpu->mem_op_fn[op] with the effective address
 * GPR[base] + sign_extend(offset) in %rsi and the target GPR number in
 * %rdx.  When 'update' is set, the computed EA is written back to
 * GPR[base] after the access (lbzu/stwu-style semantics).
 */
static void ppc32_emit_memop(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b,
                             int op,int base,int offset,int target,int update)
{
   m_uint32_t val = sign_extend(offset,16);
   jit_op_t *iop;

   /*
    * Since an exception can be triggered, clear JIT state. This allows
    * to use branch target tag (we can directly branch on this instruction).
    */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_BRANCH_TARGET);
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);

   iop = ppc32_op_emit_insn_output(cpu,5,"memop");

   /* Save PC for exception handling */
   ppc32_set_ia(&iop->ob_ptr,b->start_ia+(b->ppc_trans_pos << 2));

   /* RSI = sign-extended offset */
   ppc32_load_imm(&iop->ob_ptr,AMD64_RSI,val);

   /* RSI = GPR[base] + sign-extended offset (base 0 means literal 0,
      except in update form where the base GPR is always added) */
   if (update || (base != 0))
      ppc32_alu_gpr(&iop->ob_ptr,X86_ADD,AMD64_RSI,base);

   /* Preserve the EA in %r14 for the post-access write-back */
   if (update)
      amd64_mov_reg_reg(iop->ob_ptr,AMD64_R14,AMD64_RSI,4);

   /* RDX = target register */
   amd64_mov_reg_imm(iop->ob_ptr,AMD64_RDX,target);

   /* RDI = CPU instance pointer */
   amd64_mov_reg_reg(iop->ob_ptr,AMD64_RDI,AMD64_R15,8);

   /* Call memory function */
   amd64_call_membase(iop->ob_ptr,AMD64_R15,MEMOP_OFFSET(op));

   /* Write the EA back into the base GPR (update form) */
   if (update)
      ppc32_store_gpr(&iop->ob_ptr,base,AMD64_R14);
}
429
/* Memory operation (indexed form: EA = GPR[ra] + GPR[rb]).
 *
 * Same calling convention as ppc32_emit_memop(): EA in %rsi, target GPR
 * number in %rdx, CPU pointer in %rdi, dispatch through mem_op_fn[op].
 * When 'update' is set the EA is written back to GPR[ra].
 */
static void ppc32_emit_memop_idx(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b,
                                 int op,int ra,int rb,int target,int update)
{
   jit_op_t *iop;

   /*
    * Since an exception can be triggered, clear JIT state. This allows
    * to use branch target tag (we can directly branch on this instruction).
    */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_BRANCH_TARGET);
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);

   iop = ppc32_op_emit_insn_output(cpu,5,"memop_idx");

   /* Save PC for exception handling */
   ppc32_set_ia(&iop->ob_ptr,b->start_ia+(b->ppc_trans_pos << 2));

   /* RSI = $rb */
   ppc32_load_gpr(&iop->ob_ptr,AMD64_RSI,rb);

   /* RSI += GPR[ra] (ra == 0 means literal 0 except in update form) */
   if (update || (ra != 0))
      ppc32_alu_gpr(&iop->ob_ptr,X86_ADD,AMD64_RSI,ra);

   /* Preserve the EA in %r14 for the post-access write-back */
   if (update)
      amd64_mov_reg_reg(iop->ob_ptr,AMD64_R14,AMD64_RSI,4);

   /* RDX = target register */
   amd64_mov_reg_imm(iop->ob_ptr,AMD64_RDX,target);

   /* RDI = CPU instance pointer */
   amd64_mov_reg_reg(iop->ob_ptr,AMD64_RDI,AMD64_R15,8);

   /* Call memory function */
   amd64_call_membase(iop->ob_ptr,AMD64_R15,MEMOP_OFFSET(op));

   /* Write the EA back into the base GPR (update form) */
   if (update)
      ppc32_store_gpr(&iop->ob_ptr,ra,AMD64_R14);
}
470
471 typedef void (*memop_fast_access)(jit_op_t *iop,int target);
472
/* Fast LBZ: zero-extended byte load from host page (%rbx) + offset (%rsi) */
static void ppc32_memop_fast_lbz(jit_op_t *iop,int target)
{
   /* Clear %rcx first so the 1-byte load is effectively zero-extended */
   amd64_clear_reg(iop->ob_ptr,AMD64_RCX);
   amd64_mov_reg_memindex(iop->ob_ptr,AMD64_RCX,AMD64_RBX,0,AMD64_RSI,0,1);
   ppc32_store_gpr(&iop->ob_ptr,target,AMD64_RCX);
}
480
/* Fast STB: byte store of GPR[target] to host page (%rbx) + offset (%rsi) */
static void ppc32_memop_fast_stb(jit_op_t *iop,int target)
{
   ppc32_load_gpr(&iop->ob_ptr,AMD64_RDX,target);
   amd64_mov_memindex_reg(iop->ob_ptr,AMD64_RBX,0,AMD64_RSI,0,AMD64_RDX,1);
}
487
/* Fast LWZ: 32-bit load with byte swap (PPC is big-endian, host is LE) */
static void ppc32_memop_fast_lwz(jit_op_t *iop,int target)
{
   amd64_mov_reg_memindex(iop->ob_ptr,AMD64_RAX,AMD64_RBX,0,AMD64_RSI,0,4);
   amd64_bswap32(iop->ob_ptr,AMD64_RAX);
   ppc32_store_gpr(&iop->ob_ptr,target,AMD64_RAX);
}
495
/* Fast STW: 32-bit store with byte swap (PPC is big-endian, host is LE) */
static void ppc32_memop_fast_stw(jit_op_t *iop,int target)
{
   ppc32_load_gpr(&iop->ob_ptr,AMD64_RDX,target);
   amd64_bswap32(iop->ob_ptr,AMD64_RDX);
   amd64_mov_memindex_reg(iop->ob_ptr,AMD64_RBX,0,AMD64_RSI,0,AMD64_RDX,4);
}
503
/* Fast memory operation.
 *
 * Emits an inline MTS32 data-cache lookup: on a hit (matching virtual page,
 * and for writes no COW/EXEC flag set) the access is performed directly on
 * the host page by op_handler; on a miss it falls back to the generic
 * mem_op_fn[opcode] call, exactly like ppc32_emit_memop().
 */
static void ppc32_emit_memop_fast(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b,
                                  int write_op,int opcode,
                                  int base,int offset,int target,
                                  memop_fast_access op_handler)
{
   m_uint32_t val = sign_extend(offset,16);
   u_char *test1,*test2,*p_exit;
   jit_op_t *iop;

   /*
    * Since an exception can be triggered, clear JIT state. This allows
    * to use branch target tag (we can directly branch on this instruction).
    */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_BRANCH_TARGET);
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);

   iop = ppc32_op_emit_insn_output(cpu,5,"memop_fast");

   test2 = NULL;

   /* RSI = GPR[base] + sign-extended offset */
   ppc32_load_imm(&iop->ob_ptr,AMD64_RSI,val);
   if (base != 0)
      ppc32_alu_gpr(&iop->ob_ptr,X86_ADD,AMD64_RSI,base);

   /* RBX = mts32_entry index (EA >> hash shift, masked) */
   amd64_mov_reg_reg_size(iop->ob_ptr,X86_EBX,X86_ESI,4);
   amd64_shift_reg_imm_size(iop->ob_ptr,X86_SHR,X86_EBX,MTS32_HASH_SHIFT,4);
   amd64_alu_reg_imm_size(iop->ob_ptr,X86_AND,X86_EBX,MTS32_HASH_MASK,4);

   /* RCX = mts32 entry (cache base + index * 32) */
   amd64_mov_reg_membase(iop->ob_ptr,AMD64_RCX,
                         AMD64_R15,
                         OFFSET(cpu_ppc_t,mts_cache[PPC32_MTS_DCACHE]),8);
   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,AMD64_RBX,5); /* TO FIX */
   amd64_alu_reg_reg(iop->ob_ptr,X86_ADD,AMD64_RCX,AMD64_RBX);

   /* Compare virtual page address (EAX = vpage) */
   amd64_mov_reg_reg(iop->ob_ptr,X86_EAX,X86_ESI,4);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,X86_EAX,PPC32_MIN_PAGE_MASK);

   amd64_alu_reg_membase_size(iop->ob_ptr,X86_CMP,X86_EAX,AMD64_RCX,
                              OFFSET(mts32_entry_t,gvpa),4);
   test1 = iop->ob_ptr;
   amd64_branch8(iop->ob_ptr, X86_CC_NZ, 0, 1);

   /* Test if we are writing to a COW page */
   if (write_op) {
      amd64_test_membase_imm_size(iop->ob_ptr,
                                  AMD64_RCX,OFFSET(mts32_entry_t,flags),
                                  MTS_FLAG_COW|MTS_FLAG_EXEC,4);
      test2 = iop->ob_ptr;
      amd64_branch8(iop->ob_ptr, X86_CC_NZ, 0, 1);
   }

   /* ESI = offset in page, RBX = Host Page Address */
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,X86_ESI,PPC32_MIN_PAGE_IMASK);
   amd64_mov_reg_membase(iop->ob_ptr,AMD64_RBX,
                         AMD64_RCX,OFFSET(mts32_entry_t,hpa),8);

   /* Memory access */
   op_handler(iop,target);

   p_exit = iop->ob_ptr;
   amd64_jump8(iop->ob_ptr,0);

   /* === Slow lookup === */
   amd64_patch(test1,iop->ob_ptr);
   if (test2)
      amd64_patch(test2,iop->ob_ptr);

   /* Save IA for exception handling */
   ppc32_set_ia(&iop->ob_ptr,b->start_ia+(b->ppc_trans_pos << 2));

   /* RDX = target register */
   amd64_mov_reg_imm(iop->ob_ptr,AMD64_RDX,target);

   /* RDI = CPU instance */
   amd64_mov_reg_reg(iop->ob_ptr,AMD64_RDI,AMD64_R15,8);

   /* Call memory access function */
   amd64_call_membase(iop->ob_ptr,AMD64_R15,MEMOP_OFFSET(opcode));

   amd64_patch(p_exit,iop->ob_ptr);
}
590
/* Emit unhandled instruction code.
 *
 * Falls back to the interpreter (ppc32_exec_single_insn_ext) for this
 * opcode; if that call returns non-zero, the generated code exits the JIT
 * via the epilog (presumably an exception/branch occurred -- TODO confirm).
 */
static int ppc32_emit_unknown(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b,
                              ppc_insn_t opcode)
{
   u_char *test1;
   jit_op_t *iop;

   iop = ppc32_op_emit_insn_output(cpu,3,"unknown");

   /* Update IA */
   ppc32_set_ia(&iop->ob_ptr,b->start_ia+(b->ppc_trans_pos << 2));

   /* Fallback to non-JIT mode: rdi = cpu, rsi = raw opcode */
   amd64_mov_reg_reg(iop->ob_ptr,AMD64_RDI,AMD64_R15,8);
   amd64_mov_reg_imm(iop->ob_ptr,AMD64_RSI,opcode);

   ppc32_emit_c_call(b,iop,ppc32_exec_single_insn_ext);
   amd64_test_reg_reg_size(iop->ob_ptr,AMD64_RAX,AMD64_RAX,4);
   test1 = iop->ob_ptr;
   amd64_branch8(iop->ob_ptr, X86_CC_Z, 0, 1);
   ppc32_jit_tcb_push_epilog(&iop->ob_ptr);

   amd64_patch(test1,iop->ob_ptr);

   /* Signal this as an EOB to reset JIT state */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
   return(0);
}
619
/* Virtual Breakpoint: emit a call to ppc32_run_breakpoint(cpu) */
void ppc32_emit_breakpoint(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b)
{
   jit_op_t *iop;

   iop = ppc32_op_emit_insn_output(cpu,2,"breakpoint");

   /* rdi = CPU instance pointer */
   amd64_mov_reg_reg(iop->ob_ptr,AMD64_RDI,AMD64_R15,8);
   ppc32_emit_c_call(b,iop,ppc32_run_breakpoint);

   /* Signal this as an EOB to reset JIT state */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
}
633
/* Increment the number of executed instructions (performance debugging):
 * emits a 32-bit inc of cpu->perf_counter. */
void ppc32_inc_perf_counter(cpu_ppc_t *cpu)
{
   jit_op_t *iop;

   iop = ppc32_op_emit_insn_output(cpu,1,"perf_cnt");
   amd64_inc_membase_size(iop->ob_ptr,
                          AMD64_R15,OFFSET(cpu_ppc_t,perf_counter),4);
}
643
644 /* ======================================================================== */
645
/* BLR - Branch to Link Register: ia = lr; if LK bit set (BLRL), lr = next */
DECLARE_INSN(BLR)
{
   jit_op_t *iop;
   int hreg;

   ppc32_jit_start_hreg_seq(cpu,"blr");
   hreg = ppc32_jit_alloc_hreg(cpu,-1);   /* any free host register */
   ppc32_op_emit_alter_host_reg(cpu,hreg);

   iop = ppc32_op_emit_insn_output(cpu,2,"blr");

   /* ia = lr */
   amd64_mov_reg_membase(iop->ob_ptr,hreg,AMD64_R15,OFFSET(cpu_ppc_t,lr),4);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ia),hreg,4);

   /* set the return address (LK bit: BLRL form) */
   if (insn & 1)
      ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));

   /* Indirect branch: leave the JIT block and mark the following insn as
      a possible branch target */
   ppc32_jit_tcb_push_epilog(&iop->ob_ptr);
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
   ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1) << 2));

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
672
/* BCTR - Branch to Count Register: ia = ctr; if LK bit set (BCTRL), lr = next */
DECLARE_INSN(BCTR)
{
   jit_op_t *iop;
   int hreg;

   ppc32_jit_start_hreg_seq(cpu,"bctr");
   hreg = ppc32_jit_alloc_hreg(cpu,-1);   /* any free host register */
   ppc32_op_emit_alter_host_reg(cpu,hreg);

   iop = ppc32_op_emit_insn_output(cpu,2,"bctr");

   /* ia = ctr */
   amd64_mov_reg_membase(iop->ob_ptr,hreg,AMD64_R15,OFFSET(cpu_ppc_t,ctr),4);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ia),hreg,4);

   /* set the return address (LK bit: BCTRL form) */
   if (insn & 1)
      ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));

   /* Indirect branch: leave the JIT block and mark the following insn as
      a possible branch target */
   ppc32_jit_tcb_push_epilog(&iop->ob_ptr);
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
   ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1) << 2));

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
699
/* MFLR - Move From Link Register: $rd = lr */
DECLARE_INSN(MFLR)
{
   int rd = bits(insn,21,25);
   int hreg_rd;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mflr");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   iop = ppc32_op_emit_insn_output(cpu,1,"mflr");

   amd64_mov_reg_membase(iop->ob_ptr,hreg_rd,AMD64_R15,OFFSET(cpu_ppc_t,lr),4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
717
718 /* MTLR - Move To Link Register */
719 DECLARE_INSN(MTLR)
720 {
721 int rs = bits(insn,21,25);
722 int hreg_rs;
723 jit_op_t *iop;
724
725 ppc32_jit_start_hreg_seq(cpu,"mtlr");
726 hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
727 ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
728
729 iop = ppc32_op_emit_insn_output(cpu,1,"mtlr");
730 amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,lr),hreg_rs,4);
731 return(0);
732 }
733
/* MFCTR - Move From Counter Register: $rd = ctr */
DECLARE_INSN(MFCTR)
{
   int rd = bits(insn,21,25);
   int hreg_rd;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mfctr");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   iop = ppc32_op_emit_insn_output(cpu,1,"mfctr");

   amd64_mov_reg_membase(iop->ob_ptr,hreg_rd,
                         AMD64_R15,OFFSET(cpu_ppc_t,ctr),4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
753
/* MTCTR - Move To Counter Register: ctr = $rs */
DECLARE_INSN(MTCTR)
{
   int rs = bits(insn,21,25);
   int hreg_rs;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mtctr");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,1,"mtctr");

   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ctr),
                         hreg_rs,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
773
/* MFTBU - Move from Time Base (Up): $rd = high 32 bits of the 64-bit TB
 * (read at offset tb+4, i.e. the upper word on this little-endian host). */
DECLARE_INSN(MFTBU)
{
   int rd = bits(insn,21,25);
   int hreg_rd;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mftbu");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   iop = ppc32_op_emit_insn_output(cpu,1,"mftbu");

   amd64_mov_reg_membase(iop->ob_ptr,hreg_rd,
                         AMD64_R15,OFFSET(cpu_ppc_t,tb)+4,4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
793
794 #define PPC32_TB_INCREMENT 50
795
/* MFTBL - Move from Time Base (Lo).
 * Simulation shortcut: each read advances the 64-bit TB by
 * PPC32_TB_INCREMENT, then returns the (new) low word in $rd. */
DECLARE_INSN(MFTBL)
{
   int rd = bits(insn,21,25);
   int hreg_rd;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mftbl");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   iop = ppc32_op_emit_insn_output(cpu,3,"mftbl");

   /* tb += PPC32_TB_INCREMENT (full 64-bit read-modify-write) */
   amd64_mov_reg_membase(iop->ob_ptr,hreg_rd,
                         AMD64_R15,OFFSET(cpu_ppc_t,tb),8);
   amd64_alu_reg_imm(iop->ob_ptr,X86_ADD,hreg_rd,PPC32_TB_INCREMENT);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,tb),
                         hreg_rd,8);

   /* $rd = low 32 bits */
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
819
/* ADD: $rd = $ra + $rb; Rc bit updates CR0 (signed) from the add's flags */
DECLARE_INSN(ADD)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rd,hreg_ra,hreg_rb;
   jit_op_t *iop;

   /* $rd = $ra + $rb */
   ppc32_jit_start_hreg_seq(cpu,"add");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"add");

   /* Avoid the extra mov when rd aliases one of the sources
      (same PPC reg => same host reg from the allocator) */
   if (rd == ra)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_rd,hreg_rb,4);
   else if (rd == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_rd,hreg_ra,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_rd,hreg_rb,4);
   }

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   /* Rc bit: update CR0 from EFLAGS (signed) */
   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
857
/* ADDC: $rd = $ra + $rb, recording the carry-out in XER[CA];
 * Rc bit updates CR0 (signed). */
DECLARE_INSN(ADDC)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rd,hreg_ra,hreg_rb,hreg_t0;
   jit_op_t *iop;

   /* $rd = $ra + $rb */
   ppc32_jit_start_hreg_seq(cpu,"addc");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   /* store the carry flag */
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"addc");

   /* Avoid the extra mov when rd aliases one of the sources */
   if (rd == ra)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_rd,hreg_rb,4);
   else if (rd == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_rd,hreg_ra,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_rd,hreg_rb,4);
   }

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   /* store the carry flag: setc into t0, mask to bit 0, write xer_ca */
   amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t0,FALSE);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x1);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         hreg_t0,4);

   if (insn & 1) {
      /* setcc/and clobbered EFLAGS: re-test the result before CR0 update */
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_rd,hreg_rd,4);
      ppc32_op_emit_update_flags(cpu,0,TRUE);
   }

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
906
/* ADDE - Add Extended: $rd = $ra + $rb + XER[CA], with the new carry-out
 * written back to XER[CA] (OR of the carries from both partial adds);
 * Rc bit updates CR0 (signed). */
DECLARE_INSN(ADDE)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_ra,hreg_rb,hreg_rd,hreg_t0,hreg_t1;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"adde");
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   hreg_t0 = ppc32_jit_alloc_hreg(cpu,-1);     /* accumulator */
   hreg_t1 = ppc32_jit_get_tmp_hreg(cpu);      /* carry scratch */

   ppc32_op_emit_alter_host_reg(cpu,hreg_t0);
   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,3,"adde");

   /* $t0 = $ra + carry; capture the first partial carry into xer_ca */
   amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t1,hreg_t1);
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_ra,4);

   amd64_alu_reg_membase_size(iop->ob_ptr,X86_ADD,hreg_t0,
                              AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),4);
   amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         hreg_t1,4);

   /* $t0 += $rb; OR the second partial carry into xer_ca */
   amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_t0,hreg_rb,4);
   amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                              hreg_t1,4);

   /* update cr0: re-test the final result (setcc/or clobbered EFLAGS) */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_t0,hreg_t0,4);

   amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_t0,4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
960
/* ADDI - ADD Immediate: $rd = $ra + sign_extend(imm); ra == 0 means the
 * literal value 0, i.e. a plain load-immediate (li). */
DECLARE_INSN(ADDI)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int imm = bits(insn,0,15);
   m_uint32_t tmp = sign_extend_32(imm,16);
   int hreg_rd,hreg_ra;
   jit_op_t *iop;

   /* $rd = $ra + imm */
   ppc32_jit_start_hreg_seq(cpu,"addi");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   if (ra != 0) {
      hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
      ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

      iop = ppc32_op_emit_insn_output(cpu,2,"addi");

      if (rd != ra)
         amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);

      amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_rd,tmp,4);
   } else {
      /* li form: $rd = sign-extended immediate */
      iop = ppc32_op_emit_insn_output(cpu,1,"addi");
      ppc32_load_imm(&iop->ob_ptr,hreg_rd,tmp);
   }

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
995
/* ADDIC - ADD Immediate with Carry: $rd = $ra + sign_extend(imm),
 * carry-out recorded in XER[CA].  CR0 is NOT updated (cf. ADDIC.). */
DECLARE_INSN(ADDIC)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int imm = bits(insn,0,15);
   m_uint32_t tmp = sign_extend_32(imm,16);
   int hreg_rd,hreg_ra;
   jit_op_t *iop;

   /* $rd = $ra + imm */
   ppc32_jit_start_hreg_seq(cpu,"addic");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

   iop = ppc32_op_emit_insn_output(cpu,1,"addic");

   if (rd != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);

   amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_rd,tmp,4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   /* setc directly into cpu->xer_ca */
   amd64_set_membase(iop->ob_ptr,X86_CC_C,
                     AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),FALSE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1027
/* ADDIC. - ADD Immediate with Carry, recording CR0 */
DECLARE_INSN(ADDIC_dot)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int imm = bits(insn,0,15);
   m_uint32_t tmp = sign_extend_32(imm,16);
   int hreg_rd,hreg_ra;
   jit_op_t *iop;

   /* $rd = $ra + imm, carry to XER[CA], result recorded into CR0 */
   ppc32_jit_start_hreg_seq(cpu,"addic.");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

   iop = ppc32_op_emit_insn_output(cpu,1,"addic.");

   if (rd != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);

   amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_rd,tmp,4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   /* capture the carry of the ADD into xer_ca (SETcc does not itself
      modify the x86 flags, so CR0 below still reflects the ADD) */
   amd64_set_membase(iop->ob_ptr,X86_CC_C,
                     AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),FALSE);

   /* record CR0 (signed comparison semantics) */
   ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1061
1062 /* ADDIS - ADD Immediate Shifted */
1063 DECLARE_INSN(ADDIS)
1064 {
1065 int rd = bits(insn,21,25);
1066 int ra = bits(insn,16,20);
1067 m_uint32_t imm = bits(insn,0,15);
1068 m_uint32_t tmp = imm << 16;
1069 int hreg_rd,hreg_ra;
1070 jit_op_t *iop;
1071
1072 /* $rd = $ra + (imm << 16) */
1073 ppc32_jit_start_hreg_seq(cpu,"addis");
1074 hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
1075
1076 if (ra != 0) {
1077 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
1078 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
1079
1080 iop = ppc32_op_emit_insn_output(cpu,1,"addis");
1081
1082 if (rd != ra)
1083 amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);
1084
1085 amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_rd,tmp,4);
1086 } else {
1087 iop = ppc32_op_emit_insn_output(cpu,1,"addis");
1088 amd64_mov_reg_imm(iop->ob_ptr,hreg_rd,tmp);
1089 }
1090
1091 ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);
1092
1093 ppc32_jit_close_hreg_seq(cpu);
1094 return(0);
1095 }
1096
/* ADDZE - Add to Zero Extended: $rd = $ra + XER[CA], new carry to XER[CA] */
DECLARE_INSN(ADDZE)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int hreg_rd,hreg_ra,hreg_t0;
   jit_op_t *iop;

   /* $rd = $ra + xer_ca + set_carry */
   ppc32_jit_start_hreg_seq(cpu,"addze");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

   iop = ppc32_op_emit_insn_output(cpu,2,"addze");

   /* pre-zero $t0: the SETcc below only writes the low byte */
   amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t0,hreg_t0);

   if (rd != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);

   /* add the current carry bit directly from memory */
   amd64_alu_reg_membase_size(iop->ob_ptr,X86_ADD,hreg_rd,
                              AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),4);

   /* capture the carry out of that ADD and store it back to xer_ca */
   amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t0,FALSE);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         hreg_t0,4);

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   /* record form (Rc=1): update CR0 */
   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1135
1136 /* AND */
1137 DECLARE_INSN(AND)
1138 {
1139 int rs = bits(insn,21,25);
1140 int ra = bits(insn,16,20);
1141 int rb = bits(insn,11,15);
1142 int hreg_rs,hreg_ra,hreg_rb;
1143 jit_op_t *iop;
1144
1145 /* $ra = $rs & $rb */
1146 ppc32_jit_start_hreg_seq(cpu,"and");
1147 hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
1148 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
1149 hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);
1150
1151 ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
1152 ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);
1153
1154 iop = ppc32_op_emit_insn_output(cpu,1,"and");
1155
1156 if (ra == rs)
1157 amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_rb,4);
1158 else if (ra == rb)
1159 amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_rs,4);
1160 else {
1161 amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
1162 amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_rb,4);
1163 }
1164
1165 ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);
1166
1167 if (insn & 1)
1168 ppc32_op_emit_update_flags(cpu,0,TRUE);
1169
1170 ppc32_jit_close_hreg_seq(cpu);
1171 return(0);
1172 }
1173
/* ANDC - AND with Complement */
DECLARE_INSN(ANDC)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra,hreg_rb,hreg_t0;
   jit_op_t *iop;

   /* $ra = $rs & ~$rb */
   ppc32_jit_start_hreg_seq(cpu,"andc");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,1,"andc");

   /* $t0 = ~$rb  (complement in a temp so $rb itself stays intact) */
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rb,4);
   amd64_not_reg(iop->ob_ptr,hreg_t0);

   /* $ra = $rs & $t0 */
   if (ra == rs)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_t0,4);
   else {
      /* AND into the temp, then move: MOV does not alter x86 flags,
         so the flags for the Rc update below still come from the AND */
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_t0,hreg_rs,4);
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_t0,4);
   }

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   /* record form (Rc=1): update CR0 */
   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1215
1216 /* AND Immediate */
1217 DECLARE_INSN(ANDI)
1218 {
1219 int rs = bits(insn,21,25);
1220 int ra = bits(insn,16,20);
1221 m_uint16_t imm = bits(insn,0,15);
1222 m_uint32_t tmp = imm;
1223 int hreg_rs,hreg_ra;
1224 jit_op_t *iop;
1225
1226 /* $ra = $rs & imm */
1227 ppc32_jit_start_hreg_seq(cpu,"andi");
1228 hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
1229 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
1230
1231 ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
1232
1233 iop = ppc32_op_emit_insn_output(cpu,2,"andi");
1234
1235 if (ra != rs)
1236 amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
1237
1238 amd64_alu_reg_imm_size(iop->ob_ptr,X86_AND,hreg_ra,tmp,4);
1239 ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);
1240
1241 ppc32_op_emit_update_flags(cpu,0,TRUE);
1242
1243 ppc32_jit_close_hreg_seq(cpu);
1244 return(0);
1245 }
1246
1247 /* AND Immediate Shifted */
1248 DECLARE_INSN(ANDIS)
1249 {
1250 int rs = bits(insn,21,25);
1251 int ra = bits(insn,16,20);
1252 m_uint32_t imm = bits(insn,0,15);
1253 m_uint32_t tmp = imm << 16;
1254 int hreg_rs,hreg_ra;
1255 jit_op_t *iop;
1256
1257 /* $ra = $rs & imm */
1258 ppc32_jit_start_hreg_seq(cpu,"andis");
1259 hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
1260 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
1261
1262 ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
1263
1264 iop = ppc32_op_emit_insn_output(cpu,2,"andis");
1265
1266 if (ra != rs)
1267 amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
1268
1269 amd64_alu_reg_imm_size(iop->ob_ptr,X86_AND,hreg_ra,tmp,4);
1270 ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);
1271
1272 ppc32_op_emit_update_flags(cpu,0,TRUE);
1273
1274 ppc32_jit_close_hreg_seq(cpu);
1275 return(0);
1276 }
1277
1278 /* B - Branch */
1279 DECLARE_INSN(B)
1280 {
1281 m_uint32_t offset = bits(insn,2,25);
1282 m_uint32_t new_ia;
1283 jit_op_t *iop;
1284
1285 iop = ppc32_op_emit_insn_output(cpu,4,"b");
1286
1287 /* compute the new ia */
1288 new_ia = b->start_ia + (b->ppc_trans_pos << 2);
1289 new_ia += sign_extend(offset << 2,26);
1290 ppc32_set_jump(cpu,b,iop,new_ia,TRUE);
1291
1292 ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
1293 ppc32_op_emit_branch_target(cpu,b,new_ia);
1294 ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1) << 2));
1295 return(0);
1296 }
1297
1298 /* BA - Branch Absolute */
1299 DECLARE_INSN(BA)
1300 {
1301 m_uint32_t offset = bits(insn,2,25);
1302 m_uint32_t new_ia;
1303 jit_op_t *iop;
1304
1305 iop = ppc32_op_emit_insn_output(cpu,4,"ba");
1306
1307 /* compute the new ia */
1308 new_ia = sign_extend(offset << 2,26);
1309 ppc32_set_jump(cpu,b,iop,new_ia,TRUE);
1310
1311 ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
1312 ppc32_op_emit_branch_target(cpu,b,new_ia);
1313 ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1) << 2));
1314 return(0);
1315 }
1316
1317 /* BL - Branch and Link */
1318 DECLARE_INSN(BL)
1319 {
1320 m_uint32_t offset = bits(insn,2,25);
1321 m_uint32_t new_ia;
1322 jit_op_t *iop;
1323
1324 iop = ppc32_op_emit_insn_output(cpu,4,"bl");
1325
1326 /* compute the new ia */
1327 new_ia = b->start_ia + (b->ppc_trans_pos << 2);
1328 new_ia += sign_extend(offset << 2,26);
1329
1330 /* set the return address */
1331 ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));
1332 ppc32_set_jump(cpu,b,iop,new_ia,TRUE);
1333
1334 ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
1335 ppc32_op_emit_branch_target(cpu,b,new_ia);
1336 ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1) << 2));
1337 return(0);
1338 }
1339
1340 /* BLA - Branch and Link Absolute */
1341 DECLARE_INSN(BLA)
1342 {
1343 m_uint32_t offset = bits(insn,2,25);
1344 m_uint32_t new_ia;
1345 jit_op_t *iop;
1346
1347 iop = ppc32_op_emit_insn_output(cpu,4,"bla");
1348
1349 /* compute the new ia */
1350 new_ia = sign_extend(offset << 2,26);
1351
1352 /* set the return address */
1353 ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));
1354 ppc32_set_jump(cpu,b,iop,new_ia,TRUE);
1355
1356 ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
1357 ppc32_op_emit_branch_target(cpu,b,new_ia);
1358 ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1) << 2));
1359 return(0);
1360 }
1361
/* BC - Branch Conditional (Condition Check only, no CTR decrement) */
DECLARE_INSN(BCC)
{
   int bo = bits(insn,21,25);
   int bi = bits(insn,16,20);
   int bd = bits(insn,2,15);
   jit_op_t *iop;
   u_int cr_field,cr_bit;
   m_uint32_t new_ia;
   u_char *jump_ptr;
   int local_jump;
   int cond;

   ppc32_op_emit_basic_opcode(cpu,JIT_OP_BRANCH_JUMP);

   iop = ppc32_op_emit_insn_output(cpu,5,"bcc");

   /* Get the wanted value for the condition bit (BO[3]) */
   cond = (bo >> 3) & 0x1;

   /* Set the return address (LK bit set) */
   if (insn & 1) {
      ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));
      ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1)<<2));
   }

   /* Compute the new ia: BD << 2, relative to the current ia unless the
      AA bit (insn bit 1) is set */
   new_ia = sign_extend_32(bd << 2,16);
   if (!(insn & 0x02))
      new_ia += b->start_ia + (b->ppc_trans_pos << 2);

   /* Test the condition bit */
   cr_field = ppc32_get_cr_field(bi);
   cr_bit = ppc32_get_cr_bit(bi);

   ppc32_op_emit_require_flags(cpu,cr_field);

   amd64_test_membase_imm_size(iop->ob_ptr,
                               AMD64_R15,PPC32_CR_FIELD_OFFSET(cr_field),
                               (1 << cr_bit),4);

   local_jump = ppc32_jit_tcb_local_addr(b,new_ia,&jump_ptr);

   /*
    * Optimize the jump, depending if the destination is in the same
    * page or not.
    */
   if (local_jump) {
      /* same page: direct conditional branch, patched once target known */
      ppc32_jit_tcb_record_patch(b,iop,iop->ob_ptr,new_ia);
      amd64_branch32(iop->ob_ptr,(cond) ? X86_CC_NZ : X86_CC_Z,0,FALSE);
   } else {
      /* other page: inverted condition skips over the full jump sequence */
      jump_ptr = iop->ob_ptr;
      amd64_branch32(iop->ob_ptr,(cond) ? X86_CC_Z : X86_CC_NZ,0,FALSE);
      ppc32_set_jump(cpu,b,iop,new_ia,TRUE);
      amd64_patch(jump_ptr,iop->ob_ptr);
   }

   ppc32_op_emit_branch_target(cpu,b,new_ia);
   return(0);
}
1422
/* BC - Branch Conditional (full form: CTR decrement + condition check) */
DECLARE_INSN(BC)
{
   int bo = bits(insn,21,25);
   int bi = bits(insn,16,20);
   int bd = bits(insn,2,15);
   int hreg_t0,hreg_t1;
   jit_op_t *iop;
   u_int cr_field,cr_bit;
   m_uint32_t new_ia;
   u_char *jump_ptr;
   int local_jump;
   int cond,ctr;

   ppc32_op_emit_basic_opcode(cpu,JIT_OP_BRANCH_JUMP);

   iop = ppc32_op_emit_insn_output(cpu,5,"bc");

   ppc32_jit_start_hreg_seq(cpu,"bc");
   hreg_t0 = ppc32_jit_alloc_hreg(cpu,-1);
   hreg_t1 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_alter_host_reg(cpu,hreg_t0);

   /* Get the wanted value for the condition bit (BO[3]) and CTR test
      polarity (BO[1]) */
   cond = (bo >> 3) & 0x1;
   ctr = (bo >> 1) & 0x1;

   /* Set the return address (LK bit set) */
   if (insn & 1) {
      ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));
      ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1)<<2));
   }

   /* Compute the new ia: BD << 2, relative unless the AA bit is set */
   new_ia = sign_extend_32(bd << 2,16);
   if (!(insn & 0x02))
      new_ia += b->start_ia + (b->ppc_trans_pos << 2);

   /* $t0 accumulates the conjunction of all enabled branch conditions */
   amd64_mov_reg_imm(iop->ob_ptr,hreg_t0,1);

   /* Decrement the count register (unless BO[2] says to skip it) */
   if (!(bo & 0x04)) {
      amd64_dec_membase_size(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ctr),4);
      amd64_set_reg(iop->ob_ptr,(ctr) ? X86_CC_Z : X86_CC_NZ,hreg_t1,FALSE);
      amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,hreg_t1);
   }

   /* Test the condition bit (unless BO[4] says "branch always") */
   if (!((bo >> 4) & 0x01)) {
      cr_field = ppc32_get_cr_field(bi);
      cr_bit = ppc32_get_cr_bit(bi);

      ppc32_op_emit_require_flags(cpu,cr_field);

      amd64_test_membase_imm_size(iop->ob_ptr,
                                  AMD64_R15,PPC32_CR_FIELD_OFFSET(cr_field),
                                  (1 << cr_bit),4);

      amd64_set_reg(iop->ob_ptr,(cond) ? X86_CC_NZ : X86_CC_Z,hreg_t1,FALSE);
      amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,hreg_t1);
   }

   /* keep only the low bit: also sets ZF used by the branches below */
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   local_jump = ppc32_jit_tcb_local_addr(b,new_ia,&jump_ptr);

   /*
    * Optimize the jump, depending if the destination is in the same
    * page or not.
    */
   if (local_jump) {
      /* same page: direct conditional branch, patched once target known */
      ppc32_jit_tcb_record_patch(b,iop,iop->ob_ptr,new_ia);
      amd64_branch32(iop->ob_ptr,X86_CC_NZ,0,FALSE);
   } else {
      /* other page: inverted condition skips over the full jump sequence */
      jump_ptr = iop->ob_ptr;
      amd64_branch32(iop->ob_ptr,X86_CC_Z,0,FALSE);
      ppc32_set_jump(cpu,b,iop,new_ia,TRUE);
      amd64_patch(jump_ptr,iop->ob_ptr);
   }

   ppc32_op_emit_branch_target(cpu,b,new_ia);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1509
1510 /* BCLR - Branch Conditional to Link register */
1511 DECLARE_INSN(BCLR)
1512 {
1513 int bo = bits(insn,21,25);
1514 int bi = bits(insn,16,20);
1515 int bd = bits(insn,2,15);
1516 int hreg_t0,hreg_t1;
1517 jit_op_t *iop;
1518 u_int cr_field,cr_bit;
1519 m_uint32_t new_ia;
1520 u_char *jump_ptr;
1521 int cond,ctr;
1522
1523 ppc32_jit_start_hreg_seq(cpu,"bclr");
1524 hreg_t0 = ppc32_jit_alloc_hreg(cpu,-1);
1525 hreg_t1 = ppc32_jit_get_tmp_hreg(cpu);
1526
1527 ppc32_op_emit_alter_host_reg(cpu,hreg_t0);
1528
1529 iop = ppc32_op_emit_insn_output(cpu,5,"bclr");
1530
1531 /* Get the wanted value for the condition bit and CTR value */
1532 cond = (bo >> 3) & 0x1;
1533 ctr = (bo >> 1) & 0x1;
1534
1535 /* Compute the new ia */
1536 new_ia = sign_extend_32(bd << 2,16);
1537 if (!(insn & 0x02))
1538 new_ia += b->start_ia + (b->ppc_trans_pos << 2);
1539
1540 amd64_mov_reg_imm(iop->ob_ptr,hreg_t0,1);
1541
1542 /* Decrement the count register */
1543 if (!(bo & 0x04)) {
1544 amd64_dec_membase_size(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ctr),4);
1545 amd64_set_reg(iop->ob_ptr,(ctr) ? X86_CC_Z : X86_CC_NZ,hreg_t1,FALSE);
1546 amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,hreg_t1);
1547 }
1548
1549 /* Test the condition bit */
1550 if (!((bo >> 4) & 0x01)) {
1551 cr_field = ppc32_get_cr_field(bi);
1552 cr_bit = ppc32_get_cr_bit(bi);
1553
1554 ppc32_op_emit_require_flags(cpu,cr_field);
1555
1556 amd64_test_membase_imm_size(iop->ob_ptr,
1557 AMD64_R15,PPC32_CR_FIELD_OFFSET(cr_field),
1558 (1 << cr_bit),4);
1559
1560 amd64_set_reg(iop->ob_ptr,(cond) ? X86_CC_NZ : X86_CC_Z,hreg_t1,FALSE);
1561 amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,hreg_t1);
1562 }
1563
1564 /* Set the return address */
1565 amd64_mov_reg_membase(iop->ob_ptr,hreg_t1,AMD64_R15,OFFSET(cpu_ppc_t,lr),4);
1566
1567 if (insn & 1) {
1568 ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));
1569 ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1)<<2));
1570 }
1571
1572 /* Branching */
1573 amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);
1574
1575 jump_ptr = iop->ob_ptr;
1576 amd64_branch32(iop->ob_ptr,X86_CC_Z,0,FALSE);
1577
1578 amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t1,0xFFFFFFFC);
1579 amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ia),hreg_t1,4);
1580 ppc32_jit_tcb_push_epilog(&iop->ob_ptr);
1581
1582 amd64_patch(jump_ptr,iop->ob_ptr);
1583
1584 ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
1585
1586 ppc32_jit_close_hreg_seq(cpu);
1587 return(0);
1588 }
1589
1590 /* CMP - Compare */
1591 DECLARE_INSN(CMP)
1592 {
1593 int rd = bits(insn,23,25);
1594 int ra = bits(insn,16,20);
1595 int rb = bits(insn,11,15);
1596 int hreg_ra,hreg_rb;
1597 jit_op_t *iop;
1598
1599 ppc32_jit_start_hreg_seq(cpu,"cmp");
1600 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
1601 hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);
1602
1603 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
1604 ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);
1605
1606 iop = ppc32_op_emit_insn_output(cpu,1,"cmp");
1607
1608 amd64_alu_reg_reg_size(iop->ob_ptr,X86_CMP,hreg_ra,hreg_rb,4);
1609 ppc32_op_emit_update_flags(cpu,rd,TRUE);
1610
1611 ppc32_jit_close_hreg_seq(cpu);
1612 return(0);
1613 }
1614
1615 /* CMPI - Compare Immediate */
1616 DECLARE_INSN(CMPI)
1617 {
1618 int rd = bits(insn,23,25);
1619 int ra = bits(insn,16,20);
1620 m_uint16_t imm = bits(insn,0,15);
1621 m_uint32_t tmp = sign_extend_32(imm,16);
1622 int hreg_ra;
1623 jit_op_t *iop;
1624
1625 ppc32_jit_start_hreg_seq(cpu,"cmpi");
1626 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
1627 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
1628
1629 iop = ppc32_op_emit_insn_output(cpu,1,"cmpi");
1630
1631 amd64_alu_reg_imm_size(iop->ob_ptr,X86_CMP,hreg_ra,tmp,4);
1632 ppc32_op_emit_update_flags(cpu,rd,TRUE);
1633
1634 ppc32_jit_close_hreg_seq(cpu);
1635 return(0);
1636 }
1637
1638 /* CMPL - Compare Logical */
1639 DECLARE_INSN(CMPL)
1640 {
1641 int rd = bits(insn,23,25);
1642 int ra = bits(insn,16,20);
1643 int rb = bits(insn,11,15);
1644 int hreg_ra,hreg_rb;
1645 jit_op_t *iop;
1646
1647 ppc32_jit_start_hreg_seq(cpu,"cmpl");
1648 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
1649 hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);
1650
1651 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
1652 ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);
1653
1654 iop = ppc32_op_emit_insn_output(cpu,1,"cmpl");
1655
1656 amd64_alu_reg_reg_size(iop->ob_ptr,X86_CMP,hreg_ra,hreg_rb,4);
1657 ppc32_op_emit_update_flags(cpu,rd,FALSE);
1658
1659 ppc32_jit_close_hreg_seq(cpu);
1660 return(0);
1661 }
1662
1663 /* CMPLI - Compare Immediate */
1664 DECLARE_INSN(CMPLI)
1665 {
1666 int rd = bits(insn,23,25);
1667 int ra = bits(insn,16,20);
1668 m_uint32_t imm = bits(insn,0,15);
1669 int hreg_ra;
1670 jit_op_t *iop;
1671
1672 ppc32_jit_start_hreg_seq(cpu,"cmpli");
1673 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
1674 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
1675
1676 iop = ppc32_op_emit_insn_output(cpu,1,"cmpli");
1677
1678 amd64_alu_reg_imm_size(iop->ob_ptr,X86_CMP,hreg_ra,imm,4);
1679 ppc32_op_emit_update_flags(cpu,rd,FALSE);
1680
1681 ppc32_jit_close_hreg_seq(cpu);
1682 return(0);
1683 }
1684
/* CRAND - Condition Register AND: CR[bd] = CR[ba] & CR[bb] */
DECLARE_INSN(CRAND)
{
   int bd = bits(insn,21,25);
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);
   int hreg_t0;
   jit_op_t *iop;

   /* RDX is clobbered below: make sure its current content is flushed */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"crand");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* the three involved CR fields must be synced to memory first */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"crand");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,hreg_t0,FALSE);

   /* result of AND between $ba and $bb */
   amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,AMD64_RDX);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result: first clear the target
      bit in the CR field... */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   /* ...then OR in the computed bit, shifted into position */
   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1739
/* CRANDC - Condition Register AND with Complement: CR[bd] = CR[ba] & ~CR[bb] */
DECLARE_INSN(CRANDC)
{
   int bd = bits(insn,21,25);
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);
   int hreg_t0;
   jit_op_t *iop;

   /* RDX is clobbered below: make sure its current content is flushed */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"crandc");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* the three involved CR fields must be synced to memory first */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"crandc");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit: SETZ captures the *complement* of $bb directly */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_Z,hreg_t0,FALSE);

   /* result of AND between $ba and ~$bb (complement done by SETZ above) */
   amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,AMD64_RDX);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result: clear then OR in */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1794
/* CREQV - Condition Register Equivalent: CR[bd] = ~(CR[ba] ^ CR[bb]) */
DECLARE_INSN(CREQV)
{
   int bd = bits(insn,21,25);
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);
   int hreg_t0;
   jit_op_t *iop;

   /* RDX is clobbered below: make sure its current content is flushed */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"creqv");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* the three involved CR fields must be synced to memory first */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"creqv");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,hreg_t0,FALSE);

   /* result of XOR between $ba and $bb, then complemented (EQV = ~XOR) */
   amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t0,AMD64_RDX);
   amd64_not_reg(iop->ob_ptr,hreg_t0);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result: clear then OR in */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1850
/* CRNAND - Condition Register NAND: CR[bd] = ~(CR[ba] & CR[bb]) */
DECLARE_INSN(CRNAND)
{
   int bd = bits(insn,21,25);
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);
   int hreg_t0;
   jit_op_t *iop;

   /* RDX is clobbered below: make sure its current content is flushed */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"crnand");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* the three involved CR fields must be synced to memory first */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"crnand");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,hreg_t0,FALSE);

   /* result of NAND between $ba and $bb (AND then complement) */
   amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,AMD64_RDX);
   amd64_not_reg(iop->ob_ptr,hreg_t0);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result: clear then OR in */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1906
/* CRNOR - Condition Register NOR: CR[bd] = ~(CR[ba] | CR[bb]) */
DECLARE_INSN(CRNOR)
{
   int bd = bits(insn,21,25);
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);
   int hreg_t0;
   jit_op_t *iop;

   /* RDX is clobbered below: make sure its current content is flushed */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"crnor");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* the three involved CR fields must be synced to memory first */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"crnor");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,hreg_t0,FALSE);

   /* result of NOR between $ba and $bb (OR then complement) */
   amd64_alu_reg_reg(iop->ob_ptr,X86_OR,hreg_t0,AMD64_RDX);
   amd64_not_reg(iop->ob_ptr,hreg_t0);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result: clear then OR in */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1962
/* CROR - Condition Register OR: CR[bd] = CR[ba] | CR[bb] */
DECLARE_INSN(CROR)
{
   int bd = bits(insn,21,25);
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);
   int hreg_t0;
   jit_op_t *iop;

   /* RDX is clobbered below: make sure its current content is flushed */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"cror");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* the three involved CR fields must be synced to memory first */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"cror");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,hreg_t0,FALSE);

   /* result of OR between $ba and $bb (old comment wrongly said NOR) */
   amd64_alu_reg_reg(iop->ob_ptr,X86_OR,hreg_t0,AMD64_RDX);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result: clear then OR in */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2017
/* CRORC - Condition Register OR with Complement: CR[bd] = CR[ba] | ~CR[bb] */
DECLARE_INSN(CRORC)
{
   int bd = bits(insn,21,25);
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);
   int hreg_t0;
   jit_op_t *iop;

   /* RDX is clobbered below: make sure its current content is flushed */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"crorc");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* the three involved CR fields must be synced to memory first */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"crorc");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit: SETZ captures the *complement* of $bb directly */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_Z,hreg_t0,FALSE);

   /* result of ORC between $ba and $bb (complement done by SETZ above) */
   amd64_alu_reg_reg(iop->ob_ptr,X86_OR,hreg_t0,AMD64_RDX);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result: clear then OR in */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2072
/* CRXOR - Condition Register XOR: CR[bd] = CR[ba] ^ CR[bb] */
DECLARE_INSN(CRXOR)
{
   int bd = bits(insn,21,25);   /* destination CR bit */
   int bb = bits(insn,16,20);   /* source CR bit */
   int ba = bits(insn,11,15);   /* source CR bit */
   int hreg_t0;
   jit_op_t *iop;

   /* RDX is clobbered below: discard any value cached in it first */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"crxor");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* the three CR fields involved must hold materialized flag values */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"crxor");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,hreg_t0,FALSE);

   /* result of XOR between $ba and $bb */
   amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t0,AMD64_RDX);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2127
/* DIVWU - Divide Word Unsigned: $rd = (uint32)$ra / (uint32)$rb */
DECLARE_INSN(DIVWU)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rb;
   jit_op_t *iop;

   /* x86 DIV implicitly uses edx:eax, so both must be reserved */
   ppc32_jit_start_hreg_seq(cpu,"divwu");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RAX);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   /* $rd = $ra / $rb */
   ppc32_op_emit_load_gpr(cpu,AMD64_RAX,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"divwu");
   /* zero the high half of the 64-bit dividend (edx:eax) */
   ppc32_load_imm(&iop->ob_ptr,AMD64_RDX,0);

   /* NOTE(review): no guard against $rb == 0 -- a guest divide by zero
      raises #DE on the host rather than producing the PPC's undefined
      result. TODO: confirm this is acceptable / handled elsewhere. */
   amd64_div_reg_size(iop->ob_ptr,hreg_rb,0,4);

   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,AMD64_RAX,AMD64_RAX,4);

   ppc32_op_emit_store_gpr(cpu,rd,AMD64_RAX);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   /* edx:eax are directly modified: throw them */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RAX);
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2166
/* EQV - Equivalent: $ra = ~($rs ^ $rb), with optional CR0 update (Rc bit) */
DECLARE_INSN(EQV)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra,hreg_rb;
   jit_op_t *iop;

   /* $ra = ~($rs ^ $rb) */
   ppc32_jit_start_hreg_seq(cpu,"eqv");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,1,"eqv");

   /* avoid the extra mov when $ra aliases one of the sources */
   if (ra == rs)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_XOR,hreg_ra,hreg_rb,4);
   else if (ra == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_XOR,hreg_ra,hreg_rs,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_XOR,hreg_ra,hreg_rb,4);
   }

   amd64_not_reg(iop->ob_ptr,hreg_ra);

   /* NOT does not set host flags: re-test the result for Rc */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2209
/* EXTSB - Extend Sign Byte: $ra = (int32)(int8)$rs */
DECLARE_INSN(EXTSB)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* $ra = extsb($rs) */
   ppc32_jit_start_hreg_seq(cpu,"extsb");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,2,"extsb");

   if (rs != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   /* sign-extend the low byte: shift it to the top, arithmetic shift back */
   amd64_shift_reg_imm_size(iop->ob_ptr,X86_SHL,hreg_ra,24,4);
   amd64_shift_reg_imm_size(iop->ob_ptr,X86_SAR,hreg_ra,24,4);

   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2244
/* EXTSH - Extend Sign Half-Word: $ra = (int32)(int16)$rs */
DECLARE_INSN(EXTSH)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* $ra = extsh($rs) */
   ppc32_jit_start_hreg_seq(cpu,"extsh");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,2,"extsh");

   if (rs != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   /* sign-extend the low half-word: shift up, arithmetic shift back */
   amd64_shift_reg_imm_size(iop->ob_ptr,X86_SHL,hreg_ra,16,4);
   amd64_shift_reg_imm_size(iop->ob_ptr,X86_SAR,hreg_ra,16,4);

   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2279
2280 /* LBZ - Load Byte and Zero */
2281 DECLARE_INSN(LBZ)
2282 {
2283 int rs = bits(insn,21,25);
2284 int ra = bits(insn,16,20);
2285 m_uint16_t offset = bits(insn,0,15);
2286
2287 //ppc32_emit_memop(b,PPC_MEMOP_LBZ,ra,offset,rs,0);
2288 ppc32_emit_memop_fast(cpu,b,0,PPC_MEMOP_LBZ,ra,offset,rs,
2289 ppc32_memop_fast_lbz);
2290 return(0);
2291 }
2292
2293 /* LBZU - Load Byte and Zero with Update */
2294 DECLARE_INSN(LBZU)
2295 {
2296 int rs = bits(insn,21,25);
2297 int ra = bits(insn,16,20);
2298 m_uint16_t offset = bits(insn,0,15);
2299
2300 ppc32_emit_memop(cpu,b,PPC_MEMOP_LBZ,ra,offset,rs,1);
2301 return(0);
2302 }
2303
2304 /* LBZUX - Load Byte and Zero with Update Indexed */
2305 DECLARE_INSN(LBZUX)
2306 {
2307 int rs = bits(insn,21,25);
2308 int ra = bits(insn,16,20);
2309 int rb = bits(insn,11,15);
2310
2311 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LBZ,ra,rb,rs,1);
2312 return(0);
2313 }
2314
2315 /* LBZX - Load Byte and Zero Indexed */
2316 DECLARE_INSN(LBZX)
2317 {
2318 int rs = bits(insn,21,25);
2319 int ra = bits(insn,16,20);
2320 int rb = bits(insn,11,15);
2321
2322 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LBZ,ra,rb,rs,0);
2323 return(0);
2324 }
2325
2326 /* LHA - Load Half-Word Algebraic */
2327 DECLARE_INSN(LHA)
2328 {
2329 int rs = bits(insn,21,25);
2330 int ra = bits(insn,16,20);
2331 m_uint16_t offset = bits(insn,0,15);
2332
2333 ppc32_emit_memop(cpu,b,PPC_MEMOP_LHA,ra,offset,rs,0);
2334 return(0);
2335 }
2336
2337 /* LHAU - Load Half-Word Algebraic with Update */
2338 DECLARE_INSN(LHAU)
2339 {
2340 int rs = bits(insn,21,25);
2341 int ra = bits(insn,16,20);
2342 m_uint16_t offset = bits(insn,0,15);
2343
2344 ppc32_emit_memop(cpu,b,PPC_MEMOP_LHA,ra,offset,rs,1);
2345 return(0);
2346 }
2347
2348 /* LHAUX - Load Half-Word Algebraic with Update Indexed */
2349 DECLARE_INSN(LHAUX)
2350 {
2351 int rs = bits(insn,21,25);
2352 int ra = bits(insn,16,20);
2353 int rb = bits(insn,11,15);
2354
2355 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LHA,ra,rb,rs,1);
2356 return(0);
2357 }
2358
2359 /* LHAX - Load Half-Word Algebraic Indexed */
2360 DECLARE_INSN(LHAX)
2361 {
2362 int rs = bits(insn,21,25);
2363 int ra = bits(insn,16,20);
2364 int rb = bits(insn,11,15);
2365
2366 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LHA,ra,rb,rs,0);
2367 return(0);
2368 }
2369
2370 /* LHZ - Load Half-Word and Zero */
2371 DECLARE_INSN(LHZ)
2372 {
2373 int rs = bits(insn,21,25);
2374 int ra = bits(insn,16,20);
2375 m_uint16_t offset = bits(insn,0,15);
2376
2377 ppc32_emit_memop(cpu,b,PPC_MEMOP_LHZ,ra,offset,rs,0);
2378 return(0);
2379 }
2380
2381 /* LHZU - Load Half-Word and Zero with Update */
2382 DECLARE_INSN(LHZU)
2383 {
2384 int rs = bits(insn,21,25);
2385 int ra = bits(insn,16,20);
2386 m_uint16_t offset = bits(insn,0,15);
2387
2388 ppc32_emit_memop(cpu,b,PPC_MEMOP_LHZ,ra,offset,rs,1);
2389 return(0);
2390 }
2391
2392 /* LHZUX - Load Half-Word and Zero with Update Indexed */
2393 DECLARE_INSN(LHZUX)
2394 {
2395 int rs = bits(insn,21,25);
2396 int ra = bits(insn,16,20);
2397 int rb = bits(insn,11,15);
2398
2399 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LHZ,ra,rb,rs,1);
2400 return(0);
2401 }
2402
2403 /* LHZX - Load Half-Word and Zero Indexed */
2404 DECLARE_INSN(LHZX)
2405 {
2406 int rs = bits(insn,21,25);
2407 int ra = bits(insn,16,20);
2408 int rb = bits(insn,11,15);
2409
2410 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LHZ,ra,rb,rs,0);
2411 return(0);
2412 }
2413
2414 /* LWZ - Load Word and Zero */
2415 DECLARE_INSN(LWZ)
2416 {
2417 int rs = bits(insn,21,25);
2418 int ra = bits(insn,16,20);
2419 m_uint16_t offset = bits(insn,0,15);
2420
2421 //ppc32_emit_memop(b,PPC_MEMOP_LWZ,ra,offset,rs,0);
2422 ppc32_emit_memop_fast(cpu,b,0,PPC_MEMOP_LWZ,ra,offset,rs,
2423 ppc32_memop_fast_lwz);
2424 return(0);
2425 }
2426
2427 /* LWZU - Load Word and Zero with Update */
2428 DECLARE_INSN(LWZU)
2429 {
2430 int rs = bits(insn,21,25);
2431 int ra = bits(insn,16,20);
2432 m_uint16_t offset = bits(insn,0,15);
2433
2434 ppc32_emit_memop(cpu,b,PPC_MEMOP_LWZ,ra,offset,rs,1);
2435 return(0);
2436 }
2437
2438 /* LWZUX - Load Word and Zero with Update Indexed */
2439 DECLARE_INSN(LWZUX)
2440 {
2441 int rs = bits(insn,21,25);
2442 int ra = bits(insn,16,20);
2443 int rb = bits(insn,11,15);
2444
2445 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LWZ,ra,rb,rs,1);
2446 return(0);
2447 }
2448
2449 /* LWZX - Load Word and Zero Indexed */
2450 DECLARE_INSN(LWZX)
2451 {
2452 int rs = bits(insn,21,25);
2453 int ra = bits(insn,16,20);
2454 int rb = bits(insn,11,15);
2455
2456 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LWZ,ra,rb,rs,0);
2457 return(0);
2458 }
2459
/* MCRF - Move Condition Register Field: CR[rd] = CR[rs] (4-bit fields) */
DECLARE_INSN(MCRF)
{
   int rd = bits(insn,23,25);   /* destination CR field */
   int rs = bits(insn,18,20);   /* source CR field */
   int hreg_t0;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mcrf");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   /* source field must hold a materialized flag value */
   ppc32_op_emit_require_flags(cpu,rs);

   iop = ppc32_op_emit_insn_output(cpu,1,"mcrf");

   /* Load "rs" field into the temporary host register */
   amd64_mov_reg_membase(iop->ob_ptr,hreg_t0,
                         AMD64_R15,PPC32_CR_FIELD_OFFSET(rs),4);

   /* Store it in "rd" field */
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,PPC32_CR_FIELD_OFFSET(rd),
                         hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2485
/* MFCR - Move from Condition Register: rebuild the 32-bit CR into $rd */
DECLARE_INSN(MFCR)
{
   int rd = bits(insn,21,25);
   int hreg_rd,hreg_t0;
   jit_op_t *iop;
   int i;

   ppc32_jit_start_hreg_seq(cpu,"mfcr");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   /* every CR field is read, so all must be materialized */
   ppc32_op_emit_require_flags(cpu,JIT_OP_PPC_ALL_FLAGS);

   iop = ppc32_op_emit_insn_output(cpu,3,"mfcr");

   amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_rd,hreg_rd);

   /* concatenate the 8 nibble-wide fields, CR0 ending in the top nibble */
   for(i=0;i<8;i++) {
      /* load field i into the temporary register */
      amd64_mov_reg_membase(iop->ob_ptr,hreg_t0,
                            AMD64_R15,PPC32_CR_FIELD_OFFSET(i),4);
      amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_rd,4);
      amd64_alu_reg_reg(iop->ob_ptr,X86_OR,hreg_rd,hreg_t0);
   }

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2516
2517 /* MFMSR - Move from Machine State Register */
2518 DECLARE_INSN(MFMSR)
2519 {
2520 int rd = bits(insn,21,25);
2521 int hreg_rd;
2522 jit_op_t *iop;
2523
2524 ppc32_jit_start_hreg_seq(cpu,"mfmsr");
2525 hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
2526
2527 iop = ppc32_op_emit_insn_output(cpu,1,"mfmsr");
2528 amd64_mov_reg_membase(iop->ob_ptr,hreg_rd,
2529 AMD64_R15,OFFSET(cpu_ppc_t,msr),4);
2530 ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);
2531
2532 ppc32_jit_close_hreg_seq(cpu);
2533 return(0);
2534 }
2535
2536 /* MFSR - Move From Segment Register */
2537 DECLARE_INSN(MFSR)
2538 {
2539 int rd = bits(insn,21,25);
2540 int sr = bits(insn,16,19);
2541 int hreg_rd;
2542 jit_op_t *iop;
2543
2544 ppc32_jit_start_hreg_seq(cpu,"mfsr");
2545 hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
2546
2547 iop = ppc32_op_emit_insn_output(cpu,1,"mfsr");
2548
2549 amd64_mov_reg_membase(iop->ob_ptr,hreg_rd,
2550 AMD64_R15,(OFFSET(cpu_ppc_t,sr) + (sr << 2)),4);
2551 ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);
2552
2553 ppc32_jit_close_hreg_seq(cpu);
2554 return(0);
2555 }
2556
/* MTCRF - Move to Condition Register Fields: copy selected nibbles of $rs
   into the CR fields enabled by the CRM mask */
DECLARE_INSN(MTCRF)
{
   int rs = bits(insn,21,25);
   int crm = bits(insn,12,19);   /* field mask, bit 7-i selects field i */
   int hreg_rs,hreg_t0;
   jit_op_t *iop;
   int i;

   ppc32_jit_start_hreg_seq(cpu,"mtcrf");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,4,"mtcrf");

   for(i=0;i<8;i++)
      if (crm & (1 << (7 - i))) {
         amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rs,4);

         /* field 7 is already in the low nibble: no shift needed */
         if (i != 7)
            amd64_shift_reg_imm(iop->ob_ptr,X86_SHR,hreg_t0,28 - (i << 2));

         amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x0F);
         amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,PPC32_CR_FIELD_OFFSET(i),
                               hreg_t0,4);
      }

   /* cached host flags no longer match the stored CR: invalidate them */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_TRASH_FLAGS);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2591
/* MULHW - Multiply High Word (signed): $rd = hi32($ra * $rb) */
DECLARE_INSN(MULHW)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rb;
   jit_op_t *iop;

   /* x86 IMUL implicitly uses edx:eax, so both must be reserved */
   ppc32_jit_start_hreg_seq(cpu,"mulhw");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RAX);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,AMD64_RAX,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   /* rd = hi(ra * rb) -- third arg 1 selects signed multiply */
   iop = ppc32_op_emit_insn_output(cpu,2,"mulhw");
   amd64_mul_reg_size(iop->ob_ptr,hreg_rb,1,4);

   /* high half lands in edx */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,AMD64_RDX,AMD64_RDX,4);

   ppc32_op_emit_store_gpr(cpu,rd,AMD64_RDX);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   /* edx:eax are directly modified: throw them */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RAX);
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2628
/* MULHWU - Multiply High Word Unsigned: $rd = hi32((uint)$ra * (uint)$rb) */
DECLARE_INSN(MULHWU)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rb;
   jit_op_t *iop;

   /* x86 MUL implicitly uses edx:eax, so both must be reserved */
   ppc32_jit_start_hreg_seq(cpu,"mulhwu");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RAX);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,AMD64_RAX,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   /* rd = hi(ra * rb) -- third arg 0 selects unsigned multiply */
   iop = ppc32_op_emit_insn_output(cpu,2,"mulhwu");
   amd64_mul_reg_size(iop->ob_ptr,hreg_rb,0,4);

   /* high half lands in edx */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,AMD64_RDX,AMD64_RDX,4);

   ppc32_op_emit_store_gpr(cpu,rd,AMD64_RDX);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   /* edx:eax are directly modified: throw them */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RAX);
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2665
/* MULLI - Multiply Low Immediate: $rd = lo32($ra * sext(imm16)) */
DECLARE_INSN(MULLI)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint32_t imm = bits(insn,0,15);
   int hreg_t0;
   jit_op_t *iop;

   /* x86 IMUL implicitly uses edx:eax, so both must be reserved */
   ppc32_jit_start_hreg_seq(cpu,"mulli");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RAX);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_load_gpr(cpu,AMD64_RAX,ra);

   /* rd = lo(ra * imm) */
   iop = ppc32_op_emit_insn_output(cpu,2,"mulli");

   /* the 16-bit immediate is sign-extended before the signed multiply */
   ppc32_load_imm(&iop->ob_ptr,hreg_t0,sign_extend_32(imm,16));
   amd64_mul_reg_size(iop->ob_ptr,hreg_t0,1,4);
   ppc32_op_emit_store_gpr(cpu,rd,AMD64_RAX);

   /* edx:eax are directly modified: throw them */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RAX);
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2696
/* MULLW - Multiply Low Word: $rd = lo32($ra * $rb) */
DECLARE_INSN(MULLW)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rb;
   jit_op_t *iop;

   /* x86 IMUL implicitly uses edx:eax, so both must be reserved */
   ppc32_jit_start_hreg_seq(cpu,"mullw");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RAX);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,AMD64_RAX,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   /* rd = lo(ra * rb) -- third arg 1 selects signed multiply */
   iop = ppc32_op_emit_insn_output(cpu,2,"mullw");
   amd64_mul_reg_size(iop->ob_ptr,hreg_rb,1,4);

   /* low half lands in eax */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,AMD64_RAX,AMD64_RAX,4);

   ppc32_op_emit_store_gpr(cpu,rd,AMD64_RAX);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   /* edx:eax are directly modified: throw them */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RAX);
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2733
/* NAND: $ra = ~($rs & $rb), with optional CR0 update (Rc bit) */
DECLARE_INSN(NAND)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra,hreg_rb;
   jit_op_t *iop;

   /* $ra = ~($rs & $rb) */
   ppc32_jit_start_hreg_seq(cpu,"nand");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"nand");

   /* avoid the extra mov when $ra aliases one of the sources */
   if (ra == rs)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_rb,4);
   else if (ra == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_rs,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_rb,4);
   }

   amd64_not_reg(iop->ob_ptr,hreg_ra);

   /* NOT does not set host flags: re-test the result for Rc */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2776
2777 /* NEG */
2778 DECLARE_INSN(NEG)
2779 {
2780 int rd = bits(insn,21,25);
2781 int ra = bits(insn,16,20);
2782 int hreg_rd,hreg_ra;
2783 jit_op_t *iop;
2784
2785 /* $rd = neg($ra) */
2786 ppc32_jit_start_hreg_seq(cpu,"neg");
2787 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
2788 hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
2789
2790 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
2791
2792 iop = ppc32_op_emit_insn_output(cpu,1,"neg");
2793
2794 if (rd != ra)
2795 amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);
2796
2797 amd64_neg_reg(iop->ob_ptr,hreg_rd);
2798
2799 if (insn & 1)
2800 amd64_test_reg_reg_size(iop->ob_ptr,hreg_rd,hreg_rd,4);
2801
2802 ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);
2803
2804 if (insn & 1)
2805 ppc32_op_emit_update_flags(cpu,0,TRUE);
2806
2807 ppc32_jit_close_hreg_seq(cpu);
2808 return(0);
2809 }
2810
/* NOR: $ra = ~($rs | $rb), with optional CR0 update (Rc bit) */
DECLARE_INSN(NOR)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra,hreg_rb;
   jit_op_t *iop;

   /* $ra = ~($rs | $rb) */
   ppc32_jit_start_hreg_seq(cpu,"nor");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"nor");

   /* avoid the extra mov when $ra aliases one of the sources */
   if (ra == rs)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_rb,4);
   else if (ra == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_rs,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_rb,4);
   }

   amd64_not_reg(iop->ob_ptr,hreg_ra);

   /* NOT does not set host flags: re-test the result for Rc */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2853
/* OR: $ra = $rs | $rb, with optional CR0 update (Rc bit).
   "or rx,rx,rx" is the canonical PPC register move, handled specially. */
DECLARE_INSN(OR)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra,hreg_rb;
   jit_op_t *iop;

   /* $ra = $rs | $rb */
   ppc32_jit_start_hreg_seq(cpu,"or");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   /* special optimization for move/nop operation */
   if (rs == rb) {
      ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
      iop = ppc32_op_emit_insn_output(cpu,2,"or");

      /* $rs | $rs == $rs: a plain move (or a nop when ra == rs) */
      if (ra != rs)
         amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

      if (insn & 1)
         amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

      ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

      if (insn & 1)
         ppc32_op_emit_update_flags(cpu,0,TRUE);

      ppc32_jit_close_hreg_seq(cpu);
      return(0);
   }

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"or");

   /* avoid the extra mov when $ra aliases one of the sources;
      the final OR sets the host flags used by update_flags below */
   if (ra == rs) {
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_rb,4);
   } else if (ra == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_rs,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_rb,4);
   }

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2911
/* ORC - OR with Complement: $ra = $rs | ~$rb, optional CR0 update (Rc) */
DECLARE_INSN(ORC)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra,hreg_rb,hreg_t0;
   jit_op_t *iop;

   /* $ra = $rs | ~$rb */
   ppc32_jit_start_hreg_seq(cpu,"orc");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,1,"orc");

   /* $t0 = ~$rb */
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rb,4);
   amd64_not_reg(iop->ob_ptr,hreg_t0);

   /* $ra = $rs | $t0 (the OR sets the flags used by update_flags below) */
   if (ra == rs)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_t0,4);
   else {
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_t0,hreg_rs,4);
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_t0,4);
   }

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2953
2954 /* OR Immediate */
2955 DECLARE_INSN(ORI)
2956 {
2957 int rs = bits(insn,21,25);
2958 int ra = bits(insn,16,20);
2959 m_uint16_t imm = bits(insn,0,15);
2960 m_uint32_t tmp = imm;
2961 int hreg_rs,hreg_ra;
2962 jit_op_t *iop;
2963
2964 /* $ra = $rs | imm */
2965 ppc32_jit_start_hreg_seq(cpu,"ori");
2966 hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
2967 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
2968
2969 ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
2970
2971 iop = ppc32_op_emit_insn_output(cpu,1,"ori");
2972
2973 if (ra != rs)
2974 amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
2975
2976 amd64_alu_reg_imm_size(iop->ob_ptr,X86_OR,hreg_ra,tmp,4);
2977 ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);
2978
2979 ppc32_jit_close_hreg_seq(cpu);
2980 return(0);
2981 }
2982
2983 /* OR Immediate Shifted */
2984 DECLARE_INSN(ORIS)
2985 {
2986 int rs = bits(insn,21,25);
2987 int ra = bits(insn,16,20);
2988 m_uint16_t imm = bits(insn,0,15);
2989 m_uint32_t tmp = imm << 16;
2990 int hreg_rs,hreg_ra;
2991 jit_op_t *iop;
2992
2993 /* $ra = $rs | imm */
2994 ppc32_jit_start_hreg_seq(cpu,"oris");
2995 hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
2996 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
2997
2998 ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
2999
3000 iop = ppc32_op_emit_insn_output(cpu,1,"oris");
3001
3002 if (ra != rs)
3003 amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
3004
3005 amd64_alu_reg_imm_size(iop->ob_ptr,X86_OR,hreg_ra,tmp,4);
3006 ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);
3007
3008 ppc32_jit_close_hreg_seq(cpu);
3009 return(0);
3010 }
3011
/* RLWIMI - Rotate Left Word Immediate then Mask Insert:
   $ra = (rotl32($rs,sh) & mask) | ($ra & ~mask) */
DECLARE_INSN(RLWIMI)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int sh = bits(insn,11,15);   /* rotate count */
   int mb = bits(insn,6,10);    /* mask begin bit */
   int me = bits(insn,1,5);     /* mask end bit */
   register m_uint32_t mask;
   int hreg_rs,hreg_ra,hreg_t0;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"rlwimi");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   /* $ra is both source (kept bits) and destination */
   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

   mask = ppc32_rotate_mask(mb,me);

   iop = ppc32_op_emit_insn_output(cpu,2,"rlwimi");

   /* Apply inverse mask to $ra (skipped when ~mask is all-ones, a no-op) */
   if (mask != 0)
      amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_ra,~mask);

   /* Rotate $rs of "sh" bits and apply the mask */
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rs,4);

   if (sh != 0)
      amd64_shift_reg_imm_size(iop->ob_ptr,X86_ROL,hreg_t0,sh,4);

   if (mask != 0xFFFFFFFF)
      amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,mask);

   /* Store the result (the OR also sets flags for the Rc case) */
   amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_t0,4);
   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3059
/* RLWINM - Rotate Left Word Immediate then AND with Mask:
   $ra = rotl32($rs,sh) & mask(mb,me) */
DECLARE_INSN(RLWINM)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int sh = bits(insn,11,15);   /* rotate count */
   int mb = bits(insn,6,10);    /* mask begin bit */
   int me = bits(insn,1,5);     /* mask end bit */
   register m_uint32_t mask;
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"rlwinm");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,2,"rlwinm");

   /* Rotate $rs of "sh" bits and apply the mask */
   mask = ppc32_rotate_mask(mb,me);

   if (rs != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   if (sh != 0)
      amd64_shift_reg_imm_size(iop->ob_ptr,X86_ROL,hreg_ra,sh,4);

   /* skip the AND when the mask keeps every bit */
   if (mask != 0xFFFFFFFF)
      amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_ra,mask);

   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3103
/* RLWNM - Rotate Left Word then AND with Mask:
   $ra = rotl32($rs, $rb & 0x1f) & mask(mb,me) */
DECLARE_INSN(RLWNM)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);   /* rotate count register */
   int mb = bits(insn,6,10);    /* mask begin bit */
   int me = bits(insn,1,5);     /* mask end bit */
   register m_uint32_t mask;
   int hreg_rs,hreg_ra,hreg_t0;
   jit_op_t *iop;

   /* ecx is directly modified: throw it */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RCX);

   /* the rotate count must live in RCX (x86 variable rotates use CL) */
   ppc32_jit_start_hreg_seq(cpu,"rlwnm");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RCX);

   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,AMD64_RCX,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"rlwnm");

   /* Compute the mask from mb/me */
   mask = ppc32_rotate_mask(mb,me);

   /* Rotate $rs and apply the mask. The 32-bit x86 rotate masks the
      CL count to 5 bits, matching rlwnm's use of rb's low 5 bits. */
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rs,4);

   amd64_shift_reg_size(iop->ob_ptr,X86_ROL,hreg_t0,4);

   if (mask != 0xFFFFFFFF)
      amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,mask);

   amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_t0,4);

   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3156
/* SLW - Shift Left Word: $ra = $rs << ($rb & 0x3f), zero if count >= 32 */
DECLARE_INSN(SLW)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* ecx is directly modified: throw it */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RCX);

   /* the shift count must live in RCX (x86 variable shifts use CL) */
   ppc32_jit_start_hreg_seq(cpu,"slw");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RCX);
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   /* $ra = $rs << $rb. If count >= 32, then null result */
   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,AMD64_RCX,rb);

   iop = ppc32_op_emit_insn_output(cpu,3,"slw");

   /* keep 6 count bits and shift the full 64-bit register: counts of
      32..63 then push everything past bit 31, clearing the low word
      (presumably the upper half of hreg_ra is zero here -- the 32-bit
      GPR load zero-extends; NOTE(review): confirm) */
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,AMD64_RCX,0x3f);

   if (ra != rs)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   amd64_shift_reg(iop->ob_ptr,X86_SHL,hreg_ra);

   /* store the result */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3199
/* SRAWI - Shift Right Algebraic Word Immediate:
   $ra = (int32)$rs >> sh; XER_CA = ($rs < 0) && (bits shifted out != 0) */
DECLARE_INSN(SRAWI)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int sh = bits(insn,11,15);   /* shift count */
   register m_uint32_t mask;
   int hreg_rs,hreg_ra,hreg_t0;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"srawi");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   /* $ra = (int32)$rs >> sh */
   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,3,"srawi");
   /* keep the unshifted value in $t0 for the carry computation */
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rs,4);

   if (ra != rs)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
   amd64_shift_reg_imm_size(iop->ob_ptr,X86_SAR,hreg_ra,sh,4);

   /* set XER_CA depending on the result */
   mask = ~(0xFFFFFFFFU << sh) | 0x80000000;

   /* after the AND, $t0 > 0x80000000 (unsigned) holds exactly when the
      sign bit is set AND at least one shifted-out bit was 1 */
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,mask);
   amd64_alu_reg_imm_size(iop->ob_ptr,X86_CMP,hreg_t0,0x80000000,4);
   amd64_set_reg(iop->ob_ptr,X86_CC_A,hreg_t0,FALSE);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x1);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         hreg_t0,4);

   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3246
/* Shift Right Word: ra = rs >> (rb & 0x3f) (logical), Rc optionally sets CR0 */
DECLARE_INSN(SRW)
{
   int rs = bits(insn,21,25);   /* source register */
   int ra = bits(insn,16,20);   /* destination register */
   int rb = bits(insn,11,15);   /* shift-count register */
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* ecx is directly modified: throw it */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RCX);

   ppc32_jit_start_hreg_seq(cpu,"srw");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RCX);
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   /* $ra = $rs >> $rb. If count >= 32, then null result */
   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,AMD64_RCX,rb);

   iop = ppc32_op_emit_insn_output(cpu,3,"srw");

   /* keep the full 6-bit PPC shift count (0-63); a count >= 32 must
      clear the result — presumably amd64_shift_reg emits a 64-bit
      shift and the 4-byte loads above zero the upper half, so counts
      32-63 yield zero (TODO confirm against the macro definition) */
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,AMD64_RCX,0x3f);

   if (ra != rs)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   amd64_shift_reg(iop->ob_ptr,X86_SHR,hreg_ra);

   /* store the result */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   /* Rc bit set: update CR0 from the flags produced by the TEST above */
   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3289
3290 /* STB - Store Byte */
3291 DECLARE_INSN(STB)
3292 {
3293 int rs = bits(insn,21,25);
3294 int ra = bits(insn,16,20);
3295 m_uint16_t offset = bits(insn,0,15);
3296
3297 //ppc32_emit_memop(b,PPC_MEMOP_STB,ra,offset,rs,0);
3298 ppc32_emit_memop_fast(cpu,b,1,PPC_MEMOP_STB,ra,offset,rs,
3299 ppc32_memop_fast_stb);
3300 return(0);
3301 }
3302
3303 /* STBU - Store Byte with Update */
3304 DECLARE_INSN(STBU)
3305 {
3306 int rs = bits(insn,21,25);
3307 int ra = bits(insn,16,20);
3308 m_uint16_t offset = bits(insn,0,15);
3309
3310 ppc32_emit_memop(cpu,b,PPC_MEMOP_STB,ra,offset,rs,1);
3311 return(0);
3312 }
3313
3314 /* STBUX - Store Byte with Update Indexed */
3315 DECLARE_INSN(STBUX)
3316 {
3317 int rs = bits(insn,21,25);
3318 int ra = bits(insn,16,20);
3319 int rb = bits(insn,11,15);
3320
3321 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_STB,ra,rb,rs,1);
3322 return(0);
3323 }
3324
/* STBX - Store Byte Indexed: EA = (ra|0) + rb, no update of ra.
   (Header previously mislabeled this handler "STBUX".) */
DECLARE_INSN(STBX)
{
   int rs = bits(insn,21,25);   /* register to store */
   int ra = bits(insn,16,20);   /* base register */
   int rb = bits(insn,11,15);   /* index register */

   ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_STB,ra,rb,rs,0);
   return(0);
}
3335
3336 /* STH - Store Half-Word */
3337 DECLARE_INSN(STH)
3338 {
3339 int rs = bits(insn,21,25);
3340 int ra = bits(insn,16,20);
3341 m_uint16_t offset = bits(insn,0,15);
3342
3343 ppc32_emit_memop(cpu,b,PPC_MEMOP_STH,ra,offset,rs,0);
3344 return(0);
3345 }
3346
3347 /* STHU - Store Half-Word with Update */
3348 DECLARE_INSN(STHU)
3349 {
3350 int rs = bits(insn,21,25);
3351 int ra = bits(insn,16,20);
3352 m_uint16_t offset = bits(insn,0,15);
3353
3354 ppc32_emit_memop(cpu,b,PPC_MEMOP_STH,ra,offset,rs,1);
3355 return(0);
3356 }
3357
3358 /* STHUX - Store Half-Word with Update Indexed */
3359 DECLARE_INSN(STHUX)
3360 {
3361 int rs = bits(insn,21,25);
3362 int ra = bits(insn,16,20);
3363 int rb = bits(insn,11,15);
3364
3365 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_STH,ra,rb,rs,1);
3366 return(0);
3367 }
3368
/* STHX - Store Half-Word Indexed: EA = (ra|0) + rb, no update of ra.
   (Header previously mislabeled this handler "STHUX".) */
DECLARE_INSN(STHX)
{
   int rs = bits(insn,21,25);   /* register to store */
   int ra = bits(insn,16,20);   /* base register */
   int rb = bits(insn,11,15);   /* index register */

   ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_STH,ra,rb,rs,0);
   return(0);
}
3379
3380 /* STW - Store Word */
3381 DECLARE_INSN(STW)
3382 {
3383 int rs = bits(insn,21,25);
3384 int ra = bits(insn,16,20);
3385 m_uint16_t offset = bits(insn,0,15);
3386
3387 //ppc32_emit_memop(b,PPC_MEMOP_STW,ra,offset,rs,0);
3388 ppc32_emit_memop_fast(cpu,b,1,PPC_MEMOP_STW,ra,offset,rs,
3389 ppc32_memop_fast_stw);
3390 return(0);
3391 }
3392
3393 /* STWU - Store Word with Update */
3394 DECLARE_INSN(STWU)
3395 {
3396 int rs = bits(insn,21,25);
3397 int ra = bits(insn,16,20);
3398 m_uint16_t offset = bits(insn,0,15);
3399
3400 ppc32_emit_memop(cpu,b,PPC_MEMOP_STW,ra,offset,rs,1);
3401 return(0);
3402 }
3403
3404 /* STWUX - Store Word with Update Indexed */
3405 DECLARE_INSN(STWUX)
3406 {
3407 int rs = bits(insn,21,25);
3408 int ra = bits(insn,16,20);
3409 int rb = bits(insn,11,15);
3410
3411 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_STW,ra,rb,rs,1);
3412 return(0);
3413 }
3414
/* STWX - Store Word Indexed: EA = (ra|0) + rb, no update of ra.
   (Header previously mislabeled this handler "STWUX".) */
DECLARE_INSN(STWX)
{
   int rs = bits(insn,21,25);   /* register to store */
   int ra = bits(insn,16,20);   /* base register */
   int rb = bits(insn,11,15);   /* index register */

   ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_STW,ra,rb,rs,0);
   return(0);
}
3425
/* SUBF - Subtract From: rd = rb - ra, Rc bit optionally sets CR0 */
DECLARE_INSN(SUBF)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rd,hreg_ra,hreg_rb,hreg_t0;
   jit_op_t *iop;

   /* $rd = $rb - $ra */
   ppc32_jit_start_hreg_seq(cpu,"subf");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);

   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"subf");

   if (rd == rb)
      /* rd already holds rb: subtract in place */
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_SUB,hreg_rd,hreg_ra,4);
   else if (rd == ra) {
      /* rd aliases ra: compute in a temporary to avoid clobbering ra */
      amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rb,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_SUB,hreg_t0,hreg_ra,4);
      amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_t0,4);
   } else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_rb,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_SUB,hreg_rd,hreg_ra,4);
   }

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   /* Rc bit set: update CR0 from the result */
   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3467
3468 /* SUBFC - Subtract From Carrying */
3469 DECLARE_INSN(SUBFC)
3470 {
3471 int rd = bits(insn,21,25);
3472 int ra = bits(insn,16,20);
3473 int rb = bits(insn,11,15);
3474 int hreg_ra,hreg_rb,hreg_rd,hreg_t0,hreg_t1;
3475 jit_op_t *iop;
3476
3477 /* $rd = ~$ra + 1 + $rb */
3478 ppc32_jit_start_hreg_seq(cpu,"subfc");
3479 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
3480 hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);
3481 hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
3482
3483 hreg_t0 = ppc32_jit_alloc_hreg(cpu,-1);
3484 hreg_t1 = ppc32_jit_get_tmp_hreg(cpu);
3485
3486 ppc32_op_emit_alter_host_reg(cpu,hreg_t0);
3487 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
3488 ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);
3489
3490 iop = ppc32_op_emit_insn_output(cpu,3,"subfc");
3491
3492 amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t1,hreg_t1);
3493
3494 /* $t0 = ~$ra + 1 */
3495 amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_ra,4);
3496 amd64_not_reg(iop->ob_ptr,hreg_t0);
3497 amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_t0,1,4);
3498 amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
3499 amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
3500 hreg_t1,4);
3501
3502 /* $t0 += $rb */
3503 amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_t0,hreg_rb,4);
3504 amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
3505 amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
3506 AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
3507 hreg_t1,4);
3508
3509 amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_t0,4);
3510
3511 if (insn & 1)
3512 amd64_test_reg_reg_size(iop->ob_ptr,hreg_rd,hreg_rd,4);
3513
3514 ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);
3515
3516 /* update cr0 */
3517 if (insn & 1)
3518 ppc32_update_cr0(b);
3519
3520 ppc32_jit_close_hreg_seq(cpu);
3521 return(0);
3522 }
3523
3524 /* SUBFE - Subtract From Extended */
3525 DECLARE_INSN(SUBFE)
3526 {
3527 int rd = bits(insn,21,25);
3528 int ra = bits(insn,16,20);
3529 int rb = bits(insn,11,15);
3530 int hreg_ra,hreg_rb,hreg_rd,hreg_t0,hreg_t1;
3531 jit_op_t *iop;
3532
3533 /* $rd = ~$ra + $carry (xer_ca) + $rb */
3534 ppc32_jit_start_hreg_seq(cpu,"subfe");
3535 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
3536 hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);
3537 hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
3538
3539 hreg_t0 = ppc32_jit_alloc_hreg(cpu,-1);
3540 hreg_t1 = ppc32_jit_get_tmp_hreg(cpu);
3541
3542 ppc32_op_emit_alter_host_reg(cpu,hreg_t0);
3543 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
3544 ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);
3545
3546 iop = ppc32_op_emit_insn_output(cpu,3,"subfe");
3547
3548 amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t1,hreg_t1);
3549
3550 /* $t0 = ~$ra + $carry */
3551 amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_ra,4);
3552 amd64_not_reg(iop->ob_ptr,hreg_t0);
3553 amd64_alu_reg_membase_size(iop->ob_ptr,X86_ADD,hreg_t0,
3554 AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),4);
3555
3556 amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
3557 amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
3558 hreg_t1,4);
3559
3560 /* $t0 += $rb */
3561 amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_t0,hreg_rb,4);
3562 amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
3563 amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
3564 AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
3565 hreg_t1,4);
3566
3567 amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_t0,4);
3568
3569 if (insn & 1)
3570 amd64_test_reg_reg_size(iop->ob_ptr,hreg_rd,hreg_rd,4);
3571
3572 ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);
3573
3574 /* update cr0 */
3575 if (insn & 1)
3576 ppc32_update_cr0(b);
3577
3578 ppc32_jit_close_hreg_seq(cpu);
3579 return(0);
3580 }
3581
/* SUBFIC - Subtract From Immediate Carrying:
 * rd = ~ra + 1 + sign_ext(imm,16), XER[CA] = carry out.
 * SUBFIC has no Rc form, so CR0 is never updated here. */
DECLARE_INSN(SUBFIC)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t imm = bits(insn,0,15);
   m_uint32_t tmp = sign_extend_32(imm,16);
   int hreg_ra,hreg_rd,hreg_t0,hreg_t1;
   jit_op_t *iop;

   /* $rd = ~$ra + 1 + sign_extend(imm,16) */
   ppc32_jit_start_hreg_seq(cpu,"subfic");
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   hreg_t0 = ppc32_jit_alloc_hreg(cpu,-1);
   hreg_t1 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_alter_host_reg(cpu,hreg_t0);
   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

   iop = ppc32_op_emit_insn_output(cpu,3,"subfic");

   amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t1,hreg_t1);

   /* $t0 = ~$ra + 1; record the carry of this step in xer_ca */
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_ra,4);
   amd64_not_reg(iop->ob_ptr,hreg_t0);
   amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_t0,1,4);

   amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         hreg_t1,4);

   /* $t0 += sign_extend(imm,16); OR in the carry of the second step */
   amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_t0,tmp,4);
   amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                              hreg_t1,4);

   amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_t0,4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3629
/* SYNC - Synchronize: a memory barrier is a no-op in this emulator,
   so no host code is emitted for it. */
DECLARE_INSN(SYNC)
{
   return(0);
}
3635
/* XOR: ra = rs ^ rb, Rc bit optionally sets CR0 */
DECLARE_INSN(XOR)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra,hreg_rb;
   jit_op_t *iop;

   /* $ra = $rs ^ $rb */
   ppc32_jit_start_hreg_seq(cpu,"xor");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,1,"xor");

   /* choose the operand order that avoids an extra copy when the
      destination aliases one of the sources */
   if (ra == rs)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_XOR,hreg_ra,hreg_rb,4);
   else if (ra == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_XOR,hreg_ra,hreg_rs,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_XOR,hreg_ra,hreg_rb,4);
   }

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   /* Rc bit set: update CR0 from the result */
   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3673
/* XORI - XOR Immediate: ra = rs ^ imm (immediate zero-extended, no Rc form) */
DECLARE_INSN(XORI)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint32_t imm = bits(insn,0,15);   /* zero-extended 16-bit immediate */
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* $ra = $rs ^ imm */
   ppc32_jit_start_hreg_seq(cpu,"xori");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,1,"xori");

   if (ra != rs)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   amd64_alu_reg_imm(iop->ob_ptr,X86_XOR,hreg_ra,imm);
   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3701
/* XORIS - XOR Immediate Shifted: ra = rs ^ (imm << 16) (no Rc form) */
DECLARE_INSN(XORIS)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t imm = bits(insn,0,15);
   m_uint32_t tmp = imm << 16;   /* immediate shifted into the high half */
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* $ra = $rs ^ (imm << 16) */
   ppc32_jit_start_hreg_seq(cpu,"xoris");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,1,"xoris");

   if (ra != rs)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   amd64_alu_reg_imm(iop->ob_ptr,X86_XOR,hreg_ra,tmp);
   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3730
/* PPC instruction array: each entry is { handler, mask, value } and an
   instruction matches when (insn & mask) == value.  Entries appear to be
   scanned in order with first match winning — note the more specific BCC
   entry precedes the general BC, and the catch-all ppc32_emit_unknown
   (mask 0) is last — TODO confirm against the lookup code in ppc32_jit. */
struct ppc32_insn_tag ppc32_insn_tags[] = {
   { ppc32_emit_BLR , 0xfffffffe , 0x4e800020 },
   { ppc32_emit_BCTR , 0xfffffffe , 0x4e800420 },
   { ppc32_emit_MFLR , 0xfc1fffff , 0x7c0802a6 },
   { ppc32_emit_MTLR , 0xfc1fffff , 0x7c0803a6 },
   { ppc32_emit_MFCTR , 0xfc1fffff , 0x7c0902a6 },
   { ppc32_emit_MTCTR , 0xfc1fffff , 0x7c0903a6 },
   { ppc32_emit_MFTBL , 0xfc1ff7ff , 0x7c0c42e6 },
   { ppc32_emit_MFTBU , 0xfc1ff7ff , 0x7c0d42e6 },
   { ppc32_emit_ADD , 0xfc0007fe , 0x7c000214 },
   { ppc32_emit_ADDC , 0xfc0007fe , 0x7c000014 },
   { ppc32_emit_ADDE , 0xfc0007fe , 0x7c000114 },
   { ppc32_emit_ADDI , 0xfc000000 , 0x38000000 },
   { ppc32_emit_ADDIC , 0xfc000000 , 0x30000000 },
   { ppc32_emit_ADDIC_dot , 0xfc000000 , 0x34000000 },
   { ppc32_emit_ADDIS , 0xfc000000 , 0x3c000000 },
   { ppc32_emit_ADDZE , 0xfc00fffe , 0x7c000194 },
   { ppc32_emit_AND , 0xfc0007fe , 0x7c000038 },
   { ppc32_emit_ANDC , 0xfc0007fe , 0x7c000078 },
   { ppc32_emit_ANDI , 0xfc000000 , 0x70000000 },
   { ppc32_emit_ANDIS , 0xfc000000 , 0x74000000 },
   { ppc32_emit_B , 0xfc000003 , 0x48000000 },
   { ppc32_emit_BA , 0xfc000003 , 0x48000002 },
   { ppc32_emit_BL , 0xfc000003 , 0x48000001 },
   { ppc32_emit_BLA , 0xfc000003 , 0x48000003 },
   { ppc32_emit_BCC , 0xfe800000 , 0x40800000 },
   { ppc32_emit_BC , 0xfc000000 , 0x40000000 },
   { ppc32_emit_BCLR , 0xfc00fffe , 0x4c000020 },
   { ppc32_emit_CMP , 0xfc6007ff , 0x7c000000 },
   { ppc32_emit_CMPI , 0xfc600000 , 0x2c000000 },
   { ppc32_emit_CMPL , 0xfc6007ff , 0x7c000040 },
   { ppc32_emit_CMPLI , 0xfc600000 , 0x28000000 },
   { ppc32_emit_CRAND , 0xfc0007ff , 0x4c000202 },
   { ppc32_emit_CRANDC , 0xfc0007ff , 0x4c000102 },
   { ppc32_emit_CREQV , 0xfc0007ff , 0x4c000242 },
   { ppc32_emit_CRNAND , 0xfc0007ff , 0x4c0001c2 },
   { ppc32_emit_CRNOR , 0xfc0007ff , 0x4c000042 },
   { ppc32_emit_CROR , 0xfc0007ff , 0x4c000382 },
   { ppc32_emit_CRORC , 0xfc0007ff , 0x4c000342 },
   { ppc32_emit_CRXOR , 0xfc0007ff , 0x4c000182 },
   { ppc32_emit_DIVWU , 0xfc0007fe , 0x7c000396 },
   { ppc32_emit_EQV , 0xfc0007fe , 0x7c000238 },
   { ppc32_emit_EXTSB , 0xfc00fffe , 0x7c000774 },
   { ppc32_emit_EXTSH , 0xfc00fffe , 0x7c000734 },
   { ppc32_emit_LBZ , 0xfc000000 , 0x88000000 },
   { ppc32_emit_LBZU , 0xfc000000 , 0x8c000000 },
   { ppc32_emit_LBZUX , 0xfc0007ff , 0x7c0000ee },
   { ppc32_emit_LBZX , 0xfc0007ff , 0x7c0000ae },
   { ppc32_emit_LHA , 0xfc000000 , 0xa8000000 },
   { ppc32_emit_LHAU , 0xfc000000 , 0xac000000 },
   { ppc32_emit_LHAUX , 0xfc0007ff , 0x7c0002ee },
   { ppc32_emit_LHAX , 0xfc0007ff , 0x7c0002ae },
   { ppc32_emit_LHZ , 0xfc000000 , 0xa0000000 },
   { ppc32_emit_LHZU , 0xfc000000 , 0xa4000000 },
   { ppc32_emit_LHZUX , 0xfc0007ff , 0x7c00026e },
   { ppc32_emit_LHZX , 0xfc0007ff , 0x7c00022e },
   { ppc32_emit_LWZ , 0xfc000000 , 0x80000000 },
   { ppc32_emit_LWZU , 0xfc000000 , 0x84000000 },
   { ppc32_emit_LWZUX , 0xfc0007ff , 0x7c00006e },
   { ppc32_emit_LWZX , 0xfc0007ff , 0x7c00002e },
   { ppc32_emit_MCRF , 0xfc63ffff , 0x4c000000 },
   { ppc32_emit_MFCR , 0xfc1fffff , 0x7c000026 },
   { ppc32_emit_MFMSR , 0xfc1fffff , 0x7c0000a6 },
   { ppc32_emit_MFSR , 0xfc10ffff , 0x7c0004a6 },
   { ppc32_emit_MTCRF , 0xfc100fff , 0x7c000120 },
   { ppc32_emit_MULHW , 0xfc0007fe , 0x7c000096 },
   { ppc32_emit_MULHWU , 0xfc0007fe , 0x7c000016 },
   { ppc32_emit_MULLI , 0xfc000000 , 0x1c000000 },
   { ppc32_emit_MULLW , 0xfc0007fe , 0x7c0001d6 },
   { ppc32_emit_NAND , 0xfc0007fe , 0x7c0003b8 },
   { ppc32_emit_NEG , 0xfc00fffe , 0x7c0000d0 },
   { ppc32_emit_NOR , 0xfc0007fe , 0x7c0000f8 },
   { ppc32_emit_OR , 0xfc0007fe , 0x7c000378 },
   { ppc32_emit_ORC , 0xfc0007fe , 0x7c000338 },
   { ppc32_emit_ORI , 0xfc000000 , 0x60000000 },
   { ppc32_emit_ORIS , 0xfc000000 , 0x64000000 },
   { ppc32_emit_RLWIMI , 0xfc000000 , 0x50000000 },
   { ppc32_emit_RLWINM , 0xfc000000 , 0x54000000 },
   { ppc32_emit_RLWNM , 0xfc000000 , 0x5c000000 },
   { ppc32_emit_SLW , 0xfc0007fe , 0x7c000030 },
   { ppc32_emit_SRAWI , 0xfc0007fe , 0x7c000670 },
   { ppc32_emit_SRW , 0xfc0007fe , 0x7c000430 },
   { ppc32_emit_STB , 0xfc000000 , 0x98000000 },
   { ppc32_emit_STBU , 0xfc000000 , 0x9c000000 },
   { ppc32_emit_STBUX , 0xfc0007ff , 0x7c0001ee },
   { ppc32_emit_STBX , 0xfc0007ff , 0x7c0001ae },
   { ppc32_emit_STH , 0xfc000000 , 0xb0000000 },
   { ppc32_emit_STHU , 0xfc000000 , 0xb4000000 },
   { ppc32_emit_STHUX , 0xfc0007ff , 0x7c00036e },
   { ppc32_emit_STHX , 0xfc0007ff , 0x7c00032e },
   { ppc32_emit_STW , 0xfc000000 , 0x90000000 },
   { ppc32_emit_STWU , 0xfc000000 , 0x94000000 },
   { ppc32_emit_STWUX , 0xfc0007ff , 0x7c00016e },
   { ppc32_emit_STWX , 0xfc0007ff , 0x7c00012e },
   { ppc32_emit_SUBF , 0xfc0007fe , 0x7c000050 },
   { ppc32_emit_SUBFC , 0xfc0007fe , 0x7c000010 },
   { ppc32_emit_SUBFE , 0xfc0007fe , 0x7c000110 },
   { ppc32_emit_SUBFIC , 0xfc000000 , 0x20000000 },
   { ppc32_emit_SYNC , 0xffffffff , 0x7c0004ac },
   { ppc32_emit_XOR , 0xfc0007fe , 0x7c000278 },
   { ppc32_emit_XORI , 0xfc000000 , 0x68000000 },
   { ppc32_emit_XORIS , 0xfc000000 , 0x6c000000 },
   { ppc32_emit_unknown , 0x00000000 , 0x00000000 },
   { NULL , 0x00000000 , 0x00000000 },
};

  ViewVC Help
Powered by ViewVC 1.1.26