/[dynamips]/upstream/dynamips-0.2.7-RC3/ppc32_amd64_trans.c
This is a repository of my old source code, which is no longer updated. Go to git.rot13.org for current projects!
ViewVC logotype

Contents of /upstream/dynamips-0.2.7-RC3/ppc32_amd64_trans.c

Parent Directory Parent Directory | Revision Log Revision Log


Revision 9 - (show annotations)
Sat Oct 6 16:26:06 2007 UTC (13 years, 2 months ago) by dpavlin
File MIME type: text/plain
File size: 112256 byte(s)
dynamips-0.2.7-RC3

1 /*
2 * Cisco router simulation platform.
3 * Copyright (c) 2005,2006 Christophe Fillot (cf@utc.fr)
4 */
5
6 #include <stdio.h>
7 #include <stdlib.h>
8 #include <unistd.h>
9 #include <string.h>
10 #include <sys/types.h>
11 #include <sys/stat.h>
12 #include <sys/mman.h>
13 #include <fcntl.h>
14
15 #include "cpu.h"
16 #include "jit_op.h"
17 #include "ppc32_jit.h"
18 #include "ppc32_amd64_trans.h"
19 #include "memory.h"
20
/* Macros for CPU structure access */
/* Byte offset of GPR 'reg' inside cpu_ppc_t (for membase addressing) */
#define REG_OFFSET(reg) (OFFSET(cpu_ppc_t,gpr[(reg)]))
/* Byte offset of memory-access handler 'op' inside cpu_ppc_t */
#define MEMOP_OFFSET(op) (OFFSET(cpu_ppc_t,mem_op_fn[(op)]))

/* Declare the JIT emitter function for PPC instruction 'name' */
#define DECLARE_INSN(name) \
static int ppc32_emit_##name(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b, \
                             ppc_insn_t insn)
/* EFLAGS to Condition Register (CR) field - signed
 *
 * Index = low byte of x86 EFLAGS (ZF = bit 6, SF = bit 7; other bits
 * ignored for the signed case).  Value = 4-bit CR field nibble
 * (LT=0x8, GT=0x4, EQ=0x2, SO=0x1):
 *   SF=0,ZF=0 -> GT (0x04);  ZF=1 -> EQ (0x02);  SF=1 -> LT (0x08);
 *   SF=1,ZF=1 -> 0x0a (LT|EQ; cannot result from a real comparison).
 */
static m_uint32_t eflags_to_cr_signed[256] = {
   0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
   0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
   0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
   0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
   0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
   0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
   0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
   0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04,
   0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
   0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
   0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
   0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
   0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
   0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
   0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
   0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02, 0x02,
   0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
   0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
   0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
   0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
   0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
   0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
   0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
   0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08,
   0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a,
   0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a,
   0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a,
   0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a,
   0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a,
   0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a,
   0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a,
   0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a, 0x0a,
};
64
/* EFLAGS to Condition Register (CR) field - unsigned
 *
 * Index = low byte of x86 EFLAGS (CF = bit 0, ZF = bit 6; SF is
 * irrelevant for unsigned comparisons and the table repeats for it).
 * Value = 4-bit CR field nibble (LT=0x8, GT=0x4, EQ=0x2, SO=0x1):
 *   CF=0,ZF=0 -> GT (0x04);  CF=1 -> LT (0x08);  ZF=1 -> EQ (0x02);
 *   CF=1,ZF=1 -> 0x0a (LT|EQ; cannot result from a real comparison).
 */
static m_uint32_t eflags_to_cr_unsigned[256] = {
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x04, 0x08, 0x04, 0x08, 0x04, 0x08, 0x04, 0x08,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
   0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a, 0x02, 0x0a,
};
100
101 /* Load a 32 bit immediate value */
102 static inline void ppc32_load_imm(u_char **ptr,u_int reg,m_uint32_t val)
103 {
104 if (val)
105 amd64_mov_reg_imm_size(*ptr,reg,val,4);
106 else
107 amd64_alu_reg_reg_size(*ptr,X86_XOR,reg,reg,4);
108 }
109
/* Set the Instruction Address (IA) register */
void ppc32_set_ia(u_char **ptr,m_uint32_t new_ia)
{
   /* cpu->ia = new_ia (%r15 holds the cpu_ppc_t pointer in JIT code) */
   amd64_mov_membase_imm(*ptr,AMD64_R15,OFFSET(cpu_ppc_t,ia),new_ia,4);
}
115
/* Set the Link Register (LR) */
static void ppc32_set_lr(jit_op_t *iop,m_uint32_t new_lr)
{
   /* cpu->lr = new_lr */
   amd64_mov_membase_imm(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,lr),new_lr,4);
}
121
122 /*
123 * Try to branch directly to the specified JIT block without returning to
124 * main loop.
125 */
126 static void ppc32_try_direct_far_jump(cpu_ppc_t *cpu,jit_op_t *iop,
127 m_uint32_t new_ia)
128 {
129 m_uint32_t new_page,ia_hash,ia_offset;
130 u_char *test1,*test2,*test3;
131
132 /* Indicate that we throw %rbx, %rdx */
133 ppc32_op_emit_alter_host_reg(cpu,AMD64_RBX);
134 ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);
135 ppc32_op_emit_alter_host_reg(cpu,AMD64_RSI);
136
137 new_page = new_ia & PPC32_MIN_PAGE_MASK;
138 ia_offset = (new_ia & PPC32_MIN_PAGE_IMASK) >> 2;
139 ia_hash = ppc32_jit_get_ia_hash(new_ia);
140
141 /* Get JIT block info in %rdx */
142 amd64_mov_reg_membase(iop->ob_ptr,AMD64_RBX,
143 AMD64_R15,OFFSET(cpu_ppc_t,exec_blk_map),8);
144 amd64_mov_reg_membase(iop->ob_ptr,AMD64_RDX,
145 AMD64_RBX,ia_hash*sizeof(void *),8);
146
147 /* no JIT block found ? */
148 amd64_test_reg_reg(iop->ob_ptr,AMD64_RDX,AMD64_RDX);
149 test1 = iop->ob_ptr;
150 amd64_branch8(iop->ob_ptr, X86_CC_Z, 0, 1);
151
152 /* Check block IA */
153 ppc32_load_imm(&iop->ob_ptr,AMD64_RSI,new_page);
154 amd64_alu_reg_membase_size(iop->ob_ptr,X86_CMP,AMD64_RAX,AMD64_RDX,
155 OFFSET(ppc32_jit_tcb_t,start_ia),4);
156 test2 = iop->ob_ptr;
157 amd64_branch8(iop->ob_ptr, X86_CC_NE, 0, 1);
158
159 /* Jump to the code */
160 amd64_mov_reg_membase(iop->ob_ptr,AMD64_RSI,
161 AMD64_RDX,OFFSET(ppc32_jit_tcb_t,jit_insn_ptr),8);
162 amd64_mov_reg_membase(iop->ob_ptr,AMD64_RBX,
163 AMD64_RSI,ia_offset * sizeof(void *),8);
164
165 amd64_test_reg_reg(iop->ob_ptr,AMD64_RBX,AMD64_RBX);
166 test3 = iop->ob_ptr;
167 amd64_branch8(iop->ob_ptr, X86_CC_Z, 0, 1);
168 amd64_jump_reg(iop->ob_ptr,AMD64_RBX);
169
170 /* Returns to caller... */
171 amd64_patch(test1,iop->ob_ptr);
172 amd64_patch(test2,iop->ob_ptr);
173 amd64_patch(test3,iop->ob_ptr);
174
175 ppc32_set_ia(&iop->ob_ptr,new_ia);
176 ppc32_jit_tcb_push_epilog(&iop->ob_ptr);
177 }
178
/* Set Jump
 *
 * Emit a jump to new_ia, using the cheapest available mechanism:
 * a patched direct jump for local targets, a run-time block lookup when
 * direct jumps are enabled, or a plain return to the main loop.
 */
static void ppc32_set_jump(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b,jit_op_t *iop,
                           m_uint32_t new_ia,int local_jump)
{
   int return_to_caller = FALSE;
   u_char *jump_ptr;

#if 0
   if (cpu->sym_trace && !local_jump)
      return_to_caller = TRUE;
#endif

   /* Target inside this block (presumably same page — see
    * ppc32_jit_tcb_local_addr): emit a direct jump with a zero
    * displacement and record it so it can be patched later. */
   if (!return_to_caller && ppc32_jit_tcb_local_addr(b,new_ia,&jump_ptr)) {
      ppc32_jit_tcb_record_patch(b,iop,iop->ob_ptr,new_ia);
      amd64_jump32(iop->ob_ptr,0);
   } else {
      if (cpu->exec_blk_direct_jump) {
         /* Block lookup optimization */
         ppc32_try_direct_far_jump(cpu,iop,new_ia);
      } else {
         /* Fall back to the main loop: update IA and leave the block */
         ppc32_set_ia(&iop->ob_ptr,new_ia);
         ppc32_jit_tcb_push_epilog(&iop->ob_ptr);
      }
   }
}
204
/* Jump to the next page */
void ppc32_set_page_jump(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b)
{
   jit_op_t *iop,*op_list = NULL;

   /* Temporarily redirect JIT op emission to a private list so the
    * jump code can be generated and flushed immediately. */
   cpu->gen->jit_op_current = &op_list;

   iop = ppc32_op_emit_insn_output(cpu,4,"set_page_jump");
   ppc32_set_jump(cpu,b,iop,b->start_ia + PPC32_MIN_PAGE_SIZE,FALSE);
   ppc32_op_insn_output(b,iop);

   /* Release the temporary list and restore normal emission state */
   jit_op_free_list(cpu->gen,op_list);
   cpu->gen->jit_op_current = NULL;
}
219
/* Load a GPR into the specified host register */
static forced_inline void ppc32_load_gpr(u_char **ptr,u_int host_reg,
                                         u_int ppc_reg)
{
   /* host_reg = cpu->gpr[ppc_reg] (32-bit load) */
   amd64_mov_reg_membase(*ptr,host_reg,AMD64_R15,REG_OFFSET(ppc_reg),4);
}
226
/* Store contents for a host register into a GPR register */
static forced_inline void ppc32_store_gpr(u_char **ptr,u_int ppc_reg,
                                          u_int host_reg)
{
   /* cpu->gpr[ppc_reg] = host_reg (32-bit store) */
   amd64_mov_membase_reg(*ptr,AMD64_R15,REG_OFFSET(ppc_reg),host_reg,4);
}
233
/* Apply an ALU operation on a GPR register and a host register */
static forced_inline void ppc32_alu_gpr(u_char **ptr,u_int op,
                                        u_int host_reg,u_int ppc_reg)
{
   /* host_reg = host_reg <op> cpu->gpr[ppc_reg]; sets EFLAGS */
   amd64_alu_reg_membase_size(*ptr,op,host_reg,
                              AMD64_R15,REG_OFFSET(ppc_reg),4);
}
241
/*
 * Update CR from %eflags
 * %rax, %rdx are modified.
 * (NOTE(review): the original comment also listed %rsi, but the visible
 * code only touches %rax and %rdx — confirm against callers.)
 */
static void ppc32_update_cr(ppc32_jit_tcb_t *b,int field,int is_signed)
{
   /* Get status bits from EFLAGS: pushf/pop, keep the low byte only */
   amd64_pushfd_size(b->jit_ptr,8);
   amd64_pop_reg(b->jit_ptr,AMD64_RAX);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RAX,0xFF);

   /* %rdx = address of the EFLAGS -> CR translation table */
   if (is_signed)
      amd64_mov_reg_imm_size(b->jit_ptr,AMD64_RDX,eflags_to_cr_signed,8);
   else
      amd64_mov_reg_imm_size(b->jit_ptr,AMD64_RDX,eflags_to_cr_unsigned,8);

   /* %eax = table[flags] (index shift 2 = 4-byte m_uint32_t elements) */
   amd64_mov_reg_memindex(b->jit_ptr,AMD64_RAX,AMD64_RDX,0,AMD64_RAX,2,4);

#if 0
   /* Check XER Summary of Overflow and report it */
   amd64_mov_reg_membase(b->jit_ptr,AMD64_RCX,
                         AMD64_R15,OFFSET(cpu_ppc_t,xer),4);
   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RCX,PPC32_XER_SO);
   amd64_shift_reg_imm(b->jit_ptr,X86_SHR,AMD64_RCX,(field << 2) + 3);
   amd64_alu_reg_reg(b->jit_ptr,X86_OR,AMD64_RDX,AMD64_RCX);
#endif

   /* Store modified CR field */
   amd64_mov_membase_reg(b->jit_ptr,AMD64_R15,PPC32_CR_FIELD_OFFSET(field),
                         AMD64_RAX,4);
}
273
/*
 * Update CR0 from %eflags (signed comparison semantics).
 * Modifies the same host registers as ppc32_update_cr().
 */
static void ppc32_update_cr0(ppc32_jit_tcb_t *b)
{
   ppc32_update_cr(b,0,TRUE);
}
282
/* Indicate registers modified by ppc32_update_cr() functions */
void ppc32_update_cr_set_altered_hreg(cpu_ppc_t *cpu)
{
   /* ppc32_update_cr() clobbers %rax and %rdx */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RAX);
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);
}
289
/* Basic C call (clobbers %rbx, which holds the function address) */
static forced_inline void ppc32_emit_basic_c_call(u_char **ptr,void *f)
{
   amd64_mov_reg_imm(*ptr,AMD64_RBX,f);
   amd64_call_reg(*ptr,AMD64_RBX);
}
296
/* Emit a simple call to a C function without any parameter */
static void ppc32_emit_c_call(ppc32_jit_tcb_t *b,jit_op_t *iop,void *f)
{
   /* Set IA to the current instruction first, so the callee sees it */
   ppc32_set_ia(&iop->ob_ptr,b->start_ia+(b->ppc_trans_pos << 2));
   ppc32_emit_basic_c_call(&iop->ob_ptr,f);
}
303
304 /* ======================================================================== */
305
306 /* Initialize register mapping */
307 void ppc32_jit_init_hreg_mapping(cpu_ppc_t *cpu)
308 {
309 int avail_hregs[] = { AMD64_RSI, AMD64_RAX, AMD64_RCX, AMD64_RDX,
310 AMD64_R13, AMD64_R14, AMD64_RDI, -1 };
311 struct hreg_map *map;
312 int i,hreg;
313
314 cpu->hreg_map_list = cpu->hreg_lru = NULL;
315
316 /* Add the available registers to the map list */
317 for(i=0;avail_hregs[i]!=-1;i++) {
318 hreg = avail_hregs[i];
319 map = &cpu->hreg_map[hreg];
320
321 /* Initialize mapping. At the beginning, no PPC reg is mapped */
322 map->flags = 0;
323 map->hreg = hreg;
324 map->vreg = -1;
325 ppc32_jit_insert_hreg_mru(cpu,map);
326 }
327
328 /* Clear PPC registers mapping */
329 for(i=0;i<PPC32_GPR_NR;i++)
330 cpu->ppc_reg_map[i] = -1;
331 }
332
333 /* Allocate a specific temp register */
334 static int ppc32_jit_get_tmp_hreg(cpu_ppc_t *cpu)
335 {
336 return(AMD64_RBX);
337 }
338
339 /* ======================================================================== */
340 /* JIT operations (specific to target CPU). */
341 /* ======================================================================== */
342
343 /* INSN_OUTPUT */
344 void ppc32_op_insn_output(ppc32_jit_tcb_t *b,jit_op_t *op)
345 {
346 op->ob_final = b->jit_ptr;
347 memcpy(b->jit_ptr,op->ob_data,op->ob_ptr - op->ob_data);
348 b->jit_ptr += op->ob_ptr - op->ob_data;
349 }
350
351 /* LOAD_GPR: p[0] = %host_reg, p[1] = %ppc_reg */
352 void ppc32_op_load_gpr(ppc32_jit_tcb_t *b,jit_op_t *op)
353 {
354 if (op->param[0] != JIT_OP_INV_REG)
355 ppc32_load_gpr(&b->jit_ptr,op->param[0],op->param[1]);
356 }
357
358 /* STORE_GPR: p[0] = %host_reg, p[1] = %ppc_reg */
359 void ppc32_op_store_gpr(ppc32_jit_tcb_t *b,jit_op_t *op)
360 {
361 if (op->param[0] != JIT_OP_INV_REG)
362 ppc32_store_gpr(&b->jit_ptr,op->param[1],op->param[0]);
363 }
364
365 /* UPDATE_FLAGS: p[0] = cr_field, p[1] = is_signed */
366 void ppc32_op_update_flags(ppc32_jit_tcb_t *b,jit_op_t *op)
367 {
368 if (op->param[0] != JIT_OP_INV_REG)
369 ppc32_update_cr(b,op->param[0],op->param[1]);
370 }
371
372 /* MOVE_HOST_REG: p[0] = %host_dst_reg, p[1] = %host_src_reg */
373 void ppc32_op_move_host_reg(ppc32_jit_tcb_t *b,jit_op_t *op)
374 {
375 if ((op->param[0] != JIT_OP_INV_REG) && (op->param[1] != JIT_OP_INV_REG))
376 amd64_mov_reg_reg(b->jit_ptr,op->param[0],op->param[1],4);
377 }
378
379 /* SET_HOST_REG_IMM32: p[0] = %host_reg, p[1] = imm32 */
380 void ppc32_op_set_host_reg_imm32(ppc32_jit_tcb_t *b,jit_op_t *op)
381 {
382 if (op->param[0] != JIT_OP_INV_REG)
383 ppc32_load_imm(&b->jit_ptr,op->param[0],op->param[1]);
384 }
385
386 /* ======================================================================== */
387
/* Memory operation
 *
 * Emits a call to the slow memory handler mem_op_fn[op] with the
 * effective address GPR[base] + sign_extend(offset) and the target GPR.
 * With 'update' set (lwzu/stwu-style forms) the effective address is
 * written back to GPR[base] on success.
 */
static void ppc32_emit_memop(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b,
                             int op,int base,int offset,int target,int update)
{
   m_uint32_t val = sign_extend(offset,16);
   u_char *test1;
   jit_op_t *iop;

   /*
    * Since an exception can be triggered, clear JIT state. This allows
    * to use branch target tag (we can directly branch on this instruction).
    */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_BRANCH_TARGET);
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);

   iop = ppc32_op_emit_insn_output(cpu,5,"memop");

   /* Save PC for exception handling */
   ppc32_set_ia(&iop->ob_ptr,b->start_ia+(b->ppc_trans_pos << 2));

   /* RSI = sign-extended offset */
   ppc32_load_imm(&iop->ob_ptr,AMD64_RSI,val);

   /* RSI = GPR[base] + sign-extended offset */
   if (update || (base != 0))
      ppc32_alu_gpr(&iop->ob_ptr,X86_ADD,AMD64_RSI,base);

   /* Keep the effective address in R14 for the write-back below */
   if (update)
      amd64_mov_reg_reg(iop->ob_ptr,AMD64_R14,AMD64_RSI,4);

   /* RDX = target register */
   amd64_mov_reg_imm(iop->ob_ptr,AMD64_RDX,target);

   /* RDI = CPU instance pointer */
   amd64_mov_reg_reg(iop->ob_ptr,AMD64_RDI,AMD64_R15,8);

   /* Call memory function */
   amd64_call_membase(iop->ob_ptr,AMD64_R15,MEMOP_OFFSET(op));

   /* Exception ? non-zero return value => leave the JIT block */
   amd64_test_reg_reg_size(iop->ob_ptr,AMD64_RAX,AMD64_RAX,4);
   test1 = iop->ob_ptr;
   amd64_branch8(iop->ob_ptr, X86_CC_Z, 0, 1);
   ppc32_jit_tcb_push_epilog(&iop->ob_ptr);
   amd64_patch(test1,iop->ob_ptr);

   /* Update form: write the effective address back to GPR[base] */
   if (update)
      ppc32_store_gpr(&iop->ob_ptr,base,AMD64_R14);
}
437
/* Memory operation (indexed)
 *
 * Same as ppc32_emit_memop() but the effective address is
 * GPR[ra] + GPR[rb] (lwzx/stwx-style forms).
 */
static void ppc32_emit_memop_idx(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b,
                                 int op,int ra,int rb,int target,int update)
{
   u_char *test1;
   jit_op_t *iop;

   /*
    * Since an exception can be triggered, clear JIT state. This allows
    * to use branch target tag (we can directly branch on this instruction).
    */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_BRANCH_TARGET);
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);

   iop = ppc32_op_emit_insn_output(cpu,5,"memop_idx");

   /* Save PC for exception handling */
   ppc32_set_ia(&iop->ob_ptr,b->start_ia+(b->ppc_trans_pos << 2));

   /* RSI = $rb */
   ppc32_load_gpr(&iop->ob_ptr,AMD64_RSI,rb);

   /* RSI = GPR[ra] + GPR[rb] */
   if (update || (ra != 0))
      ppc32_alu_gpr(&iop->ob_ptr,X86_ADD,AMD64_RSI,ra);

   /* Keep the effective address in R14 for the write-back below */
   if (update)
      amd64_mov_reg_reg(iop->ob_ptr,AMD64_R14,AMD64_RSI,4);

   /* RDX = target register */
   amd64_mov_reg_imm(iop->ob_ptr,AMD64_RDX,target);

   /* RDI = CPU instance pointer */
   amd64_mov_reg_reg(iop->ob_ptr,AMD64_RDI,AMD64_R15,8);

   /* Call memory function */
   amd64_call_membase(iop->ob_ptr,AMD64_R15,MEMOP_OFFSET(op));

   /* Exception ? non-zero return value => leave the JIT block */
   amd64_test_reg_reg_size(iop->ob_ptr,AMD64_RAX,AMD64_RAX,4);
   test1 = iop->ob_ptr;
   amd64_branch8(iop->ob_ptr, X86_CC_Z, 0, 1);
   ppc32_jit_tcb_push_epilog(&iop->ob_ptr);
   amd64_patch(test1,iop->ob_ptr);

   /* Update form: write the effective address back to GPR[ra] */
   if (update)
      ppc32_store_gpr(&iop->ob_ptr,ra,AMD64_R14);
}
486
/* Handler emitting the actual data access for a fast (inlined) memory op */
typedef void (*memop_fast_access)(jit_op_t *iop,int target);
488
/* Fast LBZ: zero-extended byte load from host page (%rbx) + offset (%rsi) */
static void ppc32_memop_fast_lbz(jit_op_t *iop,int target)
{
   /* clear %rcx first so the 1-byte load is zero-extended */
   amd64_clear_reg(iop->ob_ptr,AMD64_RCX);
   amd64_mov_reg_memindex(iop->ob_ptr,AMD64_RCX,AMD64_RBX,0,AMD64_RSI,0,1);
   ppc32_store_gpr(&iop->ob_ptr,target,AMD64_RCX);
}
496
/* Fast STB: byte store to host page (%rbx) + offset (%rsi) */
static void ppc32_memop_fast_stb(jit_op_t *iop,int target)
{
   ppc32_load_gpr(&iop->ob_ptr,AMD64_RDX,target);
   amd64_mov_memindex_reg(iop->ob_ptr,AMD64_RBX,0,AMD64_RSI,0,AMD64_RDX,1);
}
503
/* Fast LWZ: 32-bit load with byte swap (big-endian guest, LE host) */
static void ppc32_memop_fast_lwz(jit_op_t *iop,int target)
{
   amd64_mov_reg_memindex(iop->ob_ptr,AMD64_RAX,AMD64_RBX,0,AMD64_RSI,0,4);
   amd64_bswap32(iop->ob_ptr,AMD64_RAX);
   ppc32_store_gpr(&iop->ob_ptr,target,AMD64_RAX);
}
511
/* Fast STW: 32-bit store with byte swap (big-endian guest, LE host) */
static void ppc32_memop_fast_stw(jit_op_t *iop,int target)
{
   ppc32_load_gpr(&iop->ob_ptr,AMD64_RDX,target);
   amd64_bswap32(iop->ob_ptr,AMD64_RDX);
   amd64_mov_memindex_reg(iop->ob_ptr,AMD64_RBX,0,AMD64_RSI,0,AMD64_RDX,4);
}
519
/* Fast memory operation
 *
 * Emits an inlined MTS (memory translation system) cache lookup: on a
 * cache hit the access is performed directly on the host page via
 * op_handler; on a miss (or a COW/exec-protected page for writes) it
 * falls back to the generic handler mem_op_fn[opcode].
 */
static void ppc32_emit_memop_fast(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b,
                                  int write_op,int opcode,
                                  int base,int offset,int target,
                                  memop_fast_access op_handler)
{
   m_uint32_t val = sign_extend(offset,16);
   u_char *test1,*test2,*p_exception,*p_exit;
   jit_op_t *iop;

   /*
    * Since an exception can be triggered, clear JIT state. This allows
    * to use branch target tag (we can directly branch on this instruction).
    */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_BRANCH_TARGET);
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);

   iop = ppc32_op_emit_insn_output(cpu,5,"memop_fast");

   test2 = NULL;

   /* RSI = GPR[base] + sign-extended offset (the virtual address) */
   ppc32_load_imm(&iop->ob_ptr,AMD64_RSI,val);
   if (base != 0)
      ppc32_alu_gpr(&iop->ob_ptr,X86_ADD,AMD64_RSI,base);

   /* RBX = mts32_entry index (hash of the virtual address) */
   amd64_mov_reg_reg_size(iop->ob_ptr,X86_EBX,X86_ESI,4);
   amd64_shift_reg_imm_size(iop->ob_ptr,X86_SHR,X86_EBX,MTS32_HASH_SHIFT,4);
   amd64_alu_reg_imm_size(iop->ob_ptr,X86_AND,X86_EBX,MTS32_HASH_MASK,4);

   /* RCX = mts32 entry = dcache base + index * entry size */
   amd64_mov_reg_membase(iop->ob_ptr,AMD64_RCX,
                         AMD64_R15,
                         OFFSET(cpu_ppc_t,mts_cache[PPC32_MTS_DCACHE]),8);
   /* shift 5 = hardcoded entry size of 32 bytes -- TO FIX */
   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,AMD64_RBX,5);
   amd64_alu_reg_reg(iop->ob_ptr,X86_ADD,AMD64_RCX,AMD64_RBX);

   /* Compare virtual page address (EAX = vpage) */
   amd64_mov_reg_reg(iop->ob_ptr,X86_EAX,X86_ESI,4);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,X86_EAX,PPC32_MIN_PAGE_MASK);

   amd64_alu_reg_membase_size(iop->ob_ptr,X86_CMP,X86_EAX,AMD64_RCX,
                              OFFSET(mts32_entry_t,gvpa),4);
   test1 = iop->ob_ptr;
   amd64_branch8(iop->ob_ptr, X86_CC_NZ, 0, 1);

   /* Test if we are writing to a COW page */
   if (write_op) {
      amd64_test_membase_imm_size(iop->ob_ptr,
                                  AMD64_RCX,OFFSET(mts32_entry_t,flags),
                                  MTS_FLAG_COW|MTS_FLAG_EXEC,4);
      test2 = iop->ob_ptr;
      amd64_branch8(iop->ob_ptr, X86_CC_NZ, 0, 1);
   }

   /* ESI = offset in page, RBX = Host Page Address */
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,X86_ESI,PPC32_MIN_PAGE_IMASK);
   amd64_mov_reg_membase(iop->ob_ptr,AMD64_RBX,
                         AMD64_RCX,OFFSET(mts32_entry_t,hpa),8);

   /* Memory access (cache hit: inlined access on the host page) */
   op_handler(iop,target);

   p_exit = iop->ob_ptr;
   amd64_jump8(iop->ob_ptr,0);

   /* === Slow lookup === */
   amd64_patch(test1,iop->ob_ptr);
   if (test2)
      amd64_patch(test2,iop->ob_ptr);

   /* Save IA for exception handling */
   ppc32_set_ia(&iop->ob_ptr,b->start_ia+(b->ppc_trans_pos << 2));

   /* RDX = target register */
   amd64_mov_reg_imm(iop->ob_ptr,AMD64_RDX,target);

   /* RDI = CPU instance */
   amd64_mov_reg_reg(iop->ob_ptr,AMD64_RDI,AMD64_R15,8);

   /* Call memory access function */
   amd64_call_membase(iop->ob_ptr,AMD64_R15,MEMOP_OFFSET(opcode));

   /* Exception ? non-zero return value => leave the JIT block */
   amd64_test_reg_reg_size(iop->ob_ptr,AMD64_RAX,AMD64_RAX,4);
   p_exception = iop->ob_ptr;
   amd64_branch8(iop->ob_ptr, X86_CC_Z, 0, 1);
   ppc32_jit_tcb_push_epilog(&iop->ob_ptr);

   amd64_patch(p_exit,iop->ob_ptr);
   amd64_patch(p_exception,iop->ob_ptr);
}
613
/* Emit unhandled instruction code
 *
 * Falls back to the interpreter (ppc32_exec_single_insn_ext) for
 * instructions the JIT does not translate.
 */
static int ppc32_emit_unknown(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b,
                              ppc_insn_t opcode)
{
   u_char *test1;
   jit_op_t *iop;

   iop = ppc32_op_emit_insn_output(cpu,3,"unknown");

   /* Update IA so the interpreter executes the right instruction */
   ppc32_set_ia(&iop->ob_ptr,b->start_ia+(b->ppc_trans_pos << 2));

   /* Fallback to non-JIT mode: cpu instance in RDI, raw opcode in RSI */
   amd64_mov_reg_reg(iop->ob_ptr,AMD64_RDI,AMD64_R15,8);
   amd64_mov_reg_imm(iop->ob_ptr,AMD64_RSI,opcode);

   ppc32_emit_c_call(b,iop,ppc32_exec_single_insn_ext);

   /* Non-zero return value => leave the JIT block */
   amd64_test_reg_reg_size(iop->ob_ptr,AMD64_RAX,AMD64_RAX,4);
   test1 = iop->ob_ptr;
   amd64_branch8(iop->ob_ptr, X86_CC_Z, 0, 1);
   ppc32_jit_tcb_push_epilog(&iop->ob_ptr);

   amd64_patch(test1,iop->ob_ptr);

   /* Signal this as an EOB to reset JIT state */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
   return(0);
}
642
/* Virtual Breakpoint: emit a call to the breakpoint handler */
void ppc32_emit_breakpoint(cpu_ppc_t *cpu,ppc32_jit_tcb_t *b)
{
   jit_op_t *iop;

   iop = ppc32_op_emit_insn_output(cpu,2,"breakpoint");

   /* RDI = CPU instance pointer */
   amd64_mov_reg_reg(iop->ob_ptr,AMD64_RDI,AMD64_R15,8);
   ppc32_emit_c_call(b,iop,ppc32_run_breakpoint);

   /* Signal this as an EOB to reset JIT state */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
}
656
/* Increment the number of executed instructions (performance debugging) */
void ppc32_inc_perf_counter(ppc32_jit_tcb_t *b)
{
   /* cpu->perf_counter++ */
   amd64_inc_membase(b->jit_ptr,AMD64_R15,OFFSET(cpu_ppc_t,perf_counter));
}
662
663 /* ======================================================================== */
664
/* BLR - Branch to Link Register */
DECLARE_INSN(BLR)
{
   jit_op_t *iop;
   int hreg;

   ppc32_jit_start_hreg_seq(cpu,"blr");
   hreg = ppc32_jit_alloc_hreg(cpu,-1);
   ppc32_op_emit_alter_host_reg(cpu,hreg);

   iop = ppc32_op_emit_insn_output(cpu,2,"blr");

   /* IA = LR */
   amd64_mov_reg_membase(iop->ob_ptr,hreg,AMD64_R15,OFFSET(cpu_ppc_t,lr),4);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ia),hreg,4);

   /* set the return address (blrl): the instruction after the branch */
   if (insn & 1)
      ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));

   /* Return to the main loop; the next insn is a potential branch target */
   ppc32_jit_tcb_push_epilog(&iop->ob_ptr);
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
   ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1) << 2));

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
691
692 /* BCTR - Branch to Count Register */
693 DECLARE_INSN(BCTR)
694 {
695 jit_op_t *iop;
696 int hreg;
697
698 ppc32_jit_start_hreg_seq(cpu,"bctr");
699 hreg = ppc32_jit_alloc_hreg(cpu,-1);
700 ppc32_op_emit_alter_host_reg(cpu,hreg);
701
702 iop = ppc32_op_emit_insn_output(cpu,2,"bctr");
703
704 amd64_mov_reg_membase(iop->ob_ptr,hreg,AMD64_R15,OFFSET(cpu_ppc_t,ctr),4);
705 amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ia),hreg,4);
706
707 /* set the return address */
708 if (insn & 1)
709 ppc32_set_lr(iop,b->start_ia + (b->ppc_trans_pos << 2));
710
711 ppc32_jit_tcb_push_epilog(&iop->ob_ptr);
712 ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
713 ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1) << 2));
714
715 ppc32_jit_close_hreg_seq(cpu);
716 return(0);
717 }
718
719 /* MFLR - Move From Link Register */
720 DECLARE_INSN(MFLR)
721 {
722 int rd = bits(insn,21,25);
723 int hreg_rd;
724 jit_op_t *iop;
725
726 ppc32_jit_start_hreg_seq(cpu,"mflr");
727 hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
728 iop = ppc32_op_emit_insn_output(cpu,1,"mflr");
729
730 amd64_mov_reg_membase(iop->ob_ptr,hreg_rd,AMD64_R15,OFFSET(cpu_ppc_t,lr),4);
731 ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);
732
733 ppc32_jit_close_hreg_seq(cpu);
734 return(0);
735 }
736
737 /* MTLR - Move To Link Register */
738 DECLARE_INSN(MTLR)
739 {
740 int rs = bits(insn,21,25);
741 int hreg_rs;
742 jit_op_t *iop;
743
744 ppc32_jit_start_hreg_seq(cpu,"mtlr");
745 hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
746 ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
747
748 iop = ppc32_op_emit_insn_output(cpu,1,"mtlr");
749 amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,lr),hreg_rs,4);
750 return(0);
751 }
752
753 /* MFCTR - Move From Counter Register */
754 DECLARE_INSN(MFCTR)
755 {
756 int rd = bits(insn,21,25);
757 int hreg_rd;
758 jit_op_t *iop;
759
760 ppc32_jit_start_hreg_seq(cpu,"mfctr");
761 hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
762
763 iop = ppc32_op_emit_insn_output(cpu,1,"mfctr");
764
765 amd64_mov_reg_membase(iop->ob_ptr,hreg_rd,
766 AMD64_R15,OFFSET(cpu_ppc_t,ctr),4);
767 ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);
768
769 ppc32_jit_close_hreg_seq(cpu);
770 return(0);
771 }
772
773 /* MTCTR - Move To Counter Register */
774 DECLARE_INSN(MTCTR)
775 {
776 int rs = bits(insn,21,25);
777 int hreg_rs;
778 jit_op_t *iop;
779
780 ppc32_jit_start_hreg_seq(cpu,"mtctr");
781 hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
782 ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
783
784 iop = ppc32_op_emit_insn_output(cpu,1,"mtctr");
785
786 amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ctr),
787 hreg_rs,4);
788
789 ppc32_jit_close_hreg_seq(cpu);
790 return(0);
791 }
792
/* MFTBU - Move from Time Base (Up) */
DECLARE_INSN(MFTBU)
{
   int rd = bits(insn,21,25);
   int hreg_rd;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mftbu");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   iop = ppc32_op_emit_insn_output(cpu,1,"mftbu");

   /* $rd = upper 32 bits of the 64-bit time base
    * (offset +4 on a little-endian host) */
   amd64_mov_reg_membase(iop->ob_ptr,hreg_rd,
                         AMD64_R15,OFFSET(cpu_ppc_t,tb)+4,4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
812
/* Number of ticks added to the time base on each MFTBL (see below) */
#define PPC32_TB_INCREMENT 50
814
/* MFTBL - Move from Time Base (Lo)
 *
 * Also advances the 64-bit time base by PPC32_TB_INCREMENT so the guest
 * observes a moving clock; only the low 32 bits are returned in $rd.
 */
DECLARE_INSN(MFTBL)
{
   int rd = bits(insn,21,25);
   int hreg_rd;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mftbl");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   iop = ppc32_op_emit_insn_output(cpu,3,"mftbl");

   /* hreg_rd = tb (64-bit); tb += PPC32_TB_INCREMENT; write back */
   amd64_mov_reg_membase(iop->ob_ptr,hreg_rd,
                         AMD64_R15,OFFSET(cpu_ppc_t,tb),8);
   amd64_alu_reg_imm(iop->ob_ptr,X86_ADD,hreg_rd,PPC32_TB_INCREMENT);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,tb),
                         hreg_rd,8);

   /* $rd = low 32 bits of the updated time base */
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
838
/* ADD */
DECLARE_INSN(ADD)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rd,hreg_ra,hreg_rb;
   jit_op_t *iop;

   /* $rd = $ra + $rb */
   ppc32_jit_start_hreg_seq(cpu,"add");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"add");

   /* When rd aliases ra or rb, add in place (presumably the allocator
    * returns the same host register for the same PPC register — TODO
    * confirm); otherwise copy ra first, then add rb. */
   if (rd == ra)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_rd,hreg_rb,4);
   else if (rd == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_rd,hreg_ra,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_rd,hreg_rb,4);
   }

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   /* add. : CR0 is derived from the EFLAGS produced by the ADD above */
   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
876
/* ADDC - Add Carrying */
DECLARE_INSN(ADDC)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rd,hreg_ra,hreg_rb,hreg_t0;
   jit_op_t *iop;

   /* $rd = $ra + $rb */
   ppc32_jit_start_hreg_seq(cpu,"addc");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   /* store the carry flag */
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"addc");

   if (rd == ra)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_rd,hreg_rb,4);
   else if (rd == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_rd,hreg_ra,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_rd,hreg_rb,4);
   }

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   /* store the carry flag: setc $t0; $t0 &= 1; xer_ca = $t0 */
   amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t0,FALSE);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x1);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         hreg_t0,4);

   /* addc. : re-test $rd (the AND above destroyed the ADD's SF/ZF) */
   if (insn & 1) {
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_rd,hreg_rd,4);
      ppc32_op_emit_update_flags(cpu,0,TRUE);
   }

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
925
/* ADDE - Add Extended
 *
 * $rd = $ra + $rb + XER[CA]; the new carry is the OR of the carries
 * produced by the two partial additions.
 */
DECLARE_INSN(ADDE)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_ra,hreg_rb,hreg_rd,hreg_t0,hreg_t1;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"adde");
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   /* t0 = accumulator, t1 = carry scratch (%rbx) */
   hreg_t0 = ppc32_jit_alloc_hreg(cpu,-1);
   hreg_t1 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_alter_host_reg(cpu,hreg_t0);
   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,3,"adde");

   /* $t0 = $ra + carry; capture the first partial carry into xer_ca */
   amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t1,hreg_t1);
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_ra,4);

   amd64_alu_reg_membase_size(iop->ob_ptr,X86_ADD,hreg_t0,
                              AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),4);
   amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         hreg_t1,4);

   /* $t0 += $rb; OR the second partial carry into xer_ca */
   amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_t0,hreg_rb,4);
   amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                              hreg_t1,4);

   /* update cr0: re-test $t0 (the OR above clobbered EFLAGS) */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_t0,hreg_t0,4);

   amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_t0,4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
979
980 /* ADDI - ADD Immediate */
981 DECLARE_INSN(ADDI)
982 {
983 int rd = bits(insn,21,25);
984 int ra = bits(insn,16,20);
985 int imm = bits(insn,0,15);
986 m_uint32_t tmp = sign_extend_32(imm,16);
987 int hreg_rd,hreg_ra;
988 jit_op_t *iop;
989
990 /* $rd = $ra + imm */
991 ppc32_jit_start_hreg_seq(cpu,"addi");
992 hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
993
994 if (ra != 0) {
995 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
996 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
997
998 iop = ppc32_op_emit_insn_output(cpu,2,"addi");
999
1000 if (rd != ra)
1001 amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);
1002
1003 amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_rd,tmp,4);
1004 } else {
1005 iop = ppc32_op_emit_insn_output(cpu,1,"addi");
1006 ppc32_load_imm(&iop->ob_ptr,hreg_rd,tmp);
1007 }
1008
1009 ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);
1010
1011 ppc32_jit_close_hreg_seq(cpu);
1012 return(0);
1013 }
1014
/* ADDIC - ADD Immediate with Carry */
DECLARE_INSN(ADDIC)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int imm = bits(insn,0,15);
   m_uint32_t tmp = sign_extend_32(imm,16);
   int hreg_rd,hreg_ra;
   jit_op_t *iop;

   /* $rd = $ra + imm; carry-out goes to XER[CA]. CR0 is NOT updated
    * (compare ADDIC_dot below, which additionally sets CR0). */
   ppc32_jit_start_hreg_seq(cpu,"addic");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

   iop = ppc32_op_emit_insn_output(cpu,1,"addic");

   if (rd != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);

   amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_rd,tmp,4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   /* capture the host carry flag of the ADD above into XER[CA]
    * (the setcc bytes are appended to this iop, so in the generated
    * stream they follow the ADD directly) */
   amd64_set_membase(iop->ob_ptr,X86_CC_C,
                     AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),FALSE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1046
/* ADDIC. */
DECLARE_INSN(ADDIC_dot)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int imm = bits(insn,0,15);
   m_uint32_t tmp = sign_extend_32(imm,16);
   int hreg_rd,hreg_ra;
   jit_op_t *iop;

   /* $rd = $ra + imm; carry-out goes to XER[CA] and CR0 is updated
    * from the result (recording variant of ADDIC). */
   ppc32_jit_start_hreg_seq(cpu,"addic.");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

   iop = ppc32_op_emit_insn_output(cpu,1,"addic.");

   if (rd != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);

   amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_rd,tmp,4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   /* capture the host carry flag of the ADD above into XER[CA] */
   amd64_set_membase(iop->ob_ptr,X86_CC_C,
                     AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),FALSE);

   /* CR0 is computed from the host flags of the ADD */
   ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1080
1081 /* ADDIS - ADD Immediate Shifted */
1082 DECLARE_INSN(ADDIS)
1083 {
1084 int rd = bits(insn,21,25);
1085 int ra = bits(insn,16,20);
1086 m_uint32_t imm = bits(insn,0,15);
1087 m_uint32_t tmp = imm << 16;
1088 int hreg_rd,hreg_ra;
1089 jit_op_t *iop;
1090
1091 /* $rd = $ra + (imm << 16) */
1092 ppc32_jit_start_hreg_seq(cpu,"addis");
1093 hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
1094
1095 if (ra != 0) {
1096 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
1097 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
1098
1099 iop = ppc32_op_emit_insn_output(cpu,1,"addis");
1100
1101 if (rd != ra)
1102 amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);
1103
1104 amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_rd,tmp,4);
1105 } else {
1106 iop = ppc32_op_emit_insn_output(cpu,1,"addis");
1107 amd64_mov_reg_imm(iop->ob_ptr,hreg_rd,tmp);
1108 }
1109
1110 ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);
1111
1112 ppc32_jit_close_hreg_seq(cpu);
1113 return(0);
1114 }
1115
/* ADDZE */
DECLARE_INSN(ADDZE)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int hreg_rd,hreg_ra,hreg_t0;
   jit_op_t *iop;

   /* $rd = $ra + XER[CA]; the carry-out of the addition becomes the
    * new XER[CA] value */
   ppc32_jit_start_hreg_seq(cpu,"addze");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

   iop = ppc32_op_emit_insn_output(cpu,2,"addze");

   /* $t0 = 0 (zeroed before the ADD so the later setcc writes a full
    * 0/1 value, not just the low byte) */
   amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t0,hreg_t0);

   if (rd != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);

   /* $rd += XER[CA] */
   amd64_alu_reg_membase_size(iop->ob_ptr,X86_ADD,hreg_rd,
                              AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),4);

   /* new XER[CA] = host carry flag of the ADD above */
   amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t0,FALSE);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         hreg_t0,4);

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   /* addze.: CR0 update (NOTE(review): flags here come from the MOV-free
    * op chain; presumably update_flags reuses the ADD's host flags --
    * matches the pattern of the other arithmetic emitters) */
   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1154
1155 /* AND */
1156 DECLARE_INSN(AND)
1157 {
1158 int rs = bits(insn,21,25);
1159 int ra = bits(insn,16,20);
1160 int rb = bits(insn,11,15);
1161 int hreg_rs,hreg_ra,hreg_rb;
1162 jit_op_t *iop;
1163
1164 /* $ra = $rs & $rb */
1165 ppc32_jit_start_hreg_seq(cpu,"and");
1166 hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
1167 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
1168 hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);
1169
1170 ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
1171 ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);
1172
1173 iop = ppc32_op_emit_insn_output(cpu,1,"and");
1174
1175 if (ra == rs)
1176 amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_rb,4);
1177 else if (ra == rb)
1178 amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_rs,4);
1179 else {
1180 amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
1181 amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_rb,4);
1182 }
1183
1184 ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);
1185
1186 if (insn & 1)
1187 ppc32_op_emit_update_flags(cpu,0,TRUE);
1188
1189 ppc32_jit_close_hreg_seq(cpu);
1190 return(0);
1191 }
1192
1193 /* ANDC */
1194 DECLARE_INSN(ANDC)
1195 {
1196 int rs = bits(insn,21,25);
1197 int ra = bits(insn,16,20);
1198 int rb = bits(insn,11,15);
1199 int hreg_rs,hreg_ra,hreg_rb,hreg_t0;
1200 jit_op_t *iop;
1201
1202 /* $ra = $rs & ~$rb */
1203 ppc32_jit_start_hreg_seq(cpu,"andc");
1204 hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
1205 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
1206 hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);
1207
1208 ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
1209 ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);
1210
1211 iop = ppc32_op_emit_insn_output(cpu,1,"andc");
1212
1213 /* $t0 = ~$rb */
1214 hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
1215 amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rb,4);
1216 amd64_not_reg(iop->ob_ptr,hreg_t0);
1217
1218 /* $ra = $rs & $t0 */
1219 if (ra == rs)
1220 amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_t0,4);
1221 else {
1222 amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_t0,hreg_rs,4);
1223 amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_t0,4);
1224 }
1225
1226 ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);
1227
1228 if (insn & 1)
1229 ppc32_op_emit_update_flags(cpu,0,TRUE);
1230
1231 ppc32_jit_close_hreg_seq(cpu);
1232 return(0);
1233 }
1234
1235 /* AND Immediate */
1236 DECLARE_INSN(ANDI)
1237 {
1238 int rs = bits(insn,21,25);
1239 int ra = bits(insn,16,20);
1240 m_uint16_t imm = bits(insn,0,15);
1241 m_uint32_t tmp = imm;
1242 int hreg_rs,hreg_ra;
1243 jit_op_t *iop;
1244
1245 /* $ra = $rs & imm */
1246 ppc32_jit_start_hreg_seq(cpu,"andi");
1247 hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
1248 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
1249
1250 ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
1251
1252 iop = ppc32_op_emit_insn_output(cpu,2,"andi");
1253
1254 if (ra != rs)
1255 amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
1256
1257 amd64_alu_reg_imm_size(iop->ob_ptr,X86_AND,hreg_ra,tmp,4);
1258 ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);
1259
1260 ppc32_op_emit_update_flags(cpu,0,TRUE);
1261
1262 ppc32_jit_close_hreg_seq(cpu);
1263 return(0);
1264 }
1265
1266 /* AND Immediate Shifted */
1267 DECLARE_INSN(ANDIS)
1268 {
1269 int rs = bits(insn,21,25);
1270 int ra = bits(insn,16,20);
1271 m_uint32_t imm = bits(insn,0,15);
1272 m_uint32_t tmp = imm << 16;
1273 int hreg_rs,hreg_ra;
1274 jit_op_t *iop;
1275
1276 /* $ra = $rs & imm */
1277 ppc32_jit_start_hreg_seq(cpu,"andis");
1278 hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
1279 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
1280
1281 ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
1282
1283 iop = ppc32_op_emit_insn_output(cpu,2,"andis");
1284
1285 if (ra != rs)
1286 amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
1287
1288 amd64_alu_reg_imm_size(iop->ob_ptr,X86_AND,hreg_ra,tmp,4);
1289 ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);
1290
1291 ppc32_op_emit_update_flags(cpu,0,TRUE);
1292
1293 ppc32_jit_close_hreg_seq(cpu);
1294 return(0);
1295 }
1296
1297 /* B - Branch */
1298 DECLARE_INSN(B)
1299 {
1300 m_uint32_t offset = bits(insn,2,25);
1301 m_uint32_t new_ia;
1302 jit_op_t *iop;
1303
1304 iop = ppc32_op_emit_insn_output(cpu,4,"b");
1305
1306 /* compute the new ia */
1307 new_ia = b->start_ia + (b->ppc_trans_pos << 2);
1308 new_ia += sign_extend(offset << 2,26);
1309 ppc32_set_jump(cpu,b,iop,new_ia,TRUE);
1310
1311 ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
1312 ppc32_op_emit_branch_target(cpu,b,new_ia);
1313 ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1) << 2));
1314 return(0);
1315 }
1316
1317 /* BA - Branch Absolute */
1318 DECLARE_INSN(BA)
1319 {
1320 m_uint32_t offset = bits(insn,2,25);
1321 m_uint32_t new_ia;
1322 jit_op_t *iop;
1323
1324 iop = ppc32_op_emit_insn_output(cpu,4,"ba");
1325
1326 /* compute the new ia */
1327 new_ia = sign_extend(offset << 2,26);
1328 ppc32_set_jump(cpu,b,iop,new_ia,TRUE);
1329
1330 ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
1331 ppc32_op_emit_branch_target(cpu,b,new_ia);
1332 ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1) << 2));
1333 return(0);
1334 }
1335
1336 /* BL - Branch and Link */
1337 DECLARE_INSN(BL)
1338 {
1339 m_uint32_t offset = bits(insn,2,25);
1340 m_uint32_t new_ia;
1341 jit_op_t *iop;
1342
1343 iop = ppc32_op_emit_insn_output(cpu,4,"bl");
1344
1345 /* compute the new ia */
1346 new_ia = b->start_ia + (b->ppc_trans_pos << 2);
1347 new_ia += sign_extend(offset << 2,26);
1348
1349 /* set the return address */
1350 ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));
1351 ppc32_set_jump(cpu,b,iop,new_ia,TRUE);
1352
1353 ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
1354 ppc32_op_emit_branch_target(cpu,b,new_ia);
1355 ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1) << 2));
1356 return(0);
1357 }
1358
1359 /* BLA - Branch and Link Absolute */
1360 DECLARE_INSN(BLA)
1361 {
1362 m_uint32_t offset = bits(insn,2,25);
1363 m_uint32_t new_ia;
1364 jit_op_t *iop;
1365
1366 iop = ppc32_op_emit_insn_output(cpu,4,"bla");
1367
1368 /* compute the new ia */
1369 new_ia = sign_extend(offset << 2,26);
1370
1371 /* set the return address */
1372 ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));
1373 ppc32_set_jump(cpu,b,iop,new_ia,TRUE);
1374
1375 ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
1376 ppc32_op_emit_branch_target(cpu,b,new_ia);
1377 ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1) << 2));
1378 return(0);
1379 }
1380
/* BC - Branch Conditional (Condition Check only) */
DECLARE_INSN(BCC)
{
   int bo = bits(insn,21,25);
   int bi = bits(insn,16,20);
   int bd = bits(insn,2,15);
   jit_op_t *iop;
   u_int cr_field,cr_bit;
   m_uint32_t new_ia;
   u_char *jump_ptr;
   int local_jump;
   int cond;

   /* Fast path for BC forms that only test a CR bit (no CTR decrement). */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_BRANCH_JUMP);

   iop = ppc32_op_emit_insn_output(cpu,5,"bcc");

   /* Get the wanted value for the condition bit (BO[1] of the
      big-endian BO field: branch if bit set vs. bit clear) */
   cond = (bo >> 3) & 0x1;

   /* Set the return address (link form, LK bit set) */
   if (insn & 1) {
      ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));
      ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1)<<2));
   }

   /* Compute the new ia: sign-extended BD<<2, relative unless AA set */
   new_ia = sign_extend_32(bd << 2,16);
   if (!(insn & 0x02))
      new_ia += b->start_ia + (b->ppc_trans_pos << 2);

   /* Test the condition bit */
   cr_field = ppc32_get_cr_field(bi);
   cr_bit = ppc32_get_cr_bit(bi);

   ppc32_op_emit_require_flags(cpu,cr_field);

   amd64_test_membase_imm_size(iop->ob_ptr,
                               AMD64_R15,PPC32_CR_FIELD_OFFSET(cr_field),
                               (1 << cr_bit),4);

   local_jump = ppc32_jit_tcb_local_addr(b,new_ia,&jump_ptr);

   /*
    * Optimize the jump, depending if the destination is in the same
    * page or not.
    */
   if (local_jump) {
      /* target in same page: emit a direct conditional jump, to be
         patched to the translated target later */
      ppc32_jit_tcb_record_patch(b,iop,iop->ob_ptr,new_ia);
      amd64_branch32(iop->ob_ptr,(cond) ? X86_CC_NZ : X86_CC_Z,0,FALSE);
   } else {
      /* distant target: jump with INVERTED condition over the
         out-of-page dispatch sequence */
      jump_ptr = iop->ob_ptr;
      amd64_branch32(iop->ob_ptr,(cond) ? X86_CC_Z : X86_CC_NZ,0,FALSE);
      ppc32_set_jump(cpu,b,iop,new_ia,TRUE);
      amd64_patch(jump_ptr,iop->ob_ptr);
   }

   ppc32_op_emit_branch_target(cpu,b,new_ia);
   return(0);
}
1441
/* BC - Branch Conditional */
DECLARE_INSN(BC)
{
   int bo = bits(insn,21,25);
   int bi = bits(insn,16,20);
   int bd = bits(insn,2,15);
   int hreg_t0,hreg_t1;
   jit_op_t *iop;
   u_int cr_field,cr_bit;
   m_uint32_t new_ia;
   u_char *jump_ptr;
   int local_jump;
   int cond,ctr;

   /* General BC: combines the optional CTR-decrement test and the
      optional CR-bit test into $t0 (1 = take the branch). */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_BRANCH_JUMP);

   iop = ppc32_op_emit_insn_output(cpu,5,"bc");

   ppc32_jit_start_hreg_seq(cpu,"bc");
   hreg_t0 = ppc32_jit_alloc_hreg(cpu,-1);
   hreg_t1 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_alter_host_reg(cpu,hreg_t0);

   /* Get the wanted value for the condition bit and CTR value */
   cond = (bo >> 3) & 0x1;
   ctr = (bo >> 1) & 0x1;

   /* Set the return address (link form, LK bit set) */
   if (insn & 1) {
      ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));
      ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1)<<2));
   }

   /* Compute the new ia: sign-extended BD<<2, relative unless AA set */
   new_ia = sign_extend_32(bd << 2,16);
   if (!(insn & 0x02))
      new_ia += b->start_ia + (b->ppc_trans_pos << 2);

   /* $t0 = 1: assume branch taken, AND in each test result below */
   amd64_mov_reg_imm(iop->ob_ptr,hreg_t0,1);

   /* Decrement the count register */
   if (!(bo & 0x04)) {
      amd64_dec_membase_size(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ctr),4);
      amd64_set_reg(iop->ob_ptr,(ctr) ? X86_CC_Z : X86_CC_NZ,hreg_t1,FALSE);
      amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,hreg_t1);
   }

   /* Test the condition bit */
   if (!((bo >> 4) & 0x01)) {
      cr_field = ppc32_get_cr_field(bi);
      cr_bit = ppc32_get_cr_bit(bi);

      ppc32_op_emit_require_flags(cpu,cr_field);

      amd64_test_membase_imm_size(iop->ob_ptr,
                                  AMD64_R15,PPC32_CR_FIELD_OFFSET(cr_field),
                                  (1 << cr_bit),4);

      amd64_set_reg(iop->ob_ptr,(cond) ? X86_CC_NZ : X86_CC_Z,hreg_t1,FALSE);
      amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,hreg_t1);
   }

   /* keep only bit 0 of $t0; sets ZF for the conditional jump below */
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   local_jump = ppc32_jit_tcb_local_addr(b,new_ia,&jump_ptr);

   /*
    * Optimize the jump, depending if the destination is in the same
    * page or not.
    */
   if (local_jump) {
      /* same page: direct jump if $t0 != 0, patched later */
      ppc32_jit_tcb_record_patch(b,iop,iop->ob_ptr,new_ia);
      amd64_branch32(iop->ob_ptr,X86_CC_NZ,0,FALSE);
   } else {
      /* distant target: skip the dispatch sequence when $t0 == 0 */
      jump_ptr = iop->ob_ptr;
      amd64_branch32(iop->ob_ptr,X86_CC_Z,0,FALSE);
      ppc32_set_jump(cpu,b,iop,new_ia,TRUE);
      amd64_patch(jump_ptr,iop->ob_ptr);
   }

   ppc32_op_emit_branch_target(cpu,b,new_ia);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1528
1529 /* BCLR - Branch Conditional to Link register */
1530 DECLARE_INSN(BCLR)
1531 {
1532 int bo = bits(insn,21,25);
1533 int bi = bits(insn,16,20);
1534 int bd = bits(insn,2,15);
1535 int hreg_t0,hreg_t1;
1536 jit_op_t *iop;
1537 u_int cr_field,cr_bit;
1538 m_uint32_t new_ia;
1539 u_char *jump_ptr;
1540 int cond,ctr;
1541
1542 ppc32_jit_start_hreg_seq(cpu,"bclr");
1543 hreg_t0 = ppc32_jit_alloc_hreg(cpu,-1);
1544 hreg_t1 = ppc32_jit_get_tmp_hreg(cpu);
1545
1546 ppc32_op_emit_alter_host_reg(cpu,hreg_t0);
1547
1548 iop = ppc32_op_emit_insn_output(cpu,5,"bclr");
1549
1550 /* Get the wanted value for the condition bit and CTR value */
1551 cond = (bo >> 3) & 0x1;
1552 ctr = (bo >> 1) & 0x1;
1553
1554 /* Compute the new ia */
1555 new_ia = sign_extend_32(bd << 2,16);
1556 if (!(insn & 0x02))
1557 new_ia += b->start_ia + (b->ppc_trans_pos << 2);
1558
1559 amd64_mov_reg_imm(iop->ob_ptr,hreg_t0,1);
1560
1561 /* Decrement the count register */
1562 if (!(bo & 0x04)) {
1563 amd64_dec_membase_size(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ctr),4);
1564 amd64_set_reg(iop->ob_ptr,(ctr) ? X86_CC_Z : X86_CC_NZ,hreg_t1,FALSE);
1565 amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,hreg_t1);
1566 }
1567
1568 /* Test the condition bit */
1569 if (!((bo >> 4) & 0x01)) {
1570 cr_field = ppc32_get_cr_field(bi);
1571 cr_bit = ppc32_get_cr_bit(bi);
1572
1573 ppc32_op_emit_require_flags(cpu,cr_field);
1574
1575 amd64_test_membase_imm_size(iop->ob_ptr,
1576 AMD64_R15,PPC32_CR_FIELD_OFFSET(cr_field),
1577 (1 << cr_bit),4);
1578
1579 amd64_set_reg(iop->ob_ptr,(cond) ? X86_CC_NZ : X86_CC_Z,hreg_t1,FALSE);
1580 amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,hreg_t1);
1581 }
1582
1583 /* Set the return address */
1584 amd64_mov_reg_membase(iop->ob_ptr,hreg_t1,AMD64_R15,OFFSET(cpu_ppc_t,lr),4);
1585
1586 if (insn & 1) {
1587 ppc32_set_lr(iop,b->start_ia + ((b->ppc_trans_pos+1) << 2));
1588 ppc32_op_emit_branch_target(cpu,b,b->start_ia+((b->ppc_trans_pos+1)<<2));
1589 }
1590
1591 /* Branching */
1592 amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);
1593
1594 jump_ptr = iop->ob_ptr;
1595 amd64_branch32(iop->ob_ptr,X86_CC_Z,0,FALSE);
1596
1597 amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t1,0xFFFFFFFC);
1598 amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,ia),hreg_t1,4);
1599 ppc32_jit_tcb_push_epilog(&iop->ob_ptr);
1600
1601 amd64_patch(jump_ptr,iop->ob_ptr);
1602
1603 ppc32_op_emit_basic_opcode(cpu,JIT_OP_EOB);
1604
1605 ppc32_jit_close_hreg_seq(cpu);
1606 return(0);
1607 }
1608
1609 /* CMP - Compare */
1610 DECLARE_INSN(CMP)
1611 {
1612 int rd = bits(insn,23,25);
1613 int ra = bits(insn,16,20);
1614 int rb = bits(insn,11,15);
1615 int hreg_ra,hreg_rb;
1616 jit_op_t *iop;
1617
1618 ppc32_jit_start_hreg_seq(cpu,"cmp");
1619 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
1620 hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);
1621
1622 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
1623 ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);
1624
1625 iop = ppc32_op_emit_insn_output(cpu,1,"cmp");
1626
1627 amd64_alu_reg_reg_size(iop->ob_ptr,X86_CMP,hreg_ra,hreg_rb,4);
1628 ppc32_op_emit_update_flags(cpu,rd,TRUE);
1629
1630 ppc32_jit_close_hreg_seq(cpu);
1631 return(0);
1632 }
1633
1634 /* CMPI - Compare Immediate */
1635 DECLARE_INSN(CMPI)
1636 {
1637 int rd = bits(insn,23,25);
1638 int ra = bits(insn,16,20);
1639 m_uint16_t imm = bits(insn,0,15);
1640 m_uint32_t tmp = sign_extend_32(imm,16);
1641 int hreg_ra;
1642 jit_op_t *iop;
1643
1644 ppc32_jit_start_hreg_seq(cpu,"cmpi");
1645 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
1646 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
1647
1648 iop = ppc32_op_emit_insn_output(cpu,1,"cmpi");
1649
1650 amd64_alu_reg_imm_size(iop->ob_ptr,X86_CMP,hreg_ra,tmp,4);
1651 ppc32_op_emit_update_flags(cpu,rd,TRUE);
1652
1653 ppc32_jit_close_hreg_seq(cpu);
1654 return(0);
1655 }
1656
1657 /* CMPL - Compare Logical */
1658 DECLARE_INSN(CMPL)
1659 {
1660 int rd = bits(insn,23,25);
1661 int ra = bits(insn,16,20);
1662 int rb = bits(insn,11,15);
1663 int hreg_ra,hreg_rb;
1664 jit_op_t *iop;
1665
1666 ppc32_jit_start_hreg_seq(cpu,"cmpl");
1667 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
1668 hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);
1669
1670 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
1671 ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);
1672
1673 iop = ppc32_op_emit_insn_output(cpu,1,"cmpl");
1674
1675 amd64_alu_reg_reg_size(iop->ob_ptr,X86_CMP,hreg_ra,hreg_rb,4);
1676 ppc32_op_emit_update_flags(cpu,rd,FALSE);
1677
1678 ppc32_jit_close_hreg_seq(cpu);
1679 return(0);
1680 }
1681
1682 /* CMPLI - Compare Immediate */
1683 DECLARE_INSN(CMPLI)
1684 {
1685 int rd = bits(insn,23,25);
1686 int ra = bits(insn,16,20);
1687 m_uint32_t imm = bits(insn,0,15);
1688 int hreg_ra;
1689 jit_op_t *iop;
1690
1691 ppc32_jit_start_hreg_seq(cpu,"cmpli");
1692 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
1693 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
1694
1695 iop = ppc32_op_emit_insn_output(cpu,1,"cmpli");
1696
1697 amd64_alu_reg_imm_size(iop->ob_ptr,X86_CMP,hreg_ra,imm,4);
1698 ppc32_op_emit_update_flags(cpu,rd,FALSE);
1699
1700 ppc32_jit_close_hreg_seq(cpu);
1701 return(0);
1702 }
1703
/* CRAND - Condition Register AND */
DECLARE_INSN(CRAND)
{
   int bd = bits(insn,21,25);
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);
   int hreg_t0;
   jit_op_t *iop;

   /* CR[bd] = CR[ba] & CR[bb]. RDX is forced as a scratch for the
      $ba bit; hreg_t0 holds the $bb bit and then the result. */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"crand");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* the in-memory copies of all three CR fields must be up to date */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"crand");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,hreg_t0,FALSE);

   /* result of AND between $ba and $bb */
   amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,AMD64_RDX);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1758
/* CRANDC - Condition Register AND with Complement */
DECLARE_INSN(CRANDC)
{
   int bd = bits(insn,21,25);
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);
   int hreg_t0;
   jit_op_t *iop;

   /* CR[bd] = CR[ba] & ~CR[bb]. RDX is forced as a scratch for the
      $ba bit; the complement is obtained by latching the $bb test
      with CC_Z instead of CC_NZ. */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"crandc");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* the in-memory copies of all three CR fields must be up to date */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"crandc");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit (CC_Z: $t0 gets the COMPLEMENT of the bit) */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_Z,hreg_t0,FALSE);

   /* result of AND between $ba and ~$bb */
   amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,AMD64_RDX);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1813
/* CREQV - Condition Register EQV */
DECLARE_INSN(CREQV)
{
   int bd = bits(insn,21,25);
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);
   int hreg_t0;
   jit_op_t *iop;

   /* CR[bd] = ~(CR[ba] ^ CR[bb]) -- equivalence. RDX is forced as a
      scratch for the $ba bit. */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"creqv");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* the in-memory copies of all three CR fields must be up to date */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"creqv");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,hreg_t0,FALSE);

   /* XOR then NOT gives the EQV of $ba and $bb (bit 0 only) */
   amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t0,AMD64_RDX);
   amd64_not_reg(iop->ob_ptr,hreg_t0);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1869
/* CRNAND - Condition Register NAND */
DECLARE_INSN(CRNAND)
{
   int bd = bits(insn,21,25);
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);
   int hreg_t0;
   jit_op_t *iop;

   /* CR[bd] = ~(CR[ba] & CR[bb]). RDX is forced as a scratch for the
      $ba bit. */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"crnand");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* the in-memory copies of all three CR fields must be up to date */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"crnand");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,hreg_t0,FALSE);

   /* result of NAND between $ba and $bb (AND, NOT, keep bit 0) */
   amd64_alu_reg_reg(iop->ob_ptr,X86_AND,hreg_t0,AMD64_RDX);
   amd64_not_reg(iop->ob_ptr,hreg_t0);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1925
/* CRNOR - Condition Register NOR */
DECLARE_INSN(CRNOR)
{
   int bd = bits(insn,21,25);
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);
   int hreg_t0;
   jit_op_t *iop;

   /* CR[bd] = ~(CR[ba] | CR[bb]). RDX is forced as a scratch for the
      $ba bit. */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"crnor");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* the in-memory copies of all three CR fields must be up to date */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"crnor");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,hreg_t0,FALSE);

   /* result of NOR between $ba and $bb (OR, NOT, keep bit 0) */
   amd64_alu_reg_reg(iop->ob_ptr,X86_OR,hreg_t0,AMD64_RDX);
   amd64_not_reg(iop->ob_ptr,hreg_t0);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
1981
/* CROR - Condition Register OR */
DECLARE_INSN(CROR)
{
   int bd = bits(insn,21,25);
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);
   int hreg_t0;
   jit_op_t *iop;

   /* CR[bd] = CR[ba] | CR[bb]. RDX is forced as a scratch for the
      $ba bit. */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"cror");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* the in-memory copies of all three CR fields must be up to date */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"cror");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,hreg_t0,FALSE);

   /* result of OR between $ba and $bb (the old comment said "NOR":
      there is no NOT here, this really is a plain OR) */
   amd64_alu_reg_reg(iop->ob_ptr,X86_OR,hreg_t0,AMD64_RDX);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2036
/* CRORC - Condition Register OR with Complement */
DECLARE_INSN(CRORC)
{
   int bd = bits(insn,21,25);
   int bb = bits(insn,16,20);
   int ba = bits(insn,11,15);
   int hreg_t0;
   jit_op_t *iop;

   /* CR[bd] = CR[ba] | ~CR[bb]. RDX is forced as a scratch for the
      $ba bit; the complement is obtained by latching the $bb test
      with CC_Z instead of CC_NZ. */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"crorc");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* the in-memory copies of all three CR fields must be up to date */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"crorc");

   /* test $ba bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit (CC_Z: $t0 gets the COMPLEMENT of the bit) */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_Z,hreg_t0,FALSE);

   /* result of ORC between $ba and $bb */
   amd64_alu_reg_reg(iop->ob_ptr,X86_OR,hreg_t0,AMD64_RDX);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2091
/* CRXOR - Condition Register XOR: crbD = crbA ^ crbB */
DECLARE_INSN(CRXOR)
{
   int bd = bits(insn,21,25);   /* destination CR bit */
   int bb = bits(insn,16,20);   /* source CR bit B */
   int ba = bits(insn,11,15);   /* source CR bit A */
   int hreg_t0;
   jit_op_t *iop;

   /* RDX is used as scratch below: discard any value cached in it */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_start_hreg_seq(cpu,"crxor");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);

   /* all three CR fields must be materialized before we read/write them */
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(ba));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bb));
   ppc32_op_emit_require_flags(cpu,ppc32_get_cr_field(bd));

   iop = ppc32_op_emit_insn_output(cpu,3,"crxor");

   /* test $ba bit (R15 holds the cpu_ppc_t pointer) */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(ba)),
                          (1 << ppc32_get_cr_bit(ba)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,AMD64_RDX,FALSE);

   /* test $bb bit */
   amd64_test_membase_imm(iop->ob_ptr,
                          AMD64_R15,
                          PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bb)),
                          (1 << ppc32_get_cr_bit(bb)));
   amd64_set_reg(iop->ob_ptr,X86_CC_NZ,hreg_t0,FALSE);

   /* result of XOR between $ba and $bb */
   amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t0,AMD64_RDX);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x01);

   /* set/clear $bd bit depending on the result */
   amd64_alu_membase_imm_size(iop->ob_ptr,X86_AND,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              ~(1 << ppc32_get_cr_bit(bd)),4);

   amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_t0,ppc32_get_cr_bit(bd));
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,
                              PPC32_CR_FIELD_OFFSET(ppc32_get_cr_field(bd)),
                              hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2146
/* DIVWU - Divide Word Unsigned: rd = (u32)ra / (u32)rb */
DECLARE_INSN(DIVWU)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rb;
   jit_op_t *iop;

   /* x86 DIV implicitly uses edx:eax, so both must be reserved */
   ppc32_jit_start_hreg_seq(cpu,"divwu");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RAX);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   /* $rd = $ra / $rb */
   ppc32_op_emit_load_gpr(cpu,AMD64_RAX,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"divwu");
   /* clear edx: the dividend is the 32-bit value in eax only */
   ppc32_load_imm(&iop->ob_ptr,AMD64_RDX,0);

   /* NOTE(review): if $rb is 0 the host DIV raises #DE (SIGFPE), while
      PPC divwu yields an undefined result without trapping — confirm a
      zero divisor cannot reach this path or add a guard. */
   amd64_div_reg_size(iop->ob_ptr,hreg_rb,0,4);

   /* Rc bit set: compute host flags from the quotient for CR0 update */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,AMD64_RAX,AMD64_RAX,4);

   ppc32_op_emit_store_gpr(cpu,rd,AMD64_RAX);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   /* edx:eax are directly modified: throw them */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RAX);
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2185
/* EQV - Equivalence: $ra = ~($rs ^ $rb) */
DECLARE_INSN(EQV)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra,hreg_rb;
   jit_op_t *iop;

   /* $ra = ~($rs ^ $rb) */
   ppc32_jit_start_hreg_seq(cpu,"eqv");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,1,"eqv");

   /* avoid a register move when the destination aliases a source */
   if (ra == rs)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_XOR,hreg_ra,hreg_rb,4);
   else if (ra == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_XOR,hreg_ra,hreg_rs,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_XOR,hreg_ra,hreg_rb,4);
   }

   /* full-width NOT; only the low 32 bits are stored back below */
   amd64_not_reg(iop->ob_ptr,hreg_ra);

   /* Rc bit set: recompute host flags (NOT does not set them) */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2228
/* EXTSB - Extend Sign Byte: $ra = sign-extension of low 8 bits of $rs */
DECLARE_INSN(EXTSB)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* $ra = extsb($rs) */
   ppc32_jit_start_hreg_seq(cpu,"extsb");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,2,"extsb");

   if (rs != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   /* shift left then arithmetic right by 24 sign-extends the byte */
   amd64_shift_reg_imm_size(iop->ob_ptr,X86_SHL,hreg_ra,24,4);
   amd64_shift_reg_imm_size(iop->ob_ptr,X86_SAR,hreg_ra,24,4);

   /* Rc bit set: compute host flags from the result for CR0 update */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2263
/* EXTSH - Extend Sign Half-Word: $ra = sign-extension of low 16 bits of $rs */
DECLARE_INSN(EXTSH)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* $ra = extsh($rs) */
   ppc32_jit_start_hreg_seq(cpu,"extsh");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,2,"extsh");

   if (rs != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   /* shift left then arithmetic right by 16 sign-extends the half-word */
   amd64_shift_reg_imm_size(iop->ob_ptr,X86_SHL,hreg_ra,16,4);
   amd64_shift_reg_imm_size(iop->ob_ptr,X86_SAR,hreg_ra,16,4);

   /* Rc bit set: compute host flags from the result for CR0 update */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2298
/* LBZ - Load Byte and Zero: rs = zero-extended byte at (ra|0) + offset */
DECLARE_INSN(LBZ)
{
   int rs = bits(insn,21,25);              /* destination GPR */
   int ra = bits(insn,16,20);              /* base GPR */
   m_uint16_t offset = bits(insn,0,15);    /* 16-bit displacement */

   //ppc32_emit_memop(b,PPC_MEMOP_LBZ,ra,offset,rs,0);
   /* fast path: inline the access, falling back to the generic memop */
   ppc32_emit_memop_fast(cpu,b,0,PPC_MEMOP_LBZ,ra,offset,rs,
                         ppc32_memop_fast_lbz);
   return(0);
}
2311
/* LBZU - Load Byte and Zero with Update (last arg 1: write EA back to ra) */
DECLARE_INSN(LBZU)
{
   int rs = bits(insn,21,25);              /* destination GPR */
   int ra = bits(insn,16,20);              /* base GPR, updated with EA */
   m_uint16_t offset = bits(insn,0,15);    /* 16-bit displacement */

   ppc32_emit_memop(cpu,b,PPC_MEMOP_LBZ,ra,offset,rs,1);
   return(0);
}
2322
/* LBZUX - Load Byte and Zero with Update Indexed (EA = ra + rb, ra updated) */
DECLARE_INSN(LBZUX)
{
   int rs = bits(insn,21,25);   /* destination GPR */
   int ra = bits(insn,16,20);   /* base GPR, updated with EA */
   int rb = bits(insn,11,15);   /* index GPR */

   ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LBZ,ra,rb,rs,1);
   return(0);
}
2333
/* LBZX - Load Byte and Zero Indexed (EA = (ra|0) + rb) */
DECLARE_INSN(LBZX)
{
   int rs = bits(insn,21,25);   /* destination GPR */
   int ra = bits(insn,16,20);   /* base GPR */
   int rb = bits(insn,11,15);   /* index GPR */

   ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LBZ,ra,rb,rs,0);
   return(0);
}
2344
/* LHA - Load Half-Word Algebraic (sign-extended 16-bit load) */
DECLARE_INSN(LHA)
{
   int rs = bits(insn,21,25);              /* destination GPR */
   int ra = bits(insn,16,20);              /* base GPR */
   m_uint16_t offset = bits(insn,0,15);    /* 16-bit displacement */

   ppc32_emit_memop(cpu,b,PPC_MEMOP_LHA,ra,offset,rs,0);
   return(0);
}
2355
/* LHAU - Load Half-Word Algebraic with Update (EA written back to ra) */
DECLARE_INSN(LHAU)
{
   int rs = bits(insn,21,25);              /* destination GPR */
   int ra = bits(insn,16,20);              /* base GPR, updated with EA */
   m_uint16_t offset = bits(insn,0,15);    /* 16-bit displacement */

   ppc32_emit_memop(cpu,b,PPC_MEMOP_LHA,ra,offset,rs,1);
   return(0);
}
2366
/* LHAUX - Load Half-Word Algebraic with Update Indexed (EA = ra + rb) */
DECLARE_INSN(LHAUX)
{
   int rs = bits(insn,21,25);   /* destination GPR */
   int ra = bits(insn,16,20);   /* base GPR, updated with EA */
   int rb = bits(insn,11,15);   /* index GPR */

   ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LHA,ra,rb,rs,1);
   return(0);
}
2377
/* LHAX - Load Half-Word Algebraic Indexed (EA = (ra|0) + rb) */
DECLARE_INSN(LHAX)
{
   int rs = bits(insn,21,25);   /* destination GPR */
   int ra = bits(insn,16,20);   /* base GPR */
   int rb = bits(insn,11,15);   /* index GPR */

   ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LHA,ra,rb,rs,0);
   return(0);
}
2388
/* LHZ - Load Half-Word and Zero (zero-extended 16-bit load) */
DECLARE_INSN(LHZ)
{
   int rs = bits(insn,21,25);              /* destination GPR */
   int ra = bits(insn,16,20);              /* base GPR */
   m_uint16_t offset = bits(insn,0,15);    /* 16-bit displacement */

   ppc32_emit_memop(cpu,b,PPC_MEMOP_LHZ,ra,offset,rs,0);
   return(0);
}
2399
/* LHZU - Load Half-Word and Zero with Update (EA written back to ra) */
DECLARE_INSN(LHZU)
{
   int rs = bits(insn,21,25);              /* destination GPR */
   int ra = bits(insn,16,20);              /* base GPR, updated with EA */
   m_uint16_t offset = bits(insn,0,15);    /* 16-bit displacement */

   ppc32_emit_memop(cpu,b,PPC_MEMOP_LHZ,ra,offset,rs,1);
   return(0);
}
2410
/* LHZUX - Load Half-Word and Zero with Update Indexed (EA = ra + rb) */
DECLARE_INSN(LHZUX)
{
   int rs = bits(insn,21,25);   /* destination GPR */
   int ra = bits(insn,16,20);   /* base GPR, updated with EA */
   int rb = bits(insn,11,15);   /* index GPR */

   ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LHZ,ra,rb,rs,1);
   return(0);
}
2421
/* LHZX - Load Half-Word and Zero Indexed (EA = (ra|0) + rb) */
DECLARE_INSN(LHZX)
{
   int rs = bits(insn,21,25);   /* destination GPR */
   int ra = bits(insn,16,20);   /* base GPR */
   int rb = bits(insn,11,15);   /* index GPR */

   ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LHZ,ra,rb,rs,0);
   return(0);
}
2432
/* LWZ - Load Word and Zero: rs = 32-bit word at (ra|0) + offset */
DECLARE_INSN(LWZ)
{
   int rs = bits(insn,21,25);              /* destination GPR */
   int ra = bits(insn,16,20);              /* base GPR */
   m_uint16_t offset = bits(insn,0,15);    /* 16-bit displacement */

   //ppc32_emit_memop(b,PPC_MEMOP_LWZ,ra,offset,rs,0);
   /* fast path: inline the access, falling back to the generic memop */
   ppc32_emit_memop_fast(cpu,b,0,PPC_MEMOP_LWZ,ra,offset,rs,
                         ppc32_memop_fast_lwz);
   return(0);
}
2445
/* LWZU - Load Word and Zero with Update (EA written back to ra) */
DECLARE_INSN(LWZU)
{
   int rs = bits(insn,21,25);              /* destination GPR */
   int ra = bits(insn,16,20);              /* base GPR, updated with EA */
   m_uint16_t offset = bits(insn,0,15);    /* 16-bit displacement */

   ppc32_emit_memop(cpu,b,PPC_MEMOP_LWZ,ra,offset,rs,1);
   return(0);
}
2456
/* LWZUX - Load Word and Zero with Update Indexed (EA = ra + rb) */
DECLARE_INSN(LWZUX)
{
   int rs = bits(insn,21,25);   /* destination GPR */
   int ra = bits(insn,16,20);   /* base GPR, updated with EA */
   int rb = bits(insn,11,15);   /* index GPR */

   ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LWZ,ra,rb,rs,1);
   return(0);
}
2467
/* LWZX - Load Word and Zero Indexed (EA = (ra|0) + rb) */
DECLARE_INSN(LWZX)
{
   int rs = bits(insn,21,25);   /* destination GPR */
   int ra = bits(insn,16,20);   /* base GPR */
   int rb = bits(insn,11,15);   /* index GPR */

   ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_LWZ,ra,rb,rs,0);
   return(0);
}
2478
/* MCRF - Move Condition Register Field: CR field "rd" = CR field "rs" */
DECLARE_INSN(MCRF)
{
   int rd = bits(insn,23,25);   /* destination CR field (0-7) */
   int rs = bits(insn,18,20);   /* source CR field (0-7) */
   int hreg_t0;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mcrf");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   /* the source field must be materialized before it is read */
   ppc32_op_emit_require_flags(cpu,rs);

   iop = ppc32_op_emit_insn_output(cpu,1,"mcrf");

   /* Load "rs" field into the temporary host register */
   amd64_mov_reg_membase(iop->ob_ptr,hreg_t0,
                         AMD64_R15,PPC32_CR_FIELD_OFFSET(rs),4);

   /* Store it in "rd" field */
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,PPC32_CR_FIELD_OFFSET(rd),
                         hreg_t0,4);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2504
/* MFCR - Move from Condition Register: rd = concatenation of CR fields 0-7 */
DECLARE_INSN(MFCR)
{
   int rd = bits(insn,21,25);
   int hreg_rd,hreg_t0;
   jit_op_t *iop;
   int i;

   ppc32_jit_start_hreg_seq(cpu,"mfcr");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   /* every CR field is read: all of them must be materialized */
   ppc32_op_emit_require_flags(cpu,JIT_OP_PPC_ALL_FLAGS);

   iop = ppc32_op_emit_insn_output(cpu,3,"mfcr");

   amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_rd,hreg_rd);

   /* rebuild the 32-bit CR: field 0 ends up in the top nibble */
   for(i=0;i<8;i++) {
      /* load 4-bit field i into the temporary register */
      amd64_mov_reg_membase(iop->ob_ptr,hreg_t0,
                            AMD64_R15,PPC32_CR_FIELD_OFFSET(i),4);
      amd64_shift_reg_imm(iop->ob_ptr,X86_SHL,hreg_rd,4);
      amd64_alu_reg_reg(iop->ob_ptr,X86_OR,hreg_rd,hreg_t0);
   }

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2535
/* MFMSR - Move from Machine State Register: rd = MSR */
DECLARE_INSN(MFMSR)
{
   int rd = bits(insn,21,25);
   int hreg_rd;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mfmsr");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   iop = ppc32_op_emit_insn_output(cpu,1,"mfmsr");
   /* read cpu->msr (R15 holds the cpu_ppc_t pointer) */
   amd64_mov_reg_membase(iop->ob_ptr,hreg_rd,
                         AMD64_R15,OFFSET(cpu_ppc_t,msr),4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2554
/* MFSR - Move From Segment Register: rd = SR[sr] */
DECLARE_INSN(MFSR)
{
   int rd = bits(insn,21,25);
   int sr = bits(insn,16,19);   /* segment register index (0-15) */
   int hreg_rd;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"mfsr");
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   iop = ppc32_op_emit_insn_output(cpu,1,"mfsr");

   /* sr << 2 indexes the 32-bit entries of the cpu->sr array */
   amd64_mov_reg_membase(iop->ob_ptr,hreg_rd,
                         AMD64_R15,(OFFSET(cpu_ppc_t,sr) + (sr << 2)),4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2575
/* MTCRF - Move to Condition Register Fields selected by the CRM mask */
DECLARE_INSN(MTCRF)
{
   int rs = bits(insn,21,25);    /* source GPR */
   int crm = bits(insn,12,19);   /* field mask, bit 7-i selects CR field i */
   int hreg_rs,hreg_t0;
   jit_op_t *iop;
   int i;

   ppc32_jit_start_hreg_seq(cpu,"mtcrf");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,3,"mtcrf");

   for(i=0;i<8;i++)
      if (crm & (1 << (7 - i))) {
         amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rs,4);

         /* extract the 4-bit nibble of field i (field 7 is already low) */
         if (i != 7)
            amd64_shift_reg_imm(iop->ob_ptr,X86_SHR,hreg_t0,28 - (i << 2));

         amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x0F);
         amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,PPC32_CR_FIELD_OFFSET(i),
                               hreg_t0,4);
      }

   /* CR fields were written behind the flag tracker's back */
   ppc32_op_emit_basic_opcode(cpu,JIT_OP_TRASH_FLAGS);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2610
/* MULHW - Multiply High Word: rd = high 32 bits of (s32)ra * (s32)rb */
DECLARE_INSN(MULHW)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rb;
   jit_op_t *iop;

   /* x86 IMUL implicitly uses edx:eax, so both must be reserved */
   ppc32_jit_start_hreg_seq(cpu,"mulhw");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RAX);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,AMD64_RAX,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   /* rd = hi(ra * rb) -- third arg 1 selects the signed multiply */
   iop = ppc32_op_emit_insn_output(cpu,2,"mulhw");
   amd64_mul_reg_size(iop->ob_ptr,hreg_rb,1,4);

   /* Rc bit set: flags come from the high word (edx), the PPC result */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,AMD64_RDX,AMD64_RDX,4);

   ppc32_op_emit_store_gpr(cpu,rd,AMD64_RDX);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   /* edx:eax are directly modified: throw them */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RAX);
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2647
/* MULHWU - Multiply High Word Unsigned: rd = high 32 bits of (u32)ra * (u32)rb */
DECLARE_INSN(MULHWU)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rb;
   jit_op_t *iop;

   /* x86 MUL implicitly uses edx:eax, so both must be reserved */
   ppc32_jit_start_hreg_seq(cpu,"mulhwu");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RAX);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,AMD64_RAX,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   /* rd = hi(ra * rb) -- third arg 0 selects the unsigned multiply */
   iop = ppc32_op_emit_insn_output(cpu,2,"mulhwu");
   amd64_mul_reg_size(iop->ob_ptr,hreg_rb,0,4);

   /* Rc bit set: flags come from the high word (edx), the PPC result */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,AMD64_RDX,AMD64_RDX,4);

   ppc32_op_emit_store_gpr(cpu,rd,AMD64_RDX);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   /* edx:eax are directly modified: throw them */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RAX);
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2684
/* MULLI - Multiply Low Immediate: rd = low 32 bits of ra * sext(imm) */
DECLARE_INSN(MULLI)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint32_t imm = bits(insn,0,15);
   int hreg_t0;
   jit_op_t *iop;

   /* x86 IMUL implicitly uses edx:eax, so both must be reserved */
   ppc32_jit_start_hreg_seq(cpu,"mulli");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RAX);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_load_gpr(cpu,AMD64_RAX,ra);

   /* rd = lo(ra * imm) */
   iop = ppc32_op_emit_insn_output(cpu,2,"mulli");

   /* sign-extend the 16-bit immediate, then signed multiply (arg 1) */
   ppc32_load_imm(&iop->ob_ptr,hreg_t0,sign_extend_32(imm,16));
   amd64_mul_reg_size(iop->ob_ptr,hreg_t0,1,4);
   ppc32_op_emit_store_gpr(cpu,rd,AMD64_RAX);

   /* edx:eax are directly modified: throw them */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RAX);
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2715
/* MULLW - Multiply Low Word: rd = low 32 bits of ra * rb */
DECLARE_INSN(MULLW)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rb;
   jit_op_t *iop;

   /* x86 IMUL implicitly uses edx:eax, so both must be reserved.
      NOTE(review): the OE (overflow-enable) form is not handled here. */
   ppc32_jit_start_hreg_seq(cpu,"mullw");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RAX);
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RDX);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,AMD64_RAX,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   /* rd = lo(ra * rb) -- third arg 1 selects the signed multiply */
   iop = ppc32_op_emit_insn_output(cpu,2,"mullw");
   amd64_mul_reg_size(iop->ob_ptr,hreg_rb,1,4);

   /* Rc bit set: flags come from the low word (eax), the PPC result */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,AMD64_RAX,AMD64_RAX,4);

   ppc32_op_emit_store_gpr(cpu,rd,AMD64_RAX);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   /* edx:eax are directly modified: throw them */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RAX);
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RDX);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2752
/* NAND: $ra = ~($rs & $rb) */
DECLARE_INSN(NAND)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra,hreg_rb;
   jit_op_t *iop;

   /* $ra = ~($rs & $rb) */
   ppc32_jit_start_hreg_seq(cpu,"nand");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"nand");

   /* avoid a register move when the destination aliases a source */
   if (ra == rs)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_rb,4);
   else if (ra == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_rs,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_AND,hreg_ra,hreg_rb,4);
   }

   amd64_not_reg(iop->ob_ptr,hreg_ra);

   /* Rc bit set: recompute host flags (NOT does not set them) */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2795
/* NEG: $rd = -$ra (two's complement negation) */
DECLARE_INSN(NEG)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int hreg_rd,hreg_ra;
   jit_op_t *iop;

   /* $rd = neg($ra) */
   ppc32_jit_start_hreg_seq(cpu,"neg");
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

   iop = ppc32_op_emit_insn_output(cpu,1,"neg");

   if (rd != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_ra,4);

   amd64_neg_reg(iop->ob_ptr,hreg_rd);

   /* Rc bit set: compute host flags from the result for CR0 update */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_rd,hreg_rd,4);

   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2829
/* NOR: $ra = ~($rs | $rb) */
DECLARE_INSN(NOR)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra,hreg_rb;
   jit_op_t *iop;

   /* $ra = ~($rs | $rb) */
   ppc32_jit_start_hreg_seq(cpu,"nor");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"nor");

   /* avoid a register move when the destination aliases a source */
   if (ra == rs)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_rb,4);
   else if (ra == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_rs,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_rb,4);
   }

   amd64_not_reg(iop->ob_ptr,hreg_ra);

   /* Rc bit set: recompute host flags (NOT does not set them) */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2872
/* OR: $ra = $rs | $rb (also encodes "mr" when rs == rb) */
DECLARE_INSN(OR)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra,hreg_rb;
   jit_op_t *iop;

   /* $ra = $rs | $rb */
   ppc32_jit_start_hreg_seq(cpu,"or");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   /* special optimization for move/nop operation:
      "or ra,rs,rs" is the canonical register move (mr), or a nop if
      ra == rs; only one source needs loading */
   if (rs == rb) {
      ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
      iop = ppc32_op_emit_insn_output(cpu,2,"or");

      if (ra != rs)
         amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

      /* Rc bit set: compute host flags for CR0 update */
      if (insn & 1)
         amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

      ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

      if (insn & 1)
         ppc32_op_emit_update_flags(cpu,0,TRUE);

      ppc32_jit_close_hreg_seq(cpu);
      return(0);
   }

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,2,"or");

   /* avoid a register move when the destination aliases a source */
   if (ra == rs) {
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_rb,4);
   } else if (ra == rb)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_rs,4);
   else {
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_rb,4);
   }

   /* OR already set the host flags, no explicit test needed here */
   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2930
/* ORC - OR with Complement: $ra = $rs | ~$rb */
DECLARE_INSN(ORC)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra,hreg_rb,hreg_t0;
   jit_op_t *iop;

   /* $ra = $rs | ~$rb */
   ppc32_jit_start_hreg_seq(cpu,"orc");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);

   iop = ppc32_op_emit_insn_output(cpu,1,"orc");

   /* $t0 = ~$rb (complement in a scratch so $rb is preserved) */
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rb,4);
   amd64_not_reg(iop->ob_ptr,hreg_t0);

   /* $ra = $rs | $t0 */
   if (ra == rs)
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_t0,4);
   else {
      amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_t0,hreg_rs,4);
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_t0,4);
   }

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   /* Rc bit set: CR0 is updated from the flags the OR produced */
   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
2972
/* ORI - OR Immediate: $ra = $rs | uimm (zero-extended, never sets CR0) */
DECLARE_INSN(ORI)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t imm = bits(insn,0,15);
   m_uint32_t tmp = imm;      /* zero-extended 16-bit immediate */
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* $ra = $rs | imm */
   ppc32_jit_start_hreg_seq(cpu,"ori");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,1,"ori");

   if (ra != rs)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   amd64_alu_reg_imm_size(iop->ob_ptr,X86_OR,hreg_ra,tmp,4);
   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3001
/* ORIS - OR Immediate Shifted: $ra = $rs | (uimm << 16), never sets CR0 */
DECLARE_INSN(ORIS)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t imm = bits(insn,0,15);
   m_uint32_t tmp = imm << 16;   /* immediate applies to the high half */
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* $ra = $rs | (imm << 16) */
   ppc32_jit_start_hreg_seq(cpu,"oris");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,1,"oris");

   if (ra != rs)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   amd64_alu_reg_imm_size(iop->ob_ptr,X86_OR,hreg_ra,tmp,4);
   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3030
/* RLWIMI - Rotate Left Word Immediate then Mask Insert:
   $ra = (rotl32($rs,sh) & mask) | ($ra & ~mask) */
DECLARE_INSN(RLWIMI)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int sh = bits(insn,11,15);   /* rotate count */
   int mb = bits(insn,6,10);    /* mask begin bit */
   int me = bits(insn,1,5);     /* mask end bit */
   register m_uint32_t mask;
   int hreg_rs,hreg_ra,hreg_t0;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"rlwimi");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   /* $ra is both source (kept bits) and destination: load it too */
   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

   mask = ppc32_rotate_mask(mb,me);

   iop = ppc32_op_emit_insn_output(cpu,2,"rlwimi");

   /* Apply inverse mask to $ra (skip when mask==0: AND ~0 is a no-op) */
   if (mask != 0)
      amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_ra,~mask);

   /* Rotate $rs of "sh" bits and apply the mask */
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rs,4);

   if (sh != 0)
      amd64_shift_reg_imm_size(iop->ob_ptr,X86_ROL,hreg_t0,sh,4);

   if (mask != 0xFFFFFFFF)
      amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,mask);

   /* Store the result */
   amd64_alu_reg_reg_size(iop->ob_ptr,X86_OR,hreg_ra,hreg_t0,4);
   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   /* Rc bit set: CR0 is updated from the flags of the final OR */
   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3078
/* RLWINM - Rotate Left Word Immediate then AND with Mask:
   $ra = rotl32($rs,sh) & mask(mb,me) */
DECLARE_INSN(RLWINM)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int sh = bits(insn,11,15);   /* rotate count */
   int mb = bits(insn,6,10);    /* mask begin bit */
   int me = bits(insn,1,5);     /* mask end bit */
   register m_uint32_t mask;
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"rlwinm");
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,2,"rlwinm");

   /* Rotate $rs of "sh" bits and apply the mask */
   mask = ppc32_rotate_mask(mb,me);

   if (rs != ra)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   /* rotate/AND are skipped when they would be no-ops */
   if (sh != 0)
      amd64_shift_reg_imm_size(iop->ob_ptr,X86_ROL,hreg_ra,sh,4);

   if (mask != 0xFFFFFFFF)
      amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_ra,mask);

   /* Rc bit set: compute host flags from the result for CR0 update */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3122
/* RLWNM - Rotate Left Word then AND with Mask:
   $ra = rotl32($rs, $rb) & mask(mb,me) */
DECLARE_INSN(RLWNM)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);   /* rotate count comes from a register */
   int mb = bits(insn,6,10);    /* mask begin bit */
   int me = bits(insn,1,5);     /* mask end bit */
   register m_uint32_t mask;
   int hreg_rs,hreg_ra,hreg_t0;
   jit_op_t *iop;

   /* ecx is directly modified: throw it */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RCX);

   /* x86 variable rotates take their count in CL: force RCX */
   ppc32_jit_start_hreg_seq(cpu,"rlwnm");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RCX);

   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,AMD64_RCX,rb);   /* shift count ("sh") */

   iop = ppc32_op_emit_insn_output(cpu,2,"rlwnm");

   mask = ppc32_rotate_mask(mb,me);

   /* Rotate $rs (by CL, taken mod 32 by the 32-bit ROL) and apply mask */
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rs,4);

   amd64_shift_reg_size(iop->ob_ptr,X86_ROL,hreg_t0,4);

   if (mask != 0xFFFFFFFF)
      amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,mask);

   amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_t0,4);

   /* Rc bit set: compute host flags from the result for CR0 update */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3175
/* SLW - Shift Left Word: $ra = $rs << ($rb & 0x3f), zero if count >= 32 */
DECLARE_INSN(SLW)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int rb = bits(insn,11,15);
   int hreg_rs,hreg_ra;
   jit_op_t *iop;

   /* ecx is directly modified: throw it */
   ppc32_op_emit_alter_host_reg(cpu,AMD64_RCX);

   /* x86 variable shifts take their count in CL: force RCX */
   ppc32_jit_start_hreg_seq(cpu,"slw");
   ppc32_jit_alloc_hreg_forced(cpu,AMD64_RCX);
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   /* $ra = $rs << $rb. If count >= 32, then null result */
   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
   ppc32_op_emit_load_gpr(cpu,AMD64_RCX,rb);

   iop = ppc32_op_emit_insn_output(cpu,3,"slw");

   /* keep 6 count bits and use a 64-bit shift: counts 32-63 push all
      significant bits above bit 31, so the stored low word becomes 0 */
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,AMD64_RCX,0x3f);

   if (ra != rs)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);

   amd64_shift_reg(iop->ob_ptr,X86_SHL,hreg_ra);

   /* store the result (Rc bit set: compute host flags for CR0 first) */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3218
/* SRAWI - Shift Right Algebraic Word Immediate:
 * $ra = (int32)$rs >> sh, XER[CA] = (rs < 0) && (bits shifted out != 0)
 */
DECLARE_INSN(SRAWI)
{
   int rs = bits(insn,21,25);
   int ra = bits(insn,16,20);
   int sh = bits(insn,11,15);
   register m_uint32_t mask;
   int hreg_rs,hreg_ra,hreg_t0;
   jit_op_t *iop;

   ppc32_jit_start_hreg_seq(cpu,"srawi");
   hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
   hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);

   /* $ra = (int32)$rs >> sh */
   ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);

   iop = ppc32_op_emit_insn_output(cpu,3,"srawi");
   /* keep an unshifted copy of $rs in $t0 to compute the carry below */
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rs,4);

   if (ra != rs)
      amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
   amd64_shift_reg_imm_size(iop->ob_ptr,X86_SAR,hreg_ra,sh,4);

   /* set XER_CA depending on the result */
   /* mask = sign bit + the sh low-order bits that were shifted out
      (sh == 0 gives mask = 0x80000000, so CA is always cleared) */
   mask = ~(0xFFFFFFFFU << sh) | 0x80000000;

   /* (rs & mask) > 0x80000000 unsigned <=> sign bit set AND at least one
      shifted-out bit set; SETA then yields the CA value (0 or 1) */
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,mask);
   amd64_alu_reg_imm_size(iop->ob_ptr,X86_CMP,hreg_t0,0x80000000,4);
   amd64_set_reg(iop->ob_ptr,X86_CC_A,hreg_t0,FALSE);
   amd64_alu_reg_imm(iop->ob_ptr,X86_AND,hreg_t0,0x1);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         hreg_t0,4);

   /* Rc form: derive host flags from the 32-bit result */
   if (insn & 1)
      amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);

   ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);

   if (insn & 1)
      ppc32_op_emit_update_flags(cpu,0,TRUE);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3265
3266 /* Shift Right Word */
3267 DECLARE_INSN(SRW)
3268 {
3269 int rs = bits(insn,21,25);
3270 int ra = bits(insn,16,20);
3271 int rb = bits(insn,11,15);
3272 int hreg_rs,hreg_ra;
3273 jit_op_t *iop;
3274
3275 /* ecx is directly modified: throw it */
3276 ppc32_op_emit_alter_host_reg(cpu,AMD64_RCX);
3277
3278 ppc32_jit_start_hreg_seq(cpu,"srw");
3279 ppc32_jit_alloc_hreg_forced(cpu,AMD64_RCX);
3280 hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
3281 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
3282
3283 /* $ra = $rs >> $rb. If count >= 32, then null result */
3284 ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
3285 ppc32_op_emit_load_gpr(cpu,AMD64_RCX,rb);
3286
3287 iop = ppc32_op_emit_insn_output(cpu,3,"srw");
3288
3289 amd64_alu_reg_imm(iop->ob_ptr,X86_AND,AMD64_RCX,0x3f);
3290
3291 if (ra != rs)
3292 amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
3293
3294 amd64_shift_reg(iop->ob_ptr,X86_SHR,hreg_ra);
3295
3296 /* store the result */
3297 if (insn & 1)
3298 amd64_test_reg_reg_size(iop->ob_ptr,hreg_ra,hreg_ra,4);
3299
3300 ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);
3301
3302 if (insn & 1)
3303 ppc32_op_emit_update_flags(cpu,0,TRUE);
3304
3305 ppc32_jit_close_hreg_seq(cpu);
3306 return(0);
3307 }
3308
3309 /* STB - Store Byte */
3310 DECLARE_INSN(STB)
3311 {
3312 int rs = bits(insn,21,25);
3313 int ra = bits(insn,16,20);
3314 m_uint16_t offset = bits(insn,0,15);
3315
3316 //ppc32_emit_memop(b,PPC_MEMOP_STB,ra,offset,rs,0);
3317 ppc32_emit_memop_fast(cpu,b,1,PPC_MEMOP_STB,ra,offset,rs,
3318 ppc32_memop_fast_stb);
3319 return(0);
3320 }
3321
3322 /* STBU - Store Byte with Update */
3323 DECLARE_INSN(STBU)
3324 {
3325 int rs = bits(insn,21,25);
3326 int ra = bits(insn,16,20);
3327 m_uint16_t offset = bits(insn,0,15);
3328
3329 ppc32_emit_memop(cpu,b,PPC_MEMOP_STB,ra,offset,rs,1);
3330 return(0);
3331 }
3332
3333 /* STBUX - Store Byte with Update Indexed */
3334 DECLARE_INSN(STBUX)
3335 {
3336 int rs = bits(insn,21,25);
3337 int ra = bits(insn,16,20);
3338 int rb = bits(insn,11,15);
3339
3340 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_STB,ra,rb,rs,1);
3341 return(0);
3342 }
3343
3344 /* STBUX - Store Byte Indexed */
3345 DECLARE_INSN(STBX)
3346 {
3347 int rs = bits(insn,21,25);
3348 int ra = bits(insn,16,20);
3349 int rb = bits(insn,11,15);
3350
3351 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_STB,ra,rb,rs,0);
3352 return(0);
3353 }
3354
3355 /* STH - Store Half-Word */
3356 DECLARE_INSN(STH)
3357 {
3358 int rs = bits(insn,21,25);
3359 int ra = bits(insn,16,20);
3360 m_uint16_t offset = bits(insn,0,15);
3361
3362 ppc32_emit_memop(cpu,b,PPC_MEMOP_STH,ra,offset,rs,0);
3363 return(0);
3364 }
3365
3366 /* STHU - Store Half-Word with Update */
3367 DECLARE_INSN(STHU)
3368 {
3369 int rs = bits(insn,21,25);
3370 int ra = bits(insn,16,20);
3371 m_uint16_t offset = bits(insn,0,15);
3372
3373 ppc32_emit_memop(cpu,b,PPC_MEMOP_STH,ra,offset,rs,1);
3374 return(0);
3375 }
3376
3377 /* STHUX - Store Half-Word with Update Indexed */
3378 DECLARE_INSN(STHUX)
3379 {
3380 int rs = bits(insn,21,25);
3381 int ra = bits(insn,16,20);
3382 int rb = bits(insn,11,15);
3383
3384 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_STH,ra,rb,rs,1);
3385 return(0);
3386 }
3387
3388 /* STHUX - Store Half-Word Indexed */
3389 DECLARE_INSN(STHX)
3390 {
3391 int rs = bits(insn,21,25);
3392 int ra = bits(insn,16,20);
3393 int rb = bits(insn,11,15);
3394
3395 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_STH,ra,rb,rs,0);
3396 return(0);
3397 }
3398
3399 /* STW - Store Word */
3400 DECLARE_INSN(STW)
3401 {
3402 int rs = bits(insn,21,25);
3403 int ra = bits(insn,16,20);
3404 m_uint16_t offset = bits(insn,0,15);
3405
3406 //ppc32_emit_memop(b,PPC_MEMOP_STW,ra,offset,rs,0);
3407 ppc32_emit_memop_fast(cpu,b,1,PPC_MEMOP_STW,ra,offset,rs,
3408 ppc32_memop_fast_stw);
3409 return(0);
3410 }
3411
3412 /* STWU - Store Word with Update */
3413 DECLARE_INSN(STWU)
3414 {
3415 int rs = bits(insn,21,25);
3416 int ra = bits(insn,16,20);
3417 m_uint16_t offset = bits(insn,0,15);
3418
3419 ppc32_emit_memop(cpu,b,PPC_MEMOP_STW,ra,offset,rs,1);
3420 return(0);
3421 }
3422
3423 /* STWUX - Store Word with Update Indexed */
3424 DECLARE_INSN(STWUX)
3425 {
3426 int rs = bits(insn,21,25);
3427 int ra = bits(insn,16,20);
3428 int rb = bits(insn,11,15);
3429
3430 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_STW,ra,rb,rs,1);
3431 return(0);
3432 }
3433
3434 /* STWUX - Store Word Indexed */
3435 DECLARE_INSN(STWX)
3436 {
3437 int rs = bits(insn,21,25);
3438 int ra = bits(insn,16,20);
3439 int rb = bits(insn,11,15);
3440
3441 ppc32_emit_memop_idx(cpu,b,PPC_MEMOP_STW,ra,rb,rs,0);
3442 return(0);
3443 }
3444
3445 /* SUBF - Subtract From */
3446 DECLARE_INSN(SUBF)
3447 {
3448 int rd = bits(insn,21,25);
3449 int ra = bits(insn,16,20);
3450 int rb = bits(insn,11,15);
3451 int hreg_rd,hreg_ra,hreg_rb,hreg_t0;
3452 jit_op_t *iop;
3453
3454 /* $rd = $rb - $ra */
3455 ppc32_jit_start_hreg_seq(cpu,"subf");
3456 hreg_t0 = ppc32_jit_get_tmp_hreg(cpu);
3457
3458 hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
3459 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
3460 hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);
3461
3462 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
3463 ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);
3464
3465 iop = ppc32_op_emit_insn_output(cpu,2,"subf");
3466
3467 if (rd == rb)
3468 amd64_alu_reg_reg_size(iop->ob_ptr,X86_SUB,hreg_rd,hreg_ra,4);
3469 else if (rd == ra) {
3470 amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_rb,4);
3471 amd64_alu_reg_reg_size(iop->ob_ptr,X86_SUB,hreg_t0,hreg_ra,4);
3472 amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_t0,4);
3473 } else {
3474 amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_rb,4);
3475 amd64_alu_reg_reg_size(iop->ob_ptr,X86_SUB,hreg_rd,hreg_ra,4);
3476 }
3477
3478 ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);
3479
3480 if (insn & 1)
3481 ppc32_op_emit_update_flags(cpu,0,TRUE);
3482
3483 ppc32_jit_close_hreg_seq(cpu);
3484 return(0);
3485 }
3486
3487 /* SUBFC - Subtract From Carrying */
3488 DECLARE_INSN(SUBFC)
3489 {
3490 int rd = bits(insn,21,25);
3491 int ra = bits(insn,16,20);
3492 int rb = bits(insn,11,15);
3493 int hreg_ra,hreg_rb,hreg_rd,hreg_t0,hreg_t1;
3494 jit_op_t *iop;
3495
3496 /* $rd = ~$ra + 1 + $rb */
3497 ppc32_jit_start_hreg_seq(cpu,"subfc");
3498 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
3499 hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);
3500 hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
3501
3502 hreg_t0 = ppc32_jit_alloc_hreg(cpu,-1);
3503 hreg_t1 = ppc32_jit_get_tmp_hreg(cpu);
3504
3505 ppc32_op_emit_alter_host_reg(cpu,hreg_t0);
3506 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
3507 ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);
3508
3509 iop = ppc32_op_emit_insn_output(cpu,3,"subfc");
3510
3511 amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t1,hreg_t1);
3512
3513 /* $t0 = ~$ra + 1 */
3514 amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_ra,4);
3515 amd64_not_reg(iop->ob_ptr,hreg_t0);
3516 amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_t0,1,4);
3517 amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
3518 amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
3519 hreg_t1,4);
3520
3521 /* $t0 += $rb */
3522 amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_t0,hreg_rb,4);
3523 amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
3524 amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
3525 AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
3526 hreg_t1,4);
3527
3528 amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_t0,4);
3529
3530 if (insn & 1)
3531 amd64_test_reg_reg_size(iop->ob_ptr,hreg_rd,hreg_rd,4);
3532
3533 ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);
3534
3535 /* update cr0 */
3536 if (insn & 1)
3537 ppc32_update_cr0(b);
3538
3539 ppc32_jit_close_hreg_seq(cpu);
3540 return(0);
3541 }
3542
3543 /* SUBFE - Subtract From Extended */
3544 DECLARE_INSN(SUBFE)
3545 {
3546 int rd = bits(insn,21,25);
3547 int ra = bits(insn,16,20);
3548 int rb = bits(insn,11,15);
3549 int hreg_ra,hreg_rb,hreg_rd,hreg_t0,hreg_t1;
3550 jit_op_t *iop;
3551
3552 /* $rd = ~$ra + $carry (xer_ca) + $rb */
3553 ppc32_jit_start_hreg_seq(cpu,"subfe");
3554 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
3555 hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);
3556 hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);
3557
3558 hreg_t0 = ppc32_jit_alloc_hreg(cpu,-1);
3559 hreg_t1 = ppc32_jit_get_tmp_hreg(cpu);
3560
3561 ppc32_op_emit_alter_host_reg(cpu,hreg_t0);
3562 ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);
3563 ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);
3564
3565 iop = ppc32_op_emit_insn_output(cpu,3,"subfe");
3566
3567 amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t1,hreg_t1);
3568
3569 /* $t0 = ~$ra + $carry */
3570 amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_ra,4);
3571 amd64_not_reg(iop->ob_ptr,hreg_t0);
3572 amd64_alu_reg_membase_size(iop->ob_ptr,X86_ADD,hreg_t0,
3573 AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),4);
3574
3575 amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
3576 amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
3577 hreg_t1,4);
3578
3579 /* $t0 += $rb */
3580 amd64_alu_reg_reg_size(iop->ob_ptr,X86_ADD,hreg_t0,hreg_rb,4);
3581 amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
3582 amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
3583 AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
3584 hreg_t1,4);
3585
3586 amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_t0,4);
3587
3588 if (insn & 1)
3589 amd64_test_reg_reg_size(iop->ob_ptr,hreg_rd,hreg_rd,4);
3590
3591 ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);
3592
3593 /* update cr0 */
3594 if (insn & 1)
3595 ppc32_update_cr0(b);
3596
3597 ppc32_jit_close_hreg_seq(cpu);
3598 return(0);
3599 }
3600
/* SUBFIC - Subtract From Immediate Carrying:
 * $rd = ~$ra + 1 + sign_extend(imm,16), XER[CA] = carry out of the sum
 * (split carry chain: CA = carry(~ra + 1) | carry(+imm))
 */
DECLARE_INSN(SUBFIC)
{
   int rd = bits(insn,21,25);
   int ra = bits(insn,16,20);
   m_uint16_t imm = bits(insn,0,15);
   m_uint32_t tmp = sign_extend_32(imm,16);
   int hreg_ra,hreg_rd,hreg_t0,hreg_t1;
   jit_op_t *iop;

   /* $rd = ~$ra + 1 + sign_extend(imm,16) */
   ppc32_jit_start_hreg_seq(cpu,"subfic");
   hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
   hreg_rd = ppc32_jit_alloc_hreg(cpu,rd);

   /* $t0 accumulates the sum, $t1 captures each carry via SETC */
   hreg_t0 = ppc32_jit_alloc_hreg(cpu,-1);
   hreg_t1 = ppc32_jit_get_tmp_hreg(cpu);

   ppc32_op_emit_alter_host_reg(cpu,hreg_t0);
   ppc32_op_emit_load_gpr(cpu,hreg_ra,ra);

   iop = ppc32_op_emit_insn_output(cpu,3,"subfic");

   amd64_alu_reg_reg(iop->ob_ptr,X86_XOR,hreg_t1,hreg_t1);

   /* $t0 = ~$ra + 1 */
   amd64_mov_reg_reg(iop->ob_ptr,hreg_t0,hreg_ra,4);
   amd64_not_reg(iop->ob_ptr,hreg_t0);
   amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_t0,1,4);

   /* xer_ca = carry out of the first addition */
   amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
   amd64_mov_membase_reg(iop->ob_ptr,AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                         hreg_t1,4);

   /* $t0 += sign_extend(imm,16) */
   amd64_alu_reg_imm_size(iop->ob_ptr,X86_ADD,hreg_t0,tmp,4);
   /* xer_ca |= carry out of the second addition */
   amd64_set_reg(iop->ob_ptr,X86_CC_C,hreg_t1,FALSE);
   amd64_alu_membase_reg_size(iop->ob_ptr,X86_OR,
                              AMD64_R15,OFFSET(cpu_ppc_t,xer_ca),
                              hreg_t1,4);

   /* SUBFIC has no Rc form: no CR0 update here */
   amd64_mov_reg_reg(iop->ob_ptr,hreg_rd,hreg_t0,4);
   ppc32_op_emit_store_gpr(cpu,rd,hreg_rd);

   ppc32_jit_close_hreg_seq(cpu);
   return(0);
}
3648
/* SYNC - Synchronize */
DECLARE_INSN(SYNC)
{
   /* Translated as a no-op: no code is emitted for the memory barrier.
      NOTE(review): presumably safe because the emulator performs guest
      memory accesses in program order — confirm against the memory
      subsystem before relying on it. */
   return(0);
}
3654
3655 /* XOR */
3656 DECLARE_INSN(XOR)
3657 {
3658 int rs = bits(insn,21,25);
3659 int ra = bits(insn,16,20);
3660 int rb = bits(insn,11,15);
3661 int hreg_rs,hreg_ra,hreg_rb;
3662 jit_op_t *iop;
3663
3664 /* $ra = $rs ^ $rb */
3665 ppc32_jit_start_hreg_seq(cpu,"xor");
3666 hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
3667 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
3668 hreg_rb = ppc32_jit_alloc_hreg(cpu,rb);
3669
3670 ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
3671 ppc32_op_emit_load_gpr(cpu,hreg_rb,rb);
3672
3673 iop = ppc32_op_emit_insn_output(cpu,1,"xor");
3674
3675 if (ra == rs)
3676 amd64_alu_reg_reg_size(iop->ob_ptr,X86_XOR,hreg_ra,hreg_rb,4);
3677 else if (ra == rb)
3678 amd64_alu_reg_reg_size(iop->ob_ptr,X86_XOR,hreg_ra,hreg_rs,4);
3679 else {
3680 amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
3681 amd64_alu_reg_reg_size(iop->ob_ptr,X86_XOR,hreg_ra,hreg_rb,4);
3682 }
3683
3684 ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);
3685
3686 if (insn & 1)
3687 ppc32_op_emit_update_flags(cpu,0,TRUE);
3688
3689 ppc32_jit_close_hreg_seq(cpu);
3690 return(0);
3691 }
3692
3693 /* XORI - XOR Immediate */
3694 DECLARE_INSN(XORI)
3695 {
3696 int rs = bits(insn,21,25);
3697 int ra = bits(insn,16,20);
3698 m_uint32_t imm = bits(insn,0,15);
3699 int hreg_rs,hreg_ra;
3700 jit_op_t *iop;
3701
3702 /* $ra = $rs ^ imm */
3703 ppc32_jit_start_hreg_seq(cpu,"xori");
3704 hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
3705 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
3706
3707 ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
3708
3709 iop = ppc32_op_emit_insn_output(cpu,1,"xori");
3710
3711 if (ra != rs)
3712 amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
3713
3714 amd64_alu_reg_imm(iop->ob_ptr,X86_XOR,hreg_ra,imm);
3715 ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);
3716
3717 ppc32_jit_close_hreg_seq(cpu);
3718 return(0);
3719 }
3720
3721 /* XORIS - XOR Immediate Shifted */
3722 DECLARE_INSN(XORIS)
3723 {
3724 int rs = bits(insn,21,25);
3725 int ra = bits(insn,16,20);
3726 m_uint16_t imm = bits(insn,0,15);
3727 m_uint32_t tmp = imm << 16;
3728 int hreg_rs,hreg_ra;
3729 jit_op_t *iop;
3730
3731 /* $ra = $rs ^ (imm << 16) */
3732 ppc32_jit_start_hreg_seq(cpu,"xoris");
3733 hreg_rs = ppc32_jit_alloc_hreg(cpu,rs);
3734 hreg_ra = ppc32_jit_alloc_hreg(cpu,ra);
3735
3736 ppc32_op_emit_load_gpr(cpu,hreg_rs,rs);
3737
3738 iop = ppc32_op_emit_insn_output(cpu,1,"xoris");
3739
3740 if (ra != rs)
3741 amd64_mov_reg_reg(iop->ob_ptr,hreg_ra,hreg_rs,4);
3742
3743 amd64_alu_reg_imm(iop->ob_ptr,X86_XOR,hreg_ra,tmp);
3744 ppc32_op_emit_store_gpr(cpu,ra,hreg_ra);
3745
3746 ppc32_jit_close_hreg_seq(cpu);
3747 return(0);
3748 }
3749
/* PPC instruction array */
/*
 * Decode table: an opcode matches an entry when (insn & mask) == value;
 * entries are listed so that more specific masks precede more general
 * ones (e.g. BLR/BCTR before BCLR, BCC before BC).  The
 * ppc32_emit_unknown entry (mask 0x00000000) catches any opcode not
 * handled above it, and the NULL entry terminates the array.
 * Columns: emit handler, mask, value.
 */
struct ppc32_insn_tag ppc32_insn_tags[] = {
   { ppc32_emit_BLR        , 0xfffffffe , 0x4e800020 },
   { ppc32_emit_BCTR       , 0xfffffffe , 0x4e800420 },
   { ppc32_emit_MFLR       , 0xfc1fffff , 0x7c0802a6 },
   { ppc32_emit_MTLR       , 0xfc1fffff , 0x7c0803a6 },
   { ppc32_emit_MFCTR      , 0xfc1fffff , 0x7c0902a6 },
   { ppc32_emit_MTCTR      , 0xfc1fffff , 0x7c0903a6 },
   { ppc32_emit_MFTBL      , 0xfc1ff7ff , 0x7c0c42e6 },
   { ppc32_emit_MFTBU      , 0xfc1ff7ff , 0x7c0d42e6 },
   { ppc32_emit_ADD        , 0xfc0007fe , 0x7c000214 },
   { ppc32_emit_ADDC       , 0xfc0007fe , 0x7c000014 },
   { ppc32_emit_ADDE       , 0xfc0007fe , 0x7c000114 },
   { ppc32_emit_ADDI       , 0xfc000000 , 0x38000000 },
   { ppc32_emit_ADDIC      , 0xfc000000 , 0x30000000 },
   { ppc32_emit_ADDIC_dot  , 0xfc000000 , 0x34000000 },
   { ppc32_emit_ADDIS      , 0xfc000000 , 0x3c000000 },
   { ppc32_emit_ADDZE      , 0xfc00fffe , 0x7c000194 },
   { ppc32_emit_AND        , 0xfc0007fe , 0x7c000038 },
   { ppc32_emit_ANDC       , 0xfc0007fe , 0x7c000078 },
   { ppc32_emit_ANDI       , 0xfc000000 , 0x70000000 },
   { ppc32_emit_ANDIS      , 0xfc000000 , 0x74000000 },
   { ppc32_emit_B          , 0xfc000003 , 0x48000000 },
   { ppc32_emit_BA         , 0xfc000003 , 0x48000002 },
   { ppc32_emit_BL         , 0xfc000003 , 0x48000001 },
   { ppc32_emit_BLA        , 0xfc000003 , 0x48000003 },
   { ppc32_emit_BCC        , 0xfe800000 , 0x40800000 },
   { ppc32_emit_BC         , 0xfc000000 , 0x40000000 },
   { ppc32_emit_BCLR       , 0xfc00fffe , 0x4c000020 },
   { ppc32_emit_CMP        , 0xfc6007ff , 0x7c000000 },
   { ppc32_emit_CMPI       , 0xfc600000 , 0x2c000000 },
   { ppc32_emit_CMPL       , 0xfc6007ff , 0x7c000040 },
   { ppc32_emit_CMPLI      , 0xfc600000 , 0x28000000 },
   { ppc32_emit_CRAND      , 0xfc0007ff , 0x4c000202 },
   { ppc32_emit_CRANDC     , 0xfc0007ff , 0x4c000102 },
   { ppc32_emit_CREQV      , 0xfc0007ff , 0x4c000242 },
   { ppc32_emit_CRNAND     , 0xfc0007ff , 0x4c0001c2 },
   { ppc32_emit_CRNOR      , 0xfc0007ff , 0x4c000042 },
   { ppc32_emit_CROR       , 0xfc0007ff , 0x4c000382 },
   { ppc32_emit_CRORC      , 0xfc0007ff , 0x4c000342 },
   { ppc32_emit_CRXOR      , 0xfc0007ff , 0x4c000182 },
   { ppc32_emit_DIVWU      , 0xfc0007fe , 0x7c000396 },
   { ppc32_emit_EQV        , 0xfc0007fe , 0x7c000238 },
   { ppc32_emit_EXTSB      , 0xfc00fffe , 0x7c000774 },
   { ppc32_emit_EXTSH      , 0xfc00fffe , 0x7c000734 },
   { ppc32_emit_LBZ        , 0xfc000000 , 0x88000000 },
   { ppc32_emit_LBZU       , 0xfc000000 , 0x8c000000 },
   { ppc32_emit_LBZUX      , 0xfc0007ff , 0x7c0000ee },
   { ppc32_emit_LBZX       , 0xfc0007ff , 0x7c0000ae },
   { ppc32_emit_LHA        , 0xfc000000 , 0xa8000000 },
   { ppc32_emit_LHAU       , 0xfc000000 , 0xac000000 },
   { ppc32_emit_LHAUX      , 0xfc0007ff , 0x7c0002ee },
   { ppc32_emit_LHAX       , 0xfc0007ff , 0x7c0002ae },
   { ppc32_emit_LHZ        , 0xfc000000 , 0xa0000000 },
   { ppc32_emit_LHZU       , 0xfc000000 , 0xa4000000 },
   { ppc32_emit_LHZUX      , 0xfc0007ff , 0x7c00026e },
   { ppc32_emit_LHZX       , 0xfc0007ff , 0x7c00022e },
   { ppc32_emit_LWZ        , 0xfc000000 , 0x80000000 },
   { ppc32_emit_LWZU       , 0xfc000000 , 0x84000000 },
   { ppc32_emit_LWZUX      , 0xfc0007ff , 0x7c00006e },
   { ppc32_emit_LWZX       , 0xfc0007ff , 0x7c00002e },
   { ppc32_emit_MCRF       , 0xfc63ffff , 0x4c000000 },
   { ppc32_emit_MFCR       , 0xfc1fffff , 0x7c000026 },
   { ppc32_emit_MFMSR      , 0xfc1fffff , 0x7c0000a6 },
   { ppc32_emit_MFSR       , 0xfc10ffff , 0x7c0004a6 },
   { ppc32_emit_MTCRF      , 0xfc100fff , 0x7c000120 },
   { ppc32_emit_MULHW      , 0xfc0007fe , 0x7c000096 },
   { ppc32_emit_MULHWU     , 0xfc0007fe , 0x7c000016 },
   { ppc32_emit_MULLI      , 0xfc000000 , 0x1c000000 },
   { ppc32_emit_MULLW      , 0xfc0007fe , 0x7c0001d6 },
   { ppc32_emit_NAND       , 0xfc0007fe , 0x7c0003b8 },
   { ppc32_emit_NEG        , 0xfc00fffe , 0x7c0000d0 },
   { ppc32_emit_NOR        , 0xfc0007fe , 0x7c0000f8 },
   { ppc32_emit_OR         , 0xfc0007fe , 0x7c000378 },
   { ppc32_emit_ORC        , 0xfc0007fe , 0x7c000338 },
   { ppc32_emit_ORI        , 0xfc000000 , 0x60000000 },
   { ppc32_emit_ORIS       , 0xfc000000 , 0x64000000 },
   { ppc32_emit_RLWIMI     , 0xfc000000 , 0x50000000 },
   { ppc32_emit_RLWINM     , 0xfc000000 , 0x54000000 },
   { ppc32_emit_RLWNM      , 0xfc000000 , 0x5c000000 },
   { ppc32_emit_SLW        , 0xfc0007fe , 0x7c000030 },
   { ppc32_emit_SRAWI      , 0xfc0007fe , 0x7c000670 },
   { ppc32_emit_SRW        , 0xfc0007fe , 0x7c000430 },
   { ppc32_emit_STB        , 0xfc000000 , 0x98000000 },
   { ppc32_emit_STBU       , 0xfc000000 , 0x9c000000 },
   { ppc32_emit_STBUX      , 0xfc0007ff , 0x7c0001ee },
   { ppc32_emit_STBX       , 0xfc0007ff , 0x7c0001ae },
   { ppc32_emit_STH        , 0xfc000000 , 0xb0000000 },
   { ppc32_emit_STHU       , 0xfc000000 , 0xb4000000 },
   { ppc32_emit_STHUX      , 0xfc0007ff , 0x7c00036e },
   { ppc32_emit_STHX       , 0xfc0007ff , 0x7c00032e },
   { ppc32_emit_STW        , 0xfc000000 , 0x90000000 },
   { ppc32_emit_STWU       , 0xfc000000 , 0x94000000 },
   { ppc32_emit_STWUX      , 0xfc0007ff , 0x7c00016e },
   { ppc32_emit_STWX       , 0xfc0007ff , 0x7c00012e },
   { ppc32_emit_SUBF       , 0xfc0007fe , 0x7c000050 },
   { ppc32_emit_SUBFC      , 0xfc0007fe , 0x7c000010 },
   { ppc32_emit_SUBFE      , 0xfc0007fe , 0x7c000110 },
   { ppc32_emit_SUBFIC     , 0xfc000000 , 0x20000000 },
   { ppc32_emit_SYNC       , 0xffffffff , 0x7c0004ac },
   { ppc32_emit_XOR        , 0xfc0007fe , 0x7c000278 },
   { ppc32_emit_XORI       , 0xfc000000 , 0x68000000 },
   { ppc32_emit_XORIS      , 0xfc000000 , 0x6c000000 },
   { ppc32_emit_unknown    , 0x00000000 , 0x00000000 },
   { NULL                  , 0x00000000 , 0x00000000 },
};

  ViewVC Help
Powered by ViewVC 1.1.26