132 |
x86_mov_memindex_reg(b->jit_ptr,X86_EAX,0,X86_EBX,0,X86_EDX,4); |
x86_mov_memindex_reg(b->jit_ptr,X86_EAX,0,X86_EBX,0,X86_EDX,4); |
133 |
} |
} |
134 |
|
|
135 |
/* Fast memory operation */ |
/* Fast memory operation (64-bit) */ |
136 |
static void mips64_emit_memop_fast(insn_block_t *b,int op, |
static void mips64_emit_memop_fast64(insn_block_t *b,int op, |
137 |
int base,int offset, |
int base,int offset, |
138 |
int target,int keep_ll_bit, |
int target,int keep_ll_bit, |
139 |
memop_fast_access op_handler) |
memop_fast_access op_handler) |
140 |
{ |
{ |
141 |
m_uint64_t val = sign_extend(offset,16); |
m_uint64_t val = sign_extend(offset,16); |
142 |
u_char *test1,*test2,*test3,*test4; |
u_char *test1,*test2,*test3,*test4; |
154 |
x86_shift_reg_imm(b->jit_ptr,X86_SHR,X86_EAX,MTS64_HASH_SHIFT); |
x86_shift_reg_imm(b->jit_ptr,X86_SHR,X86_EAX,MTS64_HASH_SHIFT); |
155 |
x86_alu_reg_imm(b->jit_ptr,X86_AND,X86_EAX,MTS64_HASH_MASK); |
x86_alu_reg_imm(b->jit_ptr,X86_AND,X86_EAX,MTS64_HASH_MASK); |
156 |
|
|
157 |
/* EDX = mts64_cache */ |
/* EDX = mts_cache */ |
158 |
x86_mov_reg_membase(b->jit_ptr,X86_EDX, |
x86_mov_reg_membase(b->jit_ptr,X86_EDX, |
159 |
X86_EDI,OFFSET(cpu_mips_t,mts64_cache),4); |
X86_EDI,OFFSET(cpu_mips_t,mts_cache),4); |
160 |
|
|
161 |
/* ESI = mts64_entry */ |
/* ESI = mts64_entry */ |
162 |
x86_mov_reg_memindex(b->jit_ptr,X86_ESI,X86_EDX,0,X86_EAX,2,4); |
x86_mov_reg_memindex(b->jit_ptr,X86_ESI,X86_EDX,0,X86_EAX,2,4); |
236 |
x86_patch(p_exception,b->jit_ptr); |
x86_patch(p_exception,b->jit_ptr); |
237 |
} |
} |
238 |
|
|
239 |
|
/* Fast memory operation (32-bit): emit inline x86 code that performs a
 * fast MTS (memory translation system) cache lookup for a 32-bit virtual
 * address and, on a hit to a RAM-backed entry, runs op_handler directly;
 * on a miss or a device access it falls back to the generic memory
 * function through the CPU's memop table.
 *
 * Register conventions (established by the emitted code below):
 *   EDI = cpu_mips_t instance pointer (callee context register)
 *   EBX = virtual address (GPR[base] + sign-extended offset)
 *   ESI = matched mts32_entry pointer on the fast path
 *
 * Parameters:
 *   b           - instruction block receiving the emitted code
 *   op          - memory operation index into the CPU memop table
 *   base        - GPR number holding the base address
 *   offset      - 16-bit signed displacement from the instruction
 *   target      - GPR number to load into / store from
 *   keep_ll_bit - NOTE(review): accepted for interface parity with the
 *                 64-bit variant but not referenced in this body; the
 *                 LL bit is therefore never cleared here — confirm this
 *                 is intentional for the 32-bit path.
 *   op_handler  - emits the actual fast-path load/store code
 */
static void mips64_emit_memop_fast32(insn_block_t *b,int op,
                                     int base,int offset,
                                     int target,int keep_ll_bit,
                                     memop_fast_access op_handler)
{
   m_uint32_t val = sign_extend(offset,16);
   u_char *test1,*test2,*test3;
   u_char *p_exception,*p_exit;

   /* EBX = sign-extended offset */
   x86_mov_reg_imm(b->jit_ptr,X86_EBX,val);

   /* EBX = GPR[base] + sign-extended offset */
   x86_alu_reg_membase(b->jit_ptr,X86_ADD,X86_EBX,X86_EDI,REG_OFFSET(base));

   /* EAX = mts32_entry index (hash of the virtual address) */
   x86_mov_reg_reg(b->jit_ptr,X86_EAX,X86_EBX,4);
   x86_shift_reg_imm(b->jit_ptr,X86_SHR,X86_EAX,MTS32_HASH_SHIFT);
   x86_alu_reg_imm(b->jit_ptr,X86_AND,X86_EAX,MTS32_HASH_MASK);

   /* EDX = mts_cache (base of the MTS cache array) */
   x86_mov_reg_membase(b->jit_ptr,X86_EDX,
                       X86_EDI,OFFSET(cpu_mips_t,mts_cache),4);

   /* ESI = mts32_entry (scale 2^2 = 4 bytes per pointer slot) */
   x86_mov_reg_memindex(b->jit_ptr,X86_ESI,X86_EDX,0,X86_EAX,2,4);
   x86_test_reg_reg(b->jit_ptr,X86_ESI,X86_ESI); /* NULL -> slow lookup */
   test1 = b->jit_ptr;
   x86_branch8(b->jit_ptr, X86_CC_Z, 0, 1);

   /* ECX = entry mask, compare the virtual addresses */
   x86_mov_reg_membase(b->jit_ptr,X86_ECX,
                       X86_ESI,OFFSET(mts32_entry_t,mask),4);
   x86_alu_reg_reg(b->jit_ptr,X86_AND,X86_ECX,X86_EBX);
   x86_alu_reg_membase(b->jit_ptr,X86_CMP,X86_ECX,X86_ESI,
                       OFFSET(mts32_entry_t,start));
   test2 = b->jit_ptr;
   x86_branch8(b->jit_ptr, X86_CC_NZ, 0, 1);

   /* Ok, we have the good entry. Test if this is a device */
   x86_mov_reg_membase(b->jit_ptr,X86_EAX,
                       X86_ESI,OFFSET(mts32_entry_t,action),4);
   x86_mov_reg_reg(b->jit_ptr,X86_EDX,X86_EAX,4);
   x86_alu_reg_imm(b->jit_ptr,X86_AND,X86_EDX,MTS_DEV_MASK);
   test3 = b->jit_ptr;
   x86_branch8(b->jit_ptr, X86_CC_NZ, 0, 1);

   /* EAX = action (host address bits, device bits stripped) */
   x86_alu_reg_imm(b->jit_ptr,X86_AND,X86_EAX,MTS_ADDR_MASK);

   /* Compute offset of the access within the entry */
   x86_alu_reg_membase(b->jit_ptr,X86_SUB,X86_EBX,
                       X86_ESI,OFFSET(mts32_entry_t,start));

   /* Memory access (fast path emitted by the caller-supplied handler) */
   op_handler(b,target);

   /* Fast path done: skip over the slow-lookup code */
   p_exit = b->jit_ptr;
   x86_jump8(b->jit_ptr,0);

   /* === Slow lookup === */
   x86_patch(test1,b->jit_ptr);
   x86_patch(test2,b->jit_ptr);
   x86_patch(test3,b->jit_ptr);

   /* Update PC (EBX = vaddr) so exception handling sees the right PC */
   mips64_set_pc(b,b->start_pc+((b->mips_trans_pos-1)<<2));

   /* Sign-extend virtual address and put vaddr in ECX:EDX */
   x86_mov_reg_reg(b->jit_ptr,X86_EAX,X86_EBX,4);
   x86_cdq(b->jit_ptr);
   x86_mov_reg_reg(b->jit_ptr,X86_ECX,X86_EDX,4);
   x86_mov_reg_reg(b->jit_ptr,X86_EDX,X86_EAX,4);

   /* EBX = target register */
   x86_mov_reg_imm(b->jit_ptr,X86_EBX,target);

   /* EAX = CPU instance pointer */
   x86_mov_reg_reg(b->jit_ptr,X86_EAX,X86_EDI,4);

   /*
    * Push parameters on stack and call memory function.
    * Keep the stack aligned on a 16-byte boundary for Darwin/x86.
    */
   x86_alu_reg_imm(b->jit_ptr,X86_SUB,X86_ESP,8);
   x86_push_reg(b->jit_ptr,X86_EBX);
   x86_call_membase(b->jit_ptr,X86_EDI,MEMOP_OFFSET(op));
   x86_alu_reg_imm(b->jit_ptr,X86_ADD,X86_ESP,12);

   /* Check for exception (non-zero return from the memory function) */
   x86_test_reg_reg(b->jit_ptr,X86_EAX,X86_EAX);
   p_exception = b->jit_ptr;
   x86_branch8(b->jit_ptr, X86_CC_Z, 0, 1);
   insn_block_push_epilog(b);

   /* Both paths converge here */
   x86_patch(p_exit,b->jit_ptr);
   x86_patch(p_exception,b->jit_ptr);
}
338 |
|
|
339 |
|
/* Fast memory operation */ |
340 |
|
static void mips64_emit_memop_fast(cpu_mips_t *cpu,insn_block_t *b,int op, |
341 |
|
int base,int offset, |
342 |
|
int target,int keep_ll_bit, |
343 |
|
memop_fast_access op_handler) |
344 |
|
{ |
345 |
|
switch(cpu->addr_mode) { |
346 |
|
case 32: |
347 |
|
mips64_emit_memop_fast32(b,op,base,offset,target,keep_ll_bit, |
348 |
|
op_handler); |
349 |
|
break; |
350 |
|
case 64: |
351 |
|
mips64_emit_memop_fast64(b,op,base,offset,target,keep_ll_bit, |
352 |
|
op_handler); |
353 |
|
break; |
354 |
|
} |
355 |
|
} |
356 |
|
|
357 |
/* Memory operation */ |
/* Memory operation */ |
358 |
static void mips64_emit_memop(insn_block_t *b,int op,int base,int offset, |
static void mips64_emit_memop(insn_block_t *b,int op,int base,int offset, |
359 |
int target,int keep_ll_bit) |
int target,int keep_ll_bit) |
1962 |
int offset = bits(insn,0,15); |
int offset = bits(insn,0,15); |
1963 |
|
|
1964 |
if (cpu->fast_memop) { |
if (cpu->fast_memop) { |
1965 |
mips64_emit_memop_fast(b,MIPS_MEMOP_LW,base,offset,rt,TRUE, |
mips64_emit_memop_fast(cpu,b,MIPS_MEMOP_LW,base,offset,rt,TRUE, |
1966 |
mips64_memop_fast_lw); |
mips64_memop_fast_lw); |
1967 |
} else { |
} else { |
1968 |
mips64_emit_memop(b,MIPS_MEMOP_LW,base,offset,rt,TRUE); |
mips64_emit_memop(b,MIPS_MEMOP_LW,base,offset,rt,TRUE); |
2677 |
int offset = bits(insn,0,15); |
int offset = bits(insn,0,15); |
2678 |
|
|
2679 |
if (cpu->fast_memop) { |
if (cpu->fast_memop) { |
2680 |
mips64_emit_memop_fast(b,MIPS_MEMOP_SW,base,offset,rt,FALSE, |
mips64_emit_memop_fast(cpu,b,MIPS_MEMOP_SW,base,offset,rt,FALSE, |
2681 |
mips64_memop_fast_sw); |
mips64_memop_fast_sw); |
2682 |
} else { |
} else { |
2683 |
mips64_emit_memop(b,MIPS_MEMOP_SW,base,offset,rt,FALSE); |
mips64_emit_memop(b,MIPS_MEMOP_SW,base,offset,rt,FALSE); |