
Diff of /upstream/dynamips-0.2.6-RC3/amd64_trans.c

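Summary of the change: revision 4 renames the existing fast memory access emitter to mips64_emit_memop_fast64, adds a parallel 32-bit variant mips64_emit_memop_fast32, and introduces a small mips64_emit_memop_fast() wrapper that dispatches between the two on cpu->addr_mode. The cpu_mips_t cache field is renamed from mts64_cache to the generic mts_cache (the hunks below show both variants reading the same field), and the LW/SW emitters are updated to pass the cpu pointer through to the new dispatcher.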

--- upstream/dynamips-0.2.6-RC2/amd64_trans.c	(revision 3 by dpavlin, Sat Oct 6 16:05:34 2007 UTC)
+++ upstream/dynamips-0.2.6-RC3/amd64_trans.c	(revision 4 by dpavlin, Sat Oct 6 16:06:49 2007 UTC)
@@ -128,11 +128,11 @@ static void mips64_memop_fast_sw(insn_bl
    amd64_mov_memindex_reg(b->jit_ptr,AMD64_RBX,0,AMD64_RSI,0,AMD64_RAX,4);
 }
 
-/* Fast memory operation */
-static void mips64_emit_memop_fast(insn_block_t *b,int op,
-                                   int base,int offset,
-                                   int target,int keep_ll_bit,
-                                   memop_fast_access op_handler)
+/* Fast memory operation (64-bit) */
+static void mips64_emit_memop_fast64(insn_block_t *b,int op,
+                                     int base,int offset,
+                                     int target,int keep_ll_bit,
+                                     memop_fast_access op_handler)
 {
    m_uint64_t val = sign_extend(offset,16);
    u_char *test1,*test2,*test3;
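Both variants widen the 16-bit displacement with sign_extend(offset,16), which is defined elsewhere in the tree. A minimal sketch of such a helper, assuming the usual shift-pair idiom and dynamips' m_int64_t typedef (this version is an assumption, not code from this file):

   /* Sketch only: replicate bit (len-1) across the upper bits.
      Relies on arithmetic right shift of signed values, as most
      compilers provide. */
   static inline m_int64_t sign_extend(m_int64_t x,int len)
   {
      len = 64 - len;
      return (x << len) >> len;
   }

With len = 16, bit 15 of the immediate is copied across bits 16-63; the 32-bit variant then truncates the result by storing it into an m_uint32_t.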
@@ -151,9 +151,9 @@ static void mips64_emit_memop_fast(insn_
    amd64_shift_reg_imm(b->jit_ptr,X86_SHR,AMD64_RBX,MTS64_HASH_SHIFT);
    amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_RBX,MTS64_HASH_MASK);
 
-   /* RCX = mts64_cache */
+   /* RCX = mts_cache */
    amd64_mov_reg_membase(b->jit_ptr,AMD64_RCX,
-                         AMD64_R15,OFFSET(cpu_mips_t,mts64_cache),8);
+                         AMD64_R15,OFFSET(cpu_mips_t,mts_cache),8);
 
    /* RAX = mts64_entry */
    amd64_mov_reg_memindex(b->jit_ptr,AMD64_RAX,AMD64_RCX,0,AMD64_RBX,3,8);
@@ -219,6 +219,117 @@ static void mips64_emit_memop_fast(insn_
    amd64_patch(p_exception,b->jit_ptr);
 }
 
+/* Fast memory operation (32-bit) */
+static void mips64_emit_memop_fast32(insn_block_t *b,int op,
+                                     int base,int offset,
+                                     int target,int keep_ll_bit,
+                                     memop_fast_access op_handler)
+{
+   m_uint32_t val = sign_extend(offset,16);
+   u_char *test1,*test2,*test3;
+   u_char *p_exception,*p_exit;
+
+   /* RDI = CPU instance */
+   amd64_mov_reg_reg(b->jit_ptr,AMD64_RDI,AMD64_R15,8);
+
+   /* ESI = GPR[base] + sign-extended offset */
+   amd64_mov_reg_imm(b->jit_ptr,X86_ESI,val);
+   amd64_alu_reg_membase_size(b->jit_ptr,X86_ADD,
+                              X86_ESI,AMD64_RDI,REG_OFFSET(base),4);
+
+   /* EBX = mts32_entry index */
+   amd64_mov_reg_reg_size(b->jit_ptr,X86_EBX,X86_ESI,4);
+   amd64_shift_reg_imm_size(b->jit_ptr,X86_SHR,X86_EBX,MTS64_HASH_SHIFT,4);
+   amd64_alu_reg_imm_size(b->jit_ptr,X86_AND,X86_EBX,MTS64_HASH_MASK,4);
+
+   /* RCX = mts_cache */
+   amd64_mov_reg_membase(b->jit_ptr,AMD64_RCX,
+                         AMD64_R15,OFFSET(cpu_mips_t,mts_cache),8);
+
+   /* RAX = mts32_entry */
+   amd64_mov_reg_memindex(b->jit_ptr,AMD64_RAX,AMD64_RCX,0,AMD64_RBX,3,8);
+
+   /* Do we have a non-null entry ? */
+   amd64_test_reg_reg_size(b->jit_ptr,AMD64_RAX,AMD64_RAX,8);
+   test1 = b->jit_ptr;
+   amd64_branch8(b->jit_ptr, X86_CC_Z, 0, 1);
+
+   /* ECX = start */
+   amd64_mov_reg_membase(b->jit_ptr,X86_ECX,
+                         AMD64_RAX,OFFSET(mts32_entry_t,start),4);
+
+   /* EDX = mask, RBX = action */
+   amd64_mov_reg_membase(b->jit_ptr,X86_EDX,
+                         AMD64_RAX,OFFSET(mts32_entry_t,mask),4);
+   amd64_mov_reg_membase(b->jit_ptr,AMD64_RBX,
+                         AMD64_RAX,OFFSET(mts32_entry_t,action),8);
+   amd64_alu_reg_reg_size(b->jit_ptr,X86_AND,X86_EDX,X86_ESI,4);
+
+   /* Virtual Address in the good range ? */
+   amd64_alu_reg_reg_size(b->jit_ptr,X86_CMP,X86_EDX,X86_ECX,4);
+   test2 = b->jit_ptr;
+   amd64_branch8(b->jit_ptr, X86_CC_NE, 0, 1);
+
+   /* Device access ? */
+   amd64_mov_reg_reg(b->jit_ptr,AMD64_R8,AMD64_RBX,8);
+   amd64_alu_reg_imm(b->jit_ptr,X86_AND,AMD64_R8,MTS_DEV_MASK);
+   test3 = b->jit_ptr;
+   amd64_branch8(b->jit_ptr, X86_CC_NZ, 0, 1);
+
+   /* === Fast access === */
+   amd64_alu_reg_reg_size(b->jit_ptr,X86_SUB,X86_ESI,X86_ECX,4);
+
+   /* Memory access */
+   op_handler(b,target);
+
+   p_exit = b->jit_ptr;
+   amd64_jump8(b->jit_ptr,0);
+
+   /* === Slow lookup === */
+   amd64_patch(test1,b->jit_ptr);
+   amd64_patch(test2,b->jit_ptr);
+   amd64_patch(test3,b->jit_ptr);
+
+   /* Save PC for exception handling */
+   mips64_set_pc(b,b->start_pc+((b->mips_trans_pos-1)<<2));
+
+   /* Sign-extend virtual address */
+   amd64_movsxd_reg_reg(b->jit_ptr,AMD64_RSI,X86_ESI);
+
+   /* RDX = target register */
+   amd64_mov_reg_imm(b->jit_ptr,AMD64_RDX,target);
+
+   /* Call memory access function */
+   amd64_call_membase(b->jit_ptr,AMD64_RDI,MEMOP_OFFSET(op));
+
+   /* Exception ? */
+   amd64_test_reg_reg_size(b->jit_ptr,AMD64_RAX,AMD64_RAX,4);
+   p_exception = b->jit_ptr;
+   amd64_branch8(b->jit_ptr, X86_CC_Z, 0, 1);
+   insn_block_push_epilog(b);
+
+   amd64_patch(p_exit,b->jit_ptr);
+   amd64_patch(p_exception,b->jit_ptr);
+}
+
+/* Fast memory operation */
+static void mips64_emit_memop_fast(cpu_mips_t *cpu,insn_block_t *b,int op,
+                                   int base,int offset,
+                                   int target,int keep_ll_bit,
+                                   memop_fast_access op_handler)
+{
+   switch(cpu->addr_mode) {
+      case 32:
+         mips64_emit_memop_fast32(b,op,base,offset,target,keep_ll_bit,
+                                  op_handler);
+         break;
+      case 64:
+         mips64_emit_memop_fast64(b,op,base,offset,target,keep_ll_bit,
+                                  op_handler);
+         break;
+   }
+}
+
 /* Memory operation */
 static void mips64_emit_memop(insn_block_t *b,int op,int base,int offset,
                               int target,int keep_ll_bit)
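Read back as plain C, the hashed lookup that both fast paths emit (RBX = hash, RCX = cache base, RAX = entry) is roughly the following. This is a sketch for orientation only; mts_lookup is a name invented here, not a function in the file, and the entry type is mts32_entry_t or mts64_entry_t depending on addr_mode:

   /* Sketch: hash the virtual address and index the MTS cache.
      The emitted code then rejects the fast path on a NULL entry,
      a start/mask mismatch, or an entry with MTS_DEV_MASK set. */
   static inline void *mts_lookup(cpu_mips_t *cpu,m_uint64_t vaddr)
   {
      m_uint32_t h = (vaddr >> MTS64_HASH_SHIFT) & MTS64_HASH_MASK;
      return ((void **)cpu->mts_cache)[h];
   }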
@@ -1674,7 +1785,7 @@ static int mips64_emit_LW(cpu_mips_t *cp
    int offset = bits(insn,0,15);
 
    if (cpu->fast_memop) {
-      mips64_emit_memop_fast(b,MIPS_MEMOP_LW,base,offset,rt,TRUE,
+      mips64_emit_memop_fast(cpu,b,MIPS_MEMOP_LW,base,offset,rt,TRUE,
                              mips64_memop_fast_lw);
    } else {
       mips64_emit_memop(b,MIPS_MEMOP_LW,base,offset,rt,TRUE);
@@ -2288,7 +2399,7 @@ static int mips64_emit_SW(cpu_mips_t *cp
    int offset = bits(insn,0,15);
 
    if (cpu->fast_memop) {
-      mips64_emit_memop_fast(b,MIPS_MEMOP_SW,base,offset,rt,FALSE,
+      mips64_emit_memop_fast(cpu,b,MIPS_MEMOP_SW,base,offset,rt,FALSE,
                              mips64_memop_fast_sw);
    } else {
       mips64_emit_memop(b,MIPS_MEMOP_SW,base,offset,rt,FALSE);
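Any other load/store emitter that opts into the fast path would follow the same pattern as the LW/SW updates above. A hypothetical halfword-load example, assuming a MIPS_MEMOP_LH constant alongside MIPS_MEMOP_LW/SW; mips64_memop_fast_lh does not appear in this diff and is invented purely for illustration:

   /* Hypothetical example, not part of this diff. */
   static int mips64_emit_LH(cpu_mips_t *cpu,insn_block_t *b,mips_insn_t insn)
   {
      int base   = bits(insn,21,25);
      int rt     = bits(insn,16,20);
      int offset = bits(insn,0,15);

      if (cpu->fast_memop) {
         /* cpu is now threaded through so the dispatcher can pick
            the 32- or 64-bit variant from cpu->addr_mode. */
         mips64_emit_memop_fast(cpu,b,MIPS_MEMOP_LH,base,offset,rt,TRUE,
                                mips64_memop_fast_lh);
      } else {
         mips64_emit_memop(b,MIPS_MEMOP_LH,base,offset,rt,TRUE);
      }
      return(0);
   }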
