/[dynamips]/trunk/amd64-codegen.h

Annotation of /trunk/amd64-codegen.h



Revision 12
Sat Oct 6 16:45:40 2007 UTC (16 years, 5 months ago) by dpavlin
File MIME type: text/plain
File size: 61012 byte(s)
make working copy

/*
 * amd64-codegen.h: Macros for generating amd64 code
 *
 * Authors:
 *   Paolo Molaro (lupus@ximian.com)
 *   Intel Corporation (ORP Project)
 *   Sergey Chaban (serge@wildwestsoftware.com)
 *   Dietmar Maurer (dietmar@ximian.com)
 *   Patrik Torstensson
 *   Zalman Stern
 *
 * Copyright (C) 2000 Intel Corporation. All rights reserved.
 * Copyright (C) 2001, 2002 Ximian, Inc.
 */

#ifndef AMD64_H
#define AMD64_H

typedef enum {
        AMD64_RAX = 0,
        AMD64_RCX = 1,
        AMD64_RDX = 2,
        AMD64_RBX = 3,
        AMD64_RSP = 4,
        AMD64_RBP = 5,
        AMD64_RSI = 6,
        AMD64_RDI = 7,
        AMD64_R8 = 8,
        AMD64_R9 = 9,
        AMD64_R10 = 10,
        AMD64_R11 = 11,
        AMD64_R12 = 12,
        AMD64_R13 = 13,
        AMD64_R14 = 14,
        AMD64_R15 = 15,
        AMD64_RIP = 16,
        AMD64_NREG
} AMD64_Reg_No;

typedef enum {
        AMD64_XMM0 = 0,
        AMD64_XMM1 = 1,
        AMD64_XMM2 = 2,
        AMD64_XMM3 = 3,
        AMD64_XMM4 = 4,
        AMD64_XMM5 = 5,
        AMD64_XMM6 = 6,
        AMD64_XMM7 = 7,
        AMD64_XMM8 = 8,
        AMD64_XMM9 = 9,
        AMD64_XMM10 = 10,
        AMD64_XMM11 = 11,
        AMD64_XMM12 = 12,
        AMD64_XMM13 = 13,
        AMD64_XMM14 = 14,
        AMD64_XMM15 = 15,
        AMD64_XMM_NREG = 16,
} AMD64_XMM_Reg_No;

typedef enum
{
        AMD64_REX_B = 1, /* The register in r/m field, base register in SIB byte, or reg in opcode is 8-15 rather than 0-7 */
        AMD64_REX_X = 2, /* The index register in SIB byte is 8-15 rather than 0-7 */
        AMD64_REX_R = 4, /* The reg field of ModRM byte is 8-15 rather than 0-7 */
        AMD64_REX_W = 8  /* Operation is 64-bits instead of 32 (default) or 16 (with 0x66 prefix) */
} AMD64_REX_Bits;

#define AMD64_CALLEE_REGS ((1<<AMD64_RAX) | (1<<AMD64_RCX) | (1<<AMD64_RDX) | (1<<AMD64_RSI) | (1<<AMD64_RDI) | (1<<AMD64_R8) | (1<<AMD64_R9) | (1<<AMD64_R10))
#define AMD64_IS_CALLEE_REG(reg) (AMD64_CALLEE_REGS & (1 << (reg)))

#define AMD64_ARGUMENT_REGS ((1<<AMD64_RDI) | (1<<AMD64_RSI) | (1<<AMD64_RDX) | (1<<AMD64_RCX) | (1<<AMD64_R8) | (1<<AMD64_R9))
#define AMD64_IS_ARGUMENT_REG(reg) (AMD64_ARGUMENT_REGS & (1 << (reg)))

#define AMD64_CALLEE_SAVED_REGS ((1<<AMD64_RBX) | (1<<AMD64_R12) | (1<<AMD64_R13) | (1<<AMD64_R14) | (1<<AMD64_R15) | (1<<AMD64_RBP))
#define AMD64_IS_CALLEE_SAVED_REG(reg) (AMD64_CALLEE_SAVED_REGS & (1 << (reg)))
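
/*
 * Usage sketch (illustrative, not part of the original header): the masks
 * above are bitmaps indexed by AMD64_Reg_No, so a register's role in the
 * SysV AMD64 calling convention is a shift-and-mask test. Note that
 * AMD64_CALLEE_REGS lists the scratch registers a callee may clobber:
 *
 *   if (AMD64_IS_ARGUMENT_REG (AMD64_RDI))      // true: 1st integer argument
 *           ...
 *   if (AMD64_IS_CALLEE_SAVED_REG (AMD64_RBX))  // true: preserved across calls
 *           ...
 */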

#define AMD64_REX(bits) ((unsigned char)(0x40 | (bits)))
#define amd64_emit_rex(inst, width, reg_modrm, reg_index, reg_rm_base_opcode) do \
{ \
        unsigned char _amd64_rex_bits = \
                (((width) > 4) ? AMD64_REX_W : 0) | \
                (((reg_modrm) > 7) ? AMD64_REX_R : 0) | \
                (((reg_index) > 7) ? AMD64_REX_X : 0) | \
                (((reg_rm_base_opcode) > 7) ? AMD64_REX_B : 0); \
        if ((_amd64_rex_bits != 0) || (((width) == 1))) *(inst)++ = AMD64_REX(_amd64_rex_bits); \
} while (0)
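
/*
 * Usage sketch (illustrative, not part of the original header): for
 * "mov r9, rax" with 64-bit operands, amd64_emit_rex derives REX.W from the
 * operand width and REX.R from the extended register in the ModRM reg field;
 * buf and code are hypothetical names:
 *
 *   unsigned char buf [16], *code = buf;
 *   amd64_mov_reg_reg (code, AMD64_R9, AMD64_RAX, 8);
 *   // emits 0x4C 0x8B 0xC8 : REX = 0x40|W|R = 0x4C, opcode 0x8B, ModRM 0xC8
 */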

typedef union {
        long val;
        unsigned char b [8];
} amd64_imm_buf;

#include "x86-codegen.h"

#define amd64_bswap32(inst,reg) \
        do { \
                *(inst)++ = 0x0f; \
                *(inst)++ = (unsigned char)0xc8 + (reg); \
        } while (0)

/* In 64 bit mode, all registers have a low byte subregister */
#undef X86_IS_BYTE_REG
#define X86_IS_BYTE_REG(reg) 1

#define amd64_modrm_mod(modrm) ((modrm) >> 6)
#define amd64_modrm_reg(modrm) (((modrm) >> 3) & 0x7)
#define amd64_modrm_rm(modrm) ((modrm) & 0x7)

#define amd64_rex_r(rex) ((((rex) >> 2) & 0x1) << 3)
#define amd64_rex_x(rex) ((((rex) >> 1) & 0x1) << 3)
#define amd64_rex_b(rex) ((((rex) >> 0) & 0x1) << 3)

#define amd64_is_imm32(val) (((glong)(val) >= -((glong)1<<31)) && ((glong)(val) <= (((glong)1<<31)-1)))

#define x86_imm_emit64(inst,imm) \
        do { \
                amd64_imm_buf imb; imb.val = (long) (imm); \
                *(inst)++ = imb.b [0]; \
                *(inst)++ = imb.b [1]; \
                *(inst)++ = imb.b [2]; \
                *(inst)++ = imb.b [3]; \
                *(inst)++ = imb.b [4]; \
                *(inst)++ = imb.b [5]; \
                *(inst)++ = imb.b [6]; \
                *(inst)++ = imb.b [7]; \
        } while (0)
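
/*
 * Note (added commentary): amd64_imm_buf aliases the value with its bytes, so
 * the eight stores above emit the immediate in host memory order. On a
 * little-endian host (any machine that can run the amd64 code this header
 * generates), that is least-significant byte first, as the CPU expects.
 */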

#define amd64_membase_emit(inst,reg,basereg,disp) do { \
        if ((basereg) == AMD64_RIP) { \
                x86_address_byte ((inst), 0, (reg)&0x7, 5); \
                x86_imm_emit32 ((inst), (disp)); \
        } \
        else \
                x86_membase_emit ((inst),(reg)&0x7, (basereg)&0x7, (disp)); \
} while (0)

#define amd64_alu_reg_imm_size(inst,opc,reg,imm,size) \
        do { \
                if ((reg) == X86_EAX) { \
                        amd64_emit_rex(inst, size, 0, 0, 0); \
                        *(inst)++ = (((unsigned char)(opc)) << 3) + 5; \
                        x86_imm_emit32 ((inst), (imm)); \
                        break; \
                } \
                if (x86_is_imm8((imm))) { \
                        amd64_emit_rex(inst, size, 0, 0, (reg)); \
                        *(inst)++ = (unsigned char)0x83; \
                        x86_reg_emit ((inst), (opc), (reg)); \
                        x86_imm_emit8 ((inst), (imm)); \
                } else { \
                        amd64_emit_rex(inst, size, 0, 0, (reg)); \
                        *(inst)++ = (unsigned char)0x81; \
                        x86_reg_emit ((inst), (opc), (reg)); \
                        x86_imm_emit32 ((inst), (imm)); \
                } \
        } while (0)

#define amd64_alu_reg_imm(inst,opc,reg,imm) amd64_alu_reg_imm_size((inst),(opc),(reg),(imm),8)

#define amd64_alu_reg_reg_size(inst,opc,dreg,reg,size) \
        do { \
                amd64_emit_rex(inst, size, (dreg), 0, (reg)); \
                *(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
                x86_reg_emit ((inst), (dreg), (reg)); \
        } while (0)

#define amd64_alu_reg_reg(inst,opc,dreg,reg) amd64_alu_reg_reg_size ((inst),(opc),(dreg),(reg),8)

#define amd64_mov_regp_reg(inst,regp,reg,size) \
        do { \
                if ((size) == 2) \
                        *(inst)++ = (unsigned char)0x66; \
                amd64_emit_rex(inst, (size), (reg), 0, (regp)); \
                switch ((size)) { \
                case 1: *(inst)++ = (unsigned char)0x88; break; \
                case 2: case 4: case 8: *(inst)++ = (unsigned char)0x89; break; \
                default: assert (0); \
                } \
                x86_regp_emit ((inst), (reg), (regp)); \
        } while (0)

#define amd64_mov_membase_reg(inst,basereg,disp,reg,size) \
        do { \
                if ((size) == 2) \
                        *(inst)++ = (unsigned char)0x66; \
                amd64_emit_rex(inst, (size), (reg), 0, (basereg)); \
                switch ((size)) { \
                case 1: *(inst)++ = (unsigned char)0x88; break; \
                case 2: case 4: case 8: *(inst)++ = (unsigned char)0x89; break; \
                default: assert (0); \
                } \
                x86_membase_emit ((inst), ((reg)&0x7), ((basereg)&0x7), (disp)); \
        } while (0)

#define amd64_mov_reg_reg(inst,dreg,reg,size) \
        do { \
                if ((size) == 2) \
                        *(inst)++ = (unsigned char)0x66; \
                amd64_emit_rex(inst, (size), (dreg), 0, (reg)); \
                switch ((size)) { \
                case 1: *(inst)++ = (unsigned char)0x8a; break; \
                case 2: case 4: case 8: *(inst)++ = (unsigned char)0x8b; break; \
                default: assert (0); \
                } \
                x86_reg_emit ((inst), (dreg), (reg)); \
        } while (0)

#define amd64_mov_reg_mem(inst,reg,mem,size) \
        do { \
                if ((size) == 2) \
                        *(inst)++ = (unsigned char)0x66; \
                amd64_emit_rex(inst, (size), (reg), 0, 0); \
                switch ((size)) { \
                case 1: *(inst)++ = (unsigned char)0x8a; break; \
                case 2: case 4: case 8: *(inst)++ = (unsigned char)0x8b; break; \
                default: assert (0); \
                } \
                x86_address_byte ((inst), 0, (reg), 4); \
                x86_address_byte ((inst), 0, 4, 5); \
                x86_imm_emit32 ((inst), (mem)); \
        } while (0)

#define amd64_mov_reg_membase(inst,reg,basereg,disp,size) \
        do { \
                if ((size) == 2) \
                        *(inst)++ = (unsigned char)0x66; \
                amd64_emit_rex(inst, (size), (reg), 0, (basereg)); \
                switch ((size)) { \
                case 1: *(inst)++ = (unsigned char)0x8a; break; \
                case 2: case 4: case 8: *(inst)++ = (unsigned char)0x8b; break; \
                default: assert (0); \
                } \
                amd64_membase_emit ((inst), (reg), (basereg), (disp)); \
        } while (0)
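
/*
 * Usage sketch (illustrative, not part of the original header): passing
 * AMD64_RIP as the base register selects RIP-relative addressing
 * (ModRM mod=00, r/m=101, disp32); the REX.B bit picked up from RIP's
 * enum value 16 is ignored by the CPU in this encoding:
 *
 *   amd64_mov_reg_membase (code, AMD64_RAX, AMD64_RIP, 0x100, 8);
 *   // emits 0x49 0x8B 0x05 0x00 0x01 0x00 0x00 : mov rax, [rip+0x100]
 */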

#define amd64_movzx_reg_membase(inst,reg,basereg,disp,size) \
        do { \
                amd64_emit_rex(inst, (size), (reg), 0, (basereg)); \
                switch ((size)) { \
                case 1: *(inst)++ = (unsigned char)0x0f; *(inst)++ = (unsigned char)0xb6; break; \
                case 2: *(inst)++ = (unsigned char)0x0f; *(inst)++ = (unsigned char)0xb7; break; \
                case 4: case 8: *(inst)++ = (unsigned char)0x8b; break; \
                default: assert (0); \
                } \
                x86_membase_emit ((inst), ((reg)&0x7), ((basereg)&0x7), (disp)); \
        } while (0)

#define amd64_movsxd_reg_membase(inst,reg,basereg,disp) \
        do { \
                amd64_emit_rex(inst,8,(reg),0,(basereg)); \
                *(inst)++ = (unsigned char)0x63; \
                x86_membase_emit ((inst), ((reg)&0x7), ((basereg)&0x7), (disp)); \
        } while (0)

#define amd64_movsxd_reg_reg(inst,dreg,reg) \
        do { \
                amd64_emit_rex(inst,8,(dreg),0,(reg)); \
                *(inst)++ = (unsigned char)0x63; \
                x86_reg_emit ((inst), (dreg), (reg)); \
        } while (0)

/* Pretty much the only instruction that supports a 64-bit immediate. Optimize for common case of
 * 32-bit immediate. Pepper with casts to avoid warnings.
 */
#define amd64_mov_reg_imm_size(inst,reg,imm,size) \
        do { \
                amd64_emit_rex(inst, (size), 0, 0, (reg)); \
                *(inst)++ = (unsigned char)0xb8 + ((reg) & 0x7); \
                if ((size) == 8) \
                        x86_imm_emit64 ((inst), (long)(imm)); \
                else \
                        x86_imm_emit32 ((inst), (int)(long)(imm)); \
        } while (0)

#define amd64_mov_reg_imm(inst,reg,imm) \
        do { \
                /* The 32-bit form zero-extends, so it is only safe when the upper 32 bits are zero. */ \
                int _amd64_width_temp = (((unsigned long)(imm) >> 32) == 0); \
                amd64_mov_reg_imm_size ((inst), (reg), (imm), (_amd64_width_temp ? 4 : 8)); \
        } while (0)
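
/*
 * Usage sketch (illustrative, not part of the original header): the width
 * test above picks the short 5-byte form whenever the value fits in 32 bits
 * zero-extended, and the full 10-byte movabs otherwise:
 *
 *   amd64_mov_reg_imm (code, AMD64_RAX, 42);
 *   // 5 bytes: 0xB8 0x2A 0x00 0x00 0x00 (mov eax, 42; zero-extends into rax)
 *   amd64_mov_reg_imm (code, AMD64_RAX, 0x123456789aL);
 *   // 10 bytes: 0x48 0xB8 + 8 immediate bytes (REX.W movabs)
 */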

#define amd64_set_reg_template(inst,reg) amd64_mov_reg_imm_size ((inst),(reg), 0, 8)

#define amd64_set_template(inst,reg) amd64_set_reg_template((inst),(reg))

#define amd64_mov_membase_imm(inst,basereg,disp,imm,size) \
        do { \
                if ((size) == 2) \
                        *(inst)++ = (unsigned char)0x66; \
                amd64_emit_rex(inst, (size) == 1 ? 0 : (size), 0, 0, (basereg)); \
                if ((size) == 1) { \
                        *(inst)++ = (unsigned char)0xc6; \
                        x86_membase_emit ((inst), 0, (basereg) & 0x7, (disp)); \
                        x86_imm_emit8 ((inst), (imm)); \
                } else if ((size) == 2) { \
                        *(inst)++ = (unsigned char)0xc7; \
                        x86_membase_emit ((inst), 0, (basereg) & 0x7, (disp)); \
                        x86_imm_emit16 ((inst), (imm)); \
                } else { \
                        *(inst)++ = (unsigned char)0xc7; \
                        x86_membase_emit ((inst), 0, (basereg) & 0x7, (disp)); \
                        x86_imm_emit32 ((inst), (imm)); \
                } \
        } while (0)

#define amd64_lea_membase(inst,reg,basereg,disp) \
        do { \
                amd64_emit_rex(inst, 8, (reg), 0, (basereg)); \
                *(inst)++ = (unsigned char)0x8d; \
                amd64_membase_emit ((inst), (reg), (basereg), (disp)); \
        } while (0)

/* The instruction is implicitly 64-bit, so don't generate REX just for the size. */
#define amd64_push_reg(inst,reg) \
        do { \
                amd64_emit_rex(inst, 0, 0, 0, (reg)); \
                *(inst)++ = (unsigned char)0x50 + ((reg) & 0x7); \
        } while (0)

/* The instruction is implicitly 64-bit, so don't generate REX just for the size. */
#define amd64_push_membase(inst,basereg,disp) \
        do { \
                amd64_emit_rex(inst, 0, 0, 0, (basereg)); \
                *(inst)++ = (unsigned char)0xff; \
                x86_membase_emit ((inst), 6, (basereg) & 0x7, (disp)); \
        } while (0)

#define amd64_pop_reg(inst,reg) \
        do { \
                amd64_emit_rex(inst, 0, 0, 0, (reg)); \
                *(inst)++ = (unsigned char)0x58 + ((reg) & 0x7); \
        } while (0)

#define amd64_call_reg(inst,reg) \
        do { \
                amd64_emit_rex(inst, 8, 0, 0, (reg)); \
                *(inst)++ = (unsigned char)0xff; \
                x86_reg_emit ((inst), 2, ((reg) & 0x7)); \
        } while (0)

#define amd64_ret(inst) do { *(inst)++ = (unsigned char)0xc3; } while (0)
#define amd64_leave(inst) do { *(inst)++ = (unsigned char)0xc9; } while (0)
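
/*
 * Usage sketch (illustrative, not part of the original header): a minimal
 * stack frame built with the macros above, writing into a hypothetical
 * buffer:
 *
 *   unsigned char buf [32], *code = buf;
 *   amd64_push_reg (code, AMD64_RBP);                  // 0x55
 *   amd64_mov_reg_reg (code, AMD64_RBP, AMD64_RSP, 8); // 0x48 0x8B 0xEC
 *   ...function body...
 *   amd64_leave (code);                                // 0xC9
 *   amd64_ret (code);                                  // 0xC3
 */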
#define amd64_movsd_reg_regp(inst,reg,regp) \
        do { \
                *(inst)++ = (unsigned char)0xf2; \
                amd64_emit_rex(inst, 0, (reg), 0, (regp)); \
                *(inst)++ = (unsigned char)0x0f; \
                *(inst)++ = (unsigned char)0x10; \
                x86_regp_emit ((inst), (reg) & 0x7, (regp) & 0x7); \
        } while (0)

#define amd64_movsd_regp_reg(inst,regp,reg) \
        do { \
                *(inst)++ = (unsigned char)0xf2; \
                amd64_emit_rex(inst, 0, (reg), 0, (regp)); \
                *(inst)++ = (unsigned char)0x0f; \
                *(inst)++ = (unsigned char)0x11; \
                x86_regp_emit ((inst), (reg) & 0x7, (regp) & 0x7); \
        } while (0)

#define amd64_movss_reg_regp(inst,reg,regp) \
        do { \
                *(inst)++ = (unsigned char)0xf3; \
                amd64_emit_rex(inst, 0, (reg), 0, (regp)); \
                *(inst)++ = (unsigned char)0x0f; \
                *(inst)++ = (unsigned char)0x10; \
                x86_regp_emit ((inst), (reg) & 0x7, (regp) & 0x7); \
        } while (0)

#define amd64_movss_regp_reg(inst,regp,reg) \
        do { \
                *(inst)++ = (unsigned char)0xf3; \
                amd64_emit_rex(inst, 0, (reg), 0, (regp)); \
                *(inst)++ = (unsigned char)0x0f; \
                *(inst)++ = (unsigned char)0x11; \
                x86_regp_emit ((inst), (reg) & 0x7, (regp) & 0x7); \
        } while (0)

#define amd64_movsd_reg_membase(inst,reg,basereg,disp) \
        do { \
                *(inst)++ = (unsigned char)0xf2; \
                amd64_emit_rex(inst, 0, (reg), 0, (basereg)); \
                *(inst)++ = (unsigned char)0x0f; \
                *(inst)++ = (unsigned char)0x10; \
                x86_membase_emit ((inst), (reg) & 0x7, (basereg) & 0x7, (disp)); \
        } while (0)

#define amd64_movss_reg_membase(inst,reg,basereg,disp) \
        do { \
                *(inst)++ = (unsigned char)0xf3; \
                amd64_emit_rex(inst, 0, (reg), 0, (basereg)); \
                *(inst)++ = (unsigned char)0x0f; \
                *(inst)++ = (unsigned char)0x10; \
                x86_membase_emit ((inst), (reg) & 0x7, (basereg) & 0x7, (disp)); \
        } while (0)

#define amd64_movsd_membase_reg(inst,basereg,disp,reg) \
        do { \
                *(inst)++ = (unsigned char)0xf2; \
                amd64_emit_rex(inst, 0, (reg), 0, (basereg)); \
                *(inst)++ = (unsigned char)0x0f; \
                *(inst)++ = (unsigned char)0x11; \
                x86_membase_emit ((inst), (reg) & 0x7, (basereg) & 0x7, (disp)); \
        } while (0)

#define amd64_movss_membase_reg(inst,basereg,disp,reg) \
        do { \
                *(inst)++ = (unsigned char)0xf3; \
                amd64_emit_rex(inst, 0, (reg), 0, (basereg)); \
                *(inst)++ = (unsigned char)0x0f; \
                *(inst)++ = (unsigned char)0x11; \
                x86_membase_emit ((inst), (reg) & 0x7, (basereg) & 0x7, (disp)); \
        } while (0)

/* The original inc_reg opcode is used as the REX prefix */
#define amd64_inc_reg_size(inst,reg,size) \
        do { \
                amd64_emit_rex ((inst),(size),0,0,(reg)); \
                *(inst)++ = (unsigned char)0xff; \
                x86_reg_emit ((inst),0,(reg) & 0x7); \
        } while (0)

#define amd64_dec_reg_size(inst,reg,size) \
        do { \
                amd64_emit_rex ((inst),(size),0,0,(reg)); \
                *(inst)++ = (unsigned char)0xff; \
                x86_reg_emit ((inst),1,(reg) & 0x7); \
        } while (0)

#define amd64_padding_size(inst,size) \
        do { if ((size) == 1) x86_padding ((inst),(size)); else { amd64_emit_rex ((inst),8,0,0,0); x86_padding((inst),(size) - 1); } } while (0)

#define amd64_fld_membase_size(inst,basereg,disp,is_double,size) do { \
        amd64_emit_rex ((inst),0,0,0,(basereg)); \
        *(inst)++ = (is_double) ? (unsigned char)0xdd : (unsigned char)0xd9; \
        amd64_membase_emit ((inst), 0, (basereg), (disp)); \
} while (0)

#define amd64_call_membase_size(inst,basereg,disp,size) do { amd64_emit_rex ((inst),0,0,0,(basereg)); *(inst)++ = (unsigned char)0xff; amd64_membase_emit ((inst),2, (basereg),(disp)); } while (0)

/*
 * SSE
 */

#define emit_opcode3(inst,op1,op2,op3) do { \
        *(inst)++ = (unsigned char)(op1); \
        *(inst)++ = (unsigned char)(op2); \
        *(inst)++ = (unsigned char)(op3); \
} while (0)

#define emit_sse_reg_reg_size(inst,dreg,reg,op1,op2,op3,size) do { \
        *(inst)++ = (unsigned char)(op1); \
        amd64_emit_rex ((inst), size, (dreg), 0, (reg)); \
        *(inst)++ = (unsigned char)(op2); \
        *(inst)++ = (unsigned char)(op3); \
        x86_reg_emit ((inst), (dreg), (reg)); \
} while (0)

#define emit_sse_reg_reg(inst,dreg,reg,op1,op2,op3) emit_sse_reg_reg_size ((inst), (dreg), (reg), (op1), (op2), (op3), 0)

#define emit_sse_membase_reg(inst,basereg,disp,reg,op1,op2,op3) do { \
        *(inst)++ = (unsigned char)(op1); \
        amd64_emit_rex ((inst), 0, (reg), 0, (basereg)); \
        *(inst)++ = (unsigned char)(op2); \
        *(inst)++ = (unsigned char)(op3); \
        amd64_membase_emit ((inst), (reg), (basereg), (disp)); \
} while (0)

#define emit_sse_reg_membase(inst,dreg,basereg,disp,op1,op2,op3) do { \
        *(inst)++ = (unsigned char)(op1); \
        amd64_emit_rex ((inst), 0, (dreg), 0, (basereg) == AMD64_RIP ? 0 : (basereg)); \
        *(inst)++ = (unsigned char)(op2); \
        *(inst)++ = (unsigned char)(op3); \
        amd64_membase_emit ((inst), (dreg), (basereg), (disp)); \
} while (0)

#define amd64_sse_xorpd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst),(dreg),(reg), 0x66, 0x0f, 0x57)

#define amd64_sse_xorpd_reg_membase(inst,dreg,basereg,disp) emit_sse_reg_membase ((inst),(dreg),(basereg), (disp), 0x66, 0x0f, 0x57)

#define amd64_sse_movsd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf2, 0x0f, 0x10)

#define amd64_sse_movsd_reg_membase(inst,dreg,basereg,disp) emit_sse_reg_membase ((inst), (dreg), (basereg), (disp), 0xf2, 0x0f, 0x10)

#define amd64_sse_movsd_membase_reg(inst,basereg,disp,reg) emit_sse_membase_reg ((inst), (basereg), (disp), (reg), 0xf2, 0x0f, 0x11)

#define amd64_sse_movss_membase_reg(inst,basereg,disp,reg) emit_sse_membase_reg ((inst), (basereg), (disp), (reg), 0xf3, 0x0f, 0x11)

#define amd64_sse_movss_reg_membase(inst,dreg,basereg,disp) emit_sse_reg_membase ((inst), (dreg), (basereg), (disp), 0xf3, 0x0f, 0x10)

#define amd64_sse_comisd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst),(dreg),(reg),0x66,0x0f,0x2f)

#define amd64_sse_comisd_reg_membase(inst,dreg,basereg,disp) emit_sse_reg_membase ((inst), (dreg), (basereg), (disp), 0x66, 0x0f, 0x2f)

#define amd64_sse_cvtsd2si_reg_reg(inst,dreg,reg) emit_sse_reg_reg_size ((inst), (dreg), (reg), 0xf2, 0x0f, 0x2d, 8)

#define amd64_sse_cvttsd2si_reg_reg_size(inst,dreg,reg,size) emit_sse_reg_reg_size ((inst), (dreg), (reg), 0xf2, 0x0f, 0x2c, (size))

#define amd64_sse_cvttsd2si_reg_reg(inst,dreg,reg) amd64_sse_cvttsd2si_reg_reg_size ((inst), (dreg), (reg), 8)

#define amd64_sse_cvtsi2sd_reg_reg_size(inst,dreg,reg,size) emit_sse_reg_reg_size ((inst), (dreg), (reg), 0xf2, 0x0f, 0x2a, (size))

#define amd64_sse_cvtsi2sd_reg_reg(inst,dreg,reg) amd64_sse_cvtsi2sd_reg_reg_size ((inst), (dreg), (reg), 8)

#define amd64_sse_cvtsd2ss_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf2, 0x0f, 0x5a)

#define amd64_sse_cvtss2sd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf3, 0x0f, 0x5a)

#define amd64_sse_addsd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf2, 0x0f, 0x58)

#define amd64_sse_subsd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf2, 0x0f, 0x5c)

#define amd64_sse_mulsd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf2, 0x0f, 0x59)

#define amd64_sse_divsd_reg_reg(inst,dreg,reg) emit_sse_reg_reg ((inst), (dreg), (reg), 0xf2, 0x0f, 0x5e)
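
/*
 * Usage sketch (illustrative, not part of the original header): a scalar
 * double add, a = a + b, with both operands at hypothetical offsets off RDI:
 *
 *   amd64_sse_movsd_reg_membase (code, AMD64_XMM0, AMD64_RDI, 0);  // movsd xmm0, [rdi]
 *   amd64_sse_movsd_reg_membase (code, AMD64_XMM1, AMD64_RDI, 8);  // movsd xmm1, [rdi+8]
 *   amd64_sse_addsd_reg_reg (code, AMD64_XMM0, AMD64_XMM1);        // addsd xmm0, xmm1
 *   amd64_sse_movsd_membase_reg (code, AMD64_RDI, 0, AMD64_XMM0);  // movsd [rdi], xmm0
 */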

/* Generated from x86-codegen.h */

#define amd64_breakpoint_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_breakpoint(inst); } while (0)
#define amd64_cld_size(inst,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_cld(inst); } while (0)
#define amd64_stosb_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_stosb(inst); } while (0)
#define amd64_stosl_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_stosl(inst); } while (0)
#define amd64_stosd_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_stosd(inst); } while (0)
#define amd64_movsb_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_movsb(inst); } while (0)
#define amd64_movsl_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_movsl(inst); } while (0)
#define amd64_movsd_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_movsd(inst); } while (0)
#define amd64_prefix_size(inst,p,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_prefix((inst), p); } while (0)
#define amd64_rdtsc_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_rdtsc(inst); } while (0)
#define amd64_cmpxchg_reg_reg_size(inst,dreg,reg,size) do { amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_cmpxchg_reg_reg((inst),((dreg)&0x7),((reg)&0x7)); } while (0)
#define amd64_cmpxchg_mem_reg_size(inst,mem,reg,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_cmpxchg_mem_reg((inst),(mem),((reg)&0x7)); } while (0)
#define amd64_cmpxchg_membase_reg_size(inst,basereg,disp,reg,size) do { amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_cmpxchg_membase_reg((inst),((basereg)&0x7),(disp),((reg)&0x7)); } while (0)
#define amd64_xchg_reg_reg_size(inst,dreg,reg,size) do { amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_xchg_reg_reg((inst),((dreg)&0x7),((reg)&0x7),(size) == 8 ? 4 : (size)); } while (0)
#define amd64_xchg_mem_reg_size(inst,mem,reg,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_xchg_mem_reg((inst),(mem),((reg)&0x7),(size) == 8 ? 4 : (size)); } while (0)
#define amd64_xchg_membase_reg_size(inst,basereg,disp,reg,size) do { amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_xchg_membase_reg((inst),((basereg)&0x7),(disp),((reg)&0x7),(size) == 8 ? 4 : (size)); } while (0)
#define amd64_inc_mem_size(inst,mem,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_inc_mem((inst),(mem)); } while (0)
#define amd64_inc_membase_size(inst,basereg,disp,size) do { amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_inc_membase((inst),((basereg)&0x7),(disp)); } while (0)
//#define amd64_inc_reg_size(inst,reg,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_inc_reg((inst),((reg)&0x7)); } while (0)
#define amd64_dec_mem_size(inst,mem,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_dec_mem((inst),(mem)); } while (0)
#define amd64_dec_membase_size(inst,basereg,disp,size) do { amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_dec_membase((inst),((basereg)&0x7),(disp)); } while (0)
//#define amd64_dec_reg_size(inst,reg,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_dec_reg((inst),((reg)&0x7)); } while (0)
#define amd64_not_mem_size(inst,mem,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_not_mem((inst),(mem)); } while (0)
#define amd64_not_membase_size(inst,basereg,disp,size) do { amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_not_membase((inst),((basereg)&0x7),(disp)); } while (0)
#define amd64_not_reg_size(inst,reg,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_not_reg((inst),((reg)&0x7)); } while (0)
#define amd64_neg_mem_size(inst,mem,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_neg_mem((inst),(mem)); } while (0)
#define amd64_neg_membase_size(inst,basereg,disp,size) do { amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_neg_membase((inst),((basereg)&0x7),(disp)); } while (0)
#define amd64_neg_reg_size(inst,reg,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_neg_reg((inst),((reg)&0x7)); } while (0)
#define amd64_nop_size(inst,size) do { x86_nop(inst); } while (0)
//#define amd64_alu_reg_imm_size(inst,opc,reg,imm,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_alu_reg_imm((inst),(opc),((reg)&0x7),(imm)); } while (0)
#define amd64_alu_mem_imm_size(inst,opc,mem,imm,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_alu_mem_imm((inst),(opc),(mem),(imm)); } while (0)
#define amd64_alu_membase_imm_size(inst,opc,basereg,disp,imm,size) do { amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_alu_membase_imm((inst),(opc),((basereg)&0x7),(disp),(imm)); } while (0)
#define amd64_alu_mem_reg_size(inst,opc,mem,reg,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_alu_mem_reg((inst),(opc),(mem),((reg)&0x7)); } while (0)
#define amd64_alu_membase_reg_size(inst,opc,basereg,disp,reg,size) do { amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_alu_membase_reg((inst),(opc),((basereg)&0x7),(disp),((reg)&0x7)); } while (0)
//#define amd64_alu_reg_reg_size(inst,opc,dreg,reg,size) do { amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_alu_reg_reg((inst),(opc),((dreg)&0x7),((reg)&0x7)); } while (0)
#define amd64_alu_reg8_reg8_size(inst,opc,dreg,reg,is_dreg_h,is_reg_h,size) do { amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_alu_reg8_reg8((inst),(opc),((dreg)&0x7),((reg)&0x7),(is_dreg_h),(is_reg_h)); } while (0)
#define amd64_alu_reg_mem_size(inst,opc,reg,mem,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_alu_reg_mem((inst),(opc),((reg)&0x7),(mem)); } while (0)
#define amd64_alu_reg_membase_size(inst,opc,reg,basereg,disp,size) do { amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_alu_reg_membase((inst),(opc),((reg)&0x7),((basereg)&0x7),(disp)); } while (0)
#define amd64_test_reg_imm_size(inst,reg,imm,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_test_reg_imm((inst),((reg)&0x7),(imm)); } while (0)
#define amd64_test_mem_imm_size(inst,mem,imm,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_test_mem_imm((inst),(mem),(imm)); } while (0)
#define amd64_test_membase_imm_size(inst,basereg,disp,imm,size) do { amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_test_membase_imm((inst),((basereg)&0x7),(disp),(imm)); } while (0)
#define amd64_test_reg_reg_size(inst,dreg,reg,size) do { amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_test_reg_reg((inst),((dreg)&0x7),((reg)&0x7)); } while (0)
#define amd64_test_mem_reg_size(inst,mem,reg,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_test_mem_reg((inst),(mem),((reg)&0x7)); } while (0)
#define amd64_test_membase_reg_size(inst,basereg,disp,reg,size) do { amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_test_membase_reg((inst),((basereg)&0x7),(disp),((reg)&0x7)); } while (0)
#define amd64_shift_reg_imm_size(inst,opc,reg,imm,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_shift_reg_imm((inst),(opc),((reg)&0x7),(imm)); } while (0)
#define amd64_shift_mem_imm_size(inst,opc,mem,imm,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_shift_mem_imm((inst),(opc),(mem),(imm)); } while (0)
#define amd64_shift_membase_imm_size(inst,opc,basereg,disp,imm,size) do { amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_shift_membase_imm((inst),(opc),((basereg)&0x7),(disp),(imm)); } while (0)
#define amd64_shift_reg_size(inst,opc,reg,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_shift_reg((inst),(opc),((reg)&0x7)); } while (0)
#define amd64_shift_mem_size(inst,opc,mem,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_shift_mem((inst),(opc),(mem)); } while (0)
#define amd64_shift_membase_size(inst,opc,basereg,disp,size) do { amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_shift_membase((inst),(opc),((basereg)&0x7),(disp)); } while (0)
#define amd64_shrd_reg_size(inst,dreg,reg,size) do { amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_shrd_reg((inst),((dreg)&0x7),((reg)&0x7)); } while (0)
#define amd64_shrd_reg_imm_size(inst,dreg,reg,shamt,size) do { amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_shrd_reg_imm((inst),((dreg)&0x7),((reg)&0x7),(shamt)); } while (0)
#define amd64_shld_reg_size(inst,dreg,reg,size) do { amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_shld_reg((inst),((dreg)&0x7),((reg)&0x7)); } while (0)
#define amd64_shld_reg_imm_size(inst,dreg,reg,shamt,size) do { amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_shld_reg_imm((inst),((dreg)&0x7),((reg)&0x7),(shamt)); } while (0)
#define amd64_mul_reg_size(inst,reg,is_signed,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_mul_reg((inst),((reg)&0x7),(is_signed)); } while (0)
#define amd64_mul_mem_size(inst,mem,is_signed,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_mul_mem((inst),(mem),(is_signed)); } while (0)
#define amd64_mul_membase_size(inst,basereg,disp,is_signed,size) do { amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_mul_membase((inst),((basereg)&0x7),(disp),(is_signed)); } while (0)
#define amd64_imul_reg_reg_size(inst,dreg,reg,size) do { amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_imul_reg_reg((inst),((dreg)&0x7),((reg)&0x7)); } while (0)
#define amd64_imul_reg_mem_size(inst,reg,mem,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_imul_reg_mem((inst),((reg)&0x7),(mem)); } while (0)
#define amd64_imul_reg_membase_size(inst,reg,basereg,disp,size) do { amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_imul_reg_membase((inst),((reg)&0x7),((basereg)&0x7),(disp)); } while (0)
#define amd64_imul_reg_reg_imm_size(inst,dreg,reg,imm,size) do { amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_imul_reg_reg_imm((inst),((dreg)&0x7),((reg)&0x7),(imm)); } while (0)
#define amd64_imul_reg_mem_imm_size(inst,reg,mem,imm,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_imul_reg_mem_imm((inst),((reg)&0x7),(mem),(imm)); } while (0)
#define amd64_imul_reg_membase_imm_size(inst,reg,basereg,disp,imm,size) do { amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_imul_reg_membase_imm((inst),((reg)&0x7),((basereg)&0x7),(disp),(imm)); } while (0)
#define amd64_div_reg_size(inst,reg,is_signed,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_div_reg((inst),((reg)&0x7),(is_signed)); } while (0)
#define amd64_div_mem_size(inst,mem,is_signed,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_div_mem((inst),(mem),(is_signed)); } while (0)
#define amd64_div_membase_size(inst,basereg,disp,is_signed,size) do { amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_div_membase((inst),((basereg)&0x7),(disp),(is_signed)); } while (0)
#define amd64_mov_mem_reg_size(inst,mem,reg,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_mov_mem_reg((inst),(mem),((reg)&0x7),(size) == 8 ? 4 : (size)); } while (0)
//#define amd64_mov_regp_reg_size(inst,regp,reg,size) do { amd64_emit_rex ((inst),(size),(regp),0,(reg)); x86_mov_regp_reg((inst),(regp),((reg)&0x7),(size) == 8 ? 4 : (size)); } while (0)
//#define amd64_mov_membase_reg_size(inst,basereg,disp,reg,size) do { amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_mov_membase_reg((inst),((basereg)&0x7),(disp),((reg)&0x7),(size) == 8 ? 4 : (size)); } while (0)
#define amd64_mov_memindex_reg_size(inst,basereg,disp,indexreg,shift,reg,size) do { amd64_emit_rex ((inst),(size),(reg),(indexreg),(basereg)); x86_mov_memindex_reg((inst),((basereg)&0x7),(disp),((indexreg)&0x7),(shift),((reg)&0x7),(size) == 8 ? 4 : (size)); } while (0)
#define amd64_mov_reg_reg_size(inst,dreg,reg,size) do { amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_mov_reg_reg((inst),((dreg)&0x7),((reg)&0x7),(size) == 8 ? 4 : (size)); } while (0)
//#define amd64_mov_reg_mem_size(inst,reg,mem,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_mov_reg_mem((inst),((reg)&0x7),(mem),(size) == 8 ? 4 : (size)); } while (0)
//#define amd64_mov_reg_membase_size(inst,reg,basereg,disp,size) do { amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_mov_reg_membase((inst),((reg)&0x7),((basereg)&0x7),(disp),(size) == 8 ? 4 : (size)); } while (0)
#define amd64_mov_reg_memindex_size(inst,reg,basereg,disp,indexreg,shift,size) do { amd64_emit_rex ((inst),(size),(reg),(indexreg),(basereg)); x86_mov_reg_memindex((inst),((reg)&0x7),((basereg)&0x7),(disp),((indexreg)&0x7),(shift),(size) == 8 ? 4 : (size)); } while (0)
#define amd64_clear_reg_size(inst,reg,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_clear_reg((inst),((reg)&0x7)); } while (0)
//#define amd64_mov_reg_imm_size(inst,reg,imm,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_mov_reg_imm((inst),((reg)&0x7),(imm)); } while (0)
#define amd64_mov_mem_imm_size(inst,mem,imm,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_mov_mem_imm((inst),(mem),(imm),(size) == 8 ? 4 : (size)); } while (0)
//#define amd64_mov_membase_imm_size(inst,basereg,disp,imm,size) do { amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_mov_membase_imm((inst),((basereg)&0x7),(disp),(imm),(size) == 8 ? 4 : (size)); } while (0)
#define amd64_mov_memindex_imm_size(inst,basereg,disp,indexreg,shift,imm,size) do { amd64_emit_rex ((inst),(size),0,(indexreg),(basereg)); x86_mov_memindex_imm((inst),((basereg)&0x7),(disp),((indexreg)&0x7),(shift),(imm),(size) == 8 ? 4 : (size)); } while (0)
#define amd64_lea_mem_size(inst,reg,mem,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_lea_mem((inst),((reg)&0x7),(mem)); } while (0)
//#define amd64_lea_membase_size(inst,reg,basereg,disp,size) do { amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_lea_membase((inst),((reg)&0x7),((basereg)&0x7),(disp)); } while (0)
#define amd64_lea_memindex_size(inst,reg,basereg,disp,indexreg,shift,size) do { amd64_emit_rex ((inst),(size),(reg),(indexreg),(basereg)); x86_lea_memindex((inst),((reg)&0x7),((basereg)&0x7),(disp),((indexreg)&0x7),(shift)); } while (0)
#define amd64_widen_reg_size(inst,dreg,reg,is_signed,is_half,size) do { amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_widen_reg((inst),((dreg)&0x7),((reg)&0x7),(is_signed),(is_half)); } while (0)
#define amd64_widen_mem_size(inst,dreg,mem,is_signed,is_half,size) do { amd64_emit_rex ((inst),(size),(dreg),0,0); x86_widen_mem((inst),((dreg)&0x7),(mem),(is_signed),(is_half)); } while (0)
#define amd64_widen_membase_size(inst,dreg,basereg,disp,is_signed,is_half,size) do { amd64_emit_rex ((inst),(size),(dreg),0,(basereg)); x86_widen_membase((inst),((dreg)&0x7),((basereg)&0x7),(disp),(is_signed),(is_half)); } while (0)
#define amd64_widen_memindex_size(inst,dreg,basereg,disp,indexreg,shift,is_signed,is_half,size) do { amd64_emit_rex ((inst),(size),(dreg),(indexreg),(basereg)); x86_widen_memindex((inst),((dreg)&0x7),((basereg)&0x7),(disp),((indexreg)&0x7),(shift),(is_signed),(is_half)); } while (0)
#define amd64_cdq_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_cdq(inst); } while (0)
#define amd64_wait_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_wait(inst); } while (0)
#define amd64_fp_op_mem_size(inst,opc,mem,is_double,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_fp_op_mem((inst),(opc),(mem),(is_double)); } while (0)
#define amd64_fp_op_membase_size(inst,opc,basereg,disp,is_double,size) do { amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fp_op_membase((inst),(opc),((basereg)&0x7),(disp),(is_double)); } while (0)
#define amd64_fp_op_size(inst,opc,index,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_fp_op((inst),(opc),(index)); } while (0)
#define amd64_fp_op_reg_size(inst,opc,index,pop_stack,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_fp_op_reg((inst),(opc),(index),(pop_stack)); } while (0)
#define amd64_fp_int_op_membase_size(inst,opc,basereg,disp,is_int,size) do { amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fp_int_op_membase((inst),(opc),((basereg)&0x7),(disp),(is_int)); } while (0)
#define amd64_fstp_size(inst,index,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_fstp((inst),(index)); } while (0)
#define amd64_fcompp_size(inst,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_fcompp(inst); } while (0)
#define amd64_fucompp_size(inst,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_fucompp(inst); } while (0)
#define amd64_fnstsw_size(inst,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_fnstsw(inst); } while (0)
#define amd64_fnstcw_size(inst,mem,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_fnstcw((inst),(mem)); } while (0)
#define amd64_fnstcw_membase_size(inst,basereg,disp,size) do { amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_fnstcw_membase((inst),((basereg)&0x7),(disp)); } while (0)
#define amd64_fldcw_size(inst,mem,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_fldcw((inst),(mem)); } while (0)
#define amd64_fldcw_membase_size(inst,basereg,disp,size) do { amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fldcw_membase((inst),((basereg)&0x7),(disp)); } while (0)
#define amd64_fchs_size(inst,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_fchs(inst); } while (0)
#define amd64_frem_size(inst,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_frem(inst); } while (0)
#define amd64_fxch_size(inst,index,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_fxch((inst),(index)); } while (0)
#define amd64_fcomi_size(inst,index,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_fcomi((inst),(index)); } while (0)
#define amd64_fcomip_size(inst,index,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_fcomip((inst),(index)); } while (0)
#define amd64_fucomi_size(inst,index,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_fucomi((inst),(index)); } while (0)
#define amd64_fucomip_size(inst,index,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_fucomip((inst),(index)); } while (0)
#define amd64_fld_size(inst,mem,is_double,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_fld((inst),(mem),(is_double)); } while (0)
//#define amd64_fld_membase_size(inst,basereg,disp,is_double,size) do { amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fld_membase((inst),((basereg)&0x7),(disp),(is_double)); } while (0)
#define amd64_fld80_mem_size(inst,mem,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_fld80_mem((inst),(mem)); } while (0)
#define amd64_fld80_membase_size(inst,basereg,disp,size) do { amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_fld80_membase((inst),((basereg)&0x7),(disp)); } while (0)
#define amd64_fild_size(inst,mem,is_long,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_fild((inst),(mem),(is_long)); } while (0)
#define amd64_fild_membase_size(inst,basereg,disp,is_long,size) do { amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fild_membase((inst),((basereg)&0x7),(disp),(is_long)); } while (0)
#define amd64_fld_reg_size(inst,index,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_fld_reg((inst),(index)); } while (0)
#define amd64_fldz_size(inst,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_fldz(inst); } while (0)
#define amd64_fld1_size(inst,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_fld1(inst); } while (0)
#define amd64_fldpi_size(inst,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_fldpi(inst); } while (0)
#define amd64_fst_size(inst,mem,is_double,pop_stack,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_fst((inst),(mem),(is_double),(pop_stack)); } while (0)
#define amd64_fst_membase_size(inst,basereg,disp,is_double,pop_stack,size) do { amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fst_membase((inst),((basereg)&0x7),(disp),(is_double),(pop_stack)); } while (0)
#define amd64_fst80_mem_size(inst,mem,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_fst80_mem((inst),(mem)); } while (0)
#define amd64_fst80_membase_size(inst,basereg,disp,size) do { amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fst80_membase((inst),((basereg)&0x7),(disp)); } while (0)
#define amd64_fist_pop_size(inst,mem,is_long,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_fist_pop((inst),(mem),(is_long)); } while (0)
#define amd64_fist_pop_membase_size(inst,basereg,disp,is_long,size) do { amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fist_pop_membase((inst),((basereg)&0x7),(disp),(is_long)); } while (0)
#define amd64_fstsw_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_fstsw(inst); } while (0)
#define amd64_fist_membase_size(inst,basereg,disp,is_int,size) do { amd64_emit_rex ((inst),0,0,0,(basereg)); x86_fist_membase((inst),((basereg)&0x7),(disp),(is_int)); } while (0)
//#define amd64_push_reg_size(inst,reg,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_push_reg((inst),((reg)&0x7)); } while (0)
#define amd64_push_regp_size(inst,reg,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_push_regp((inst),((reg)&0x7)); } while (0)
#define amd64_push_mem_size(inst,mem,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_push_mem((inst),(mem)); } while (0)
//#define amd64_push_membase_size(inst,basereg,disp,size) do { amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_push_membase((inst),((basereg)&0x7),(disp)); } while (0)
#define amd64_push_memindex_size(inst,basereg,disp,indexreg,shift,size) do { amd64_emit_rex ((inst),(size),0,(indexreg),(basereg)); x86_push_memindex((inst),((basereg)&0x7),(disp),((indexreg)&0x7),(shift)); } while (0)
#define amd64_push_imm_size(inst,imm,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_push_imm((inst),(imm)); } while (0)
//#define amd64_pop_reg_size(inst,reg,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_pop_reg((inst),((reg)&0x7)); } while (0)
#define amd64_pop_mem_size(inst,mem,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_pop_mem((inst),(mem)); } while (0)
#define amd64_pop_membase_size(inst,basereg,disp,size) do { amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_pop_membase((inst),((basereg)&0x7),(disp)); } while (0)
#define amd64_pushad_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_pushad(inst); } while (0)
#define amd64_pushfd_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_pushfd(inst); } while (0)
#define amd64_popad_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_popad(inst); } while (0)
#define amd64_popfd_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_popfd(inst); } while (0)
#define amd64_loop_size(inst,imm,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_loop((inst),(imm)); } while (0)
#define amd64_loope_size(inst,imm,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_loope((inst),(imm)); } while (0)
#define amd64_loopne_size(inst,imm,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_loopne((inst),(imm)); } while (0)
#define amd64_jump32_size(inst,imm,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_jump32((inst),(imm)); } while (0)
#define amd64_jump8_size(inst,imm,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_jump8((inst),(imm)); } while (0)
#define amd64_jump_reg_size(inst,reg,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_jump_reg((inst),((reg)&0x7)); } while (0)
#define amd64_jump_mem_size(inst,mem,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_jump_mem((inst),(mem)); } while (0)
#define amd64_jump_membase_size(inst,basereg,disp,size) do { amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_jump_membase((inst),((basereg)&0x7),(disp)); } while (0)
#define amd64_jump_code_size(inst,target,size) do { x86_jump_code((inst),(target)); } while (0)
#define amd64_jump_disp_size(inst,disp,size) do { amd64_emit_rex ((inst),0,0,0,0); x86_jump_disp((inst),(disp)); } while (0)
#define amd64_branch8_size(inst,cond,imm,is_signed,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_branch8((inst),(cond),(imm),(is_signed)); } while (0)
#define amd64_branch32_size(inst,cond,imm,is_signed,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_branch32((inst),(cond),(imm),(is_signed)); } while (0)
#define amd64_branch_size(inst,cond,target,is_signed,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_branch((inst),(cond),(target),(is_signed)); } while (0)
#define amd64_branch_disp_size(inst,cond,disp,is_signed,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_branch_disp((inst),(cond),(disp),(is_signed)); } while (0)
#define amd64_set_reg_size(inst,cond,reg,is_signed,size) do { amd64_emit_rex((inst),1,0,0,(reg)); x86_set_reg((inst),(cond),((reg)&0x7),(is_signed)); } while (0)
#define amd64_set_mem_size(inst,cond,mem,is_signed,size) do { x86_set_mem((inst),(cond),(mem),(is_signed)); } while (0)
#define amd64_set_membase_size(inst,cond,basereg,disp,is_signed,size) do { amd64_emit_rex ((inst),0,0,0,(basereg)); x86_set_membase((inst),(cond),((basereg)&0x7),(disp),(is_signed)); } while (0)
#define amd64_call_imm_size(inst,disp,size) do { x86_call_imm((inst),(disp)); } while (0)
//#define amd64_call_reg_size(inst,reg,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_call_reg((inst),((reg)&0x7)); } while (0)
#define amd64_call_mem_size(inst,mem,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_call_mem((inst),(mem)); } while (0)
#define amd64_call_code_size(inst,target,size) do { x86_call_code((inst),(target)); } while (0)
//#define amd64_ret_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_ret(inst); } while (0)
#define amd64_ret_imm_size(inst,imm,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_ret_imm((inst),(imm)); } while (0)
#define amd64_cmov_reg_size(inst,cond,is_signed,dreg,reg,size) do { amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_cmov_reg((inst),(cond),(is_signed),((dreg)&0x7),((reg)&0x7)); } while (0)
#define amd64_cmov_mem_size(inst,cond,is_signed,reg,mem,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_cmov_mem((inst),(cond),(is_signed),((reg)&0x7),(mem)); } while (0)
#define amd64_cmov_membase_size(inst,cond,is_signed,reg,basereg,disp,size) do { amd64_emit_rex ((inst),(size),0,0,(basereg)); x86_cmov_membase((inst),(cond),(is_signed),((reg)&0x7),((basereg)&0x7),(disp)); } while (0)
#define amd64_enter_size(inst,framesize,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_enter((inst),(framesize)); } while (0)
//#define amd64_leave_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_leave(inst); } while (0)
#define amd64_sahf_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_sahf(inst); } while (0)
#define amd64_fsin_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_fsin(inst); } while (0)
#define amd64_fcos_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_fcos(inst); } while (0)
#define amd64_fabs_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_fabs(inst); } while (0)
#define amd64_ftst_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_ftst(inst); } while (0)
#define amd64_fxam_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_fxam(inst); } while (0)
#define amd64_fpatan_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_fpatan(inst); } while (0)
#define amd64_fprem_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_fprem(inst); } while (0)
#define amd64_fprem1_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_fprem1(inst); } while (0)
#define amd64_frndint_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_frndint(inst); } while (0)
#define amd64_fsqrt_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_fsqrt(inst); } while (0)
#define amd64_fptan_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_fptan(inst); } while (0)
//#define amd64_padding_size(inst,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_padding((inst),(size)); } while (0)
#define amd64_prolog_size(inst,frame_size,reg_mask,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_prolog((inst),(frame_size),(reg_mask)); } while (0)
#define amd64_epilog_size(inst,reg_mask,size) do { amd64_emit_rex ((inst),(size),0,0,0); x86_epilog((inst),(reg_mask)); } while (0)
#define amd64_xadd_reg_reg_size(inst,dreg,reg,size) do { amd64_emit_rex ((inst),(size),(dreg),0,(reg)); x86_xadd_reg_reg ((inst), (dreg), (reg), (size)); } while (0)
#define amd64_xadd_mem_reg_size(inst,mem,reg,size) do { amd64_emit_rex ((inst),(size),0,0,(reg)); x86_xadd_mem_reg((inst),(mem),((reg)&0x7), (size)); } while (0)
#define amd64_xadd_membase_reg_size(inst,basereg,disp,reg,size) do { amd64_emit_rex ((inst),(size),(reg),0,(basereg)); x86_xadd_membase_reg((inst),((basereg)&0x7),(disp),((reg)&0x7),(size)); } while (0)

708     #define amd64_breakpoint(inst) amd64_breakpoint_size(inst,8)
709     #define amd64_cld(inst) amd64_cld_size(inst,8)
710     #define amd64_stosb(inst) amd64_stosb_size(inst,8)
711     #define amd64_stosl(inst) amd64_stosl_size(inst,8)
712     #define amd64_stosd(inst) amd64_stosd_size(inst,8)
713     #define amd64_movsb(inst) amd64_movsb_size(inst,8)
714     #define amd64_movsl(inst) amd64_movsl_size(inst,8)
715     #define amd64_movsd(inst) amd64_movsd_size(inst,8)
716     #define amd64_prefix(inst,p) amd64_prefix_size(inst,p,8)
717     #define amd64_rdtsc(inst) amd64_rdtsc_size(inst,8)
718     #define amd64_cmpxchg_reg_reg(inst,dreg,reg) amd64_cmpxchg_reg_reg_size(inst,dreg,reg,8)
719     #define amd64_cmpxchg_mem_reg(inst,mem,reg) amd64_cmpxchg_mem_reg_size(inst,mem,reg,8)
720     #define amd64_cmpxchg_membase_reg(inst,basereg,disp,reg) amd64_cmpxchg_membase_reg_size(inst,basereg,disp,reg,8)
721     #define amd64_xchg_reg_reg(inst,dreg,reg,size) amd64_xchg_reg_reg_size(inst,dreg,reg,size)
722     #define amd64_xchg_mem_reg(inst,mem,reg,size) amd64_xchg_mem_reg_size(inst,mem,reg,size)
723     #define amd64_xchg_membase_reg(inst,basereg,disp,reg,size) amd64_xchg_membase_reg_size(inst,basereg,disp,reg,size)
724     #define amd64_xadd_reg_reg(inst,dreg,reg,size) amd64_xadd_reg_reg_size(inst,dreg,reg,size)
725     #define amd64_xadd_mem_reg(inst,mem,reg,size) amd64_xadd_mem_reg_size(inst,mem,reg,size)
726     #define amd64_xadd_membase_reg(inst,basereg,disp,reg,size) amd64_xadd_membase_reg_size(inst,basereg,disp,reg,size)
727     #define amd64_inc_mem(inst,mem) amd64_inc_mem_size(inst,mem,8)
728     #define amd64_inc_membase(inst,basereg,disp) amd64_inc_membase_size(inst,basereg,disp,8)
729     #define amd64_inc_reg(inst,reg) amd64_inc_reg_size(inst,reg,8)
730     #define amd64_dec_mem(inst,mem) amd64_dec_mem_size(inst,mem,8)
731     #define amd64_dec_membase(inst,basereg,disp) amd64_dec_membase_size(inst,basereg,disp,8)
732     #define amd64_dec_reg(inst,reg) amd64_dec_reg_size(inst,reg,8)
733     #define amd64_not_mem(inst,mem) amd64_not_mem_size(inst,mem,8)
734     #define amd64_not_membase(inst,basereg,disp) amd64_not_membase_size(inst,basereg,disp,8)
735     #define amd64_not_reg(inst,reg) amd64_not_reg_size(inst,reg,8)
736     #define amd64_neg_mem(inst,mem) amd64_neg_mem_size(inst,mem,8)
#define amd64_neg_membase(inst,basereg,disp) amd64_neg_membase_size(inst,basereg,disp,8)
#define amd64_neg_reg(inst,reg) amd64_neg_reg_size(inst,reg,8)
#define amd64_nop(inst) amd64_nop_size(inst,8)
//#define amd64_alu_reg_imm(inst,opc,reg,imm) amd64_alu_reg_imm_size(inst,opc,reg,imm,8)
#define amd64_alu_mem_imm(inst,opc,mem,imm) amd64_alu_mem_imm_size(inst,opc,mem,imm,8)
#define amd64_alu_membase_imm(inst,opc,basereg,disp,imm) amd64_alu_membase_imm_size(inst,opc,basereg,disp,imm,8)
#define amd64_alu_mem_reg(inst,opc,mem,reg) amd64_alu_mem_reg_size(inst,opc,mem,reg,8)
#define amd64_alu_membase_reg(inst,opc,basereg,disp,reg) amd64_alu_membase_reg_size(inst,opc,basereg,disp,reg,8)
//#define amd64_alu_reg_reg(inst,opc,dreg,reg) amd64_alu_reg_reg_size(inst,opc,dreg,reg,8)
#define amd64_alu_reg8_reg8(inst,opc,dreg,reg,is_dreg_h,is_reg_h) amd64_alu_reg8_reg8_size(inst,opc,dreg,reg,is_dreg_h,is_reg_h,8)
#define amd64_alu_reg_mem(inst,opc,reg,mem) amd64_alu_reg_mem_size(inst,opc,reg,mem,8)
#define amd64_alu_reg_membase(inst,opc,reg,basereg,disp) amd64_alu_reg_membase_size(inst,opc,reg,basereg,disp,8)
#define amd64_test_reg_imm(inst,reg,imm) amd64_test_reg_imm_size(inst,reg,imm,8)
#define amd64_test_mem_imm(inst,mem,imm) amd64_test_mem_imm_size(inst,mem,imm,8)
#define amd64_test_membase_imm(inst,basereg,disp,imm) amd64_test_membase_imm_size(inst,basereg,disp,imm,8)
#define amd64_test_reg_reg(inst,dreg,reg) amd64_test_reg_reg_size(inst,dreg,reg,8)
#define amd64_test_mem_reg(inst,mem,reg) amd64_test_mem_reg_size(inst,mem,reg,8)
#define amd64_test_membase_reg(inst,basereg,disp,reg) amd64_test_membase_reg_size(inst,basereg,disp,reg,8)
#define amd64_shift_reg_imm(inst,opc,reg,imm) amd64_shift_reg_imm_size(inst,opc,reg,imm,8)
#define amd64_shift_mem_imm(inst,opc,mem,imm) amd64_shift_mem_imm_size(inst,opc,mem,imm,8)
#define amd64_shift_membase_imm(inst,opc,basereg,disp,imm) amd64_shift_membase_imm_size(inst,opc,basereg,disp,imm,8)
#define amd64_shift_reg(inst,opc,reg) amd64_shift_reg_size(inst,opc,reg,8)
#define amd64_shift_mem(inst,opc,mem) amd64_shift_mem_size(inst,opc,mem,8)
#define amd64_shift_membase(inst,opc,basereg,disp) amd64_shift_membase_size(inst,opc,basereg,disp,8)
#define amd64_shrd_reg(inst,dreg,reg) amd64_shrd_reg_size(inst,dreg,reg,8)
#define amd64_shrd_reg_imm(inst,dreg,reg,shamt) amd64_shrd_reg_imm_size(inst,dreg,reg,shamt,8)
#define amd64_shld_reg(inst,dreg,reg) amd64_shld_reg_size(inst,dreg,reg,8)
#define amd64_shld_reg_imm(inst,dreg,reg,shamt) amd64_shld_reg_imm_size(inst,dreg,reg,shamt,8)
#define amd64_mul_reg(inst,reg,is_signed) amd64_mul_reg_size(inst,reg,is_signed,8)
#define amd64_mul_mem(inst,mem,is_signed) amd64_mul_mem_size(inst,mem,is_signed,8)
#define amd64_mul_membase(inst,basereg,disp,is_signed) amd64_mul_membase_size(inst,basereg,disp,is_signed,8)
#define amd64_imul_reg_reg(inst,dreg,reg) amd64_imul_reg_reg_size(inst,dreg,reg,8)
#define amd64_imul_reg_mem(inst,reg,mem) amd64_imul_reg_mem_size(inst,reg,mem,8)
#define amd64_imul_reg_membase(inst,reg,basereg,disp) amd64_imul_reg_membase_size(inst,reg,basereg,disp,8)
#define amd64_imul_reg_reg_imm(inst,dreg,reg,imm) amd64_imul_reg_reg_imm_size(inst,dreg,reg,imm,8)
#define amd64_imul_reg_mem_imm(inst,reg,mem,imm) amd64_imul_reg_mem_imm_size(inst,reg,mem,imm,8)
#define amd64_imul_reg_membase_imm(inst,reg,basereg,disp,imm) amd64_imul_reg_membase_imm_size(inst,reg,basereg,disp,imm,8)
#define amd64_div_reg(inst,reg,is_signed) amd64_div_reg_size(inst,reg,is_signed,8)
#define amd64_div_mem(inst,mem,is_signed) amd64_div_mem_size(inst,mem,is_signed,8)
#define amd64_div_membase(inst,basereg,disp,is_signed) amd64_div_membase_size(inst,basereg,disp,is_signed,8)
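
/* Data movement, register clearing, lea, and widening (sign/zero-extending) moves. */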
#define amd64_mov_mem_reg(inst,mem,reg,size) amd64_mov_mem_reg_size(inst,mem,reg,size)
//#define amd64_mov_regp_reg(inst,regp,reg,size) amd64_mov_regp_reg_size(inst,regp,reg,size)
//#define amd64_mov_membase_reg(inst,basereg,disp,reg,size) amd64_mov_membase_reg_size(inst,basereg,disp,reg,size)
#define amd64_mov_memindex_reg(inst,basereg,disp,indexreg,shift,reg,size) amd64_mov_memindex_reg_size(inst,basereg,disp,indexreg,shift,reg,size)
//#define amd64_mov_reg_reg(inst,dreg,reg,size) amd64_mov_reg_reg_size(inst,dreg,reg,size)
//#define amd64_mov_reg_mem(inst,reg,mem,size) amd64_mov_reg_mem_size(inst,reg,mem,size)
//#define amd64_mov_reg_membase(inst,reg,basereg,disp,size) amd64_mov_reg_membase_size(inst,reg,basereg,disp,size)
#define amd64_mov_reg_memindex(inst,reg,basereg,disp,indexreg,shift,size) amd64_mov_reg_memindex_size(inst,reg,basereg,disp,indexreg,shift,size)
#define amd64_clear_reg(inst,reg) amd64_clear_reg_size(inst,reg,8)
//#define amd64_mov_reg_imm(inst,reg,imm) amd64_mov_reg_imm_size(inst,reg,imm,8)
#define amd64_mov_mem_imm(inst,mem,imm,size) amd64_mov_mem_imm_size(inst,mem,imm,size)
//#define amd64_mov_membase_imm(inst,basereg,disp,imm,size) amd64_mov_membase_imm_size(inst,basereg,disp,imm,size)
#define amd64_mov_memindex_imm(inst,basereg,disp,indexreg,shift,imm,size) amd64_mov_memindex_imm_size(inst,basereg,disp,indexreg,shift,imm,size)
#define amd64_lea_mem(inst,reg,mem) amd64_lea_mem_size(inst,reg,mem,8)
//#define amd64_lea_membase(inst,reg,basereg,disp) amd64_lea_membase_size(inst,reg,basereg,disp,8)
#define amd64_lea_memindex(inst,reg,basereg,disp,indexreg,shift) amd64_lea_memindex_size(inst,reg,basereg,disp,indexreg,shift,8)
#define amd64_widen_reg(inst,dreg,reg,is_signed,is_half) amd64_widen_reg_size(inst,dreg,reg,is_signed,is_half,8)
#define amd64_widen_mem(inst,dreg,mem,is_signed,is_half) amd64_widen_mem_size(inst,dreg,mem,is_signed,is_half,8)
#define amd64_widen_membase(inst,dreg,basereg,disp,is_signed,is_half) amd64_widen_membase_size(inst,dreg,basereg,disp,is_signed,is_half,8)
#define amd64_widen_memindex(inst,dreg,basereg,disp,indexreg,shift,is_signed,is_half) amd64_widen_memindex_size(inst,dreg,basereg,disp,indexreg,shift,is_signed,is_half,8)
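
/* cdq/cqo, fwait, and x87 floating-point stack instructions. */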
#define amd64_cdq(inst) amd64_cdq_size(inst,8)
#define amd64_wait(inst) amd64_wait_size(inst,8)
#define amd64_fp_op_mem(inst,opc,mem,is_double) amd64_fp_op_mem_size(inst,opc,mem,is_double,8)
#define amd64_fp_op_membase(inst,opc,basereg,disp,is_double) amd64_fp_op_membase_size(inst,opc,basereg,disp,is_double,8)
#define amd64_fp_op(inst,opc,index) amd64_fp_op_size(inst,opc,index,8)
#define amd64_fp_op_reg(inst,opc,index,pop_stack) amd64_fp_op_reg_size(inst,opc,index,pop_stack,8)
#define amd64_fp_int_op_membase(inst,opc,basereg,disp,is_int) amd64_fp_int_op_membase_size(inst,opc,basereg,disp,is_int,8)
#define amd64_fstp(inst,index) amd64_fstp_size(inst,index,8)
#define amd64_fcompp(inst) amd64_fcompp_size(inst,8)
#define amd64_fucompp(inst) amd64_fucompp_size(inst,8)
#define amd64_fnstsw(inst) amd64_fnstsw_size(inst,8)
#define amd64_fnstcw(inst,mem) amd64_fnstcw_size(inst,mem,8)
#define amd64_fnstcw_membase(inst,basereg,disp) amd64_fnstcw_membase_size(inst,basereg,disp,8)
#define amd64_fldcw(inst,mem) amd64_fldcw_size(inst,mem,8)
#define amd64_fldcw_membase(inst,basereg,disp) amd64_fldcw_membase_size(inst,basereg,disp,8)
#define amd64_fchs(inst) amd64_fchs_size(inst,8)
#define amd64_frem(inst) amd64_frem_size(inst,8)
#define amd64_fxch(inst,index) amd64_fxch_size(inst,index,8)
#define amd64_fcomi(inst,index) amd64_fcomi_size(inst,index,8)
#define amd64_fcomip(inst,index) amd64_fcomip_size(inst,index,8)
#define amd64_fucomi(inst,index) amd64_fucomi_size(inst,index,8)
#define amd64_fucomip(inst,index) amd64_fucomip_size(inst,index,8)
#define amd64_fld(inst,mem,is_double) amd64_fld_size(inst,mem,is_double,8)
#define amd64_fld_membase(inst,basereg,disp,is_double) amd64_fld_membase_size(inst,basereg,disp,is_double,8)
#define amd64_fld80_mem(inst,mem) amd64_fld80_mem_size(inst,mem,8)
#define amd64_fld80_membase(inst,basereg,disp) amd64_fld80_membase_size(inst,basereg,disp,8)
#define amd64_fild(inst,mem,is_long) amd64_fild_size(inst,mem,is_long,8)
#define amd64_fild_membase(inst,basereg,disp,is_long) amd64_fild_membase_size(inst,basereg,disp,is_long,8)
#define amd64_fld_reg(inst,index) amd64_fld_reg_size(inst,index,8)
#define amd64_fldz(inst) amd64_fldz_size(inst,8)
#define amd64_fld1(inst) amd64_fld1_size(inst,8)
#define amd64_fldpi(inst) amd64_fldpi_size(inst,8)
#define amd64_fst(inst,mem,is_double,pop_stack) amd64_fst_size(inst,mem,is_double,pop_stack,8)
#define amd64_fst_membase(inst,basereg,disp,is_double,pop_stack) amd64_fst_membase_size(inst,basereg,disp,is_double,pop_stack,8)
#define amd64_fst80_mem(inst,mem) amd64_fst80_mem_size(inst,mem,8)
#define amd64_fst80_membase(inst,basereg,disp) amd64_fst80_membase_size(inst,basereg,disp,8)
#define amd64_fist_pop(inst,mem,is_long) amd64_fist_pop_size(inst,mem,is_long,8)
#define amd64_fist_pop_membase(inst,basereg,disp,is_long) amd64_fist_pop_membase_size(inst,basereg,disp,is_long,8)
#define amd64_fstsw(inst) amd64_fstsw_size(inst,8)
#define amd64_fist_membase(inst,basereg,disp,is_int) amd64_fist_membase_size(inst,basereg,disp,is_int,8)
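
/* Stack, loop, jump, branch, setcc, call, return, and conditional-move instructions. */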
//#define amd64_push_reg(inst,reg) amd64_push_reg_size(inst,reg,8)
#define amd64_push_regp(inst,reg) amd64_push_regp_size(inst,reg,8)
#define amd64_push_mem(inst,mem) amd64_push_mem_size(inst,mem,8)
//#define amd64_push_membase(inst,basereg,disp) amd64_push_membase_size(inst,basereg,disp,8)
#define amd64_push_memindex(inst,basereg,disp,indexreg,shift) amd64_push_memindex_size(inst,basereg,disp,indexreg,shift,8)
#define amd64_push_imm(inst,imm) amd64_push_imm_size(inst,imm,8)
//#define amd64_pop_reg(inst,reg) amd64_pop_reg_size(inst,reg,8)
#define amd64_pop_mem(inst,mem) amd64_pop_mem_size(inst,mem,8)
#define amd64_pop_membase(inst,basereg,disp) amd64_pop_membase_size(inst,basereg,disp,8)
#define amd64_pushad(inst) amd64_pushad_size(inst,8)
#define amd64_pushfd(inst) amd64_pushfd_size(inst,8)
#define amd64_popad(inst) amd64_popad_size(inst,8)
#define amd64_popfd(inst) amd64_popfd_size(inst,8)
#define amd64_loop(inst,imm) amd64_loop_size(inst,imm,8)
#define amd64_loope(inst,imm) amd64_loope_size(inst,imm,8)
#define amd64_loopne(inst,imm) amd64_loopne_size(inst,imm,8)
#define amd64_jump32(inst,imm) amd64_jump32_size(inst,imm,8)
#define amd64_jump8(inst,imm) amd64_jump8_size(inst,imm,8)
#define amd64_jump_reg(inst,reg) amd64_jump_reg_size(inst,reg,8)
#define amd64_jump_mem(inst,mem) amd64_jump_mem_size(inst,mem,8)
#define amd64_jump_membase(inst,basereg,disp) amd64_jump_membase_size(inst,basereg,disp,8)
#define amd64_jump_code(inst,target) amd64_jump_code_size(inst,target,8)
#define amd64_jump_disp(inst,disp) amd64_jump_disp_size(inst,disp,8)
#define amd64_branch8(inst,cond,imm,is_signed) amd64_branch8_size(inst,cond,imm,is_signed,8)
#define amd64_branch32(inst,cond,imm,is_signed) amd64_branch32_size(inst,cond,imm,is_signed,8)
#define amd64_branch(inst,cond,target,is_signed) amd64_branch_size(inst,cond,target,is_signed,8)
#define amd64_branch_disp(inst,cond,disp,is_signed) amd64_branch_disp_size(inst,cond,disp,is_signed,8)
#define amd64_set_reg(inst,cond,reg,is_signed) amd64_set_reg_size(inst,cond,reg,is_signed,8)
#define amd64_set_mem(inst,cond,mem,is_signed) amd64_set_mem_size(inst,cond,mem,is_signed,8)
#define amd64_set_membase(inst,cond,basereg,disp,is_signed) amd64_set_membase_size(inst,cond,basereg,disp,is_signed,8)
#define amd64_call_imm(inst,disp) amd64_call_imm_size(inst,disp,8)
//#define amd64_call_reg(inst,reg) amd64_call_reg_size(inst,reg,8)
#define amd64_call_mem(inst,mem) amd64_call_mem_size(inst,mem,8)
#define amd64_call_membase(inst,basereg,disp) amd64_call_membase_size(inst,basereg,disp,8)
#define amd64_call_code(inst,target) amd64_call_code_size(inst,target,8)
//#define amd64_ret(inst) amd64_ret_size(inst,8)
#define amd64_ret_imm(inst,imm) amd64_ret_imm_size(inst,imm,8)
#define amd64_cmov_reg(inst,cond,is_signed,dreg,reg) amd64_cmov_reg_size(inst,cond,is_signed,dreg,reg,8)
#define amd64_cmov_mem(inst,cond,is_signed,reg,mem) amd64_cmov_mem_size(inst,cond,is_signed,reg,mem,8)
#define amd64_cmov_membase(inst,cond,is_signed,reg,basereg,disp) amd64_cmov_membase_size(inst,cond,is_signed,reg,basereg,disp,8)
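
/* Frame setup, sahf, x87 transcendental helpers, nop padding, and prolog/epilog emission. */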
#define amd64_enter(inst,framesize) amd64_enter_size(inst,framesize)
//#define amd64_leave(inst) amd64_leave_size(inst,8)
#define amd64_sahf(inst) amd64_sahf_size(inst,8)
#define amd64_fsin(inst) amd64_fsin_size(inst,8)
#define amd64_fcos(inst) amd64_fcos_size(inst,8)
#define amd64_fabs(inst) amd64_fabs_size(inst,8)
#define amd64_ftst(inst) amd64_ftst_size(inst,8)
#define amd64_fxam(inst) amd64_fxam_size(inst,8)
#define amd64_fpatan(inst) amd64_fpatan_size(inst,8)
#define amd64_fprem(inst) amd64_fprem_size(inst,8)
#define amd64_fprem1(inst) amd64_fprem1_size(inst,8)
#define amd64_frndint(inst) amd64_frndint_size(inst,8)
#define amd64_fsqrt(inst) amd64_fsqrt_size(inst,8)
#define amd64_fptan(inst) amd64_fptan_size(inst,8)
#define amd64_padding(inst,size) amd64_padding_size(inst,size)
#define amd64_prolog(inst,frame,reg_mask) amd64_prolog_size(inst,frame,reg_mask,8)
#define amd64_epilog(inst,reg_mask) amd64_epilog_size(inst,reg_mask,8)
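
/*
 * A minimal usage sketch, assuming the *_size emitters defined earlier in
 * this header and the X86_ADD opcode constant from the companion
 * x86-codegen.h.  The unsized macros above forward to their *_size
 * counterparts with an operand size of 8 bytes, making 64-bit operation
 * the default.  Emitting "long add2(long a, long b) { return a + b; }"
 * for the System V AMD64 ABI (arguments in RDI/RSI, result in RAX):
 *
 *   unsigned char buf[32];
 *   unsigned char *code = buf;
 *   amd64_mov_reg_reg_size(code, AMD64_RAX, AMD64_RDI, 8);          // rax = a
 *   amd64_alu_reg_reg_size(code, X86_ADD, AMD64_RAX, AMD64_RSI, 8); // rax += b
 *   amd64_ret_size(code, 8);                                        // return rax
 *
 * Each macro writes its encoding at *code and advances the pointer, so
 * (code - buf) is the number of bytes emitted; the buffer must be made
 * executable (e.g. mmap with PROT_EXEC) before it can be called.  The
 * entries left commented out above appear to have dedicated amd64
 * definitions earlier in this file.
 */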

#endif // AMD64_H
