/[dynamips]/upstream/dynamips-0.2.8-RC1/x86-codegen.h
This is repository of my old source code which isn't updated any more. Go to git.rot13.org for current projects!
ViewVC logotype

Contents of /upstream/dynamips-0.2.8-RC1/x86-codegen.h

Parent Directory Parent Directory | Revision Log Revision Log


Revision 11 - (show annotations)
Sat Oct 6 16:33:40 2007 UTC (16 years, 5 months ago) by dpavlin
File MIME type: text/plain
File size: 47915 byte(s)
dynamips-0.2.8-RC1

1 /*
2 * x86-codegen.h: Macros for generating x86 code
3 *
4 * Authors:
5 * Paolo Molaro (lupus@ximian.com)
6 * Intel Corporation (ORP Project)
7 * Sergey Chaban (serge@wildwestsoftware.com)
8 * Dietmar Maurer (dietmar@ximian.com)
9 * Patrik Torstensson
10 *
11 * Copyright (C) 2000 Intel Corporation. All rights reserved.
12 * Copyright (C) 2001, 2002 Ximian, Inc.
13 */
14
15 #ifndef X86_H
16 #define X86_H
17 #include <assert.h>
/*
// x86 register numbers: the 3-bit encodings used in ModR/M reg/rm fields
// and register-encoded opcodes
*/
typedef enum {
	X86_EAX = 0,
	X86_ECX = 1,
	X86_EDX = 2,
	X86_EBX = 3,
	X86_ESP = 4,
	X86_EBP = 5,
	X86_ESI = 6,
	X86_EDI = 7,
	X86_NREG	/* number of general-purpose registers */
} X86_Reg_No;
/*
// opcodes for alu instructions: the value serves both as the ModR/M
// reg-field extension for the 0x80/0x81/0x83 immediate forms and as the
// opcode row (value << 3) for the register/memory forms
*/
typedef enum {
	X86_ADD = 0,
	X86_OR  = 1,
	X86_ADC = 2,
	X86_SBB = 3,
	X86_AND = 4,
	X86_SUB = 5,
	X86_XOR = 6,
	X86_CMP = 7,
	X86_NALU	/* number of ALU opcodes */
} X86_ALU_Opcode;
/*
// opcodes for shift instructions: the value is the ModR/M reg-field
// extension for the 0xc1/0xd1/0xd3 shift group
*/
typedef enum {
	/* NOTE(review): X86_SHLD (= 0) and X86_SHLR (= 1) take the same
	   values as X86_ROL/X86_ROR below, yet double-shifts are encoded
	   elsewhere (x86_shld_reg/x86_shrd_reg) -- confirm these two are
	   actually unused before relying on them */
	X86_SHLD,
	X86_SHLR,
	X86_ROL = 0,
	X86_ROR = 1,
	X86_RCL = 2,
	X86_RCR = 3,
	X86_SHL = 4,
	X86_SHR = 5,
	X86_SAR = 7,
	X86_NSHIFT = 8
} X86_Shift_Opcode;
/*
// opcodes for floating-point instructions: ModR/M reg-field extension
// for the 0xd8/0xdc arithmetic groups (see x86_fp_op_mem below)
*/
typedef enum {
	X86_FADD  = 0,
	X86_FMUL  = 1,
	X86_FCOM  = 2,
	X86_FCOMP = 3,
	X86_FSUB  = 4,
	X86_FSUBR = 5,
	X86_FDIV  = 6,
	X86_FDIVR = 7,
	X86_NFP   = 8	/* number of FP opcodes */
} X86_FP_Opcode;
/*
// integer condition codes: indexes into x86_cc_unsigned_map /
// x86_cc_signed_map (the signedness of the compare picks the map,
// which yields the actual jcc opcode byte)
*/
typedef enum {
	X86_CC_EQ = 0, X86_CC_E = 0, X86_CC_Z = 0,
	X86_CC_NE = 1, X86_CC_NZ = 1,
	X86_CC_LT = 2, X86_CC_B = 2, X86_CC_C = 2, X86_CC_NAE = 2,
	X86_CC_LE = 3, X86_CC_BE = 3, X86_CC_NA = 3,
	X86_CC_GT = 4, X86_CC_A = 4, X86_CC_NBE = 4,
	X86_CC_GE = 5, X86_CC_AE = 5, X86_CC_NB = 5, X86_CC_NC = 5,
	X86_CC_LZ = 6, X86_CC_S = 6,
	X86_CC_GEZ = 7, X86_CC_NS = 7,
	X86_CC_P = 8, X86_CC_PE = 8,
	X86_CC_NP = 9, X86_CC_PO = 9,
	X86_CC_O = 10,
	X86_CC_NO = 11,
	X86_NCC	/* number of condition codes */
} X86_CC;
93
/* FP status word: x87 condition-code bits */
enum {
	X86_FP_C0 = 0x100,
	X86_FP_C1 = 0x200,
	X86_FP_C2 = 0x400,
	X86_FP_C3 = 0x4000,	/* C3 is bit 14, not adjacent to C2 */
	X86_FP_CC_MASK = 0x4500
};

/* FP control word: exception masks plus precision and rounding control */
enum {
	X86_FPCW_INVOPEX_MASK = 0x1,
	X86_FPCW_DENOPEX_MASK = 0x2,
	X86_FPCW_ZERODIV_MASK = 0x4,
	X86_FPCW_OVFEX_MASK = 0x8,
	X86_FPCW_UNDFEX_MASK = 0x10,
	X86_FPCW_PRECEX_MASK = 0x20,
	X86_FPCW_PRECC_MASK = 0x300,
	X86_FPCW_ROUNDC_MASK = 0xc00,

	/* values for precision control */
	X86_FPCW_PREC_SINGLE = 0,
	X86_FPCW_PREC_DOUBLE = 0x200,
	X86_FPCW_PREC_EXTENDED = 0x300,

	/* values for rounding control */
	X86_FPCW_ROUND_NEAREST = 0,
	X86_FPCW_ROUND_DOWN = 0x400,
	X86_FPCW_ROUND_UP = 0x800,
	X86_FPCW_ROUND_TOZERO = 0xc00
};
125
/*
// prefix code: raw instruction prefix bytes, emitted via x86_prefix().
// Note the deliberate aliases: REPZ == REP, UNLIKELY == CS hint,
// LIKELY == DS hint (branch hints reuse the segment-override bytes).
*/
typedef enum {
	X86_LOCK_PREFIX = 0xF0,
	X86_REPNZ_PREFIX = 0xF2,
	X86_REPZ_PREFIX = 0xF3,
	X86_REP_PREFIX = 0xF3,
	X86_CS_PREFIX = 0x2E,
	X86_SS_PREFIX = 0x36,
	X86_DS_PREFIX = 0x3E,
	X86_ES_PREFIX = 0x26,
	X86_FS_PREFIX = 0x64,
	X86_GS_PREFIX = 0x65,
	X86_UNLIKELY_PREFIX = 0x2E,
	X86_LIKELY_PREFIX = 0x3E,
	X86_OPERAND_PREFIX = 0x66,
	X86_ADDRESS_PREFIX = 0x67
} X86_Prefix;
145
/* short-form (rel8) conditional-jump opcodes, indexed by X86_CC,
 * for unsigned comparisons */
static const unsigned char
x86_cc_unsigned_map [X86_NCC] = {
	0x74, /* eq  */
	0x75, /* ne  */
	0x72, /* lt  */
	0x76, /* le  */
	0x77, /* gt  */
	0x73, /* ge  */
	0x78, /* lz  */
	0x79, /* gez */
	0x7a, /* p   */
	0x7b, /* np  */
	0x70, /* o   */
	0x71, /* no  */
};

/* short-form (rel8) conditional-jump opcodes, indexed by X86_CC,
 * for signed comparisons (differs only in lt/le/gt/ge) */
static const unsigned char
x86_cc_signed_map [X86_NCC] = {
	0x74, /* eq  */
	0x75, /* ne  */
	0x7c, /* lt  */
	0x7e, /* le  */
	0x7f, /* gt  */
	0x7d, /* ge  */
	0x78, /* lz  */
	0x79, /* gez */
	0x7a, /* p   */
	0x7b, /* np  */
	0x70, /* o   */
	0x71, /* no  */
};
177
/* scratch union used to split a 32-bit immediate into individual bytes */
typedef union {
	int val;
	unsigned char b [4];
} x86_imm_buf;

/* pseudo register number: memindex operand with no base register */
#define X86_NOBASEREG (-1)

/*
// bitvector mask for callee-saved registers
*/
#define X86_ESI_MASK (1<<X86_ESI)
#define X86_EDI_MASK (1<<X86_EDI)
#define X86_EBX_MASK (1<<X86_EBX)
#define X86_EBP_MASK (1<<X86_EBP)

#define X86_CALLEE_REGS ((1<<X86_EAX) | (1<<X86_ECX) | (1<<X86_EDX))
#define X86_CALLER_REGS ((1<<X86_EBX) | (1<<X86_EBP) | (1<<X86_ESI) | (1<<X86_EDI))
#define X86_BYTE_REGS ((1<<X86_EAX) | (1<<X86_ECX) | (1<<X86_EDX) | (1<<X86_EBX))

/* NOTE(review): the mask names are inverted relative to the usual ABI
 * terminology (EAX/ECX/EDX are the caller-saved scratch set), and the
 * trailing comments below contradict the masks the macros actually test
 * (IS_SCRATCH tests CALLER_REGS = EBX/EBP/ESI/EDI). Confirm against
 * callers before relying on either macro's name or comment. */
#define X86_IS_SCRATCH(reg) (X86_CALLER_REGS & (1 << (reg))) /* X86_EAX, X86_ECX, or X86_EDX */
#define X86_IS_CALLEE(reg) (X86_CALLEE_REGS & (1 << (reg))) /* X86_ESI, X86_EDI, X86_EBX, or X86_EBP */

/* true for EAX..EBX, the only registers whose low 8 bits are addressable */
#define X86_IS_BYTE_REG(reg) ((reg) < 4)
201
202 /*
203 // Frame structure:
204 //
205 // +--------------------------------+
206 // | in_arg[0] = var[0] |
207 // | in_arg[1] = var[1] |
208 // | . . . |
209 // | in_arg[n_arg-1] = var[n_arg-1] |
210 // +--------------------------------+
211 // | return IP |
212 // +--------------------------------+
213 // | saved EBP | <-- frame pointer (EBP)
214 // +--------------------------------+
215 // | ... | n_extra
216 // +--------------------------------+
217 // | var[n_arg] |
218 // | var[n_arg+1] | local variables area
219 // | . . . |
220 // | var[n_var-1] |
221 // +--------------------------------+
222 // | |
223 // | |
224 // | spill area | area for spilling mimic stack
225 // | |
226 // +--------------------------------|
227 // | ebx |
228 // | ebp [ESP_Frame only] |
229 // | esi | 0..3 callee-saved regs
230 // | edi | <-- stack pointer (ESP)
231 // +--------------------------------+
232 // | stk0 |
233 // | stk1 | operand stack area/
234 // | . . . | out args
235 // | stkn-1 |
236 // +--------------------------------|
237 //
238 //
239 */
240
241
/*
 * useful building blocks
 */
/* decompose a ModR/M byte into its mod / reg / rm fields */
#define x86_modrm_mod(modrm) ((modrm) >> 6)
#define x86_modrm_reg(modrm) (((modrm) >> 3) & 0x7)
#define x86_modrm_rm(modrm) ((modrm) & 0x7)

/* build a ModR/M (or SIB) byte: mod<<6 | reg<<3 | rm */
#define x86_address_byte(inst,m,o,r) do { *(inst)++ = ((((m)&0x03)<<6)|(((o)&0x07)<<3)|(((r)&0x07))); } while (0)
/* emit a 32-bit immediate byte by byte.
 * NOTE(review): byte order comes from the host via the union, so this
 * assumes a little-endian build host -- confirm if cross-compiling. */
#define x86_imm_emit32(inst,imm) \
	do { \
			x86_imm_buf imb; imb.val = (int) (imm); \
			*(inst)++ = imb.b [0]; \
			*(inst)++ = imb.b [1]; \
			*(inst)++ = imb.b [2]; \
			*(inst)++ = imb.b [3]; \
	} while (0)
/* Emit a 16-bit immediate, little endian.
 * Rewritten from `*(short*)(inst) = (imm)` to two byte stores: the old
 * form performed a potentially unaligned store through an incompatible
 * pointer type (undefined behavior under C strict aliasing/alignment
 * rules); the byte stores produce the identical output on the x86
 * target regardless of host. */
#define x86_imm_emit16(inst,imm) \
	do { \
		unsigned short _x86_imm16_tmp = (unsigned short)(imm); \
		*(inst)++ = (unsigned char)(_x86_imm16_tmp & 0xff); \
		*(inst)++ = (unsigned char)(_x86_imm16_tmp >> 8); \
	} while (0)
/* emit an 8-bit immediate */
#define x86_imm_emit8(inst,imm) do { *(inst) = (unsigned char)((imm) & 0xff); ++(inst); } while (0)
/* true if imm fits in a sign-extended 8-bit field */
#define x86_is_imm8(imm) (((int)(imm) >= -128 && (int)(imm) <= 127))
/* NOTE(review): this accepts [-65536, 65535], wider than any true 16-bit
 * immediate range (signed or unsigned) -- verify callers before tightening */
#define x86_is_imm16(imm) (((int)(imm) >= -(1<<16) && (int)(imm) <= ((1<<16)-1)))
262
/* register-direct operand: mod = 3 */
#define x86_reg_emit(inst,r,regno) do { x86_address_byte ((inst), 3, (r), (regno)); } while (0)
/* 8-bit register operand; is_rh / is_rnoh select the high byte
 * (AH/CH/DH/BH are encoded as the low-byte register number | 4) */
#define x86_reg8_emit(inst,r,regno,is_rh,is_rnoh) do {x86_address_byte ((inst), 3, (is_rh)?((r)|4):(r), (is_rnoh)?((regno)|4):(regno));} while (0)
/* register-indirect operand [reg]: mod = 0 */
#define x86_regp_emit(inst,r,regno) do { x86_address_byte ((inst), 0, (r), (regno)); } while (0)
/* absolute 32-bit address operand: mod = 0, rm = 5 means disp32 */
#define x86_mem_emit(inst,r,disp) do { x86_address_byte ((inst), 0, (r), 5); x86_imm_emit32((inst), (disp)); } while (0)
267
/*
 * Emit the ModR/M (+ SIB, + displacement) bytes for a [basereg + disp]
 * operand. r is the ModR/M reg field (register number or opcode
 * extension). Shortest displacement encoding is chosen automatically.
 */
#define x86_membase_emit(inst,r,basereg,disp) do {\
	if ((basereg) == X86_ESP) { \
		/* rm = ESP selects a SIB byte, so ESP-based operands need one; */ \
		/* the break statements exit the enclosing do/while early */ \
		if ((disp) == 0) { \
			x86_address_byte ((inst), 0, (r), X86_ESP); \
			x86_address_byte ((inst), 0, X86_ESP, X86_ESP); \
		} else if (x86_is_imm8((disp))) { \
			x86_address_byte ((inst), 1, (r), X86_ESP); \
			x86_address_byte ((inst), 0, X86_ESP, X86_ESP); \
			x86_imm_emit8 ((inst), (disp)); \
		} else { \
			x86_address_byte ((inst), 2, (r), X86_ESP); \
			x86_address_byte ((inst), 0, X86_ESP, X86_ESP); \
			x86_imm_emit32 ((inst), (disp)); \
		} \
		break; \
	} \
	/* mod = 0 with rm = EBP means absolute disp32, so EBP always */ \
	/* needs an explicit displacement */ \
	if ((disp) == 0 && (basereg) != X86_EBP) { \
		x86_address_byte ((inst), 0, (r), (basereg)); \
		break; \
	} \
	if (x86_is_imm8((disp))) { \
		x86_address_byte ((inst), 1, (r), (basereg)); \
		x86_imm_emit8 ((inst), (disp)); \
	} else { \
		x86_address_byte ((inst), 2, (r), (basereg)); \
		x86_imm_emit32 ((inst), (disp)); \
	} \
	} while (0)
296
/*
 * Emit the ModR/M + SIB (+ displacement) bytes for a
 * [basereg + indexreg<<shift + disp] operand. r is the ModR/M reg
 * field. basereg == X86_NOBASEREG encodes disp32 + scaled index only
 * (SIB base = 5 with mod = 0).
 */
#define x86_memindex_emit(inst,r,basereg,disp,indexreg,shift) \
	do { \
		if ((basereg) == X86_NOBASEREG) { \
			x86_address_byte ((inst), 0, (r), 4); \
			x86_address_byte ((inst), (shift), (indexreg), 5); \
			x86_imm_emit32 ((inst), (disp)); \
		} else if ((disp) == 0 && (basereg) != X86_EBP) { \
			x86_address_byte ((inst), 0, (r), 4); \
			x86_address_byte ((inst), (shift), (indexreg), (basereg)); \
		} else if (x86_is_imm8((disp))) { \
			x86_address_byte ((inst), 1, (r), 4); \
			x86_address_byte ((inst), (shift), (indexreg), (basereg)); \
			x86_imm_emit8 ((inst), (disp)); \
		} else { \
			x86_address_byte ((inst), 2, (r), 4); \
			/* bug fix: the SIB base field must be basereg here, not the */ \
			/* literal 5 -- base = 5 with mod = 2 encodes EBP + disp32, */ \
			/* silently dropping the requested base register */ \
			x86_address_byte ((inst), (shift), (indexreg), (basereg)); \
			x86_imm_emit32 ((inst), (disp)); \
		} \
	} while (0)
316
317 /*
318 * target is the position in the code where to jump to:
319 * target = code;
320 * .. output loop code...
321 * x86_mov_reg_imm (code, X86_EAX, 0);
322 * loop = code;
323 * x86_loop (code, -1);
324 * ... finish method
325 *
326 * patch displacement
327 * x86_patch (loop, target);
328 *
329 * ins should point at the start of the instruction that encodes a target.
330 * the instruction is inspected for validity and the correct displacement
331 * is inserted.
332 */
/*
 * Back-patch the branch/call instruction at ins so that it transfers
 * to target. The opcode is inspected to find the displacement size:
 * size != 0 selects a 32-bit displacement; otherwise the displacement
 * must fit in 8 bits (asserted).
 */
#define x86_patch(ins,target) \
	do { \
		unsigned char* pos = (ins) + 1; \
		int disp, size = 0; \
		switch (*(unsigned char*)(ins)) { \
		case 0xe8: case 0xe9: ++size; break; /* call, jump32 */ \
		case 0x0f: if (!(*pos >= 0x70 && *pos <= 0x8f)) assert (0); \
		   ++size; ++pos; break; /* prefix for 32-bit disp */ \
		case 0xe0: case 0xe1: case 0xe2: /* loop */ \
		case 0xeb: /* jump8 */ \
		/* conditional jump opcodes */ \
		case 0x70: case 0x71: case 0x72: case 0x73: \
		case 0x74: case 0x75: case 0x76: case 0x77: \
		case 0x78: case 0x79: case 0x7a: case 0x7b: \
		case 0x7c: case 0x7d: case 0x7e: case 0x7f: \
			break; \
		default: assert (0); \
		} \
		/* displacement is relative to the end of the instruction, */ \
		/* hence the -4 / -1 corrections below */ \
		disp = (target) - pos; \
		if (size) x86_imm_emit32 (pos, disp - 4); \
		else if (x86_is_imm8 (disp - 1)) x86_imm_emit8 (pos, disp - 1); \
		else assert (0); \
	} while (0)
356
/* int3 software breakpoint */
#define x86_breakpoint(inst) \
	do { \
		*(inst)++ = 0xcc; \
	} while (0)

/* single-byte flag / string instructions */
#define x86_clc(inst) do { *(inst)++ =(unsigned char)0xf8; } while (0)
#define x86_cld(inst) do { *(inst)++ =(unsigned char)0xfc; } while (0)
#define x86_stosb(inst) do { *(inst)++ =(unsigned char)0xaa; } while (0)
#define x86_stosl(inst) do { *(inst)++ =(unsigned char)0xab; } while (0)
#define x86_stosd(inst) x86_stosl((inst))
#define x86_movsb(inst) do { *(inst)++ =(unsigned char)0xa4; } while (0)
#define x86_movsl(inst) do { *(inst)++ =(unsigned char)0xa5; } while (0)
#define x86_movsd(inst) x86_movsl((inst))

/* emit a raw prefix byte (see X86_Prefix) */
#define x86_prefix(inst,p) do { *(inst)++ =(unsigned char) (p); } while (0)
372
/* bswap reg: 0x0f 0xc8+reg (byte-swap a 32-bit register) */
#define x86_bswap(inst,reg) \
	do { \
		*(inst)++ = 0x0f; \
		*(inst)++ = (unsigned char)0xc8 + (reg); \
	} while (0)

/* rdtsc: read time-stamp counter into EDX:EAX */
#define x86_rdtsc(inst) \
	do { \
		*(inst)++ = 0x0f; \
		*(inst)++ = 0x31; \
	} while (0)
384
/* cmpxchg (0x0f 0xb1): compare EAX with the destination, exchanging
 * with reg on match; destination variants: register, absolute mem,
 * base+disp */
#define x86_cmpxchg_reg_reg(inst,dreg,reg) \
	do { \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0xb1; \
		x86_reg_emit ((inst), (reg), (dreg)); \
	} while (0)
	
#define x86_cmpxchg_mem_reg(inst,mem,reg) \
	do { \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0xb1; \
		x86_mem_emit ((inst), (reg), (mem)); \
	} while (0)
	
#define x86_cmpxchg_membase_reg(inst,basereg,disp,reg) \
	do { \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0xb1; \
		x86_membase_emit ((inst), (reg), (basereg), (disp)); \
	} while (0)
405
/* xchg: size == 1 selects the byte form (0x86), otherwise 0x87;
 * destination variants: register, absolute mem, base+disp */
#define x86_xchg_reg_reg(inst,dreg,reg,size) \
	do { \
		if ((size) == 1) \
			*(inst)++ = (unsigned char)0x86; \
		else \
			*(inst)++ = (unsigned char)0x87; \
		x86_reg_emit ((inst), (reg), (dreg)); \
	} while (0)

#define x86_xchg_mem_reg(inst,mem,reg,size) \
	do { \
		if ((size) == 1) \
			*(inst)++ = (unsigned char)0x86; \
		else \
			*(inst)++ = (unsigned char)0x87; \
		x86_mem_emit ((inst), (reg), (mem)); \
	} while (0)

#define x86_xchg_membase_reg(inst,basereg,disp,reg,size) \
	do { \
		if ((size) == 1) \
			*(inst)++ = (unsigned char)0x86; \
		else \
			*(inst)++ = (unsigned char)0x87; \
		x86_membase_emit ((inst), (reg), (basereg), (disp)); \
	} while (0)
432
/* xadd (0x0f 0xc0/0xc1): exchange-and-add; size == 1 selects the byte
 * form; destination variants: register, absolute mem, base+disp */
#define x86_xadd_reg_reg(inst,dreg,reg,size) \
	do { \
		*(inst)++ = (unsigned char)0x0F; \
		if ((size) == 1) \
			*(inst)++ = (unsigned char)0xC0; \
		else \
			*(inst)++ = (unsigned char)0xC1; \
		x86_reg_emit ((inst), (reg), (dreg)); \
	} while (0)

#define x86_xadd_mem_reg(inst,mem,reg,size) \
	do { \
		*(inst)++ = (unsigned char)0x0F; \
		if ((size) == 1) \
			*(inst)++ = (unsigned char)0xC0; \
		else \
			*(inst)++ = (unsigned char)0xC1; \
		x86_mem_emit ((inst), (reg), (mem)); \
	} while (0)

#define x86_xadd_membase_reg(inst,basereg,disp,reg,size) \
	do { \
		*(inst)++ = (unsigned char)0x0F; \
		if ((size) == 1) \
			*(inst)++ = (unsigned char)0xC0; \
		else \
			*(inst)++ = (unsigned char)0xC1; \
		x86_membase_emit ((inst), (reg), (basereg), (disp)); \
	} while (0)
462
/* inc: 0xff with extension /0 for memory forms, 0x40+reg short form */
#define x86_inc_mem(inst,mem) \
	do { \
		*(inst)++ = (unsigned char)0xff; \
		x86_mem_emit ((inst), 0, (mem)); \
	} while (0)

#define x86_inc_membase(inst,basereg,disp) \
	do { \
		*(inst)++ = (unsigned char)0xff; \
		x86_membase_emit ((inst), 0, (basereg), (disp)); \
	} while (0)

#define x86_inc_reg(inst,reg) do { *(inst)++ = (unsigned char)0x40 + (reg); } while (0)

/* dec: 0xff with extension /1 for memory forms, 0x48+reg short form */
#define x86_dec_mem(inst,mem) \
	do { \
		*(inst)++ = (unsigned char)0xff; \
		x86_mem_emit ((inst), 1, (mem)); \
	} while (0)

#define x86_dec_membase(inst,basereg,disp) \
	do { \
		*(inst)++ = (unsigned char)0xff; \
		x86_membase_emit ((inst), 1, (basereg), (disp)); \
	} while (0)

#define x86_dec_reg(inst,reg) do { *(inst)++ = (unsigned char)0x48 + (reg); } while (0)
490
/* not: 0xf7 with extension /2 (bitwise complement) */
#define x86_not_mem(inst,mem) \
	do { \
		*(inst)++ = (unsigned char)0xf7; \
		x86_mem_emit ((inst), 2, (mem)); \
	} while (0)

#define x86_not_membase(inst,basereg,disp) \
	do { \
		*(inst)++ = (unsigned char)0xf7; \
		x86_membase_emit ((inst), 2, (basereg), (disp)); \
	} while (0)

#define x86_not_reg(inst,reg) \
	do { \
		*(inst)++ = (unsigned char)0xf7; \
		x86_reg_emit ((inst), 2, (reg)); \
	} while (0)

/* neg: 0xf7 with extension /3 (two's complement negate) */
#define x86_neg_mem(inst,mem) \
	do { \
		*(inst)++ = (unsigned char)0xf7; \
		x86_mem_emit ((inst), 3, (mem)); \
	} while (0)

#define x86_neg_membase(inst,basereg,disp) \
	do { \
		*(inst)++ = (unsigned char)0xf7; \
		x86_membase_emit ((inst), 3, (basereg), (disp)); \
	} while (0)

#define x86_neg_reg(inst,reg) \
	do { \
		*(inst)++ = (unsigned char)0xf7; \
		x86_reg_emit ((inst), 3, (reg)); \
	} while (0)

/* one-byte nop */
#define x86_nop(inst) do { *(inst)++ = (unsigned char)0x90; } while (0)
528
/*
 * ALU operations; opc is an X86_ALU_Opcode. Immediate forms use the
 * short EAX encoding ((opc<<3)+5) or the sign-extended imm8 form (0x83)
 * when the immediate fits, otherwise the full imm32 form (0x81).
 */
#define x86_alu_reg_imm(inst,opc,reg,imm) \
	do { \
		if ((reg) == X86_EAX) { \
			*(inst)++ = (((unsigned char)(opc)) << 3) + 5; \
			x86_imm_emit32 ((inst), (imm)); \
			break; /* exits the do/while */ \
		} \
		if (x86_is_imm8((imm))) { \
			*(inst)++ = (unsigned char)0x83; \
			x86_reg_emit ((inst), (opc), (reg)); \
			x86_imm_emit8 ((inst), (imm)); \
		} else { \
			*(inst)++ = (unsigned char)0x81; \
			x86_reg_emit ((inst), (opc), (reg)); \
			x86_imm_emit32 ((inst), (imm)); \
		} \
	} while (0)

#define x86_alu_mem_imm(inst,opc,mem,imm) \
	do { \
		if (x86_is_imm8((imm))) { \
			*(inst)++ = (unsigned char)0x83; \
			x86_mem_emit ((inst), (opc), (mem)); \
			x86_imm_emit8 ((inst), (imm)); \
		} else { \
			*(inst)++ = (unsigned char)0x81; \
			x86_mem_emit ((inst), (opc), (mem)); \
			x86_imm_emit32 ((inst), (imm)); \
		} \
	} while (0)

#define x86_alu_membase_imm(inst,opc,basereg,disp,imm) \
	do { \
		if (x86_is_imm8((imm))) { \
			*(inst)++ = (unsigned char)0x83; \
			x86_membase_emit ((inst), (opc), (basereg), (disp)); \
			x86_imm_emit8 ((inst), (imm)); \
		} else { \
			*(inst)++ = (unsigned char)0x81; \
			x86_membase_emit ((inst), (opc), (basereg), (disp)); \
			x86_imm_emit32 ((inst), (imm)); \
		} \
	} while (0)

/* 8-bit ALU op on [basereg + disp] with an imm8 (opcode 0x80) */
#define x86_alu_membase8_imm(inst,opc,basereg,disp,imm) \
	do { \
		*(inst)++ = (unsigned char)0x80; \
		x86_membase_emit ((inst), (opc), (basereg), (disp)); \
		x86_imm_emit8 ((inst), (imm)); \
	} while (0)

/* row + 1: <alu> r/m32, r32 (memory destination) */
#define x86_alu_mem_reg(inst,opc,mem,reg) \
	do { \
		*(inst)++ = (((unsigned char)(opc)) << 3) + 1; \
		x86_mem_emit ((inst), (reg), (mem)); \
	} while (0)

#define x86_alu_membase_reg(inst,opc,basereg,disp,reg) \
	do { \
		*(inst)++ = (((unsigned char)(opc)) << 3) + 1; \
		x86_membase_emit ((inst), (reg), (basereg), (disp)); \
	} while (0)

/* row + 3: <alu> r32, r/m32 (register destination) */
#define x86_alu_reg_reg(inst,opc,dreg,reg) \
	do { \
		*(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
		x86_reg_emit ((inst), (dreg), (reg)); \
	} while (0)
597
/**
 * @x86_alu_reg8_reg8:
 * Supports ALU operations between two 8-bit registers.
 * dreg := dreg opc reg
 * X86_Reg_No enum is used to specify the registers.
 * Additionally is_*_h flags are used to specify what part
 * of a given 32-bit register is used - high (TRUE) or low (FALSE).
 * For example: dreg = X86_EAX, is_dreg_h = TRUE -> use AH
 */
#define x86_alu_reg8_reg8(inst,opc,dreg,reg,is_dreg_h,is_reg_h) \
	do { \
		*(inst)++ = (((unsigned char)(opc)) << 3) + 2; /* row + 2: byte form, reg dest */ \
		x86_reg8_emit ((inst), (dreg), (reg), (is_dreg_h), (is_reg_h)); \
	} while (0)

/* row + 3: <alu> r32, r/m32 with a memory source */
#define x86_alu_reg_mem(inst,opc,reg,mem) \
	do { \
		*(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
		x86_mem_emit ((inst), (reg), (mem)); \
	} while (0)

#define x86_alu_reg_membase(inst,opc,reg,basereg,disp) \
	do { \
		*(inst)++ = (((unsigned char)(opc)) << 3) + 3; \
		x86_membase_emit ((inst), (reg), (basereg), (disp)); \
	} while (0)
624
/* test r/m32, imm32: short 0xa9 form for EAX, otherwise 0xf7 /0 */
#define x86_test_reg_imm(inst,reg,imm) \
	do { \
		if ((reg) == X86_EAX) { \
			*(inst)++ = (unsigned char)0xa9; \
		} else { \
			*(inst)++ = (unsigned char)0xf7; \
			x86_reg_emit ((inst), 0, (reg)); \
		} \
		x86_imm_emit32 ((inst), (imm)); \
	} while (0)

#define x86_test_mem_imm(inst,mem,imm) \
	do { \
		*(inst)++ = (unsigned char)0xf7; \
		x86_mem_emit ((inst), 0, (mem)); \
		x86_imm_emit32 ((inst), (imm)); \
	} while (0)

#define x86_test_membase_imm(inst,basereg,disp,imm) \
	do { \
		*(inst)++ = (unsigned char)0xf7; \
		x86_membase_emit ((inst), 0, (basereg), (disp)); \
		x86_imm_emit32 ((inst), (imm)); \
	} while (0)

/* test r/m32, r32 (0x85) */
#define x86_test_reg_reg(inst,dreg,reg) \
	do { \
		*(inst)++ = (unsigned char)0x85; \
		x86_reg_emit ((inst), (reg), (dreg)); \
	} while (0)

#define x86_test_mem_reg(inst,mem,reg) \
	do { \
		*(inst)++ = (unsigned char)0x85; \
		x86_mem_emit ((inst), (reg), (mem)); \
	} while (0)

#define x86_test_membase_reg(inst,basereg,disp,reg) \
	do { \
		*(inst)++ = (unsigned char)0x85; \
		x86_membase_emit ((inst), (reg), (basereg), (disp)); \
	} while (0)
667
/*
 * Shifts/rotates; opc is an X86_Shift_Opcode. Immediate forms use the
 * shift-by-1 opcode 0xd1 when imm == 1, otherwise 0xc1 + imm8.
 * The count-less forms (0xd3) shift by CL.
 */
#define x86_shift_reg_imm(inst,opc,reg,imm) \
	do { \
		if ((imm) == 1) { \
			*(inst)++ = (unsigned char)0xd1; \
			x86_reg_emit ((inst), (opc), (reg)); \
		} else { \
			*(inst)++ = (unsigned char)0xc1; \
			x86_reg_emit ((inst), (opc), (reg)); \
			x86_imm_emit8 ((inst), (imm)); \
		} \
	} while (0)

#define x86_shift_mem_imm(inst,opc,mem,imm) \
	do { \
		if ((imm) == 1) { \
			*(inst)++ = (unsigned char)0xd1; \
			x86_mem_emit ((inst), (opc), (mem)); \
		} else { \
			*(inst)++ = (unsigned char)0xc1; \
			x86_mem_emit ((inst), (opc), (mem)); \
			x86_imm_emit8 ((inst), (imm)); \
		} \
	} while (0)

#define x86_shift_membase_imm(inst,opc,basereg,disp,imm) \
	do { \
		if ((imm) == 1) { \
			*(inst)++ = (unsigned char)0xd1; \
			x86_membase_emit ((inst), (opc), (basereg), (disp)); \
		} else { \
			*(inst)++ = (unsigned char)0xc1; \
			x86_membase_emit ((inst), (opc), (basereg), (disp)); \
			x86_imm_emit8 ((inst), (imm)); \
		} \
	} while (0)

/* shift by CL */
#define x86_shift_reg(inst,opc,reg) \
	do { \
		*(inst)++ = (unsigned char)0xd3; \
		x86_reg_emit ((inst), (opc), (reg)); \
	} while (0)

#define x86_shift_mem(inst,opc,mem) \
	do { \
		*(inst)++ = (unsigned char)0xd3; \
		x86_mem_emit ((inst), (opc), (mem)); \
	} while (0)

#define x86_shift_membase(inst,opc,basereg,disp) \
	do { \
		*(inst)++ = (unsigned char)0xd3; \
		x86_membase_emit ((inst), (opc), (basereg), (disp)); \
	} while (0)
721
722 /*
723 * Multi op shift missing.
724 */
725
/* double-precision shifts: shrd (0x0f 0xad/0xac) and shld (0x0f
 * 0xa5/0xa4); the count-less forms shift by CL, the _imm forms by an
 * explicit 8-bit count */
#define x86_shrd_reg(inst,dreg,reg) \
	do { \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0xad; \
		x86_reg_emit ((inst), (reg), (dreg)); \
	} while (0)

#define x86_shrd_reg_imm(inst,dreg,reg,shamt) \
	do { \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0xac; \
		x86_reg_emit ((inst), (reg), (dreg)); \
		x86_imm_emit8 ((inst), (shamt)); \
	} while (0)

#define x86_shld_reg(inst,dreg,reg) \
	do { \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0xa5; \
		x86_reg_emit ((inst), (reg), (dreg)); \
	} while (0)

#define x86_shld_reg_imm(inst,dreg,reg,shamt) \
	do { \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0xa4; \
		x86_reg_emit ((inst), (reg), (dreg)); \
		x86_imm_emit8 ((inst), (shamt)); \
	} while (0)
755
/*
 * EDX:EAX = EAX * rm
 * (0xf7 with extension /4 = mul, /5 = imul, selected by is_signed)
 */
#define x86_mul_reg(inst,reg,is_signed) \
	do { \
		*(inst)++ = (unsigned char)0xf7; \
		x86_reg_emit ((inst), 4 + ((is_signed) ? 1 : 0), (reg)); \
	} while (0)

#define x86_mul_mem(inst,mem,is_signed) \
	do { \
		*(inst)++ = (unsigned char)0xf7; \
		x86_mem_emit ((inst), 4 + ((is_signed) ? 1 : 0), (mem)); \
	} while (0)

#define x86_mul_membase(inst,basereg,disp,is_signed) \
	do { \
		*(inst)++ = (unsigned char)0xf7; \
		x86_membase_emit ((inst), 4 + ((is_signed) ? 1 : 0), (basereg), (disp)); \
	} while (0)

/*
 * r *= rm
 * (two-operand signed imul, 0x0f 0xaf)
 */
#define x86_imul_reg_reg(inst,dreg,reg) \
	do { \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0xaf; \
		x86_reg_emit ((inst), (dreg), (reg)); \
	} while (0)

#define x86_imul_reg_mem(inst,reg,mem) \
	do { \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0xaf; \
		x86_mem_emit ((inst), (reg), (mem)); \
	} while (0)

#define x86_imul_reg_membase(inst,reg,basereg,disp) \
	do { \
		*(inst)++ = (unsigned char)0x0f; \
		*(inst)++ = (unsigned char)0xaf; \
		x86_membase_emit ((inst), (reg), (basereg), (disp)); \
	} while (0)
800
/*
 * dreg = rm * imm
 * (three-operand signed imul: 0x6b with imm8, 0x69 with imm32)
 */
#define x86_imul_reg_reg_imm(inst,dreg,reg,imm) \
	do { \
		if (x86_is_imm8 ((imm))) { \
			*(inst)++ = (unsigned char)0x6b; \
			x86_reg_emit ((inst), (dreg), (reg)); \
			x86_imm_emit8 ((inst), (imm)); \
		} else { \
			*(inst)++ = (unsigned char)0x69; \
			x86_reg_emit ((inst), (dreg), (reg)); \
			x86_imm_emit32 ((inst), (imm)); \
		} \
	} while (0)
816
/*
 * reg = [mem] * imm (three-operand signed imul with a memory source:
 * 0x6b with imm8, 0x69 with imm32)
 */
#define x86_imul_reg_mem_imm(inst,reg,mem,imm) \
	do { \
		if (x86_is_imm8 ((imm))) { \
			*(inst)++ = (unsigned char)0x6b; \
			x86_mem_emit ((inst), (reg), (mem)); \
			x86_imm_emit8 ((inst), (imm)); \
		} else { \
			*(inst)++ = (unsigned char)0x69; \
			/* bug fix: the source is a memory location, so emit a memory */ \
			/* operand here -- the previous x86_reg_emit((inst),(reg),(mem)) */ \
			/* encoded mod = 3 with the address truncated to a register number */ \
			x86_mem_emit ((inst), (reg), (mem)); \
			x86_imm_emit32 ((inst), (imm)); \
		} \
	} while (0)
829
/* reg = [basereg + disp] * imm (three-operand signed imul) */
#define x86_imul_reg_membase_imm(inst,reg,basereg,disp,imm) \
	do { \
		if (x86_is_imm8 ((imm))) { \
			*(inst)++ = (unsigned char)0x6b; \
			x86_membase_emit ((inst), (reg), (basereg), (disp)); \
			x86_imm_emit8 ((inst), (imm)); \
		} else { \
			*(inst)++ = (unsigned char)0x69; \
			x86_membase_emit ((inst), (reg), (basereg), (disp)); \
			x86_imm_emit32 ((inst), (imm)); \
		} \
	} while (0)

/*
 * divide EDX:EAX by rm;
 * eax = quotient, edx = remainder
 * (0xf7 with extension /6 = div, /7 = idiv, selected by is_signed)
 */

#define x86_div_reg(inst,reg,is_signed) \
	do { \
		*(inst)++ = (unsigned char)0xf7; \
		x86_reg_emit ((inst), 6 + ((is_signed) ? 1 : 0), (reg)); \
	} while (0)

#define x86_div_mem(inst,mem,is_signed) \
	do { \
		*(inst)++ = (unsigned char)0xf7; \
		x86_mem_emit ((inst), 6 + ((is_signed) ? 1 : 0), (mem)); \
	} while (0)

#define x86_div_membase(inst,basereg,disp,is_signed) \
	do { \
		*(inst)++ = (unsigned char)0xf7; \
		x86_membase_emit ((inst), 6 + ((is_signed) ? 1 : 0), (basereg), (disp)); \
	} while (0)
865
/*
 * mov, register source (0x88 byte / 0x89 dword; size == 2 emits the
 * 0x66 operand-size prefix then falls through to the dword opcode)
 */
#define x86_mov_mem_reg(inst,mem,reg,size) \
	do { \
		switch ((size)) { \
		case 1: *(inst)++ = (unsigned char)0x88; break; \
		case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
		case 4: *(inst)++ = (unsigned char)0x89; break; \
		default: assert (0); \
		} \
		x86_mem_emit ((inst), (reg), (mem)); \
	} while (0)

#define x86_mov_regp_reg(inst,regp,reg,size) \
	do { \
		switch ((size)) { \
		case 1: *(inst)++ = (unsigned char)0x88; break; \
		case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
		case 4: *(inst)++ = (unsigned char)0x89; break; \
		default: assert (0); \
		} \
		x86_regp_emit ((inst), (reg), (regp)); \
	} while (0)

#define x86_mov_membase_reg(inst,basereg,disp,reg,size) \
	do { \
		switch ((size)) { \
		case 1: *(inst)++ = (unsigned char)0x88; break; \
		case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
		case 4: *(inst)++ = (unsigned char)0x89; break; \
		default: assert (0); \
		} \
		x86_membase_emit ((inst), (reg), (basereg), (disp)); \
	} while (0)

#define x86_mov_memindex_reg(inst,basereg,disp,indexreg,shift,reg,size) \
	do { \
		switch ((size)) { \
		case 1: *(inst)++ = (unsigned char)0x88; break; \
		case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
		case 4: *(inst)++ = (unsigned char)0x89; break; \
		default: assert (0); \
		} \
		x86_memindex_emit ((inst), (reg), (basereg), (disp), (indexreg), (shift)); \
	} while (0)

/*
 * mov, register destination (0x8a byte / 0x8b dword; same 0x66
 * prefix-and-fall-through scheme for size == 2)
 */
#define x86_mov_reg_reg(inst,dreg,reg,size) \
	do { \
		switch ((size)) { \
		case 1: *(inst)++ = (unsigned char)0x8a; break; \
		case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
		case 4: *(inst)++ = (unsigned char)0x8b; break; \
		default: assert (0); \
		} \
		x86_reg_emit ((inst), (dreg), (reg)); \
	} while (0)

#define x86_mov_reg_mem(inst,reg,mem,size) \
	do { \
		switch ((size)) { \
		case 1: *(inst)++ = (unsigned char)0x8a; break; \
		case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
		case 4: *(inst)++ = (unsigned char)0x8b; break; \
		default: assert (0); \
		} \
		x86_mem_emit ((inst), (reg), (mem)); \
	} while (0)

#define x86_mov_reg_membase(inst,reg,basereg,disp,size) \
	do { \
		switch ((size)) { \
		case 1: *(inst)++ = (unsigned char)0x8a; break; \
		case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
		case 4: *(inst)++ = (unsigned char)0x8b; break; \
		default: assert (0); \
		} \
		x86_membase_emit ((inst), (reg), (basereg), (disp)); \
	} while (0)

#define x86_mov_reg_memindex(inst,reg,basereg,disp,indexreg,shift,size) \
	do { \
		switch ((size)) { \
		case 1: *(inst)++ = (unsigned char)0x8a; break; \
		case 2: *(inst)++ = (unsigned char)0x66; /* fall through */ \
		case 4: *(inst)++ = (unsigned char)0x8b; break; \
		default: assert (0); \
		} \
		x86_memindex_emit ((inst), (reg), (basereg), (disp), (indexreg), (shift)); \
	} while (0)
953
/*
 * Note: x86_clear_reg () changes the condition code!
 * (it is implemented as xor reg, reg, which sets the flags)
 */
#define x86_clear_reg(inst,reg) x86_alu_reg_reg((inst), X86_XOR, (reg), (reg))

/* mov reg, imm32: short 0xb8+reg encoding */
#define x86_mov_reg_imm(inst,reg,imm) \
	do { \
		*(inst)++ = (unsigned char)0xb8 + (reg); \
		x86_imm_emit32 ((inst), (imm)); \
	} while (0)

/* mov to memory with an immediate: 0xc6 /0 imm8 for size 1,
 * 0x66-prefixed 0xc7 /0 imm16 for size 2, 0xc7 /0 imm32 otherwise */
#define x86_mov_mem_imm(inst,mem,imm,size) \
	do { \
		if ((size) == 1) { \
			*(inst)++ = (unsigned char)0xc6; \
			x86_mem_emit ((inst), 0, (mem)); \
			x86_imm_emit8 ((inst), (imm)); \
		} else if ((size) == 2) { \
			*(inst)++ = (unsigned char)0x66; \
			*(inst)++ = (unsigned char)0xc7; \
			x86_mem_emit ((inst), 0, (mem)); \
			x86_imm_emit16 ((inst), (imm)); \
		} else { \
			*(inst)++ = (unsigned char)0xc7; \
			x86_mem_emit ((inst), 0, (mem)); \
			x86_imm_emit32 ((inst), (imm)); \
		} \
	} while (0)

#define x86_mov_membase_imm(inst,basereg,disp,imm,size) \
	do { \
		if ((size) == 1) { \
			*(inst)++ = (unsigned char)0xc6; \
			x86_membase_emit ((inst), 0, (basereg), (disp)); \
			x86_imm_emit8 ((inst), (imm)); \
		} else if ((size) == 2) { \
			*(inst)++ = (unsigned char)0x66; \
			*(inst)++ = (unsigned char)0xc7; \
			x86_membase_emit ((inst), 0, (basereg), (disp)); \
			x86_imm_emit16 ((inst), (imm)); \
		} else { \
			*(inst)++ = (unsigned char)0xc7; \
			x86_membase_emit ((inst), 0, (basereg), (disp)); \
			x86_imm_emit32 ((inst), (imm)); \
		} \
	} while (0)

#define x86_mov_memindex_imm(inst,basereg,disp,indexreg,shift,imm,size) \
	do { \
		if ((size) == 1) { \
			*(inst)++ = (unsigned char)0xc6; \
			x86_memindex_emit ((inst), 0, (basereg), (disp), (indexreg), (shift)); \
			x86_imm_emit8 ((inst), (imm)); \
		} else if ((size) == 2) { \
			*(inst)++ = (unsigned char)0x66; \
			*(inst)++ = (unsigned char)0xc7; \
			x86_memindex_emit ((inst), 0, (basereg), (disp), (indexreg), (shift)); \
			x86_imm_emit16 ((inst), (imm)); \
		} else { \
			*(inst)++ = (unsigned char)0xc7; \
			x86_memindex_emit ((inst), 0, (basereg), (disp), (indexreg), (shift)); \
			x86_imm_emit32 ((inst), (imm)); \
		} \
	} while (0)
1018
/* lea (0x8d): load the effective address of the operand into reg */
#define x86_lea_mem(inst,reg,mem) \
	do { \
		*(inst)++ = (unsigned char)0x8d; \
		x86_mem_emit ((inst), (reg), (mem)); \
	} while (0)

#define x86_lea_membase(inst,reg,basereg,disp) \
	do { \
		*(inst)++ = (unsigned char)0x8d; \
		x86_membase_emit ((inst), (reg), (basereg), (disp)); \
	} while (0)

#define x86_lea_memindex(inst,reg,basereg,disp,indexreg,shift) \
	do { \
		*(inst)++ = (unsigned char)0x8d; \
		x86_memindex_emit ((inst), (reg), (basereg), (disp), (indexreg), (shift)); \
	} while (0)
1036
/*
 * Widening moves (movzx/movsx): base opcode 0x0f 0xb6 (movzx byte);
 * +0x08 selects sign extension (movsx), +0x01 selects a 16-bit source.
 */
#define x86_widen_reg(inst,dreg,reg,is_signed,is_half) \
	do { \
		unsigned char op = 0xb6; \
		/* a byte source register must be one of EAX..EBX */ \
		assert (is_half ||  X86_IS_BYTE_REG (reg)); \
		*(inst)++ = (unsigned char)0x0f; \
		if ((is_signed)) op += 0x08; \
		if ((is_half)) op += 0x01; \
		*(inst)++ = op; \
		x86_reg_emit ((inst), (dreg), (reg)); \
	} while (0)

#define x86_widen_mem(inst,dreg,mem,is_signed,is_half) \
	do { \
		unsigned char op = 0xb6; \
		*(inst)++ = (unsigned char)0x0f; \
		if ((is_signed)) op += 0x08; \
		if ((is_half)) op += 0x01; \
		*(inst)++ = op; \
		x86_mem_emit ((inst), (dreg), (mem)); \
	} while (0)

#define x86_widen_membase(inst,dreg,basereg,disp,is_signed,is_half) \
	do { \
		unsigned char op = 0xb6; \
		*(inst)++ = (unsigned char)0x0f; \
		if ((is_signed)) op += 0x08; \
		if ((is_half)) op += 0x01; \
		*(inst)++ = op; \
		x86_membase_emit ((inst), (dreg), (basereg), (disp)); \
	} while (0)

#define x86_widen_memindex(inst,dreg,basereg,disp,indexreg,shift,is_signed,is_half) \
	do { \
		unsigned char op = 0xb6; \
		*(inst)++ = (unsigned char)0x0f; \
		if ((is_signed)) op += 0x08; \
		if ((is_half)) op += 0x01; \
		*(inst)++ = op; \
		x86_memindex_emit ((inst), (dreg), (basereg), (disp), (indexreg), (shift)); \
	} while (0)
1077
/* LAHF: copy the low status flags into AH. */
#define x86_lahf(inst)	\
	do {	\
		*(inst)++ = (unsigned char)0x9f;	\
	} while (0)

/* SAHF: copy AH back into the low status flags. */
#define x86_sahf(inst)	\
	do {	\
		*(inst)++ = (unsigned char)0x9e;	\
	} while (0)

/* XCHG AH, AL: 0x86 /r, ModRM 0xe0 (mod=11, reg=AH, rm=AL). */
#define x86_xchg_ah_al(inst)	\
	do {	\
		*(inst)++ = (unsigned char)0x86;	\
		*(inst)++ = (unsigned char)0xe0;	\
	} while (0)
1085
1086
/* CDQ: sign-extend EAX into EDX:EAX. */
#define x86_cdq(inst)	\
	do {	\
		*(inst)++ = (unsigned char)0x99;	\
	} while (0)

/* WAIT/FWAIT. */
#define x86_wait(inst)	\
	do {	\
		*(inst)++ = (unsigned char)0x9b;	\
	} while (0)
1089
/* FPU arithmetic between ST(0) and a real operand in memory:
 * escape 0xd8 (m32real) or 0xdc (m64real); the ModRM reg field
 * carries the X86_FP_Opcode value. */
#define x86_fp_op_mem(inst,opc,mem,is_double)	\
	do {	\
		*(inst)++ = (is_double) ? (unsigned char)0xdc : (unsigned char)0xd8;	\
		x86_mem_emit ((inst), (opc), (mem));	\
	} while (0)

/* Same as x86_fp_op_mem but with a [basereg + disp] operand. */
#define x86_fp_op_membase(inst,opc,basereg,disp,is_double)	\
	do {	\
		*(inst)++ = (is_double) ? (unsigned char)0xdc : (unsigned char)0xd8;	\
		x86_membase_emit ((inst), (opc), (basereg), (disp));	\
	} while (0)
1101
/* FPU arithmetic ST(0) <- ST(0) op ST(index): 0xd8 with register-form
 * second byte 0xc0 | opc<<3 | index. */
#define x86_fp_op(inst,opc,index)	\
	do {	\
		*(inst)++ = (unsigned char)0xd8;	\
		*(inst)++ = (unsigned char)(0xc0 + ((opc) << 3) + ((index) & 0x07));	\
	} while (0)
1107
/* FPU arithmetic ST(index) <- ST(index) op ST(0), optionally popping
 * (0xde) instead of not popping (0xdc).  The DC/DE register forms
 * encode FSUB/FSUBR and FDIV/FDIVR with swapped sub-opcodes relative
 * to X86_FP_Opcode, hence the remap table. */
#define x86_fp_op_reg(inst,opc,index,pop_stack)	\
	do {	\
		static const unsigned char _x86_fp_rev [] = { 0, 1, 2, 3, 5, 4, 7, 6, 8 };	\
		*(inst)++ = (pop_stack) ? (unsigned char)0xde : (unsigned char)0xdc;	\
		*(inst)++ = (unsigned char)(0xc0 + (_x86_fp_rev [(opc)] << 3) + ((index) & 0x07));	\
	} while (0)
1114
1115 /**
1116 * @x86_fp_int_op_membase
1117 * Supports FPU operations between ST(0) and integer operand in memory.
1118 * Operation encoded using X86_FP_Opcode enum.
1119 * Operand is addressed by [basereg + disp].
1120 * is_int specifies whether operand is int32 (TRUE) or int16 (FALSE).
1121 */
/* 0xda = FPU op with m32int operand, 0xde = m16int operand; the
 * X86_FP_Opcode goes in the ModRM reg field.
 * Fix: (opc) is now parenthesized like every other macro argument in
 * this file, so expression arguments expand safely. */
#define x86_fp_int_op_membase(inst,opc,basereg,disp,is_int)	\
	do {	\
		*(inst)++ = (is_int) ? (unsigned char)0xda : (unsigned char)0xde;	\
		x86_membase_emit ((inst), (opc), (basereg), (disp));	\
	} while (0)
1127
/* FSTP ST(index): store ST(0) into ST(index) and pop (0xdd 0xd8+i). */
#define x86_fstp(inst,index)	\
	do {	\
		*(inst)++ = (unsigned char)0xdd;	\
		*(inst)++ = (unsigned char)(0xd8 + (index));	\
	} while (0)
1133
/* FCOMPP: compare ST(0) with ST(1), pop both. */
#define x86_fcompp(inst)	\
	do {	\
		*(inst)++ = (unsigned char)0xde;	\
		*(inst)++ = (unsigned char)0xd9;	\
	} while (0)

/* FUCOMPP: unordered compare ST(0) with ST(1), pop both. */
#define x86_fucompp(inst)	\
	do {	\
		*(inst)++ = (unsigned char)0xda;	\
		*(inst)++ = (unsigned char)0xe9;	\
	} while (0)

/* FNSTSW AX: store the FPU status word into AX (no WAIT prefix). */
#define x86_fnstsw(inst)	\
	do {	\
		*(inst)++ = (unsigned char)0xdf;	\
		*(inst)++ = (unsigned char)0xe0;	\
	} while (0)
1151
/* FNSTCW [mem]: store the FPU control word (0xd9 /7). */
#define x86_fnstcw(inst,mem)	\
	do {	\
		*(inst)++ = (unsigned char)0xd9;	\
		x86_mem_emit ((inst), 7, (mem));	\
	} while (0)

/* FNSTCW [basereg + disp]. */
#define x86_fnstcw_membase(inst,basereg,disp)	\
	do {	\
		*(inst)++ = (unsigned char)0xd9;	\
		x86_membase_emit ((inst), 7, (basereg), (disp));	\
	} while (0)

/* FLDCW [mem]: load the FPU control word (0xd9 /5). */
#define x86_fldcw(inst,mem)	\
	do {	\
		*(inst)++ = (unsigned char)0xd9;	\
		x86_mem_emit ((inst), 5, (mem));	\
	} while (0)

/* FLDCW [basereg + disp]. */
#define x86_fldcw_membase(inst,basereg,disp)	\
	do {	\
		*(inst)++ = (unsigned char)0xd9;	\
		x86_membase_emit ((inst), 5, (basereg), (disp));	\
	} while (0)
1175
/* FCHS: negate ST(0). */
#define x86_fchs(inst)	\
	do {	\
		*(inst)++ = (unsigned char)0xd9;	\
		*(inst)++ = (unsigned char)0xe0;	\
	} while (0)

/* FPREM: partial remainder of ST(0)/ST(1).
 * (Same encoding as x86_fprem defined further below.) */
#define x86_frem(inst)	\
	do {	\
		*(inst)++ = (unsigned char)0xd9;	\
		*(inst)++ = (unsigned char)0xf8;	\
	} while (0)

/* FXCH ST(index): exchange ST(0) with ST(index). */
#define x86_fxch(inst,index)	\
	do {	\
		*(inst)++ = (unsigned char)0xd9;	\
		*(inst)++ = (unsigned char)(0xc8 + ((index) & 0x07));	\
	} while (0)
1193
/* FCOMI ST(0), ST(index): compare and set EFLAGS. */
#define x86_fcomi(inst,index)	\
	do {	\
		*(inst)++ = (unsigned char)0xdb;	\
		*(inst)++ = (unsigned char)(0xf0 + ((index) & 0x07));	\
	} while (0)

/* FCOMIP: as FCOMI, then pop ST(0). */
#define x86_fcomip(inst,index)	\
	do {	\
		*(inst)++ = (unsigned char)0xdf;	\
		*(inst)++ = (unsigned char)(0xf0 + ((index) & 0x07));	\
	} while (0)

/* FUCOMI: unordered compare, set EFLAGS. */
#define x86_fucomi(inst,index)	\
	do {	\
		*(inst)++ = (unsigned char)0xdb;	\
		*(inst)++ = (unsigned char)(0xe8 + ((index) & 0x07));	\
	} while (0)

/* FUCOMIP: as FUCOMI, then pop ST(0). */
#define x86_fucomip(inst,index)	\
	do {	\
		*(inst)++ = (unsigned char)0xdf;	\
		*(inst)++ = (unsigned char)(0xe8 + ((index) & 0x07));	\
	} while (0)
1217
/* FLD [mem]: push a real from memory; 0xd9 /0 for m32real,
 * 0xdd /0 for m64real. */
#define x86_fld(inst,mem,is_double)	\
	do {	\
		*(inst)++ = (is_double) ? (unsigned char)0xdd : (unsigned char)0xd9;	\
		x86_mem_emit ((inst), 0, (mem));	\
	} while (0)

/* FLD [basereg + disp]. */
#define x86_fld_membase(inst,basereg,disp,is_double)	\
	do {	\
		*(inst)++ = (is_double) ? (unsigned char)0xdd : (unsigned char)0xd9;	\
		x86_membase_emit ((inst), 0, (basereg), (disp));	\
	} while (0)

/* FLD m80real: 0xdb /5 (80-bit extended precision). */
#define x86_fld80_mem(inst,mem)	\
	do {	\
		*(inst)++ = (unsigned char)0xdb;	\
		x86_mem_emit ((inst), 5, (mem));	\
	} while (0)

/* FLD m80real at [basereg + disp]. */
#define x86_fld80_membase(inst,basereg,disp)	\
	do {	\
		*(inst)++ = (unsigned char)0xdb;	\
		x86_membase_emit ((inst), 5, (basereg), (disp));	\
	} while (0)
1241
/* FILD [mem]: push an integer from memory; 0xdf /5 for m64int
 * (is_long), 0xdb /0 for m32int. */
#define x86_fild(inst,mem,is_long)	\
	do {	\
		if ((is_long)) {	\
			*(inst)++ = (unsigned char)0xdf;	\
			x86_mem_emit ((inst), 5, (mem));	\
		} else {	\
			*(inst)++ = (unsigned char)0xdb;	\
			x86_mem_emit ((inst), 0, (mem));	\
		}	\
	} while (0)

/* FILD at [basereg + disp] — same opcode selection as x86_fild. */
#define x86_fild_membase(inst,basereg,disp,is_long)	\
	do {	\
		if ((is_long)) {	\
			*(inst)++ = (unsigned char)0xdf;	\
			x86_membase_emit ((inst), 5, (basereg), (disp));	\
		} else {	\
			*(inst)++ = (unsigned char)0xdb;	\
			x86_membase_emit ((inst), 0, (basereg), (disp));	\
		}	\
	} while (0)
1263
/* FLD ST(index): duplicate a stack register onto the top. */
#define x86_fld_reg(inst,index)	\
	do {	\
		*(inst)++ = (unsigned char)0xd9;	\
		*(inst)++ = (unsigned char)(0xc0 + ((index) & 0x07));	\
	} while (0)

/* FLDZ: push +0.0. */
#define x86_fldz(inst)	\
	do {	\
		*(inst)++ = (unsigned char)0xd9;	\
		*(inst)++ = (unsigned char)0xee;	\
	} while (0)

/* FLD1: push +1.0. */
#define x86_fld1(inst)	\
	do {	\
		*(inst)++ = (unsigned char)0xd9;	\
		*(inst)++ = (unsigned char)0xe8;	\
	} while (0)

/* FLDPI: push pi. */
#define x86_fldpi(inst)	\
	do {	\
		*(inst)++ = (unsigned char)0xd9;	\
		*(inst)++ = (unsigned char)0xeb;	\
	} while (0)
1287
/* FST/FSTP [mem]: 0xd9 (m32real) or 0xdd (m64real); ModRM reg field
 * is /2 for FST, /3 for FSTP (pop_stack). */
#define x86_fst(inst,mem,is_double,pop_stack)	\
	do {	\
		*(inst)++ = (is_double) ? (unsigned char)0xdd: (unsigned char)0xd9;	\
		x86_mem_emit ((inst), 2 + ((pop_stack) ? 1 : 0), (mem));	\
	} while (0)

/* FST/FSTP at [basereg + disp]. */
#define x86_fst_membase(inst,basereg,disp,is_double,pop_stack)	\
	do {	\
		*(inst)++ = (is_double) ? (unsigned char)0xdd: (unsigned char)0xd9;	\
		x86_membase_emit ((inst), 2 + ((pop_stack) ? 1 : 0), (basereg), (disp));	\
	} while (0)

/* FSTP m80real: 0xdb /7 (the 80-bit store always pops). */
#define x86_fst80_mem(inst,mem)	\
	do {	\
		*(inst)++ = (unsigned char)0xdb;	\
		x86_mem_emit ((inst), 7, (mem));	\
	} while (0)

/* FSTP m80real at [basereg + disp]. */
#define x86_fst80_membase(inst,basereg,disp)	\
	do {	\
		*(inst)++ = (unsigned char)0xdb;	\
		x86_membase_emit ((inst), 7, (basereg), (disp));	\
	} while (0)
1312
1313
/* FISTP [mem]: convert ST(0) to integer, store and pop;
 * 0xdf /7 for m64int (is_long), 0xdb /3 for m32int. */
#define x86_fist_pop(inst,mem,is_long)	\
	do {	\
		if ((is_long)) {	\
			*(inst)++ = (unsigned char)0xdf;	\
			x86_mem_emit ((inst), 7, (mem));	\
		} else {	\
			*(inst)++ = (unsigned char)0xdb;	\
			x86_mem_emit ((inst), 3, (mem));	\
		}	\
	} while (0)

/* FISTP at [basereg + disp] — same opcode selection as x86_fist_pop. */
#define x86_fist_pop_membase(inst,basereg,disp,is_long)	\
	do {	\
		if ((is_long)) {	\
			*(inst)++ = (unsigned char)0xdf;	\
			x86_membase_emit ((inst), 7, (basereg), (disp));	\
		} else {	\
			*(inst)++ = (unsigned char)0xdb;	\
			x86_membase_emit ((inst), 3, (basereg), (disp));	\
		}	\
	} while (0)
1335
/* FSTSW AX: WAIT prefix (0x9b) followed by FNSTSW AX (0xdf 0xe0). */
#define x86_fstsw(inst)	\
	do {	\
		*(inst)++ = (unsigned char)0x9b;	\
		*(inst)++ = (unsigned char)0xdf;	\
		*(inst)++ = (unsigned char)0xe0;	\
	} while (0)
1342
/**
 * @x86_fist_membase
 * Converts content of ST(0) to integer and stores it at memory location
 * addressed by [basereg + disp] (FIST, no pop).
 * is_int specifies whether destination is int32 (TRUE, 0xdb /2) or
 * int16 (FALSE, 0xdf /2).
 */
#define x86_fist_membase(inst,basereg,disp,is_int)	\
	do {	\
		if ((is_int)) {	\
			*(inst)++ = (unsigned char)0xdb;	\
			x86_membase_emit ((inst), 2, (basereg), (disp));	\
		} else {	\
			*(inst)++ = (unsigned char)0xdf;	\
			x86_membase_emit ((inst), 2, (basereg), (disp));	\
		}	\
	} while (0)
1359
1360
/* PUSH r32: single-byte opcode 0x50 + register number. */
#define x86_push_reg(inst,reg)	\
	do {	\
		*(inst)++ = (unsigned char)(0x50 + (reg));	\
	} while (0)
1365
/* PUSH dword [reg]: 0xff /6 with a register-indirect operand. */
#define x86_push_regp(inst,reg)	\
	do {	\
		*(inst)++ = (unsigned char)0xff;	\
		x86_regp_emit ((inst), 6, (reg));	\
	} while (0)

/* PUSH dword [mem]: 0xff /6. */
#define x86_push_mem(inst,mem)	\
	do {	\
		*(inst)++ = (unsigned char)0xff;	\
		x86_mem_emit ((inst), 6, (mem));	\
	} while (0)

/* PUSH dword [basereg + disp]. */
#define x86_push_membase(inst,basereg,disp)	\
	do {	\
		*(inst)++ = (unsigned char)0xff;	\
		x86_membase_emit ((inst), 6, (basereg), (disp));	\
	} while (0)

/* PUSH dword [basereg + disp + indexreg*(1<<shift)]. */
#define x86_push_memindex(inst,basereg,disp,indexreg,shift)	\
	do {	\
		*(inst)++ = (unsigned char)0xff;	\
		x86_memindex_emit ((inst), 6, (basereg), (disp), (indexreg), (shift));	\
	} while (0)
1389
/* Emit a PUSH imm32 with a placeholder value; the non-imm8 pattern
 * guarantees the 5-byte form so the immediate can be patched later. */
#define x86_push_imm_template(inst) x86_push_imm (inst, 0xf0f0f0f0)

/* PUSH imm: 0x6a ib when the value fits in a sign-extended byte,
 * otherwise 0x68 id. */
#define x86_push_imm(inst,imm)	\
	do {	\
		int _imm = (int) (imm);	\
		if (x86_is_imm8 (_imm)) {	\
			*(inst)++ = (unsigned char)0x6A;	\
			x86_imm_emit8 ((inst), (_imm));	\
		} else {	\
			*(inst)++ = (unsigned char)0x68;	\
			x86_imm_emit32 ((inst), (_imm));	\
		}	\
	} while (0)
1403
/* POP r32: single-byte opcode 0x58 + register number. */
#define x86_pop_reg(inst,reg)	\
	do {	\
		*(inst)++ = (unsigned char)(0x58 + (reg));	\
	} while (0)
1408
/* POP into memory: opcode 0x8f /0 (POP r/m32).
 * Bug fix: the previous code emitted 0x87, which is XCHG r/m32, r32 —
 * with reg field 0 that exchanges the operand with EAX instead of
 * popping the stack (Intel SDM Vol. 2: POP is 8F /0). */
#define x86_pop_mem(inst,mem)	\
	do {	\
		*(inst)++ = (unsigned char)0x8f;	\
		x86_mem_emit ((inst), 0, (mem));	\
	} while (0)

/* POP dword [basereg + disp]: 0x8f /0. */
#define x86_pop_membase(inst,basereg,disp)	\
	do {	\
		*(inst)++ = (unsigned char)0x8f;	\
		x86_membase_emit ((inst), 0, (basereg), (disp));	\
	} while (0)
1420
/* PUSHAD (0x60), PUSHFD (0x9c), POPAD (0x61), POPFD (0x9d). */
#define x86_pushad(inst)	\
	do { *(inst)++ = (unsigned char)0x60; } while (0)
#define x86_pushfd(inst)	\
	do { *(inst)++ = (unsigned char)0x9c; } while (0)
#define x86_popad(inst)	\
	do { *(inst)++ = (unsigned char)0x61; } while (0)
#define x86_popfd(inst)	\
	do { *(inst)++ = (unsigned char)0x9d; } while (0)
1425
/* LOOP rel8 (0xe2): decrement ECX, jump while nonzero. */
#define x86_loop(inst,imm)	\
	do {	\
		*(inst)++ = (unsigned char)0xe2;	\
		x86_imm_emit8 ((inst), (imm));	\
	} while (0)

/* LOOPE/LOOPZ rel8 (0xe1). */
#define x86_loope(inst,imm)	\
	do {	\
		*(inst)++ = (unsigned char)0xe1;	\
		x86_imm_emit8 ((inst), (imm));	\
	} while (0)

/* LOOPNE/LOOPNZ rel8 (0xe0). */
#define x86_loopne(inst,imm)	\
	do {	\
		*(inst)++ = (unsigned char)0xe0;	\
		x86_imm_emit8 ((inst), (imm));	\
	} while (0)
1443
/* JMP rel32 (0xe9): 5-byte near jump, displacement relative to the
 * end of the instruction. */
#define x86_jump32(inst,imm)	\
	do {	\
		*(inst)++ = (unsigned char)0xe9;	\
		x86_imm_emit32 ((inst), (imm));	\
	} while (0)

/* JMP rel8 (0xeb): 2-byte short jump. */
#define x86_jump8(inst,imm)	\
	do {	\
		*(inst)++ = (unsigned char)0xeb;	\
		x86_imm_emit8 ((inst), (imm));	\
	} while (0)
1455
/* JMP r32: indirect jump through a register, 0xff /4. */
#define x86_jump_reg(inst,reg)	\
	do {	\
		*(inst)++ = (unsigned char)0xff;	\
		x86_reg_emit ((inst), 4, (reg));	\
	} while (0)

/* JMP dword [mem]: 0xff /4. */
#define x86_jump_mem(inst,mem)	\
	do {	\
		*(inst)++ = (unsigned char)0xff;	\
		x86_mem_emit ((inst), 4, (mem));	\
	} while (0)

/* JMP dword [basereg + disp]: 0xff /4. */
#define x86_jump_membase(inst,basereg,disp)	\
	do {	\
		*(inst)++ = (unsigned char)0xff;	\
		x86_membase_emit ((inst), 4, (basereg), (disp));	\
	} while (0)
1473
/*
 * target is a pointer in our buffer.
 * Emits the shortest jump that reaches it: t is first computed for the
 * 2-byte short form; when it does not fit an imm8, 3 more bytes are
 * subtracted to account for the 5-byte near form.
 */
#define x86_jump_code(inst,target)	\
	do {	\
		int t = (unsigned char*)(target) - (inst) - 2;	\
		if (x86_is_imm8(t)) {	\
			x86_jump8 ((inst), t);	\
		} else {	\
			t -= 3;	\
			x86_jump32 ((inst), t);	\
		}	\
	} while (0)
1487
/* As x86_jump_code, but the caller supplies the raw displacement from
 * the start of the jump instruction rather than a target pointer. */
#define x86_jump_disp(inst,disp)	\
	do {	\
		int t = (disp) - 2;	\
		if (x86_is_imm8(t)) {	\
			x86_jump8 ((inst), t);	\
		} else {	\
			t -= 3;	\
			x86_jump32 ((inst), t);	\
		}	\
	} while (0)
1498
/* Jcc rel8: opcode taken from the x86_cc_signed_map /
 * x86_cc_unsigned_map tables (defined elsewhere in this file),
 * indexed by the X86_CC_* condition code. */
#define x86_branch8(inst,cond,imm,is_signed)	\
	do {	\
		if ((is_signed))	\
			*(inst)++ = x86_cc_signed_map [(cond)];	\
		else	\
			*(inst)++ = x86_cc_unsigned_map [(cond)];	\
		x86_imm_emit8 ((inst), (imm));	\
	} while (0)

/* Jcc rel32: two-byte form 0x0f followed by the table opcode + 0x10
 * (the near-form offset of the Jcc opcode range). */
#define x86_branch32(inst,cond,imm,is_signed)	\
	do {	\
		*(inst)++ = (unsigned char)0x0f;	\
		if ((is_signed))	\
			*(inst)++ = x86_cc_signed_map [(cond)] + 0x10;	\
		else	\
			*(inst)++ = x86_cc_unsigned_map [(cond)] + 0x10;	\
		x86_imm_emit32 ((inst), (imm));	\
	} while (0)
1517
/* Conditional branch to a pointer in our buffer; picks the 2-byte
 * short form when the offset fits an imm8, otherwise subtracts 4 more
 * (the near form is 6 bytes total) and emits the rel32 form. */
#define x86_branch(inst,cond,target,is_signed)	\
	do {	\
		int offset = (target) - (inst) - 2;	\
		if (x86_is_imm8 ((offset)))	\
			x86_branch8 ((inst), (cond), offset, (is_signed));	\
		else {	\
			offset -= 4;	\
			x86_branch32 ((inst), (cond), offset, (is_signed));	\
		}	\
	} while (0)

/* As x86_branch, but with a raw displacement from the start of the
 * branch instruction instead of a target pointer. */
#define x86_branch_disp(inst,cond,disp,is_signed)	\
	do {	\
		int offset = (disp) - 2;	\
		if (x86_is_imm8 ((offset)))	\
			x86_branch8 ((inst), (cond), offset, (is_signed));	\
		else {	\
			offset -= 4;	\
			x86_branch32 ((inst), (cond), offset, (is_signed));	\
		}	\
	} while (0)
1539
/* SETcc r8: 0x0f followed by the condition-map opcode + 0x20
 * (SETcc opcode range offset), ModRM /0.  The destination must be a
 * byte-addressable register (EAX..EBX). */
#define x86_set_reg(inst,cond,reg,is_signed)	\
	do {	\
		assert (X86_IS_BYTE_REG (reg));	\
		*(inst)++ = (unsigned char)0x0f;	\
		if ((is_signed))	\
			*(inst)++ = x86_cc_signed_map [(cond)] + 0x20;	\
		else	\
			*(inst)++ = x86_cc_unsigned_map [(cond)] + 0x20;	\
		x86_reg_emit ((inst), 0, (reg));	\
	} while (0)

/* SETcc byte [mem]. */
#define x86_set_mem(inst,cond,mem,is_signed)	\
	do {	\
		*(inst)++ = (unsigned char)0x0f;	\
		if ((is_signed))	\
			*(inst)++ = x86_cc_signed_map [(cond)] + 0x20;	\
		else	\
			*(inst)++ = x86_cc_unsigned_map [(cond)] + 0x20;	\
		x86_mem_emit ((inst), 0, (mem));	\
	} while (0)

/* SETcc byte [basereg + disp]. */
#define x86_set_membase(inst,cond,basereg,disp,is_signed)	\
	do {	\
		*(inst)++ = (unsigned char)0x0f;	\
		if ((is_signed))	\
			*(inst)++ = x86_cc_signed_map [(cond)] + 0x20;	\
		else	\
			*(inst)++ = x86_cc_unsigned_map [(cond)] + 0x20;	\
		x86_membase_emit ((inst), 0, (basereg), (disp));	\
	} while (0)
1570
/* CALL rel32 (0xe8): displacement relative to the end of the 5-byte
 * instruction. */
#define x86_call_imm(inst,disp)	\
	do {	\
		*(inst)++ = (unsigned char)0xe8;	\
		x86_imm_emit32 ((inst), (int)(disp));	\
	} while (0)

/* CALL r32: indirect call through a register, 0xff /2. */
#define x86_call_reg(inst,reg)	\
	do {	\
		*(inst)++ = (unsigned char)0xff;	\
		x86_reg_emit ((inst), 2, (reg));	\
	} while (0)

/* CALL dword [mem]: 0xff /2. */
#define x86_call_mem(inst,mem)	\
	do {	\
		*(inst)++ = (unsigned char)0xff;	\
		x86_mem_emit ((inst), 2, (mem));	\
	} while (0)

/* CALL dword [basereg + disp]: 0xff /2. */
#define x86_call_membase(inst,basereg,disp)	\
	do {	\
		*(inst)++ = (unsigned char)0xff;	\
		x86_membase_emit ((inst), 2, (basereg), (disp));	\
	} while (0)
1594
/* CALL to an absolute target pointer: converts it to a rel32
 * displacement by subtracting the 5-byte length of the CALL itself. */
#define x86_call_code(inst,target)	\
	do {	\
		int _x86_offset = (unsigned char*)(target) - (inst);	\
		_x86_offset -= 5;	\
		x86_call_imm ((inst), _x86_offset);	\
	} while (0)
1601
/* RET (near return, 0xc3). */
#define x86_ret(inst)	\
	do {	\
		*(inst)++ = (unsigned char)0xc3;	\
	} while (0)
1603
/* RET imm16 (0xc2): pop imm bytes of arguments after returning;
 * degenerates to plain RET when imm == 0. */
#define x86_ret_imm(inst,imm)	\
	do {	\
		if ((imm) == 0) {	\
			x86_ret ((inst));	\
		} else {	\
			*(inst)++ = (unsigned char)0xc2;	\
			x86_imm_emit16 ((inst), (imm));	\
		}	\
	} while (0)
1613
/* CMOVcc dreg, reg: 0x0f followed by the condition-map opcode - 0x30
 * (the CMOVcc range lies 0x30 below the short-Jcc opcodes). */
#define x86_cmov_reg(inst,cond,is_signed,dreg,reg)	\
	do {	\
		*(inst)++ = (unsigned char) 0x0f;	\
		if ((is_signed))	\
			*(inst)++ = x86_cc_signed_map [(cond)] - 0x30;	\
		else	\
			*(inst)++ = x86_cc_unsigned_map [(cond)] - 0x30;	\
		x86_reg_emit ((inst), (dreg), (reg));	\
	} while (0)

/* CMOVcc reg, [mem]. */
#define x86_cmov_mem(inst,cond,is_signed,reg,mem)	\
	do {	\
		*(inst)++ = (unsigned char) 0x0f;	\
		if ((is_signed))	\
			*(inst)++ = x86_cc_signed_map [(cond)] - 0x30;	\
		else	\
			*(inst)++ = x86_cc_unsigned_map [(cond)] - 0x30;	\
		x86_mem_emit ((inst), (reg), (mem));	\
	} while (0)

/* CMOVcc reg, [basereg + disp]. */
#define x86_cmov_membase(inst,cond,is_signed,reg,basereg,disp)	\
	do {	\
		*(inst)++ = (unsigned char) 0x0f;	\
		if ((is_signed))	\
			*(inst)++ = x86_cc_signed_map [(cond)] - 0x30;	\
		else	\
			*(inst)++ = x86_cc_unsigned_map [(cond)] - 0x30;	\
		x86_membase_emit ((inst), (reg), (basereg), (disp));	\
	} while (0)
1643
/* ENTER framesize, 0: 0xc8 iw ib — allocate a stack frame with
 * nesting level 0 (the trailing zero byte). */
#define x86_enter(inst,framesize)	\
	do {	\
		*(inst)++ = (unsigned char)0xc8;	\
		x86_imm_emit16 ((inst), (framesize));	\
		*(inst)++ = 0;	\
	} while (0)
1650
/* LEAVE (0xc9): tear down the current stack frame. */
#define x86_leave(inst) do { *(inst)++ = (unsigned char)0xc9; } while (0)
/* NOTE(review): duplicate of the x86_sahf definition earlier in this
 * file; the identical redefinition is legal C but one copy should go. */
#define x86_sahf(inst)  do { *(inst)++ = (unsigned char)0x9e; } while (0)
1653
/* Two-byte 0xd9-escape FPU instructions with no operands. */
#define x86_fpu_d9(inst,op2)	\
	do { *(inst)++ = (unsigned char)0xd9; *(inst)++ = (unsigned char)(op2); } while (0)

#define x86_fsin(inst)		x86_fpu_d9 ((inst), 0xfe)	/* FSIN */
#define x86_fcos(inst)		x86_fpu_d9 ((inst), 0xff)	/* FCOS */
#define x86_fabs(inst)		x86_fpu_d9 ((inst), 0xe1)	/* FABS */
#define x86_ftst(inst)		x86_fpu_d9 ((inst), 0xe4)	/* FTST */
#define x86_fxam(inst)		x86_fpu_d9 ((inst), 0xe5)	/* FXAM */
#define x86_fpatan(inst)	x86_fpu_d9 ((inst), 0xf3)	/* FPATAN */
#define x86_fprem(inst)		x86_fpu_d9 ((inst), 0xf8)	/* FPREM */
#define x86_fprem1(inst)	x86_fpu_d9 ((inst), 0xf5)	/* FPREM1 */
#define x86_frndint(inst)	x86_fpu_d9 ((inst), 0xfc)	/* FRNDINT */
#define x86_fsqrt(inst)		x86_fpu_d9 ((inst), 0xfa)	/* FSQRT */
#define x86_fptan(inst)		x86_fpu_d9 ((inst), 0xf2)	/* FPTAN */
1665
/* Emit 'size' bytes (1..7) of padding that execute as no-ops:
 * 1: NOP; 2: mov eax,eax; 3: lea ebp,[ebp+0] (disp8);
 * 4: lea esp,[esp+0] (SIB, disp8); 5: case 4 + NOP;
 * 6: lea ebp,[ebp+0] (disp32); 7: lea esp,[esp+0] (SIB, disp32). */
#define x86_padding(inst,size)	\
	do {	\
		switch ((size)) {	\
		case 1: x86_nop ((inst)); break;	\
		case 2: *(inst)++ = 0x8b;	\
			*(inst)++ = 0xc0; break;	\
		case 3: *(inst)++ = 0x8d; *(inst)++ = 0x6d;	\
			*(inst)++ = 0x00; break;	\
		case 4: *(inst)++ = 0x8d; *(inst)++ = 0x64;	\
			*(inst)++ = 0x24; *(inst)++ = 0x00;	\
			break;	\
		case 5: *(inst)++ = 0x8d; *(inst)++ = 0x64;	\
			*(inst)++ = 0x24; *(inst)++ = 0x00;	\
			x86_nop ((inst)); break;	\
		case 6: *(inst)++ = 0x8d; *(inst)++ = 0xad;	\
			*(inst)++ = 0x00; *(inst)++ = 0x00;	\
			*(inst)++ = 0x00; *(inst)++ = 0x00;	\
			break;	\
		case 7: *(inst)++ = 0x8d; *(inst)++ = 0xa4;	\
			*(inst)++ = 0x24; *(inst)++ = 0x00;	\
			*(inst)++ = 0x00; *(inst)++ = 0x00;	\
			*(inst)++ = 0x00; break;	\
		default: assert (0);	\
		}	\
	} while (0)
1691
/* Function prologue: ENTER with the given frame size, then push every
 * register whose bit is set in reg_mask, in EAX..EDI order (matching
 * the reverse-order pops in x86_epilog). */
#define x86_prolog(inst,frame_size,reg_mask)	\
	do {	\
		unsigned i, m = 1;	\
		x86_enter ((inst), (frame_size));	\
		for (i = 0; i < X86_NREG; ++i, m <<= 1) {	\
			if ((reg_mask) & m)	\
				x86_push_reg ((inst), i);	\
		}	\
	} while (0)
1701
/* Function epilogue: pop the reg_mask registers in reverse order
 * (EDI down to EAX), then LEAVE and RET.  The loop terminates on the
 * mask m reaching zero; the unsigned wrap of i after the last
 * iteration is harmless. */
#define x86_epilog(inst,reg_mask)	\
	do {	\
		unsigned i, m = 1 << X86_EDI;	\
		for (i = X86_EDI; m != 0; i--, m=m>>1) {	\
			if ((reg_mask) & m)	\
				x86_pop_reg ((inst), i);	\
		}	\
		x86_leave ((inst));	\
		x86_ret ((inst));	\
	} while (0)
1712
1713 #endif // X86_H

  ViewVC Help
Powered by ViewVC 1.1.26