/*
 *	PearPC
 *	jitc_tools.s
 *
 *	Copyright (C) 2003-2006 Sebastian Biallas (sb@biallas.net)
 *	Copyright (C) 2004 Daniel Foesch (dfoesch@cs.nmsu.edu)
 *
 *	This program is free software; you can redistribute it and/or modify
 *	it under the terms of the GNU General Public License version 2 as
 *	published by the Free Software Foundation.
 *
 *	This program is distributed in the hope that it will be useful,
 *	but WITHOUT ANY WARRANTY; without even the implied warranty of
 *	MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *	GNU General Public License for more details.
 *
 *	You should have received a copy of the GNU General Public License
 *	along with this program; if not, write to the Free Software
 *	Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */
#ifndef PREFIX
#define PREFIX
#endif

/* EXPORT(sym): emit ".globl PREFIX##sym" and the (prefixed) label itself.
 * The two-level expansion forces PREFIX to be macro-expanded before the
 * token paste happens. */
#define EXPORT(sym) EXPORT2(PREFIX, sym)
#define EXPORT2(p, sym) EXPORT3(p, sym)
#define EXPORT3(p, sym) .globl p##sym; p##sym

/* EXTERN(sym): reference symbol sym with the platform PREFIX applied
 * (e.g. a leading underscore on some platforms). */
#define EXTERN(sym) EXTERN2(PREFIX, sym)
#define EXTERN2(p, sym) EXTERN3(p, sym)
#define EXTERN3(p, sym) p##sym

/* Minimal struct support: STRUCT resets the .struct offset counter to 0,
 * MEMBER(m, s) defines label m at the current offset and advances by s. */
#define STRUCT .struct 0
#define MEMBER(m, s) m:;.struct m+s

.intel_syntax prefix

# Define this if you want exact handling of the SO bit.
/* #define EXACT_SO */
## Offsets into the global PPC_CPU_State structure.
## NOTE(review): this layout must stay byte-for-byte in sync with the C++
## definition of PPC_CPU_State -- confirm against the cpu headers before
## changing either side.
STRUCT	#PPC_CPU_State
MEMBER(dummy, 4)
MEMBER(gpr, 32*4)		# 32 general purpose registers
MEMBER(fpr, 32*8)		# 32 floating point registers, 8 bytes each
MEMBER(cr, 4)			# condition register
MEMBER(fpscr, 4)
MEMBER(xer, 4)
MEMBER(xer_ca, 4)		# XER carry, kept separately
MEMBER(lt, 4)
MEMBER(ctr, 4)

MEMBER(msr, 4)			# machine state register
MEMBER(pvr, 4)			# processor version register

## instruction BATs plus precomputed lookup helper arrays
MEMBER(ibatu, 4*4)
MEMBER(ibatl, 4*4)
MEMBER(ibat_bl, 4*4)
MEMBER(ibat_nbl, 4*4)
MEMBER(ibat_bepi, 4*4)
MEMBER(ibat_brpn, 4*4)

## data BATs plus precomputed lookup helper arrays
MEMBER(dbatu, 4*4)
MEMBER(dbatl, 4*4)
MEMBER(dbat_bl, 4*4)
MEMBER(dbat_nbl, 4*4)
MEMBER(dbat_bepi, 4*4)
MEMBER(dbat_brpn, 4*4)

MEMBER(sdr1, 4)			# page table base register

MEMBER(sr, 16*4)		# 16 segment registers

MEMBER(dar, 4)			# data fault address (set by DSI exception)
MEMBER(dsisr, 4)
MEMBER(sprg, 4*4)
MEMBER(srr0, 4)			# exception save/restore registers
MEMBER(srr1, 4)

MEMBER(decr, 4)			# decrementer
MEMBER(ear, 4)
MEMBER(pir, 4)
MEMBER(tb, 8)			# time base, 64 bit

MEMBER(hid, 16*4)

MEMBER(pc, 4)
MEMBER(npc, 4)
MEMBER(current_opc, 4)

## exception flag bytes; exception_pending aggregates the others so a
## single byte test suffices on the fast path
MEMBER(exception_pending, 1)
MEMBER(dec_exception, 1)
MEMBER(ext_exception, 1)
MEMBER(stop_exception, 1)
MEMBER(singlestep_ignore, 1)
MEMBER(align1, 1)
MEMBER(align2, 1)
MEMBER(align3, 1)

MEMBER(pagetable_base, 4)
MEMBER(pagetable_hashmask, 4)
MEMBER(reserve, 4)		# lwarx/stwcx. reservation address
MEMBER(have_reservation, 4)

## small software TLB
MEMBER(tlb_last, 4)
MEMBER(tlb_pa, 4*4)
MEMBER(tlb_va, 4*4)
MEMBER(effective_code_page, 4)
MEMBER(physical_code_page, 4)
MEMBER(pdec, 2*4)
MEMBER(ptb, 2*4)

MEMBER(temp, 4)
MEMBER(temp2, 4)
MEMBER(x87cw, 4)		# saved host x87 control word
MEMBER(pc_ofs, 4)		# pc offset within current_code_base page
MEMBER(current_code_base, 4)

## Offsets into the JITC bookkeeping structure
STRUCT	#JITC
MEMBER(clientPages, 4)

## Offsets into a ClientPage descriptor
STRUCT	#ClientPage
MEMBER(entrypoints, 1024*4)	# one entrypoint per 4-byte client insn in the page
MEMBER(baseaddress, 4)
MEMBER(tcf_current, 4)		# 0 while the page has no translation cache
MEMBER(bytesLeft, 4)
MEMBER(tcp, 4)
MEMBER(moreRU, 4)		# LRU list links
MEMBER(lessRU, 4)

## gCPU(r): memory operand addressing field r of the global CPU state
#define gCPU(r) EXTERN(gCPU)+r

.text
|
135 |
.balign 16 |
136 |
############################################################################################## |
137 |
## |
138 |
EXPORT(ppc_flush_carry_and_flags_asm): |
139 |
jc 1f |
140 |
call EXTERN(ppc_flush_flags_asm) |
141 |
and byte ptr [gCPU(xer+3)], ~(1<<5) |
142 |
ret |
143 |
1: |
144 |
call EXTERN(ppc_flush_flags_asm) |
145 |
or byte ptr [gCPU(xer+3)], (1<<5) |
146 |
ret |
147 |
|
148 |
############################################################################################## |
149 |
## |
150 |
|
151 |
#ifndef EXACT_SO |
152 |
#define HANDLE_SO |
153 |
#else |
154 |
#define HANDLE_SO test byte ptr [gCPU(xer+3)], 1<<7; jnz 4f |
155 |
#endif |
156 |
|
157 |
.balign 16 |
158 |
############################################################################################## |
159 |
## |
160 |
EXPORT(ppc_flush_flags_asm): |
161 |
js 3f |
162 |
jnz 2f |
163 |
1: |
164 |
and byte ptr [gCPU(cr+3)], 0x0f |
165 |
or byte ptr [gCPU(cr+3)], 1<<5 |
166 |
HANDLE_SO |
167 |
ret |
168 |
2: |
169 |
and byte ptr [gCPU(cr+3)], 0x0f |
170 |
or byte ptr [gCPU(cr+3)], 1<<6 |
171 |
HANDLE_SO |
172 |
ret |
173 |
3: |
174 |
and byte ptr [gCPU(cr+3)], 0x0f |
175 |
or byte ptr [gCPU(cr+3)], 1<<7 |
176 |
HANDLE_SO |
177 |
ret |
178 |
#ifdef EXACT_SO |
179 |
4: |
180 |
or byte ptr [gCPU(cr+3)], 1<<4 |
181 |
ret |
182 |
#endif |
183 |
|
184 |
.balign 16 |
185 |
############################################################################################## |
186 |
## called after "cmp cr0, ..", with X even |
187 |
EXPORT(ppc_flush_flags_signed_0_asm): |
188 |
jl 3f |
189 |
jg 2f |
190 |
1: |
191 |
and byte ptr [gCPU(cr+3)], 0x0f |
192 |
or byte ptr [gCPU(cr+3)], 1<<5 |
193 |
HANDLE_SO |
194 |
ret |
195 |
2: |
196 |
and byte ptr [gCPU(cr+3)], 0x0f |
197 |
or byte ptr [gCPU(cr+3)], 1<<6 |
198 |
HANDLE_SO |
199 |
ret |
200 |
3: |
201 |
and byte ptr [gCPU(cr+3)], 0x0f |
202 |
or byte ptr [gCPU(cr+3)], 1<<7 |
203 |
HANDLE_SO |
204 |
ret |
205 |
#ifdef EXACT_SO |
206 |
4: |
207 |
or byte ptr [gCPU(cr+3)], 1<<4 |
208 |
ret |
209 |
#endif |
210 |
|
211 |
.balign 16 |
212 |
############################################################################################## |
213 |
## called after "cmpl cr0, ..", with X even |
214 |
EXPORT(ppc_flush_flags_unsigned_0_asm): |
215 |
jb 3f |
216 |
ja 2f |
217 |
1: |
218 |
and byte ptr [gCPU(cr+3)], 0x0f |
219 |
or byte ptr [gCPU(cr+3)], 1<<5 |
220 |
HANDLE_SO |
221 |
ret |
222 |
2: |
223 |
and byte ptr [gCPU(cr+3)], 0x0f |
224 |
or byte ptr [gCPU(cr+3)], 1<<6 |
225 |
HANDLE_SO |
226 |
ret |
227 |
3: |
228 |
and byte ptr [gCPU(cr+3)], 0x0f |
229 |
or byte ptr [gCPU(cr+3)], 1<<7 |
230 |
HANDLE_SO |
231 |
ret |
232 |
#ifdef EXACT_SO |
233 |
.so: |
234 |
or byte ptr [gCPU(cr+3)], 1<<4 |
235 |
ret |
236 |
#endif |
237 |
|
238 |
.balign 16 |
239 |
############################################################################################## |
240 |
## called after "cmp crX, ..", with X even |
241 |
ppc_flush_flags_signed_even_asm: |
242 |
jl 3f |
243 |
jg 2f |
244 |
1: |
245 |
and byte ptr [gCPU(cr+%eax)], 0x0f |
246 |
or byte ptr [gCPU(cr+%eax)], 1<<5 |
247 |
HANDLE_SO |
248 |
ret |
249 |
2: |
250 |
and byte ptr [gCPU(cr+%eax)], 0x0f |
251 |
or byte ptr [gCPU(cr+%eax)], 1<<6 |
252 |
HANDLE_SO |
253 |
ret |
254 |
3: |
255 |
and byte ptr [gCPU(cr+%eax)], 0x0f |
256 |
or byte ptr [gCPU(cr+%eax)], 1<<7 |
257 |
HANDLE_SO |
258 |
ret |
259 |
#ifdef EXACT_SO |
260 |
4: |
261 |
or byte ptr [gCPU(cr+%eax)], 1<<4 |
262 |
ret |
263 |
#endif |
264 |
|
265 |
.balign 16 |
266 |
############################################################################################## |
267 |
## called after "cmpl crX, ..", with X even |
268 |
ppc_flush_flags_unsigned_even_asm: |
269 |
jb 3f |
270 |
ja 2f |
271 |
1: |
272 |
and byte ptr [gCPU(cr+%eax)], 0x0f |
273 |
or byte ptr [gCPU(cr+%eax)], 1<<5 |
274 |
HANDLE_SO |
275 |
ret |
276 |
2: |
277 |
and byte ptr [gCPU(cr+%eax)], 0x0f |
278 |
or byte ptr [gCPU(cr+%eax)], 1<<6 |
279 |
HANDLE_SO |
280 |
ret |
281 |
3: |
282 |
and byte ptr [gCPU(cr+%eax)], 0x0f |
283 |
or byte ptr [gCPU(cr+%eax)], 1<<7 |
284 |
HANDLE_SO |
285 |
ret |
286 |
#ifdef EXACT_SO |
287 |
4: |
288 |
or byte ptr [gCPU(cr+%eax)], 1<<4 |
289 |
ret |
290 |
#endif |
291 |
|
292 |
.balign 16 |
293 |
############################################################################################## |
294 |
## called after "cmp crX, ..", with X odd |
295 |
EXPORT(ppc_flush_flags_signed_odd_asm): |
296 |
jl 3f |
297 |
jg 2f |
298 |
1: |
299 |
and byte ptr [gCPU(cr+%eax)], 0xf0 |
300 |
or byte ptr [gCPU(cr+%eax)], 1<<1 |
301 |
HANDLE_SO |
302 |
ret |
303 |
2: |
304 |
and byte ptr [gCPU(cr+%eax)], 0xf0 |
305 |
or byte ptr [gCPU(cr+%eax)], 1<<2 |
306 |
HANDLE_SO |
307 |
ret |
308 |
3: |
309 |
and byte ptr [gCPU(cr+%eax)], 0xf0 |
310 |
or byte ptr [gCPU(cr+%eax)], 1<<3 |
311 |
HANDLE_SO |
312 |
ret |
313 |
#ifdef EXACT_SO |
314 |
4: |
315 |
or byte ptr [gCPU(cr+%eax)], 1<<4 |
316 |
ret |
317 |
#endif |
318 |
|
319 |
.balign 16 |
320 |
############################################################################################## |
321 |
## called after "cmpl crX, ..", with X odd |
322 |
EXPORT(ppc_flush_flags_unsigned_odd_asm): |
323 |
jb 3f |
324 |
ja 2f |
325 |
1: |
326 |
and byte ptr [gCPU(cr+%eax)], 0xf0 |
327 |
or byte ptr [gCPU(cr+%eax)], 1<<1 |
328 |
HANDLE_SO |
329 |
ret |
330 |
2: |
331 |
and byte ptr [gCPU(cr+%eax)], 0xf0 |
332 |
or byte ptr [gCPU(cr+%eax)], 1<<2 |
333 |
HANDLE_SO |
334 |
ret |
335 |
3: |
336 |
and byte ptr [gCPU(cr+%eax)], 0xf0 |
337 |
or byte ptr [gCPU(cr+%eax)], 1<<3 |
338 |
HANDLE_SO |
339 |
ret |
340 |
#ifdef EXACT_SO |
341 |
4: |
342 |
or byte ptr [gCPU(cr+%eax)], 1<<4 |
343 |
ret |
344 |
#endif |
345 |
|
346 |
############################################################################################## |
347 |
## ppc_set_msr_asm |
348 |
## |
349 |
## IN: %eax: new msr |
350 |
## |
351 |
singlestep_error: .asciz "Singlestep support not implemented yet\n" |
352 |
.balign 16 |
353 |
|
354 |
EXPORT(ppc_set_msr_asm): |
355 |
mov %ecx, [gCPU(msr)] |
356 |
test %eax, (1<<10) # MSR_SE |
357 |
jnz 4f |
358 |
test %eax, ~((1<<30)|(1<<27)|(1<<25)|(1<<18)|(1<<15)|(1<<14)|(1<<13)|(1<<12)|(1<<11)|(1<<10)|(1<<8)|(1<<5)|(1<<4)|(1<<1)) |
359 |
jnz 5f |
360 |
test %eax, (1<<18) # MSR_POW |
361 |
jnz 2f |
362 |
1: |
363 |
## Do this first so the invalidate can clobber %eax and |
364 |
## we won''t care |
365 |
mov [gCPU(msr)], %eax |
366 |
xor %eax, %ecx |
367 |
|
368 |
## See if the privilege level (MSR_PR), data address |
369 |
## translation (MSR_DR) or code address translation (MSR_IR) |
370 |
## is changing, in which case we need to invalidate the tlb |
371 |
test %eax, (1<<14) | (1<<4) | (1<<5) |
372 |
|
373 |
jnz EXTERN(ppc_mmu_tlb_invalidate_all_asm) |
374 |
ret |
375 |
|
376 |
2: |
377 |
push %eax |
378 |
call EXTERN(cpu_doze) |
379 |
pop %eax |
380 |
mov %ecx, [gCPU(msr)] |
381 |
and %eax, ~(1<<18) |
382 |
jmp 1b |
383 |
|
384 |
4: |
385 |
mov %eax, singlestep_error |
386 |
jmp EXTERN(jitc_error) |
387 |
|
388 |
5: |
389 |
jmp EXTERN(jitc_error_msr_unsupported_bits) |
390 |
|

##############################################################################################
## atomically set ext_exception and exception_pending (byte offsets +2 and +0)
.macro ppc_atomic_raise_ext_exception_macro
	lock or	dword ptr [gCPU(exception_pending)], 0x00010001
.endm

##############################################################################################
## atomically clear ext_exception; exception_pending remains set only if
## dec_exception is still pending.  cmpxchg retry loop; clobbers %eax, %ebx.
.macro ppc_atomic_cancel_ext_exception_macro
	mov	%eax, [gCPU(exception_pending)]
9:
	test	%eax, 0x00000100	# dec_exception
	mov	%ebx, %eax
	setnz	%bl			# pending byte = (dec_exception still set)
	and	%ebx, 0x00000101	# keep only dec_exception + pending bytes
	lock cmpxchg dword ptr [gCPU(exception_pending)], %ebx
	jne	9b			# raced with another writer -> retry
.endm

##############################################################################################
## atomically set dec_exception and exception_pending (byte offsets +1 and +0)
.macro ppc_atomic_raise_dec_exception_macro
	lock or	dword ptr [gCPU(exception_pending)], 0x00000101
.endm

##############################################################################################
## atomically clear dec_exception; exception_pending remains set only if
## ext_exception is still pending.  cmpxchg retry loop; clobbers %eax, %ebx.
.macro ppc_atomic_cancel_dec_exception_macro
	mov	%eax, [gCPU(exception_pending)]
9:
	test	%eax, 0x00010000	# ext_exception
	mov	%ebx, %eax
	setnz	%bl			# pending byte = (ext_exception still set)
	and	%ebx, 0x00010001	# keep only ext_exception + pending bytes
	lock cmpxchg dword ptr [gCPU(exception_pending)], %ebx
	jne	9b			# raced with another writer -> retry
.endm
|
425 |
.balign 16 |
426 |
############################################################################################## |
427 |
EXPORT(ppc_cpu_atomic_raise_dec_exception): |
428 |
ppc_atomic_raise_dec_exception_macro |
429 |
ret |
430 |
|
431 |
.balign 16 |
432 |
############################################################################################## |
433 |
EXPORT(ppc_cpu_atomic_raise_ext_exception): |
434 |
ppc_atomic_raise_ext_exception_macro |
435 |
ret |
436 |
|
437 |
.balign 16 |
438 |
############################################################################################## |
439 |
EXPORT(ppc_cpu_atomic_cancel_ext_exception): |
440 |
ppc_atomic_cancel_ext_exception_macro |
441 |
ret |
442 |
|
443 |
.balign 16 |
444 |
ppc_jitc_new_pc: |
445 |
# db 0xcc |
446 |
mov %ecx, [EXTERN(gJITC)+clientPages] |
447 |
mov %ebx, %eax |
448 |
shr %eax, 12 |
449 |
mov %eax, [%ecx+%eax*4] |
450 |
test %eax, %eax |
451 |
jnz 1f |
452 |
|
453 |
mov %eax, %ebx |
454 |
and %eax, 0xfffff000 |
455 |
call EXTERN(jitcCreateClientPage) |
456 |
|
457 |
1: # have client page |
458 |
call EXTERN(jitcTouchClientPage) |
459 |
cmp dword ptr [%eax+tcf_current], 0 |
460 |
je 3f |
461 |
mov %ecx, %ebx |
462 |
mov %esi, %eax |
463 |
and %ecx, 0x00000ffc |
464 |
mov %eax, [%eax + entrypoints + %ecx] |
465 |
test %eax, %eax |
466 |
jz 2f |
467 |
ret |
468 |
|
469 |
2: |
470 |
mov %eax, %esi |
471 |
mov %edx, %ebx |
472 |
and %edx, 0xfffff000 |
473 |
jmp EXTERN(jitcNewEntrypoint) |
474 |
|
475 |
3: |
476 |
mov %edx, %ebx |
477 |
mov %ecx, %ebx |
478 |
and %edx, 0xfffff000 |
479 |
and %ecx, 0x00000fff |
480 |
jmp EXTERN(jitcStartTranslation) |
481 |
|

##############################################################################################
## ppc_new_pc_intern
##
## IN: %eax new client pc (physical address)
##
## Translate the client pc to a native entrypoint and jump into it.
.macro ppc_new_pc_intern
	call	EXTERN(jitcNewPC)
#	call	ppc_jitc_new_pc
	jmp	%eax
.endm

##############################################################################################
## exception_epilogue entry
##
## Common tail of all exception raisers: clear MSR (disabling address
## translation and interrupts), reset current_code_base, flush the software
## TLB and continue client execution at the exception vector \entry.
.macro exception_epilogue entry
	xor	%eax, %eax
	mov	[gCPU(msr)], %eax
	mov	[gCPU(current_code_base)], %eax
	call	EXTERN(ppc_mmu_tlb_invalidate_all_asm)
	mov	%eax, \entry
	ppc_new_pc_intern
.endm
|
503 |
.balign 16 |
504 |
############################################################################################## |
505 |
## ppc_dsi_exception |
506 |
## |
507 |
## IN: %eax fault addr |
508 |
## %ecx dsisr bits |
509 |
## |
510 |
## does not return, so call this per JMP |
511 |
EXPORT(ppc_dsi_exception_asm): |
512 |
mov [gCPU(dar)], %eax |
513 |
mov [gCPU(dsisr)], %ecx |
514 |
EXPORT(ppc_dsi_exception_special_asm): |
515 |
mov %edx, [gCPU(pc_ofs)] |
516 |
mov %eax, [gCPU(msr)] |
517 |
add %edx, [gCPU(current_code_base)] |
518 |
and %eax, 0x87c0ffff |
519 |
mov [gCPU(srr1)], %eax |
520 |
mov [gCPU(srr0)], %edx |
521 |
exception_epilogue 0x300 |
522 |
|

.balign 16
##############################################################################################
## ppc_isi_exception_asm
##
## IN: %eax: fault addr
##     %ecx: srr1 bits
##
## does not return, so call this per JMP
EXPORT(ppc_isi_exception_asm):
	mov	[gCPU(srr0)], %eax	# srr0 = faulting pc
	mov	%eax, [gCPU(msr)]
	and	%eax, 0x87c0ffff	# keep only the MSR bits saved in srr1
	or	%eax, %ecx		# merge fault cause bits
	mov	[gCPU(srr1)], %eax
	exception_epilogue 0x400

.balign 16
##############################################################################################
## ppc_ext_exception_asm: deliver an external interrupt
##
## IN:
##	%eax: current pc
##
## this is only called indirectly (from the heartbeat checks)
EXPORT(ppc_ext_exception_asm):
	mov	[gCPU(srr0)], %eax
	mov	%edx, [gCPU(msr)]
	ppc_atomic_cancel_ext_exception_macro	# clobbers %eax, %ebx
	and	%edx, 0x87c0ffff		# keep only the MSR bits saved in srr1
	mov	[gCPU(srr1)], %edx
	exception_epilogue 0x500

.balign 16
##############################################################################################
## ppc_program_exception_asm
##
## IN: %ecx: srr1 bits (cause of the program exception)
##     %esi: pc_ofs
##
## does not return, so call this per JMP
EXPORT(ppc_program_exception_asm):

#	debug
#	pusha
#	mov	%eax, %ecx
#	call	EXTERN(jitc_error_program)
#	popa

	mov	[gCPU(pc_ofs)], %esi
	mov	%eax, [gCPU(msr)]
	mov	%edx, %esi
	and	%eax, 0x87c0ffff		# keep only the MSR bits saved in srr1
	add	%edx, [gCPU(current_code_base)]	# %edx = faulting client pc
	or	%eax, %ecx			# merge cause bits
	mov	[gCPU(srr0)], %edx
	mov	[gCPU(srr1)], %eax
	exception_epilogue 0x700

.balign 16
##############################################################################################
## ppc_no_fpu_exception_asm: FPU-unavailable exception (vector 0x800)
##
## IN:
##	%esi: pc_ofs
##
## does not return, so call this per JMP
EXPORT(ppc_no_fpu_exception_asm):
	mov	%edx, %esi
	mov	[gCPU(pc_ofs)], %esi
	mov	%eax, [gCPU(msr)]
	add	%edx, [gCPU(current_code_base)]	# %edx = faulting client pc
	and	%eax, 0x87c0ffff		# keep only the MSR bits saved in srr1
	mov	[gCPU(srr0)], %edx
	mov	[gCPU(srr1)], %eax
	exception_epilogue 0x800

.balign 16
##############################################################################################
## ppc_no_vec_exception_asm: vector-unit-unavailable exception (vector 0xf20)
##
## IN:
##	%esi: pc_ofs
##
## does not return, so call this per JMP
EXPORT(ppc_no_vec_exception_asm):
	mov	%edx, %esi
	mov	[gCPU(pc_ofs)], %esi
	mov	%eax, [gCPU(msr)]
	add	%edx, [gCPU(current_code_base)]	# %edx = faulting client pc
	and	%eax, 0x87c0ffff		# keep only the MSR bits saved in srr1
	mov	[gCPU(srr0)], %edx
	mov	[gCPU(srr1)], %eax
	exception_epilogue 0xf20

.balign 16
##############################################################################################
## ppc_dec_exception_asm: deliver a decrementer exception
##
## IN:
##	%eax: current pc
##
## this is only called indirectly (from the heartbeat checks)
##
## FIX: this label was defined via EXTERN(), which only pastes the
## prefixed name without emitting ".globl" -- unlike every sibling
## exception handler, which uses EXPORT().  Defined via EXPORT() for
## consistency so the symbol is also visible to other translation units.
EXPORT(ppc_dec_exception_asm):
	mov	[gCPU(srr0)], %eax
	mov	%edx, [gCPU(msr)]
	ppc_atomic_cancel_dec_exception_macro	# clobbers %eax, %ebx
	and	%edx, 0x87c0ffff		# keep only the MSR bits saved in srr1
	mov	[gCPU(srr1)], %edx
	exception_epilogue 0x900

.balign 16
##############################################################################################
## ppc_sc_exception_asm: system call exception (vector 0xc00)
##
## IN:
##	%esi: pc_ofs
##
## does not return, so call this per JMP
EXPORT(ppc_sc_exception_asm):
	mov	%edx, %esi
	mov	[gCPU(pc_ofs)], %esi
	mov	%eax, [gCPU(msr)]
	add	%edx, [gCPU(current_code_base)]	# %edx = pc of the sc instruction
	and	%eax, 0x87c0ffff		# keep only the MSR bits saved in srr1
	mov	[gCPU(srr0)], %edx
	mov	[gCPU(srr1)], %eax
	xor	%eax, %eax
	call	EXTERN(ppc_set_msr_asm)		# msr = 0 (translation/interrupts off)
	xor	%eax, %eax
	mov	[gCPU(current_code_base)], %eax
	mov	%eax, 0xc00	# entry of SC exception
	ppc_new_pc_intern

.balign 16
##############################################################################################
## ppc_heartbeat_ext_rel_asm
##
## IN: %eax: new client pc, relative to current_code_base
##
## Returns normally when no exception is deliverable.  Otherwise it drops
## the return address from the stack (add %esp, 4) and jumps straight to
## the pending exception handler, or to ppc_stop_jitc_asm on a stop request.
EXPORT(ppc_heartbeat_ext_rel_asm):
	test	byte ptr [gCPU(exception_pending)], 1
	jnz	1f
2:
	ret
1:
	test	byte ptr [gCPU(stop_exception)], 1
	jnz	3f
	test	byte ptr [gCPU(msr+1)], 1<<7	# MSR_EE
	jz	2b				# interrupts disabled -> leave it pending
	add	%esp, 4				# unwind: drop return address
	add	%eax, [gCPU(current_code_base)]	# make pc absolute for the handler
	test	byte ptr [gCPU(ext_exception)], 1
	jnz	EXTERN(ppc_ext_exception_asm)
	test	byte ptr [gCPU(dec_exception)], 1
	jnz	EXTERN(ppc_dec_exception_asm)
	mov	%eax, exception_error		# pending flag set but no known cause
	jmp	EXTERN(jitc_error)
3:
	add	%esp, 4				# unwind: drop return address
	jmp	ppc_stop_jitc_asm

.balign 16
##############################################################################################
## ppc_heartbeat_ext_asm
## %eax -- new pc
##
## Updates current_code_base from the (absolute) new pc, then delivers a
## pending exception if one is deliverable; on delivery it drops the
## return address and never returns (see ppc_heartbeat_ext_rel_asm).
EXPORT(ppc_heartbeat_ext_asm):
	mov	%edx, %eax
	and	%edx, 0xfffff000
	test	byte ptr [gCPU(exception_pending)], 1
	mov	[gCPU(current_code_base)], %edx
	jnz	1f
2:
	ret
1:
	test	byte ptr [gCPU(stop_exception)], 1
	jnz	3f
	test	byte ptr [gCPU(msr+1)], 1<<7	# MSR_EE
	jz	2b				# interrupts disabled -> leave it pending
	add	%esp, 4				# unwind: drop return address
	test	byte ptr [gCPU(ext_exception)], 1
	jnz	EXTERN(ppc_ext_exception_asm)
	test	byte ptr [gCPU(dec_exception)], 1
	jnz	EXTERN(ppc_dec_exception_asm)
	mov	%eax, exception_error		# pending flag set but no known cause
	jmp	EXTERN(jitc_error)
3:
	add	%esp, 4				# unwind: drop return address
	jmp	ppc_stop_jitc_asm

exception_error: .asciz "Unknown exception signaled?!\n"

.balign 16
##############################################################################################
## ppc_new_pc_rel_asm
##
## IN: %eax new client pc relative
##
## does not return, so call this per JMP
EXPORT(ppc_new_pc_rel_asm):
	add	%eax, [gCPU(current_code_base)]
	call	EXTERN(ppc_heartbeat_ext_asm)	# may divert to an exception handler
	push	0	# bytes to unwind
	call	EXTERN(ppc_effective_to_physical_code)
	ppc_new_pc_intern

.balign 16
##############################################################################################
## ppc_new_pc_asm
##
## IN: %eax new client pc (effective address)
##
## does not return, so call this per JMP
EXPORT(ppc_new_pc_asm):
	call	EXTERN(ppc_heartbeat_ext_asm)	# may divert to an exception handler
	push	0	# bytes to unwind
	call	EXTERN(ppc_effective_to_physical_code)
	ppc_new_pc_intern

.balign 16
##############################################################################################
## ppc_new_pc_this_page_asm
##
## IN: %eax new client pc (relative; target is on the current code page)
##
## Translates the target pc, then patches its own call site into a direct
## jmp so later executions skip this function entirely (self-modifying
## native code).
EXPORT(ppc_new_pc_this_page_asm):
#	mov	%esi, [%esp]
#	mov	[%esi-6], %eax	# patch it now, later we do not have the value
	add	%eax, [gCPU(current_code_base)]

	push	4	# bytes to unwind
	call	EXTERN(ppc_effective_to_physical_code)
	call	EXTERN(jitcNewPC)

#if 0
	pop	%esi
	# now %eax and %esi are both native addresses
	# %eax is dest and %esi is source
	#
	# we assume that we can overwrite 15 bytes before the call
	# and 3 bytes after the call and the 5 bytes of the call instruction
	mov	%edx, %eax
	sub	%eax, %esi

	mov	byte ptr [%esi-20], 0xf6	# test [gCPU(exception_pending)], 1
	mov	byte ptr [%esi-19], 0x05
	mov	dword ptr [%esi-18], gCPU(exception_pending)
	mov	byte ptr [%esi-14], 1

	add	%eax, 7

	mov	byte ptr [%esi-13], 0x0f	# jz dest (%edx)
	mov	byte ptr [%esi-12], 0x84
	mov	dword [%esi-11], %eax	# the jz is relative to (%esi-7)

	mov	%eax, ppc_heartbeat_ext_rel_asm - 3
	sub	%eax, %esi

	mov	byte ptr [%esi-7], 0xb8	# mov %eax, offset
##	mov	dword ptr [%esi-6], ...	# see above, this is already patched!

	mov	byte ptr [%esi-2], 0xe8	# call ppc_heartbeat_ext_rel_asm
	mov	dword ptr [%esi-1], %eax	# the call is relative to (%esi+3)
	jmp	%edx
#endif
	pop	%edi
	# now %eax and %edi are both native addresses
	# %eax is dest and %edi is source
	#
	# we assume that there is a "mov %eax, xxx" instruction before
	# calling this function, and note that 5 is also the length of a jmp xxx
	# so we patch %edi-10
	mov	%edx, %eax
	sub	%edi, 5
	mov	byte ptr [%edi-5], 0xe9		# opcode: jmp rel32
	sub	%eax, %edi
	mov	dword ptr [%edi-4], %eax	# rel32 = dest - (end of patched jmp)
	jmp	%edx

.balign 2
## x87 control word loaded while running client code: round to nearest,
## all FPU exceptions masked (0x37f)
ppc_start_fpu_cw: .short 0x37f

.balign 16
##############################################################################################
## ppc_start_jitc_asm
##
## IN: %eax new client pc (effective address)
##
## Entry point from C++: saves the callee-saved registers, loads the x87
## control word and enters translated code.  Control only returns to the
## C++ caller via ppc_stop_jitc_asm, which pops these registers again.
EXPORT(ppc_start_jitc_asm):
	push	%ebx
	push	%ebp
	push	%esi
	push	%edi
	fldcw	[ppc_start_fpu_cw]
	jmp	EXTERN(ppc_new_pc_asm)

.balign 16
##############################################################################################
## ppc_stop_jitc_asm
##
## call per JMP
##
## Counterpart of ppc_start_jitc_asm: restores the callee-saved registers
## pushed there and returns to the C++ caller of ppc_start_jitc_asm.
ppc_stop_jitc_asm:
	pop	%edi
	pop	%esi
	pop	%ebp
	pop	%ebx
	ret

##############################################################################################
## ppc_cpuid_asm
##
## IN:  %eax cpuid level
##      %edx dest (16-byte buffer receiving %eax, %ecx, %edx, %ebx)
## OUT: %eax = 1 and buffer filled if cpuid is supported, else %eax = 0

EXPORT(ppc_cpuid_asm):
	push	%ebx			# callee-saved, clobbered by cpuid

	## probe for cpuid support: the ID bit (21) of EFLAGS can only be
	## toggled on processors that implement the cpuid instruction
	pushfd
	pop	%ebx
	mov	%ecx, %ebx		# %ecx = original EFLAGS
	xor	%ebx, 0x00200000	# flip EFLAGS.ID
	push	%ebx
	popfd
	pushfd
	pop	%ebx
	cmp	%ebx, %ecx
	jne	1f			# bit toggled -> cpuid available

	pop	%ebx
	xor	%eax, %eax		# no cpuid support
	ret

1:
	push	%edi			# callee-saved scratch for dest pointer
	mov	%edi, %edx		# cpuid clobbers %edx, keep dest in %edi
	cpuid
	mov	[%edi], %eax
	mov	[%edi+4], %ecx
	mov	[%edi+8], %edx
	mov	[%edi+12], %ebx
	pop	%edi
	pop	%ebx
	mov	%eax, 1
	ret