
Contents of /upstream/dynamips-0.2.6-RC2/x86_asm.S



Revision 1
Sat Oct 6 16:01:44 2007 UTC by dpavlin
Original Path: upstream/dynamips-0.2.5/x86_asm.S
File size: 4121 bytes
Log message: import 0.2.5 from upstream

/*
 * Cisco 7200 Simulator.
 * X86 Assembly-optimized routines.
 *
 * Copyright (c) 2006 Christophe Fillot (cf@utc.fr)
 */

#define DYNAMIPS_ASM

#include "asmdefs.h"
#include "memory.h"

#if defined(CYGWIN) || defined(__APPLE__)
#define _P(f) _##f
#else
#define _P(f) f
#endif

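/*
 * With the _P() wrapper above, a reference such as _P(dev_access) expands
 * to _dev_access on Cygwin and Mac OS X (where C symbols get a leading
 * underscore) and to plain dev_access on other platforms.
 */
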
/*
 * Increment the count register. When value in compare register is hit,
 * trigger the timer interrupt.
 */
.globl _P(mips64_inc_cp0_cnt_asm)
_P(mips64_inc_cp0_cnt_asm):
   movl  CP0_VCNT_OFS(%edi), %ecx
   incl  %ecx
   movl  %ecx, CP0_VCNT_OFS(%edi)
   cmpl  %ecx, CP0_VCMP_OFS(%edi)
   jne   1f
   movl  %edi, %eax
   call  _P(mips64_trigger_timer_irq)
1:
   ret

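/*
 * In C terms, the routine above is roughly equivalent to the sketch below.
 * The field names are only illustrative stand-ins for whatever structure
 * members the CP0_VCNT_OFS and CP0_VCMP_OFS offsets select:
 *
 *    void mips64_inc_cp0_cnt(cpu_mips_t *cpu)
 *    {
 *       if (++cpu->cp0_virt_cnt_reg == cpu->cp0_virt_cmp_reg)
 *          mips64_trigger_timer_irq(cpu);
 *    }
 */
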
/*
 * MTS32 Load Word (LW) fast version.
 *
 * Inputs:
 *   %edi : cpu instance
 *   %ebx : target register
 *   %edx : virtual address (%ecx is high 32-bit word)
 */
.globl _P(mts32_lw_asm)
_P(mts32_lw_asm):
   pushl %esi
   movl  $((1 << (MTS32_LEVEL2_BITS + MTS32_OFFSET_BITS)) - 1), %ecx

   /* compute L1 pos */
   movl  %edx, %eax
   shr   $(MTS32_LEVEL2_BITS + MTS32_OFFSET_BITS), %eax
   movl  MTS_L1_OFS(%edi), %esi
   movl  (%esi,%eax,4), %esi

   /* %esi = L1 entry */
   movl  %esi, %eax
   andl  $MTS_ACC_MASK, %eax
   jnz   mts32_lw_asm_spec_acc

   /* L2 entry chained ? */
   movl  %esi, %eax
   andl  $MTS_CHAIN_MASK, %eax
   jz    1f

   /* load L2 entry */
   andl  $0xfffffff0, %esi
   movl  %edx, %eax
   shr   $MTS32_OFFSET_BITS, %eax
   andl  $((1 << MTS32_LEVEL2_BITS) - 1), %eax
   movl  $((1 << MTS32_OFFSET_BITS) - 1), %ecx
   movl  (%esi,%eax,4), %esi

   /* %esi = L2 entry */
   movl  %esi, %eax
   andl  $MTS_ACC_MASK, %eax
   jnz   mts32_lw_asm_spec_acc

1:
   /* device access ? */
   movl  %esi, %eax
   andl  $MTS_DEV_MASK, %eax
   jnz   mts32_lw_asm_dev_acc

   /* raw memory access */
   andl  $0xfffffff0, %esi
   andl  %edx, %ecx
   addl  %ecx, %esi

mts32_lw_asm_load_val:
   /* %esi = host address */
   movl  (%esi), %eax
   bswap %eax
   cdq

   /* %edx:%eax = sign-extended value */
   lea   CPU_GPR_OFS(%edi,%ebx,8), %esi
   movl  %eax, (%esi)
   movl  %edx, 4(%esi)

   popl  %esi
   xorl  %eax, %eax
   ret

mts32_lw_asm_dev_acc:
   subl  $8, %esp
   movl  %esp, %eax

   pushl %eax            /* data */
   pushl $MTS_READ       /* op_type = read */
   pushl $4              /* op_size = 4 bytes */

   /* %esi = entry, %ecx = offset mask, %edx = vaddr */
   movl  %esi, %eax
   andl  $MTS_DEVID_MASK, %eax
   shr   $MTS_DEVID_SHIFT, %eax
   andl  %ecx, %edx
   andl  $MTS_DEVOFF_MASK, %esi
   addl  %edx, %esi

   pushl %esi            /* haddr */
   pushl %eax            /* dev_id */
   pushl %edi            /* cpu */

   /* call device access routine */
   call  _P(dev_access)
   addl  $32, %esp

   /* %eax = haddr if raw access */
   movl  %eax, %esi
   testl %esi, %esi
   jnz   mts32_lw_asm_load_val

   movl  -8(%esp), %eax
   cdq
   lea   CPU_GPR_OFS(%edi,%ebx,8), %esi
   movl  %eax, (%esi)
   movl  %edx, 4(%esi)

   popl  %esi
   xorl  %eax, %eax
   ret

mts32_lw_asm_spec_acc:
   /* %eax = mask */
   subl  $12, %esp
   movl  %esp, %ecx

   movl  $0, (%esp)      /* clear exception */

   pushl %ecx            /* exception */
   addl  $4, %ecx
   pushl %ecx            /* data */

   pushl $4              /* op_size = 4 */
   pushl $MTS_READ       /* op_type = read */
   pushl $MIPS_MEMOP_LW  /* op_code = LW */
   pushl %eax            /* mask */

   pushl %edx            /* vaddr(lo) */
   movl  %edx, %eax
   cdq
   pushl %edx            /* vaddr(hi) */
   pushl %edi            /* cpu */
   call  _P(mts_access_special)
   addl  $((9*4)+12), %esp

   /* exception ? */
   movl  -12(%esp), %eax
   testl %eax, %eax
   jnz   mts32_lw_asm_end

   /* save data */
   movl  -8(%esp), %eax
   cdq
   lea   CPU_GPR_OFS(%edi,%ebx,8), %esi
   movl  %eax, (%esi)
   movl  %edx, 4(%esi)
   xorl  %eax, %eax
mts32_lw_asm_end:
   popl  %esi
   ret

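/*
 * A simplified C-style sketch of the MTS32 fast path above (the entry layout
 * and mask usage are read off the assembly; names such as l1_table,
 * offset_mask, gpr and bswap32 are illustrative only):
 *
 *    offset_mask = (1 << (MTS32_LEVEL2_BITS + MTS32_OFFSET_BITS)) - 1;
 *    l1_idx = vaddr >> (MTS32_LEVEL2_BITS + MTS32_OFFSET_BITS);
 *    entry  = l1_table[l1_idx];
 *    if (entry & MTS_ACC_MASK) goto special_access;
 *
 *    if (entry & MTS_CHAIN_MASK) {
 *       l2_idx = (vaddr >> MTS32_OFFSET_BITS) & ((1 << MTS32_LEVEL2_BITS) - 1);
 *       offset_mask = (1 << MTS32_OFFSET_BITS) - 1;
 *       entry = ((uint32_t *)(entry & 0xfffffff0))[l2_idx];
 *       if (entry & MTS_ACC_MASK) goto special_access;
 *    }
 *
 *    if (entry & MTS_DEV_MASK) goto device_access;
 *
 *    haddr = (entry & 0xfffffff0) + (vaddr & offset_mask);
 *    gpr[rt] = (int64_t)(int32_t)bswap32(*(uint32_t *)haddr);
 *    return 0;
 */
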
/*
 * MTS64 Load Word (LW) fast version.
 *
 * Inputs:
 *   %edi : cpu instance
 *   %ebx : target register
 *   %ecx:%edx : virtual address
 */
.globl _P(mts64_lw_asm)
_P(mts64_lw_asm):
   /* Load entry from MTS64 cache */
   movl  %edx, %eax
   shr   $MTS64_HASH_SHIFT, %edx
   andl  $MTS64_HASH_MASK, %edx
   lea   CPU_MTS64_CACHE_OFS(%edi,%edx,4), %esi

   /* %esi = entry pointer */
   test  %esi, %esi
   jz    mts64_lw_slow_lookup

   /* Load entry start address in %eax */
   lea   (MTS64_ENTRY_START_OFS+4)(%esi), %eax

   ret

mts64_lw_device_access:
   ret

mts64_lw_slow_lookup:
   ret
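
/*
 * In this revision the MTS64 variant above is only a skeleton: the
 * mts64_lw_device_access and mts64_lw_slow_lookup labels are bare "ret"
 * stubs, and the main path returns right after selecting its hash-cache
 * slot.  The slot selection corresponds roughly to the following sketch
 * (field name illustrative):
 *
 *    slot  = (vaddr_lo >> MTS64_HASH_SHIFT) & MTS64_HASH_MASK;
 *    entry = &cpu->mts64_cache[slot];
 */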
