1 |
/* |
2 |
* Copyright (C) 2005-2007 Anders Gavare. All rights reserved. |
3 |
* |
4 |
* Redistribution and use in source and binary forms, with or without |
5 |
* modification, are permitted provided that the following conditions are met: |
6 |
* |
7 |
* 1. Redistributions of source code must retain the above copyright |
8 |
* notice, this list of conditions and the following disclaimer. |
9 |
* 2. Redistributions in binary form must reproduce the above copyright |
10 |
* notice, this list of conditions and the following disclaimer in the |
11 |
* documentation and/or other materials provided with the distribution. |
12 |
* 3. The name of the author may not be used to endorse or promote products |
13 |
* derived from this software without specific prior written permission. |
14 |
* |
15 |
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND |
16 |
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE |
17 |
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE |
18 |
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE |
19 |
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL |
20 |
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS |
21 |
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) |
22 |
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT |
23 |
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY |
24 |
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF |
25 |
* SUCH DAMAGE. |
26 |
* |
27 |
* |
28 |
* $Id: cpu_arm_instr_dpi.c,v 1.18 2006/12/30 13:30:53 debug Exp $ |
29 |
* |
30 |
* |
31 |
* ARM Data Processing Instructions |
32 |
* -------------------------------- |
33 |
* |
34 |
* xxxx000a aaaSnnnn ddddcccc ctttmmmm Register form |
35 |
* xxxx001a aaaSnnnn ddddrrrr bbbbbbbb Immediate form |
36 |
* |
37 |
* 4 bits to select which instruction, one of the following: |
38 |
* |
39 |
* 0000 and 1000 tst |
40 |
* 0001 eor 1001 teq |
41 |
* 0010 sub 1010 cmp |
42 |
* 0011 rsb 1011 cmn |
43 |
* 0100 add 1100 orr |
44 |
* 0101 adc 1101 mov |
45 |
* 0110 sbc 1110 bic |
46 |
* 0111 rsc 1111 mvn |
47 |
* |
48 |
* 1 bit to select Status flag update. |
49 |
* |
50 |
* 1 bit to select Register form or Immediate form. |
51 |
* |
52 |
* 1 bit to select if the PC register is used. |
53 |
* |
54 |
* Each function must also (as always) be repeated for each possible ARM |
55 |
* condition code (15 in total). Total: 1920 functions. |
56 |
* |
57 |
* NOTE: This does not include any special common cases, which might be |
58 |
* nice to have. Examples: comparing against zero, moving common |
59 |
* constants. |
60 |
* |
61 |
* See src/tools/generate_arm_dpi.c for more details. |
62 |
*/ |
63 |
|
64 |
|
65 |
/* |
66 |
* arg[0] = pointer to rn |
67 |
* arg[1] = int32_t immediate value OR ptr to a reg_func() function |
68 |
* arg[2] = pointer to rd |
69 |
*/ |
70 |
void A__NAME(struct cpu *cpu, struct arm_instr_call *ic) |
71 |
{ |
72 |
#if defined(A__RSB) || defined(A__RSC) |
73 |
#define VAR_A b |
74 |
#define VAR_B a |
75 |
#else |
76 |
#define VAR_A a |
77 |
#define VAR_B b |
78 |
#endif |
79 |
|
80 |
#ifdef A__REG |
81 |
uint32_t (*reg_func)(struct cpu *, struct arm_instr_call *) |
82 |
= (void *)(size_t)ic->arg[1]; |
83 |
#endif |
84 |
|
85 |
#ifdef A__S |
86 |
uint32_t c32; |
87 |
#endif |
88 |
#if defined(A__CMP) || defined(A__CMN) || defined(A__ADC) || defined(A__ADD) \ |
89 |
|| defined(A__RSC) || defined(A__RSC) || defined(A__SBC) || defined(A__SUB) |
90 |
#ifdef A__S |
91 |
uint64_t |
92 |
#else |
93 |
uint32_t |
94 |
#endif |
95 |
#else |
96 |
uint32_t |
97 |
#endif |
98 |
VAR_B = |
99 |
#ifdef A__REG |
100 |
reg_func(cpu, ic) |
101 |
#else |
102 |
#ifdef A__REGSHORT |
103 |
reg(ic->arg[1]) |
104 |
#else |
105 |
ic->arg[1] |
106 |
#endif |
107 |
#endif |
108 |
, c64 |
109 |
#if !defined(A__MOV) && !defined(A__MVN) |
110 |
, VAR_A = reg(ic->arg[0]) |
111 |
#endif |
112 |
; |
113 |
|
114 |
#if defined(A__MOV) || defined(A__MVN) || defined(A__TST) || defined(A__TEQ) \ |
115 |
|| defined(A__AND) || defined(A__BIC) || defined(A__EOR) || defined(A__ORR) |
116 |
#if !defined(A__REG) && defined(A__S) |
117 |
/* |
118 |
* TODO: This is not 100% correct, but should work with "recommended" |
119 |
* ARM code: Immediate values larger than 255 are encoded with |
120 |
* rotation. If the S-bit is set, then the carry bit is set to the |
121 |
* highest bit of the operand. |
122 |
* |
123 |
* TODO 2: Perhaps this check should be moved out from here, and into |
124 |
* cpu_arm_instr.c. (More correct, and higher performance.) |
125 |
*/ |
126 |
if (VAR_B > 255) { |
127 |
if (VAR_B & 0x80000000) |
128 |
cpu->cd.arm.flags |= ARM_F_C; |
129 |
else |
130 |
cpu->cd.arm.flags &= ~ARM_F_C; |
131 |
} |
132 |
#endif |
133 |
#endif |
134 |
|
135 |
|
136 |
#if !defined(A__MOV) && !defined(A__MVN) |
137 |
#ifdef A__PC |
138 |
if (ic->arg[0] == (size_t)&cpu->cd.arm.r[ARM_PC]) { |
139 |
uint32_t low_pc = ((size_t)ic - (size_t) |
140 |
cpu->cd.arm.cur_ic_page) / sizeof(struct arm_instr_call); |
141 |
VAR_A = cpu->pc & ~((ARM_IC_ENTRIES_PER_PAGE-1) |
142 |
<< ARM_INSTR_ALIGNMENT_SHIFT); |
143 |
VAR_A += (low_pc << ARM_INSTR_ALIGNMENT_SHIFT) + 8; |
144 |
} |
145 |
#endif |
146 |
#endif |
147 |
|
148 |
/* |
149 |
* Perform the operation: |
150 |
*/ |
151 |
#if defined(A__AND) || defined(A__TST) |
152 |
c64 = a & b; |
153 |
#endif |
154 |
#if defined(A__EOR) || defined(A__TEQ) |
155 |
c64 = a ^ b; |
156 |
#endif |
157 |
#if defined(A__SUB) || defined(A__CMP) || defined(A__RSB) |
158 |
c64 = a - b; |
159 |
#endif |
160 |
#if defined(A__ADD) || defined(A__CMN) |
161 |
c64 = a + b; |
162 |
#endif |
163 |
#if defined(A__ADC) |
164 |
c64 = a + b + (cpu->cd.arm.flags & ARM_F_C? 1 : 0); |
165 |
#endif |
166 |
#if defined(A__SBC) || defined(A__RSC) |
167 |
b += (cpu->cd.arm.flags & ARM_F_C? 0 : 1); |
168 |
c64 = a - b; |
169 |
#endif |
170 |
#if defined(A__ORR) |
171 |
c64 = a | b; |
172 |
#endif |
173 |
#if defined(A__MOV) |
174 |
c64 = b; |
175 |
#endif |
176 |
#if defined(A__BIC) |
177 |
c64 = a & ~b; |
178 |
#endif |
179 |
#if defined(A__MVN) |
180 |
c64 = ~b; |
181 |
#endif |
182 |
|
183 |
|
184 |
#if defined(A__CMP) || defined(A__CMN) || defined(A__TST) || defined(A__TEQ) |
185 |
/* No write to rd for compare/test. */ |
186 |
#else |
187 |
#ifdef A__PC |
188 |
if (ic->arg[2] == (size_t)&cpu->cd.arm.r[ARM_PC]) { |
189 |
#ifndef A__S |
190 |
uint32_t old_pc = cpu->pc; |
191 |
uint32_t mask_within_page = ((ARM_IC_ENTRIES_PER_PAGE-1) |
192 |
<< ARM_INSTR_ALIGNMENT_SHIFT) | |
193 |
((1 << ARM_INSTR_ALIGNMENT_SHIFT) - 1); |
194 |
#endif |
195 |
cpu->pc = (uint32_t)c64; |
196 |
#ifdef A__S |
197 |
/* Copy the right SPSR into CPSR: */ |
198 |
arm_save_register_bank(cpu); |
199 |
switch (cpu->cd.arm.cpsr & ARM_FLAG_MODE) { |
200 |
case ARM_MODE_FIQ32: |
201 |
cpu->cd.arm.cpsr = cpu->cd.arm.spsr_fiq; break; |
202 |
case ARM_MODE_IRQ32: |
203 |
cpu->cd.arm.cpsr = cpu->cd.arm.spsr_irq; break; |
204 |
case ARM_MODE_SVC32: |
205 |
cpu->cd.arm.cpsr = cpu->cd.arm.spsr_svc; break; |
206 |
case ARM_MODE_ABT32: |
207 |
cpu->cd.arm.cpsr = cpu->cd.arm.spsr_abt; break; |
208 |
case ARM_MODE_UND32: |
209 |
cpu->cd.arm.cpsr = cpu->cd.arm.spsr_und; break; |
210 |
} |
211 |
cpu->cd.arm.flags = cpu->cd.arm.cpsr >> 28; |
212 |
arm_load_register_bank(cpu); |
213 |
#else |
214 |
if ((old_pc & ~mask_within_page) == |
215 |
((uint32_t)cpu->pc & ~mask_within_page)) { |
216 |
cpu->cd.arm.next_ic = cpu->cd.arm.cur_ic_page + |
217 |
((cpu->pc & mask_within_page) >> |
218 |
ARM_INSTR_ALIGNMENT_SHIFT); |
219 |
} else |
220 |
#endif |
221 |
quick_pc_to_pointers(cpu); |
222 |
return; |
223 |
} else |
224 |
reg(ic->arg[2]) = c64; |
225 |
#else |
226 |
reg(ic->arg[2]) = c64; |
227 |
#endif |
228 |
#endif |
229 |
|
230 |
|
231 |
/* |
232 |
* Status flag update (if the S-bit is set): |
233 |
*/ |
234 |
#ifdef A__S |
235 |
c32 = c64; |
236 |
cpu->cd.arm.flags |
237 |
#if defined(A__CMP) || defined(A__CMN) || defined(A__ADC) || defined(A__ADD) \ |
238 |
|| defined(A__RSB) || defined(A__RSC) || defined(A__SBC) || defined(A__SUB) |
239 |
= 0; |
240 |
#else |
241 |
&= ~(ARM_F_Z | ARM_F_N); |
242 |
#endif |
243 |
|
244 |
#if defined(A__CMP) || defined(A__RSB) || defined(A__SUB) || \ |
245 |
defined(A__RSC) || defined(A__SBC) |
246 |
if ((uint32_t)a >= (uint32_t)b) |
247 |
cpu->cd.arm.flags |= ARM_F_C; |
248 |
#else |
249 |
#if defined(A__ADC) || defined(A__ADD) || defined(A__CMN) |
250 |
if (c32 != c64) |
251 |
cpu->cd.arm.flags |= ARM_F_C; |
252 |
#endif |
253 |
#endif |
254 |
|
255 |
if (c32 == 0) |
256 |
cpu->cd.arm.flags |= ARM_F_Z; |
257 |
|
258 |
if ((int32_t)c32 < 0) |
259 |
cpu->cd.arm.flags |= ARM_F_N; |
260 |
|
261 |
/* Calculate the Overflow bit: */ |
262 |
#if defined(A__CMP) || defined(A__CMN) || defined(A__ADC) || defined(A__ADD) \ |
263 |
|| defined(A__RSB) || defined(A__RSC) || defined(A__SBC) || defined(A__SUB) |
264 |
{ |
265 |
int v = 0; |
266 |
#if defined(A__ADD) || defined(A__CMN) |
267 |
if (((int32_t)a >= 0 && (int32_t)b >= 0 && |
268 |
(int32_t)c32 < 0) || |
269 |
((int32_t)a < 0 && (int32_t)b < 0 && |
270 |
(int32_t)c32 >= 0)) |
271 |
v = 1; |
272 |
#else |
273 |
#if defined(A__SUB) || defined(A__RSB) || defined(A__CMP) || \ |
274 |
defined(A__RSC) || defined(A__SBC) |
275 |
if (((int32_t)a >= 0 && (int32_t)b < 0 && |
276 |
(int32_t)c32 < 0) || |
277 |
((int32_t)a < 0 && (int32_t)b >= 0 && |
278 |
(int32_t)c32 >= 0)) |
279 |
v = 1; |
280 |
#endif |
281 |
#endif |
282 |
if (v) |
283 |
cpu->cd.arm.flags |= ARM_F_V; |
284 |
} |
285 |
#endif |
286 |
#endif /* A__S */ |
287 |
|
288 |
#undef VAR_A |
289 |
#undef VAR_B |
290 |
} |
291 |
|
292 |
|
293 |
void A__NAME__eq(struct cpu *cpu, struct arm_instr_call *ic) |
294 |
{ if (cpu->cd.arm.flags & ARM_F_Z) A__NAME(cpu, ic); } |
295 |
void A__NAME__ne(struct cpu *cpu, struct arm_instr_call *ic) |
296 |
{ if (!(cpu->cd.arm.flags & ARM_F_Z)) A__NAME(cpu, ic); } |
297 |
void A__NAME__cs(struct cpu *cpu, struct arm_instr_call *ic) |
298 |
{ if (cpu->cd.arm.flags & ARM_F_C) A__NAME(cpu, ic); } |
299 |
void A__NAME__cc(struct cpu *cpu, struct arm_instr_call *ic) |
300 |
{ if (!(cpu->cd.arm.flags & ARM_F_C)) A__NAME(cpu, ic); } |
301 |
void A__NAME__mi(struct cpu *cpu, struct arm_instr_call *ic) |
302 |
{ if (cpu->cd.arm.flags & ARM_F_N) A__NAME(cpu, ic); } |
303 |
void A__NAME__pl(struct cpu *cpu, struct arm_instr_call *ic) |
304 |
{ if (!(cpu->cd.arm.flags & ARM_F_N)) A__NAME(cpu, ic); } |
305 |
void A__NAME__vs(struct cpu *cpu, struct arm_instr_call *ic) |
306 |
{ if (cpu->cd.arm.flags & ARM_F_V) A__NAME(cpu, ic); } |
307 |
void A__NAME__vc(struct cpu *cpu, struct arm_instr_call *ic) |
308 |
{ if (!(cpu->cd.arm.flags & ARM_F_V)) A__NAME(cpu, ic); } |
309 |
|
310 |
#ifndef BLAHURG
#define BLAHURG
/*
 *  Lookup tables for the composite condition codes (hi/ls, ge/lt, gt/le),
 *  indexed by the 4-bit flags value (cpsr >> 28). Defined elsewhere;
 *  the guard keeps the declarations from being repeated, since this file
 *  is #included once per instruction variant.
 */
extern uint8_t condition_hi[16];
extern uint8_t condition_ge[16];
extern uint8_t condition_gt[16];
#endif
316 |
|
317 |
void A__NAME__hi(struct cpu *cpu, struct arm_instr_call *ic) |
318 |
{ if (condition_hi[cpu->cd.arm.flags]) A__NAME(cpu, ic); } |
319 |
void A__NAME__ls(struct cpu *cpu, struct arm_instr_call *ic) |
320 |
{ if (!condition_hi[cpu->cd.arm.flags]) A__NAME(cpu, ic); } |
321 |
void A__NAME__ge(struct cpu *cpu, struct arm_instr_call *ic) |
322 |
{ if (condition_ge[cpu->cd.arm.flags]) A__NAME(cpu, ic); } |
323 |
void A__NAME__lt(struct cpu *cpu, struct arm_instr_call *ic) |
324 |
{ if (!condition_ge[cpu->cd.arm.flags]) A__NAME(cpu, ic); } |
325 |
void A__NAME__gt(struct cpu *cpu, struct arm_instr_call *ic) |
326 |
{ if (condition_gt[cpu->cd.arm.flags]) A__NAME(cpu, ic); } |
327 |
void A__NAME__le(struct cpu *cpu, struct arm_instr_call *ic) |
328 |
{ if (!condition_gt[cpu->cd.arm.flags]) A__NAME(cpu, ic); } |
329 |
|