Commit | Line | Data |
---|---|---|
d9311bfa AT |
1 | /* Common code for ARM software single stepping support. |
2 | ||
618f726f | 3 | Copyright (C) 1988-2016 Free Software Foundation, Inc. |
d9311bfa AT |
4 | |
5 | This file is part of GDB. | |
6 | ||
7 | This program is free software; you can redistribute it and/or modify | |
8 | it under the terms of the GNU General Public License as published by | |
9 | the Free Software Foundation; either version 3 of the License, or | |
10 | (at your option) any later version. | |
11 | ||
12 | This program is distributed in the hope that it will be useful, | |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 | GNU General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with this program. If not, see <http://www.gnu.org/licenses/>. */ | |
19 | ||
20 | #include "common-defs.h" | |
21 | #include "gdb_vecs.h" | |
22 | #include "common-regcache.h" | |
23 | #include "arm.h" | |
24 | #include "arm-get-next-pcs.h" | |
25 | ||
/* See arm-get-next-pcs.h.  */

void
arm_get_next_pcs_ctor (struct arm_get_next_pcs *self,
		       struct arm_get_next_pcs_ops *ops,
		       int byte_order,
		       int byte_order_for_code,
		       int has_thumb2_breakpoint,
		       struct regcache *regcache)
{
  /* Plain field-by-field initialization.  SELF merely borrows OPS and
     REGCACHE; no ownership transfer is visible here, so the caller must
     keep them alive for SELF's lifetime.  */
  self->ops = ops;
  self->byte_order = byte_order;
  self->byte_order_for_code = byte_order_for_code;
  self->has_thumb2_breakpoint = has_thumb2_breakpoint;
  self->regcache = regcache;
}
42 | ||
43 | /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D} | |
44 | instruction and ending with a STREX{,B,H,D} instruction. If such a sequence | |
45 | is found, attempt to step through it. The end of the sequence address is | |
46 | added to the next_pcs list. */ | |
47 | ||
48 | static VEC (CORE_ADDR) * | |
49 | thumb_deal_with_atomic_sequence_raw (struct arm_get_next_pcs *self, | |
50 | CORE_ADDR pc) | |
51 | { | |
52 | int byte_order_for_code = self->byte_order_for_code; | |
53 | CORE_ADDR breaks[2] = {-1, -1}; | |
54 | CORE_ADDR loc = pc; | |
55 | unsigned short insn1, insn2; | |
56 | int insn_count; | |
57 | int index; | |
58 | int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */ | |
59 | const int atomic_sequence_length = 16; /* Instruction sequence length. */ | |
60 | ULONGEST status, itstate; | |
61 | VEC (CORE_ADDR) *next_pcs = NULL; | |
62 | ||
63 | /* We currently do not support atomic sequences within an IT block. */ | |
64 | status = regcache_raw_get_unsigned (self->regcache, ARM_PS_REGNUM); | |
65 | itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3); | |
66 | if (itstate & 0x0f) | |
67 | return NULL; | |
68 | ||
69 | /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */ | |
70 | insn1 = self->ops->read_mem_uint (loc, 2, byte_order_for_code); | |
71 | ||
72 | loc += 2; | |
73 | if (thumb_insn_size (insn1) != 4) | |
74 | return NULL; | |
75 | ||
76 | insn2 = self->ops->read_mem_uint (loc, 2, byte_order_for_code); | |
77 | ||
78 | loc += 2; | |
79 | if (!((insn1 & 0xfff0) == 0xe850 | |
80 | || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040))) | |
81 | return NULL; | |
82 | ||
83 | /* Assume that no atomic sequence is longer than "atomic_sequence_length" | |
84 | instructions. */ | |
85 | for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count) | |
86 | { | |
87 | insn1 = self->ops->read_mem_uint (loc, 2,byte_order_for_code); | |
88 | loc += 2; | |
89 | ||
90 | if (thumb_insn_size (insn1) != 4) | |
91 | { | |
92 | /* Assume that there is at most one conditional branch in the | |
93 | atomic sequence. If a conditional branch is found, put a | |
94 | breakpoint in its destination address. */ | |
95 | if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f) | |
96 | { | |
97 | if (last_breakpoint > 0) | |
98 | return NULL; /* More than one conditional branch found, | |
99 | fallback to the standard code. */ | |
100 | ||
101 | breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1); | |
102 | last_breakpoint++; | |
103 | } | |
104 | ||
105 | /* We do not support atomic sequences that use any *other* | |
106 | instructions but conditional branches to change the PC. | |
107 | Fall back to standard code to avoid losing control of | |
108 | execution. */ | |
109 | else if (thumb_instruction_changes_pc (insn1)) | |
110 | return NULL; | |
111 | } | |
112 | else | |
113 | { | |
114 | insn2 = self->ops->read_mem_uint (loc, 2, byte_order_for_code); | |
115 | ||
116 | loc += 2; | |
117 | ||
118 | /* Assume that there is at most one conditional branch in the | |
119 | atomic sequence. If a conditional branch is found, put a | |
120 | breakpoint in its destination address. */ | |
121 | if ((insn1 & 0xf800) == 0xf000 | |
122 | && (insn2 & 0xd000) == 0x8000 | |
123 | && (insn1 & 0x0380) != 0x0380) | |
124 | { | |
125 | int sign, j1, j2, imm1, imm2; | |
126 | unsigned int offset; | |
127 | ||
128 | sign = sbits (insn1, 10, 10); | |
129 | imm1 = bits (insn1, 0, 5); | |
130 | imm2 = bits (insn2, 0, 10); | |
131 | j1 = bit (insn2, 13); | |
132 | j2 = bit (insn2, 11); | |
133 | ||
134 | offset = (sign << 20) + (j2 << 19) + (j1 << 18); | |
135 | offset += (imm1 << 12) + (imm2 << 1); | |
136 | ||
137 | if (last_breakpoint > 0) | |
138 | return 0; /* More than one conditional branch found, | |
139 | fallback to the standard code. */ | |
140 | ||
141 | breaks[1] = loc + offset; | |
142 | last_breakpoint++; | |
143 | } | |
144 | ||
145 | /* We do not support atomic sequences that use any *other* | |
146 | instructions but conditional branches to change the PC. | |
147 | Fall back to standard code to avoid losing control of | |
148 | execution. */ | |
149 | else if (thumb2_instruction_changes_pc (insn1, insn2)) | |
150 | return NULL; | |
151 | ||
152 | /* If we find a strex{,b,h,d}, we're done. */ | |
153 | if ((insn1 & 0xfff0) == 0xe840 | |
154 | || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040)) | |
155 | break; | |
156 | } | |
157 | } | |
158 | ||
159 | /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */ | |
160 | if (insn_count == atomic_sequence_length) | |
161 | return NULL; | |
162 | ||
163 | /* Insert a breakpoint right after the end of the atomic sequence. */ | |
164 | breaks[0] = loc; | |
165 | ||
166 | /* Check for duplicated breakpoints. Check also for a breakpoint | |
167 | placed (branch instruction's destination) anywhere in sequence. */ | |
168 | if (last_breakpoint | |
169 | && (breaks[1] == breaks[0] | |
170 | || (breaks[1] >= pc && breaks[1] < loc))) | |
171 | last_breakpoint = 0; | |
172 | ||
173 | /* Adds the breakpoints to the list to be inserted. */ | |
174 | for (index = 0; index <= last_breakpoint; index++) | |
175 | VEC_safe_push (CORE_ADDR, next_pcs, MAKE_THUMB_ADDR (breaks[index])); | |
176 | ||
177 | return next_pcs; | |
178 | } | |
179 | ||
/* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
   instruction and ending with a STREX{,B,H,D} instruction.  If such a sequence
   is found, attempt to step through it.  The end of the sequence address is
   added to the next_pcs list.

   Returns NULL when no atomic sequence starts at PC, so that callers fall
   back to the normal single-step code.  */

static VEC (CORE_ADDR) *
arm_deal_with_atomic_sequence_raw (struct arm_get_next_pcs *self,
				   CORE_ADDR pc)
{
  int byte_order_for_code = self->byte_order_for_code;
  /* breaks[0] is the address after the sequence; breaks[1] (optional) is
     the destination of a conditional branch within the sequence.  */
  CORE_ADDR breaks[2] = {-1, -1};
  CORE_ADDR loc = pc;
  unsigned int insn;
  int insn_count;
  int index;
  int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed).  */
  const int atomic_sequence_length = 16; /* Instruction sequence length.  */
  VEC (CORE_ADDR) *next_pcs = NULL;

  /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
     Note that we do not currently support conditionally executed atomic
     instructions.  */
  insn = self->ops->read_mem_uint (loc, 4, byte_order_for_code);

  loc += 4;
  if ((insn & 0xff9000f0) != 0xe1900090)
    return NULL;

  /* Assume that no atomic sequence is longer than "atomic_sequence_length"
     instructions.  */
  for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
    {
      insn = self->ops->read_mem_uint (loc, 4, byte_order_for_code);

      loc += 4;

      /* Assume that there is at most one conditional branch in the atomic
	 sequence.  If a conditional branch is found, put a breakpoint in
	 its destination address.  */
      if (bits (insn, 24, 27) == 0xa)
	{
	  if (last_breakpoint > 0)
	    return NULL; /* More than one conditional branch found, fallback
			    to the standard single-step code.  */

	  /* loc has already been advanced past the branch, so back up to
	     the branch's own address for the destination computation.  */
	  breaks[1] = BranchDest (loc - 4, insn);
	  last_breakpoint++;
	}

      /* We do not support atomic sequences that use any *other* instructions
	 but conditional branches to change the PC.  Fall back to standard
	 code to avoid losing control of execution.  */
      else if (arm_instruction_changes_pc (insn))
	return NULL;

      /* If we find a strex{,b,h,d}, we're done.  */
      if ((insn & 0xff9000f0) == 0xe1800090)
	break;
    }

  /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence.  */
  if (insn_count == atomic_sequence_length)
    return NULL;

  /* Insert a breakpoint right after the end of the atomic sequence.  */
  breaks[0] = loc;

  /* Check for duplicated breakpoints.  Check also for a breakpoint
     placed (branch instruction's destination) anywhere in sequence.  */
  if (last_breakpoint
      && (breaks[1] == breaks[0]
	  || (breaks[1] >= pc && breaks[1] < loc)))
    last_breakpoint = 0;

  /* Adds the breakpoints to the list to be inserted.  */
  for (index = 0; index <= last_breakpoint; index++)
    VEC_safe_push (CORE_ADDR, next_pcs, breaks[index]);

  return next_pcs;
}
260 | ||
f5aa3069 | 261 | /* Find the next possible PCs for thumb mode. */ |
d9311bfa | 262 | |
f5aa3069 YQ |
263 | static VEC (CORE_ADDR) * |
264 | thumb_get_next_pcs_raw (struct arm_get_next_pcs *self, CORE_ADDR pc) | |
d9311bfa AT |
265 | { |
266 | int byte_order = self->byte_order; | |
267 | int byte_order_for_code = self->byte_order_for_code; | |
268 | unsigned long pc_val = ((unsigned long) pc) + 4; /* PC after prefetch */ | |
269 | unsigned short inst1; | |
270 | CORE_ADDR nextpc = pc + 2; /* Default is next instruction. */ | |
271 | unsigned long offset; | |
272 | ULONGEST status, itstate; | |
273 | struct regcache *regcache = self->regcache; | |
274 | VEC (CORE_ADDR) * next_pcs = NULL; | |
275 | ||
276 | nextpc = MAKE_THUMB_ADDR (nextpc); | |
277 | pc_val = MAKE_THUMB_ADDR (pc_val); | |
278 | ||
279 | inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code); | |
280 | ||
281 | /* Thumb-2 conditional execution support. There are eight bits in | |
282 | the CPSR which describe conditional execution state. Once | |
283 | reconstructed (they're in a funny order), the low five bits | |
284 | describe the low bit of the condition for each instruction and | |
285 | how many instructions remain. The high three bits describe the | |
286 | base condition. One of the low four bits will be set if an IT | |
287 | block is active. These bits read as zero on earlier | |
288 | processors. */ | |
289 | status = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM); | |
290 | itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3); | |
291 | ||
292 | /* If-Then handling. On GNU/Linux, where this routine is used, we | |
293 | use an undefined instruction as a breakpoint. Unlike BKPT, IT | |
294 | can disable execution of the undefined instruction. So we might | |
295 | miss the breakpoint if we set it on a skipped conditional | |
296 | instruction. Because conditional instructions can change the | |
297 | flags, affecting the execution of further instructions, we may | |
298 | need to set two breakpoints. */ | |
299 | ||
1b451dda | 300 | if (self->has_thumb2_breakpoint) |
d9311bfa AT |
301 | { |
302 | if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0) | |
303 | { | |
304 | /* An IT instruction. Because this instruction does not | |
305 | modify the flags, we can accurately predict the next | |
306 | executed instruction. */ | |
307 | itstate = inst1 & 0x00ff; | |
308 | pc += thumb_insn_size (inst1); | |
309 | ||
310 | while (itstate != 0 && ! condition_true (itstate >> 4, status)) | |
311 | { | |
312 | inst1 = self->ops->read_mem_uint (pc, 2,byte_order_for_code); | |
313 | pc += thumb_insn_size (inst1); | |
314 | itstate = thumb_advance_itstate (itstate); | |
315 | } | |
316 | ||
317 | VEC_safe_push (CORE_ADDR, next_pcs, MAKE_THUMB_ADDR (pc)); | |
318 | return next_pcs; | |
319 | } | |
320 | else if (itstate != 0) | |
321 | { | |
322 | /* We are in a conditional block. Check the condition. */ | |
323 | if (! condition_true (itstate >> 4, status)) | |
324 | { | |
325 | /* Advance to the next executed instruction. */ | |
326 | pc += thumb_insn_size (inst1); | |
327 | itstate = thumb_advance_itstate (itstate); | |
328 | ||
329 | while (itstate != 0 && ! condition_true (itstate >> 4, status)) | |
330 | { | |
331 | inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code); | |
332 | ||
333 | pc += thumb_insn_size (inst1); | |
334 | itstate = thumb_advance_itstate (itstate); | |
335 | } | |
336 | ||
337 | VEC_safe_push (CORE_ADDR, next_pcs, MAKE_THUMB_ADDR (pc)); | |
338 | return next_pcs; | |
339 | } | |
340 | else if ((itstate & 0x0f) == 0x08) | |
341 | { | |
342 | /* This is the last instruction of the conditional | |
343 | block, and it is executed. We can handle it normally | |
344 | because the following instruction is not conditional, | |
345 | and we must handle it normally because it is | |
346 | permitted to branch. Fall through. */ | |
347 | } | |
348 | else | |
349 | { | |
350 | int cond_negated; | |
351 | ||
352 | /* There are conditional instructions after this one. | |
353 | If this instruction modifies the flags, then we can | |
354 | not predict what the next executed instruction will | |
355 | be. Fortunately, this instruction is architecturally | |
356 | forbidden to branch; we know it will fall through. | |
357 | Start by skipping past it. */ | |
358 | pc += thumb_insn_size (inst1); | |
359 | itstate = thumb_advance_itstate (itstate); | |
360 | ||
361 | /* Set a breakpoint on the following instruction. */ | |
362 | gdb_assert ((itstate & 0x0f) != 0); | |
363 | VEC_safe_push (CORE_ADDR, next_pcs, MAKE_THUMB_ADDR (pc)); | |
364 | ||
365 | cond_negated = (itstate >> 4) & 1; | |
366 | ||
367 | /* Skip all following instructions with the same | |
368 | condition. If there is a later instruction in the IT | |
369 | block with the opposite condition, set the other | |
370 | breakpoint there. If not, then set a breakpoint on | |
371 | the instruction after the IT block. */ | |
372 | do | |
373 | { | |
374 | inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code); | |
375 | pc += thumb_insn_size (inst1); | |
376 | itstate = thumb_advance_itstate (itstate); | |
377 | } | |
378 | while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated); | |
379 | ||
380 | VEC_safe_push (CORE_ADDR, next_pcs, MAKE_THUMB_ADDR (pc)); | |
381 | ||
382 | return next_pcs; | |
383 | } | |
384 | } | |
385 | } | |
386 | else if (itstate & 0x0f) | |
387 | { | |
388 | /* We are in a conditional block. Check the condition. */ | |
389 | int cond = itstate >> 4; | |
390 | ||
391 | if (! condition_true (cond, status)) | |
392 | { | |
393 | /* Advance to the next instruction. All the 32-bit | |
394 | instructions share a common prefix. */ | |
395 | VEC_safe_push (CORE_ADDR, next_pcs, | |
396 | MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1))); | |
397 | } | |
398 | ||
399 | return next_pcs; | |
400 | ||
401 | /* Otherwise, handle the instruction normally. */ | |
402 | } | |
403 | ||
404 | if ((inst1 & 0xff00) == 0xbd00) /* pop {rlist, pc} */ | |
405 | { | |
406 | CORE_ADDR sp; | |
407 | ||
408 | /* Fetch the saved PC from the stack. It's stored above | |
409 | all of the other registers. */ | |
410 | offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE; | |
411 | sp = regcache_raw_get_unsigned (regcache, ARM_SP_REGNUM); | |
412 | nextpc = self->ops->read_mem_uint (sp + offset, 4, byte_order); | |
413 | } | |
414 | else if ((inst1 & 0xf000) == 0xd000) /* conditional branch */ | |
415 | { | |
416 | unsigned long cond = bits (inst1, 8, 11); | |
417 | if (cond == 0x0f) /* 0x0f = SWI */ | |
418 | { | |
419 | nextpc = self->ops->syscall_next_pc (self, pc); | |
420 | } | |
421 | else if (cond != 0x0f && condition_true (cond, status)) | |
422 | nextpc = pc_val + (sbits (inst1, 0, 7) << 1); | |
423 | } | |
424 | else if ((inst1 & 0xf800) == 0xe000) /* unconditional branch */ | |
425 | { | |
426 | nextpc = pc_val + (sbits (inst1, 0, 10) << 1); | |
427 | } | |
428 | else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */ | |
429 | { | |
430 | unsigned short inst2; | |
431 | inst2 = self->ops->read_mem_uint (pc + 2, 2, byte_order_for_code); | |
432 | ||
433 | /* Default to the next instruction. */ | |
434 | nextpc = pc + 4; | |
435 | nextpc = MAKE_THUMB_ADDR (nextpc); | |
436 | ||
437 | if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000) | |
438 | { | |
439 | /* Branches and miscellaneous control instructions. */ | |
440 | ||
441 | if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000) | |
442 | { | |
443 | /* B, BL, BLX. */ | |
444 | int j1, j2, imm1, imm2; | |
445 | ||
446 | imm1 = sbits (inst1, 0, 10); | |
447 | imm2 = bits (inst2, 0, 10); | |
448 | j1 = bit (inst2, 13); | |
449 | j2 = bit (inst2, 11); | |
450 | ||
451 | offset = ((imm1 << 12) + (imm2 << 1)); | |
452 | offset ^= ((!j2) << 22) | ((!j1) << 23); | |
453 | ||
454 | nextpc = pc_val + offset; | |
455 | /* For BLX make sure to clear the low bits. */ | |
456 | if (bit (inst2, 12) == 0) | |
457 | nextpc = nextpc & 0xfffffffc; | |
458 | } | |
459 | else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00) | |
460 | { | |
461 | /* SUBS PC, LR, #imm8. */ | |
462 | nextpc = regcache_raw_get_unsigned (regcache, ARM_LR_REGNUM); | |
463 | nextpc -= inst2 & 0x00ff; | |
464 | } | |
465 | else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380) | |
466 | { | |
467 | /* Conditional branch. */ | |
468 | if (condition_true (bits (inst1, 6, 9), status)) | |
469 | { | |
470 | int sign, j1, j2, imm1, imm2; | |
471 | ||
472 | sign = sbits (inst1, 10, 10); | |
473 | imm1 = bits (inst1, 0, 5); | |
474 | imm2 = bits (inst2, 0, 10); | |
475 | j1 = bit (inst2, 13); | |
476 | j2 = bit (inst2, 11); | |
477 | ||
478 | offset = (sign << 20) + (j2 << 19) + (j1 << 18); | |
479 | offset += (imm1 << 12) + (imm2 << 1); | |
480 | ||
481 | nextpc = pc_val + offset; | |
482 | } | |
483 | } | |
484 | } | |
485 | else if ((inst1 & 0xfe50) == 0xe810) | |
486 | { | |
487 | /* Load multiple or RFE. */ | |
488 | int rn, offset, load_pc = 1; | |
489 | ||
490 | rn = bits (inst1, 0, 3); | |
491 | if (bit (inst1, 7) && !bit (inst1, 8)) | |
492 | { | |
493 | /* LDMIA or POP */ | |
494 | if (!bit (inst2, 15)) | |
495 | load_pc = 0; | |
496 | offset = bitcount (inst2) * 4 - 4; | |
497 | } | |
498 | else if (!bit (inst1, 7) && bit (inst1, 8)) | |
499 | { | |
500 | /* LDMDB */ | |
501 | if (!bit (inst2, 15)) | |
502 | load_pc = 0; | |
503 | offset = -4; | |
504 | } | |
505 | else if (bit (inst1, 7) && bit (inst1, 8)) | |
506 | { | |
507 | /* RFEIA */ | |
508 | offset = 0; | |
509 | } | |
510 | else if (!bit (inst1, 7) && !bit (inst1, 8)) | |
511 | { | |
512 | /* RFEDB */ | |
513 | offset = -8; | |
514 | } | |
515 | else | |
516 | load_pc = 0; | |
517 | ||
518 | if (load_pc) | |
519 | { | |
520 | CORE_ADDR addr = regcache_raw_get_unsigned (regcache, rn); | |
521 | nextpc = self->ops->read_mem_uint (addr + offset, 4, byte_order); | |
522 | } | |
523 | } | |
524 | else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00) | |
525 | { | |
526 | /* MOV PC or MOVS PC. */ | |
527 | nextpc = regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3)); | |
528 | nextpc = MAKE_THUMB_ADDR (nextpc); | |
529 | } | |
530 | else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000) | |
531 | { | |
532 | /* LDR PC. */ | |
533 | CORE_ADDR base; | |
534 | int rn, load_pc = 1; | |
535 | ||
536 | rn = bits (inst1, 0, 3); | |
537 | base = regcache_raw_get_unsigned (regcache, rn); | |
538 | if (rn == ARM_PC_REGNUM) | |
539 | { | |
540 | base = (base + 4) & ~(CORE_ADDR) 0x3; | |
541 | if (bit (inst1, 7)) | |
542 | base += bits (inst2, 0, 11); | |
543 | else | |
544 | base -= bits (inst2, 0, 11); | |
545 | } | |
546 | else if (bit (inst1, 7)) | |
547 | base += bits (inst2, 0, 11); | |
548 | else if (bit (inst2, 11)) | |
549 | { | |
550 | if (bit (inst2, 10)) | |
551 | { | |
552 | if (bit (inst2, 9)) | |
553 | base += bits (inst2, 0, 7); | |
554 | else | |
555 | base -= bits (inst2, 0, 7); | |
556 | } | |
557 | } | |
558 | else if ((inst2 & 0x0fc0) == 0x0000) | |
559 | { | |
560 | int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3); | |
561 | base += regcache_raw_get_unsigned (regcache, rm) << shift; | |
562 | } | |
563 | else | |
564 | /* Reserved. */ | |
565 | load_pc = 0; | |
566 | ||
567 | if (load_pc) | |
568 | nextpc | |
569 | = self->ops->read_mem_uint (base, 4, byte_order); | |
570 | } | |
571 | else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000) | |
572 | { | |
573 | /* TBB. */ | |
574 | CORE_ADDR tbl_reg, table, offset, length; | |
575 | ||
576 | tbl_reg = bits (inst1, 0, 3); | |
577 | if (tbl_reg == 0x0f) | |
578 | table = pc + 4; /* Regcache copy of PC isn't right yet. */ | |
579 | else | |
580 | table = regcache_raw_get_unsigned (regcache, tbl_reg); | |
581 | ||
582 | offset = regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3)); | |
583 | length = 2 * self->ops->read_mem_uint (table + offset, 1, byte_order); | |
584 | nextpc = pc_val + length; | |
585 | } | |
586 | else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010) | |
587 | { | |
588 | /* TBH. */ | |
589 | CORE_ADDR tbl_reg, table, offset, length; | |
590 | ||
591 | tbl_reg = bits (inst1, 0, 3); | |
592 | if (tbl_reg == 0x0f) | |
593 | table = pc + 4; /* Regcache copy of PC isn't right yet. */ | |
594 | else | |
595 | table = regcache_raw_get_unsigned (regcache, tbl_reg); | |
596 | ||
597 | offset = 2 * regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3)); | |
598 | length = 2 * self->ops->read_mem_uint (table + offset, 2, byte_order); | |
599 | nextpc = pc_val + length; | |
600 | } | |
601 | } | |
602 | else if ((inst1 & 0xff00) == 0x4700) /* bx REG, blx REG */ | |
603 | { | |
604 | if (bits (inst1, 3, 6) == 0x0f) | |
605 | nextpc = UNMAKE_THUMB_ADDR (pc_val); | |
606 | else | |
607 | nextpc = regcache_raw_get_unsigned (regcache, bits (inst1, 3, 6)); | |
608 | } | |
609 | else if ((inst1 & 0xff87) == 0x4687) /* mov pc, REG */ | |
610 | { | |
611 | if (bits (inst1, 3, 6) == 0x0f) | |
612 | nextpc = pc_val; | |
613 | else | |
614 | nextpc = regcache_raw_get_unsigned (regcache, bits (inst1, 3, 6)); | |
615 | ||
616 | nextpc = MAKE_THUMB_ADDR (nextpc); | |
617 | } | |
618 | else if ((inst1 & 0xf500) == 0xb100) | |
619 | { | |
620 | /* CBNZ or CBZ. */ | |
621 | int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1); | |
622 | ULONGEST reg = regcache_raw_get_unsigned (regcache, bits (inst1, 0, 2)); | |
623 | ||
624 | if (bit (inst1, 11) && reg != 0) | |
625 | nextpc = pc_val + imm; | |
626 | else if (!bit (inst1, 11) && reg == 0) | |
627 | nextpc = pc_val + imm; | |
628 | } | |
629 | ||
630 | VEC_safe_push (CORE_ADDR, next_pcs, nextpc); | |
631 | ||
632 | return next_pcs; | |
633 | } | |
634 | ||
/* Get the raw next possible addresses.  PC is the current program
   counter, which is assumed to be executing in ARM mode.

   The values returned have the execution state of the next instruction
   encoded in it.  Use IS_THUMB_ADDR () to see whether the instruction is
   in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
   address in GDB and arm_addr_bits_remove in GDBServer.  */

static VEC (CORE_ADDR) *
arm_get_next_pcs_raw (struct arm_get_next_pcs *self, CORE_ADDR pc)
{
  int byte_order = self->byte_order;
  int byte_order_for_code = self->byte_order_for_code;
  unsigned long pc_val;
  unsigned long this_instr = 0;
  unsigned long status;
  CORE_ADDR nextpc;
  struct regcache *regcache = self->regcache;
  VEC (CORE_ADDR) *next_pcs = NULL;

  pc_val = (unsigned long) pc;
  this_instr = self->ops->read_mem_uint (pc, 4, byte_order_for_code);

  status = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
  nextpc = (CORE_ADDR) (pc_val + 4);	/* Default case */

  /* Condition field == NV (0b1111): unconditional instruction space.  */
  if (bits (this_instr, 28, 31) == INST_NV)
    switch (bits (this_instr, 24, 27))
      {
      case 0xa:
      case 0xb:
	{
	  /* Branch with Link and change to Thumb (BLX immediate).
	     Bit 24 supplies the extra halfword offset.  */
	  nextpc = BranchDest (pc, this_instr);
	  nextpc |= bit (this_instr, 24) << 1;
	  nextpc = MAKE_THUMB_ADDR (nextpc);
	  break;
	}
      case 0xc:
      case 0xd:
      case 0xe:
	/* Coprocessor register transfer.  */
	if (bits (this_instr, 12, 15) == 15)
	  error (_("Invalid update to pc in instruction"));
	break;
      }
  else if (condition_true (bits (this_instr, 28, 31), status))
    {
      switch (bits (this_instr, 24, 27))
	{
	case 0x0:
	case 0x1:		/* data processing */
	case 0x2:
	case 0x3:
	  {
	    unsigned long operand1, operand2, result = 0;
	    unsigned long rn;
	    int c;

	    /* Only instructions writing to the PC (Rd == 15) can change
	       control flow here.  */
	    if (bits (this_instr, 12, 15) != 15)
	      break;

	    if (bits (this_instr, 22, 25) == 0
		&& bits (this_instr, 4, 7) == 9)	/* multiply */
	      error (_("Invalid update to pc in instruction"));

	    /* BX <reg>, BLX <reg> */
	    if (bits (this_instr, 4, 27) == 0x12fff1
		|| bits (this_instr, 4, 27) == 0x12fff3)
	      {
		rn = bits (this_instr, 0, 3);
		nextpc = ((rn == ARM_PC_REGNUM)
			  ? (pc_val + 8)
			  : regcache_raw_get_unsigned (regcache, rn));

		VEC_safe_push (CORE_ADDR, next_pcs, nextpc);
		return next_pcs;
	      }

	    /* Multiply into PC.  */
	    c = (status & FLAG_C) ? 1 : 0;
	    rn = bits (this_instr, 16, 19);
	    /* Reading the PC as an operand yields the prefetch address.  */
	    operand1 = ((rn == ARM_PC_REGNUM)
			? (pc_val + 8)
			: regcache_raw_get_unsigned (regcache, rn));

	    if (bit (this_instr, 25))
	      {
		unsigned long immval = bits (this_instr, 0, 7);
		unsigned long rotate = 2 * bits (this_instr, 8, 11);
		/* NOTE(review): when rotate == 0 this evaluates
		   immval << 32, which is undefined behavior in C; the
		   OR with immval >> 0 happens to mask it on common
		   compilers — worth confirming/fixing upstream.  */
		operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
		  & 0xffffffff;
	      }
	    else		/* operand 2 is a shifted register.  */
	      operand2 = shifted_reg_val (regcache, this_instr, c,
					  pc_val, status);

	    /* Emulate the data-processing opcode to find the value
	       written to the PC.  */
	    switch (bits (this_instr, 21, 24))
	      {
	      case 0x0:	/*and */
		result = operand1 & operand2;
		break;

	      case 0x1:	/*eor */
		result = operand1 ^ operand2;
		break;

	      case 0x2:	/*sub */
		result = operand1 - operand2;
		break;

	      case 0x3:	/*rsb */
		result = operand2 - operand1;
		break;

	      case 0x4:	/*add */
		result = operand1 + operand2;
		break;

	      case 0x5:	/*adc */
		result = operand1 + operand2 + c;
		break;

	      case 0x6:	/*sbc */
		/* NOTE(review): architectural SBC is op1 - op2 + C - 1;
		   the "- 1" is absent here — confirm against the ARM ARM
		   before relying on this path.  */
		result = operand1 - operand2 + c;
		break;

	      case 0x7:	/*rsc */
		/* NOTE(review): same concern as SBC above (missing -1).  */
		result = operand2 - operand1 + c;
		break;

	      case 0x8:
	      case 0x9:
	      case 0xa:
	      case 0xb:	/* tst, teq, cmp, cmn */
		/* Comparison opcodes don't write Rd; keep the default.  */
		result = (unsigned long) nextpc;
		break;

	      case 0xc:	/*orr */
		result = operand1 | operand2;
		break;

	      case 0xd:	/*mov */
		/* Always step into a function.  */
		result = operand2;
		break;

	      case 0xe:	/*bic */
		result = operand1 & ~operand2;
		break;

	      case 0xf:	/*mvn */
		result = ~operand2;
		break;
	      }
	    nextpc = self->ops->addr_bits_remove (self, result);
	    break;
	  }

	case 0x4:
	case 0x5:		/* data transfer */
	case 0x6:
	case 0x7:
	  if (bits (this_instr, 25, 27) == 0x3 && bit (this_instr, 4) == 1)
	    {
	      /* Media instructions and architecturally undefined
		 instructions.  */
	      break;
	    }

	  if (bit (this_instr, 20))
	    {
	      /* load */
	      if (bits (this_instr, 12, 15) == 15)
		{
		  /* rd == pc */
		  unsigned long rn;
		  unsigned long base;

		  if (bit (this_instr, 22))
		    error (_("Invalid update to pc in instruction"));

		  /* byte write to PC */
		  rn = bits (this_instr, 16, 19);
		  base = ((rn == ARM_PC_REGNUM)
			  ? (pc_val + 8)
			  : regcache_raw_get_unsigned (regcache, rn));

		  if (bit (this_instr, 24))
		    {
		      /* pre-indexed */
		      int c = (status & FLAG_C) ? 1 : 0;
		      unsigned long offset =
			(bit (this_instr, 25)
			 ? shifted_reg_val (regcache, this_instr, c,
					    pc_val, status)
			 : bits (this_instr, 0, 11));

		      if (bit (this_instr, 23))
			base += offset;
		      else
			base -= offset;
		    }
		  nextpc
		    = (CORE_ADDR) self->ops->read_mem_uint ((CORE_ADDR) base,
							    4, byte_order);
		}
	    }
	  break;

	case 0x8:
	case 0x9:		/* block transfer */
	  if (bit (this_instr, 20))
	    {
	      /* LDM */
	      if (bit (this_instr, 15))
		{
		  /* loading pc */
		  int offset = 0;
		  CORE_ADDR rn_val_offset = 0;
		  unsigned long rn_val
		    = regcache_raw_get_unsigned (regcache,
						 bits (this_instr, 16, 19));

		  if (bit (this_instr, 23))
		    {
		      /* up */
		      unsigned long reglist = bits (this_instr, 0, 14);
		      /* The PC slot is the last one transferred, above
			 all lower-numbered registers.  */
		      offset = bitcount (reglist) * 4;
		      if (bit (this_instr, 24))		/* pre */
			offset += 4;
		    }
		  else if (bit (this_instr, 24))
		    offset = -4;

		  rn_val_offset = rn_val + offset;
		  nextpc = (CORE_ADDR) self->ops->read_mem_uint (rn_val_offset,
								 4, byte_order);
		}
	    }
	  break;

	case 0xb:		/* branch & link */
	case 0xa:		/* branch */
	  {
	    nextpc = BranchDest (pc, this_instr);
	    break;
	  }

	case 0xc:
	case 0xd:
	case 0xe:		/* coproc ops */
	  break;
	case 0xf:		/* SWI */
	  {
	    nextpc = self->ops->syscall_next_pc (self, pc);
	  }
	  break;

	default:
	  /* bits (this_instr, 24, 27) covers 0x0-0xf, so this is
	     unreachable; kept as a defensive internal error.  */
	  error (_("Bad bit-field extraction"));
	  return next_pcs;
	}
    }

  VEC_safe_push (CORE_ADDR, next_pcs, nextpc);
  return next_pcs;
}
f5aa3069 YQ |
903 | |
904 | /* See arm-get-next-pcs.h. */ | |
905 | ||
906 | VEC (CORE_ADDR) * | |
907 | arm_get_next_pcs (struct arm_get_next_pcs *self, CORE_ADDR pc) | |
908 | { | |
909 | VEC (CORE_ADDR) *next_pcs = NULL; | |
910 | ||
911 | if (self->ops->is_thumb (self)) | |
912 | { | |
913 | next_pcs = thumb_deal_with_atomic_sequence_raw (self, pc); | |
914 | if (next_pcs == NULL) | |
915 | next_pcs = thumb_get_next_pcs_raw (self, pc); | |
916 | } | |
917 | else | |
918 | { | |
919 | next_pcs = arm_deal_with_atomic_sequence_raw (self, pc); | |
920 | if (next_pcs == NULL) | |
921 | next_pcs = arm_get_next_pcs_raw (self, pc); | |
922 | } | |
923 | ||
924 | return next_pcs; | |
925 | } |