Update copyright year range in all GDB files.
[deliverable/binutils-gdb.git] / gdb / arch / arm-get-next-pcs.c
1 /* Common code for ARM software single stepping support.
2
3 Copyright (C) 1988-2020 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "gdbsupport/common-defs.h"
21 #include "gdbsupport/gdb_vecs.h"
22 #include "gdbsupport/common-regcache.h"
23 #include "arm.h"
24 #include "arm-get-next-pcs.h"
25
26 /* See arm-get-next-pcs.h. */
27
28 void
29 arm_get_next_pcs_ctor (struct arm_get_next_pcs *self,
30 struct arm_get_next_pcs_ops *ops,
31 int byte_order,
32 int byte_order_for_code,
33 int has_thumb2_breakpoint,
34 struct regcache *regcache)
35 {
36 self->ops = ops;
37 self->byte_order = byte_order;
38 self->byte_order_for_code = byte_order_for_code;
39 self->has_thumb2_breakpoint = has_thumb2_breakpoint;
40 self->regcache = regcache;
41 }
42
43 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
44 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
45 is found, attempt to step through it. The end of the sequence address is
46 added to the next_pcs list. */
47
static std::vector<CORE_ADDR>
thumb_deal_with_atomic_sequence_raw (struct arm_get_next_pcs *self)
{
  int byte_order_for_code = self->byte_order_for_code;
  /* breaks[0] will be the address just past the sequence; breaks[1]
     (optional) the destination of a conditional branch found inside
     the sequence.  */
  CORE_ADDR breaks[2] = {CORE_ADDR_MAX, CORE_ADDR_MAX};
  CORE_ADDR pc = regcache_read_pc (self->regcache);
  CORE_ADDR loc = pc;
  unsigned short insn1, insn2;
  int insn_count;
  int index;
  int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed).  */
  const int atomic_sequence_length = 16; /* Instruction sequence length.  */
  ULONGEST status, itstate;

  /* We currently do not support atomic sequences within an IT block.
     Reassemble the ITSTATE field from its two pieces in the CPSR; any
     of the low four bits set means an IT block is active.  */
  status = regcache_raw_get_unsigned (self->regcache, ARM_PS_REGNUM);
  itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
  if (itstate & 0x0f)
    return {};

  /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.  */
  insn1 = self->ops->read_mem_uint (loc, 2, byte_order_for_code);

  loc += 2;
  /* The LDREX family are 32-bit Thumb-2 encodings; a 16-bit insn here
     cannot start a sequence.  */
  if (thumb_insn_size (insn1) != 4)
    return {};

  insn2 = self->ops->read_mem_uint (loc, 2, byte_order_for_code);

  loc += 2;
  /* 0xe85x is LDREX; 0xe8dx with the second-halfword bits shown is
     presumably the LDREXB/LDREXH/LDREXD forms — matches the Thumb-2
     exclusive-load encodings.  */
  if (!((insn1 & 0xfff0) == 0xe850
	|| ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
    return {};

  /* Assume that no atomic sequence is longer than "atomic_sequence_length"
     instructions.  */
  for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
    {
      insn1 = self->ops->read_mem_uint (loc, 2,byte_order_for_code);
      loc += 2;

      if (thumb_insn_size (insn1) != 4)
	{
	  /* Assume that there is at most one conditional branch in the
	     atomic sequence.  If a conditional branch is found, put a
	     breakpoint in its destination address.  */
	  if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
	    {
	      if (last_breakpoint > 0)
		return {}; /* More than one conditional branch found,
			      fallback to the standard code.  */

	      /* LOC already points past the branch, so LOC + 2 is the
		 branch's PC-plus-4 base for the offset.  */
	      breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
	      last_breakpoint++;
	    }

	  /* We do not support atomic sequences that use any *other*
	     instructions but conditional branches to change the PC.
	     Fall back to standard code to avoid losing control of
	     execution.  */
	  else if (thumb_instruction_changes_pc (insn1))
	    return {};
	}
      else
	{
	  /* 32-bit instruction: fetch the second halfword.  */
	  insn2 = self->ops->read_mem_uint (loc, 2, byte_order_for_code);

	  loc += 2;

	  /* Assume that there is at most one conditional branch in the
	     atomic sequence.  If a conditional branch is found, put a
	     breakpoint in its destination address.  */
	  if ((insn1 & 0xf800) == 0xf000
	      && (insn2 & 0xd000) == 0x8000
	      && (insn1 & 0x0380) != 0x0380)
	    {
	      int sign, j1, j2, imm1, imm2;
	      unsigned int offset;

	      sign = sbits (insn1, 10, 10);
	      imm1 = bits (insn1, 0, 5);
	      imm2 = bits (insn2, 0, 10);
	      j1 = bit (insn2, 13);
	      j2 = bit (insn2, 11);

	      /* Assemble the signed branch offset from the scattered
		 immediate fields (Thumb-2 conditional-branch layout);
		 SIGN is already sign-extended by sbits.  */
	      offset = (sign << 20) + (j2 << 19) + (j1 << 18);
	      offset += (imm1 << 12) + (imm2 << 1);

	      if (last_breakpoint > 0)
		return {}; /* More than one conditional branch found,
			      fallback to the standard code.  */

	      breaks[1] = loc + offset;
	      last_breakpoint++;
	    }

	  /* We do not support atomic sequences that use any *other*
	     instructions but conditional branches to change the PC.
	     Fall back to standard code to avoid losing control of
	     execution.  */
	  else if (thumb2_instruction_changes_pc (insn1, insn2))
	    return {};

	  /* If we find a strex{,b,h,d}, we're done.  */
	  if ((insn1 & 0xfff0) == 0xe840
	      || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
	    break;
	}
    }

  /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence.  */
  if (insn_count == atomic_sequence_length)
    return {};

  /* Insert a breakpoint right after the end of the atomic sequence.  */
  breaks[0] = loc;

  /* Check for duplicated breakpoints.  Check also for a breakpoint
     placed (branch instruction's destination) anywhere in sequence.  */
  if (last_breakpoint
      && (breaks[1] == breaks[0]
	  || (breaks[1] >= pc && breaks[1] < loc)))
    last_breakpoint = 0;

  std::vector<CORE_ADDR> next_pcs;

  /* Adds the breakpoints to the list to be inserted.  Addresses are
     tagged as Thumb since the whole sequence executes in Thumb state.  */
  for (index = 0; index <= last_breakpoint; index++)
    next_pcs.push_back (MAKE_THUMB_ADDR (breaks[index]));

  return next_pcs;
}
180
181 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
182 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
183 is found, attempt to step through it. The end of the sequence address is
184 added to the next_pcs list. */
185
static std::vector<CORE_ADDR>
arm_deal_with_atomic_sequence_raw (struct arm_get_next_pcs *self)
{
  int byte_order_for_code = self->byte_order_for_code;
  /* breaks[0] will be the address just past the sequence; breaks[1]
     (optional) the destination of a conditional branch found inside
     the sequence.  */
  CORE_ADDR breaks[2] = {CORE_ADDR_MAX, CORE_ADDR_MAX};
  CORE_ADDR pc = regcache_read_pc (self->regcache);
  CORE_ADDR loc = pc;
  unsigned int insn;
  int insn_count;
  int index;
  int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed).  */
  const int atomic_sequence_length = 16; /* Instruction sequence length.  */

  /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
     Note that we do not currently support conditionally executed atomic
     instructions.  */
  insn = self->ops->read_mem_uint (loc, 4, byte_order_for_code);

  loc += 4;
  /* Mask matches the ARM-mode LDREX{,B,H,D} encodings with the
     always-execute (AL, 0xe) condition field.  */
  if ((insn & 0xff9000f0) != 0xe1900090)
    return {};

  /* Assume that no atomic sequence is longer than "atomic_sequence_length"
     instructions.  */
  for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
    {
      insn = self->ops->read_mem_uint (loc, 4, byte_order_for_code);

      loc += 4;

      /* Assume that there is at most one conditional branch in the atomic
	 sequence.  If a conditional branch is found, put a breakpoint in
	 its destination address.  */
      if (bits (insn, 24, 27) == 0xa)
	{
	  if (last_breakpoint > 0)
	    return {}; /* More than one conditional branch found, fallback
			  to the standard single-step code.  */

	  /* LOC was already advanced past the branch, hence LOC - 4.  */
	  breaks[1] = BranchDest (loc - 4, insn);
	  last_breakpoint++;
	}

      /* We do not support atomic sequences that use any *other* instructions
	 but conditional branches to change the PC.  Fall back to standard
	 code to avoid losing control of execution.  */
      else if (arm_instruction_changes_pc (insn))
	return {};

      /* If we find a strex{,b,h,d}, we're done.  */
      if ((insn & 0xff9000f0) == 0xe1800090)
	break;
    }

  /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence.  */
  if (insn_count == atomic_sequence_length)
    return {};

  /* Insert a breakpoint right after the end of the atomic sequence.  */
  breaks[0] = loc;

  /* Check for duplicated breakpoints.  Check also for a breakpoint
     placed (branch instruction's destination) anywhere in sequence.  */
  if (last_breakpoint
      && (breaks[1] == breaks[0]
	  || (breaks[1] >= pc && breaks[1] < loc)))
    last_breakpoint = 0;

  std::vector<CORE_ADDR> next_pcs;

  /* Adds the breakpoints to the list to be inserted.  */
  for (index = 0; index <= last_breakpoint; index++)
    next_pcs.push_back (breaks[index]);

  return next_pcs;
}
262
263 /* Find the next possible PCs for thumb mode. */
264
static std::vector<CORE_ADDR>
thumb_get_next_pcs_raw (struct arm_get_next_pcs *self)
{
  int byte_order = self->byte_order;
  int byte_order_for_code = self->byte_order_for_code;
  CORE_ADDR pc = regcache_read_pc (self->regcache);
  unsigned long pc_val = ((unsigned long) pc) + 4; /* PC after prefetch */
  unsigned short inst1;
  CORE_ADDR nextpc = pc + 2; /* Default is next instruction.  */
  ULONGEST status, itstate;
  struct regcache *regcache = self->regcache;
  std::vector<CORE_ADDR> next_pcs;

  /* Tag both the fall-through address and the prefetch value as Thumb
     so the low bit survives arithmetic on them below.  */
  nextpc = MAKE_THUMB_ADDR (nextpc);
  pc_val = MAKE_THUMB_ADDR (pc_val);

  inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code);

  /* Thumb-2 conditional execution support.  There are eight bits in
     the CPSR which describe conditional execution state.  Once
     reconstructed (they're in a funny order), the low five bits
     describe the low bit of the condition for each instruction and
     how many instructions remain.  The high three bits describe the
     base condition.  One of the low four bits will be set if an IT
     block is active.  These bits read as zero on earlier
     processors.  */
  status = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
  itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);

  /* If-Then handling.  On GNU/Linux, where this routine is used, we
     use an undefined instruction as a breakpoint.  Unlike BKPT, IT
     can disable execution of the undefined instruction.  So we might
     miss the breakpoint if we set it on a skipped conditional
     instruction.  Because conditional instructions can change the
     flags, affecting the execution of further instructions, we may
     need to set two breakpoints.  */

  if (self->has_thumb2_breakpoint)
    {
      if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
	{
	  /* An IT instruction.  Because this instruction does not
	     modify the flags, we can accurately predict the next
	     executed instruction.  */
	  itstate = inst1 & 0x00ff;
	  pc += thumb_insn_size (inst1);

	  /* Walk forward over any instructions whose condition will
	     be false, stopping at the first one that will execute.  */
	  while (itstate != 0 && ! condition_true (itstate >> 4, status))
	    {
	      inst1 = self->ops->read_mem_uint (pc, 2,byte_order_for_code);
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);
	    }

	  next_pcs.push_back (MAKE_THUMB_ADDR (pc));
	  return next_pcs;
	}
      else if (itstate != 0)
	{
	  /* We are in a conditional block.  Check the condition.  */
	  if (! condition_true (itstate >> 4, status))
	    {
	      /* Advance to the next executed instruction.  */
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	      while (itstate != 0 && ! condition_true (itstate >> 4, status))
		{
		  inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code);

		  pc += thumb_insn_size (inst1);
		  itstate = thumb_advance_itstate (itstate);
		}

	      next_pcs.push_back (MAKE_THUMB_ADDR (pc));
	      return next_pcs;
	    }
	  else if ((itstate & 0x0f) == 0x08)
	    {
	      /* This is the last instruction of the conditional
		 block, and it is executed.  We can handle it normally
		 because the following instruction is not conditional,
		 and we must handle it normally because it is
		 permitted to branch.  Fall through.  */
	    }
	  else
	    {
	      int cond_negated;

	      /* There are conditional instructions after this one.
		 If this instruction modifies the flags, then we can
		 not predict what the next executed instruction will
		 be.  Fortunately, this instruction is architecturally
		 forbidden to branch; we know it will fall through.
		 Start by skipping past it.  */
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	      /* Set a breakpoint on the following instruction.  */
	      gdb_assert ((itstate & 0x0f) != 0);
	      next_pcs.push_back (MAKE_THUMB_ADDR (pc));

	      /* Bit 4 of ITSTATE carries the per-instruction condition
		 polarity; remember the current one so we can detect a
		 flip.  */
	      cond_negated = (itstate >> 4) & 1;

	      /* Skip all following instructions with the same
		 condition.  If there is a later instruction in the IT
		 block with the opposite condition, set the other
		 breakpoint there.  If not, then set a breakpoint on
		 the instruction after the IT block.  */
	      do
		{
		  inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code);
		  pc += thumb_insn_size (inst1);
		  itstate = thumb_advance_itstate (itstate);
		}
	      while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);

	      next_pcs.push_back (MAKE_THUMB_ADDR (pc));

	      return next_pcs;
	    }
	}
    }
  else if (itstate & 0x0f)
    {
      /* We are in a conditional block.  Check the condition.  */
      int cond = itstate >> 4;

      if (! condition_true (cond, status))
	{
	  /* Advance to the next instruction.  All the 32-bit
	     instructions share a common prefix.  */
	  next_pcs.push_back (MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1)));
	}

      return next_pcs;

      /* Otherwise, handle the instruction normally.  */
    }

  if ((inst1 & 0xff00) == 0xbd00)	/* pop {rlist, pc} */
    {
      CORE_ADDR sp;

      /* Fetch the saved PC from the stack.  It's stored above
	 all of the other registers.  */
      unsigned long offset = bitcount (bits (inst1, 0, 7))
			     * ARM_INT_REGISTER_SIZE;
      sp = regcache_raw_get_unsigned (regcache, ARM_SP_REGNUM);
      nextpc = self->ops->read_mem_uint (sp + offset, 4, byte_order);
    }
  else if ((inst1 & 0xf000) == 0xd000)	/* conditional branch */
    {
      unsigned long cond = bits (inst1, 8, 11);
      if (cond == 0x0f)  /* 0x0f = SWI */
	{
	  /* System call: the client decides where it resumes.  */
	  nextpc = self->ops->syscall_next_pc (self);
	}
      /* Note: the cond != 0x0f test is redundant (the branch above
	 already excluded it) but harmless.  An untaken branch keeps
	 the default fall-through NEXTPC.  */
      else if (cond != 0x0f && condition_true (cond, status))
	nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
    }
  else if ((inst1 & 0xf800) == 0xe000)	/* unconditional branch */
    {
      nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
    }
  else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
    {
      unsigned short inst2;
      inst2 = self->ops->read_mem_uint (pc + 2, 2, byte_order_for_code);

      /* Default to the next instruction.  */
      nextpc = pc + 4;
      nextpc = MAKE_THUMB_ADDR (nextpc);

      if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
	{
	  /* Branches and miscellaneous control instructions.  */

	  if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
	    {
	      /* B, BL, BLX.  */
	      int j1, j2, imm1, imm2;

	      imm1 = sbits (inst1, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      /* IMM1 was sign-extended, so XORing bits 22/23 with the
		 inverted J bits implements the I1/I2 = NOT(J EOR S)
		 decoding of the wide-branch offset.  */
	      unsigned long offset = ((imm1 << 12) + (imm2 << 1));
	      offset ^= ((!j2) << 22) | ((!j1) << 23);

	      nextpc = pc_val + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;
	    }
	  else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
	    {
	      /* SUBS PC, LR, #imm8.  */
	      nextpc = regcache_raw_get_unsigned (regcache, ARM_LR_REGNUM);
	      nextpc -= inst2 & 0x00ff;
	    }
	  else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
	    {
	      /* Conditional branch.  */
	      if (condition_true (bits (inst1, 6, 9), status))
		{
		  int sign, j1, j2, imm1, imm2;

		  sign = sbits (inst1, 10, 10);
		  imm1 = bits (inst1, 0, 5);
		  imm2 = bits (inst2, 0, 10);
		  j1 = bit (inst2, 13);
		  j2 = bit (inst2, 11);

		  /* Reassemble the scattered signed offset; SIGN is
		     already sign-extended by sbits.  */
		  unsigned long offset
		    = (sign << 20) + (j2 << 19) + (j1 << 18);
		  offset += (imm1 << 12) + (imm2 << 1);

		  nextpc = pc_val + offset;
		}
	    }
	}
      else if ((inst1 & 0xfe50) == 0xe810)
	{
	  /* Load multiple or RFE.  */
	  int rn, offset, load_pc = 1;

	  rn = bits (inst1, 0, 3);
	  /* Bits 7/8 select the addressing variant; OFFSET is where
	     the PC word sits relative to the base register.  */
	  if (bit (inst1, 7) && !bit (inst1, 8))
	    {
	      /* LDMIA or POP */
	      if (!bit (inst2, 15))
		load_pc = 0;
	      offset = bitcount (inst2) * 4 - 4;
	    }
	  else if (!bit (inst1, 7) && bit (inst1, 8))
	    {
	      /* LDMDB */
	      if (!bit (inst2, 15))
		load_pc = 0;
	      offset = -4;
	    }
	  else if (bit (inst1, 7) && bit (inst1, 8))
	    {
	      /* RFEIA */
	      offset = 0;
	    }
	  else if (!bit (inst1, 7) && !bit (inst1, 8))
	    {
	      /* RFEDB */
	      offset = -8;
	    }
	  else
	    load_pc = 0;

	  if (load_pc)
	    {
	      CORE_ADDR addr = regcache_raw_get_unsigned (regcache, rn);
	      nextpc = self->ops->read_mem_uint (addr + offset, 4, byte_order);
	    }
	}
      else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
	{
	  /* MOV PC or MOVS PC.  */
	  nextpc = regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3));
	  nextpc = MAKE_THUMB_ADDR (nextpc);
	}
      else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
	{
	  /* LDR PC.  Work out the effective address for each of the
	     literal, immediate-offset and register-offset forms.  */
	  CORE_ADDR base;
	  int rn, load_pc = 1;

	  rn = bits (inst1, 0, 3);
	  base = regcache_raw_get_unsigned (regcache, rn);
	  if (rn == ARM_PC_REGNUM)
	    {
	      /* Literal form: base is the word-aligned PC.  */
	      base = (base + 4) & ~(CORE_ADDR) 0x3;
	      if (bit (inst1, 7))
		base += bits (inst2, 0, 11);
	      else
		base -= bits (inst2, 0, 11);
	    }
	  else if (bit (inst1, 7))
	    base += bits (inst2, 0, 11);
	  else if (bit (inst2, 11))
	    {
	      if (bit (inst2, 10))
		{
		  if (bit (inst2, 9))
		    base += bits (inst2, 0, 7);
		  else
		    base -= bits (inst2, 0, 7);
		}
	    }
	  else if ((inst2 & 0x0fc0) == 0x0000)
	    {
	      int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
	      base += regcache_raw_get_unsigned (regcache, rm) << shift;
	    }
	  else
	    /* Reserved.  */
	    load_pc = 0;

	  if (load_pc)
	    nextpc
	      = self->ops->read_mem_uint (base, 4, byte_order);
	}
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
	{
	  /* TBB.  Byte-sized jump table; entries are halfword offsets.  */
	  CORE_ADDR tbl_reg, table, offset, length;

	  tbl_reg = bits (inst1, 0, 3);
	  if (tbl_reg == 0x0f)
	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
	  else
	    table = regcache_raw_get_unsigned (regcache, tbl_reg);

	  offset = regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3));
	  length = 2 * self->ops->read_mem_uint (table + offset, 1, byte_order);
	  nextpc = pc_val + length;
	}
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
	{
	  /* TBH.  Halfword-sized jump table variant of the above.  */
	  CORE_ADDR tbl_reg, table, offset, length;

	  tbl_reg = bits (inst1, 0, 3);
	  if (tbl_reg == 0x0f)
	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
	  else
	    table = regcache_raw_get_unsigned (regcache, tbl_reg);

	  offset = 2 * regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3));
	  length = 2 * self->ops->read_mem_uint (table + offset, 2, byte_order);
	  nextpc = pc_val + length;
	}
    }
  else if ((inst1 & 0xff00) == 0x4700)	/* bx REG, blx REG */
    {
      if (bits (inst1, 3, 6) == 0x0f)
	/* BX PC switches to ARM state at the word-aligned PC.  */
	nextpc = UNMAKE_THUMB_ADDR (pc_val);
      else
	nextpc = regcache_raw_get_unsigned (regcache, bits (inst1, 3, 6));
    }
  else if ((inst1 & 0xff87) == 0x4687)	/* mov pc, REG */
    {
      if (bits (inst1, 3, 6) == 0x0f)
	nextpc = pc_val;
      else
	nextpc = regcache_raw_get_unsigned (regcache, bits (inst1, 3, 6));

      /* MOV PC does not change execution state; stay in Thumb.  */
      nextpc = MAKE_THUMB_ADDR (nextpc);
    }
  else if ((inst1 & 0xf500) == 0xb100)
    {
      /* CBNZ or CBZ.  Bit 11 distinguishes CBNZ (set) from CBZ.  */
      int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
      ULONGEST reg = regcache_raw_get_unsigned (regcache, bits (inst1, 0, 2));

      if (bit (inst1, 11) && reg != 0)
	nextpc = pc_val + imm;
      else if (!bit (inst1, 11) && reg == 0)
	nextpc = pc_val + imm;
    }

  next_pcs.push_back (nextpc);

  return next_pcs;
}
637
638 /* Get the raw next possible addresses. PC in next_pcs is the current program
639 counter, which is assumed to be executing in ARM mode.
640
641 The values returned have the execution state of the next instruction
642 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
643 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
644 address in GDB and arm_addr_bits_remove in GDBServer. */
645
static std::vector<CORE_ADDR>
arm_get_next_pcs_raw (struct arm_get_next_pcs *self)
{
  int byte_order = self->byte_order;
  int byte_order_for_code = self->byte_order_for_code;
  unsigned long pc_val;
  unsigned long this_instr = 0;
  unsigned long status;
  CORE_ADDR nextpc;
  struct regcache *regcache = self->regcache;
  CORE_ADDR pc = regcache_read_pc (self->regcache);
  std::vector<CORE_ADDR> next_pcs;

  pc_val = (unsigned long) pc;
  this_instr = self->ops->read_mem_uint (pc, 4, byte_order_for_code);

  status = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
  nextpc = (CORE_ADDR) (pc_val + 4); /* Default case */

  /* The NV (never) condition field marks the unconditional
     instruction space on later architectures.  */
  if (bits (this_instr, 28, 31) == INST_NV)
    switch (bits (this_instr, 24, 27))
      {
      case 0xa:
      case 0xb:
	{
	  /* Branch with Link and change to Thumb.  Bit 24 (the H bit)
	     supplies bit 1 of the halfword-aligned destination.  */
	  nextpc = BranchDest (pc, this_instr);
	  nextpc |= bit (this_instr, 24) << 1;
	  nextpc = MAKE_THUMB_ADDR (nextpc);
	  break;
	}
      case 0xc:
      case 0xd:
      case 0xe:
	/* Coprocessor register transfer.  */
	if (bits (this_instr, 12, 15) == 15)
	  error (_("Invalid update to pc in instruction"));
	break;
      }
  else if (condition_true (bits (this_instr, 28, 31), status))
    {
      switch (bits (this_instr, 24, 27))
	{
	case 0x0:
	case 0x1: /* data processing */
	case 0x2:
	case 0x3:
	  {
	    unsigned long operand1, operand2, result = 0;
	    unsigned long rn;
	    int c;

	    /* Only instructions with the PC (r15) as destination can
	       change control flow; everything else falls through.  */
	    if (bits (this_instr, 12, 15) != 15)
	      break;

	    if (bits (this_instr, 22, 25) == 0
		&& bits (this_instr, 4, 7) == 9) /* multiply */
	      error (_("Invalid update to pc in instruction"));

	    /* BX <reg>, BLX <reg> */
	    if (bits (this_instr, 4, 27) == 0x12fff1
		|| bits (this_instr, 4, 27) == 0x12fff3)
	      {
		/* Reading the PC register yields the prefetched value.  */
		rn = bits (this_instr, 0, 3);
		nextpc = ((rn == ARM_PC_REGNUM)
			  ? (pc_val + 8)
			  : regcache_raw_get_unsigned (regcache, rn));

		next_pcs.push_back (nextpc);
		return next_pcs;
	      }

	    /* Data-processing instruction with the PC as destination:
	       emulate it to compute the resulting PC.  The carry flag
	       is needed for shifted-register operands.  */
	    c = (status & FLAG_C) ? 1 : 0;
	    rn = bits (this_instr, 16, 19);
	    operand1 = ((rn == ARM_PC_REGNUM)
			? (pc_val + 8)
			: regcache_raw_get_unsigned (regcache, rn));

	    if (bit (this_instr, 25))
	      {
		/* Operand 2 is an 8-bit immediate rotated right by
		   twice the 4-bit rotate field.  */
		unsigned long immval = bits (this_instr, 0, 7);
		unsigned long rotate = 2 * bits (this_instr, 8, 11);
		operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
		  & 0xffffffff;
	      }
	    else /* operand 2 is a shifted register.  */
	      operand2 = shifted_reg_val (regcache, this_instr, c,
					  pc_val, status);

	    /* Emulate the ALU operation selected by the opcode field.  */
	    switch (bits (this_instr, 21, 24))
	      {
	      case 0x0: /*and */
		result = operand1 & operand2;
		break;

	      case 0x1: /*eor */
		result = operand1 ^ operand2;
		break;

	      case 0x2: /*sub */
		result = operand1 - operand2;
		break;

	      case 0x3: /*rsb */
		result = operand2 - operand1;
		break;

	      case 0x4: /*add */
		result = operand1 + operand2;
		break;

	      case 0x5: /*adc */
		result = operand1 + operand2 + c;
		break;

	      case 0x6: /*sbc */
		result = operand1 - operand2 + c;
		break;

	      case 0x7: /*rsc */
		result = operand2 - operand1 + c;
		break;

	      case 0x8:
	      case 0x9:
	      case 0xa:
	      case 0xb: /* tst, teq, cmp, cmn */
		/* Comparison ops do not write the PC; keep the
		   fall-through address.  */
		result = (unsigned long) nextpc;
		break;

	      case 0xc: /*orr */
		result = operand1 | operand2;
		break;

	      case 0xd: /*mov */
		/* Always step into a function.  */
		result = operand2;
		break;

	      case 0xe: /*bic */
		result = operand1 & ~operand2;
		break;

	      case 0xf: /*mvn */
		result = ~operand2;
		break;
	      }
	    nextpc = self->ops->addr_bits_remove (self, result);
	    break;
	  }

	case 0x4:
	case 0x5: /* data transfer */
	case 0x6:
	case 0x7:
	  if (bits (this_instr, 25, 27) == 0x3 && bit (this_instr, 4) == 1)
	    {
	      /* Media instructions and architecturally undefined
		 instructions.  */
	      break;
	    }

	  if (bit (this_instr, 20))
	    {
	      /* load */
	      if (bits (this_instr, 12, 15) == 15)
		{
		  /* rd == pc */
		  unsigned long rn;
		  unsigned long base;

		  /* Bit 22 set would be a byte load into the PC.  */
		  if (bit (this_instr, 22))
		    error (_("Invalid update to pc in instruction"));

		  /* Word load with the PC as destination: compute the
		     effective load address.  */
		  rn = bits (this_instr, 16, 19);
		  base = ((rn == ARM_PC_REGNUM)
			  ? (pc_val + 8)
			  : regcache_raw_get_unsigned (regcache, rn));

		  if (bit (this_instr, 24))
		    {
		      /* pre-indexed */
		      int c = (status & FLAG_C) ? 1 : 0;
		      unsigned long offset =
			(bit (this_instr, 25)
			 ? shifted_reg_val (regcache, this_instr, c,
					    pc_val, status)
			 : bits (this_instr, 0, 11));

		      if (bit (this_instr, 23))
			base += offset;
		      else
			base -= offset;
		    }
		  nextpc
		    = (CORE_ADDR) self->ops->read_mem_uint ((CORE_ADDR) base,
							    4, byte_order);
		}
	    }
	  break;

	case 0x8:
	case 0x9: /* block transfer */
	  if (bit (this_instr, 20))
	    {
	      /* LDM */
	      if (bit (this_instr, 15))
		{
		  /* loading pc */
		  int offset = 0;
		  CORE_ADDR rn_val_offset = 0;
		  unsigned long rn_val
		    = regcache_raw_get_unsigned (regcache,
						 bits (this_instr, 16, 19));

		  /* The PC is the highest-numbered register, so it is
		     transferred last; locate its slot from the
		     direction (bit 23) and pre/post (bit 24) flags.  */
		  if (bit (this_instr, 23))
		    {
		      /* up */
		      unsigned long reglist = bits (this_instr, 0, 14);
		      offset = bitcount (reglist) * 4;
		      if (bit (this_instr, 24)) /* pre */
			offset += 4;
		    }
		  else if (bit (this_instr, 24))
		    offset = -4;

		  rn_val_offset = rn_val + offset;
		  nextpc = (CORE_ADDR) self->ops->read_mem_uint (rn_val_offset,
								 4, byte_order);
		}
	    }
	  break;

	case 0xb: /* branch & link */
	case 0xa: /* branch */
	  {
	    nextpc = BranchDest (pc, this_instr);
	    break;
	  }

	case 0xc:
	case 0xd:
	case 0xe: /* coproc ops */
	  break;
	case 0xf: /* SWI */
	  {
	    /* System call: the client decides where it resumes.  */
	    nextpc = self->ops->syscall_next_pc (self);
	  }
	  break;

	default:
	  error (_("Bad bit-field extraction"));
	  return next_pcs;
	}
    }

  next_pcs.push_back (nextpc);

  return next_pcs;
}
908
909 /* See arm-get-next-pcs.h. */
910
911 std::vector<CORE_ADDR>
912 arm_get_next_pcs (struct arm_get_next_pcs *self)
913 {
914 std::vector<CORE_ADDR> next_pcs;
915
916 if (self->ops->is_thumb (self))
917 {
918 next_pcs = thumb_deal_with_atomic_sequence_raw (self);
919 if (next_pcs.empty ())
920 next_pcs = thumb_get_next_pcs_raw (self);
921 }
922 else
923 {
924 next_pcs = arm_deal_with_atomic_sequence_raw (self);
925 if (next_pcs.empty ())
926 next_pcs = arm_get_next_pcs_raw (self);
927 }
928
929 if (self->ops->fixup != NULL)
930 {
931 for (CORE_ADDR &pc_ref : next_pcs)
932 pc_ref = self->ops->fixup (self, pc_ref);
933 }
934
935 return next_pcs;
936 }
This page took 0.049286 seconds and 4 git commands to generate.