Remove argument pc in get_next_pcs
[deliverable/binutils-gdb.git] / gdb / arch / arm-get-next-pcs.c
1 /* Common code for ARM software single stepping support.
2
3 Copyright (C) 1988-2016 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "common-defs.h"
21 #include "gdb_vecs.h"
22 #include "common-regcache.h"
23 #include "arm.h"
24 #include "arm-get-next-pcs.h"
25
26 /* See arm-get-next-pcs.h. */
27
28 void
29 arm_get_next_pcs_ctor (struct arm_get_next_pcs *self,
30 struct arm_get_next_pcs_ops *ops,
31 int byte_order,
32 int byte_order_for_code,
33 int has_thumb2_breakpoint,
34 struct regcache *regcache)
35 {
36 self->ops = ops;
37 self->byte_order = byte_order;
38 self->byte_order_for_code = byte_order_for_code;
39 self->has_thumb2_breakpoint = has_thumb2_breakpoint;
40 self->regcache = regcache;
41 }
42
43 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
44 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
45 is found, attempt to step through it. The end of the sequence address is
46 added to the next_pcs list. */
47
48 static VEC (CORE_ADDR) *
49 thumb_deal_with_atomic_sequence_raw (struct arm_get_next_pcs *self)
50 {
51 int byte_order_for_code = self->byte_order_for_code;
52 CORE_ADDR breaks[2] = {-1, -1};
53 CORE_ADDR pc = regcache_read_pc (self->regcache);
54 CORE_ADDR loc = pc;
55 unsigned short insn1, insn2;
56 int insn_count;
57 int index;
58 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
59 const int atomic_sequence_length = 16; /* Instruction sequence length. */
60 ULONGEST status, itstate;
61 VEC (CORE_ADDR) *next_pcs = NULL;
62
63 /* We currently do not support atomic sequences within an IT block. */
64 status = regcache_raw_get_unsigned (self->regcache, ARM_PS_REGNUM);
65 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
66 if (itstate & 0x0f)
67 return NULL;
68
69 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
70 insn1 = self->ops->read_mem_uint (loc, 2, byte_order_for_code);
71
72 loc += 2;
73 if (thumb_insn_size (insn1) != 4)
74 return NULL;
75
76 insn2 = self->ops->read_mem_uint (loc, 2, byte_order_for_code);
77
78 loc += 2;
79 if (!((insn1 & 0xfff0) == 0xe850
80 || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
81 return NULL;
82
83 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
84 instructions. */
85 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
86 {
87 insn1 = self->ops->read_mem_uint (loc, 2,byte_order_for_code);
88 loc += 2;
89
90 if (thumb_insn_size (insn1) != 4)
91 {
92 /* Assume that there is at most one conditional branch in the
93 atomic sequence. If a conditional branch is found, put a
94 breakpoint in its destination address. */
95 if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
96 {
97 if (last_breakpoint > 0)
98 return NULL; /* More than one conditional branch found,
99 fallback to the standard code. */
100
101 breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
102 last_breakpoint++;
103 }
104
105 /* We do not support atomic sequences that use any *other*
106 instructions but conditional branches to change the PC.
107 Fall back to standard code to avoid losing control of
108 execution. */
109 else if (thumb_instruction_changes_pc (insn1))
110 return NULL;
111 }
112 else
113 {
114 insn2 = self->ops->read_mem_uint (loc, 2, byte_order_for_code);
115
116 loc += 2;
117
118 /* Assume that there is at most one conditional branch in the
119 atomic sequence. If a conditional branch is found, put a
120 breakpoint in its destination address. */
121 if ((insn1 & 0xf800) == 0xf000
122 && (insn2 & 0xd000) == 0x8000
123 && (insn1 & 0x0380) != 0x0380)
124 {
125 int sign, j1, j2, imm1, imm2;
126 unsigned int offset;
127
128 sign = sbits (insn1, 10, 10);
129 imm1 = bits (insn1, 0, 5);
130 imm2 = bits (insn2, 0, 10);
131 j1 = bit (insn2, 13);
132 j2 = bit (insn2, 11);
133
134 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
135 offset += (imm1 << 12) + (imm2 << 1);
136
137 if (last_breakpoint > 0)
138 return 0; /* More than one conditional branch found,
139 fallback to the standard code. */
140
141 breaks[1] = loc + offset;
142 last_breakpoint++;
143 }
144
145 /* We do not support atomic sequences that use any *other*
146 instructions but conditional branches to change the PC.
147 Fall back to standard code to avoid losing control of
148 execution. */
149 else if (thumb2_instruction_changes_pc (insn1, insn2))
150 return NULL;
151
152 /* If we find a strex{,b,h,d}, we're done. */
153 if ((insn1 & 0xfff0) == 0xe840
154 || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
155 break;
156 }
157 }
158
159 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
160 if (insn_count == atomic_sequence_length)
161 return NULL;
162
163 /* Insert a breakpoint right after the end of the atomic sequence. */
164 breaks[0] = loc;
165
166 /* Check for duplicated breakpoints. Check also for a breakpoint
167 placed (branch instruction's destination) anywhere in sequence. */
168 if (last_breakpoint
169 && (breaks[1] == breaks[0]
170 || (breaks[1] >= pc && breaks[1] < loc)))
171 last_breakpoint = 0;
172
173 /* Adds the breakpoints to the list to be inserted. */
174 for (index = 0; index <= last_breakpoint; index++)
175 VEC_safe_push (CORE_ADDR, next_pcs, MAKE_THUMB_ADDR (breaks[index]));
176
177 return next_pcs;
178 }
179
/* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
   instruction and ending with a STREX{,B,H,D} instruction.  If such a sequence
   is found, attempt to step through it.  The end of the sequence address is
   added to the next_pcs list.

   ARM-mode counterpart of thumb_deal_with_atomic_sequence_raw: returns
   NULL when no steppable atomic sequence starts at the current PC, or a
   VEC of breakpoint addresses (no Thumb bit) on success.  */

static VEC (CORE_ADDR) *
arm_deal_with_atomic_sequence_raw (struct arm_get_next_pcs *self)
{
  int byte_order_for_code = self->byte_order_for_code;
  /* breaks[0] is the address just past the STREX; breaks[1], when used,
     is the destination of the single allowed conditional branch.  */
  CORE_ADDR breaks[2] = {-1, -1};
  CORE_ADDR pc = regcache_read_pc (self->regcache);
  CORE_ADDR loc = pc;
  unsigned int insn;
  int insn_count;
  int index;
  int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed).  */
  const int atomic_sequence_length = 16; /* Instruction sequence length.  */
  VEC (CORE_ADDR) *next_pcs = NULL;

  /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
     Note that we do not currently support conditionally executed atomic
     instructions.  */
  insn = self->ops->read_mem_uint (loc, 4, byte_order_for_code);

  loc += 4;
  if ((insn & 0xff9000f0) != 0xe1900090)
    return NULL;

  /* Assume that no atomic sequence is longer than "atomic_sequence_length"
     instructions.  */
  for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
    {
      insn = self->ops->read_mem_uint (loc, 4, byte_order_for_code);

      loc += 4;

      /* Assume that there is at most one conditional branch in the atomic
	 sequence.  If a conditional branch is found, put a breakpoint in
	 its destination address.  */
      if (bits (insn, 24, 27) == 0xa)
	{
	  if (last_breakpoint > 0)
	    return NULL; /* More than one conditional branch found, fallback
			    to the standard single-step code.  */

	  breaks[1] = BranchDest (loc - 4, insn);
	  last_breakpoint++;
	}

      /* We do not support atomic sequences that use any *other* instructions
	 but conditional branches to change the PC.  Fall back to standard
	 code to avoid losing control of execution.  */
      else if (arm_instruction_changes_pc (insn))
	return NULL;

      /* If we find a strex{,b,h,d}, we're done.  */
      if ((insn & 0xff9000f0) == 0xe1800090)
	break;
    }

  /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence.  */
  if (insn_count == atomic_sequence_length)
    return NULL;

  /* Insert a breakpoint right after the end of the atomic sequence.  */
  breaks[0] = loc;

  /* Check for duplicated breakpoints.  Check also for a breakpoint
     placed (branch instruction's destination) anywhere in sequence.  */
  if (last_breakpoint
      && (breaks[1] == breaks[0]
	  || (breaks[1] >= pc && breaks[1] < loc)))
    last_breakpoint = 0;

  /* Adds the breakpoints to the list to be inserted.  */
  for (index = 0; index <= last_breakpoint; index++)
    VEC_safe_push (CORE_ADDR, next_pcs, breaks[index]);

  return next_pcs;
}
260
/* Find the next possible PCs for thumb mode.

   Decodes the Thumb/Thumb-2 instruction at the current PC and returns a
   VEC of the possible next instruction addresses, each tagged with the
   Thumb bit via MAKE_THUMB_ADDR.  Usually a single address is returned;
   IT-block handling may push two (see below).  */

static VEC (CORE_ADDR) *
thumb_get_next_pcs_raw (struct arm_get_next_pcs *self)
{
  int byte_order = self->byte_order;
  int byte_order_for_code = self->byte_order_for_code;
  CORE_ADDR pc = regcache_read_pc (self->regcache);
  unsigned long pc_val = ((unsigned long) pc) + 4;	/* PC after prefetch */
  unsigned short inst1;
  CORE_ADDR nextpc = pc + 2;	/* Default is next instruction.  */
  unsigned long offset;
  ULONGEST status, itstate;
  struct regcache *regcache = self->regcache;
  VEC (CORE_ADDR) * next_pcs = NULL;

  nextpc = MAKE_THUMB_ADDR (nextpc);
  pc_val = MAKE_THUMB_ADDR (pc_val);

  inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code);

  /* Thumb-2 conditional execution support.  There are eight bits in
     the CPSR which describe conditional execution state.  Once
     reconstructed (they're in a funny order), the low five bits
     describe the low bit of the condition for each instruction and
     how many instructions remain.  The high three bits describe the
     base condition.  One of the low four bits will be set if an IT
     block is active.  These bits read as zero on earlier
     processors.  */
  status = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
  itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);

  /* If-Then handling.  On GNU/Linux, where this routine is used, we
     use an undefined instruction as a breakpoint.  Unlike BKPT, IT
     can disable execution of the undefined instruction.  So we might
     miss the breakpoint if we set it on a skipped conditional
     instruction.  Because conditional instructions can change the
     flags, affecting the execution of further instructions, we may
     need to set two breakpoints.  */

  if (self->has_thumb2_breakpoint)
    {
      if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
	{
	  /* An IT instruction.  Because this instruction does not
	     modify the flags, we can accurately predict the next
	     executed instruction.  */
	  itstate = inst1 & 0x00ff;
	  pc += thumb_insn_size (inst1);

	  /* Skip instructions inside the new IT block whose condition
	     does not hold.  */
	  while (itstate != 0 && ! condition_true (itstate >> 4, status))
	    {
	      inst1 = self->ops->read_mem_uint (pc, 2,byte_order_for_code);
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);
	    }

	  VEC_safe_push (CORE_ADDR, next_pcs, MAKE_THUMB_ADDR (pc));
	  return next_pcs;
	}
      else if (itstate != 0)
	{
	  /* We are in a conditional block.  Check the condition.  */
	  if (! condition_true (itstate >> 4, status))
	    {
	      /* Advance to the next executed instruction.  */
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	      while (itstate != 0 && ! condition_true (itstate >> 4, status))
		{
		  inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code);

		  pc += thumb_insn_size (inst1);
		  itstate = thumb_advance_itstate (itstate);
		}

	      VEC_safe_push (CORE_ADDR, next_pcs, MAKE_THUMB_ADDR (pc));
	      return next_pcs;
	    }
	  else if ((itstate & 0x0f) == 0x08)
	    {
	      /* This is the last instruction of the conditional
		 block, and it is executed.  We can handle it normally
		 because the following instruction is not conditional,
		 and we must handle it normally because it is
		 permitted to branch.  Fall through.  */
	    }
	  else
	    {
	      int cond_negated;

	      /* There are conditional instructions after this one.
		 If this instruction modifies the flags, then we can
		 not predict what the next executed instruction will
		 be.  Fortunately, this instruction is architecturally
		 forbidden to branch; we know it will fall through.
		 Start by skipping past it.  */
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	      /* Set a breakpoint on the following instruction.  */
	      gdb_assert ((itstate & 0x0f) != 0);
	      VEC_safe_push (CORE_ADDR, next_pcs, MAKE_THUMB_ADDR (pc));

	      cond_negated = (itstate >> 4) & 1;

	      /* Skip all following instructions with the same
		 condition.  If there is a later instruction in the IT
		 block with the opposite condition, set the other
		 breakpoint there.  If not, then set a breakpoint on
		 the instruction after the IT block.  */
	      do
		{
		  inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code);
		  pc += thumb_insn_size (inst1);
		  itstate = thumb_advance_itstate (itstate);
		}
	      while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);

	      VEC_safe_push (CORE_ADDR, next_pcs, MAKE_THUMB_ADDR (pc));

	      return next_pcs;
	    }
	}
    }
  else if (itstate & 0x0f)
    {
      /* We are in a conditional block.  Check the condition.  */
      int cond = itstate >> 4;

      if (! condition_true (cond, status))
	{
	  /* Advance to the next instruction.  All the 32-bit
	     instructions share a common prefix.  */
	  VEC_safe_push (CORE_ADDR, next_pcs,
			 MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1)));
	}

      return next_pcs;

      /* Otherwise, handle the instruction normally.  */
    }

  if ((inst1 & 0xff00) == 0xbd00)	/* pop {rlist, pc} */
    {
      CORE_ADDR sp;

      /* Fetch the saved PC from the stack.  It's stored above
         all of the other registers.  */
      offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
      sp = regcache_raw_get_unsigned (regcache, ARM_SP_REGNUM);
      nextpc = self->ops->read_mem_uint (sp + offset, 4, byte_order);
    }
  else if ((inst1 & 0xf000) == 0xd000)	/* conditional branch */
    {
      unsigned long cond = bits (inst1, 8, 11);
      if (cond == 0x0f)  /* 0x0f = SWI */
	{
	  /* Syscalls may change the PC arbitrarily; ask the
	     target-specific hook for the post-syscall address.  */
	  nextpc = self->ops->syscall_next_pc (self, pc);
	}
      else if (cond != 0x0f && condition_true (cond, status))
	nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
    }
  else if ((inst1 & 0xf800) == 0xe000)	/* unconditional branch */
    {
      nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
    }
  else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
    {
      unsigned short inst2;
      inst2 = self->ops->read_mem_uint (pc + 2, 2, byte_order_for_code);

      /* Default to the next instruction.  */
      nextpc = pc + 4;
      nextpc = MAKE_THUMB_ADDR (nextpc);

      if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
	{
	  /* Branches and miscellaneous control instructions.  */

	  if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
	    {
	      /* B, BL, BLX.  */
	      int j1, j2, imm1, imm2;

	      imm1 = sbits (inst1, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      offset = ((imm1 << 12) + (imm2 << 1));
	      offset ^= ((!j2) << 22) | ((!j1) << 23);

	      nextpc = pc_val + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;
	    }
	  else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
	    {
	      /* SUBS PC, LR, #imm8.  */
	      nextpc = regcache_raw_get_unsigned (regcache, ARM_LR_REGNUM);
	      nextpc -= inst2 & 0x00ff;
	    }
	  else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
	    {
	      /* Conditional branch.  */
	      if (condition_true (bits (inst1, 6, 9), status))
		{
		  int sign, j1, j2, imm1, imm2;

		  sign = sbits (inst1, 10, 10);
		  imm1 = bits (inst1, 0, 5);
		  imm2 = bits (inst2, 0, 10);
		  j1 = bit (inst2, 13);
		  j2 = bit (inst2, 11);

		  offset = (sign << 20) + (j2 << 19) + (j1 << 18);
		  offset += (imm1 << 12) + (imm2 << 1);

		  nextpc = pc_val + offset;
		}
	    }
	}
      else if ((inst1 & 0xfe50) == 0xe810)
	{
	  /* Load multiple or RFE.  */
	  int rn, offset, load_pc = 1;

	  rn = bits (inst1, 0, 3);
	  if (bit (inst1, 7) && !bit (inst1, 8))
	    {
	      /* LDMIA or POP */
	      if (!bit (inst2, 15))
		load_pc = 0;
	      offset = bitcount (inst2) * 4 - 4;
	    }
	  else if (!bit (inst1, 7) && bit (inst1, 8))
	    {
	      /* LDMDB */
	      if (!bit (inst2, 15))
		load_pc = 0;
	      offset = -4;
	    }
	  else if (bit (inst1, 7) && bit (inst1, 8))
	    {
	      /* RFEIA */
	      offset = 0;
	    }
	  else if (!bit (inst1, 7) && !bit (inst1, 8))
	    {
	      /* RFEDB */
	      offset = -8;
	    }
	  else
	    load_pc = 0;

	  if (load_pc)
	    {
	      CORE_ADDR addr = regcache_raw_get_unsigned (regcache, rn);
	      nextpc = self->ops->read_mem_uint (addr + offset, 4, byte_order);
	    }
	}
      else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
	{
	  /* MOV PC or MOVS PC.  */
	  nextpc = regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3));
	  nextpc = MAKE_THUMB_ADDR (nextpc);
	}
      else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
	{
	  /* LDR PC.  */
	  CORE_ADDR base;
	  int rn, load_pc = 1;

	  rn = bits (inst1, 0, 3);
	  base = regcache_raw_get_unsigned (regcache, rn);
	  if (rn == ARM_PC_REGNUM)
	    {
	      /* PC-relative load: base is the word-aligned PC.  */
	      base = (base + 4) & ~(CORE_ADDR) 0x3;
	      if (bit (inst1, 7))
		base += bits (inst2, 0, 11);
	      else
		base -= bits (inst2, 0, 11);
	    }
	  else if (bit (inst1, 7))
	    base += bits (inst2, 0, 11);
	  else if (bit (inst2, 11))
	    {
	      if (bit (inst2, 10))
		{
		  if (bit (inst2, 9))
		    base += bits (inst2, 0, 7);
		  else
		    base -= bits (inst2, 0, 7);
		}
	    }
	  else if ((inst2 & 0x0fc0) == 0x0000)
	    {
	      /* Register offset, optionally shifted.  */
	      int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
	      base += regcache_raw_get_unsigned (regcache, rm) << shift;
	    }
	  else
	    /* Reserved.  */
	    load_pc = 0;

	  if (load_pc)
	    nextpc
	      = self->ops->read_mem_uint (base, 4, byte_order);
	}
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
	{
	  /* TBB.  */
	  CORE_ADDR tbl_reg, table, offset, length;

	  tbl_reg = bits (inst1, 0, 3);
	  if (tbl_reg == 0x0f)
	    table = pc + 4;	/* Regcache copy of PC isn't right yet.  */
	  else
	    table = regcache_raw_get_unsigned (regcache, tbl_reg);

	  offset = regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3));
	  length = 2 * self->ops->read_mem_uint (table + offset, 1, byte_order);
	  nextpc = pc_val + length;
	}
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
	{
	  /* TBH.  */
	  CORE_ADDR tbl_reg, table, offset, length;

	  tbl_reg = bits (inst1, 0, 3);
	  if (tbl_reg == 0x0f)
	    table = pc + 4;	/* Regcache copy of PC isn't right yet.  */
	  else
	    table = regcache_raw_get_unsigned (regcache, tbl_reg);

	  offset = 2 * regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3));
	  length = 2 * self->ops->read_mem_uint (table + offset, 2, byte_order);
	  nextpc = pc_val + length;
	}
    }
  else if ((inst1 & 0xff00) == 0x4700)	/* bx REG, blx REG */
    {
      if (bits (inst1, 3, 6) == 0x0f)
	nextpc = UNMAKE_THUMB_ADDR (pc_val);
      else
	nextpc = regcache_raw_get_unsigned (regcache, bits (inst1, 3, 6));
    }
  else if ((inst1 & 0xff87) == 0x4687)	/* mov pc, REG */
    {
      if (bits (inst1, 3, 6) == 0x0f)
	nextpc = pc_val;
      else
	nextpc = regcache_raw_get_unsigned (regcache, bits (inst1, 3, 6));

      nextpc = MAKE_THUMB_ADDR (nextpc);
    }
  else if ((inst1 & 0xf500) == 0xb100)
    {
      /* CBNZ or CBZ.  */
      int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
      ULONGEST reg = regcache_raw_get_unsigned (regcache, bits (inst1, 0, 2));

      if (bit (inst1, 11) && reg != 0)
	nextpc = pc_val + imm;	/* CBNZ taken.  */
      else if (!bit (inst1, 11) && reg == 0)
	nextpc = pc_val + imm;	/* CBZ taken.  */
    }

  VEC_safe_push (CORE_ADDR, next_pcs, nextpc);

  return next_pcs;
}
635
/* Get the raw next possible addresses.  PC in next_pcs is the current program
   counter, which is assumed to be executing in ARM mode.

   The values returned have the execution state of the next instruction
   encoded in it.  Use IS_THUMB_ADDR () to see whether the instruction is
   in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
   address in GDB and arm_addr_bits_remove in GDBServer.  */

static VEC (CORE_ADDR) *
arm_get_next_pcs_raw (struct arm_get_next_pcs *self)
{
  int byte_order = self->byte_order;
  int byte_order_for_code = self->byte_order_for_code;
  unsigned long pc_val;
  unsigned long this_instr = 0;
  unsigned long status;
  CORE_ADDR nextpc;
  struct regcache *regcache = self->regcache;
  CORE_ADDR pc = regcache_read_pc (self->regcache);
  VEC (CORE_ADDR) *next_pcs = NULL;

  pc_val = (unsigned long) pc;
  this_instr = self->ops->read_mem_uint (pc, 4, byte_order_for_code);

  status = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
  nextpc = (CORE_ADDR) (pc_val + 4);	/* Default case */

  /* Condition field 0xf (NV) is used for unconditional encodings such
     as BLX(1); handle those separately from condition-checked ones.  */
  if (bits (this_instr, 28, 31) == INST_NV)
    switch (bits (this_instr, 24, 27))
      {
      case 0xa:
      case 0xb:
	{
	  /* Branch with Link and change to Thumb.  */
	  nextpc = BranchDest (pc, this_instr);
	  nextpc |= bit (this_instr, 24) << 1;
	  nextpc = MAKE_THUMB_ADDR (nextpc);
	  break;
	}
      case 0xc:
      case 0xd:
      case 0xe:
	/* Coprocessor register transfer.  */
	if (bits (this_instr, 12, 15) == 15)
	  error (_("Invalid update to pc in instruction"));
	break;
      }
  else if (condition_true (bits (this_instr, 28, 31), status))
    {
      switch (bits (this_instr, 24, 27))
	{
	case 0x0:
	case 0x1:		/* data processing */
	case 0x2:
	case 0x3:
	  {
	    unsigned long operand1, operand2, result = 0;
	    unsigned long rn;
	    int c;

	    /* Only interesting when the destination register is PC.  */
	    if (bits (this_instr, 12, 15) != 15)
	      break;

	    if (bits (this_instr, 22, 25) == 0
		&& bits (this_instr, 4, 7) == 9)	/* multiply */
	      error (_("Invalid update to pc in instruction"));

	    /* BX <reg>, BLX <reg> */
	    if (bits (this_instr, 4, 27) == 0x12fff1
		|| bits (this_instr, 4, 27) == 0x12fff3)
	      {
		rn = bits (this_instr, 0, 3);
		nextpc = ((rn == ARM_PC_REGNUM)
			  ? (pc_val + 8)
			  : regcache_raw_get_unsigned (regcache, rn));

		VEC_safe_push (CORE_ADDR, next_pcs, nextpc);
		return next_pcs;
	      }

	    /* Multiply into PC.  */
	    c = (status & FLAG_C) ? 1 : 0;
	    rn = bits (this_instr, 16, 19);
	    /* Reading PC as an operand yields the prefetched value.  */
	    operand1 = ((rn == ARM_PC_REGNUM)
			? (pc_val + 8)
			: regcache_raw_get_unsigned (regcache, rn));

	    if (bit (this_instr, 25))
	      {
		/* Immediate operand: 8-bit value rotated right.  */
		unsigned long immval = bits (this_instr, 0, 7);
		unsigned long rotate = 2 * bits (this_instr, 8, 11);
		operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
		  & 0xffffffff;
	      }
	    else		/* operand 2 is a shifted register.  */
	      operand2 = shifted_reg_val (regcache, this_instr, c,
					  pc_val, status);

	    /* Emulate the data-processing ALU operation to compute the
	       value written to PC.  */
	    switch (bits (this_instr, 21, 24))
	      {
	      case 0x0:	/*and */
		result = operand1 & operand2;
		break;

	      case 0x1:	/*eor */
		result = operand1 ^ operand2;
		break;

	      case 0x2:	/*sub */
		result = operand1 - operand2;
		break;

	      case 0x3:	/*rsb */
		result = operand2 - operand1;
		break;

	      case 0x4:	/*add */
		result = operand1 + operand2;
		break;

	      case 0x5:	/*adc */
		result = operand1 + operand2 + c;
		break;

	      case 0x6:	/*sbc */
		result = operand1 - operand2 + c;
		break;

	      case 0x7:	/*rsc */
		result = operand2 - operand1 + c;
		break;

	      case 0x8:
	      case 0x9:
	      case 0xa:
	      case 0xb:	/* tst, teq, cmp, cmn */
		/* Compare/test instructions do not write PC.  */
		result = (unsigned long) nextpc;
		break;

	      case 0xc:	/*orr */
		result = operand1 | operand2;
		break;

	      case 0xd:	/*mov */
		/* Always step into a function.  */
		result = operand2;
		break;

	      case 0xe:	/*bic */
		result = operand1 & ~operand2;
		break;

	      case 0xf:	/*mvn */
		result = ~operand2;
		break;
	      }
	    nextpc = self->ops->addr_bits_remove (self, result);
	    break;
	  }

	case 0x4:
	case 0x5:		/* data transfer */
	case 0x6:
	case 0x7:
	  if (bits (this_instr, 25, 27) == 0x3 && bit (this_instr, 4) == 1)
	    {
	      /* Media instructions and architecturally undefined
		 instructions.  */
	      break;
	    }

	  if (bit (this_instr, 20))
	    {
	      /* load */
	      if (bits (this_instr, 12, 15) == 15)
		{
		  /* rd == pc */
		  unsigned long rn;
		  unsigned long base;

		  if (bit (this_instr, 22))
		    error (_("Invalid update to pc in instruction"));

		  /* byte write to PC */
		  rn = bits (this_instr, 16, 19);
		  base = ((rn == ARM_PC_REGNUM)
			  ? (pc_val + 8)
			  : regcache_raw_get_unsigned (regcache, rn));

		  if (bit (this_instr, 24))
		    {
		      /* pre-indexed */
		      int c = (status & FLAG_C) ? 1 : 0;
		      unsigned long offset =
			(bit (this_instr, 25)
			 ? shifted_reg_val (regcache, this_instr, c,
					    pc_val, status)
			 : bits (this_instr, 0, 11));

		      if (bit (this_instr, 23))
			base += offset;
		      else
			base -= offset;
		    }
		  nextpc
		    = (CORE_ADDR) self->ops->read_mem_uint ((CORE_ADDR) base,
							    4, byte_order);
		}
	    }
	  break;

	case 0x8:
	case 0x9:		/* block transfer */
	  if (bit (this_instr, 20))
	    {
	      /* LDM */
	      if (bit (this_instr, 15))
		{
		  /* loading pc */
		  int offset = 0;
		  CORE_ADDR rn_val_offset = 0;
		  unsigned long rn_val
		    = regcache_raw_get_unsigned (regcache,
						 bits (this_instr, 16, 19));

		  if (bit (this_instr, 23))
		    {
		      /* up */
		      unsigned long reglist = bits (this_instr, 0, 14);
		      offset = bitcount (reglist) * 4;
		      if (bit (this_instr, 24))		/* pre */
			offset += 4;
		    }
		  else if (bit (this_instr, 24))
		    offset = -4;

		  rn_val_offset = rn_val + offset;
		  nextpc = (CORE_ADDR) self->ops->read_mem_uint (rn_val_offset,
								 4, byte_order);
		}
	    }
	  break;

	case 0xb:		/* branch & link */
	case 0xa:		/* branch */
	  {
	    nextpc = BranchDest (pc, this_instr);
	    break;
	  }

	case 0xc:
	case 0xd:
	case 0xe:		/* coproc ops */
	  break;
	case 0xf:		/* SWI */
	  {
	    /* Syscalls may change the PC arbitrarily; ask the
	       target-specific hook for the post-syscall address.  */
	    nextpc = self->ops->syscall_next_pc (self, pc);
	  }
	  break;

	default:
	  error (_("Bad bit-field extraction"));
	  return next_pcs;
	}
    }

  VEC_safe_push (CORE_ADDR, next_pcs, nextpc);
  return next_pcs;
}
905
906 /* See arm-get-next-pcs.h. */
907
908 VEC (CORE_ADDR) *
909 arm_get_next_pcs (struct arm_get_next_pcs *self)
910 {
911 VEC (CORE_ADDR) *next_pcs = NULL;
912
913 if (self->ops->is_thumb (self))
914 {
915 next_pcs = thumb_deal_with_atomic_sequence_raw (self);
916 if (next_pcs == NULL)
917 next_pcs = thumb_get_next_pcs_raw (self);
918 }
919 else
920 {
921 next_pcs = arm_deal_with_atomic_sequence_raw (self);
922 if (next_pcs == NULL)
923 next_pcs = arm_get_next_pcs_raw (self);
924 }
925
926 return next_pcs;
927 }
This page took 0.051041 seconds and 5 git commands to generate.