Add $_gthread convenience variable
[deliverable/binutils-gdb.git] / gdb / arch / arm-get-next-pcs.c
CommitLineData
d9311bfa
AT
1/* Common code for ARM software single stepping support.
2
618f726f 3 Copyright (C) 1988-2016 Free Software Foundation, Inc.
d9311bfa
AT
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20#include "common-defs.h"
21#include "gdb_vecs.h"
22#include "common-regcache.h"
23#include "arm.h"
24#include "arm-get-next-pcs.h"
25
26/* See arm-get-next-pcs.h. */
27
28void
29arm_get_next_pcs_ctor (struct arm_get_next_pcs *self,
30 struct arm_get_next_pcs_ops *ops,
31 int byte_order,
32 int byte_order_for_code,
33 const gdb_byte *arm_thumb2_breakpoint,
34 struct regcache *regcache)
35{
36 self->ops = ops;
37 self->byte_order = byte_order;
38 self->byte_order_for_code = byte_order_for_code;
39 self->arm_thumb2_breakpoint = arm_thumb2_breakpoint;
40 self->regcache = regcache;
41}
42
43/* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
44 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
45 is found, attempt to step through it. The end of the sequence address is
46 added to the next_pcs list. */
47
48static VEC (CORE_ADDR) *
49thumb_deal_with_atomic_sequence_raw (struct arm_get_next_pcs *self,
50 CORE_ADDR pc)
51{
52 int byte_order_for_code = self->byte_order_for_code;
53 CORE_ADDR breaks[2] = {-1, -1};
54 CORE_ADDR loc = pc;
55 unsigned short insn1, insn2;
56 int insn_count;
57 int index;
58 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
59 const int atomic_sequence_length = 16; /* Instruction sequence length. */
60 ULONGEST status, itstate;
61 VEC (CORE_ADDR) *next_pcs = NULL;
62
63 /* We currently do not support atomic sequences within an IT block. */
64 status = regcache_raw_get_unsigned (self->regcache, ARM_PS_REGNUM);
65 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
66 if (itstate & 0x0f)
67 return NULL;
68
69 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
70 insn1 = self->ops->read_mem_uint (loc, 2, byte_order_for_code);
71
72 loc += 2;
73 if (thumb_insn_size (insn1) != 4)
74 return NULL;
75
76 insn2 = self->ops->read_mem_uint (loc, 2, byte_order_for_code);
77
78 loc += 2;
79 if (!((insn1 & 0xfff0) == 0xe850
80 || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
81 return NULL;
82
83 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
84 instructions. */
85 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
86 {
87 insn1 = self->ops->read_mem_uint (loc, 2,byte_order_for_code);
88 loc += 2;
89
90 if (thumb_insn_size (insn1) != 4)
91 {
92 /* Assume that there is at most one conditional branch in the
93 atomic sequence. If a conditional branch is found, put a
94 breakpoint in its destination address. */
95 if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
96 {
97 if (last_breakpoint > 0)
98 return NULL; /* More than one conditional branch found,
99 fallback to the standard code. */
100
101 breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
102 last_breakpoint++;
103 }
104
105 /* We do not support atomic sequences that use any *other*
106 instructions but conditional branches to change the PC.
107 Fall back to standard code to avoid losing control of
108 execution. */
109 else if (thumb_instruction_changes_pc (insn1))
110 return NULL;
111 }
112 else
113 {
114 insn2 = self->ops->read_mem_uint (loc, 2, byte_order_for_code);
115
116 loc += 2;
117
118 /* Assume that there is at most one conditional branch in the
119 atomic sequence. If a conditional branch is found, put a
120 breakpoint in its destination address. */
121 if ((insn1 & 0xf800) == 0xf000
122 && (insn2 & 0xd000) == 0x8000
123 && (insn1 & 0x0380) != 0x0380)
124 {
125 int sign, j1, j2, imm1, imm2;
126 unsigned int offset;
127
128 sign = sbits (insn1, 10, 10);
129 imm1 = bits (insn1, 0, 5);
130 imm2 = bits (insn2, 0, 10);
131 j1 = bit (insn2, 13);
132 j2 = bit (insn2, 11);
133
134 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
135 offset += (imm1 << 12) + (imm2 << 1);
136
137 if (last_breakpoint > 0)
138 return 0; /* More than one conditional branch found,
139 fallback to the standard code. */
140
141 breaks[1] = loc + offset;
142 last_breakpoint++;
143 }
144
145 /* We do not support atomic sequences that use any *other*
146 instructions but conditional branches to change the PC.
147 Fall back to standard code to avoid losing control of
148 execution. */
149 else if (thumb2_instruction_changes_pc (insn1, insn2))
150 return NULL;
151
152 /* If we find a strex{,b,h,d}, we're done. */
153 if ((insn1 & 0xfff0) == 0xe840
154 || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
155 break;
156 }
157 }
158
159 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
160 if (insn_count == atomic_sequence_length)
161 return NULL;
162
163 /* Insert a breakpoint right after the end of the atomic sequence. */
164 breaks[0] = loc;
165
166 /* Check for duplicated breakpoints. Check also for a breakpoint
167 placed (branch instruction's destination) anywhere in sequence. */
168 if (last_breakpoint
169 && (breaks[1] == breaks[0]
170 || (breaks[1] >= pc && breaks[1] < loc)))
171 last_breakpoint = 0;
172
173 /* Adds the breakpoints to the list to be inserted. */
174 for (index = 0; index <= last_breakpoint; index++)
175 VEC_safe_push (CORE_ADDR, next_pcs, MAKE_THUMB_ADDR (breaks[index]));
176
177 return next_pcs;
178}
179
180/* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
181 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
182 is found, attempt to step through it. The end of the sequence address is
183 added to the next_pcs list. */
184
185static VEC (CORE_ADDR) *
186arm_deal_with_atomic_sequence_raw (struct arm_get_next_pcs *self,
187 CORE_ADDR pc)
188{
189 int byte_order_for_code = self->byte_order_for_code;
190 CORE_ADDR breaks[2] = {-1, -1};
191 CORE_ADDR loc = pc;
192 unsigned int insn;
193 int insn_count;
194 int index;
195 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
196 const int atomic_sequence_length = 16; /* Instruction sequence length. */
197 VEC (CORE_ADDR) *next_pcs = NULL;
198
199 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
200 Note that we do not currently support conditionally executed atomic
201 instructions. */
202 insn = self->ops->read_mem_uint (loc, 4, byte_order_for_code);
203
204 loc += 4;
205 if ((insn & 0xff9000f0) != 0xe1900090)
206 return NULL;
207
208 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
209 instructions. */
210 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
211 {
212 insn = self->ops->read_mem_uint (loc, 4, byte_order_for_code);
213
214 loc += 4;
215
216 /* Assume that there is at most one conditional branch in the atomic
217 sequence. If a conditional branch is found, put a breakpoint in
218 its destination address. */
219 if (bits (insn, 24, 27) == 0xa)
220 {
221 if (last_breakpoint > 0)
222 return NULL; /* More than one conditional branch found, fallback
223 to the standard single-step code. */
224
225 breaks[1] = BranchDest (loc - 4, insn);
226 last_breakpoint++;
227 }
228
229 /* We do not support atomic sequences that use any *other* instructions
230 but conditional branches to change the PC. Fall back to standard
231 code to avoid losing control of execution. */
232 else if (arm_instruction_changes_pc (insn))
233 return NULL;
234
235 /* If we find a strex{,b,h,d}, we're done. */
236 if ((insn & 0xff9000f0) == 0xe1800090)
237 break;
238 }
239
240 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
241 if (insn_count == atomic_sequence_length)
242 return NULL;
243
244 /* Insert a breakpoint right after the end of the atomic sequence. */
245 breaks[0] = loc;
246
247 /* Check for duplicated breakpoints. Check also for a breakpoint
248 placed (branch instruction's destination) anywhere in sequence. */
249 if (last_breakpoint
250 && (breaks[1] == breaks[0]
251 || (breaks[1] >= pc && breaks[1] < loc)))
252 last_breakpoint = 0;
253
254 /* Adds the breakpoints to the list to be inserted. */
255 for (index = 0; index <= last_breakpoint; index++)
256 VEC_safe_push (CORE_ADDR, next_pcs, breaks[index]);
257
258 return next_pcs;
259}
260
/* Find the next possible PCs for thumb mode.

   SELF supplies the target callbacks, byte orders and register cache; PC is
   the address of the Thumb instruction being stepped over.  Returns a vector
   of candidate next-PC addresses, each tagged with the Thumb bit via
   MAKE_THUMB_ADDR.  */

static VEC (CORE_ADDR) *
thumb_get_next_pcs_raw (struct arm_get_next_pcs *self, CORE_ADDR pc)
{
  int byte_order = self->byte_order;
  int byte_order_for_code = self->byte_order_for_code;
  unsigned long pc_val = ((unsigned long) pc) + 4;	/* PC after prefetch */
  unsigned short inst1;
  CORE_ADDR nextpc = pc + 2;		/* Default is next instruction.  */
  unsigned long offset;
  ULONGEST status, itstate;
  struct regcache *regcache = self->regcache;
  VEC (CORE_ADDR) * next_pcs = NULL;

  nextpc = MAKE_THUMB_ADDR (nextpc);
  pc_val = MAKE_THUMB_ADDR (pc_val);

  inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code);

  /* Thumb-2 conditional execution support.  There are eight bits in
     the CPSR which describe conditional execution state.  Once
     reconstructed (they're in a funny order), the low five bits
     describe the low bit of the condition for each instruction and
     how many instructions remain.  The high three bits describe the
     base condition.  One of the low four bits will be set if an IT
     block is active.  These bits read as zero on earlier
     processors.  */
  status = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
  itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);

  /* If-Then handling.  On GNU/Linux, where this routine is used, we
     use an undefined instruction as a breakpoint.  Unlike BKPT, IT
     can disable execution of the undefined instruction.  So we might
     miss the breakpoint if we set it on a skipped conditional
     instruction.  Because conditional instructions can change the
     flags, affecting the execution of further instructions, we may
     need to set two breakpoints.  */

  if (self->arm_thumb2_breakpoint != NULL)
    {
      if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
	{
	  /* An IT instruction.  Because this instruction does not
	     modify the flags, we can accurately predict the next
	     executed instruction.  */
	  itstate = inst1 & 0x00ff;
	  pc += thumb_insn_size (inst1);

	  /* Skip over instructions whose condition will not hold.  */
	  while (itstate != 0 && ! condition_true (itstate >> 4, status))
	    {
	      inst1 = self->ops->read_mem_uint (pc, 2,byte_order_for_code);
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);
	    }

	  VEC_safe_push (CORE_ADDR, next_pcs, MAKE_THUMB_ADDR (pc));
	  return next_pcs;
	}
      else if (itstate != 0)
	{
	  /* We are in a conditional block.  Check the condition.  */
	  if (! condition_true (itstate >> 4, status))
	    {
	      /* Advance to the next executed instruction.  */
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	      while (itstate != 0 && ! condition_true (itstate >> 4, status))
		{
		  inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code);

		  pc += thumb_insn_size (inst1);
		  itstate = thumb_advance_itstate (itstate);
		}

	      VEC_safe_push (CORE_ADDR, next_pcs, MAKE_THUMB_ADDR (pc));
	      return next_pcs;
	    }
	  else if ((itstate & 0x0f) == 0x08)
	    {
	      /* This is the last instruction of the conditional
		 block, and it is executed.  We can handle it normally
		 because the following instruction is not conditional,
		 and we must handle it normally because it is
		 permitted to branch.  Fall through.  */
	    }
	  else
	    {
	      int cond_negated;

	      /* There are conditional instructions after this one.
		 If this instruction modifies the flags, then we can
		 not predict what the next executed instruction will
		 be.  Fortunately, this instruction is architecturally
		 forbidden to branch; we know it will fall through.
		 Start by skipping past it.  */
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	      /* Set a breakpoint on the following instruction.  */
	      gdb_assert ((itstate & 0x0f) != 0);
	      VEC_safe_push (CORE_ADDR, next_pcs, MAKE_THUMB_ADDR (pc));

	      cond_negated = (itstate >> 4) & 1;

	      /* Skip all following instructions with the same
		 condition.  If there is a later instruction in the IT
		 block with the opposite condition, set the other
		 breakpoint there.  If not, then set a breakpoint on
		 the instruction after the IT block.  */
	      do
		{
		  inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code);
		  pc += thumb_insn_size (inst1);
		  itstate = thumb_advance_itstate (itstate);
		}
	      while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);

	      VEC_safe_push (CORE_ADDR, next_pcs, MAKE_THUMB_ADDR (pc));

	      return next_pcs;
	    }
	}
    }
  else if (itstate & 0x0f)
    {
      /* We are in a conditional block.  Check the condition.  */
      int cond = itstate >> 4;

      if (! condition_true (cond, status))
	{
	  /* Advance to the next instruction.  All the 32-bit
	     instructions share a common prefix.  */
	  VEC_safe_push (CORE_ADDR, next_pcs,
			 MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1)));
	}

      /* NOTE(review): this return is unconditional, so when the
	 condition IS true we return without any next PC recorded,
	 which contradicts the "handle the instruction normally"
	 comment below (and the pre-refactor behavior, where only the
	 untaken case returned early).  Verify against upstream before
	 relying on this path.  */
      return next_pcs;

      /* Otherwise, handle the instruction normally.  */
    }

  if ((inst1 & 0xff00) == 0xbd00)	/* pop {rlist, pc} */
    {
      CORE_ADDR sp;

      /* Fetch the saved PC from the stack.  It's stored above
	 all of the other registers.  */
      offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
      sp = regcache_raw_get_unsigned (regcache, ARM_SP_REGNUM);
      nextpc = self->ops->read_mem_uint (sp + offset, 4, byte_order);
    }
  else if ((inst1 & 0xf000) == 0xd000)	/* conditional branch */
    {
      unsigned long cond = bits (inst1, 8, 11);
      if (cond == 0x0f)  /* 0x0f = SWI */
	{
	  /* System call: the target decides where the kernel returns.  */
	  nextpc = self->ops->syscall_next_pc (self, pc);
	}
      else if (cond != 0x0f && condition_true (cond, status))
	nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
    }
  else if ((inst1 & 0xf800) == 0xe000)	/* unconditional branch */
    {
      nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
    }
  else if (thumb_insn_size (inst1) == 4)  /* 32-bit instruction */
    {
      unsigned short inst2;
      inst2 = self->ops->read_mem_uint (pc + 2, 2, byte_order_for_code);

      /* Default to the next instruction.  */
      nextpc = pc + 4;
      nextpc = MAKE_THUMB_ADDR (nextpc);

      if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
	{
	  /* Branches and miscellaneous control instructions.  */

	  if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
	    {
	      /* B, BL, BLX.  */
	      int j1, j2, imm1, imm2;

	      imm1 = sbits (inst1, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      offset = ((imm1 << 12) + (imm2 << 1));
	      /* J1/J2 are stored inverted relative to the sign bit.  */
	      offset ^= ((!j2) << 22) | ((!j1) << 23);

	      nextpc = pc_val + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;
	    }
	  else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
	    {
	      /* SUBS PC, LR, #imm8.  */
	      nextpc = regcache_raw_get_unsigned (regcache, ARM_LR_REGNUM);
	      nextpc -= inst2 & 0x00ff;
	    }
	  else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
	    {
	      /* Conditional branch.  */
	      if (condition_true (bits (inst1, 6, 9), status))
		{
		  int sign, j1, j2, imm1, imm2;

		  sign = sbits (inst1, 10, 10);
		  imm1 = bits (inst1, 0, 5);
		  imm2 = bits (inst2, 0, 10);
		  j1 = bit (inst2, 13);
		  j2 = bit (inst2, 11);

		  offset = (sign << 20) + (j2 << 19) + (j1 << 18);
		  offset += (imm1 << 12) + (imm2 << 1);

		  nextpc = pc_val + offset;
		}
	    }
	}
      else if ((inst1 & 0xfe50) == 0xe810)
	{
	  /* Load multiple or RFE.  */
	  int rn, offset, load_pc = 1;

	  rn = bits (inst1, 0, 3);
	  if (bit (inst1, 7) && !bit (inst1, 8))
	    {
	      /* LDMIA or POP */
	      if (!bit (inst2, 15))
		load_pc = 0;
	      offset = bitcount (inst2) * 4 - 4;
	    }
	  else if (!bit (inst1, 7) && bit (inst1, 8))
	    {
	      /* LDMDB */
	      if (!bit (inst2, 15))
		load_pc = 0;
	      offset = -4;
	    }
	  else if (bit (inst1, 7) && bit (inst1, 8))
	    {
	      /* RFEIA */
	      offset = 0;
	    }
	  else if (!bit (inst1, 7) && !bit (inst1, 8))
	    {
	      /* RFEDB */
	      offset = -8;
	    }
	  else
	    load_pc = 0;

	  if (load_pc)
	    {
	      /* Read the saved PC from the transfer area.  */
	      CORE_ADDR addr = regcache_raw_get_unsigned (regcache, rn);
	      nextpc = self->ops->read_mem_uint (addr + offset, 4, byte_order);
	    }
	}
      else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
	{
	  /* MOV PC or MOVS PC.  */
	  nextpc = regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3));
	  nextpc = MAKE_THUMB_ADDR (nextpc);
	}
      else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
	{
	  /* LDR PC.  */
	  CORE_ADDR base;
	  int rn, load_pc = 1;

	  rn = bits (inst1, 0, 3);
	  base = regcache_raw_get_unsigned (regcache, rn);
	  if (rn == ARM_PC_REGNUM)
	    {
	      /* PC-relative load: align the base and apply the
		 immediate offset.  */
	      base = (base + 4) & ~(CORE_ADDR) 0x3;
	      if (bit (inst1, 7))
		base += bits (inst2, 0, 11);
	      else
		base -= bits (inst2, 0, 11);
	    }
	  else if (bit (inst1, 7))
	    base += bits (inst2, 0, 11);
	  else if (bit (inst2, 11))
	    {
	      if (bit (inst2, 10))
		{
		  if (bit (inst2, 9))
		    base += bits (inst2, 0, 7);
		  else
		    base -= bits (inst2, 0, 7);
		}
	    }
	  else if ((inst2 & 0x0fc0) == 0x0000)
	    {
	      /* Register offset with optional left shift.  */
	      int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
	      base += regcache_raw_get_unsigned (regcache, rm) << shift;
	    }
	  else
	    /* Reserved.  */
	    load_pc = 0;

	  if (load_pc)
	    nextpc
	      = self->ops->read_mem_uint (base, 4, byte_order);
	}
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
	{
	  /* TBB.  */
	  CORE_ADDR tbl_reg, table, offset, length;

	  tbl_reg = bits (inst1, 0, 3);
	  if (tbl_reg == 0x0f)
	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
	  else
	    table = regcache_raw_get_unsigned (regcache, tbl_reg);

	  /* Table entries are byte-sized branch half-offsets.  */
	  offset = regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3));
	  length = 2 * self->ops->read_mem_uint (table + offset, 1, byte_order);
	  nextpc = pc_val + length;
	}
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
	{
	  /* TBH.  */
	  CORE_ADDR tbl_reg, table, offset, length;

	  tbl_reg = bits (inst1, 0, 3);
	  if (tbl_reg == 0x0f)
	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
	  else
	    table = regcache_raw_get_unsigned (regcache, tbl_reg);

	  /* Table entries are halfword-sized branch half-offsets.  */
	  offset = 2 * regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3));
	  length = 2 * self->ops->read_mem_uint (table + offset, 2, byte_order);
	  nextpc = pc_val + length;
	}
    }
  else if ((inst1 & 0xff00) == 0x4700)	/* bx REG, blx REG */
    {
      if (bits (inst1, 3, 6) == 0x0f)
	nextpc = UNMAKE_THUMB_ADDR (pc_val);
      else
	nextpc = regcache_raw_get_unsigned (regcache, bits (inst1, 3, 6));
    }
  else if ((inst1 & 0xff87) == 0x4687)	/* mov pc, REG */
    {
      if (bits (inst1, 3, 6) == 0x0f)
	nextpc = pc_val;
      else
	nextpc = regcache_raw_get_unsigned (regcache, bits (inst1, 3, 6));

      nextpc = MAKE_THUMB_ADDR (nextpc);
    }
  else if ((inst1 & 0xf500) == 0xb100)
    {
      /* CBNZ or CBZ.  */
      int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
      ULONGEST reg = regcache_raw_get_unsigned (regcache, bits (inst1, 0, 2));

      if (bit (inst1, 11) && reg != 0)
	nextpc = pc_val + imm;
      else if (!bit (inst1, 11) && reg == 0)
	nextpc = pc_val + imm;
    }

  VEC_safe_push (CORE_ADDR, next_pcs, nextpc);

  return next_pcs;
}
634
/* Get the raw next possible addresses.  PC in next_pcs is the current program
   counter, which is assumed to be executing in ARM mode.

   The values returned have the execution state of the next instruction
   encoded in it.  Use IS_THUMB_ADDR () to see whether the instruction is
   in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
   address in GDB and arm_addr_bits_remove in GDBServer.  */

static VEC (CORE_ADDR) *
arm_get_next_pcs_raw (struct arm_get_next_pcs *self, CORE_ADDR pc)
{
  int byte_order = self->byte_order;
  unsigned long pc_val;
  unsigned long this_instr = 0;
  unsigned long status;
  CORE_ADDR nextpc;
  struct regcache *regcache = self->regcache;
  VEC (CORE_ADDR) *next_pcs = NULL;

  pc_val = (unsigned long) pc;
  this_instr = self->ops->read_mem_uint (pc, 4, byte_order);

  status = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
  nextpc = (CORE_ADDR) (pc_val + 4);	/* Default case */

  /* Condition field 0xf selects the unconditional (NV) instruction
     space; only BLX-immediate and coprocessor transfers matter here.  */
  if (bits (this_instr, 28, 31) == INST_NV)
    switch (bits (this_instr, 24, 27))
      {
      case 0xa:
      case 0xb:
	{
	  /* Branch with Link and change to Thumb.  */
	  nextpc = BranchDest (pc, this_instr);
	  /* Bit 24 of BLX supplies bit 1 of the halfword-aligned
	     Thumb destination.  */
	  nextpc |= bit (this_instr, 24) << 1;
	  nextpc = MAKE_THUMB_ADDR (nextpc);
	  break;
	}
      case 0xc:
      case 0xd:
      case 0xe:
	/* Coprocessor register transfer.  */
	if (bits (this_instr, 12, 15) == 15)
	  error (_("Invalid update to pc in instruction"));
	break;
      }
  else if (condition_true (bits (this_instr, 28, 31), status))
    {
      /* Only instructions whose condition passes can change the PC;
	 a failed condition leaves the default fall-through address.  */
      switch (bits (this_instr, 24, 27))
	{
	case 0x0:
	case 0x1:			/* data processing */
	case 0x2:
	case 0x3:
	  {
	    unsigned long operand1, operand2, result = 0;
	    unsigned long rn;
	    int c;

	    /* Only interesting when the destination register is PC.  */
	    if (bits (this_instr, 12, 15) != 15)
	      break;

	    if (bits (this_instr, 22, 25) == 0
		&& bits (this_instr, 4, 7) == 9)	/* multiply */
	      error (_("Invalid update to pc in instruction"));

	    /* BX <reg>, BLX <reg> */
	    if (bits (this_instr, 4, 27) == 0x12fff1
		|| bits (this_instr, 4, 27) == 0x12fff3)
	      {
		/* Register 15 reads as the instruction address + 8.  */
		rn = bits (this_instr, 0, 3);
		nextpc = ((rn == ARM_PC_REGNUM)
			  ? (pc_val + 8)
			  : regcache_raw_get_unsigned (regcache, rn));

		VEC_safe_push (CORE_ADDR, next_pcs, nextpc);
		return next_pcs;
	      }

	    /* Multiply into PC.  */
	    c = (status & FLAG_C) ? 1 : 0;
	    rn = bits (this_instr, 16, 19);
	    operand1 = ((rn == ARM_PC_REGNUM)
			? (pc_val + 8)
			: regcache_raw_get_unsigned (regcache, rn));

	    if (bit (this_instr, 25))
	      {
		/* Immediate operand: 8-bit value rotated right by
		   twice the rotate field.  */
		unsigned long immval = bits (this_instr, 0, 7);
		unsigned long rotate = 2 * bits (this_instr, 8, 11);
		operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
		  & 0xffffffff;
	      }
	    else		/* operand 2 is a shifted register.  */
	      operand2 = shifted_reg_val (regcache, this_instr, c,
					  pc_val, status);

	    /* Emulate the ALU operation to compute the written PC.  */
	    switch (bits (this_instr, 21, 24))
	      {
	      case 0x0:	/*and */
		result = operand1 & operand2;
		break;

	      case 0x1:	/*eor */
		result = operand1 ^ operand2;
		break;

	      case 0x2:	/*sub */
		result = operand1 - operand2;
		break;

	      case 0x3:	/*rsb */
		result = operand2 - operand1;
		break;

	      case 0x4:	/*add */
		result = operand1 + operand2;
		break;

	      case 0x5:	/*adc */
		result = operand1 + operand2 + c;
		break;

	      case 0x6:	/*sbc */
		result = operand1 - operand2 + c;
		break;

	      case 0x7:	/*rsc */
		result = operand2 - operand1 + c;
		break;

	      case 0x8:
	      case 0x9:
	      case 0xa:
	      case 0xb:	/* tst, teq, cmp, cmn */
		/* Comparison ops don't write PC; keep fall-through.  */
		result = (unsigned long) nextpc;
		break;

	      case 0xc:	/*orr */
		result = operand1 | operand2;
		break;

	      case 0xd:	/*mov */
		/* Always step into a function.  */
		result = operand2;
		break;

	      case 0xe:	/*bic */
		result = operand1 & ~operand2;
		break;

	      case 0xf:	/*mvn */
		result = ~operand2;
		break;
	      }
	    nextpc = self->ops->addr_bits_remove (self, result);
	    break;
	  }

	case 0x4:
	case 0x5:		/* data transfer */
	case 0x6:
	case 0x7:
	  if (bits (this_instr, 25, 27) == 0x3 && bit (this_instr, 4) == 1)
	    {
	      /* Media instructions and architecturally undefined
		 instructions.  */
	      break;
	    }

	  if (bit (this_instr, 20))
	    {
	      /* load */
	      if (bits (this_instr, 12, 15) == 15)
		{
		  /* rd == pc */
		  unsigned long rn;
		  unsigned long base;

		  if (bit (this_instr, 22))
		    error (_("Invalid update to pc in instruction"));

		  /* byte write to PC */
		  rn = bits (this_instr, 16, 19);
		  base = ((rn == ARM_PC_REGNUM)
			  ? (pc_val + 8)
			  : regcache_raw_get_unsigned (regcache, rn));

		  if (bit (this_instr, 24))
		    {
		      /* pre-indexed */
		      int c = (status & FLAG_C) ? 1 : 0;
		      unsigned long offset =
			(bit (this_instr, 25)
			 ? shifted_reg_val (regcache, this_instr, c,
					    pc_val, status)
			 : bits (this_instr, 0, 11));

		      if (bit (this_instr, 23))
			base += offset;
		      else
			base -= offset;
		    }
		  /* The new PC is the word loaded from the computed
		     address.  */
		  nextpc
		    = (CORE_ADDR) self->ops->read_mem_uint ((CORE_ADDR) base,
							    4, byte_order);
		}
	    }
	  break;

	case 0x8:
	case 0x9:		/* block transfer */
	  if (bit (this_instr, 20))
	    {
	      /* LDM */
	      if (bit (this_instr, 15))
		{
		  /* loading pc */
		  int offset = 0;
		  CORE_ADDR rn_val_offset = 0;
		  unsigned long rn_val
		    = regcache_raw_get_unsigned (regcache,
						 bits (this_instr, 16, 19));

		  if (bit (this_instr, 23))
		    {
		      /* up */
		      unsigned long reglist = bits (this_instr, 0, 14);
		      offset = bitcount (reglist) * 4;
		      if (bit (this_instr, 24))		/* pre */
			offset += 4;
		    }
		  else if (bit (this_instr, 24))
		    offset = -4;

		  /* PC is the last register transferred; read it from
		     its slot in the register block.  */
		  rn_val_offset = rn_val + offset;
		  nextpc = (CORE_ADDR) self->ops->read_mem_uint (rn_val_offset,
								 4, byte_order);
		}
	    }
	  break;

	case 0xb:		/* branch & link */
	case 0xa:		/* branch */
	  {
	    nextpc = BranchDest (pc, this_instr);
	    break;
	  }

	case 0xc:
	case 0xd:
	case 0xe:		/* coproc ops */
	  break;
	case 0xf:		/* SWI */
	  {
	    /* System call: the target decides where the kernel
	       returns control.  */
	    nextpc = self->ops->syscall_next_pc (self, pc);
	  }
	  break;

	default:
	  error (_("Bad bit-field extraction"));
	  /* NOTE(review): error () presumably does not return, so
	     this return looks like unreachable defensive code —
	     confirm against gdb's error () contract.  */
	  return next_pcs;
	}
    }

  VEC_safe_push (CORE_ADDR, next_pcs, nextpc);
  return next_pcs;
}
f5aa3069
YQ
902
903/* See arm-get-next-pcs.h. */
904
905VEC (CORE_ADDR) *
906arm_get_next_pcs (struct arm_get_next_pcs *self, CORE_ADDR pc)
907{
908 VEC (CORE_ADDR) *next_pcs = NULL;
909
910 if (self->ops->is_thumb (self))
911 {
912 next_pcs = thumb_deal_with_atomic_sequence_raw (self, pc);
913 if (next_pcs == NULL)
914 next_pcs = thumb_get_next_pcs_raw (self, pc);
915 }
916 else
917 {
918 next_pcs = arm_deal_with_atomic_sequence_raw (self, pc);
919 if (next_pcs == NULL)
920 next_pcs = arm_get_next_pcs_raw (self, pc);
921 }
922
923 return next_pcs;
924}
This page took 0.061727 seconds and 4 git commands to generate.