Convert the rest of the x86 target descriptions
[deliverable/binutils-gdb.git] / gdb / arch / arm-get-next-pcs.c
1 /* Common code for ARM software single stepping support.
2
3 Copyright (C) 1988-2017 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "common-defs.h"
21 #include "gdb_vecs.h"
22 #include "common-regcache.h"
23 #include "arm.h"
24 #include "arm-get-next-pcs.h"
25
26 /* See arm-get-next-pcs.h. */
27
28 void
29 arm_get_next_pcs_ctor (struct arm_get_next_pcs *self,
30 struct arm_get_next_pcs_ops *ops,
31 int byte_order,
32 int byte_order_for_code,
33 int has_thumb2_breakpoint,
34 struct regcache *regcache)
35 {
36 self->ops = ops;
37 self->byte_order = byte_order;
38 self->byte_order_for_code = byte_order_for_code;
39 self->has_thumb2_breakpoint = has_thumb2_breakpoint;
40 self->regcache = regcache;
41 }
42
/* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
   instruction and ending with a STREX{,B,H,D} instruction, in Thumb mode.  If
   such a sequence is found, attempt to step through it: the address just past
   the sequence (and the destination of at most one conditional branch inside
   it) is returned as a Thumb address.  Returns an empty vector when no such
   sequence is recognized, so the caller falls back to ordinary
   single-instruction decoding.  */

static std::vector<CORE_ADDR>
thumb_deal_with_atomic_sequence_raw (struct arm_get_next_pcs *self)
{
  int byte_order_for_code = self->byte_order_for_code;
  /* breaks[0] is the address after the sequence; breaks[1], if used,
     is the destination of a conditional branch out of the sequence.  */
  CORE_ADDR breaks[2] = {-1, -1};
  CORE_ADDR pc = regcache_read_pc (self->regcache);
  CORE_ADDR loc = pc;
  unsigned short insn1, insn2;	/* First/second halfword of a 32-bit insn.  */
  int insn_count;
  int index;
  int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed).  */
  const int atomic_sequence_length = 16; /* Instruction sequence length.  */
  ULONGEST status, itstate;

  /* We currently do not support atomic sequences within an IT block.
     Reassemble the ITSTATE value from its two CPSR chunks; a nonzero
     low nibble means an IT block is currently active.  */
  status = regcache_raw_get_unsigned (self->regcache, ARM_PS_REGNUM);
  itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
  if (itstate & 0x0f)
    return {};

  /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.  */
  insn1 = self->ops->read_mem_uint (loc, 2, byte_order_for_code);

  loc += 2;
  /* The LDREX family are 32-bit Thumb-2 encodings; a 16-bit instruction
     here cannot start a sequence.  */
  if (thumb_insn_size (insn1) != 4)
    return {};

  insn2 = self->ops->read_mem_uint (loc, 2, byte_order_for_code);

  loc += 2;
  if (!((insn1 & 0xfff0) == 0xe850
	|| ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
    return {};

  /* Assume that no atomic sequence is longer than "atomic_sequence_length"
     instructions.  */
  for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
    {
      insn1 = self->ops->read_mem_uint (loc, 2, byte_order_for_code);
      loc += 2;

      if (thumb_insn_size (insn1) != 4)
	{
	  /* Assume that there is at most one conditional branch in the
	     atomic sequence.  If a conditional branch is found, put a
	     breakpoint in its destination address.  */
	  if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
	    {
	      if (last_breakpoint > 0)
		return {}; /* More than one conditional branch found,
			      fallback to the standard code.  */

	      breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
	      last_breakpoint++;
	    }

	  /* We do not support atomic sequences that use any *other*
	     instructions but conditional branches to change the PC.
	     Fall back to standard code to avoid losing control of
	     execution.  */
	  else if (thumb_instruction_changes_pc (insn1))
	    return {};
	}
      else
	{
	  insn2 = self->ops->read_mem_uint (loc, 2, byte_order_for_code);

	  loc += 2;

	  /* Assume that there is at most one conditional branch in the
	     atomic sequence.  If a conditional branch is found, put a
	     breakpoint in its destination address.  */
	  if ((insn1 & 0xf800) == 0xf000
	      && (insn2 & 0xd000) == 0x8000
	      && (insn1 & 0x0380) != 0x0380)
	    {
	      int sign, j1, j2, imm1, imm2;
	      unsigned int offset;

	      /* Reassemble the branch displacement from the scattered
		 Thumb-2 conditional-branch encoding fields.  */
	      sign = sbits (insn1, 10, 10);
	      imm1 = bits (insn1, 0, 5);
	      imm2 = bits (insn2, 0, 10);
	      j1 = bit (insn2, 13);
	      j2 = bit (insn2, 11);

	      offset = (sign << 20) + (j2 << 19) + (j1 << 18);
	      offset += (imm1 << 12) + (imm2 << 1);

	      if (last_breakpoint > 0)
		return {}; /* More than one conditional branch found,
			      fallback to the standard code.  */

	      breaks[1] = loc + offset;
	      last_breakpoint++;
	    }

	  /* We do not support atomic sequences that use any *other*
	     instructions but conditional branches to change the PC.
	     Fall back to standard code to avoid losing control of
	     execution.  */
	  else if (thumb2_instruction_changes_pc (insn1, insn2))
	    return {};

	  /* If we find a strex{,b,h,d}, we're done.  */
	  if ((insn1 & 0xfff0) == 0xe840
	      || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
	    break;
	}
    }

  /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence.  */
  if (insn_count == atomic_sequence_length)
    return {};

  /* Insert a breakpoint right after the end of the atomic sequence.  */
  breaks[0] = loc;

  /* Check for duplicated breakpoints.  Check also for a breakpoint
     placed (branch instruction's destination) anywhere in sequence.  */
  if (last_breakpoint
      && (breaks[1] == breaks[0]
	  || (breaks[1] >= pc && breaks[1] < loc)))
    last_breakpoint = 0;

  std::vector<CORE_ADDR> next_pcs;

  /* Adds the breakpoints to the list to be inserted.  */
  for (index = 0; index <= last_breakpoint; index++)
    next_pcs.push_back (MAKE_THUMB_ADDR (breaks[index]));

  return next_pcs;
}
180
/* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
   instruction and ending with a STREX{,B,H,D} instruction, in ARM mode.  If
   such a sequence is found, attempt to step through it: the address just past
   the sequence (and the destination of at most one conditional branch inside
   it) is returned.  Returns an empty vector when no such sequence is
   recognized, so the caller falls back to ordinary single-instruction
   decoding.  */

static std::vector<CORE_ADDR>
arm_deal_with_atomic_sequence_raw (struct arm_get_next_pcs *self)
{
  int byte_order_for_code = self->byte_order_for_code;
  /* breaks[0] is the address after the sequence; breaks[1], if used,
     is the destination of a conditional branch out of the sequence.  */
  CORE_ADDR breaks[2] = {-1, -1};
  CORE_ADDR pc = regcache_read_pc (self->regcache);
  CORE_ADDR loc = pc;
  unsigned int insn;
  int insn_count;
  int index;
  int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed).  */
  const int atomic_sequence_length = 16; /* Instruction sequence length.  */

  /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
     Note that we do not currently support conditionally executed atomic
     instructions.  */
  insn = self->ops->read_mem_uint (loc, 4, byte_order_for_code);

  loc += 4;
  if ((insn & 0xff9000f0) != 0xe1900090)
    return {};

  /* Assume that no atomic sequence is longer than "atomic_sequence_length"
     instructions.  */
  for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
    {
      insn = self->ops->read_mem_uint (loc, 4, byte_order_for_code);

      loc += 4;

      /* Assume that there is at most one conditional branch in the atomic
	 sequence.  If a conditional branch is found, put a breakpoint in
	 its destination address.  */
      if (bits (insn, 24, 27) == 0xa)
	{
	  if (last_breakpoint > 0)
	    return {}; /* More than one conditional branch found, fallback
			  to the standard single-step code.  */

	  /* LOC has already been advanced past the branch, hence -4.  */
	  breaks[1] = BranchDest (loc - 4, insn);
	  last_breakpoint++;
	}

      /* We do not support atomic sequences that use any *other* instructions
	 but conditional branches to change the PC.  Fall back to standard
	 code to avoid losing control of execution.  */
      else if (arm_instruction_changes_pc (insn))
	return {};

      /* If we find a strex{,b,h,d}, we're done.  */
      if ((insn & 0xff9000f0) == 0xe1800090)
	break;
    }

  /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence.  */
  if (insn_count == atomic_sequence_length)
    return {};

  /* Insert a breakpoint right after the end of the atomic sequence.  */
  breaks[0] = loc;

  /* Check for duplicated breakpoints.  Check also for a breakpoint
     placed (branch instruction's destination) anywhere in sequence.  */
  if (last_breakpoint
      && (breaks[1] == breaks[0]
	  || (breaks[1] >= pc && breaks[1] < loc)))
    last_breakpoint = 0;

  std::vector<CORE_ADDR> next_pcs;

  /* Adds the breakpoints to the list to be inserted.  */
  for (index = 0; index <= last_breakpoint; index++)
    next_pcs.push_back (breaks[index]);

  return next_pcs;
}
262
/* Find the next possible PCs for thumb mode.  Decodes the instruction at
   the current PC and returns the address(es) execution may reach next,
   each tagged as a Thumb address via MAKE_THUMB_ADDR (except for BX/BLX
   to ARM state, see below).  */

static std::vector<CORE_ADDR>
thumb_get_next_pcs_raw (struct arm_get_next_pcs *self)
{
  int byte_order = self->byte_order;
  int byte_order_for_code = self->byte_order_for_code;
  CORE_ADDR pc = regcache_read_pc (self->regcache);
  unsigned long pc_val = ((unsigned long) pc) + 4;	/* PC after prefetch */
  unsigned short inst1;
  CORE_ADDR nextpc = pc + 2;	/* Default is next instruction.  */
  unsigned long offset;
  ULONGEST status, itstate;
  struct regcache *regcache = self->regcache;
  std::vector<CORE_ADDR> next_pcs;

  nextpc = MAKE_THUMB_ADDR (nextpc);
  pc_val = MAKE_THUMB_ADDR (pc_val);

  inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code);

  /* Thumb-2 conditional execution support.  There are eight bits in
     the CPSR which describe conditional execution state.  Once
     reconstructed (they're in a funny order), the low five bits
     describe the low bit of the condition for each instruction and
     how many instructions remain.  The high three bits describe the
     base condition.  One of the low four bits will be set if an IT
     block is active.  These bits read as zero on earlier
     processors.  */
  status = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
  itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);

  /* If-Then handling.  On GNU/Linux, where this routine is used, we
     use an undefined instruction as a breakpoint.  Unlike BKPT, IT
     can disable execution of the undefined instruction.  So we might
     miss the breakpoint if we set it on a skipped conditional
     instruction.  Because conditional instructions can change the
     flags, affecting the execution of further instructions, we may
     need to set two breakpoints.  */

  if (self->has_thumb2_breakpoint)
    {
      if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
	{
	  /* An IT instruction.  Because this instruction does not
	     modify the flags, we can accurately predict the next
	     executed instruction.  */
	  itstate = inst1 & 0x00ff;
	  pc += thumb_insn_size (inst1);

	  /* Skip instructions whose condition will be false until we
	     reach the first one that executes.  */
	  while (itstate != 0 && ! condition_true (itstate >> 4, status))
	    {
	      inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code);
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);
	    }

	  next_pcs.push_back (MAKE_THUMB_ADDR (pc));
	  return next_pcs;
	}
      else if (itstate != 0)
	{
	  /* We are in a conditional block.  Check the condition.  */
	  if (! condition_true (itstate >> 4, status))
	    {
	      /* Advance to the next executed instruction.  */
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	      while (itstate != 0 && ! condition_true (itstate >> 4, status))
		{
		  inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code);

		  pc += thumb_insn_size (inst1);
		  itstate = thumb_advance_itstate (itstate);
		}

	      next_pcs.push_back (MAKE_THUMB_ADDR (pc));
	      return next_pcs;
	    }
	  else if ((itstate & 0x0f) == 0x08)
	    {
	      /* This is the last instruction of the conditional
		 block, and it is executed.  We can handle it normally
		 because the following instruction is not conditional,
		 and we must handle it normally because it is
		 permitted to branch.  Fall through.  */
	    }
	  else
	    {
	      int cond_negated;

	      /* There are conditional instructions after this one.
		 If this instruction modifies the flags, then we can
		 not predict what the next executed instruction will
		 be.  Fortunately, this instruction is architecturally
		 forbidden to branch; we know it will fall through.
		 Start by skipping past it.  */
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	      /* Set a breakpoint on the following instruction.  */
	      gdb_assert ((itstate & 0x0f) != 0);
	      next_pcs.push_back (MAKE_THUMB_ADDR (pc));

	      cond_negated = (itstate >> 4) & 1;

	      /* Skip all following instructions with the same
		 condition.  If there is a later instruction in the IT
		 block with the opposite condition, set the other
		 breakpoint there.  If not, then set a breakpoint on
		 the instruction after the IT block.  */
	      do
		{
		  inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code);
		  pc += thumb_insn_size (inst1);
		  itstate = thumb_advance_itstate (itstate);
		}
	      while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);

	      next_pcs.push_back (MAKE_THUMB_ADDR (pc));

	      return next_pcs;
	    }
	}
    }
  else if (itstate & 0x0f)
    {
      /* We are in a conditional block.  Check the condition.  */
      int cond = itstate >> 4;

      if (! condition_true (cond, status))
	{
	  /* Advance to the next instruction.  All the 32-bit
	     instructions share a common prefix.  */
	  next_pcs.push_back (MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1)));
	}

      return next_pcs;

      /* Otherwise, handle the instruction normally.  */
    }

  if ((inst1 & 0xff00) == 0xbd00)	/* pop {rlist, pc} */
    {
      CORE_ADDR sp;

      /* Fetch the saved PC from the stack.  It's stored above
	 all of the other registers.  */
      offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
      sp = regcache_raw_get_unsigned (regcache, ARM_SP_REGNUM);
      nextpc = self->ops->read_mem_uint (sp + offset, 4, byte_order);
    }
  else if ((inst1 & 0xf000) == 0xd000)	/* conditional branch */
    {
      unsigned long cond = bits (inst1, 8, 11);
      if (cond == 0x0f)  /* 0x0f = SWI */
	{
	  /* Syscall: let the target-specific hook compute the
	     resume address.  */
	  nextpc = self->ops->syscall_next_pc (self);
	}
      else if (cond != 0x0f && condition_true (cond, status))
	nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
    }
  else if ((inst1 & 0xf800) == 0xe000)	/* unconditional branch */
    {
      nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
    }
  else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
    {
      unsigned short inst2;
      inst2 = self->ops->read_mem_uint (pc + 2, 2, byte_order_for_code);

      /* Default to the next instruction.  */
      nextpc = pc + 4;
      nextpc = MAKE_THUMB_ADDR (nextpc);

      if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
	{
	  /* Branches and miscellaneous control instructions.  */

	  if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
	    {
	      /* B, BL, BLX.  */
	      int j1, j2, imm1, imm2;

	      imm1 = sbits (inst1, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      /* The J1/J2 bits are stored inverted relative to the
		 sign bit; the XOR below undoes that encoding.  */
	      offset = ((imm1 << 12) + (imm2 << 1));
	      offset ^= ((!j2) << 22) | ((!j1) << 23);

	      nextpc = pc_val + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;
	    }
	  else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
	    {
	      /* SUBS PC, LR, #imm8.  */
	      nextpc = regcache_raw_get_unsigned (regcache, ARM_LR_REGNUM);
	      nextpc -= inst2 & 0x00ff;
	    }
	  else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
	    {
	      /* Conditional branch.  */
	      if (condition_true (bits (inst1, 6, 9), status))
		{
		  int sign, j1, j2, imm1, imm2;

		  sign = sbits (inst1, 10, 10);
		  imm1 = bits (inst1, 0, 5);
		  imm2 = bits (inst2, 0, 10);
		  j1 = bit (inst2, 13);
		  j2 = bit (inst2, 11);

		  offset = (sign << 20) + (j2 << 19) + (j1 << 18);
		  offset += (imm1 << 12) + (imm2 << 1);

		  nextpc = pc_val + offset;
		}
	    }
	}
      else if ((inst1 & 0xfe50) == 0xe810)
	{
	  /* Load multiple or RFE.  */
	  int rn, offset, load_pc = 1;

	  rn = bits (inst1, 0, 3);
	  if (bit (inst1, 7) && !bit (inst1, 8))
	    {
	      /* LDMIA or POP */
	      if (!bit (inst2, 15))
		load_pc = 0;
	      offset = bitcount (inst2) * 4 - 4;
	    }
	  else if (!bit (inst1, 7) && bit (inst1, 8))
	    {
	      /* LDMDB */
	      if (!bit (inst2, 15))
		load_pc = 0;
	      offset = -4;
	    }
	  else if (bit (inst1, 7) && bit (inst1, 8))
	    {
	      /* RFEIA */
	      offset = 0;
	    }
	  else if (!bit (inst1, 7) && !bit (inst1, 8))
	    {
	      /* RFEDB */
	      offset = -8;
	    }
	  else
	    load_pc = 0;

	  if (load_pc)
	    {
	      CORE_ADDR addr = regcache_raw_get_unsigned (regcache, rn);
	      nextpc = self->ops->read_mem_uint (addr + offset, 4, byte_order);
	    }
	}
      else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
	{
	  /* MOV PC or MOVS PC.  */
	  nextpc = regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3));
	  nextpc = MAKE_THUMB_ADDR (nextpc);
	}
      else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
	{
	  /* LDR PC.  */
	  CORE_ADDR base;
	  int rn, load_pc = 1;

	  rn = bits (inst1, 0, 3);
	  base = regcache_raw_get_unsigned (regcache, rn);
	  if (rn == ARM_PC_REGNUM)
	    {
	      /* PC-relative load: the base is the word-aligned PC.  */
	      base = (base + 4) & ~(CORE_ADDR) 0x3;
	      if (bit (inst1, 7))
		base += bits (inst2, 0, 11);
	      else
		base -= bits (inst2, 0, 11);
	    }
	  else if (bit (inst1, 7))
	    base += bits (inst2, 0, 11);
	  else if (bit (inst2, 11))
	    {
	      if (bit (inst2, 10))
		{
		  if (bit (inst2, 9))
		    base += bits (inst2, 0, 7);
		  else
		    base -= bits (inst2, 0, 7);
		}
	    }
	  else if ((inst2 & 0x0fc0) == 0x0000)
	    {
	      int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
	      base += regcache_raw_get_unsigned (regcache, rm) << shift;
	    }
	  else
	    /* Reserved.  */
	    load_pc = 0;

	  if (load_pc)
	    nextpc
	      = self->ops->read_mem_uint (base, 4, byte_order);
	}
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
	{
	  /* TBB.  */
	  CORE_ADDR tbl_reg, table, offset, length;

	  tbl_reg = bits (inst1, 0, 3);
	  if (tbl_reg == 0x0f)
	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
	  else
	    table = regcache_raw_get_unsigned (regcache, tbl_reg);

	  offset = regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3));
	  length = 2 * self->ops->read_mem_uint (table + offset, 1, byte_order);
	  nextpc = pc_val + length;
	}
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
	{
	  /* TBH.  */
	  CORE_ADDR tbl_reg, table, offset, length;

	  tbl_reg = bits (inst1, 0, 3);
	  if (tbl_reg == 0x0f)
	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
	  else
	    table = regcache_raw_get_unsigned (regcache, tbl_reg);

	  offset = 2 * regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3));
	  length = 2 * self->ops->read_mem_uint (table + offset, 2, byte_order);
	  nextpc = pc_val + length;
	}
    }
  else if ((inst1 & 0xff00) == 0x4700)	/* bx REG, blx REG */
    {
      if (bits (inst1, 3, 6) == 0x0f)
	nextpc = UNMAKE_THUMB_ADDR (pc_val);
      else
	nextpc = regcache_raw_get_unsigned (regcache, bits (inst1, 3, 6));
    }
  else if ((inst1 & 0xff87) == 0x4687)	/* mov pc, REG */
    {
      if (bits (inst1, 3, 6) == 0x0f)
	nextpc = pc_val;
      else
	nextpc = regcache_raw_get_unsigned (regcache, bits (inst1, 3, 6));

      nextpc = MAKE_THUMB_ADDR (nextpc);
    }
  else if ((inst1 & 0xf500) == 0xb100)
    {
      /* CBNZ or CBZ.  */
      int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
      ULONGEST reg = regcache_raw_get_unsigned (regcache, bits (inst1, 0, 2));

      if (bit (inst1, 11) && reg != 0)
	nextpc = pc_val + imm;		/* CBNZ taken.  */
      else if (!bit (inst1, 11) && reg == 0)
	nextpc = pc_val + imm;		/* CBZ taken.  */
    }

  next_pcs.push_back (nextpc);

  return next_pcs;
}
636
/* Get the raw next possible addresses.  PC in next_pcs is the current program
   counter, which is assumed to be executing in ARM mode.

   The values returned have the execution state of the next instruction
   encoded in it.  Use IS_THUMB_ADDR () to see whether the instruction is
   in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
   address in GDB and arm_addr_bits_remove in GDBServer.  */

static std::vector<CORE_ADDR>
arm_get_next_pcs_raw (struct arm_get_next_pcs *self)
{
  int byte_order = self->byte_order;
  int byte_order_for_code = self->byte_order_for_code;
  unsigned long pc_val;
  unsigned long this_instr = 0;
  unsigned long status;
  CORE_ADDR nextpc;
  struct regcache *regcache = self->regcache;
  CORE_ADDR pc = regcache_read_pc (self->regcache);
  std::vector<CORE_ADDR> next_pcs;

  pc_val = (unsigned long) pc;
  this_instr = self->ops->read_mem_uint (pc, 4, byte_order_for_code);

  status = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
  nextpc = (CORE_ADDR) (pc_val + 4);	/* Default case */

  if (bits (this_instr, 28, 31) == INST_NV)
    switch (bits (this_instr, 24, 27))
      {
      case 0xa:
      case 0xb:
	{
	  /* Branch with Link and change to Thumb.  */
	  nextpc = BranchDest (pc, this_instr);
	  nextpc |= bit (this_instr, 24) << 1;
	  nextpc = MAKE_THUMB_ADDR (nextpc);
	  break;
	}
      case 0xc:
      case 0xd:
      case 0xe:
	/* Coprocessor register transfer.  */
	if (bits (this_instr, 12, 15) == 15)
	  error (_("Invalid update to pc in instruction"));
	break;
      }
  else if (condition_true (bits (this_instr, 28, 31), status))
    {
      switch (bits (this_instr, 24, 27))
	{
	case 0x0:
	case 0x1:		/* data processing */
	case 0x2:
	case 0x3:
	  {
	    unsigned long operand1, operand2, result = 0;
	    unsigned long rn;
	    int c;

	    /* Only instructions whose destination register (Rd) is the
	       PC can change control flow.  */
	    if (bits (this_instr, 12, 15) != 15)
	      break;

	    if (bits (this_instr, 22, 25) == 0
		&& bits (this_instr, 4, 7) == 9)	/* multiply */
	      error (_("Invalid update to pc in instruction"));

	    /* BX <reg>, BLX <reg> */
	    if (bits (this_instr, 4, 27) == 0x12fff1
		|| bits (this_instr, 4, 27) == 0x12fff3)
	      {
		rn = bits (this_instr, 0, 3);
		nextpc = ((rn == ARM_PC_REGNUM)
			  ? (pc_val + 8)
			  : regcache_raw_get_unsigned (regcache, rn));

		next_pcs.push_back (nextpc);
		return next_pcs;
	      }

	    /* Multiply into PC.  */
	    c = (status & FLAG_C) ? 1 : 0;
	    rn = bits (this_instr, 16, 19);
	    /* Reading the PC as an operand yields the prefetched
	       value, pc + 8 in ARM state.  */
	    operand1 = ((rn == ARM_PC_REGNUM)
			? (pc_val + 8)
			: regcache_raw_get_unsigned (regcache, rn));

	    if (bit (this_instr, 25))
	      {
		/* Operand 2 is a rotated 8-bit immediate.  */
		unsigned long immval = bits (this_instr, 0, 7);
		unsigned long rotate = 2 * bits (this_instr, 8, 11);
		operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
		  & 0xffffffff;
	      }
	    else		/* operand 2 is a shifted register.  */
	      operand2 = shifted_reg_val (regcache, this_instr, c,
					  pc_val, status);

	    /* Emulate the ALU operation to compute the value written
	       to the PC.  */
	    switch (bits (this_instr, 21, 24))
	      {
	      case 0x0:	/*and */
		result = operand1 & operand2;
		break;

	      case 0x1:	/*eor */
		result = operand1 ^ operand2;
		break;

	      case 0x2:	/*sub */
		result = operand1 - operand2;
		break;

	      case 0x3:	/*rsb */
		result = operand2 - operand1;
		break;

	      case 0x4:	/*add */
		result = operand1 + operand2;
		break;

	      case 0x5:	/*adc */
		result = operand1 + operand2 + c;
		break;

	      case 0x6:	/*sbc */
		result = operand1 - operand2 + c;
		break;

	      case 0x7:	/*rsc */
		result = operand2 - operand1 + c;
		break;

	      case 0x8:
	      case 0x9:
	      case 0xa:
	      case 0xb:	/* tst, teq, cmp, cmn */
		/* Comparison ops only set flags; the PC is not
		   written, so keep the default next address.  */
		result = (unsigned long) nextpc;
		break;

	      case 0xc:	/*orr */
		result = operand1 | operand2;
		break;

	      case 0xd:	/*mov */
		/* Always step into a function.  */
		result = operand2;
		break;

	      case 0xe:	/*bic */
		result = operand1 & ~operand2;
		break;

	      case 0xf:	/*mvn */
		result = ~operand2;
		break;
	      }
	    nextpc = self->ops->addr_bits_remove (self, result);
	    break;
	  }

	case 0x4:
	case 0x5:		/* data transfer */
	case 0x6:
	case 0x7:
	  if (bits (this_instr, 25, 27) == 0x3 && bit (this_instr, 4) == 1)
	    {
	      /* Media instructions and architecturally undefined
		 instructions.  */
	      break;
	    }

	  if (bit (this_instr, 20))
	    {
	      /* load */
	      if (bits (this_instr, 12, 15) == 15)
		{
		  /* rd == pc */
		  unsigned long rn;
		  unsigned long base;

		  if (bit (this_instr, 22))
		    error (_("Invalid update to pc in instruction"));

		  /* byte write to PC */
		  rn = bits (this_instr, 16, 19);
		  base = ((rn == ARM_PC_REGNUM)
			  ? (pc_val + 8)
			  : regcache_raw_get_unsigned (regcache, rn));

		  if (bit (this_instr, 24))
		    {
		      /* pre-indexed */
		      int c = (status & FLAG_C) ? 1 : 0;
		      unsigned long offset =
			(bit (this_instr, 25)
			 ? shifted_reg_val (regcache, this_instr, c,
					    pc_val, status)
			 : bits (this_instr, 0, 11));

		      if (bit (this_instr, 23))
			base += offset;
		      else
			base -= offset;
		    }
		  nextpc
		    = (CORE_ADDR) self->ops->read_mem_uint ((CORE_ADDR) base,
							    4, byte_order);
		}
	    }
	  break;

	case 0x8:
	case 0x9:		/* block transfer */
	  if (bit (this_instr, 20))
	    {
	      /* LDM */
	      if (bit (this_instr, 15))
		{
		  /* loading pc.  The PC is loaded from the highest
		     address touched by the LDM; compute its offset
		     from the base register.  */
		  int offset = 0;
		  CORE_ADDR rn_val_offset = 0;
		  unsigned long rn_val
		    = regcache_raw_get_unsigned (regcache,
						 bits (this_instr, 16, 19));

		  if (bit (this_instr, 23))
		    {
		      /* up */
		      unsigned long reglist = bits (this_instr, 0, 14);
		      offset = bitcount (reglist) * 4;
		      if (bit (this_instr, 24))		/* pre */
			offset += 4;
		    }
		  else if (bit (this_instr, 24))
		    offset = -4;

		  rn_val_offset = rn_val + offset;
		  nextpc = (CORE_ADDR) self->ops->read_mem_uint (rn_val_offset,
								 4, byte_order);
		}
	    }
	  break;

	case 0xb:		/* branch & link */
	case 0xa:		/* branch */
	  {
	    nextpc = BranchDest (pc, this_instr);
	    break;
	  }

	case 0xc:
	case 0xd:
	case 0xe:		/* coproc ops */
	  break;
	case 0xf:		/* SWI */
	  {
	    /* Syscall: let the target-specific hook compute the
	       resume address.  */
	    nextpc = self->ops->syscall_next_pc (self);
	  }
	  break;

	default:
	  error (_("Bad bit-field extraction"));
	  return next_pcs;
	}
    }

  next_pcs.push_back (nextpc);

  return next_pcs;
}
907
908 /* See arm-get-next-pcs.h. */
909
910 std::vector<CORE_ADDR>
911 arm_get_next_pcs (struct arm_get_next_pcs *self)
912 {
913 std::vector<CORE_ADDR> next_pcs;
914
915 if (self->ops->is_thumb (self))
916 {
917 next_pcs = thumb_deal_with_atomic_sequence_raw (self);
918 if (next_pcs.empty ())
919 next_pcs = thumb_get_next_pcs_raw (self);
920 }
921 else
922 {
923 next_pcs = arm_deal_with_atomic_sequence_raw (self);
924 if (next_pcs.empty ())
925 next_pcs = arm_get_next_pcs_raw (self);
926 }
927
928 if (self->ops->fixup != NULL)
929 {
930 for (CORE_ADDR &pc_ref : next_pcs)
931 pc_ref = self->ops->fixup (self, pc_ref);
932 }
933
934 return next_pcs;
935 }
This page took 0.048414 seconds and 4 git commands to generate.