/* Common code for ARM software single stepping support.

   Copyright (C) 1988-2016 Free Software Foundation, Inc.

   This file is part of GDB.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.  */
20 #include "common-defs.h"
22 #include "common-regcache.h"
24 #include "arm-get-next-pcs.h"
26 /* See arm-get-next-pcs.h. */
29 arm_get_next_pcs_ctor (struct arm_get_next_pcs
*self
,
30 struct arm_get_next_pcs_ops
*ops
,
32 int byte_order_for_code
,
33 const gdb_byte
*arm_thumb2_breakpoint
,
34 struct regcache
*regcache
)
37 self
->byte_order
= byte_order
;
38 self
->byte_order_for_code
= byte_order_for_code
;
39 self
->arm_thumb2_breakpoint
= arm_thumb2_breakpoint
;
40 self
->regcache
= regcache
;
43 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
44 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
45 is found, attempt to step through it. The end of the sequence address is
46 added to the next_pcs list. */
48 static VEC (CORE_ADDR
) *
49 thumb_deal_with_atomic_sequence_raw (struct arm_get_next_pcs
*self
,
52 int byte_order_for_code
= self
->byte_order_for_code
;
53 CORE_ADDR breaks
[2] = {-1, -1};
55 unsigned short insn1
, insn2
;
58 int last_breakpoint
= 0; /* Defaults to 0 (no breakpoints placed). */
59 const int atomic_sequence_length
= 16; /* Instruction sequence length. */
60 ULONGEST status
, itstate
;
61 VEC (CORE_ADDR
) *next_pcs
= NULL
;
63 /* We currently do not support atomic sequences within an IT block. */
64 status
= regcache_raw_get_unsigned (self
->regcache
, ARM_PS_REGNUM
);
65 itstate
= ((status
>> 8) & 0xfc) | ((status
>> 25) & 0x3);
69 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
70 insn1
= self
->ops
->read_mem_uint (loc
, 2, byte_order_for_code
);
73 if (thumb_insn_size (insn1
) != 4)
76 insn2
= self
->ops
->read_mem_uint (loc
, 2, byte_order_for_code
);
79 if (!((insn1
& 0xfff0) == 0xe850
80 || ((insn1
& 0xfff0) == 0xe8d0 && (insn2
& 0x00c0) == 0x0040)))
83 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
85 for (insn_count
= 0; insn_count
< atomic_sequence_length
; ++insn_count
)
87 insn1
= self
->ops
->read_mem_uint (loc
, 2,byte_order_for_code
);
90 if (thumb_insn_size (insn1
) != 4)
92 /* Assume that there is at most one conditional branch in the
93 atomic sequence. If a conditional branch is found, put a
94 breakpoint in its destination address. */
95 if ((insn1
& 0xf000) == 0xd000 && bits (insn1
, 8, 11) != 0x0f)
97 if (last_breakpoint
> 0)
98 return NULL
; /* More than one conditional branch found,
99 fallback to the standard code. */
101 breaks
[1] = loc
+ 2 + (sbits (insn1
, 0, 7) << 1);
105 /* We do not support atomic sequences that use any *other*
106 instructions but conditional branches to change the PC.
107 Fall back to standard code to avoid losing control of
109 else if (thumb_instruction_changes_pc (insn1
))
114 insn2
= self
->ops
->read_mem_uint (loc
, 2, byte_order_for_code
);
118 /* Assume that there is at most one conditional branch in the
119 atomic sequence. If a conditional branch is found, put a
120 breakpoint in its destination address. */
121 if ((insn1
& 0xf800) == 0xf000
122 && (insn2
& 0xd000) == 0x8000
123 && (insn1
& 0x0380) != 0x0380)
125 int sign
, j1
, j2
, imm1
, imm2
;
128 sign
= sbits (insn1
, 10, 10);
129 imm1
= bits (insn1
, 0, 5);
130 imm2
= bits (insn2
, 0, 10);
131 j1
= bit (insn2
, 13);
132 j2
= bit (insn2
, 11);
134 offset
= (sign
<< 20) + (j2
<< 19) + (j1
<< 18);
135 offset
+= (imm1
<< 12) + (imm2
<< 1);
137 if (last_breakpoint
> 0)
138 return 0; /* More than one conditional branch found,
139 fallback to the standard code. */
141 breaks
[1] = loc
+ offset
;
145 /* We do not support atomic sequences that use any *other*
146 instructions but conditional branches to change the PC.
147 Fall back to standard code to avoid losing control of
149 else if (thumb2_instruction_changes_pc (insn1
, insn2
))
152 /* If we find a strex{,b,h,d}, we're done. */
153 if ((insn1
& 0xfff0) == 0xe840
154 || ((insn1
& 0xfff0) == 0xe8c0 && (insn2
& 0x00c0) == 0x0040))
159 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
160 if (insn_count
== atomic_sequence_length
)
163 /* Insert a breakpoint right after the end of the atomic sequence. */
166 /* Check for duplicated breakpoints. Check also for a breakpoint
167 placed (branch instruction's destination) anywhere in sequence. */
169 && (breaks
[1] == breaks
[0]
170 || (breaks
[1] >= pc
&& breaks
[1] < loc
)))
173 /* Adds the breakpoints to the list to be inserted. */
174 for (index
= 0; index
<= last_breakpoint
; index
++)
175 VEC_safe_push (CORE_ADDR
, next_pcs
, MAKE_THUMB_ADDR (breaks
[index
]));
180 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
181 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
182 is found, attempt to step through it. The end of the sequence address is
183 added to the next_pcs list. */
185 static VEC (CORE_ADDR
) *
186 arm_deal_with_atomic_sequence_raw (struct arm_get_next_pcs
*self
,
189 int byte_order_for_code
= self
->byte_order_for_code
;
190 CORE_ADDR breaks
[2] = {-1, -1};
195 int last_breakpoint
= 0; /* Defaults to 0 (no breakpoints placed). */
196 const int atomic_sequence_length
= 16; /* Instruction sequence length. */
197 VEC (CORE_ADDR
) *next_pcs
= NULL
;
199 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
200 Note that we do not currently support conditionally executed atomic
202 insn
= self
->ops
->read_mem_uint (loc
, 4, byte_order_for_code
);
205 if ((insn
& 0xff9000f0) != 0xe1900090)
208 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
210 for (insn_count
= 0; insn_count
< atomic_sequence_length
; ++insn_count
)
212 insn
= self
->ops
->read_mem_uint (loc
, 4, byte_order_for_code
);
216 /* Assume that there is at most one conditional branch in the atomic
217 sequence. If a conditional branch is found, put a breakpoint in
218 its destination address. */
219 if (bits (insn
, 24, 27) == 0xa)
221 if (last_breakpoint
> 0)
222 return NULL
; /* More than one conditional branch found, fallback
223 to the standard single-step code. */
225 breaks
[1] = BranchDest (loc
- 4, insn
);
229 /* We do not support atomic sequences that use any *other* instructions
230 but conditional branches to change the PC. Fall back to standard
231 code to avoid losing control of execution. */
232 else if (arm_instruction_changes_pc (insn
))
235 /* If we find a strex{,b,h,d}, we're done. */
236 if ((insn
& 0xff9000f0) == 0xe1800090)
240 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
241 if (insn_count
== atomic_sequence_length
)
244 /* Insert a breakpoint right after the end of the atomic sequence. */
247 /* Check for duplicated breakpoints. Check also for a breakpoint
248 placed (branch instruction's destination) anywhere in sequence. */
250 && (breaks
[1] == breaks
[0]
251 || (breaks
[1] >= pc
&& breaks
[1] < loc
)))
254 /* Adds the breakpoints to the list to be inserted. */
255 for (index
= 0; index
<= last_breakpoint
; index
++)
256 VEC_safe_push (CORE_ADDR
, next_pcs
, breaks
[index
]);
261 /* Find the next possible PCs for thumb mode. */
263 static VEC (CORE_ADDR
) *
264 thumb_get_next_pcs_raw (struct arm_get_next_pcs
*self
, CORE_ADDR pc
)
266 int byte_order
= self
->byte_order
;
267 int byte_order_for_code
= self
->byte_order_for_code
;
268 unsigned long pc_val
= ((unsigned long) pc
) + 4; /* PC after prefetch */
269 unsigned short inst1
;
270 CORE_ADDR nextpc
= pc
+ 2; /* Default is next instruction. */
271 unsigned long offset
;
272 ULONGEST status
, itstate
;
273 struct regcache
*regcache
= self
->regcache
;
274 VEC (CORE_ADDR
) * next_pcs
= NULL
;
276 nextpc
= MAKE_THUMB_ADDR (nextpc
);
277 pc_val
= MAKE_THUMB_ADDR (pc_val
);
279 inst1
= self
->ops
->read_mem_uint (pc
, 2, byte_order_for_code
);
281 /* Thumb-2 conditional execution support. There are eight bits in
282 the CPSR which describe conditional execution state. Once
283 reconstructed (they're in a funny order), the low five bits
284 describe the low bit of the condition for each instruction and
285 how many instructions remain. The high three bits describe the
286 base condition. One of the low four bits will be set if an IT
287 block is active. These bits read as zero on earlier
289 status
= regcache_raw_get_unsigned (regcache
, ARM_PS_REGNUM
);
290 itstate
= ((status
>> 8) & 0xfc) | ((status
>> 25) & 0x3);
292 /* If-Then handling. On GNU/Linux, where this routine is used, we
293 use an undefined instruction as a breakpoint. Unlike BKPT, IT
294 can disable execution of the undefined instruction. So we might
295 miss the breakpoint if we set it on a skipped conditional
296 instruction. Because conditional instructions can change the
297 flags, affecting the execution of further instructions, we may
298 need to set two breakpoints. */
300 if (self
->arm_thumb2_breakpoint
!= NULL
)
302 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
304 /* An IT instruction. Because this instruction does not
305 modify the flags, we can accurately predict the next
306 executed instruction. */
307 itstate
= inst1
& 0x00ff;
308 pc
+= thumb_insn_size (inst1
);
310 while (itstate
!= 0 && ! condition_true (itstate
>> 4, status
))
312 inst1
= self
->ops
->read_mem_uint (pc
, 2,byte_order_for_code
);
313 pc
+= thumb_insn_size (inst1
);
314 itstate
= thumb_advance_itstate (itstate
);
317 VEC_safe_push (CORE_ADDR
, next_pcs
, MAKE_THUMB_ADDR (pc
));
320 else if (itstate
!= 0)
322 /* We are in a conditional block. Check the condition. */
323 if (! condition_true (itstate
>> 4, status
))
325 /* Advance to the next executed instruction. */
326 pc
+= thumb_insn_size (inst1
);
327 itstate
= thumb_advance_itstate (itstate
);
329 while (itstate
!= 0 && ! condition_true (itstate
>> 4, status
))
331 inst1
= self
->ops
->read_mem_uint (pc
, 2, byte_order_for_code
);
333 pc
+= thumb_insn_size (inst1
);
334 itstate
= thumb_advance_itstate (itstate
);
337 VEC_safe_push (CORE_ADDR
, next_pcs
, MAKE_THUMB_ADDR (pc
));
340 else if ((itstate
& 0x0f) == 0x08)
342 /* This is the last instruction of the conditional
343 block, and it is executed. We can handle it normally
344 because the following instruction is not conditional,
345 and we must handle it normally because it is
346 permitted to branch. Fall through. */
352 /* There are conditional instructions after this one.
353 If this instruction modifies the flags, then we can
354 not predict what the next executed instruction will
355 be. Fortunately, this instruction is architecturally
356 forbidden to branch; we know it will fall through.
357 Start by skipping past it. */
358 pc
+= thumb_insn_size (inst1
);
359 itstate
= thumb_advance_itstate (itstate
);
361 /* Set a breakpoint on the following instruction. */
362 gdb_assert ((itstate
& 0x0f) != 0);
363 VEC_safe_push (CORE_ADDR
, next_pcs
, MAKE_THUMB_ADDR (pc
));
365 cond_negated
= (itstate
>> 4) & 1;
367 /* Skip all following instructions with the same
368 condition. If there is a later instruction in the IT
369 block with the opposite condition, set the other
370 breakpoint there. If not, then set a breakpoint on
371 the instruction after the IT block. */
374 inst1
= self
->ops
->read_mem_uint (pc
, 2, byte_order_for_code
);
375 pc
+= thumb_insn_size (inst1
);
376 itstate
= thumb_advance_itstate (itstate
);
378 while (itstate
!= 0 && ((itstate
>> 4) & 1) == cond_negated
);
380 VEC_safe_push (CORE_ADDR
, next_pcs
, MAKE_THUMB_ADDR (pc
));
386 else if (itstate
& 0x0f)
388 /* We are in a conditional block. Check the condition. */
389 int cond
= itstate
>> 4;
391 if (! condition_true (cond
, status
))
393 /* Advance to the next instruction. All the 32-bit
394 instructions share a common prefix. */
395 VEC_safe_push (CORE_ADDR
, next_pcs
,
396 MAKE_THUMB_ADDR (pc
+ thumb_insn_size (inst1
)));
401 /* Otherwise, handle the instruction normally. */
404 if ((inst1
& 0xff00) == 0xbd00) /* pop {rlist, pc} */
408 /* Fetch the saved PC from the stack. It's stored above
409 all of the other registers. */
410 offset
= bitcount (bits (inst1
, 0, 7)) * INT_REGISTER_SIZE
;
411 sp
= regcache_raw_get_unsigned (regcache
, ARM_SP_REGNUM
);
412 nextpc
= self
->ops
->read_mem_uint (sp
+ offset
, 4, byte_order
);
414 else if ((inst1
& 0xf000) == 0xd000) /* conditional branch */
416 unsigned long cond
= bits (inst1
, 8, 11);
417 if (cond
== 0x0f) /* 0x0f = SWI */
419 nextpc
= self
->ops
->syscall_next_pc (self
, pc
);
421 else if (cond
!= 0x0f && condition_true (cond
, status
))
422 nextpc
= pc_val
+ (sbits (inst1
, 0, 7) << 1);
424 else if ((inst1
& 0xf800) == 0xe000) /* unconditional branch */
426 nextpc
= pc_val
+ (sbits (inst1
, 0, 10) << 1);
428 else if (thumb_insn_size (inst1
) == 4) /* 32-bit instruction */
430 unsigned short inst2
;
431 inst2
= self
->ops
->read_mem_uint (pc
+ 2, 2, byte_order_for_code
);
433 /* Default to the next instruction. */
435 nextpc
= MAKE_THUMB_ADDR (nextpc
);
437 if ((inst1
& 0xf800) == 0xf000 && (inst2
& 0x8000) == 0x8000)
439 /* Branches and miscellaneous control instructions. */
441 if ((inst2
& 0x1000) != 0 || (inst2
& 0xd001) == 0xc000)
444 int j1
, j2
, imm1
, imm2
;
446 imm1
= sbits (inst1
, 0, 10);
447 imm2
= bits (inst2
, 0, 10);
448 j1
= bit (inst2
, 13);
449 j2
= bit (inst2
, 11);
451 offset
= ((imm1
<< 12) + (imm2
<< 1));
452 offset
^= ((!j2
) << 22) | ((!j1
) << 23);
454 nextpc
= pc_val
+ offset
;
455 /* For BLX make sure to clear the low bits. */
456 if (bit (inst2
, 12) == 0)
457 nextpc
= nextpc
& 0xfffffffc;
459 else if (inst1
== 0xf3de && (inst2
& 0xff00) == 0x3f00)
461 /* SUBS PC, LR, #imm8. */
462 nextpc
= regcache_raw_get_unsigned (regcache
, ARM_LR_REGNUM
);
463 nextpc
-= inst2
& 0x00ff;
465 else if ((inst2
& 0xd000) == 0x8000 && (inst1
& 0x0380) != 0x0380)
467 /* Conditional branch. */
468 if (condition_true (bits (inst1
, 6, 9), status
))
470 int sign
, j1
, j2
, imm1
, imm2
;
472 sign
= sbits (inst1
, 10, 10);
473 imm1
= bits (inst1
, 0, 5);
474 imm2
= bits (inst2
, 0, 10);
475 j1
= bit (inst2
, 13);
476 j2
= bit (inst2
, 11);
478 offset
= (sign
<< 20) + (j2
<< 19) + (j1
<< 18);
479 offset
+= (imm1
<< 12) + (imm2
<< 1);
481 nextpc
= pc_val
+ offset
;
485 else if ((inst1
& 0xfe50) == 0xe810)
487 /* Load multiple or RFE. */
488 int rn
, offset
, load_pc
= 1;
490 rn
= bits (inst1
, 0, 3);
491 if (bit (inst1
, 7) && !bit (inst1
, 8))
494 if (!bit (inst2
, 15))
496 offset
= bitcount (inst2
) * 4 - 4;
498 else if (!bit (inst1
, 7) && bit (inst1
, 8))
501 if (!bit (inst2
, 15))
505 else if (bit (inst1
, 7) && bit (inst1
, 8))
510 else if (!bit (inst1
, 7) && !bit (inst1
, 8))
520 CORE_ADDR addr
= regcache_raw_get_unsigned (regcache
, rn
);
521 nextpc
= self
->ops
->read_mem_uint (addr
+ offset
, 4, byte_order
);
524 else if ((inst1
& 0xffef) == 0xea4f && (inst2
& 0xfff0) == 0x0f00)
526 /* MOV PC or MOVS PC. */
527 nextpc
= regcache_raw_get_unsigned (regcache
, bits (inst2
, 0, 3));
528 nextpc
= MAKE_THUMB_ADDR (nextpc
);
530 else if ((inst1
& 0xff70) == 0xf850 && (inst2
& 0xf000) == 0xf000)
536 rn
= bits (inst1
, 0, 3);
537 base
= regcache_raw_get_unsigned (regcache
, rn
);
538 if (rn
== ARM_PC_REGNUM
)
540 base
= (base
+ 4) & ~(CORE_ADDR
) 0x3;
542 base
+= bits (inst2
, 0, 11);
544 base
-= bits (inst2
, 0, 11);
546 else if (bit (inst1
, 7))
547 base
+= bits (inst2
, 0, 11);
548 else if (bit (inst2
, 11))
553 base
+= bits (inst2
, 0, 7);
555 base
-= bits (inst2
, 0, 7);
558 else if ((inst2
& 0x0fc0) == 0x0000)
560 int shift
= bits (inst2
, 4, 5), rm
= bits (inst2
, 0, 3);
561 base
+= regcache_raw_get_unsigned (regcache
, rm
) << shift
;
569 = self
->ops
->read_mem_uint (base
, 4, byte_order
);
571 else if ((inst1
& 0xfff0) == 0xe8d0 && (inst2
& 0xfff0) == 0xf000)
574 CORE_ADDR tbl_reg
, table
, offset
, length
;
576 tbl_reg
= bits (inst1
, 0, 3);
578 table
= pc
+ 4; /* Regcache copy of PC isn't right yet. */
580 table
= regcache_raw_get_unsigned (regcache
, tbl_reg
);
582 offset
= regcache_raw_get_unsigned (regcache
, bits (inst2
, 0, 3));
583 length
= 2 * self
->ops
->read_mem_uint (table
+ offset
, 1, byte_order
);
584 nextpc
= pc_val
+ length
;
586 else if ((inst1
& 0xfff0) == 0xe8d0 && (inst2
& 0xfff0) == 0xf010)
589 CORE_ADDR tbl_reg
, table
, offset
, length
;
591 tbl_reg
= bits (inst1
, 0, 3);
593 table
= pc
+ 4; /* Regcache copy of PC isn't right yet. */
595 table
= regcache_raw_get_unsigned (regcache
, tbl_reg
);
597 offset
= 2 * regcache_raw_get_unsigned (regcache
, bits (inst2
, 0, 3));
598 length
= 2 * self
->ops
->read_mem_uint (table
+ offset
, 2, byte_order
);
599 nextpc
= pc_val
+ length
;
602 else if ((inst1
& 0xff00) == 0x4700) /* bx REG, blx REG */
604 if (bits (inst1
, 3, 6) == 0x0f)
605 nextpc
= UNMAKE_THUMB_ADDR (pc_val
);
607 nextpc
= regcache_raw_get_unsigned (regcache
, bits (inst1
, 3, 6));
609 else if ((inst1
& 0xff87) == 0x4687) /* mov pc, REG */
611 if (bits (inst1
, 3, 6) == 0x0f)
614 nextpc
= regcache_raw_get_unsigned (regcache
, bits (inst1
, 3, 6));
616 nextpc
= MAKE_THUMB_ADDR (nextpc
);
618 else if ((inst1
& 0xf500) == 0xb100)
621 int imm
= (bit (inst1
, 9) << 6) + (bits (inst1
, 3, 7) << 1);
622 ULONGEST reg
= regcache_raw_get_unsigned (regcache
, bits (inst1
, 0, 2));
624 if (bit (inst1
, 11) && reg
!= 0)
625 nextpc
= pc_val
+ imm
;
626 else if (!bit (inst1
, 11) && reg
== 0)
627 nextpc
= pc_val
+ imm
;
630 VEC_safe_push (CORE_ADDR
, next_pcs
, nextpc
);
635 /* Get the raw next possible addresses. PC in next_pcs is the current program
636 counter, which is assumed to be executing in ARM mode.
638 The values returned have the execution state of the next instruction
639 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
640 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
641 address in GDB and arm_addr_bits_remove in GDBServer. */
643 static VEC (CORE_ADDR
) *
644 arm_get_next_pcs_raw (struct arm_get_next_pcs
*self
, CORE_ADDR pc
)
646 int byte_order
= self
->byte_order
;
647 unsigned long pc_val
;
648 unsigned long this_instr
= 0;
649 unsigned long status
;
651 struct regcache
*regcache
= self
->regcache
;
652 VEC (CORE_ADDR
) *next_pcs
= NULL
;
654 pc_val
= (unsigned long) pc
;
655 this_instr
= self
->ops
->read_mem_uint (pc
, 4, byte_order
);
657 status
= regcache_raw_get_unsigned (regcache
, ARM_PS_REGNUM
);
658 nextpc
= (CORE_ADDR
) (pc_val
+ 4); /* Default case */
660 if (bits (this_instr
, 28, 31) == INST_NV
)
661 switch (bits (this_instr
, 24, 27))
666 /* Branch with Link and change to Thumb. */
667 nextpc
= BranchDest (pc
, this_instr
);
668 nextpc
|= bit (this_instr
, 24) << 1;
669 nextpc
= MAKE_THUMB_ADDR (nextpc
);
675 /* Coprocessor register transfer. */
676 if (bits (this_instr
, 12, 15) == 15)
677 error (_("Invalid update to pc in instruction"));
680 else if (condition_true (bits (this_instr
, 28, 31), status
))
682 switch (bits (this_instr
, 24, 27))
685 case 0x1: /* data processing */
689 unsigned long operand1
, operand2
, result
= 0;
693 if (bits (this_instr
, 12, 15) != 15)
696 if (bits (this_instr
, 22, 25) == 0
697 && bits (this_instr
, 4, 7) == 9) /* multiply */
698 error (_("Invalid update to pc in instruction"));
700 /* BX <reg>, BLX <reg> */
701 if (bits (this_instr
, 4, 27) == 0x12fff1
702 || bits (this_instr
, 4, 27) == 0x12fff3)
704 rn
= bits (this_instr
, 0, 3);
705 nextpc
= ((rn
== ARM_PC_REGNUM
)
707 : regcache_raw_get_unsigned (regcache
, rn
));
709 VEC_safe_push (CORE_ADDR
, next_pcs
, nextpc
);
713 /* Multiply into PC. */
714 c
= (status
& FLAG_C
) ? 1 : 0;
715 rn
= bits (this_instr
, 16, 19);
716 operand1
= ((rn
== ARM_PC_REGNUM
)
718 : regcache_raw_get_unsigned (regcache
, rn
));
720 if (bit (this_instr
, 25))
722 unsigned long immval
= bits (this_instr
, 0, 7);
723 unsigned long rotate
= 2 * bits (this_instr
, 8, 11);
724 operand2
= ((immval
>> rotate
) | (immval
<< (32 - rotate
)))
727 else /* operand 2 is a shifted register. */
728 operand2
= shifted_reg_val (regcache
, this_instr
, c
,
731 switch (bits (this_instr
, 21, 24))
734 result
= operand1
& operand2
;
738 result
= operand1
^ operand2
;
742 result
= operand1
- operand2
;
746 result
= operand2
- operand1
;
750 result
= operand1
+ operand2
;
754 result
= operand1
+ operand2
+ c
;
758 result
= operand1
- operand2
+ c
;
762 result
= operand2
- operand1
+ c
;
768 case 0xb: /* tst, teq, cmp, cmn */
769 result
= (unsigned long) nextpc
;
773 result
= operand1
| operand2
;
777 /* Always step into a function. */
782 result
= operand1
& ~operand2
;
789 nextpc
= self
->ops
->addr_bits_remove (self
, result
);
794 case 0x5: /* data transfer */
797 if (bits (this_instr
, 25, 27) == 0x3 && bit (this_instr
, 4) == 1)
799 /* Media instructions and architecturally undefined
804 if (bit (this_instr
, 20))
807 if (bits (this_instr
, 12, 15) == 15)
813 if (bit (this_instr
, 22))
814 error (_("Invalid update to pc in instruction"));
816 /* byte write to PC */
817 rn
= bits (this_instr
, 16, 19);
818 base
= ((rn
== ARM_PC_REGNUM
)
820 : regcache_raw_get_unsigned (regcache
, rn
));
822 if (bit (this_instr
, 24))
825 int c
= (status
& FLAG_C
) ? 1 : 0;
826 unsigned long offset
=
827 (bit (this_instr
, 25)
828 ? shifted_reg_val (regcache
, this_instr
, c
,
830 : bits (this_instr
, 0, 11));
832 if (bit (this_instr
, 23))
838 = (CORE_ADDR
) self
->ops
->read_mem_uint ((CORE_ADDR
) base
,
845 case 0x9: /* block transfer */
846 if (bit (this_instr
, 20))
849 if (bit (this_instr
, 15))
853 CORE_ADDR rn_val_offset
= 0;
855 = regcache_raw_get_unsigned (regcache
,
856 bits (this_instr
, 16, 19));
858 if (bit (this_instr
, 23))
861 unsigned long reglist
= bits (this_instr
, 0, 14);
862 offset
= bitcount (reglist
) * 4;
863 if (bit (this_instr
, 24)) /* pre */
866 else if (bit (this_instr
, 24))
869 rn_val_offset
= rn_val
+ offset
;
870 nextpc
= (CORE_ADDR
) self
->ops
->read_mem_uint (rn_val_offset
,
876 case 0xb: /* branch & link */
877 case 0xa: /* branch */
879 nextpc
= BranchDest (pc
, this_instr
);
885 case 0xe: /* coproc ops */
889 nextpc
= self
->ops
->syscall_next_pc (self
, pc
);
894 error (_("Bad bit-field extraction"));
899 VEC_safe_push (CORE_ADDR
, next_pcs
, nextpc
);
903 /* See arm-get-next-pcs.h. */
906 arm_get_next_pcs (struct arm_get_next_pcs
*self
, CORE_ADDR pc
)
908 VEC (CORE_ADDR
) *next_pcs
= NULL
;
910 if (self
->ops
->is_thumb (self
))
912 next_pcs
= thumb_deal_with_atomic_sequence_raw (self
, pc
);
913 if (next_pcs
== NULL
)
914 next_pcs
= thumb_get_next_pcs_raw (self
, pc
);
918 next_pcs
= arm_deal_with_atomic_sequence_raw (self
, pc
);
919 if (next_pcs
== NULL
)
920 next_pcs
= arm_get_next_pcs_raw (self
, pc
);