1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2014 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
31 #include "reggroups.h"
34 #include "arch-utils.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
40 #include "dwarf2-frame.h"
42 #include "prologue-value.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
49 #include "gdb/sim-arm.h"
52 #include "coff/internal.h"
58 #include "record-full.h"
60 #include "features/arm-with-m.c"
61 #include "features/arm-with-m-fpa-layout.c"
62 #include "features/arm-with-m-vfp-d16.c"
63 #include "features/arm-with-iwmmxt.c"
64 #include "features/arm-with-vfpv2.c"
65 #include "features/arm-with-vfpv3.c"
66 #include "features/arm-with-neon.c"
70 /* Macros for setting and testing a bit in a minimal symbol that marks
71 it as Thumb function. The MSB of the minimal symbol's "info" field
72 is used for this purpose.
74 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
75 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
/* Mark minimal symbol MSYM as a Thumb function entry point (sets the
   "special" bit held in target flag 1; see comment above).  */
77 #define MSYMBOL_SET_SPECIAL(msym) \
78 MSYMBOL_TARGET_FLAG_1 (msym) = 1
/* Nonzero if minimal symbol MSYM was marked as a Thumb entry point.  */
80 #define MSYMBOL_IS_SPECIAL(msym) \
81 MSYMBOL_TARGET_FLAG_1 (msym)
83 /* Per-objfile data used for mapping symbols. */
84 static const struct objfile_data
*arm_objfile_data_key
;
86 struct arm_mapping_symbol
91 typedef struct arm_mapping_symbol arm_mapping_symbol_s
;
92 DEF_VEC_O(arm_mapping_symbol_s
);
94 struct arm_per_objfile
96 VEC(arm_mapping_symbol_s
) **section_maps
;
99 /* The list of available "set arm ..." and "show arm ..." commands. */
100 static struct cmd_list_element
*setarmcmdlist
= NULL
;
101 static struct cmd_list_element
*showarmcmdlist
= NULL
;
103 /* The type of floating-point to use. Keep this in sync with enum
104 arm_float_model, and the help string in _initialize_arm_tdep. */
105 static const char *const fp_model_strings
[] =
115 /* A variable that can be configured by the user. */
116 static enum arm_float_model arm_fp_model
= ARM_FLOAT_AUTO
;
117 static const char *current_fp_model
= "auto";
119 /* The ABI to use. Keep this in sync with arm_abi_kind. */
120 static const char *const arm_abi_strings
[] =
128 /* A variable that can be configured by the user. */
129 static enum arm_abi_kind arm_abi_global
= ARM_ABI_AUTO
;
130 static const char *arm_abi_string
= "auto";
132 /* The execution mode to assume. */
133 static const char *const arm_mode_strings
[] =
141 static const char *arm_fallback_mode_string
= "auto";
142 static const char *arm_force_mode_string
= "auto";
144 /* Internal override of the execution mode. -1 means no override,
145 0 means override to ARM mode, 1 means override to Thumb mode.
146 The effect is the same as if arm_force_mode has been set by the
147 user (except the internal override has precedence over a user's
148 arm_force_mode override). */
149 static int arm_override_mode
= -1;
151 /* Number of different reg name sets (options). */
152 static int num_disassembly_options
;
154 /* The standard register names, and all the valid aliases for them. Note
155 that `fp', `sp' and `pc' are not added in this alias list, because they
156 have been added as builtin user registers in
157 std-regs.c:_initialize_frame_reg. */
162 } arm_register_aliases
[] = {
163 /* Basic register numbers. */
180 /* Synonyms (argument and variable registers). */
193 /* Other platform-specific names for r9. */
199 /* Names used by GCC (not listed in the ARM EABI). */
201 /* A special name from the older ATPCS. */
205 static const char *const arm_register_names
[] =
206 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
207 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
208 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
209 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
210 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
211 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
212 "fps", "cpsr" }; /* 24 25 */
214 /* Valid register name styles. */
215 static const char **valid_disassembly_styles
;
217 /* Disassembly style to use. Default to "std" register names. */
218 static const char *disassembly_style
;
220 /* This is used to keep the bfd arch_info in sync with the disassembly
   style.  */
222 static void set_disassembly_style_sfunc(char *, int,
223 struct cmd_list_element
*);
224 static void set_disassembly_style (void);
226 static void convert_from_extended (const struct floatformat
*, const void *,
228 static void convert_to_extended (const struct floatformat
*, void *,
231 static enum register_status
arm_neon_quad_read (struct gdbarch
*gdbarch
,
232 struct regcache
*regcache
,
233 int regnum
, gdb_byte
*buf
);
234 static void arm_neon_quad_write (struct gdbarch
*gdbarch
,
235 struct regcache
*regcache
,
236 int regnum
, const gdb_byte
*buf
);
238 static int thumb_insn_size (unsigned short inst1
);
240 struct arm_prologue_cache
242 /* The stack pointer at the time this frame was created; i.e. the
243 caller's stack pointer when this function was called. It is used
244 to identify this frame. */
247 /* The frame base for this frame is just prev_sp - frame size.
248 FRAMESIZE is the distance from the frame pointer to the
249 initial stack pointer. */
253 /* The register used to hold the frame pointer for this frame. */
256 /* Saved register offsets. */
257 struct trad_frame_saved_reg
*saved_regs
;
260 static CORE_ADDR
arm_analyze_prologue (struct gdbarch
*gdbarch
,
261 CORE_ADDR prologue_start
,
262 CORE_ADDR prologue_end
,
263 struct arm_prologue_cache
*cache
);
265 /* Architecture version for displaced stepping.  This affects the behaviour of
266 certain instructions, and really should not be hard-wired.  */
268 #define DISPLACED_STEPPING_ARCH_VERSION 5
270 /* Addresses for calling Thumb functions have the bit 0 set.
271 Here are some macros to test, set, or clear bit 0 of addresses. */
/* Nonzero if ADDR has its Thumb bit (bit 0) set.  */
272 #define IS_THUMB_ADDR(addr) ((addr) & 1)
/* Return ADDR with its Thumb bit set.  */
273 #define MAKE_THUMB_ADDR(addr) ((addr) | 1)
/* Return ADDR with its Thumb bit cleared.  */
274 #define UNMAKE_THUMB_ADDR(addr) ((addr) & ~1)
276 /* Set to true if the 32-bit mode is in use. */
280 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
283 arm_psr_thumb_bit (struct gdbarch
*gdbarch
)
285 if (gdbarch_tdep (gdbarch
)->is_m
)
291 /* Determine if FRAME is executing in Thumb mode. */
294 arm_frame_is_thumb (struct frame_info
*frame
)
297 ULONGEST t_bit
= arm_psr_thumb_bit (get_frame_arch (frame
));
299 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
300 directly (from a signal frame or dummy frame) or by interpreting
301 the saved LR (from a prologue or DWARF frame). So consult it and
302 trust the unwinders. */
303 cpsr
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
305 return (cpsr
& t_bit
) != 0;
308 /* Callback for VEC_lower_bound. */
311 arm_compare_mapping_symbols (const struct arm_mapping_symbol
*lhs
,
312 const struct arm_mapping_symbol
*rhs
)
314 return lhs
->value
< rhs
->value
;
317 /* Search for the mapping symbol covering MEMADDR. If one is found,
318 return its type. Otherwise, return 0. If START is non-NULL,
319 set *START to the location of the mapping symbol. */
322 arm_find_mapping_symbol (CORE_ADDR memaddr
, CORE_ADDR
*start
)
324 struct obj_section
*sec
;
326 /* If there are mapping symbols, consult them. */
327 sec
= find_pc_section (memaddr
);
330 struct arm_per_objfile
*data
;
331 VEC(arm_mapping_symbol_s
) *map
;
332 struct arm_mapping_symbol map_key
= { memaddr
- obj_section_addr (sec
),
336 data
= objfile_data (sec
->objfile
, arm_objfile_data_key
);
339 map
= data
->section_maps
[sec
->the_bfd_section
->index
];
340 if (!VEC_empty (arm_mapping_symbol_s
, map
))
342 struct arm_mapping_symbol
*map_sym
;
344 idx
= VEC_lower_bound (arm_mapping_symbol_s
, map
, &map_key
,
345 arm_compare_mapping_symbols
);
347 /* VEC_lower_bound finds the earliest ordered insertion
348 point. If the following symbol starts at this exact
349 address, we use that; otherwise, the preceding
350 mapping symbol covers this address. */
351 if (idx
< VEC_length (arm_mapping_symbol_s
, map
))
353 map_sym
= VEC_index (arm_mapping_symbol_s
, map
, idx
);
354 if (map_sym
->value
== map_key
.value
)
357 *start
= map_sym
->value
+ obj_section_addr (sec
);
358 return map_sym
->type
;
364 map_sym
= VEC_index (arm_mapping_symbol_s
, map
, idx
- 1);
366 *start
= map_sym
->value
+ obj_section_addr (sec
);
367 return map_sym
->type
;
376 /* Determine if the program counter specified in MEMADDR is in a Thumb
377 function. This function should be called for addresses unrelated to
378 any executing frame; otherwise, prefer arm_frame_is_thumb. */
381 arm_pc_is_thumb (struct gdbarch
*gdbarch
, CORE_ADDR memaddr
)
383 struct bound_minimal_symbol sym
;
385 struct displaced_step_closure
* dsc
386 = get_displaced_step_closure_by_addr(memaddr
);
388 /* If checking the mode of displaced instruction in copy area, the mode
389 should be determined by instruction on the original address. */
393 fprintf_unfiltered (gdb_stdlog
,
394 "displaced: check mode of %.8lx instead of %.8lx\n",
395 (unsigned long) dsc
->insn_addr
,
396 (unsigned long) memaddr
);
397 memaddr
= dsc
->insn_addr
;
400 /* If bit 0 of the address is set, assume this is a Thumb address. */
401 if (IS_THUMB_ADDR (memaddr
))
404 /* Respect internal mode override if active. */
405 if (arm_override_mode
!= -1)
406 return arm_override_mode
;
408 /* If the user wants to override the symbol table, let him. */
409 if (strcmp (arm_force_mode_string
, "arm") == 0)
411 if (strcmp (arm_force_mode_string
, "thumb") == 0)
414 /* ARM v6-M and v7-M are always in Thumb mode. */
415 if (gdbarch_tdep (gdbarch
)->is_m
)
418 /* If there are mapping symbols, consult them. */
419 type
= arm_find_mapping_symbol (memaddr
, NULL
);
423 /* Thumb functions have a "special" bit set in minimal symbols. */
424 sym
= lookup_minimal_symbol_by_pc (memaddr
);
426 return (MSYMBOL_IS_SPECIAL (sym
.minsym
));
428 /* If the user wants to override the fallback mode, let them. */
429 if (strcmp (arm_fallback_mode_string
, "arm") == 0)
431 if (strcmp (arm_fallback_mode_string
, "thumb") == 0)
434 /* If we couldn't find any symbol, but we're talking to a running
435 target, then trust the current value of $cpsr. This lets
436 "display/i $pc" always show the correct mode (though if there is
437 a symbol table we will not reach here, so it still may not be
438 displayed in the mode it will be executed). */
439 if (target_has_registers
)
440 return arm_frame_is_thumb (get_current_frame ());
442 /* Otherwise we're out of luck; we assume ARM. */
446 /* Remove useless bits from addresses in a running program. */
448 arm_addr_bits_remove (struct gdbarch
*gdbarch
, CORE_ADDR val
)
450 /* On M-profile devices, do not strip the low bit from EXC_RETURN
451 (the magic exception return address). */
452 if (gdbarch_tdep (gdbarch
)->is_m
453 && (val
& 0xfffffff0) == 0xfffffff0)
457 return UNMAKE_THUMB_ADDR (val
);
459 return (val
& 0x03fffffc);
462 /* Return 1 if PC is the start of a compiler helper function which
463 can be safely ignored during prologue skipping. IS_THUMB is true
464 if the function is known to be a Thumb function due to the way it
   was called.  */
467 skip_prologue_function (struct gdbarch
*gdbarch
, CORE_ADDR pc
, int is_thumb
)
469 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
470 struct bound_minimal_symbol msym
;
472 msym
= lookup_minimal_symbol_by_pc (pc
);
473 if (msym
.minsym
!= NULL
474 && BMSYMBOL_VALUE_ADDRESS (msym
) == pc
475 && MSYMBOL_LINKAGE_NAME (msym
.minsym
) != NULL
)
477 const char *name
= MSYMBOL_LINKAGE_NAME (msym
.minsym
);
479 /* The GNU linker's Thumb call stub to foo is named
481 if (strstr (name
, "_from_thumb") != NULL
)
484 /* On soft-float targets, __truncdfsf2 is called to convert promoted
485 arguments to their argument types in non-prototyped
   calls.  */
487 if (strncmp (name
, "__truncdfsf2", strlen ("__truncdfsf2")) == 0)
489 if (strncmp (name
, "__aeabi_d2f", strlen ("__aeabi_d2f")) == 0)
492 /* Internal functions related to thread-local storage. */
493 if (strncmp (name
, "__tls_get_addr", strlen ("__tls_get_addr")) == 0)
495 if (strncmp (name
, "__aeabi_read_tp", strlen ("__aeabi_read_tp")) == 0)
500 /* If we run against a stripped glibc, we may be unable to identify
501 special functions by name. Check for one important case,
502 __aeabi_read_tp, by comparing the *code* against the default
503 implementation (this is hand-written ARM assembler in glibc). */
506 && read_memory_unsigned_integer (pc
, 4, byte_order_for_code
)
507 == 0xe3e00a0f /* mov r0, #0xffff0fff */
508 && read_memory_unsigned_integer (pc
+ 4, 4, byte_order_for_code
)
509 == 0xe240f01f) /* sub pc, r0, #31 */
/* Support routines for instruction parsing.  */

/* Mask covering the low (X + 1) bits.  NOTE(review): the shift is
   undefined for X >= 31 on hosts with 32-bit long; the field widths
   passed in this file are all well below that -- confirm before any
   wider use.  */
#define submask(x) ((1L << ((x) + 1)) - 1)
/* Bit ST of OBJ, as 0 or 1.  */
#define bit(obj,st) (((obj) >> (st)) & 1)
/* Bits ST..FN (inclusive) of OBJ, right-justified.  */
#define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
/* Bits ST..FN of OBJ, sign-extended from bit FN.  */
#define sbits(obj,st,fn) \
  ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
/* Target of an ARM-mode branch: ADDR plus the 8-byte pipeline offset
   plus the 24-bit signed word offset encoded in INSTR.  */
#define BranchDest(addr,instr) \
  ((CORE_ADDR) (((unsigned long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))
525 /* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
526 the first 16-bit of instruction, and INSN2 is the second 16-bit of
   instruction.  The result is the 16-bit immediate assembled from the
   imm4 (insn1[3:0]), i (insn1[10]), imm3 (insn2[14:12]) and imm8
   (insn2[7:0]) fields.  */
528 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
529 ((bits ((insn1), 0, 3) << 12) \
530 | (bits ((insn1), 10, 10) << 11) \
531 | (bits ((insn2), 12, 14) << 8) \
532 | bits ((insn2), 0, 7))
534 /* Extract the immediate from instruction movw/movt of encoding A.  INSN is
535 the 32-bit instruction.  The result is the 16-bit immediate assembled
   from the imm4 (insn[19:16]) and imm12 (insn[11:0]) fields.  */
536 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
537 ((bits ((insn), 16, 19) << 12) \
538 | bits ((insn), 0, 11))
540 /* Decode immediate value; implements ThumbExpandImmediate pseudo-op. */
543 thumb_expand_immediate (unsigned int imm
)
545 unsigned int count
= imm
>> 7;
553 return (imm
& 0xff) | ((imm
& 0xff) << 16);
555 return ((imm
& 0xff) << 8) | ((imm
& 0xff) << 24);
557 return (imm
& 0xff) | ((imm
& 0xff) << 8)
558 | ((imm
& 0xff) << 16) | ((imm
& 0xff) << 24);
561 return (0x80 | (imm
& 0x7f)) << (32 - count
);
564 /* Return 1 if the 16-bit Thumb instruction INST might change
565 control flow, 0 otherwise. */
568 thumb_instruction_changes_pc (unsigned short inst
)
570 if ((inst
& 0xff00) == 0xbd00) /* pop {rlist, pc} */
573 if ((inst
& 0xf000) == 0xd000) /* conditional branch */
576 if ((inst
& 0xf800) == 0xe000) /* unconditional branch */
579 if ((inst
& 0xff00) == 0x4700) /* bx REG, blx REG */
582 if ((inst
& 0xff87) == 0x4687) /* mov pc, REG */
585 if ((inst
& 0xf500) == 0xb100) /* CBNZ or CBZ. */
591 /* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
592 might change control flow, 0 otherwise. */
595 thumb2_instruction_changes_pc (unsigned short inst1
, unsigned short inst2
)
597 if ((inst1
& 0xf800) == 0xf000 && (inst2
& 0x8000) == 0x8000)
599 /* Branches and miscellaneous control instructions. */
601 if ((inst2
& 0x1000) != 0 || (inst2
& 0xd001) == 0xc000)
606 else if (inst1
== 0xf3de && (inst2
& 0xff00) == 0x3f00)
608 /* SUBS PC, LR, #imm8. */
611 else if ((inst2
& 0xd000) == 0x8000 && (inst1
& 0x0380) != 0x0380)
613 /* Conditional branch. */
620 if ((inst1
& 0xfe50) == 0xe810)
622 /* Load multiple or RFE. */
624 if (bit (inst1
, 7) && !bit (inst1
, 8))
630 else if (!bit (inst1
, 7) && bit (inst1
, 8))
636 else if (bit (inst1
, 7) && bit (inst1
, 8))
641 else if (!bit (inst1
, 7) && !bit (inst1
, 8))
650 if ((inst1
& 0xffef) == 0xea4f && (inst2
& 0xfff0) == 0x0f00)
652 /* MOV PC or MOVS PC. */
656 if ((inst1
& 0xff70) == 0xf850 && (inst2
& 0xf000) == 0xf000)
659 if (bits (inst1
, 0, 3) == 15)
665 if ((inst2
& 0x0fc0) == 0x0000)
671 if ((inst1
& 0xfff0) == 0xe8d0 && (inst2
& 0xfff0) == 0xf000)
677 if ((inst1
& 0xfff0) == 0xe8d0 && (inst2
& 0xfff0) == 0xf010)
686 /* Return 1 if the 16-bit Thumb instruction INSN restores SP in
687 epilogue, 0 otherwise. */
690 thumb_instruction_restores_sp (unsigned short insn
)
692 return (insn
== 0x46bd /* mov sp, r7 */
693 || (insn
& 0xff80) == 0xb000 /* add sp, imm */
694 || (insn
& 0xfe00) == 0xbc00); /* pop <registers> */
697 /* Analyze a Thumb prologue, looking for a recognizable stack frame
698 and frame pointer. Scan until we encounter a store that could
699 clobber the stack frame unexpectedly, or an unknown instruction.
700 Return the last address which is definitely safe to skip for an
701 initial breakpoint. */
704 thumb_analyze_prologue (struct gdbarch
*gdbarch
,
705 CORE_ADDR start
, CORE_ADDR limit
,
706 struct arm_prologue_cache
*cache
)
708 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
709 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
712 struct pv_area
*stack
;
713 struct cleanup
*back_to
;
715 CORE_ADDR unrecognized_pc
= 0;
717 for (i
= 0; i
< 16; i
++)
718 regs
[i
] = pv_register (i
, 0);
719 stack
= make_pv_area (ARM_SP_REGNUM
, gdbarch_addr_bit (gdbarch
));
720 back_to
= make_cleanup_free_pv_area (stack
);
722 while (start
< limit
)
726 insn
= read_memory_unsigned_integer (start
, 2, byte_order_for_code
);
728 if ((insn
& 0xfe00) == 0xb400) /* push { rlist } */
733 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
736 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
737 whether to save LR (R14). */
738 mask
= (insn
& 0xff) | ((insn
& 0x100) << 6);
740 /* Calculate offsets of saved R0-R7 and LR. */
741 for (regno
= ARM_LR_REGNUM
; regno
>= 0; regno
--)
742 if (mask
& (1 << regno
))
744 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
],
746 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 4, regs
[regno
]);
749 else if ((insn
& 0xff80) == 0xb080) /* sub sp, #imm */
751 offset
= (insn
& 0x7f) << 2; /* get scaled offset */
752 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
],
755 else if (thumb_instruction_restores_sp (insn
))
757 /* Don't scan past the epilogue. */
760 else if ((insn
& 0xf800) == 0xa800) /* add Rd, sp, #imm */
761 regs
[bits (insn
, 8, 10)] = pv_add_constant (regs
[ARM_SP_REGNUM
],
763 else if ((insn
& 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
764 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
))
765 regs
[bits (insn
, 0, 2)] = pv_add_constant (regs
[bits (insn
, 3, 5)],
767 else if ((insn
& 0xf800) == 0x3000 /* add Rd, #imm */
768 && pv_is_register (regs
[bits (insn
, 8, 10)], ARM_SP_REGNUM
))
769 regs
[bits (insn
, 8, 10)] = pv_add_constant (regs
[bits (insn
, 8, 10)],
771 else if ((insn
& 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
772 && pv_is_register (regs
[bits (insn
, 6, 8)], ARM_SP_REGNUM
)
773 && pv_is_constant (regs
[bits (insn
, 3, 5)]))
774 regs
[bits (insn
, 0, 2)] = pv_add (regs
[bits (insn
, 3, 5)],
775 regs
[bits (insn
, 6, 8)]);
776 else if ((insn
& 0xff00) == 0x4400 /* add Rd, Rm */
777 && pv_is_constant (regs
[bits (insn
, 3, 6)]))
779 int rd
= (bit (insn
, 7) << 3) + bits (insn
, 0, 2);
780 int rm
= bits (insn
, 3, 6);
781 regs
[rd
] = pv_add (regs
[rd
], regs
[rm
]);
783 else if ((insn
& 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
785 int dst_reg
= (insn
& 0x7) + ((insn
& 0x80) >> 4);
786 int src_reg
= (insn
& 0x78) >> 3;
787 regs
[dst_reg
] = regs
[src_reg
];
789 else if ((insn
& 0xf800) == 0x9000) /* str rd, [sp, #off] */
791 /* Handle stores to the stack. Normally pushes are used,
792 but with GCC -mtpcs-frame, there may be other stores
793 in the prologue to create the frame. */
794 int regno
= (insn
>> 8) & 0x7;
797 offset
= (insn
& 0xff) << 2;
798 addr
= pv_add_constant (regs
[ARM_SP_REGNUM
], offset
);
800 if (pv_area_store_would_trash (stack
, addr
))
803 pv_area_store (stack
, addr
, 4, regs
[regno
]);
805 else if ((insn
& 0xf800) == 0x6000) /* str rd, [rn, #off] */
807 int rd
= bits (insn
, 0, 2);
808 int rn
= bits (insn
, 3, 5);
811 offset
= bits (insn
, 6, 10) << 2;
812 addr
= pv_add_constant (regs
[rn
], offset
);
814 if (pv_area_store_would_trash (stack
, addr
))
817 pv_area_store (stack
, addr
, 4, regs
[rd
]);
819 else if (((insn
& 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
820 || (insn
& 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
821 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
))
822 /* Ignore stores of argument registers to the stack. */
824 else if ((insn
& 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
825 && pv_is_register (regs
[bits (insn
, 8, 10)], ARM_SP_REGNUM
))
826 /* Ignore block loads from the stack, potentially copying
827 parameters from memory. */
829 else if ((insn
& 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
830 || ((insn
& 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
831 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
)))
832 /* Similarly ignore single loads from the stack. */
834 else if ((insn
& 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
835 || (insn
& 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
836 /* Skip register copies, i.e. saves to another register
837 instead of the stack. */
839 else if ((insn
& 0xf800) == 0x2000) /* movs Rd, #imm */
840 /* Recognize constant loads; even with small stacks these are necessary
842 regs
[bits (insn
, 8, 10)] = pv_constant (bits (insn
, 0, 7));
843 else if ((insn
& 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
845 /* Constant pool loads, for the same reason. */
846 unsigned int constant
;
849 loc
= start
+ 4 + bits (insn
, 0, 7) * 4;
850 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
851 regs
[bits (insn
, 8, 10)] = pv_constant (constant
);
853 else if (thumb_insn_size (insn
) == 4) /* 32-bit Thumb-2 instructions. */
855 unsigned short inst2
;
857 inst2
= read_memory_unsigned_integer (start
+ 2, 2,
858 byte_order_for_code
);
860 if ((insn
& 0xf800) == 0xf000 && (inst2
& 0xe800) == 0xe800)
862 /* BL, BLX. Allow some special function calls when
863 skipping the prologue; GCC generates these before
864 storing arguments to the stack. */
866 int j1
, j2
, imm1
, imm2
;
868 imm1
= sbits (insn
, 0, 10);
869 imm2
= bits (inst2
, 0, 10);
870 j1
= bit (inst2
, 13);
871 j2
= bit (inst2
, 11);
873 offset
= ((imm1
<< 12) + (imm2
<< 1));
874 offset
^= ((!j2
) << 22) | ((!j1
) << 23);
876 nextpc
= start
+ 4 + offset
;
877 /* For BLX make sure to clear the low bits. */
878 if (bit (inst2
, 12) == 0)
879 nextpc
= nextpc
& 0xfffffffc;
881 if (!skip_prologue_function (gdbarch
, nextpc
,
882 bit (inst2
, 12) != 0))
886 else if ((insn
& 0xffd0) == 0xe900 /* stmdb Rn{!},
888 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
890 pv_t addr
= regs
[bits (insn
, 0, 3)];
893 if (pv_area_store_would_trash (stack
, addr
))
896 /* Calculate offsets of saved registers. */
897 for (regno
= ARM_LR_REGNUM
; regno
>= 0; regno
--)
898 if (inst2
& (1 << regno
))
900 addr
= pv_add_constant (addr
, -4);
901 pv_area_store (stack
, addr
, 4, regs
[regno
]);
905 regs
[bits (insn
, 0, 3)] = addr
;
908 else if ((insn
& 0xff50) == 0xe940 /* strd Rt, Rt2,
910 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
912 int regno1
= bits (inst2
, 12, 15);
913 int regno2
= bits (inst2
, 8, 11);
914 pv_t addr
= regs
[bits (insn
, 0, 3)];
916 offset
= inst2
& 0xff;
918 addr
= pv_add_constant (addr
, offset
);
920 addr
= pv_add_constant (addr
, -offset
);
922 if (pv_area_store_would_trash (stack
, addr
))
925 pv_area_store (stack
, addr
, 4, regs
[regno1
]);
926 pv_area_store (stack
, pv_add_constant (addr
, 4),
930 regs
[bits (insn
, 0, 3)] = addr
;
933 else if ((insn
& 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
934 && (inst2
& 0x0c00) == 0x0c00
935 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
937 int regno
= bits (inst2
, 12, 15);
938 pv_t addr
= regs
[bits (insn
, 0, 3)];
940 offset
= inst2
& 0xff;
942 addr
= pv_add_constant (addr
, offset
);
944 addr
= pv_add_constant (addr
, -offset
);
946 if (pv_area_store_would_trash (stack
, addr
))
949 pv_area_store (stack
, addr
, 4, regs
[regno
]);
952 regs
[bits (insn
, 0, 3)] = addr
;
955 else if ((insn
& 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
956 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
958 int regno
= bits (inst2
, 12, 15);
961 offset
= inst2
& 0xfff;
962 addr
= pv_add_constant (regs
[bits (insn
, 0, 3)], offset
);
964 if (pv_area_store_would_trash (stack
, addr
))
967 pv_area_store (stack
, addr
, 4, regs
[regno
]);
970 else if ((insn
& 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
971 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
972 /* Ignore stores of argument registers to the stack. */
975 else if ((insn
& 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
976 && (inst2
& 0x0d00) == 0x0c00
977 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
978 /* Ignore stores of argument registers to the stack. */
981 else if ((insn
& 0xffd0) == 0xe890 /* ldmia Rn[!],
983 && (inst2
& 0x8000) == 0x0000
984 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
985 /* Ignore block loads from the stack, potentially copying
986 parameters from memory. */
989 else if ((insn
& 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
991 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
992 /* Similarly ignore dual loads from the stack. */
995 else if ((insn
& 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
996 && (inst2
& 0x0d00) == 0x0c00
997 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
998 /* Similarly ignore single loads from the stack. */
1001 else if ((insn
& 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
1002 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
1003 /* Similarly ignore single loads from the stack. */
1006 else if ((insn
& 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
1007 && (inst2
& 0x8000) == 0x0000)
1009 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1010 | (bits (inst2
, 12, 14) << 8)
1011 | bits (inst2
, 0, 7));
1013 regs
[bits (inst2
, 8, 11)]
1014 = pv_add_constant (regs
[bits (insn
, 0, 3)],
1015 thumb_expand_immediate (imm
));
1018 else if ((insn
& 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1019 && (inst2
& 0x8000) == 0x0000)
1021 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1022 | (bits (inst2
, 12, 14) << 8)
1023 | bits (inst2
, 0, 7));
1025 regs
[bits (inst2
, 8, 11)]
1026 = pv_add_constant (regs
[bits (insn
, 0, 3)], imm
);
1029 else if ((insn
& 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1030 && (inst2
& 0x8000) == 0x0000)
1032 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1033 | (bits (inst2
, 12, 14) << 8)
1034 | bits (inst2
, 0, 7));
1036 regs
[bits (inst2
, 8, 11)]
1037 = pv_add_constant (regs
[bits (insn
, 0, 3)],
1038 - (CORE_ADDR
) thumb_expand_immediate (imm
));
1041 else if ((insn
& 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1042 && (inst2
& 0x8000) == 0x0000)
1044 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1045 | (bits (inst2
, 12, 14) << 8)
1046 | bits (inst2
, 0, 7));
1048 regs
[bits (inst2
, 8, 11)]
1049 = pv_add_constant (regs
[bits (insn
, 0, 3)], - (CORE_ADDR
) imm
);
1052 else if ((insn
& 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1054 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1055 | (bits (inst2
, 12, 14) << 8)
1056 | bits (inst2
, 0, 7));
1058 regs
[bits (inst2
, 8, 11)]
1059 = pv_constant (thumb_expand_immediate (imm
));
1062 else if ((insn
& 0xfbf0) == 0xf240) /* movw Rd, #const */
1065 = EXTRACT_MOVW_MOVT_IMM_T (insn
, inst2
);
1067 regs
[bits (inst2
, 8, 11)] = pv_constant (imm
);
1070 else if (insn
== 0xea5f /* mov.w Rd,Rm */
1071 && (inst2
& 0xf0f0) == 0)
1073 int dst_reg
= (inst2
& 0x0f00) >> 8;
1074 int src_reg
= inst2
& 0xf;
1075 regs
[dst_reg
] = regs
[src_reg
];
1078 else if ((insn
& 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1080 /* Constant pool loads. */
1081 unsigned int constant
;
1084 offset
= bits (inst2
, 0, 11);
1086 loc
= start
+ 4 + offset
;
1088 loc
= start
+ 4 - offset
;
1090 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
1091 regs
[bits (inst2
, 12, 15)] = pv_constant (constant
);
1094 else if ((insn
& 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1096 /* Constant pool loads. */
1097 unsigned int constant
;
1100 offset
= bits (inst2
, 0, 7) << 2;
1102 loc
= start
+ 4 + offset
;
1104 loc
= start
+ 4 - offset
;
1106 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
1107 regs
[bits (inst2
, 12, 15)] = pv_constant (constant
);
1109 constant
= read_memory_unsigned_integer (loc
+ 4, 4, byte_order
);
1110 regs
[bits (inst2
, 8, 11)] = pv_constant (constant
);
1113 else if (thumb2_instruction_changes_pc (insn
, inst2
))
1115 /* Don't scan past anything that might change control flow. */
1120 /* The optimizer might shove anything into the prologue,
1121 so we just skip what we don't recognize. */
1122 unrecognized_pc
= start
;
1127 else if (thumb_instruction_changes_pc (insn
))
1129 /* Don't scan past anything that might change control flow. */
1134 /* The optimizer might shove anything into the prologue,
1135 so we just skip what we don't recognize. */
1136 unrecognized_pc
= start
;
1143 fprintf_unfiltered (gdb_stdlog
, "Prologue scan stopped at %s\n",
1144 paddress (gdbarch
, start
));
1146 if (unrecognized_pc
== 0)
1147 unrecognized_pc
= start
;
1151 do_cleanups (back_to
);
1152 return unrecognized_pc
;
1155 if (pv_is_register (regs
[ARM_FP_REGNUM
], ARM_SP_REGNUM
))
1157 /* Frame pointer is fp. Frame size is constant. */
1158 cache
->framereg
= ARM_FP_REGNUM
;
1159 cache
->framesize
= -regs
[ARM_FP_REGNUM
].k
;
1161 else if (pv_is_register (regs
[THUMB_FP_REGNUM
], ARM_SP_REGNUM
))
1163 /* Frame pointer is r7. Frame size is constant. */
1164 cache
->framereg
= THUMB_FP_REGNUM
;
1165 cache
->framesize
= -regs
[THUMB_FP_REGNUM
].k
;
1169 /* Try the stack pointer... this is a bit desperate. */
1170 cache
->framereg
= ARM_SP_REGNUM
;
1171 cache
->framesize
= -regs
[ARM_SP_REGNUM
].k
;
1174 for (i
= 0; i
< 16; i
++)
1175 if (pv_area_find_reg (stack
, gdbarch
, i
, &offset
))
1176 cache
->saved_regs
[i
].addr
= offset
;
1178 do_cleanups (back_to
);
1179 return unrecognized_pc
;
/* Try to analyze the instructions starting from PC, which load symbol
   __stack_chk_guard.  Return the address of instruction after loading this
   symbol, set the dest register number to *BASEREG, and set the size of
   instructions for loading symbol in OFFSET.  Return 0 if instructions are
   not recognized.  */

static CORE_ADDR
arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
				 unsigned int *destreg, int *offset)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

  address = 0;
  if (is_thumb)
    {
      unsigned short insn1
	= read_memory_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
	{
	  /* PC-relative literal load: the literal pool slot holds the
	     address of __stack_chk_guard.  */
	  *destreg = bits (insn1, 8, 10);
	  *offset = 2;
	  /* Literal address is Align(PC, 4) + 4 + imm8 * 4.  */
	  address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);
	}
      else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
	{
	  unsigned short insn2
	    = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	  /* Expect a following movt to supply the upper half.  */
	  insn1
	    = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
	  insn2
	    = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }
  else
    {
      unsigned int insn
	= read_memory_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
	{
	  /* ARM-mode PC-relative load; PC reads as instruction + 8.  */
	  address = bits (insn, 0, 11) + pc + 8;
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);

	  *destreg = bits (insn, 12, 15);
	  *offset = 4;
	}
      else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
	{
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	  insn
	    = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }

  return address;
}
/* Try to skip a sequence of instructions used for stack protector.  If PC
   points to the first instruction of this sequence, return the address of
   first instruction after this sequence, otherwise, return original PC.

   On arm, this sequence of instructions is composed of mainly three steps,
     Step 1: load symbol __stack_chk_guard,
     Step 2: load from address of __stack_chk_guard,
     Step 3: store it to somewhere else.

   Usually, instructions on step 2 and step 3 are the same on various ARM
   architectures.  On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
   on step 3, it is also one instruction 'str Rx, [r7, #immd]'.  However,
   instructions in step 1 vary from different ARM architectures.  On ARMv7,
   they are:

	movw	Rn, #:lower16:__stack_chk_guard
	movt	Rn, #:upper16:__stack_chk_guard

   On ARMv5t, it is:

	ldr	Rn, .Label
	....
	.Lable:
	.word	__stack_chk_guard

   Since ldr/str is a very popular instruction, we can't use them as
   'fingerprint' or 'signature' of stack protector sequence.  Here we choose
   sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
   stripped, as the 'fingerprint' of a stack protector code sequence.  */

static CORE_ADDR
arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int basereg;
  struct bound_minimal_symbol stack_chk_guard;
  int offset;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  CORE_ADDR addr;

  /* Try to parse the instructions in Step 1.  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
					   &basereg, &offset);
  if (!addr)
    return pc;

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
     Otherwise, this sequence cannot be for stack protector.  */
  if (stack_chk_guard.minsym == NULL
      || strncmp (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym),
		  "__stack_chk_guard",
		  strlen ("__stack_chk_guard")) != 0)
    return pc;

  if (is_thumb)
    {
      unsigned int destreg;
      unsigned short insn
	= read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6800)
	return pc;
      if (bits (insn, 3, 5) != basereg)
	return pc;
      destreg = bits (insn, 0, 2);

      insn = read_memory_unsigned_integer (pc + offset + 2, 2,
					   byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6000)
	return pc;
      if (destreg != bits (insn, 0, 2))
	return pc;
    }
  else
    {
      unsigned int destreg;
      unsigned int insn
	= read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
	return pc;
      if (bits (insn, 16, 19) != basereg)
	return pc;
      destreg = bits (insn, 12, 15);
      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_memory_unsigned_integer (pc + offset + 4,
					   4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
	return pc;
      if (bits (insn, 12, 15) != destreg)
	return pc;
    }

  /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
     on ARM mode.  */
  if (is_thumb)
    return pc + offset + 4;
  else
    return pc + offset + 8;
}
/* Advance the PC across any function entry prologue instructions to
   reach some "real" code.

   The APCS (ARM Procedure Call Standard) defines the following
   prologue:

   mov		ip, sp
   [stmfd	sp!, {a1,a2,a3,a4}]
   stmfd	sp!, {...,fp,ip,lr,pc}
   [stfe	f7, [sp, #-12]!]
   [stfe	f6, [sp, #-12]!]
   [stfe	f5, [sp, #-12]!]
   [stfe	f4, [sp, #-12]!]
   sub		fp, ip, #nn @@ nn == 20 or 4 depending on second insn.  */

static CORE_ADDR
arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned long inst;
  CORE_ADDR skip_pc;
  CORE_ADDR func_addr, limit_pc;

  /* See if we can determine the end of the prologue via the symbol table.
     If so, then return either PC, or the PC after the prologue, whichever
     is greater.  */
  if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
    {
      CORE_ADDR post_prologue_pc
	= skip_prologue_using_sal (gdbarch, func_addr);
      struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);

      /* Step over any stack-protector guard-load sequence that follows
	 the line-table prologue end.  */
      if (post_prologue_pc)
	post_prologue_pc
	  = arm_skip_stack_protector (post_prologue_pc, gdbarch);

      /* GCC always emits a line note before the prologue and another
	 one after, even if the two are at the same address or on the
	 same line.  Take advantage of this so that we do not need to
	 know every instruction that might appear in the prologue.  We
	 will have producer information for most binaries; if it is
	 missing (e.g. for -gstabs), assume the GNU tools.  */
      if (post_prologue_pc
	  && (cust == NULL
	      || COMPUNIT_PRODUCER (cust) == NULL
	      || strncmp (COMPUNIT_PRODUCER (cust), "GNU ",
			  sizeof ("GNU ") - 1) == 0
	      || strncmp (COMPUNIT_PRODUCER (cust), "clang ",
			  sizeof ("clang ") - 1) == 0))
	return post_prologue_pc;

      if (post_prologue_pc != 0)
	{
	  CORE_ADDR analyzed_limit;

	  /* For non-GCC compilers, make sure the entire line is an
	     acceptable prologue; GDB will round this function's
	     return value up to the end of the following line so we
	     can not skip just part of a line (and we do not want to).

	     RealView does not treat the prologue specially, but does
	     associate prologue code with the opening brace; so this
	     lets us skip the first line if we think it is the opening
	     brace.  */
	  if (arm_pc_is_thumb (gdbarch, func_addr))
	    analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
						     post_prologue_pc, NULL);
	  else
	    analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
						   post_prologue_pc, NULL);

	  /* If the analyzer did not recognize the whole span, do not
	     trust the line table; stay at the function start.  */
	  if (analyzed_limit != post_prologue_pc)
	    return func_addr;

	  return post_prologue_pc;
	}
    }

  /* Can't determine prologue from the symbol table, need to examine
     instructions.  */

  /* Find an upper limit on the function prologue using the debug
     information.  If the debug information could not be used to provide
     that bound, then use an arbitrary large number as the upper bound.  */
  /* Like arm_scan_prologue, stop no later than pc + 64.  */
  limit_pc = skip_prologue_using_sal (gdbarch, pc);
  if (limit_pc == 0)
    limit_pc = pc + 64;		/* Magic.  */

  /* Check if this is Thumb code.  */
  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);

  for (skip_pc = pc; skip_pc < limit_pc; skip_pc += 4)
    {
      inst = read_memory_unsigned_integer (skip_pc, 4, byte_order_for_code);

      /* "mov ip, sp" is no longer a required part of the prologue.  */
      if (inst == 0xe1a0c00d)			/* mov ip, sp */
	continue;

      if ((inst & 0xfffff000) == 0xe28dc000)	/* add ip, sp #n */
	continue;

      if ((inst & 0xfffff000) == 0xe24dc000)	/* sub ip, sp #n */
	continue;

      /* Some prologues begin with "str lr, [sp, #-4]!".  */
      if (inst == 0xe52de004)			/* str lr, [sp, #-4]! */
	continue;

      if ((inst & 0xfffffff0) == 0xe92d0000)	/* stmfd sp!,{a1,a2,a3,a4} */
	continue;

      if ((inst & 0xfffff800) == 0xe92dd800)	/* stmfd sp!,{fp,ip,lr,pc} */
	continue;

      /* Any insns after this point may float into the code, if it makes
	 for better instruction scheduling, so we skip them only if we
	 find them, but still consider the function to be frame-ful.  */

      /* We may have either one sfmfd instruction here, or several stfe
	 insns, depending on the version of floating point code we
	 support.  */
      if ((inst & 0xffbf0fff) == 0xec2d0200)	/* sfmfd fn, <cnt>, [sp]! */
	continue;

      if ((inst & 0xffff8fff) == 0xed6d0103)	/* stfe fn, [sp, #-12]! */
	continue;

      if ((inst & 0xfffff000) == 0xe24cb000)	/* sub fp, ip, #nn */
	continue;

      if ((inst & 0xfffff000) == 0xe24dd000)	/* sub sp, sp, #nn */
	continue;

      if ((inst & 0xffffc000) == 0xe54b0000	/* strb r(0123),[r11,#-nn] */
	  || (inst & 0xffffc0f0) == 0xe14b00b0	/* strh r(0123),[r11,#-nn] */
	  || (inst & 0xffffc000) == 0xe50b0000)	/* str r(0123),[r11,#-nn] */
	continue;

      if ((inst & 0xffffc000) == 0xe5cd0000	/* strb r(0123),[sp,#nn] */
	  || (inst & 0xffffc0f0) == 0xe1cd00b0	/* strh r(0123),[sp,#nn] */
	  || (inst & 0xffffc000) == 0xe58d0000)	/* str r(0123),[sp,#nn] */
	continue;

      /* Un-recognized instruction; stop scanning.  */
      break;
    }

  return skip_pc;		/* End of prologue.  */
}
1527 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1528 This function decodes a Thumb function prologue to determine:
1529 1) the size of the stack frame
1530 2) which registers are saved on it
1531 3) the offsets of saved regs
1532 4) the offset from the stack pointer to the frame pointer
1534 A typical Thumb function prologue would create this stack frame
1535 (offsets relative to FP)
1536 old SP -> 24 stack parameters
1539 R7 -> 0 local variables (16 bytes)
1540 SP -> -12 additional stack space (12 bytes)
1541 The frame size would thus be 36 bytes, and the frame offset would be
1542 12 bytes. The frame register is R7.
1544 The comments for thumb_skip_prolog() describe the algorithm we use
1545 to detect the end of the prolog. */
1549 thumb_scan_prologue (struct gdbarch
*gdbarch
, CORE_ADDR prev_pc
,
1550 CORE_ADDR block_addr
, struct arm_prologue_cache
*cache
)
1552 CORE_ADDR prologue_start
;
1553 CORE_ADDR prologue_end
;
1555 if (find_pc_partial_function (block_addr
, NULL
, &prologue_start
,
1558 /* See comment in arm_scan_prologue for an explanation of
1560 if (prologue_end
> prologue_start
+ 64)
1562 prologue_end
= prologue_start
+ 64;
1566 /* We're in the boondocks: we have no idea where the start of the
1570 prologue_end
= min (prologue_end
, prev_pc
);
1572 thumb_analyze_prologue (gdbarch
, prologue_start
, prologue_end
, cache
);
/* Return 1 if THIS_INSTR might change control flow, 0 otherwise.  */

static int
arm_instruction_changes_pc (uint32_t this_instr)
{
  if (bits (this_instr, 28, 31) == INST_NV)
    /* Unconditional instructions.  */
    switch (bits (this_instr, 24, 27))
      {
      default:
	return 0;

      case 0xa:
      case 0xb:
	/* Branch with Link and change to Thumb.  */
	return 1;

      case 0xc:
      case 0xd:
      case 0xe:
	/* Coprocessor register transfer.  */
	if (bits (this_instr, 12, 15) == 15)
	  error (_("Invalid update to pc in instruction"));
	return 0;
      }
  else
    switch (bits (this_instr, 25, 27))
      {
      case 0x0:
	if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
	  {
	    /* Multiplies and extra load/stores.  */
	    if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
	      /* Neither multiplies nor extension load/stores are allowed
		 to modify PC.  */
	      return 0;

	    /* Otherwise, miscellaneous instructions.  */

	    /* BX <reg>, BXJ <reg>, BLX <reg> */
	    if (bits (this_instr, 4, 27) == 0x12fff1
		|| bits (this_instr, 4, 27) == 0x12fff2
		|| bits (this_instr, 4, 27) == 0x12fff3)
	      return 1;

	    /* Other miscellaneous instructions are unpredictable if they
	       modify PC.  */
	    return 0;
	  }
	/* Data processing instruction.  Fall through.  */

      case 0x1:
	if (bits (this_instr, 12, 15) == 15)
	  return 1;
	else
	  return 0;

      case 0x2:
      case 0x3:
	/* Media instructions and architecturally undefined instructions.  */
	if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
	  return 0;

	/* Stores.  */
	if (bit (this_instr, 20) == 0)
	  return 0;

	/* Loads.  */
	if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
	  return 1;
	else
	  return 0;

      case 0x4:
	/* Load/store multiple.  */
	if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
	  return 1;
	else
	  return 0;

      case 0x5:
	/* Branch and branch with link.  */
	return 1;

      case 0x6:
      case 0x7:
	/* Coprocessor transfers or SWIs can not affect PC.  */
	return 0;

      default:
	internal_error (__FILE__, __LINE__, _("bad value in switch"));
      }
}
1667 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1671 arm_instruction_restores_sp (unsigned int insn
)
1673 if (bits (insn
, 28, 31) != INST_NV
)
1675 if ((insn
& 0x0df0f000) == 0x0080d000
1676 /* ADD SP (register or immediate). */
1677 || (insn
& 0x0df0f000) == 0x0040d000
1678 /* SUB SP (register or immediate). */
1679 || (insn
& 0x0ffffff0) == 0x01a0d000
1681 || (insn
& 0x0fff0000) == 0x08bd0000
1683 || (insn
& 0x0fff0000) == 0x049d0000)
1684 /* POP of a single register. */
1691 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1692 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1693 fill it in. Return the first address not recognized as a prologue
1696 We recognize all the instructions typically found in ARM prologues,
1697 plus harmless instructions which can be skipped (either for analysis
1698 purposes, or a more restrictive set that can be skipped when finding
1699 the end of the prologue). */
1702 arm_analyze_prologue (struct gdbarch
*gdbarch
,
1703 CORE_ADDR prologue_start
, CORE_ADDR prologue_end
,
1704 struct arm_prologue_cache
*cache
)
1706 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
1707 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1709 CORE_ADDR offset
, current_pc
;
1710 pv_t regs
[ARM_FPS_REGNUM
];
1711 struct pv_area
*stack
;
1712 struct cleanup
*back_to
;
1713 CORE_ADDR unrecognized_pc
= 0;
1715 /* Search the prologue looking for instructions that set up the
1716 frame pointer, adjust the stack pointer, and save registers.
1718 Be careful, however, and if it doesn't look like a prologue,
1719 don't try to scan it. If, for instance, a frameless function
1720 begins with stmfd sp!, then we will tell ourselves there is
1721 a frame, which will confuse stack traceback, as well as "finish"
1722 and other operations that rely on a knowledge of the stack
1725 for (regno
= 0; regno
< ARM_FPS_REGNUM
; regno
++)
1726 regs
[regno
] = pv_register (regno
, 0);
1727 stack
= make_pv_area (ARM_SP_REGNUM
, gdbarch_addr_bit (gdbarch
));
1728 back_to
= make_cleanup_free_pv_area (stack
);
1730 for (current_pc
= prologue_start
;
1731 current_pc
< prologue_end
;
1735 = read_memory_unsigned_integer (current_pc
, 4, byte_order_for_code
);
1737 if (insn
== 0xe1a0c00d) /* mov ip, sp */
1739 regs
[ARM_IP_REGNUM
] = regs
[ARM_SP_REGNUM
];
1742 else if ((insn
& 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1743 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1745 unsigned imm
= insn
& 0xff; /* immediate value */
1746 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1747 int rd
= bits (insn
, 12, 15);
1748 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1749 regs
[rd
] = pv_add_constant (regs
[bits (insn
, 16, 19)], imm
);
1752 else if ((insn
& 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1753 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1755 unsigned imm
= insn
& 0xff; /* immediate value */
1756 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1757 int rd
= bits (insn
, 12, 15);
1758 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1759 regs
[rd
] = pv_add_constant (regs
[bits (insn
, 16, 19)], -imm
);
1762 else if ((insn
& 0xffff0fff) == 0xe52d0004) /* str Rd,
1765 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
1767 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -4);
1768 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 4,
1769 regs
[bits (insn
, 12, 15)]);
1772 else if ((insn
& 0xffff0000) == 0xe92d0000)
1773 /* stmfd sp!, {..., fp, ip, lr, pc}
1775 stmfd sp!, {a1, a2, a3, a4} */
1777 int mask
= insn
& 0xffff;
1779 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
1782 /* Calculate offsets of saved registers. */
1783 for (regno
= ARM_PC_REGNUM
; regno
>= 0; regno
--)
1784 if (mask
& (1 << regno
))
1787 = pv_add_constant (regs
[ARM_SP_REGNUM
], -4);
1788 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 4, regs
[regno
]);
1791 else if ((insn
& 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1792 || (insn
& 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1793 || (insn
& 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1795 /* No need to add this to saved_regs -- it's just an arg reg. */
1798 else if ((insn
& 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1799 || (insn
& 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1800 || (insn
& 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1802 /* No need to add this to saved_regs -- it's just an arg reg. */
1805 else if ((insn
& 0xfff00000) == 0xe8800000 /* stm Rn,
1807 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1809 /* No need to add this to saved_regs -- it's just arg regs. */
1812 else if ((insn
& 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1814 unsigned imm
= insn
& 0xff; /* immediate value */
1815 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1816 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1817 regs
[ARM_FP_REGNUM
] = pv_add_constant (regs
[ARM_IP_REGNUM
], -imm
);
1819 else if ((insn
& 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1821 unsigned imm
= insn
& 0xff; /* immediate value */
1822 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1823 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1824 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -imm
);
1826 else if ((insn
& 0xffff7fff) == 0xed6d0103 /* stfe f?,
1828 && gdbarch_tdep (gdbarch
)->have_fpa_registers
)
1830 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
1833 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -12);
1834 regno
= ARM_F0_REGNUM
+ ((insn
>> 12) & 0x07);
1835 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 12, regs
[regno
]);
1837 else if ((insn
& 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1839 && gdbarch_tdep (gdbarch
)->have_fpa_registers
)
1841 int n_saved_fp_regs
;
1842 unsigned int fp_start_reg
, fp_bound_reg
;
1844 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
1847 if ((insn
& 0x800) == 0x800) /* N0 is set */
1849 if ((insn
& 0x40000) == 0x40000) /* N1 is set */
1850 n_saved_fp_regs
= 3;
1852 n_saved_fp_regs
= 1;
1856 if ((insn
& 0x40000) == 0x40000) /* N1 is set */
1857 n_saved_fp_regs
= 2;
1859 n_saved_fp_regs
= 4;
1862 fp_start_reg
= ARM_F0_REGNUM
+ ((insn
>> 12) & 0x7);
1863 fp_bound_reg
= fp_start_reg
+ n_saved_fp_regs
;
1864 for (; fp_start_reg
< fp_bound_reg
; fp_start_reg
++)
1866 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -12);
1867 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 12,
1868 regs
[fp_start_reg
++]);
1871 else if ((insn
& 0xff000000) == 0xeb000000 && cache
== NULL
) /* bl */
1873 /* Allow some special function calls when skipping the
1874 prologue; GCC generates these before storing arguments to
1876 CORE_ADDR dest
= BranchDest (current_pc
, insn
);
1878 if (skip_prologue_function (gdbarch
, dest
, 0))
1883 else if ((insn
& 0xf0000000) != 0xe0000000)
1884 break; /* Condition not true, exit early. */
1885 else if (arm_instruction_changes_pc (insn
))
1886 /* Don't scan past anything that might change control flow. */
1888 else if (arm_instruction_restores_sp (insn
))
1890 /* Don't scan past the epilogue. */
1893 else if ((insn
& 0xfe500000) == 0xe8100000 /* ldm */
1894 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1895 /* Ignore block loads from the stack, potentially copying
1896 parameters from memory. */
1898 else if ((insn
& 0xfc500000) == 0xe4100000
1899 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1900 /* Similarly ignore single loads from the stack. */
1902 else if ((insn
& 0xffff0ff0) == 0xe1a00000)
1903 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1904 register instead of the stack. */
1908 /* The optimizer might shove anything into the prologue,
1909 so we just skip what we don't recognize. */
1910 unrecognized_pc
= current_pc
;
1915 if (unrecognized_pc
== 0)
1916 unrecognized_pc
= current_pc
;
1920 int framereg
, framesize
;
1922 /* The frame size is just the distance from the frame register
1923 to the original stack pointer. */
1924 if (pv_is_register (regs
[ARM_FP_REGNUM
], ARM_SP_REGNUM
))
1926 /* Frame pointer is fp. */
1927 framereg
= ARM_FP_REGNUM
;
1928 framesize
= -regs
[ARM_FP_REGNUM
].k
;
1932 /* Try the stack pointer... this is a bit desperate. */
1933 framereg
= ARM_SP_REGNUM
;
1934 framesize
= -regs
[ARM_SP_REGNUM
].k
;
1937 cache
->framereg
= framereg
;
1938 cache
->framesize
= framesize
;
1940 for (regno
= 0; regno
< ARM_FPS_REGNUM
; regno
++)
1941 if (pv_area_find_reg (stack
, gdbarch
, regno
, &offset
))
1942 cache
->saved_regs
[regno
].addr
= offset
;
1946 fprintf_unfiltered (gdb_stdlog
, "Prologue scan stopped at %s\n",
1947 paddress (gdbarch
, unrecognized_pc
));
1949 do_cleanups (back_to
);
1950 return unrecognized_pc
;
1954 arm_scan_prologue (struct frame_info
*this_frame
,
1955 struct arm_prologue_cache
*cache
)
1957 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
1958 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
1960 CORE_ADDR prologue_start
, prologue_end
, current_pc
;
1961 CORE_ADDR prev_pc
= get_frame_pc (this_frame
);
1962 CORE_ADDR block_addr
= get_frame_address_in_block (this_frame
);
1963 pv_t regs
[ARM_FPS_REGNUM
];
1964 struct pv_area
*stack
;
1965 struct cleanup
*back_to
;
1968 /* Assume there is no frame until proven otherwise. */
1969 cache
->framereg
= ARM_SP_REGNUM
;
1970 cache
->framesize
= 0;
1972 /* Check for Thumb prologue. */
1973 if (arm_frame_is_thumb (this_frame
))
1975 thumb_scan_prologue (gdbarch
, prev_pc
, block_addr
, cache
);
1979 /* Find the function prologue. If we can't find the function in
1980 the symbol table, peek in the stack frame to find the PC. */
1981 if (find_pc_partial_function (block_addr
, NULL
, &prologue_start
,
1984 /* One way to find the end of the prologue (which works well
1985 for unoptimized code) is to do the following:
1987 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1990 prologue_end = prev_pc;
1991 else if (sal.end < prologue_end)
1992 prologue_end = sal.end;
1994 This mechanism is very accurate so long as the optimizer
1995 doesn't move any instructions from the function body into the
1996 prologue. If this happens, sal.end will be the last
1997 instruction in the first hunk of prologue code just before
1998 the first instruction that the scheduler has moved from
1999 the body to the prologue.
2001 In order to make sure that we scan all of the prologue
2002 instructions, we use a slightly less accurate mechanism which
2003 may scan more than necessary. To help compensate for this
2004 lack of accuracy, the prologue scanning loop below contains
2005 several clauses which'll cause the loop to terminate early if
2006 an implausible prologue instruction is encountered.
2012 is a suitable endpoint since it accounts for the largest
2013 possible prologue plus up to five instructions inserted by
2016 if (prologue_end
> prologue_start
+ 64)
2018 prologue_end
= prologue_start
+ 64; /* See above. */
2023 /* We have no symbol information. Our only option is to assume this
2024 function has a standard stack frame and the normal frame register.
2025 Then, we can find the value of our frame pointer on entrance to
2026 the callee (or at the present moment if this is the innermost frame).
2027 The value stored there should be the address of the stmfd + 8. */
2028 CORE_ADDR frame_loc
;
2029 LONGEST return_value
;
2031 frame_loc
= get_frame_register_unsigned (this_frame
, ARM_FP_REGNUM
);
2032 if (!safe_read_memory_integer (frame_loc
, 4, byte_order
, &return_value
))
2036 prologue_start
= gdbarch_addr_bits_remove
2037 (gdbarch
, return_value
) - 8;
2038 prologue_end
= prologue_start
+ 64; /* See above. */
2042 if (prev_pc
< prologue_end
)
2043 prologue_end
= prev_pc
;
2045 arm_analyze_prologue (gdbarch
, prologue_start
, prologue_end
, cache
);
2048 static struct arm_prologue_cache
*
2049 arm_make_prologue_cache (struct frame_info
*this_frame
)
2052 struct arm_prologue_cache
*cache
;
2053 CORE_ADDR unwound_fp
;
2055 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2056 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2058 arm_scan_prologue (this_frame
, cache
);
2060 unwound_fp
= get_frame_register_unsigned (this_frame
, cache
->framereg
);
2061 if (unwound_fp
== 0)
2064 cache
->prev_sp
= unwound_fp
+ cache
->framesize
;
2066 /* Calculate actual addresses of saved registers using offsets
2067 determined by arm_scan_prologue. */
2068 for (reg
= 0; reg
< gdbarch_num_regs (get_frame_arch (this_frame
)); reg
++)
2069 if (trad_frame_addr_p (cache
->saved_regs
, reg
))
2070 cache
->saved_regs
[reg
].addr
+= cache
->prev_sp
;
2075 /* Our frame ID for a normal frame is the current function's starting PC
2076 and the caller's SP when we were called. */
2079 arm_prologue_this_id (struct frame_info
*this_frame
,
2081 struct frame_id
*this_id
)
2083 struct arm_prologue_cache
*cache
;
2087 if (*this_cache
== NULL
)
2088 *this_cache
= arm_make_prologue_cache (this_frame
);
2089 cache
= *this_cache
;
2091 /* This is meant to halt the backtrace at "_start". */
2092 pc
= get_frame_pc (this_frame
);
2093 if (pc
<= gdbarch_tdep (get_frame_arch (this_frame
))->lowest_pc
)
2096 /* If we've hit a wall, stop. */
2097 if (cache
->prev_sp
== 0)
2100 /* Use function start address as part of the frame ID. If we cannot
2101 identify the start address (due to missing symbol information),
2102 fall back to just using the current PC. */
2103 func
= get_frame_func (this_frame
);
2107 id
= frame_id_build (cache
->prev_sp
, func
);
/* Unwind register PREV_REGNUM of the frame previous to THIS_FRAME,
   using the prologue cache.  PC, SP and PS need special reconstruction;
   everything else comes straight from the saved-register table.  */

static struct value *
arm_prologue_prev_register (struct frame_info *this_frame,
			    void **this_cache,
			    int prev_regnum)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  struct arm_prologue_cache *cache;

  if (*this_cache == NULL)
    *this_cache = arm_make_prologue_cache (this_frame);
  cache = *this_cache;

  /* If we are asked to unwind the PC, then we need to return the LR
     instead.  The prologue may save PC, but it will point into this
     frame's prologue, not the next frame's resume location.  Also
     strip the saved T bit.  A valid LR may have the low bit set, but
     a valid PC never does.  */
  if (prev_regnum == ARM_PC_REGNUM)
    {
      CORE_ADDR lr;

      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      return frame_unwind_got_constant (this_frame, prev_regnum,
					arm_addr_bits_remove (gdbarch, lr));
    }

  /* SP is generally not saved to the stack, but this frame is
     identified by the next frame's stack pointer at the time of the call.
     The value was already reconstructed into PREV_SP.  */
  if (prev_regnum == ARM_SP_REGNUM)
    return frame_unwind_got_constant (this_frame, prev_regnum,
				      cache->prev_sp);

  /* The CPSR may have been changed by the call instruction and by the
     called function.  The only bit we can reconstruct is the T bit,
     by checking the low bit of LR as of the call.  This is a reliable
     indicator of Thumb-ness except for some ARM v4T pre-interworking
     Thumb code, which could get away with a clear low bit as long as
     the called function did not use bx.  Guess that all other
     bits are unchanged; the condition flags are presumably lost,
     but the processor status is likely valid.  */
  if (prev_regnum == ARM_PS_REGNUM)
    {
      CORE_ADDR lr, cpsr;
      ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);

      cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      if (IS_THUMB_ADDR (lr))
	cpsr |= t_bit;
      else
	cpsr &= ~t_bit;
      return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
    }

  return trad_frame_get_prev_register (this_frame, cache->saved_regs,
				       prev_regnum);
}
/* Prologue-analysis based unwinder, used when no better (e.g. DWARF
   CFI or exception-table) unwind information is available.  */

struct frame_unwind arm_prologue_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,				/* unwind_data */
  default_frame_sniffer
};
/* Maintain a list of ARM exception table entries per objfile, similar to the
   list of mapping symbols.  We only cache entries for standard ARM-defined
   personality routines; the cache will contain only the frame unwinding
   instructions associated with the entry (not the descriptors).  */

static const struct objfile_data *arm_exidx_data_key;

struct arm_exidx_entry
{
  /* Start address of the function this entry covers.  */
  bfd_vma addr;
  /* Normalized unwind instructions, or NULL for EXIDX_CANTUNWIND.  */
  gdb_byte *entry;
};
typedef struct arm_exidx_entry arm_exidx_entry_s;
DEF_VEC_O(arm_exidx_entry_s);

struct arm_exidx_data
{
  /* One vector of entries per BFD section, indexed by section index.  */
  VEC(arm_exidx_entry_s) **section_maps;
};
2199 arm_exidx_data_free (struct objfile
*objfile
, void *arg
)
2201 struct arm_exidx_data
*data
= arg
;
2204 for (i
= 0; i
< objfile
->obfd
->section_count
; i
++)
2205 VEC_free (arm_exidx_entry_s
, data
->section_maps
[i
]);
2209 arm_compare_exidx_entries (const struct arm_exidx_entry
*lhs
,
2210 const struct arm_exidx_entry
*rhs
)
2212 return lhs
->addr
< rhs
->addr
;
2215 static struct obj_section
*
2216 arm_obj_section_from_vma (struct objfile
*objfile
, bfd_vma vma
)
2218 struct obj_section
*osect
;
2220 ALL_OBJFILE_OSECTIONS (objfile
, osect
)
2221 if (bfd_get_section_flags (objfile
->obfd
,
2222 osect
->the_bfd_section
) & SEC_ALLOC
)
2224 bfd_vma start
, size
;
2225 start
= bfd_get_section_vma (objfile
->obfd
, osect
->the_bfd_section
);
2226 size
= bfd_get_section_size (osect
->the_bfd_section
);
2228 if (start
<= vma
&& vma
< start
+ size
)
2235 /* Parse contents of exception table and exception index sections
2236 of OBJFILE, and fill in the exception table entry cache.
2238 For each entry that refers to a standard ARM-defined personality
2239 routine, extract the frame unwinding instructions (from either
2240 the index or the table section). The unwinding instructions
2242 - extracting them from the rest of the table data
2243 - converting to host endianness
2244 - appending the implicit 0xb0 ("Finish") code
2246 The extracted and normalized instructions are stored for later
2247 retrieval by the arm_find_exidx_entry routine. */
2250 arm_exidx_new_objfile (struct objfile
*objfile
)
2252 struct cleanup
*cleanups
;
2253 struct arm_exidx_data
*data
;
2254 asection
*exidx
, *extab
;
2255 bfd_vma exidx_vma
= 0, extab_vma
= 0;
2256 bfd_size_type exidx_size
= 0, extab_size
= 0;
2257 gdb_byte
*exidx_data
= NULL
, *extab_data
= NULL
;
2260 /* If we've already touched this file, do nothing. */
2261 if (!objfile
|| objfile_data (objfile
, arm_exidx_data_key
) != NULL
)
2263 cleanups
= make_cleanup (null_cleanup
, NULL
);
2265 /* Read contents of exception table and index. */
2266 exidx
= bfd_get_section_by_name (objfile
->obfd
, ".ARM.exidx");
2269 exidx_vma
= bfd_section_vma (objfile
->obfd
, exidx
);
2270 exidx_size
= bfd_get_section_size (exidx
);
2271 exidx_data
= xmalloc (exidx_size
);
2272 make_cleanup (xfree
, exidx_data
);
2274 if (!bfd_get_section_contents (objfile
->obfd
, exidx
,
2275 exidx_data
, 0, exidx_size
))
2277 do_cleanups (cleanups
);
2282 extab
= bfd_get_section_by_name (objfile
->obfd
, ".ARM.extab");
2285 extab_vma
= bfd_section_vma (objfile
->obfd
, extab
);
2286 extab_size
= bfd_get_section_size (extab
);
2287 extab_data
= xmalloc (extab_size
);
2288 make_cleanup (xfree
, extab_data
);
2290 if (!bfd_get_section_contents (objfile
->obfd
, extab
,
2291 extab_data
, 0, extab_size
))
2293 do_cleanups (cleanups
);
2298 /* Allocate exception table data structure. */
2299 data
= OBSTACK_ZALLOC (&objfile
->objfile_obstack
, struct arm_exidx_data
);
2300 set_objfile_data (objfile
, arm_exidx_data_key
, data
);
2301 data
->section_maps
= OBSTACK_CALLOC (&objfile
->objfile_obstack
,
2302 objfile
->obfd
->section_count
,
2303 VEC(arm_exidx_entry_s
) *);
2305 /* Fill in exception table. */
2306 for (i
= 0; i
< exidx_size
/ 8; i
++)
2308 struct arm_exidx_entry new_exidx_entry
;
2309 bfd_vma idx
= bfd_h_get_32 (objfile
->obfd
, exidx_data
+ i
* 8);
2310 bfd_vma val
= bfd_h_get_32 (objfile
->obfd
, exidx_data
+ i
* 8 + 4);
2311 bfd_vma addr
= 0, word
= 0;
2312 int n_bytes
= 0, n_words
= 0;
2313 struct obj_section
*sec
;
2314 gdb_byte
*entry
= NULL
;
2316 /* Extract address of start of function. */
2317 idx
= ((idx
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2318 idx
+= exidx_vma
+ i
* 8;
2320 /* Find section containing function and compute section offset. */
2321 sec
= arm_obj_section_from_vma (objfile
, idx
);
2324 idx
-= bfd_get_section_vma (objfile
->obfd
, sec
->the_bfd_section
);
2326 /* Determine address of exception table entry. */
2329 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2331 else if ((val
& 0xff000000) == 0x80000000)
2333 /* Exception table entry embedded in .ARM.exidx
2334 -- must be short form. */
2338 else if (!(val
& 0x80000000))
2340 /* Exception table entry in .ARM.extab. */
2341 addr
= ((val
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2342 addr
+= exidx_vma
+ i
* 8 + 4;
2344 if (addr
>= extab_vma
&& addr
+ 4 <= extab_vma
+ extab_size
)
2346 word
= bfd_h_get_32 (objfile
->obfd
,
2347 extab_data
+ addr
- extab_vma
);
2350 if ((word
& 0xff000000) == 0x80000000)
2355 else if ((word
& 0xff000000) == 0x81000000
2356 || (word
& 0xff000000) == 0x82000000)
2360 n_words
= ((word
>> 16) & 0xff);
2362 else if (!(word
& 0x80000000))
2365 struct obj_section
*pers_sec
;
2366 int gnu_personality
= 0;
2368 /* Custom personality routine. */
2369 pers
= ((word
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2370 pers
= UNMAKE_THUMB_ADDR (pers
+ addr
- 4);
2372 /* Check whether we've got one of the variants of the
2373 GNU personality routines. */
2374 pers_sec
= arm_obj_section_from_vma (objfile
, pers
);
2377 static const char *personality
[] =
2379 "__gcc_personality_v0",
2380 "__gxx_personality_v0",
2381 "__gcj_personality_v0",
2382 "__gnu_objc_personality_v0",
2386 CORE_ADDR pc
= pers
+ obj_section_offset (pers_sec
);
2389 for (k
= 0; personality
[k
]; k
++)
2390 if (lookup_minimal_symbol_by_pc_name
2391 (pc
, personality
[k
], objfile
))
2393 gnu_personality
= 1;
2398 /* If so, the next word contains a word count in the high
2399 byte, followed by the same unwind instructions as the
2400 pre-defined forms. */
2402 && addr
+ 4 <= extab_vma
+ extab_size
)
2404 word
= bfd_h_get_32 (objfile
->obfd
,
2405 extab_data
+ addr
- extab_vma
);
2408 n_words
= ((word
>> 24) & 0xff);
2414 /* Sanity check address. */
2416 if (addr
< extab_vma
|| addr
+ 4 * n_words
> extab_vma
+ extab_size
)
2417 n_words
= n_bytes
= 0;
2419 /* The unwind instructions reside in WORD (only the N_BYTES least
2420 significant bytes are valid), followed by N_WORDS words in the
2421 extab section starting at ADDR. */
2422 if (n_bytes
|| n_words
)
2424 gdb_byte
*p
= entry
= obstack_alloc (&objfile
->objfile_obstack
,
2425 n_bytes
+ n_words
* 4 + 1);
2428 *p
++ = (gdb_byte
) ((word
>> (8 * n_bytes
)) & 0xff);
2432 word
= bfd_h_get_32 (objfile
->obfd
,
2433 extab_data
+ addr
- extab_vma
);
2436 *p
++ = (gdb_byte
) ((word
>> 24) & 0xff);
2437 *p
++ = (gdb_byte
) ((word
>> 16) & 0xff);
2438 *p
++ = (gdb_byte
) ((word
>> 8) & 0xff);
2439 *p
++ = (gdb_byte
) (word
& 0xff);
2442 /* Implied "Finish" to terminate the list. */
2446 /* Push entry onto vector. They are guaranteed to always
2447 appear in order of increasing addresses. */
2448 new_exidx_entry
.addr
= idx
;
2449 new_exidx_entry
.entry
= entry
;
2450 VEC_safe_push (arm_exidx_entry_s
,
2451 data
->section_maps
[sec
->the_bfd_section
->index
],
2455 do_cleanups (cleanups
);
2458 /* Search for the exception table entry covering MEMADDR. If one is found,
2459 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2460 set *START to the start of the region covered by this entry. */
2463 arm_find_exidx_entry (CORE_ADDR memaddr
, CORE_ADDR
*start
)
2465 struct obj_section
*sec
;
2467 sec
= find_pc_section (memaddr
);
2470 struct arm_exidx_data
*data
;
2471 VEC(arm_exidx_entry_s
) *map
;
2472 struct arm_exidx_entry map_key
= { memaddr
- obj_section_addr (sec
), 0 };
2475 data
= objfile_data (sec
->objfile
, arm_exidx_data_key
);
2478 map
= data
->section_maps
[sec
->the_bfd_section
->index
];
2479 if (!VEC_empty (arm_exidx_entry_s
, map
))
2481 struct arm_exidx_entry
*map_sym
;
2483 idx
= VEC_lower_bound (arm_exidx_entry_s
, map
, &map_key
,
2484 arm_compare_exidx_entries
);
2486 /* VEC_lower_bound finds the earliest ordered insertion
2487 point. If the following symbol starts at this exact
2488 address, we use that; otherwise, the preceding
2489 exception table entry covers this address. */
2490 if (idx
< VEC_length (arm_exidx_entry_s
, map
))
2492 map_sym
= VEC_index (arm_exidx_entry_s
, map
, idx
);
2493 if (map_sym
->addr
== map_key
.addr
)
2496 *start
= map_sym
->addr
+ obj_section_addr (sec
);
2497 return map_sym
->entry
;
2503 map_sym
= VEC_index (arm_exidx_entry_s
, map
, idx
- 1);
2505 *start
= map_sym
->addr
+ obj_section_addr (sec
);
2506 return map_sym
->entry
;
2515 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2516 instruction list from the ARM exception table entry ENTRY, allocate and
2517 return a prologue cache structure describing how to unwind this frame.
2519 Return NULL if the unwinding instruction list contains a "spare",
2520 "reserved" or "refuse to unwind" instruction as defined in section
2521 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2522 for the ARM Architecture" document. */
2524 static struct arm_prologue_cache
*
2525 arm_exidx_fill_cache (struct frame_info
*this_frame
, gdb_byte
*entry
)
2530 struct arm_prologue_cache
*cache
;
2531 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2532 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2538 /* Whenever we reload SP, we actually have to retrieve its
2539 actual value in the current frame. */
2542 if (trad_frame_realreg_p (cache
->saved_regs
, ARM_SP_REGNUM
))
2544 int reg
= cache
->saved_regs
[ARM_SP_REGNUM
].realreg
;
2545 vsp
= get_frame_register_unsigned (this_frame
, reg
);
2549 CORE_ADDR addr
= cache
->saved_regs
[ARM_SP_REGNUM
].addr
;
2550 vsp
= get_frame_memory_unsigned (this_frame
, addr
, 4);
2556 /* Decode next unwind instruction. */
2559 if ((insn
& 0xc0) == 0)
2561 int offset
= insn
& 0x3f;
2562 vsp
+= (offset
<< 2) + 4;
2564 else if ((insn
& 0xc0) == 0x40)
2566 int offset
= insn
& 0x3f;
2567 vsp
-= (offset
<< 2) + 4;
2569 else if ((insn
& 0xf0) == 0x80)
2571 int mask
= ((insn
& 0xf) << 8) | *entry
++;
2574 /* The special case of an all-zero mask identifies
2575 "Refuse to unwind". We return NULL to fall back
2576 to the prologue analyzer. */
2580 /* Pop registers r4..r15 under mask. */
2581 for (i
= 0; i
< 12; i
++)
2582 if (mask
& (1 << i
))
2584 cache
->saved_regs
[4 + i
].addr
= vsp
;
2588 /* Special-case popping SP -- we need to reload vsp. */
2589 if (mask
& (1 << (ARM_SP_REGNUM
- 4)))
2592 else if ((insn
& 0xf0) == 0x90)
2594 int reg
= insn
& 0xf;
2596 /* Reserved cases. */
2597 if (reg
== ARM_SP_REGNUM
|| reg
== ARM_PC_REGNUM
)
2600 /* Set SP from another register and mark VSP for reload. */
2601 cache
->saved_regs
[ARM_SP_REGNUM
] = cache
->saved_regs
[reg
];
2604 else if ((insn
& 0xf0) == 0xa0)
2606 int count
= insn
& 0x7;
2607 int pop_lr
= (insn
& 0x8) != 0;
2610 /* Pop r4..r[4+count]. */
2611 for (i
= 0; i
<= count
; i
++)
2613 cache
->saved_regs
[4 + i
].addr
= vsp
;
2617 /* If indicated by flag, pop LR as well. */
2620 cache
->saved_regs
[ARM_LR_REGNUM
].addr
= vsp
;
2624 else if (insn
== 0xb0)
2626 /* We could only have updated PC by popping into it; if so, it
2627 will show up as address. Otherwise, copy LR into PC. */
2628 if (!trad_frame_addr_p (cache
->saved_regs
, ARM_PC_REGNUM
))
2629 cache
->saved_regs
[ARM_PC_REGNUM
]
2630 = cache
->saved_regs
[ARM_LR_REGNUM
];
2635 else if (insn
== 0xb1)
2637 int mask
= *entry
++;
2640 /* All-zero mask and mask >= 16 is "spare". */
2641 if (mask
== 0 || mask
>= 16)
2644 /* Pop r0..r3 under mask. */
2645 for (i
= 0; i
< 4; i
++)
2646 if (mask
& (1 << i
))
2648 cache
->saved_regs
[i
].addr
= vsp
;
2652 else if (insn
== 0xb2)
2654 ULONGEST offset
= 0;
2659 offset
|= (*entry
& 0x7f) << shift
;
2662 while (*entry
++ & 0x80);
2664 vsp
+= 0x204 + (offset
<< 2);
2666 else if (insn
== 0xb3)
2668 int start
= *entry
>> 4;
2669 int count
= (*entry
++) & 0xf;
2672 /* Only registers D0..D15 are valid here. */
2673 if (start
+ count
>= 16)
2676 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2677 for (i
= 0; i
<= count
; i
++)
2679 cache
->saved_regs
[ARM_D0_REGNUM
+ start
+ i
].addr
= vsp
;
2683 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2686 else if ((insn
& 0xf8) == 0xb8)
2688 int count
= insn
& 0x7;
2691 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2692 for (i
= 0; i
<= count
; i
++)
2694 cache
->saved_regs
[ARM_D0_REGNUM
+ 8 + i
].addr
= vsp
;
2698 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2701 else if (insn
== 0xc6)
2703 int start
= *entry
>> 4;
2704 int count
= (*entry
++) & 0xf;
2707 /* Only registers WR0..WR15 are valid. */
2708 if (start
+ count
>= 16)
2711 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2712 for (i
= 0; i
<= count
; i
++)
2714 cache
->saved_regs
[ARM_WR0_REGNUM
+ start
+ i
].addr
= vsp
;
2718 else if (insn
== 0xc7)
2720 int mask
= *entry
++;
2723 /* All-zero mask and mask >= 16 is "spare". */
2724 if (mask
== 0 || mask
>= 16)
2727 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2728 for (i
= 0; i
< 4; i
++)
2729 if (mask
& (1 << i
))
2731 cache
->saved_regs
[ARM_WCGR0_REGNUM
+ i
].addr
= vsp
;
2735 else if ((insn
& 0xf8) == 0xc0)
2737 int count
= insn
& 0x7;
2740 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2741 for (i
= 0; i
<= count
; i
++)
2743 cache
->saved_regs
[ARM_WR0_REGNUM
+ 10 + i
].addr
= vsp
;
2747 else if (insn
== 0xc8)
2749 int start
= *entry
>> 4;
2750 int count
= (*entry
++) & 0xf;
2753 /* Only registers D0..D31 are valid. */
2754 if (start
+ count
>= 16)
2757 /* Pop VFP double-precision registers
2758 D[16+start]..D[16+start+count]. */
2759 for (i
= 0; i
<= count
; i
++)
2761 cache
->saved_regs
[ARM_D0_REGNUM
+ 16 + start
+ i
].addr
= vsp
;
2765 else if (insn
== 0xc9)
2767 int start
= *entry
>> 4;
2768 int count
= (*entry
++) & 0xf;
2771 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2772 for (i
= 0; i
<= count
; i
++)
2774 cache
->saved_regs
[ARM_D0_REGNUM
+ start
+ i
].addr
= vsp
;
2778 else if ((insn
& 0xf8) == 0xd0)
2780 int count
= insn
& 0x7;
2783 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2784 for (i
= 0; i
<= count
; i
++)
2786 cache
->saved_regs
[ARM_D0_REGNUM
+ 8 + i
].addr
= vsp
;
2792 /* Everything else is "spare". */
2797 /* If we restore SP from a register, assume this was the frame register.
2798 Otherwise just fall back to SP as frame register. */
2799 if (trad_frame_realreg_p (cache
->saved_regs
, ARM_SP_REGNUM
))
2800 cache
->framereg
= cache
->saved_regs
[ARM_SP_REGNUM
].realreg
;
2802 cache
->framereg
= ARM_SP_REGNUM
;
2804 /* Determine offset to previous frame. */
2806 = vsp
- get_frame_register_unsigned (this_frame
, cache
->framereg
);
2808 /* We already got the previous SP. */
2809 cache
->prev_sp
= vsp
;
2814 /* Unwinding via ARM exception table entries. Note that the sniffer
2815 already computes a filled-in prologue cache, which is then used
2816 with the same arm_prologue_this_id and arm_prologue_prev_register
2817 routines also used for prologue-parsing based unwinding. */
2820 arm_exidx_unwind_sniffer (const struct frame_unwind
*self
,
2821 struct frame_info
*this_frame
,
2822 void **this_prologue_cache
)
2824 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
2825 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
2826 CORE_ADDR addr_in_block
, exidx_region
, func_start
;
2827 struct arm_prologue_cache
*cache
;
2830 /* See if we have an ARM exception table entry covering this address. */
2831 addr_in_block
= get_frame_address_in_block (this_frame
);
2832 entry
= arm_find_exidx_entry (addr_in_block
, &exidx_region
);
2836 /* The ARM exception table does not describe unwind information
2837 for arbitrary PC values, but is guaranteed to be correct only
2838 at call sites. We have to decide here whether we want to use
2839 ARM exception table information for this frame, or fall back
2840 to using prologue parsing. (Note that if we have DWARF CFI,
2841 this sniffer isn't even called -- CFI is always preferred.)
2843 Before we make this decision, however, we check whether we
2844 actually have *symbol* information for the current frame.
2845 If not, prologue parsing would not work anyway, so we might
2846 as well use the exception table and hope for the best. */
2847 if (find_pc_partial_function (addr_in_block
, NULL
, &func_start
, NULL
))
2851 /* If the next frame is "normal", we are at a call site in this
2852 frame, so exception information is guaranteed to be valid. */
2853 if (get_next_frame (this_frame
)
2854 && get_frame_type (get_next_frame (this_frame
)) == NORMAL_FRAME
)
2857 /* We also assume exception information is valid if we're currently
2858 blocked in a system call. The system library is supposed to
2859 ensure this, so that e.g. pthread cancellation works. */
2860 if (arm_frame_is_thumb (this_frame
))
2864 if (safe_read_memory_integer (get_frame_pc (this_frame
) - 2, 2,
2865 byte_order_for_code
, &insn
)
2866 && (insn
& 0xff00) == 0xdf00 /* svc */)
2873 if (safe_read_memory_integer (get_frame_pc (this_frame
) - 4, 4,
2874 byte_order_for_code
, &insn
)
2875 && (insn
& 0x0f000000) == 0x0f000000 /* svc */)
2879 /* Bail out if we don't know that exception information is valid. */
2883 /* The ARM exception index does not mark the *end* of the region
2884 covered by the entry, and some functions will not have any entry.
2885 To correctly recognize the end of the covered region, the linker
2886 should have inserted dummy records with a CANTUNWIND marker.
2888 Unfortunately, current versions of GNU ld do not reliably do
2889 this, and thus we may have found an incorrect entry above.
2890 As a (temporary) sanity check, we only use the entry if it
2891 lies *within* the bounds of the function. Note that this check
2892 might reject perfectly valid entries that just happen to cover
2893 multiple functions; therefore this check ought to be removed
2894 once the linker is fixed. */
2895 if (func_start
> exidx_region
)
2899 /* Decode the list of unwinding instructions into a prologue cache.
2900 Note that this may fail due to e.g. a "refuse to unwind" code. */
2901 cache
= arm_exidx_fill_cache (this_frame
, entry
);
2905 *this_prologue_cache
= cache
;
2909 struct frame_unwind arm_exidx_unwind
= {
2911 default_frame_unwind_stop_reason
,
2912 arm_prologue_this_id
,
2913 arm_prologue_prev_register
,
2915 arm_exidx_unwind_sniffer
2918 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2919 trampoline, return the target PC. Otherwise return 0.
2921 void call0a (char c, short s, int i, long l) {}
2925 (*pointer_to_call0a) (c, s, i, l);
2928 Instead of calling a stub library function _call_via_xx (xx is
2929 the register name), GCC may inline the trampoline in the object
2930 file as below (register r2 has the address of call0a).
2933 .type main, %function
2942 The trampoline 'bx r2' doesn't belong to main. */
2945 arm_skip_bx_reg (struct frame_info
*frame
, CORE_ADDR pc
)
2947 /* The heuristics of recognizing such trampoline is that FRAME is
2948 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2949 if (arm_frame_is_thumb (frame
))
2953 if (target_read_memory (pc
, buf
, 2) == 0)
2955 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
2956 enum bfd_endian byte_order_for_code
2957 = gdbarch_byte_order_for_code (gdbarch
);
2959 = extract_unsigned_integer (buf
, 2, byte_order_for_code
);
2961 if ((insn
& 0xff80) == 0x4700) /* bx <Rm> */
2964 = get_frame_register_unsigned (frame
, bits (insn
, 3, 6));
2966 /* Clear the LSB so that gdb core sets step-resume
2967 breakpoint at the right address. */
2968 return UNMAKE_THUMB_ADDR (dest
);
2976 static struct arm_prologue_cache
*
2977 arm_make_stub_cache (struct frame_info
*this_frame
)
2979 struct arm_prologue_cache
*cache
;
2981 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2982 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2984 cache
->prev_sp
= get_frame_register_unsigned (this_frame
, ARM_SP_REGNUM
);
2989 /* Our frame ID for a stub frame is the current SP and LR. */
2992 arm_stub_this_id (struct frame_info
*this_frame
,
2994 struct frame_id
*this_id
)
2996 struct arm_prologue_cache
*cache
;
2998 if (*this_cache
== NULL
)
2999 *this_cache
= arm_make_stub_cache (this_frame
);
3000 cache
= *this_cache
;
3002 *this_id
= frame_id_build (cache
->prev_sp
, get_frame_pc (this_frame
));
3006 arm_stub_unwind_sniffer (const struct frame_unwind
*self
,
3007 struct frame_info
*this_frame
,
3008 void **this_prologue_cache
)
3010 CORE_ADDR addr_in_block
;
3012 CORE_ADDR pc
, start_addr
;
3015 addr_in_block
= get_frame_address_in_block (this_frame
);
3016 pc
= get_frame_pc (this_frame
);
3017 if (in_plt_section (addr_in_block
)
3018 /* We also use the stub winder if the target memory is unreadable
3019 to avoid having the prologue unwinder trying to read it. */
3020 || target_read_memory (pc
, dummy
, 4) != 0)
3023 if (find_pc_partial_function (pc
, &name
, &start_addr
, NULL
) == 0
3024 && arm_skip_bx_reg (this_frame
, pc
) != 0)
3030 struct frame_unwind arm_stub_unwind
= {
3032 default_frame_unwind_stop_reason
,
3034 arm_prologue_prev_register
,
3036 arm_stub_unwind_sniffer
3039 /* Put here the code to store, into CACHE->saved_regs, the addresses
3040 of the saved registers of frame described by THIS_FRAME. CACHE is
3043 static struct arm_prologue_cache
*
3044 arm_m_exception_cache (struct frame_info
*this_frame
)
3046 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
3047 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
3048 struct arm_prologue_cache
*cache
;
3049 CORE_ADDR unwound_sp
;
3052 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
3053 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
3055 unwound_sp
= get_frame_register_unsigned (this_frame
,
3058 /* The hardware saves eight 32-bit words, comprising xPSR,
3059 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3060 "B1.5.6 Exception entry behavior" in
3061 "ARMv7-M Architecture Reference Manual". */
3062 cache
->saved_regs
[0].addr
= unwound_sp
;
3063 cache
->saved_regs
[1].addr
= unwound_sp
+ 4;
3064 cache
->saved_regs
[2].addr
= unwound_sp
+ 8;
3065 cache
->saved_regs
[3].addr
= unwound_sp
+ 12;
3066 cache
->saved_regs
[12].addr
= unwound_sp
+ 16;
3067 cache
->saved_regs
[14].addr
= unwound_sp
+ 20;
3068 cache
->saved_regs
[15].addr
= unwound_sp
+ 24;
3069 cache
->saved_regs
[ARM_PS_REGNUM
].addr
= unwound_sp
+ 28;
3071 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3072 aligner between the top of the 32-byte stack frame and the
3073 previous context's stack pointer. */
3074 cache
->prev_sp
= unwound_sp
+ 32;
3075 if (safe_read_memory_integer (unwound_sp
+ 28, 4, byte_order
, &xpsr
)
3076 && (xpsr
& (1 << 9)) != 0)
3077 cache
->prev_sp
+= 4;
3082 /* Implementation of function hook 'this_id' in
3083 'struct frame_uwnind'. */
3086 arm_m_exception_this_id (struct frame_info
*this_frame
,
3088 struct frame_id
*this_id
)
3090 struct arm_prologue_cache
*cache
;
3092 if (*this_cache
== NULL
)
3093 *this_cache
= arm_m_exception_cache (this_frame
);
3094 cache
= *this_cache
;
3096 /* Our frame ID for a stub frame is the current SP and LR. */
3097 *this_id
= frame_id_build (cache
->prev_sp
,
3098 get_frame_pc (this_frame
));
3101 /* Implementation of function hook 'prev_register' in
3102 'struct frame_uwnind'. */
3104 static struct value
*
3105 arm_m_exception_prev_register (struct frame_info
*this_frame
,
3109 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
3110 struct arm_prologue_cache
*cache
;
3112 if (*this_cache
== NULL
)
3113 *this_cache
= arm_m_exception_cache (this_frame
);
3114 cache
= *this_cache
;
3116 /* The value was already reconstructed into PREV_SP. */
3117 if (prev_regnum
== ARM_SP_REGNUM
)
3118 return frame_unwind_got_constant (this_frame
, prev_regnum
,
3121 return trad_frame_get_prev_register (this_frame
, cache
->saved_regs
,
3125 /* Implementation of function hook 'sniffer' in
3126 'struct frame_uwnind'. */
3129 arm_m_exception_unwind_sniffer (const struct frame_unwind
*self
,
3130 struct frame_info
*this_frame
,
3131 void **this_prologue_cache
)
3133 CORE_ADDR this_pc
= get_frame_pc (this_frame
);
3135 /* No need to check is_m; this sniffer is only registered for
3136 M-profile architectures. */
3138 /* Exception frames return to one of these magic PCs. Other values
3139 are not defined as of v7-M. See details in "B1.5.8 Exception
3140 return behavior" in "ARMv7-M Architecture Reference Manual". */
3141 if (this_pc
== 0xfffffff1 || this_pc
== 0xfffffff9
3142 || this_pc
== 0xfffffffd)
3148 /* Frame unwinder for M-profile exceptions. */
3150 struct frame_unwind arm_m_exception_unwind
=
3153 default_frame_unwind_stop_reason
,
3154 arm_m_exception_this_id
,
3155 arm_m_exception_prev_register
,
3157 arm_m_exception_unwind_sniffer
3161 arm_normal_frame_base (struct frame_info
*this_frame
, void **this_cache
)
3163 struct arm_prologue_cache
*cache
;
3165 if (*this_cache
== NULL
)
3166 *this_cache
= arm_make_prologue_cache (this_frame
);
3167 cache
= *this_cache
;
3169 return cache
->prev_sp
- cache
->framesize
;
3172 struct frame_base arm_normal_base
= {
3173 &arm_prologue_unwind
,
3174 arm_normal_frame_base
,
3175 arm_normal_frame_base
,
3176 arm_normal_frame_base
3179 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3180 dummy frame. The frame ID's base needs to match the TOS value
3181 saved by save_dummy_frame_tos() and returned from
3182 arm_push_dummy_call, and the PC needs to match the dummy frame's
3185 static struct frame_id
3186 arm_dummy_id (struct gdbarch
*gdbarch
, struct frame_info
*this_frame
)
3188 return frame_id_build (get_frame_register_unsigned (this_frame
,
3190 get_frame_pc (this_frame
));
3193 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3194 be used to construct the previous frame's ID, after looking up the
3195 containing function). */
3198 arm_unwind_pc (struct gdbarch
*gdbarch
, struct frame_info
*this_frame
)
3201 pc
= frame_unwind_register_unsigned (this_frame
, ARM_PC_REGNUM
);
3202 return arm_addr_bits_remove (gdbarch
, pc
);
3206 arm_unwind_sp (struct gdbarch
*gdbarch
, struct frame_info
*this_frame
)
3208 return frame_unwind_register_unsigned (this_frame
, ARM_SP_REGNUM
);
3211 static struct value
*
3212 arm_dwarf2_prev_register (struct frame_info
*this_frame
, void **this_cache
,
3215 struct gdbarch
* gdbarch
= get_frame_arch (this_frame
);
3217 ULONGEST t_bit
= arm_psr_thumb_bit (gdbarch
);
3222 /* The PC is normally copied from the return column, which
3223 describes saves of LR. However, that version may have an
3224 extra bit set to indicate Thumb state. The bit is not
3226 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
3227 return frame_unwind_got_constant (this_frame
, regnum
,
3228 arm_addr_bits_remove (gdbarch
, lr
));
3231 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3232 cpsr
= get_frame_register_unsigned (this_frame
, regnum
);
3233 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
3234 if (IS_THUMB_ADDR (lr
))
3238 return frame_unwind_got_constant (this_frame
, regnum
, cpsr
);
3241 internal_error (__FILE__
, __LINE__
,
3242 _("Unexpected register %d"), regnum
);
3247 arm_dwarf2_frame_init_reg (struct gdbarch
*gdbarch
, int regnum
,
3248 struct dwarf2_frame_state_reg
*reg
,
3249 struct frame_info
*this_frame
)
3255 reg
->how
= DWARF2_FRAME_REG_FN
;
3256 reg
->loc
.fn
= arm_dwarf2_prev_register
;
3259 reg
->how
= DWARF2_FRAME_REG_CFA
;
3264 /* Return true if we are in the function's epilogue, i.e. after the
3265 instruction that destroyed the function's stack frame. */
3268 thumb_in_function_epilogue_p (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3270 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
3271 unsigned int insn
, insn2
;
3272 int found_return
= 0, found_stack_adjust
= 0;
3273 CORE_ADDR func_start
, func_end
;
3277 if (!find_pc_partial_function (pc
, NULL
, &func_start
, &func_end
))
3280 /* The epilogue is a sequence of instructions along the following lines:
3282 - add stack frame size to SP or FP
3283 - [if frame pointer used] restore SP from FP
3284 - restore registers from SP [may include PC]
3285 - a return-type instruction [if PC wasn't already restored]
3287 In a first pass, we scan forward from the current PC and verify the
3288 instructions we find as compatible with this sequence, ending in a
3291 However, this is not sufficient to distinguish indirect function calls
3292 within a function from indirect tail calls in the epilogue in some cases.
3293 Therefore, if we didn't already find any SP-changing instruction during
3294 forward scan, we add a backward scanning heuristic to ensure we actually
3295 are in the epilogue. */
3298 while (scan_pc
< func_end
&& !found_return
)
3300 if (target_read_memory (scan_pc
, buf
, 2))
3304 insn
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3306 if ((insn
& 0xff80) == 0x4700) /* bx <Rm> */
3308 else if (insn
== 0x46f7) /* mov pc, lr */
3310 else if (thumb_instruction_restores_sp (insn
))
3312 if ((insn
& 0xff00) == 0xbd00) /* pop <registers, PC> */
3315 else if (thumb_insn_size (insn
) == 4) /* 32-bit Thumb-2 instruction */
3317 if (target_read_memory (scan_pc
, buf
, 2))
3321 insn2
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3323 if (insn
== 0xe8bd) /* ldm.w sp!, <registers> */
3325 if (insn2
& 0x8000) /* <registers> include PC. */
3328 else if (insn
== 0xf85d /* ldr.w <Rt>, [sp], #4 */
3329 && (insn2
& 0x0fff) == 0x0b04)
3331 if ((insn2
& 0xf000) == 0xf000) /* <Rt> is PC. */
3334 else if ((insn
& 0xffbf) == 0xecbd /* vldm sp!, <list> */
3335 && (insn2
& 0x0e00) == 0x0a00)
3347 /* Since any instruction in the epilogue sequence, with the possible
3348 exception of return itself, updates the stack pointer, we need to
3349 scan backwards for at most one instruction. Try either a 16-bit or
3350 a 32-bit instruction. This is just a heuristic, so we do not worry
3351 too much about false positives. */
3353 if (pc
- 4 < func_start
)
3355 if (target_read_memory (pc
- 4, buf
, 4))
3358 insn
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3359 insn2
= extract_unsigned_integer (buf
+ 2, 2, byte_order_for_code
);
3361 if (thumb_instruction_restores_sp (insn2
))
3362 found_stack_adjust
= 1;
3363 else if (insn
== 0xe8bd) /* ldm.w sp!, <registers> */
3364 found_stack_adjust
= 1;
3365 else if (insn
== 0xf85d /* ldr.w <Rt>, [sp], #4 */
3366 && (insn2
& 0x0fff) == 0x0b04)
3367 found_stack_adjust
= 1;
3368 else if ((insn
& 0xffbf) == 0xecbd /* vldm sp!, <list> */
3369 && (insn2
& 0x0e00) == 0x0a00)
3370 found_stack_adjust
= 1;
3372 return found_stack_adjust
;
3375 /* Return true if we are in the function's epilogue, i.e. after the
3376 instruction that destroyed the function's stack frame. */
3379 arm_in_function_epilogue_p (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3381 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
3384 CORE_ADDR func_start
, func_end
;
3386 if (arm_pc_is_thumb (gdbarch
, pc
))
3387 return thumb_in_function_epilogue_p (gdbarch
, pc
);
3389 if (!find_pc_partial_function (pc
, NULL
, &func_start
, &func_end
))
3392 /* We are in the epilogue if the previous instruction was a stack
3393 adjustment and the next instruction is a possible return (bx, mov
3394 pc, or pop). We could have to scan backwards to find the stack
3395 adjustment, or forwards to find the return, but this is a decent
3396 approximation. First scan forwards. */
3399 insn
= read_memory_unsigned_integer (pc
, 4, byte_order_for_code
);
3400 if (bits (insn
, 28, 31) != INST_NV
)
3402 if ((insn
& 0x0ffffff0) == 0x012fff10)
3405 else if ((insn
& 0x0ffffff0) == 0x01a0f000)
3408 else if ((insn
& 0x0fff0000) == 0x08bd0000
3409 && (insn
& 0x0000c000) != 0)
3410 /* POP (LDMIA), including PC or LR. */
3417 /* Scan backwards. This is just a heuristic, so do not worry about
3418 false positives from mode changes. */
3420 if (pc
< func_start
+ 4)
3423 insn
= read_memory_unsigned_integer (pc
- 4, 4, byte_order_for_code
);
3424 if (arm_instruction_restores_sp (insn
))
/* When arguments must be pushed onto the stack, they go on in reverse
   order.  The code below implements a FILO (stack) to do this.  */

struct stack_item
{
  int len;			/* Size of DATA in bytes.  */
  struct stack_item *prev;	/* Next-older item (stack link).  */
  void *data;			/* Owned copy of the argument bytes.  */
};
3441 static struct stack_item
*
3442 push_stack_item (struct stack_item
*prev
, const void *contents
, int len
)
3444 struct stack_item
*si
;
3445 si
= xmalloc (sizeof (struct stack_item
));
3446 si
->data
= xmalloc (len
);
3449 memcpy (si
->data
, contents
, len
);
3453 static struct stack_item
*
3454 pop_stack_item (struct stack_item
*si
)
3456 struct stack_item
*dead
= si
;
3464 /* Return the alignment (in bytes) of the given type. */
3467 arm_type_align (struct type
*t
)
3473 t
= check_typedef (t
);
3474 switch (TYPE_CODE (t
))
3477 /* Should never happen. */
3478 internal_error (__FILE__
, __LINE__
, _("unknown type alignment"));
3482 case TYPE_CODE_ENUM
:
3486 case TYPE_CODE_RANGE
:
3488 case TYPE_CODE_CHAR
:
3489 case TYPE_CODE_BOOL
:
3490 return TYPE_LENGTH (t
);
3492 case TYPE_CODE_ARRAY
:
3493 case TYPE_CODE_COMPLEX
:
3494 /* TODO: What about vector types? */
3495 return arm_type_align (TYPE_TARGET_TYPE (t
));
3497 case TYPE_CODE_STRUCT
:
3498 case TYPE_CODE_UNION
:
3500 for (n
= 0; n
< TYPE_NFIELDS (t
); n
++)
3502 falign
= arm_type_align (TYPE_FIELD_TYPE (t
, n
));
/* Possible base types for a candidate for passing and returning in
   VFP registers.  */

enum arm_vfp_cprc_base_type
{
  VFP_CPRC_UNKNOWN,
  VFP_CPRC_SINGLE,
  VFP_CPRC_DOUBLE,
  VFP_CPRC_VEC64,
  VFP_CPRC_VEC128
};
3522 /* The length of one element of base type B. */
3525 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b
)
3529 case VFP_CPRC_SINGLE
:
3531 case VFP_CPRC_DOUBLE
:
3533 case VFP_CPRC_VEC64
:
3535 case VFP_CPRC_VEC128
:
3538 internal_error (__FILE__
, __LINE__
, _("Invalid VFP CPRC type: %d."),
3543 /* The character ('s', 'd' or 'q') for the type of VFP register used
3544 for passing base type B. */
3547 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b
)
3551 case VFP_CPRC_SINGLE
:
3553 case VFP_CPRC_DOUBLE
:
3555 case VFP_CPRC_VEC64
:
3557 case VFP_CPRC_VEC128
:
3560 internal_error (__FILE__
, __LINE__
, _("Invalid VFP CPRC type: %d."),
3565 /* Determine whether T may be part of a candidate for passing and
3566 returning in VFP registers, ignoring the limit on the total number
3567 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3568 classification of the first valid component found; if it is not
3569 VFP_CPRC_UNKNOWN, all components must have the same classification
3570 as *BASE_TYPE. If it is found that T contains a type not permitted
3571 for passing and returning in VFP registers, a type differently
3572 classified from *BASE_TYPE, or two types differently classified
3573 from each other, return -1, otherwise return the total number of
3574 base-type elements found (possibly 0 in an empty structure or
3575 array). Vector types are not currently supported, matching the
3576 generic AAPCS support. */
3579 arm_vfp_cprc_sub_candidate (struct type
*t
,
3580 enum arm_vfp_cprc_base_type
*base_type
)
3582 t
= check_typedef (t
);
3583 switch (TYPE_CODE (t
))
3586 switch (TYPE_LENGTH (t
))
3589 if (*base_type
== VFP_CPRC_UNKNOWN
)
3590 *base_type
= VFP_CPRC_SINGLE
;
3591 else if (*base_type
!= VFP_CPRC_SINGLE
)
3596 if (*base_type
== VFP_CPRC_UNKNOWN
)
3597 *base_type
= VFP_CPRC_DOUBLE
;
3598 else if (*base_type
!= VFP_CPRC_DOUBLE
)
3607 case TYPE_CODE_COMPLEX
:
3608 /* Arguments of complex T where T is one of the types float or
3609 double get treated as if they are implemented as:
3618 switch (TYPE_LENGTH (t
))
3621 if (*base_type
== VFP_CPRC_UNKNOWN
)
3622 *base_type
= VFP_CPRC_SINGLE
;
3623 else if (*base_type
!= VFP_CPRC_SINGLE
)
3628 if (*base_type
== VFP_CPRC_UNKNOWN
)
3629 *base_type
= VFP_CPRC_DOUBLE
;
3630 else if (*base_type
!= VFP_CPRC_DOUBLE
)
3639 case TYPE_CODE_ARRAY
:
3643 count
= arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t
), base_type
);
3646 if (TYPE_LENGTH (t
) == 0)
3648 gdb_assert (count
== 0);
3651 else if (count
== 0)
3653 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3654 gdb_assert ((TYPE_LENGTH (t
) % unitlen
) == 0);
3655 return TYPE_LENGTH (t
) / unitlen
;
3659 case TYPE_CODE_STRUCT
:
3664 for (i
= 0; i
< TYPE_NFIELDS (t
); i
++)
3666 int sub_count
= arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t
, i
),
3668 if (sub_count
== -1)
3672 if (TYPE_LENGTH (t
) == 0)
3674 gdb_assert (count
== 0);
3677 else if (count
== 0)
3679 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3680 if (TYPE_LENGTH (t
) != unitlen
* count
)
3685 case TYPE_CODE_UNION
:
3690 for (i
= 0; i
< TYPE_NFIELDS (t
); i
++)
3692 int sub_count
= arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t
, i
),
3694 if (sub_count
== -1)
3696 count
= (count
> sub_count
? count
: sub_count
);
3698 if (TYPE_LENGTH (t
) == 0)
3700 gdb_assert (count
== 0);
3703 else if (count
== 0)
3705 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3706 if (TYPE_LENGTH (t
) != unitlen
* count
)
3718 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3719 if passed to or returned from a non-variadic function with the VFP
3720 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3721 *BASE_TYPE to the base type for T and *COUNT to the number of
3722 elements of that base type before returning. */
3725 arm_vfp_call_candidate (struct type
*t
, enum arm_vfp_cprc_base_type
*base_type
,
3728 enum arm_vfp_cprc_base_type b
= VFP_CPRC_UNKNOWN
;
3729 int c
= arm_vfp_cprc_sub_candidate (t
, &b
);
3730 if (c
<= 0 || c
> 4)
3737 /* Return 1 if the VFP ABI should be used for passing arguments to and
3738 returning values from a function of type FUNC_TYPE, 0
3742 arm_vfp_abi_for_function (struct gdbarch
*gdbarch
, struct type
*func_type
)
3744 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
3745 /* Variadic functions always use the base ABI. Assume that functions
3746 without debug info are not variadic. */
3747 if (func_type
&& TYPE_VARARGS (check_typedef (func_type
)))
3749 /* The VFP ABI is only supported as a variant of AAPCS. */
3750 if (tdep
->arm_abi
!= ARM_ABI_AAPCS
)
3752 return gdbarch_tdep (gdbarch
)->fp_model
== ARM_FLOAT_VFP
;
3755 /* We currently only support passing parameters in integer registers, which
3756 conforms with GCC's default model, and VFP argument passing following
3757 the VFP variant of AAPCS. Several other variants exist and
3758 we should probably support some of them based on the selected ABI. */
3761 arm_push_dummy_call (struct gdbarch
*gdbarch
, struct value
*function
,
3762 struct regcache
*regcache
, CORE_ADDR bp_addr
, int nargs
,
3763 struct value
**args
, CORE_ADDR sp
, int struct_return
,
3764 CORE_ADDR struct_addr
)
3766 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
3770 struct stack_item
*si
= NULL
;
3773 unsigned vfp_regs_free
= (1 << 16) - 1;
3775 /* Determine the type of this function and whether the VFP ABI
3777 ftype
= check_typedef (value_type (function
));
3778 if (TYPE_CODE (ftype
) == TYPE_CODE_PTR
)
3779 ftype
= check_typedef (TYPE_TARGET_TYPE (ftype
));
3780 use_vfp_abi
= arm_vfp_abi_for_function (gdbarch
, ftype
);
3782 /* Set the return address. For the ARM, the return breakpoint is
3783 always at BP_ADDR. */
3784 if (arm_pc_is_thumb (gdbarch
, bp_addr
))
3786 regcache_cooked_write_unsigned (regcache
, ARM_LR_REGNUM
, bp_addr
);
3788 /* Walk through the list of args and determine how large a temporary
3789 stack is required. Need to take care here as structs may be
3790 passed on the stack, and we have to push them. */
3793 argreg
= ARM_A1_REGNUM
;
3796 /* The struct_return pointer occupies the first parameter
3797 passing register. */
3801 fprintf_unfiltered (gdb_stdlog
, "struct return in %s = %s\n",
3802 gdbarch_register_name (gdbarch
, argreg
),
3803 paddress (gdbarch
, struct_addr
));
3804 regcache_cooked_write_unsigned (regcache
, argreg
, struct_addr
);
3808 for (argnum
= 0; argnum
< nargs
; argnum
++)
3811 struct type
*arg_type
;
3812 struct type
*target_type
;
3813 enum type_code typecode
;
3814 const bfd_byte
*val
;
3816 enum arm_vfp_cprc_base_type vfp_base_type
;
3818 int may_use_core_reg
= 1;
3820 arg_type
= check_typedef (value_type (args
[argnum
]));
3821 len
= TYPE_LENGTH (arg_type
);
3822 target_type
= TYPE_TARGET_TYPE (arg_type
);
3823 typecode
= TYPE_CODE (arg_type
);
3824 val
= value_contents (args
[argnum
]);
3826 align
= arm_type_align (arg_type
);
3827 /* Round alignment up to a whole number of words. */
3828 align
= (align
+ INT_REGISTER_SIZE
- 1) & ~(INT_REGISTER_SIZE
- 1);
3829 /* Different ABIs have different maximum alignments. */
3830 if (gdbarch_tdep (gdbarch
)->arm_abi
== ARM_ABI_APCS
)
3832 /* The APCS ABI only requires word alignment. */
3833 align
= INT_REGISTER_SIZE
;
3837 /* The AAPCS requires at most doubleword alignment. */
3838 if (align
> INT_REGISTER_SIZE
* 2)
3839 align
= INT_REGISTER_SIZE
* 2;
3843 && arm_vfp_call_candidate (arg_type
, &vfp_base_type
,
3851 /* Because this is a CPRC it cannot go in a core register or
3852 cause a core register to be skipped for alignment.
3853 Either it goes in VFP registers and the rest of this loop
3854 iteration is skipped for this argument, or it goes on the
3855 stack (and the stack alignment code is correct for this
3857 may_use_core_reg
= 0;
3859 unit_length
= arm_vfp_cprc_unit_length (vfp_base_type
);
3860 shift
= unit_length
/ 4;
3861 mask
= (1 << (shift
* vfp_base_count
)) - 1;
3862 for (regno
= 0; regno
< 16; regno
+= shift
)
3863 if (((vfp_regs_free
>> regno
) & mask
) == mask
)
3872 vfp_regs_free
&= ~(mask
<< regno
);
3873 reg_scaled
= regno
/ shift
;
3874 reg_char
= arm_vfp_cprc_reg_char (vfp_base_type
);
3875 for (i
= 0; i
< vfp_base_count
; i
++)
3879 if (reg_char
== 'q')
3880 arm_neon_quad_write (gdbarch
, regcache
, reg_scaled
+ i
,
3881 val
+ i
* unit_length
);
3884 xsnprintf (name_buf
, sizeof (name_buf
), "%c%d",
3885 reg_char
, reg_scaled
+ i
);
3886 regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
3888 regcache_cooked_write (regcache
, regnum
,
3889 val
+ i
* unit_length
);
3896 /* This CPRC could not go in VFP registers, so all VFP
3897 registers are now marked as used. */
3902 /* Push stack padding for dowubleword alignment. */
3903 if (nstack
& (align
- 1))
3905 si
= push_stack_item (si
, val
, INT_REGISTER_SIZE
);
3906 nstack
+= INT_REGISTER_SIZE
;
3909 /* Doubleword aligned quantities must go in even register pairs. */
3910 if (may_use_core_reg
3911 && argreg
<= ARM_LAST_ARG_REGNUM
3912 && align
> INT_REGISTER_SIZE
3916 /* If the argument is a pointer to a function, and it is a
3917 Thumb function, create a LOCAL copy of the value and set
3918 the THUMB bit in it. */
3919 if (TYPE_CODE_PTR
== typecode
3920 && target_type
!= NULL
3921 && TYPE_CODE_FUNC
== TYPE_CODE (check_typedef (target_type
)))
3923 CORE_ADDR regval
= extract_unsigned_integer (val
, len
, byte_order
);
3924 if (arm_pc_is_thumb (gdbarch
, regval
))
3926 bfd_byte
*copy
= alloca (len
);
3927 store_unsigned_integer (copy
, len
, byte_order
,
3928 MAKE_THUMB_ADDR (regval
));
3933 /* Copy the argument to general registers or the stack in
3934 register-sized pieces. Large arguments are split between
3935 registers and stack. */
3938 int partial_len
= len
< INT_REGISTER_SIZE
? len
: INT_REGISTER_SIZE
;
3940 if (may_use_core_reg
&& argreg
<= ARM_LAST_ARG_REGNUM
)
3942 /* The argument is being passed in a general purpose
3945 = extract_unsigned_integer (val
, partial_len
, byte_order
);
3946 if (byte_order
== BFD_ENDIAN_BIG
)
3947 regval
<<= (INT_REGISTER_SIZE
- partial_len
) * 8;
3949 fprintf_unfiltered (gdb_stdlog
, "arg %d in %s = 0x%s\n",
3951 gdbarch_register_name
3953 phex (regval
, INT_REGISTER_SIZE
));
3954 regcache_cooked_write_unsigned (regcache
, argreg
, regval
);
3959 /* Push the arguments onto the stack. */
3961 fprintf_unfiltered (gdb_stdlog
, "arg %d @ sp + %d\n",
3963 si
= push_stack_item (si
, val
, INT_REGISTER_SIZE
);
3964 nstack
+= INT_REGISTER_SIZE
;
3971 /* If we have an odd number of words to push, then decrement the stack
3972 by one word now, so first stack argument will be dword aligned. */
3979 write_memory (sp
, si
->data
, si
->len
);
3980 si
= pop_stack_item (si
);
3983 /* Finally, update teh SP register. */
3984 regcache_cooked_write_unsigned (regcache
, ARM_SP_REGNUM
, sp
);
3990 /* Always align the frame to an 8-byte boundary. This is required on
3991 some platforms and harmless on the rest. */
3994 arm_frame_align (struct gdbarch
*gdbarch
, CORE_ADDR sp
)
3996 /* Align the stack to eight bytes. */
3997 return sp
& ~ (CORE_ADDR
) 7;
/* Print the names of the FPA status-register exception flags that are
   set in the low five bits of FLAGS, followed by a newline.  */

static void
print_fpu_flags (struct ui_file *file, int flags)
{
  if (flags & (1 << 0))
    fputs_filtered ("IVO ", file);	/* invalid operation */
  if (flags & (1 << 1))
    fputs_filtered ("DVZ ", file);	/* divide by zero */
  if (flags & (1 << 2))
    fputs_filtered ("OFL ", file);	/* overflow */
  if (flags & (1 << 3))
    fputs_filtered ("UFL ", file);	/* underflow */
  if (flags & (1 << 4))
    fputs_filtered ("INX ", file);	/* inexact */
  fputc_filtered ('\n', file);
}
4016 /* Print interesting information about the floating point processor
4017 (if present) or emulator. */
4019 arm_print_float_info (struct gdbarch
*gdbarch
, struct ui_file
*file
,
4020 struct frame_info
*frame
, const char *args
)
4022 unsigned long status
= get_frame_register_unsigned (frame
, ARM_FPS_REGNUM
);
4025 type
= (status
>> 24) & 127;
4026 if (status
& (1 << 31))
4027 fprintf_filtered (file
, _("Hardware FPU type %d\n"), type
);
4029 fprintf_filtered (file
, _("Software FPU type %d\n"), type
);
4030 /* i18n: [floating point unit] mask */
4031 fputs_filtered (_("mask: "), file
);
4032 print_fpu_flags (file
, status
>> 16);
4033 /* i18n: [floating point unit] flags */
4034 fputs_filtered (_("flags: "), file
);
4035 print_fpu_flags (file
, status
);
4038 /* Construct the ARM extended floating point type. */
4039 static struct type
*
4040 arm_ext_type (struct gdbarch
*gdbarch
)
4042 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
4044 if (!tdep
->arm_ext_type
)
4046 = arch_float_type (gdbarch
, -1, "builtin_type_arm_ext",
4047 floatformats_arm_ext
);
4049 return tdep
->arm_ext_type
;
4052 static struct type
*
4053 arm_neon_double_type (struct gdbarch
*gdbarch
)
4055 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
4057 if (tdep
->neon_double_type
== NULL
)
4059 struct type
*t
, *elem
;
4061 t
= arch_composite_type (gdbarch
, "__gdb_builtin_type_neon_d",
4063 elem
= builtin_type (gdbarch
)->builtin_uint8
;
4064 append_composite_type_field (t
, "u8", init_vector_type (elem
, 8));
4065 elem
= builtin_type (gdbarch
)->builtin_uint16
;
4066 append_composite_type_field (t
, "u16", init_vector_type (elem
, 4));
4067 elem
= builtin_type (gdbarch
)->builtin_uint32
;
4068 append_composite_type_field (t
, "u32", init_vector_type (elem
, 2));
4069 elem
= builtin_type (gdbarch
)->builtin_uint64
;
4070 append_composite_type_field (t
, "u64", elem
);
4071 elem
= builtin_type (gdbarch
)->builtin_float
;
4072 append_composite_type_field (t
, "f32", init_vector_type (elem
, 2));
4073 elem
= builtin_type (gdbarch
)->builtin_double
;
4074 append_composite_type_field (t
, "f64", elem
);
4076 TYPE_VECTOR (t
) = 1;
4077 TYPE_NAME (t
) = "neon_d";
4078 tdep
->neon_double_type
= t
;
4081 return tdep
->neon_double_type
;
4084 /* FIXME: The vector types are not correctly ordered on big-endian
4085 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4086 bits of d0 - regardless of what unit size is being held in d0. So
4087 the offset of the first uint8 in d0 is 7, but the offset of the
4088 first float is 4. This code works as-is for little-endian
4091 static struct type
*
4092 arm_neon_quad_type (struct gdbarch
*gdbarch
)
4094 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
4096 if (tdep
->neon_quad_type
== NULL
)
4098 struct type
*t
, *elem
;
4100 t
= arch_composite_type (gdbarch
, "__gdb_builtin_type_neon_q",
4102 elem
= builtin_type (gdbarch
)->builtin_uint8
;
4103 append_composite_type_field (t
, "u8", init_vector_type (elem
, 16));
4104 elem
= builtin_type (gdbarch
)->builtin_uint16
;
4105 append_composite_type_field (t
, "u16", init_vector_type (elem
, 8));
4106 elem
= builtin_type (gdbarch
)->builtin_uint32
;
4107 append_composite_type_field (t
, "u32", init_vector_type (elem
, 4));
4108 elem
= builtin_type (gdbarch
)->builtin_uint64
;
4109 append_composite_type_field (t
, "u64", init_vector_type (elem
, 2));
4110 elem
= builtin_type (gdbarch
)->builtin_float
;
4111 append_composite_type_field (t
, "f32", init_vector_type (elem
, 4));
4112 elem
= builtin_type (gdbarch
)->builtin_double
;
4113 append_composite_type_field (t
, "f64", init_vector_type (elem
, 2));
4115 TYPE_VECTOR (t
) = 1;
4116 TYPE_NAME (t
) = "neon_q";
4117 tdep
->neon_quad_type
= t
;
4120 return tdep
->neon_quad_type
;
4123 /* Return the GDB type object for the "standard" data type of data in
4126 static struct type
*
4127 arm_register_type (struct gdbarch
*gdbarch
, int regnum
)
4129 int num_regs
= gdbarch_num_regs (gdbarch
);
4131 if (gdbarch_tdep (gdbarch
)->have_vfp_pseudos
4132 && regnum
>= num_regs
&& regnum
< num_regs
+ 32)
4133 return builtin_type (gdbarch
)->builtin_float
;
4135 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
4136 && regnum
>= num_regs
+ 32 && regnum
< num_regs
+ 32 + 16)
4137 return arm_neon_quad_type (gdbarch
);
4139 /* If the target description has register information, we are only
4140 in this function so that we can override the types of
4141 double-precision registers for NEON. */
4142 if (tdesc_has_registers (gdbarch_target_desc (gdbarch
)))
4144 struct type
*t
= tdesc_register_type (gdbarch
, regnum
);
4146 if (regnum
>= ARM_D0_REGNUM
&& regnum
< ARM_D0_REGNUM
+ 32
4147 && TYPE_CODE (t
) == TYPE_CODE_FLT
4148 && gdbarch_tdep (gdbarch
)->have_neon
)
4149 return arm_neon_double_type (gdbarch
);
4154 if (regnum
>= ARM_F0_REGNUM
&& regnum
< ARM_F0_REGNUM
+ NUM_FREGS
)
4156 if (!gdbarch_tdep (gdbarch
)->have_fpa_registers
)
4157 return builtin_type (gdbarch
)->builtin_void
;
4159 return arm_ext_type (gdbarch
);
4161 else if (regnum
== ARM_SP_REGNUM
)
4162 return builtin_type (gdbarch
)->builtin_data_ptr
;
4163 else if (regnum
== ARM_PC_REGNUM
)
4164 return builtin_type (gdbarch
)->builtin_func_ptr
;
4165 else if (regnum
>= ARRAY_SIZE (arm_register_names
))
4166 /* These registers are only supported on targets which supply
4167 an XML description. */
4168 return builtin_type (gdbarch
)->builtin_int0
;
4170 return builtin_type (gdbarch
)->builtin_uint32
;
4173 /* Map a DWARF register REGNUM onto the appropriate GDB register
4177 arm_dwarf_reg_to_regnum (struct gdbarch
*gdbarch
, int reg
)
4179 /* Core integer regs. */
4180 if (reg
>= 0 && reg
<= 15)
4183 /* Legacy FPA encoding. These were once used in a way which
4184 overlapped with VFP register numbering, so their use is
4185 discouraged, but GDB doesn't support the ARM toolchain
4186 which used them for VFP. */
4187 if (reg
>= 16 && reg
<= 23)
4188 return ARM_F0_REGNUM
+ reg
- 16;
4190 /* New assignments for the FPA registers. */
4191 if (reg
>= 96 && reg
<= 103)
4192 return ARM_F0_REGNUM
+ reg
- 96;
4194 /* WMMX register assignments. */
4195 if (reg
>= 104 && reg
<= 111)
4196 return ARM_WCGR0_REGNUM
+ reg
- 104;
4198 if (reg
>= 112 && reg
<= 127)
4199 return ARM_WR0_REGNUM
+ reg
- 112;
4201 if (reg
>= 192 && reg
<= 199)
4202 return ARM_WC0_REGNUM
+ reg
- 192;
4204 /* VFP v2 registers. A double precision value is actually
4205 in d1 rather than s2, but the ABI only defines numbering
4206 for the single precision registers. This will "just work"
4207 in GDB for little endian targets (we'll read eight bytes,
4208 starting in s0 and then progressing to s1), but will be
4209 reversed on big endian targets with VFP. This won't
4210 be a problem for the new Neon quad registers; you're supposed
4211 to use DW_OP_piece for those. */
4212 if (reg
>= 64 && reg
<= 95)
4216 xsnprintf (name_buf
, sizeof (name_buf
), "s%d", reg
- 64);
4217 return user_reg_map_name_to_regnum (gdbarch
, name_buf
,
4221 /* VFP v3 / Neon registers. This range is also used for VFP v2
4222 registers, except that it now describes d0 instead of s0. */
4223 if (reg
>= 256 && reg
<= 287)
4227 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", reg
- 256);
4228 return user_reg_map_name_to_regnum (gdbarch
, name_buf
,
4235 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4237 arm_register_sim_regno (struct gdbarch
*gdbarch
, int regnum
)
4240 gdb_assert (reg
>= 0 && reg
< gdbarch_num_regs (gdbarch
));
4242 if (regnum
>= ARM_WR0_REGNUM
&& regnum
<= ARM_WR15_REGNUM
)
4243 return regnum
- ARM_WR0_REGNUM
+ SIM_ARM_IWMMXT_COP0R0_REGNUM
;
4245 if (regnum
>= ARM_WC0_REGNUM
&& regnum
<= ARM_WC7_REGNUM
)
4246 return regnum
- ARM_WC0_REGNUM
+ SIM_ARM_IWMMXT_COP1R0_REGNUM
;
4248 if (regnum
>= ARM_WCGR0_REGNUM
&& regnum
<= ARM_WCGR7_REGNUM
)
4249 return regnum
- ARM_WCGR0_REGNUM
+ SIM_ARM_IWMMXT_COP1R8_REGNUM
;
4251 if (reg
< NUM_GREGS
)
4252 return SIM_ARM_R0_REGNUM
+ reg
;
4255 if (reg
< NUM_FREGS
)
4256 return SIM_ARM_FP0_REGNUM
+ reg
;
4259 if (reg
< NUM_SREGS
)
4260 return SIM_ARM_FPS_REGNUM
+ reg
;
4263 internal_error (__FILE__
, __LINE__
, _("Bad REGNUM %d"), regnum
);
4266 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4267 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4268 It is thought that this is is the floating-point register format on
4269 little-endian systems. */
4272 convert_from_extended (const struct floatformat
*fmt
, const void *ptr
,
4273 void *dbl
, int endianess
)
4277 if (endianess
== BFD_ENDIAN_BIG
)
4278 floatformat_to_doublest (&floatformat_arm_ext_big
, ptr
, &d
);
4280 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword
,
4282 floatformat_from_doublest (fmt
, &d
, dbl
);
4286 convert_to_extended (const struct floatformat
*fmt
, void *dbl
, const void *ptr
,
4291 floatformat_to_doublest (fmt
, ptr
, &d
);
4292 if (endianess
== BFD_ENDIAN_BIG
)
4293 floatformat_from_doublest (&floatformat_arm_ext_big
, &d
, dbl
);
4295 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword
,
4300 condition_true (unsigned long cond
, unsigned long status_reg
)
4302 if (cond
== INST_AL
|| cond
== INST_NV
)
4308 return ((status_reg
& FLAG_Z
) != 0);
4310 return ((status_reg
& FLAG_Z
) == 0);
4312 return ((status_reg
& FLAG_C
) != 0);
4314 return ((status_reg
& FLAG_C
) == 0);
4316 return ((status_reg
& FLAG_N
) != 0);
4318 return ((status_reg
& FLAG_N
) == 0);
4320 return ((status_reg
& FLAG_V
) != 0);
4322 return ((status_reg
& FLAG_V
) == 0);
4324 return ((status_reg
& (FLAG_C
| FLAG_Z
)) == FLAG_C
);
4326 return ((status_reg
& (FLAG_C
| FLAG_Z
)) != FLAG_C
);
4328 return (((status_reg
& FLAG_N
) == 0) == ((status_reg
& FLAG_V
) == 0));
4330 return (((status_reg
& FLAG_N
) == 0) != ((status_reg
& FLAG_V
) == 0));
4332 return (((status_reg
& FLAG_Z
) == 0)
4333 && (((status_reg
& FLAG_N
) == 0)
4334 == ((status_reg
& FLAG_V
) == 0)));
4336 return (((status_reg
& FLAG_Z
) != 0)
4337 || (((status_reg
& FLAG_N
) == 0)
4338 != ((status_reg
& FLAG_V
) == 0)));
4343 static unsigned long
4344 shifted_reg_val (struct frame_info
*frame
, unsigned long inst
, int carry
,
4345 unsigned long pc_val
, unsigned long status_reg
)
4347 unsigned long res
, shift
;
4348 int rm
= bits (inst
, 0, 3);
4349 unsigned long shifttype
= bits (inst
, 5, 6);
4353 int rs
= bits (inst
, 8, 11);
4354 shift
= (rs
== 15 ? pc_val
+ 8
4355 : get_frame_register_unsigned (frame
, rs
)) & 0xFF;
4358 shift
= bits (inst
, 7, 11);
4360 res
= (rm
== ARM_PC_REGNUM
4361 ? (pc_val
+ (bit (inst
, 4) ? 12 : 8))
4362 : get_frame_register_unsigned (frame
, rm
));
4367 res
= shift
>= 32 ? 0 : res
<< shift
;
4371 res
= shift
>= 32 ? 0 : res
>> shift
;
4377 res
= ((res
& 0x80000000L
)
4378 ? ~((~res
) >> shift
) : res
>> shift
);
4381 case 3: /* ROR/RRX */
4384 res
= (res
>> 1) | (carry
? 0x80000000L
: 0);
4386 res
= (res
>> shift
) | (res
<< (32 - shift
));
4390 return res
& 0xffffffff;
/* Return number of 1-bits in VAL.  */

static int
bitcount (unsigned long val)
{
  int nbits;

  /* Kernighan's method: each iteration clears the lowest set bit.  */
  for (nbits = 0; val != 0; nbits++)
    val &= val - 1;		/* Delete rightmost 1-bit in val.  */
  return nbits;
}
/* Return the size in bytes of the complete Thumb instruction whose
   first halfword is INST1.  */

static unsigned int
thumb_insn_size (unsigned short inst1)
{
  /* Halfwords 0b11101..., 0b11110... and 0b11111... introduce 32-bit
     Thumb-2 instructions; everything else is a 16-bit instruction.  */
  if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
    return 4;
  else
    return 2;
}
/* Advance the Thumb-2 IT state ITSTATE past one executed instruction,
   returning the new state (0 once the IT block is finished).  */

static unsigned int
thumb_advance_itstate (unsigned int itstate)
{
  /* Preserve IT[7:5], the first three bits of the condition.  Shift
     the upcoming condition flags left by one bit.  */
  itstate = (itstate & 0xe0) | ((itstate << 1) & 0x1f);

  /* If we have finished the IT block, clear the state.  */
  if ((itstate & 0x0f) == 0)
    itstate = 0;

  return itstate;
}
4430 /* Find the next PC after the current instruction executes. In some
4431 cases we can not statically determine the answer (see the IT state
4432 handling in this function); in that case, a breakpoint may be
4433 inserted in addition to the returned PC, which will be used to set
4434 another breakpoint by our caller. */
4437 thumb_get_next_pc_raw (struct frame_info
*frame
, CORE_ADDR pc
)
4439 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
4440 struct address_space
*aspace
= get_frame_address_space (frame
);
4441 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
4442 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
4443 unsigned long pc_val
= ((unsigned long) pc
) + 4; /* PC after prefetch */
4444 unsigned short inst1
;
4445 CORE_ADDR nextpc
= pc
+ 2; /* Default is next instruction. */
4446 unsigned long offset
;
4447 ULONGEST status
, itstate
;
4449 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4450 pc_val
= MAKE_THUMB_ADDR (pc_val
);
4452 inst1
= read_memory_unsigned_integer (pc
, 2, byte_order_for_code
);
4454 /* Thumb-2 conditional execution support. There are eight bits in
4455 the CPSR which describe conditional execution state. Once
4456 reconstructed (they're in a funny order), the low five bits
4457 describe the low bit of the condition for each instruction and
4458 how many instructions remain. The high three bits describe the
4459 base condition. One of the low four bits will be set if an IT
4460 block is active. These bits read as zero on earlier
4462 status
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
4463 itstate
= ((status
>> 8) & 0xfc) | ((status
>> 25) & 0x3);
4465 /* If-Then handling. On GNU/Linux, where this routine is used, we
4466 use an undefined instruction as a breakpoint. Unlike BKPT, IT
4467 can disable execution of the undefined instruction. So we might
4468 miss the breakpoint if we set it on a skipped conditional
4469 instruction. Because conditional instructions can change the
4470 flags, affecting the execution of further instructions, we may
4471 need to set two breakpoints. */
4473 if (gdbarch_tdep (gdbarch
)->thumb2_breakpoint
!= NULL
)
4475 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
4477 /* An IT instruction. Because this instruction does not
4478 modify the flags, we can accurately predict the next
4479 executed instruction. */
4480 itstate
= inst1
& 0x00ff;
4481 pc
+= thumb_insn_size (inst1
);
4483 while (itstate
!= 0 && ! condition_true (itstate
>> 4, status
))
4485 inst1
= read_memory_unsigned_integer (pc
, 2,
4486 byte_order_for_code
);
4487 pc
+= thumb_insn_size (inst1
);
4488 itstate
= thumb_advance_itstate (itstate
);
4491 return MAKE_THUMB_ADDR (pc
);
4493 else if (itstate
!= 0)
4495 /* We are in a conditional block. Check the condition. */
4496 if (! condition_true (itstate
>> 4, status
))
4498 /* Advance to the next executed instruction. */
4499 pc
+= thumb_insn_size (inst1
);
4500 itstate
= thumb_advance_itstate (itstate
);
4502 while (itstate
!= 0 && ! condition_true (itstate
>> 4, status
))
4504 inst1
= read_memory_unsigned_integer (pc
, 2,
4505 byte_order_for_code
);
4506 pc
+= thumb_insn_size (inst1
);
4507 itstate
= thumb_advance_itstate (itstate
);
4510 return MAKE_THUMB_ADDR (pc
);
4512 else if ((itstate
& 0x0f) == 0x08)
4514 /* This is the last instruction of the conditional
4515 block, and it is executed. We can handle it normally
4516 because the following instruction is not conditional,
4517 and we must handle it normally because it is
4518 permitted to branch. Fall through. */
4524 /* There are conditional instructions after this one.
4525 If this instruction modifies the flags, then we can
4526 not predict what the next executed instruction will
4527 be. Fortunately, this instruction is architecturally
4528 forbidden to branch; we know it will fall through.
4529 Start by skipping past it. */
4530 pc
+= thumb_insn_size (inst1
);
4531 itstate
= thumb_advance_itstate (itstate
);
4533 /* Set a breakpoint on the following instruction. */
4534 gdb_assert ((itstate
& 0x0f) != 0);
4535 arm_insert_single_step_breakpoint (gdbarch
, aspace
,
4536 MAKE_THUMB_ADDR (pc
));
4537 cond_negated
= (itstate
>> 4) & 1;
4539 /* Skip all following instructions with the same
4540 condition. If there is a later instruction in the IT
4541 block with the opposite condition, set the other
4542 breakpoint there. If not, then set a breakpoint on
4543 the instruction after the IT block. */
4546 inst1
= read_memory_unsigned_integer (pc
, 2,
4547 byte_order_for_code
);
4548 pc
+= thumb_insn_size (inst1
);
4549 itstate
= thumb_advance_itstate (itstate
);
4551 while (itstate
!= 0 && ((itstate
>> 4) & 1) == cond_negated
);
4553 return MAKE_THUMB_ADDR (pc
);
4557 else if (itstate
& 0x0f)
4559 /* We are in a conditional block. Check the condition. */
4560 int cond
= itstate
>> 4;
4562 if (! condition_true (cond
, status
))
4563 /* Advance to the next instruction. All the 32-bit
4564 instructions share a common prefix. */
4565 return MAKE_THUMB_ADDR (pc
+ thumb_insn_size (inst1
));
4567 /* Otherwise, handle the instruction normally. */
4570 if ((inst1
& 0xff00) == 0xbd00) /* pop {rlist, pc} */
4574 /* Fetch the saved PC from the stack. It's stored above
4575 all of the other registers. */
4576 offset
= bitcount (bits (inst1
, 0, 7)) * INT_REGISTER_SIZE
;
4577 sp
= get_frame_register_unsigned (frame
, ARM_SP_REGNUM
);
4578 nextpc
= read_memory_unsigned_integer (sp
+ offset
, 4, byte_order
);
4580 else if ((inst1
& 0xf000) == 0xd000) /* conditional branch */
4582 unsigned long cond
= bits (inst1
, 8, 11);
4583 if (cond
== 0x0f) /* 0x0f = SWI */
4585 struct gdbarch_tdep
*tdep
;
4586 tdep
= gdbarch_tdep (gdbarch
);
4588 if (tdep
->syscall_next_pc
!= NULL
)
4589 nextpc
= tdep
->syscall_next_pc (frame
);
4592 else if (cond
!= 0x0f && condition_true (cond
, status
))
4593 nextpc
= pc_val
+ (sbits (inst1
, 0, 7) << 1);
4595 else if ((inst1
& 0xf800) == 0xe000) /* unconditional branch */
4597 nextpc
= pc_val
+ (sbits (inst1
, 0, 10) << 1);
4599 else if (thumb_insn_size (inst1
) == 4) /* 32-bit instruction */
4601 unsigned short inst2
;
4602 inst2
= read_memory_unsigned_integer (pc
+ 2, 2, byte_order_for_code
);
4604 /* Default to the next instruction. */
4606 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4608 if ((inst1
& 0xf800) == 0xf000 && (inst2
& 0x8000) == 0x8000)
4610 /* Branches and miscellaneous control instructions. */
4612 if ((inst2
& 0x1000) != 0 || (inst2
& 0xd001) == 0xc000)
4615 int j1
, j2
, imm1
, imm2
;
4617 imm1
= sbits (inst1
, 0, 10);
4618 imm2
= bits (inst2
, 0, 10);
4619 j1
= bit (inst2
, 13);
4620 j2
= bit (inst2
, 11);
4622 offset
= ((imm1
<< 12) + (imm2
<< 1));
4623 offset
^= ((!j2
) << 22) | ((!j1
) << 23);
4625 nextpc
= pc_val
+ offset
;
4626 /* For BLX make sure to clear the low bits. */
4627 if (bit (inst2
, 12) == 0)
4628 nextpc
= nextpc
& 0xfffffffc;
4630 else if (inst1
== 0xf3de && (inst2
& 0xff00) == 0x3f00)
4632 /* SUBS PC, LR, #imm8. */
4633 nextpc
= get_frame_register_unsigned (frame
, ARM_LR_REGNUM
);
4634 nextpc
-= inst2
& 0x00ff;
4636 else if ((inst2
& 0xd000) == 0x8000 && (inst1
& 0x0380) != 0x0380)
4638 /* Conditional branch. */
4639 if (condition_true (bits (inst1
, 6, 9), status
))
4641 int sign
, j1
, j2
, imm1
, imm2
;
4643 sign
= sbits (inst1
, 10, 10);
4644 imm1
= bits (inst1
, 0, 5);
4645 imm2
= bits (inst2
, 0, 10);
4646 j1
= bit (inst2
, 13);
4647 j2
= bit (inst2
, 11);
4649 offset
= (sign
<< 20) + (j2
<< 19) + (j1
<< 18);
4650 offset
+= (imm1
<< 12) + (imm2
<< 1);
4652 nextpc
= pc_val
+ offset
;
4656 else if ((inst1
& 0xfe50) == 0xe810)
4658 /* Load multiple or RFE. */
4659 int rn
, offset
, load_pc
= 1;
4661 rn
= bits (inst1
, 0, 3);
4662 if (bit (inst1
, 7) && !bit (inst1
, 8))
4665 if (!bit (inst2
, 15))
4667 offset
= bitcount (inst2
) * 4 - 4;
4669 else if (!bit (inst1
, 7) && bit (inst1
, 8))
4672 if (!bit (inst2
, 15))
4676 else if (bit (inst1
, 7) && bit (inst1
, 8))
4681 else if (!bit (inst1
, 7) && !bit (inst1
, 8))
4691 CORE_ADDR addr
= get_frame_register_unsigned (frame
, rn
);
4692 nextpc
= get_frame_memory_unsigned (frame
, addr
+ offset
, 4);
4695 else if ((inst1
& 0xffef) == 0xea4f && (inst2
& 0xfff0) == 0x0f00)
4697 /* MOV PC or MOVS PC. */
4698 nextpc
= get_frame_register_unsigned (frame
, bits (inst2
, 0, 3));
4699 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4701 else if ((inst1
& 0xff70) == 0xf850 && (inst2
& 0xf000) == 0xf000)
4705 int rn
, load_pc
= 1;
4707 rn
= bits (inst1
, 0, 3);
4708 base
= get_frame_register_unsigned (frame
, rn
);
4709 if (rn
== ARM_PC_REGNUM
)
4711 base
= (base
+ 4) & ~(CORE_ADDR
) 0x3;
4713 base
+= bits (inst2
, 0, 11);
4715 base
-= bits (inst2
, 0, 11);
4717 else if (bit (inst1
, 7))
4718 base
+= bits (inst2
, 0, 11);
4719 else if (bit (inst2
, 11))
4721 if (bit (inst2
, 10))
4724 base
+= bits (inst2
, 0, 7);
4726 base
-= bits (inst2
, 0, 7);
4729 else if ((inst2
& 0x0fc0) == 0x0000)
4731 int shift
= bits (inst2
, 4, 5), rm
= bits (inst2
, 0, 3);
4732 base
+= get_frame_register_unsigned (frame
, rm
) << shift
;
4739 nextpc
= get_frame_memory_unsigned (frame
, base
, 4);
4741 else if ((inst1
& 0xfff0) == 0xe8d0 && (inst2
& 0xfff0) == 0xf000)
4744 CORE_ADDR tbl_reg
, table
, offset
, length
;
4746 tbl_reg
= bits (inst1
, 0, 3);
4747 if (tbl_reg
== 0x0f)
4748 table
= pc
+ 4; /* Regcache copy of PC isn't right yet. */
4750 table
= get_frame_register_unsigned (frame
, tbl_reg
);
4752 offset
= get_frame_register_unsigned (frame
, bits (inst2
, 0, 3));
4753 length
= 2 * get_frame_memory_unsigned (frame
, table
+ offset
, 1);
4754 nextpc
= pc_val
+ length
;
4756 else if ((inst1
& 0xfff0) == 0xe8d0 && (inst2
& 0xfff0) == 0xf010)
4759 CORE_ADDR tbl_reg
, table
, offset
, length
;
4761 tbl_reg
= bits (inst1
, 0, 3);
4762 if (tbl_reg
== 0x0f)
4763 table
= pc
+ 4; /* Regcache copy of PC isn't right yet. */
4765 table
= get_frame_register_unsigned (frame
, tbl_reg
);
4767 offset
= 2 * get_frame_register_unsigned (frame
, bits (inst2
, 0, 3));
4768 length
= 2 * get_frame_memory_unsigned (frame
, table
+ offset
, 2);
4769 nextpc
= pc_val
+ length
;
4772 else if ((inst1
& 0xff00) == 0x4700) /* bx REG, blx REG */
4774 if (bits (inst1
, 3, 6) == 0x0f)
4775 nextpc
= UNMAKE_THUMB_ADDR (pc_val
);
4777 nextpc
= get_frame_register_unsigned (frame
, bits (inst1
, 3, 6));
4779 else if ((inst1
& 0xff87) == 0x4687) /* mov pc, REG */
4781 if (bits (inst1
, 3, 6) == 0x0f)
4784 nextpc
= get_frame_register_unsigned (frame
, bits (inst1
, 3, 6));
4786 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4788 else if ((inst1
& 0xf500) == 0xb100)
4791 int imm
= (bit (inst1
, 9) << 6) + (bits (inst1
, 3, 7) << 1);
4792 ULONGEST reg
= get_frame_register_unsigned (frame
, bits (inst1
, 0, 2));
4794 if (bit (inst1
, 11) && reg
!= 0)
4795 nextpc
= pc_val
+ imm
;
4796 else if (!bit (inst1
, 11) && reg
== 0)
4797 nextpc
= pc_val
+ imm
;
4802 /* Get the raw next address. PC is the current program counter, in
4803 FRAME, which is assumed to be executing in ARM mode.
4805 The value returned has the execution state of the next instruction
4806 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
4807 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
4811 arm_get_next_pc_raw (struct frame_info
*frame
, CORE_ADDR pc
)
4813 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
4814 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
4815 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
4816 unsigned long pc_val
;
4817 unsigned long this_instr
;
4818 unsigned long status
;
4821 pc_val
= (unsigned long) pc
;
4822 this_instr
= read_memory_unsigned_integer (pc
, 4, byte_order_for_code
);
4824 status
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
4825 nextpc
= (CORE_ADDR
) (pc_val
+ 4); /* Default case */
4827 if (bits (this_instr
, 28, 31) == INST_NV
)
4828 switch (bits (this_instr
, 24, 27))
4833 /* Branch with Link and change to Thumb. */
4834 nextpc
= BranchDest (pc
, this_instr
);
4835 nextpc
|= bit (this_instr
, 24) << 1;
4836 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4842 /* Coprocessor register transfer. */
4843 if (bits (this_instr
, 12, 15) == 15)
4844 error (_("Invalid update to pc in instruction"));
4847 else if (condition_true (bits (this_instr
, 28, 31), status
))
4849 switch (bits (this_instr
, 24, 27))
4852 case 0x1: /* data processing */
4856 unsigned long operand1
, operand2
, result
= 0;
4860 if (bits (this_instr
, 12, 15) != 15)
4863 if (bits (this_instr
, 22, 25) == 0
4864 && bits (this_instr
, 4, 7) == 9) /* multiply */
4865 error (_("Invalid update to pc in instruction"));
4867 /* BX <reg>, BLX <reg> */
4868 if (bits (this_instr
, 4, 27) == 0x12fff1
4869 || bits (this_instr
, 4, 27) == 0x12fff3)
4871 rn
= bits (this_instr
, 0, 3);
4872 nextpc
= ((rn
== ARM_PC_REGNUM
)
4874 : get_frame_register_unsigned (frame
, rn
));
4879 /* Multiply into PC. */
4880 c
= (status
& FLAG_C
) ? 1 : 0;
4881 rn
= bits (this_instr
, 16, 19);
4882 operand1
= ((rn
== ARM_PC_REGNUM
)
4884 : get_frame_register_unsigned (frame
, rn
));
4886 if (bit (this_instr
, 25))
4888 unsigned long immval
= bits (this_instr
, 0, 7);
4889 unsigned long rotate
= 2 * bits (this_instr
, 8, 11);
4890 operand2
= ((immval
>> rotate
) | (immval
<< (32 - rotate
)))
4893 else /* operand 2 is a shifted register. */
4894 operand2
= shifted_reg_val (frame
, this_instr
, c
,
4897 switch (bits (this_instr
, 21, 24))
4900 result
= operand1
& operand2
;
4904 result
= operand1
^ operand2
;
4908 result
= operand1
- operand2
;
4912 result
= operand2
- operand1
;
4916 result
= operand1
+ operand2
;
4920 result
= operand1
+ operand2
+ c
;
4924 result
= operand1
- operand2
+ c
;
4928 result
= operand2
- operand1
+ c
;
4934 case 0xb: /* tst, teq, cmp, cmn */
4935 result
= (unsigned long) nextpc
;
4939 result
= operand1
| operand2
;
4943 /* Always step into a function. */
4948 result
= operand1
& ~operand2
;
4956 /* In 26-bit APCS the bottom two bits of the result are
4957 ignored, and we always end up in ARM state. */
4959 nextpc
= arm_addr_bits_remove (gdbarch
, result
);
4967 case 0x5: /* data transfer */
4970 if (bit (this_instr
, 20))
4973 if (bits (this_instr
, 12, 15) == 15)
4979 if (bit (this_instr
, 22))
4980 error (_("Invalid update to pc in instruction"));
4982 /* byte write to PC */
4983 rn
= bits (this_instr
, 16, 19);
4984 base
= ((rn
== ARM_PC_REGNUM
)
4986 : get_frame_register_unsigned (frame
, rn
));
4988 if (bit (this_instr
, 24))
4991 int c
= (status
& FLAG_C
) ? 1 : 0;
4992 unsigned long offset
=
4993 (bit (this_instr
, 25)
4994 ? shifted_reg_val (frame
, this_instr
, c
, pc_val
, status
)
4995 : bits (this_instr
, 0, 11));
4997 if (bit (this_instr
, 23))
5003 (CORE_ADDR
) read_memory_unsigned_integer ((CORE_ADDR
) base
,
5010 case 0x9: /* block transfer */
5011 if (bit (this_instr
, 20))
5014 if (bit (this_instr
, 15))
5018 unsigned long rn_val
5019 = get_frame_register_unsigned (frame
,
5020 bits (this_instr
, 16, 19));
5022 if (bit (this_instr
, 23))
5025 unsigned long reglist
= bits (this_instr
, 0, 14);
5026 offset
= bitcount (reglist
) * 4;
5027 if (bit (this_instr
, 24)) /* pre */
5030 else if (bit (this_instr
, 24))
5034 (CORE_ADDR
) read_memory_unsigned_integer ((CORE_ADDR
)
5041 case 0xb: /* branch & link */
5042 case 0xa: /* branch */
5044 nextpc
= BranchDest (pc
, this_instr
);
5050 case 0xe: /* coproc ops */
5054 struct gdbarch_tdep
*tdep
;
5055 tdep
= gdbarch_tdep (gdbarch
);
5057 if (tdep
->syscall_next_pc
!= NULL
)
5058 nextpc
= tdep
->syscall_next_pc (frame
);
5064 fprintf_filtered (gdb_stderr
, _("Bad bit-field extraction\n"));
5072 /* Determine next PC after current instruction executes. Will call either
5073 arm_get_next_pc_raw or thumb_get_next_pc_raw. Error out if infinite
5074 loop is detected. */
5077 arm_get_next_pc (struct frame_info
*frame
, CORE_ADDR pc
)
5081 if (arm_frame_is_thumb (frame
))
5082 nextpc
= thumb_get_next_pc_raw (frame
, pc
);
5084 nextpc
= arm_get_next_pc_raw (frame
, pc
);
5089 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
5090 of the appropriate mode (as encoded in the PC value), even if this
5091 differs from what would be expected according to the symbol tables. */
5094 arm_insert_single_step_breakpoint (struct gdbarch
*gdbarch
,
5095 struct address_space
*aspace
,
5098 struct cleanup
*old_chain
5099 = make_cleanup_restore_integer (&arm_override_mode
);
5101 arm_override_mode
= IS_THUMB_ADDR (pc
);
5102 pc
= gdbarch_addr_bits_remove (gdbarch
, pc
);
5104 insert_single_step_breakpoint (gdbarch
, aspace
, pc
);
5106 do_cleanups (old_chain
);
5109 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
5110 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
5111 is found, attempt to step through it. A breakpoint is placed at the end of
5115 thumb_deal_with_atomic_sequence_raw (struct frame_info
*frame
)
5117 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
5118 struct address_space
*aspace
= get_frame_address_space (frame
);
5119 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
5120 CORE_ADDR pc
= get_frame_pc (frame
);
5121 CORE_ADDR breaks
[2] = {-1, -1};
5123 unsigned short insn1
, insn2
;
5126 int last_breakpoint
= 0; /* Defaults to 0 (no breakpoints placed). */
5127 const int atomic_sequence_length
= 16; /* Instruction sequence length. */
5128 ULONGEST status
, itstate
;
5130 /* We currently do not support atomic sequences within an IT block. */
5131 status
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
5132 itstate
= ((status
>> 8) & 0xfc) | ((status
>> 25) & 0x3);
5136 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
5137 insn1
= read_memory_unsigned_integer (loc
, 2, byte_order_for_code
);
5139 if (thumb_insn_size (insn1
) != 4)
5142 insn2
= read_memory_unsigned_integer (loc
, 2, byte_order_for_code
);
5144 if (!((insn1
& 0xfff0) == 0xe850
5145 || ((insn1
& 0xfff0) == 0xe8d0 && (insn2
& 0x00c0) == 0x0040)))
5148 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5150 for (insn_count
= 0; insn_count
< atomic_sequence_length
; ++insn_count
)
5152 insn1
= read_memory_unsigned_integer (loc
, 2, byte_order_for_code
);
5155 if (thumb_insn_size (insn1
) != 4)
5157 /* Assume that there is at most one conditional branch in the
5158 atomic sequence. If a conditional branch is found, put a
5159 breakpoint in its destination address. */
5160 if ((insn1
& 0xf000) == 0xd000 && bits (insn1
, 8, 11) != 0x0f)
5162 if (last_breakpoint
> 0)
5163 return 0; /* More than one conditional branch found,
5164 fallback to the standard code. */
5166 breaks
[1] = loc
+ 2 + (sbits (insn1
, 0, 7) << 1);
5170 /* We do not support atomic sequences that use any *other*
5171 instructions but conditional branches to change the PC.
5172 Fall back to standard code to avoid losing control of
5174 else if (thumb_instruction_changes_pc (insn1
))
5179 insn2
= read_memory_unsigned_integer (loc
, 2, byte_order_for_code
);
5182 /* Assume that there is at most one conditional branch in the
5183 atomic sequence. If a conditional branch is found, put a
5184 breakpoint in its destination address. */
5185 if ((insn1
& 0xf800) == 0xf000
5186 && (insn2
& 0xd000) == 0x8000
5187 && (insn1
& 0x0380) != 0x0380)
5189 int sign
, j1
, j2
, imm1
, imm2
;
5190 unsigned int offset
;
5192 sign
= sbits (insn1
, 10, 10);
5193 imm1
= bits (insn1
, 0, 5);
5194 imm2
= bits (insn2
, 0, 10);
5195 j1
= bit (insn2
, 13);
5196 j2
= bit (insn2
, 11);
5198 offset
= (sign
<< 20) + (j2
<< 19) + (j1
<< 18);
5199 offset
+= (imm1
<< 12) + (imm2
<< 1);
5201 if (last_breakpoint
> 0)
5202 return 0; /* More than one conditional branch found,
5203 fallback to the standard code. */
5205 breaks
[1] = loc
+ offset
;
5209 /* We do not support atomic sequences that use any *other*
5210 instructions but conditional branches to change the PC.
5211 Fall back to standard code to avoid losing control of
5213 else if (thumb2_instruction_changes_pc (insn1
, insn2
))
5216 /* If we find a strex{,b,h,d}, we're done. */
5217 if ((insn1
& 0xfff0) == 0xe840
5218 || ((insn1
& 0xfff0) == 0xe8c0 && (insn2
& 0x00c0) == 0x0040))
5223 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5224 if (insn_count
== atomic_sequence_length
)
5227 /* Insert a breakpoint right after the end of the atomic sequence. */
5230 /* Check for duplicated breakpoints. Check also for a breakpoint
5231 placed (branch instruction's destination) anywhere in sequence. */
5233 && (breaks
[1] == breaks
[0]
5234 || (breaks
[1] >= pc
&& breaks
[1] < loc
)))
5235 last_breakpoint
= 0;
5237 /* Effectively inserts the breakpoints. */
5238 for (index
= 0; index
<= last_breakpoint
; index
++)
5239 arm_insert_single_step_breakpoint (gdbarch
, aspace
,
5240 MAKE_THUMB_ADDR (breaks
[index
]));
5246 arm_deal_with_atomic_sequence_raw (struct frame_info
*frame
)
5248 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
5249 struct address_space
*aspace
= get_frame_address_space (frame
);
5250 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
5251 CORE_ADDR pc
= get_frame_pc (frame
);
5252 CORE_ADDR breaks
[2] = {-1, -1};
5257 int last_breakpoint
= 0; /* Defaults to 0 (no breakpoints placed). */
5258 const int atomic_sequence_length
= 16; /* Instruction sequence length. */
5260 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
5261 Note that we do not currently support conditionally executed atomic
5263 insn
= read_memory_unsigned_integer (loc
, 4, byte_order_for_code
);
5265 if ((insn
& 0xff9000f0) != 0xe1900090)
5268 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5270 for (insn_count
= 0; insn_count
< atomic_sequence_length
; ++insn_count
)
5272 insn
= read_memory_unsigned_integer (loc
, 4, byte_order_for_code
);
5275 /* Assume that there is at most one conditional branch in the atomic
5276 sequence. If a conditional branch is found, put a breakpoint in
5277 its destination address. */
5278 if (bits (insn
, 24, 27) == 0xa)
5280 if (last_breakpoint
> 0)
5281 return 0; /* More than one conditional branch found, fallback
5282 to the standard single-step code. */
5284 breaks
[1] = BranchDest (loc
- 4, insn
);
5288 /* We do not support atomic sequences that use any *other* instructions
5289 but conditional branches to change the PC. Fall back to standard
5290 code to avoid losing control of execution. */
5291 else if (arm_instruction_changes_pc (insn
))
5294 /* If we find a strex{,b,h,d}, we're done. */
5295 if ((insn
& 0xff9000f0) == 0xe1800090)
5299 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5300 if (insn_count
== atomic_sequence_length
)
5303 /* Insert a breakpoint right after the end of the atomic sequence. */
5306 /* Check for duplicated breakpoints. Check also for a breakpoint
5307 placed (branch instruction's destination) anywhere in sequence. */
5309 && (breaks
[1] == breaks
[0]
5310 || (breaks
[1] >= pc
&& breaks
[1] < loc
)))
5311 last_breakpoint
= 0;
5313 /* Effectively inserts the breakpoints. */
5314 for (index
= 0; index
<= last_breakpoint
; index
++)
5315 arm_insert_single_step_breakpoint (gdbarch
, aspace
, breaks
[index
]);
5321 arm_deal_with_atomic_sequence (struct frame_info
*frame
)
5323 if (arm_frame_is_thumb (frame
))
5324 return thumb_deal_with_atomic_sequence_raw (frame
);
5326 return arm_deal_with_atomic_sequence_raw (frame
);
5329 /* single_step() is called just before we want to resume the inferior,
5330 if we want to single-step it but there is no hardware or kernel
5331 single-step support. We find the target of the coming instruction
5332 and breakpoint it. */
5335 arm_software_single_step (struct frame_info
*frame
)
5337 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
5338 struct address_space
*aspace
= get_frame_address_space (frame
);
5341 if (arm_deal_with_atomic_sequence (frame
))
5344 next_pc
= arm_get_next_pc (frame
, get_frame_pc (frame
));
5345 arm_insert_single_step_breakpoint (gdbarch
, aspace
, next_pc
);
5350 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5351 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5352 NULL if an error occurs. BUF is freed. */
5355 extend_buffer_earlier (gdb_byte
*buf
, CORE_ADDR endaddr
,
5356 int old_len
, int new_len
)
5359 int bytes_to_read
= new_len
- old_len
;
5361 new_buf
= xmalloc (new_len
);
5362 memcpy (new_buf
+ bytes_to_read
, buf
, old_len
);
5364 if (target_read_memory (endaddr
- new_len
, new_buf
, bytes_to_read
) != 0)
5372 /* An IT block is at most the 2-byte IT instruction followed by
5373 four 4-byte instructions. The furthest back we must search to
5374 find an IT block that affects the current instruction is thus
5375 2 + 3 * 4 == 14 bytes. */
5376 #define MAX_IT_BLOCK_PREFIX 14
5378 /* Use a quick scan if there are more than this many bytes of
5380 #define IT_SCAN_THRESHOLD 32
5382 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5383 A breakpoint in an IT block may not be hit, depending on the
5386 arm_adjust_breakpoint_address (struct gdbarch
*gdbarch
, CORE_ADDR bpaddr
)
5390 CORE_ADDR boundary
, func_start
;
5392 enum bfd_endian order
= gdbarch_byte_order_for_code (gdbarch
);
5393 int i
, any
, last_it
, last_it_count
;
5395 /* If we are using BKPT breakpoints, none of this is necessary. */
5396 if (gdbarch_tdep (gdbarch
)->thumb2_breakpoint
== NULL
)
5399 /* ARM mode does not have this problem. */
5400 if (!arm_pc_is_thumb (gdbarch
, bpaddr
))
5403 /* We are setting a breakpoint in Thumb code that could potentially
5404 contain an IT block. The first step is to find how much Thumb
5405 code there is; we do not need to read outside of known Thumb
5407 map_type
= arm_find_mapping_symbol (bpaddr
, &boundary
);
5409 /* Thumb-2 code must have mapping symbols to have a chance. */
5412 bpaddr
= gdbarch_addr_bits_remove (gdbarch
, bpaddr
);
5414 if (find_pc_partial_function (bpaddr
, NULL
, &func_start
, NULL
)
5415 && func_start
> boundary
)
5416 boundary
= func_start
;
5418 /* Search for a candidate IT instruction. We have to do some fancy
5419 footwork to distinguish a real IT instruction from the second
5420 half of a 32-bit instruction, but there is no need for that if
5421 there's no candidate. */
5422 buf_len
= min (bpaddr
- boundary
, MAX_IT_BLOCK_PREFIX
);
5424 /* No room for an IT instruction. */
5427 buf
= xmalloc (buf_len
);
5428 if (target_read_memory (bpaddr
- buf_len
, buf
, buf_len
) != 0)
5431 for (i
= 0; i
< buf_len
; i
+= 2)
5433 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
5434 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
5446 /* OK, the code bytes before this instruction contain at least one
5447 halfword which resembles an IT instruction. We know that it's
5448 Thumb code, but there are still two possibilities. Either the
5449 halfword really is an IT instruction, or it is the second half of
5450 a 32-bit Thumb instruction. The only way we can tell is to
5451 scan forwards from a known instruction boundary. */
5452 if (bpaddr
- boundary
> IT_SCAN_THRESHOLD
)
5456 /* There's a lot of code before this instruction. Start with an
5457 optimistic search; it's easy to recognize halfwords that can
5458 not be the start of a 32-bit instruction, and use that to
5459 lock on to the instruction boundaries. */
5460 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
, IT_SCAN_THRESHOLD
);
5463 buf_len
= IT_SCAN_THRESHOLD
;
5466 for (i
= 0; i
< buf_len
- sizeof (buf
) && ! definite
; i
+= 2)
5468 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
5469 if (thumb_insn_size (inst1
) == 2)
5476 /* At this point, if DEFINITE, BUF[I] is the first place we
5477 are sure that we know the instruction boundaries, and it is far
5478 enough from BPADDR that we could not miss an IT instruction
5479 affecting BPADDR. If ! DEFINITE, give up - start from a
5483 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
,
5487 buf_len
= bpaddr
- boundary
;
5493 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
, bpaddr
- boundary
);
5496 buf_len
= bpaddr
- boundary
;
5500 /* Scan forwards. Find the last IT instruction before BPADDR. */
5505 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
5507 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
5512 else if (inst1
& 0x0002)
5514 else if (inst1
& 0x0004)
5519 i
+= thumb_insn_size (inst1
);
5525 /* There wasn't really an IT instruction after all. */
5528 if (last_it_count
< 1)
5529 /* It was too far away. */
5532 /* This really is a trouble spot. Move the breakpoint to the IT
5534 return bpaddr
- buf_len
+ last_it
;
5537 /* ARM displaced stepping support.
5539 Generally ARM displaced stepping works as follows:
5541 1. When an instruction is to be single-stepped, it is first decoded by
5542 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5543 Depending on the type of instruction, it is then copied to a scratch
5544 location, possibly in a modified form. The copy_* set of functions
5545 performs such modification, as necessary. A breakpoint is placed after
5546 the modified instruction in the scratch space to return control to GDB.
5547 Note in particular that instructions which modify the PC will no longer
5548 do so after modification.
5550 2. The instruction is single-stepped, by setting the PC to the scratch
5551 location address, and resuming. Control returns to GDB when the
5554 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5555 function used for the current instruction. This function's job is to
5556 put the CPU/memory state back to what it would have been if the
5557 instruction had been executed unmodified in its original location. */
5559 /* NOP instruction (mov r0, r0). */
5560 #define ARM_NOP 0xe1a00000
5561 #define THUMB_NOP 0x4600
5563 /* Helper for register reads for displaced stepping. In particular, this
5564 returns the PC as it would be seen by the instruction at its original
5568 displaced_read_reg (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5572 CORE_ADDR from
= dsc
->insn_addr
;
5574 if (regno
== ARM_PC_REGNUM
)
5576 /* Compute pipeline offset:
5577 - When executing an ARM instruction, PC reads as the address of the
5578 current instruction plus 8.
5579 - When executing a Thumb instruction, PC reads as the address of the
5580 current instruction plus 4. */
5587 if (debug_displaced
)
5588 fprintf_unfiltered (gdb_stdlog
, "displaced: read pc value %.8lx\n",
5589 (unsigned long) from
);
5590 return (ULONGEST
) from
;
5594 regcache_cooked_read_unsigned (regs
, regno
, &ret
);
5595 if (debug_displaced
)
5596 fprintf_unfiltered (gdb_stdlog
, "displaced: read r%d value %.8lx\n",
5597 regno
, (unsigned long) ret
);
5603 displaced_in_arm_mode (struct regcache
*regs
)
5606 ULONGEST t_bit
= arm_psr_thumb_bit (get_regcache_arch (regs
));
5608 regcache_cooked_read_unsigned (regs
, ARM_PS_REGNUM
, &ps
);
5610 return (ps
& t_bit
) == 0;
5613 /* Write to the PC as from a branch instruction. */
5616 branch_write_pc (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5620 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5621 architecture versions < 6. */
5622 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
5623 val
& ~(ULONGEST
) 0x3);
5625 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
5626 val
& ~(ULONGEST
) 0x1);
5629 /* Write to the PC as from a branch-exchange instruction. */
5632 bx_write_pc (struct regcache
*regs
, ULONGEST val
)
5635 ULONGEST t_bit
= arm_psr_thumb_bit (get_regcache_arch (regs
));
5637 regcache_cooked_read_unsigned (regs
, ARM_PS_REGNUM
, &ps
);
5641 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
| t_bit
);
5642 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
& 0xfffffffe);
5644 else if ((val
& 2) == 0)
5646 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
& ~t_bit
);
5647 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
);
5651 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5652 mode, align dest to 4 bytes). */
5653 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5654 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
& ~t_bit
);
5655 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
& 0xfffffffc);
5659 /* Write to the PC as if from a load instruction. */
5662 load_write_pc (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5665 if (DISPLACED_STEPPING_ARCH_VERSION
>= 5)
5666 bx_write_pc (regs
, val
);
5668 branch_write_pc (regs
, dsc
, val
);
5671 /* Write to the PC as if from an ALU instruction. */
5674 alu_write_pc (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5677 if (DISPLACED_STEPPING_ARCH_VERSION
>= 7 && !dsc
->is_thumb
)
5678 bx_write_pc (regs
, val
);
5680 branch_write_pc (regs
, dsc
, val
);
5683 /* Helper for writing to registers for displaced stepping. Writing to the PC
5684 has a varying effects depending on the instruction which does the write:
5685 this is controlled by the WRITE_PC argument. */
5688 displaced_write_reg (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5689 int regno
, ULONGEST val
, enum pc_write_style write_pc
)
5691 if (regno
== ARM_PC_REGNUM
)
5693 if (debug_displaced
)
5694 fprintf_unfiltered (gdb_stdlog
, "displaced: writing pc %.8lx\n",
5695 (unsigned long) val
);
5698 case BRANCH_WRITE_PC
:
5699 branch_write_pc (regs
, dsc
, val
);
5703 bx_write_pc (regs
, val
);
5707 load_write_pc (regs
, dsc
, val
);
5711 alu_write_pc (regs
, dsc
, val
);
5714 case CANNOT_WRITE_PC
:
5715 warning (_("Instruction wrote to PC in an unexpected way when "
5716 "single-stepping"));
5720 internal_error (__FILE__
, __LINE__
,
5721 _("Invalid argument to displaced_write_reg"));
5724 dsc
->wrote_to_pc
= 1;
5728 if (debug_displaced
)
5729 fprintf_unfiltered (gdb_stdlog
, "displaced: writing r%d value %.8lx\n",
5730 regno
, (unsigned long) val
);
5731 regcache_cooked_write_unsigned (regs
, regno
, val
);
5735 /* This function is used to concisely determine if an instruction INSN
5736 references PC. Register fields of interest in INSN should have the
5737 corresponding fields of BITMASK set to 0b1111. The function
5738 returns return 1 if any of these fields in INSN reference the PC
5739 (also 0b1111, r15), else it returns 0. */
5742 insn_references_pc (uint32_t insn
, uint32_t bitmask
)
5744 uint32_t lowbit
= 1;
5746 while (bitmask
!= 0)
5750 for (; lowbit
&& (bitmask
& lowbit
) == 0; lowbit
<<= 1)
5756 mask
= lowbit
* 0xf;
5758 if ((insn
& mask
) == mask
)
5767 /* The simplest copy function. Many instructions have the same effect no
5768 matter what address they are executed at: in those cases, use this. */
5771 arm_copy_unmodified (struct gdbarch
*gdbarch
, uint32_t insn
,
5772 const char *iname
, struct displaced_step_closure
*dsc
)
5774 if (debug_displaced
)
5775 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.8lx, "
5776 "opcode/class '%s' unmodified\n", (unsigned long) insn
,
5779 dsc
->modinsn
[0] = insn
;
5785 thumb_copy_unmodified_32bit (struct gdbarch
*gdbarch
, uint16_t insn1
,
5786 uint16_t insn2
, const char *iname
,
5787 struct displaced_step_closure
*dsc
)
5789 if (debug_displaced
)
5790 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x %.4x, "
5791 "opcode/class '%s' unmodified\n", insn1
, insn2
,
5794 dsc
->modinsn
[0] = insn1
;
5795 dsc
->modinsn
[1] = insn2
;
5801 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
5804 thumb_copy_unmodified_16bit (struct gdbarch
*gdbarch
, unsigned int insn
,
5806 struct displaced_step_closure
*dsc
)
5808 if (debug_displaced
)
5809 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x, "
5810 "opcode/class '%s' unmodified\n", insn
,
5813 dsc
->modinsn
[0] = insn
;
5818 /* Preload instructions with immediate offset. */
5821 cleanup_preload (struct gdbarch
*gdbarch
,
5822 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
5824 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5825 if (!dsc
->u
.preload
.immed
)
5826 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
5830 install_preload (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5831 struct displaced_step_closure
*dsc
, unsigned int rn
)
5834 /* Preload instructions:
5836 {pli/pld} [rn, #+/-imm]
5838 {pli/pld} [r0, #+/-imm]. */
5840 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5841 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5842 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
5843 dsc
->u
.preload
.immed
= 1;
5845 dsc
->cleanup
= &cleanup_preload
;
5849 arm_copy_preload (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
5850 struct displaced_step_closure
*dsc
)
5852 unsigned int rn
= bits (insn
, 16, 19);
5854 if (!insn_references_pc (insn
, 0x000f0000ul
))
5855 return arm_copy_unmodified (gdbarch
, insn
, "preload", dsc
);
5857 if (debug_displaced
)
5858 fprintf_unfiltered (gdb_stdlog
, "displaced: copying preload insn %.8lx\n",
5859 (unsigned long) insn
);
5861 dsc
->modinsn
[0] = insn
& 0xfff0ffff;
5863 install_preload (gdbarch
, regs
, dsc
, rn
);
5869 thumb2_copy_preload (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
5870 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
5872 unsigned int rn
= bits (insn1
, 0, 3);
5873 unsigned int u_bit
= bit (insn1
, 7);
5874 int imm12
= bits (insn2
, 0, 11);
5877 if (rn
!= ARM_PC_REGNUM
)
5878 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "preload", dsc
);
5880 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
5881 PLD (literal) Encoding T1. */
5882 if (debug_displaced
)
5883 fprintf_unfiltered (gdb_stdlog
,
5884 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5885 (unsigned int) dsc
->insn_addr
, u_bit
? '+' : '-',
5891 /* Rewrite instruction {pli/pld} PC imm12 into:
5892 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5896 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5898 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5899 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5901 pc_val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
5903 displaced_write_reg (regs
, dsc
, 0, pc_val
, CANNOT_WRITE_PC
);
5904 displaced_write_reg (regs
, dsc
, 1, imm12
, CANNOT_WRITE_PC
);
5905 dsc
->u
.preload
.immed
= 0;
5907 /* {pli/pld} [r0, r1] */
5908 dsc
->modinsn
[0] = insn1
& 0xfff0;
5909 dsc
->modinsn
[1] = 0xf001;
5912 dsc
->cleanup
= &cleanup_preload
;
5916 /* Preload instructions with register offset. */
5919 install_preload_reg(struct gdbarch
*gdbarch
, struct regcache
*regs
,
5920 struct displaced_step_closure
*dsc
, unsigned int rn
,
5923 ULONGEST rn_val
, rm_val
;
5925 /* Preload register-offset instructions:
5927 {pli/pld} [rn, rm {, shift}]
5929 {pli/pld} [r0, r1 {, shift}]. */
5931 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5932 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5933 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5934 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5935 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
5936 displaced_write_reg (regs
, dsc
, 1, rm_val
, CANNOT_WRITE_PC
);
5937 dsc
->u
.preload
.immed
= 0;
5939 dsc
->cleanup
= &cleanup_preload
;
5943 arm_copy_preload_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
5944 struct regcache
*regs
,
5945 struct displaced_step_closure
*dsc
)
5947 unsigned int rn
= bits (insn
, 16, 19);
5948 unsigned int rm
= bits (insn
, 0, 3);
5951 if (!insn_references_pc (insn
, 0x000f000ful
))
5952 return arm_copy_unmodified (gdbarch
, insn
, "preload reg", dsc
);
5954 if (debug_displaced
)
5955 fprintf_unfiltered (gdb_stdlog
, "displaced: copying preload insn %.8lx\n",
5956 (unsigned long) insn
);
5958 dsc
->modinsn
[0] = (insn
& 0xfff0fff0) | 0x1;
5960 install_preload_reg (gdbarch
, regs
, dsc
, rn
, rm
);
5964 /* Copy/cleanup coprocessor load and store instructions. */
5967 cleanup_copro_load_store (struct gdbarch
*gdbarch
,
5968 struct regcache
*regs
,
5969 struct displaced_step_closure
*dsc
)
5971 ULONGEST rn_val
= displaced_read_reg (regs
, dsc
, 0);
5973 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5975 if (dsc
->u
.ldst
.writeback
)
5976 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, LOAD_WRITE_PC
);
5980 install_copro_load_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5981 struct displaced_step_closure
*dsc
,
5982 int writeback
, unsigned int rn
)
5986 /* Coprocessor load/store instructions:
5988 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5990 {stc/stc2} [r0, #+/-imm].
5992 ldc/ldc2 are handled identically. */
5994 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5995 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5996 /* PC should be 4-byte aligned. */
5997 rn_val
= rn_val
& 0xfffffffc;
5998 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
6000 dsc
->u
.ldst
.writeback
= writeback
;
6001 dsc
->u
.ldst
.rn
= rn
;
6003 dsc
->cleanup
= &cleanup_copro_load_store
;
6007 arm_copy_copro_load_store (struct gdbarch
*gdbarch
, uint32_t insn
,
6008 struct regcache
*regs
,
6009 struct displaced_step_closure
*dsc
)
6011 unsigned int rn
= bits (insn
, 16, 19);
6013 if (!insn_references_pc (insn
, 0x000f0000ul
))
6014 return arm_copy_unmodified (gdbarch
, insn
, "copro load/store", dsc
);
6016 if (debug_displaced
)
6017 fprintf_unfiltered (gdb_stdlog
, "displaced: copying coprocessor "
6018 "load/store insn %.8lx\n", (unsigned long) insn
);
6020 dsc
->modinsn
[0] = insn
& 0xfff0ffff;
6022 install_copro_load_store (gdbarch
, regs
, dsc
, bit (insn
, 25), rn
);
6028 thumb2_copy_copro_load_store (struct gdbarch
*gdbarch
, uint16_t insn1
,
6029 uint16_t insn2
, struct regcache
*regs
,
6030 struct displaced_step_closure
*dsc
)
6032 unsigned int rn
= bits (insn1
, 0, 3);
6034 if (rn
!= ARM_PC_REGNUM
)
6035 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6036 "copro load/store", dsc
);
6038 if (debug_displaced
)
6039 fprintf_unfiltered (gdb_stdlog
, "displaced: copying coprocessor "
6040 "load/store insn %.4x%.4x\n", insn1
, insn2
);
6042 dsc
->modinsn
[0] = insn1
& 0xfff0;
6043 dsc
->modinsn
[1] = insn2
;
6046 /* This function is called for copying instruction LDC/LDC2/VLDR, which
6047 doesn't support writeback, so pass 0. */
6048 install_copro_load_store (gdbarch
, regs
, dsc
, 0, rn
);
6053 /* Clean up branch instructions (actually perform the branch, by setting
6057 cleanup_branch (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6058 struct displaced_step_closure
*dsc
)
6060 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
6061 int branch_taken
= condition_true (dsc
->u
.branch
.cond
, status
);
6062 enum pc_write_style write_pc
= dsc
->u
.branch
.exchange
6063 ? BX_WRITE_PC
: BRANCH_WRITE_PC
;
6068 if (dsc
->u
.branch
.link
)
6070 /* The value of LR should be the next insn of current one. In order
6071 not to confuse logic hanlding later insn `bx lr', if current insn mode
6072 is Thumb, the bit 0 of LR value should be set to 1. */
6073 ULONGEST next_insn_addr
= dsc
->insn_addr
+ dsc
->insn_size
;
6076 next_insn_addr
|= 0x1;
6078 displaced_write_reg (regs
, dsc
, ARM_LR_REGNUM
, next_insn_addr
,
6082 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, dsc
->u
.branch
.dest
, write_pc
);
6085 /* Copy B/BL/BLX instructions with immediate destinations. */
6088 install_b_bl_blx (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6089 struct displaced_step_closure
*dsc
,
6090 unsigned int cond
, int exchange
, int link
, long offset
)
6092 /* Implement "BL<cond> <label>" as:
6094 Preparation: cond <- instruction condition
6095 Insn: mov r0, r0 (nop)
6096 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
6098 B<cond> similar, but don't set r14 in cleanup. */
6100 dsc
->u
.branch
.cond
= cond
;
6101 dsc
->u
.branch
.link
= link
;
6102 dsc
->u
.branch
.exchange
= exchange
;
6104 dsc
->u
.branch
.dest
= dsc
->insn_addr
;
6105 if (link
&& exchange
)
6106 /* For BLX, offset is computed from the Align (PC, 4). */
6107 dsc
->u
.branch
.dest
= dsc
->u
.branch
.dest
& 0xfffffffc;
6110 dsc
->u
.branch
.dest
+= 4 + offset
;
6112 dsc
->u
.branch
.dest
+= 8 + offset
;
6114 dsc
->cleanup
= &cleanup_branch
;
6117 arm_copy_b_bl_blx (struct gdbarch
*gdbarch
, uint32_t insn
,
6118 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
6120 unsigned int cond
= bits (insn
, 28, 31);
6121 int exchange
= (cond
== 0xf);
6122 int link
= exchange
|| bit (insn
, 24);
6125 if (debug_displaced
)
6126 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %s immediate insn "
6127 "%.8lx\n", (exchange
) ? "blx" : (link
) ? "bl" : "b",
6128 (unsigned long) insn
);
6130 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
6131 then arrange the switch into Thumb mode. */
6132 offset
= (bits (insn
, 0, 23) << 2) | (bit (insn
, 24) << 1) | 1;
6134 offset
= bits (insn
, 0, 23) << 2;
6136 if (bit (offset
, 25))
6137 offset
= offset
| ~0x3ffffff;
6139 dsc
->modinsn
[0] = ARM_NOP
;
6141 install_b_bl_blx (gdbarch
, regs
, dsc
, cond
, exchange
, link
, offset
);
6146 thumb2_copy_b_bl_blx (struct gdbarch
*gdbarch
, uint16_t insn1
,
6147 uint16_t insn2
, struct regcache
*regs
,
6148 struct displaced_step_closure
*dsc
)
6150 int link
= bit (insn2
, 14);
6151 int exchange
= link
&& !bit (insn2
, 12);
6154 int j1
= bit (insn2
, 13);
6155 int j2
= bit (insn2
, 11);
6156 int s
= sbits (insn1
, 10, 10);
6157 int i1
= !(j1
^ bit (insn1
, 10));
6158 int i2
= !(j2
^ bit (insn1
, 10));
6160 if (!link
&& !exchange
) /* B */
6162 offset
= (bits (insn2
, 0, 10) << 1);
6163 if (bit (insn2
, 12)) /* Encoding T4 */
6165 offset
|= (bits (insn1
, 0, 9) << 12)
6171 else /* Encoding T3 */
6173 offset
|= (bits (insn1
, 0, 5) << 12)
6177 cond
= bits (insn1
, 6, 9);
6182 offset
= (bits (insn1
, 0, 9) << 12);
6183 offset
|= ((i2
<< 22) | (i1
<< 23) | (s
<< 24));
6184 offset
|= exchange
?
6185 (bits (insn2
, 1, 10) << 2) : (bits (insn2
, 0, 10) << 1);
6188 if (debug_displaced
)
6189 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %s insn "
6190 "%.4x %.4x with offset %.8lx\n",
6191 link
? (exchange
) ? "blx" : "bl" : "b",
6192 insn1
, insn2
, offset
);
6194 dsc
->modinsn
[0] = THUMB_NOP
;
6196 install_b_bl_blx (gdbarch
, regs
, dsc
, cond
, exchange
, link
, offset
);
6200 /* Copy B Thumb instructions. */
6202 thumb_copy_b (struct gdbarch
*gdbarch
, unsigned short insn
,
6203 struct displaced_step_closure
*dsc
)
6205 unsigned int cond
= 0;
6207 unsigned short bit_12_15
= bits (insn
, 12, 15);
6208 CORE_ADDR from
= dsc
->insn_addr
;
6210 if (bit_12_15
== 0xd)
6212 /* offset = SignExtend (imm8:0, 32) */
6213 offset
= sbits ((insn
<< 1), 0, 8);
6214 cond
= bits (insn
, 8, 11);
6216 else if (bit_12_15
== 0xe) /* Encoding T2 */
6218 offset
= sbits ((insn
<< 1), 0, 11);
6222 if (debug_displaced
)
6223 fprintf_unfiltered (gdb_stdlog
,
6224 "displaced: copying b immediate insn %.4x "
6225 "with offset %d\n", insn
, offset
);
6227 dsc
->u
.branch
.cond
= cond
;
6228 dsc
->u
.branch
.link
= 0;
6229 dsc
->u
.branch
.exchange
= 0;
6230 dsc
->u
.branch
.dest
= from
+ 4 + offset
;
6232 dsc
->modinsn
[0] = THUMB_NOP
;
6234 dsc
->cleanup
= &cleanup_branch
;
6239 /* Copy BX/BLX with register-specified destinations. */
6242 install_bx_blx_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6243 struct displaced_step_closure
*dsc
, int link
,
6244 unsigned int cond
, unsigned int rm
)
6246 /* Implement {BX,BLX}<cond> <reg>" as:
6248 Preparation: cond <- instruction condition
6249 Insn: mov r0, r0 (nop)
6250 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6252 Don't set r14 in cleanup for BX. */
6254 dsc
->u
.branch
.dest
= displaced_read_reg (regs
, dsc
, rm
);
6256 dsc
->u
.branch
.cond
= cond
;
6257 dsc
->u
.branch
.link
= link
;
6259 dsc
->u
.branch
.exchange
= 1;
6261 dsc
->cleanup
= &cleanup_branch
;
6265 arm_copy_bx_blx_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
6266 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
6268 unsigned int cond
= bits (insn
, 28, 31);
6271 int link
= bit (insn
, 5);
6272 unsigned int rm
= bits (insn
, 0, 3);
6274 if (debug_displaced
)
6275 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.8lx",
6276 (unsigned long) insn
);
6278 dsc
->modinsn
[0] = ARM_NOP
;
6280 install_bx_blx_reg (gdbarch
, regs
, dsc
, link
, cond
, rm
);
6285 thumb_copy_bx_blx_reg (struct gdbarch
*gdbarch
, uint16_t insn
,
6286 struct regcache
*regs
,
6287 struct displaced_step_closure
*dsc
)
6289 int link
= bit (insn
, 7);
6290 unsigned int rm
= bits (insn
, 3, 6);
6292 if (debug_displaced
)
6293 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x",
6294 (unsigned short) insn
);
6296 dsc
->modinsn
[0] = THUMB_NOP
;
6298 install_bx_blx_reg (gdbarch
, regs
, dsc
, link
, INST_AL
, rm
);
6304 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
6307 cleanup_alu_imm (struct gdbarch
*gdbarch
,
6308 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
6310 ULONGEST rd_val
= displaced_read_reg (regs
, dsc
, 0);
6311 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
6312 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
6313 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
6317 arm_copy_alu_imm (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
6318 struct displaced_step_closure
*dsc
)
6320 unsigned int rn
= bits (insn
, 16, 19);
6321 unsigned int rd
= bits (insn
, 12, 15);
6322 unsigned int op
= bits (insn
, 21, 24);
6323 int is_mov
= (op
== 0xd);
6324 ULONGEST rd_val
, rn_val
;
6326 if (!insn_references_pc (insn
, 0x000ff000ul
))
6327 return arm_copy_unmodified (gdbarch
, insn
, "ALU immediate", dsc
);
6329 if (debug_displaced
)
6330 fprintf_unfiltered (gdb_stdlog
, "displaced: copying immediate %s insn "
6331 "%.8lx\n", is_mov
? "move" : "ALU",
6332 (unsigned long) insn
);
6334 /* Instruction is of form:
6336 <op><cond> rd, [rn,] #imm
6340 Preparation: tmp1, tmp2 <- r0, r1;
6342 Insn: <op><cond> r0, r1, #imm
6343 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6346 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6347 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
6348 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6349 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
6350 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
6351 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
6355 dsc
->modinsn
[0] = insn
& 0xfff00fff;
6357 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x10000;
6359 dsc
->cleanup
= &cleanup_alu_imm
;
6365 thumb2_copy_alu_imm (struct gdbarch
*gdbarch
, uint16_t insn1
,
6366 uint16_t insn2
, struct regcache
*regs
,
6367 struct displaced_step_closure
*dsc
)
6369 unsigned int op
= bits (insn1
, 5, 8);
6370 unsigned int rn
, rm
, rd
;
6371 ULONGEST rd_val
, rn_val
;
6373 rn
= bits (insn1
, 0, 3); /* Rn */
6374 rm
= bits (insn2
, 0, 3); /* Rm */
6375 rd
= bits (insn2
, 8, 11); /* Rd */
6377 /* This routine is only called for instruction MOV. */
6378 gdb_assert (op
== 0x2 && rn
== 0xf);
6380 if (rm
!= ARM_PC_REGNUM
&& rd
!= ARM_PC_REGNUM
)
6381 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "ALU imm", dsc
);
6383 if (debug_displaced
)
6384 fprintf_unfiltered (gdb_stdlog
, "displaced: copying reg %s insn %.4x%.4x\n",
6385 "ALU", insn1
, insn2
);
6387 /* Instruction is of form:
6389 <op><cond> rd, [rn,] #imm
6393 Preparation: tmp1, tmp2 <- r0, r1;
6395 Insn: <op><cond> r0, r1, #imm
6396 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6399 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6400 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
6401 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6402 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
6403 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
6404 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
6407 dsc
->modinsn
[0] = insn1
;
6408 dsc
->modinsn
[1] = ((insn2
& 0xf0f0) | 0x1);
6411 dsc
->cleanup
= &cleanup_alu_imm
;
6416 /* Copy/cleanup arithmetic/logic insns with register RHS. */
6419 cleanup_alu_reg (struct gdbarch
*gdbarch
,
6420 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
6425 rd_val
= displaced_read_reg (regs
, dsc
, 0);
6427 for (i
= 0; i
< 3; i
++)
6428 displaced_write_reg (regs
, dsc
, i
, dsc
->tmp
[i
], CANNOT_WRITE_PC
);
6430 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
6434 install_alu_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6435 struct displaced_step_closure
*dsc
,
6436 unsigned int rd
, unsigned int rn
, unsigned int rm
)
6438 ULONGEST rd_val
, rn_val
, rm_val
;
6440 /* Instruction is of form:
6442 <op><cond> rd, [rn,] rm [, <shift>]
6446 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6447 r0, r1, r2 <- rd, rn, rm
6448 Insn: <op><cond> r0, r1, r2 [, <shift>]
6449 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6452 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6453 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
6454 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
6455 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
6456 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6457 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
6458 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
6459 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
6460 displaced_write_reg (regs
, dsc
, 2, rm_val
, CANNOT_WRITE_PC
);
6463 dsc
->cleanup
= &cleanup_alu_reg
;
6467 arm_copy_alu_reg (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
6468 struct displaced_step_closure
*dsc
)
6470 unsigned int op
= bits (insn
, 21, 24);
6471 int is_mov
= (op
== 0xd);
6473 if (!insn_references_pc (insn
, 0x000ff00ful
))
6474 return arm_copy_unmodified (gdbarch
, insn
, "ALU reg", dsc
);
6476 if (debug_displaced
)
6477 fprintf_unfiltered (gdb_stdlog
, "displaced: copying reg %s insn %.8lx\n",
6478 is_mov
? "move" : "ALU", (unsigned long) insn
);
6481 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x2;
6483 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x10002;
6485 install_alu_reg (gdbarch
, regs
, dsc
, bits (insn
, 12, 15), bits (insn
, 16, 19),
6491 thumb_copy_alu_reg (struct gdbarch
*gdbarch
, uint16_t insn
,
6492 struct regcache
*regs
,
6493 struct displaced_step_closure
*dsc
)
6495 unsigned rn
, rm
, rd
;
6497 rd
= bits (insn
, 3, 6);
6498 rn
= (bit (insn
, 7) << 3) | bits (insn
, 0, 2);
6501 if (rd
!= ARM_PC_REGNUM
&& rn
!= ARM_PC_REGNUM
)
6502 return thumb_copy_unmodified_16bit (gdbarch
, insn
, "ALU reg", dsc
);
6504 if (debug_displaced
)
6505 fprintf_unfiltered (gdb_stdlog
, "displaced: copying reg %s insn %.4x\n",
6506 "ALU", (unsigned short) insn
);
6508 dsc
->modinsn
[0] = ((insn
& 0xff00) | 0x08);
6510 install_alu_reg (gdbarch
, regs
, dsc
, rd
, rn
, rm
);
6515 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6518 cleanup_alu_shifted_reg (struct gdbarch
*gdbarch
,
6519 struct regcache
*regs
,
6520 struct displaced_step_closure
*dsc
)
6522 ULONGEST rd_val
= displaced_read_reg (regs
, dsc
, 0);
6525 for (i
= 0; i
< 4; i
++)
6526 displaced_write_reg (regs
, dsc
, i
, dsc
->tmp
[i
], CANNOT_WRITE_PC
);
6528 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
6532 install_alu_shifted_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6533 struct displaced_step_closure
*dsc
,
6534 unsigned int rd
, unsigned int rn
, unsigned int rm
,
6538 ULONGEST rd_val
, rn_val
, rm_val
, rs_val
;
6540 /* Instruction is of form:
6542 <op><cond> rd, [rn,] rm, <shift> rs
6546 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6547 r0, r1, r2, r3 <- rd, rn, rm, rs
6548 Insn: <op><cond> r0, r1, r2, <shift> r3
6550 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6554 for (i
= 0; i
< 4; i
++)
6555 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
6557 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
6558 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6559 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
6560 rs_val
= displaced_read_reg (regs
, dsc
, rs
);
6561 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
6562 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
6563 displaced_write_reg (regs
, dsc
, 2, rm_val
, CANNOT_WRITE_PC
);
6564 displaced_write_reg (regs
, dsc
, 3, rs_val
, CANNOT_WRITE_PC
);
6566 dsc
->cleanup
= &cleanup_alu_shifted_reg
;
6570 arm_copy_alu_shifted_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
6571 struct regcache
*regs
,
6572 struct displaced_step_closure
*dsc
)
6574 unsigned int op
= bits (insn
, 21, 24);
6575 int is_mov
= (op
== 0xd);
6576 unsigned int rd
, rn
, rm
, rs
;
6578 if (!insn_references_pc (insn
, 0x000fff0ful
))
6579 return arm_copy_unmodified (gdbarch
, insn
, "ALU shifted reg", dsc
);
6581 if (debug_displaced
)
6582 fprintf_unfiltered (gdb_stdlog
, "displaced: copying shifted reg %s insn "
6583 "%.8lx\n", is_mov
? "move" : "ALU",
6584 (unsigned long) insn
);
6586 rn
= bits (insn
, 16, 19);
6587 rm
= bits (insn
, 0, 3);
6588 rs
= bits (insn
, 8, 11);
6589 rd
= bits (insn
, 12, 15);
6592 dsc
->modinsn
[0] = (insn
& 0xfff000f0) | 0x302;
6594 dsc
->modinsn
[0] = (insn
& 0xfff000f0) | 0x10302;
6596 install_alu_shifted_reg (gdbarch
, regs
, dsc
, rd
, rn
, rm
, rs
);
6601 /* Clean up load instructions. */
6604 cleanup_load (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6605 struct displaced_step_closure
*dsc
)
6607 ULONGEST rt_val
, rt_val2
= 0, rn_val
;
6609 rt_val
= displaced_read_reg (regs
, dsc
, 0);
6610 if (dsc
->u
.ldst
.xfersize
== 8)
6611 rt_val2
= displaced_read_reg (regs
, dsc
, 1);
6612 rn_val
= displaced_read_reg (regs
, dsc
, 2);
6614 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
6615 if (dsc
->u
.ldst
.xfersize
> 4)
6616 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
6617 displaced_write_reg (regs
, dsc
, 2, dsc
->tmp
[2], CANNOT_WRITE_PC
);
6618 if (!dsc
->u
.ldst
.immed
)
6619 displaced_write_reg (regs
, dsc
, 3, dsc
->tmp
[3], CANNOT_WRITE_PC
);
6621 /* Handle register writeback. */
6622 if (dsc
->u
.ldst
.writeback
)
6623 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, CANNOT_WRITE_PC
);
6624 /* Put result in right place. */
6625 displaced_write_reg (regs
, dsc
, dsc
->rd
, rt_val
, LOAD_WRITE_PC
);
6626 if (dsc
->u
.ldst
.xfersize
== 8)
6627 displaced_write_reg (regs
, dsc
, dsc
->rd
+ 1, rt_val2
, LOAD_WRITE_PC
);
6630 /* Clean up store instructions. */
6633 cleanup_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6634 struct displaced_step_closure
*dsc
)
6636 ULONGEST rn_val
= displaced_read_reg (regs
, dsc
, 2);
6638 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
6639 if (dsc
->u
.ldst
.xfersize
> 4)
6640 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
6641 displaced_write_reg (regs
, dsc
, 2, dsc
->tmp
[2], CANNOT_WRITE_PC
);
6642 if (!dsc
->u
.ldst
.immed
)
6643 displaced_write_reg (regs
, dsc
, 3, dsc
->tmp
[3], CANNOT_WRITE_PC
);
6644 if (!dsc
->u
.ldst
.restore_r4
)
6645 displaced_write_reg (regs
, dsc
, 4, dsc
->tmp
[4], CANNOT_WRITE_PC
);
6648 if (dsc
->u
.ldst
.writeback
)
6649 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, CANNOT_WRITE_PC
);
6652 /* Copy "extra" load/store instructions. These are halfword/doubleword
6653 transfers, which have a different encoding to byte/word transfers. */
6656 arm_copy_extra_ld_st (struct gdbarch
*gdbarch
, uint32_t insn
, int unpriveleged
,
6657 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
6659 unsigned int op1
= bits (insn
, 20, 24);
6660 unsigned int op2
= bits (insn
, 5, 6);
6661 unsigned int rt
= bits (insn
, 12, 15);
6662 unsigned int rn
= bits (insn
, 16, 19);
6663 unsigned int rm
= bits (insn
, 0, 3);
6664 char load
[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6665 char bytesize
[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6666 int immed
= (op1
& 0x4) != 0;
6668 ULONGEST rt_val
, rt_val2
= 0, rn_val
, rm_val
= 0;
6670 if (!insn_references_pc (insn
, 0x000ff00ful
))
6671 return arm_copy_unmodified (gdbarch
, insn
, "extra load/store", dsc
);
6673 if (debug_displaced
)
6674 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %sextra load/store "
6675 "insn %.8lx\n", unpriveleged
? "unpriveleged " : "",
6676 (unsigned long) insn
);
6678 opcode
= ((op2
<< 2) | (op1
& 0x1) | ((op1
& 0x4) >> 1)) - 4;
6681 internal_error (__FILE__
, __LINE__
,
6682 _("copy_extra_ld_st: instruction decode error"));
6684 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6685 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
6686 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
6688 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
6690 rt_val
= displaced_read_reg (regs
, dsc
, rt
);
6691 if (bytesize
[opcode
] == 8)
6692 rt_val2
= displaced_read_reg (regs
, dsc
, rt
+ 1);
6693 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6695 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
6697 displaced_write_reg (regs
, dsc
, 0, rt_val
, CANNOT_WRITE_PC
);
6698 if (bytesize
[opcode
] == 8)
6699 displaced_write_reg (regs
, dsc
, 1, rt_val2
, CANNOT_WRITE_PC
);
6700 displaced_write_reg (regs
, dsc
, 2, rn_val
, CANNOT_WRITE_PC
);
6702 displaced_write_reg (regs
, dsc
, 3, rm_val
, CANNOT_WRITE_PC
);
6705 dsc
->u
.ldst
.xfersize
= bytesize
[opcode
];
6706 dsc
->u
.ldst
.rn
= rn
;
6707 dsc
->u
.ldst
.immed
= immed
;
6708 dsc
->u
.ldst
.writeback
= bit (insn
, 24) == 0 || bit (insn
, 21) != 0;
6709 dsc
->u
.ldst
.restore_r4
= 0;
6712 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6714 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6715 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x20000;
6717 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6719 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6720 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x20003;
6722 dsc
->cleanup
= load
[opcode
] ? &cleanup_load
: &cleanup_store
;
6727 /* Copy byte/half word/word loads and stores. */
6730 install_load_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6731 struct displaced_step_closure
*dsc
, int load
,
6732 int immed
, int writeback
, int size
, int usermode
,
6733 int rt
, int rm
, int rn
)
6735 ULONGEST rt_val
, rn_val
, rm_val
= 0;
6737 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6738 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
6740 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
6742 dsc
->tmp
[4] = displaced_read_reg (regs
, dsc
, 4);
6744 rt_val
= displaced_read_reg (regs
, dsc
, rt
);
6745 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6747 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
6749 displaced_write_reg (regs
, dsc
, 0, rt_val
, CANNOT_WRITE_PC
);
6750 displaced_write_reg (regs
, dsc
, 2, rn_val
, CANNOT_WRITE_PC
);
6752 displaced_write_reg (regs
, dsc
, 3, rm_val
, CANNOT_WRITE_PC
);
6754 dsc
->u
.ldst
.xfersize
= size
;
6755 dsc
->u
.ldst
.rn
= rn
;
6756 dsc
->u
.ldst
.immed
= immed
;
6757 dsc
->u
.ldst
.writeback
= writeback
;
6759 /* To write PC we can do:
6761 Before this sequence of instructions:
6762 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
6763 r2 is the Rn value got from dispalced_read_reg.
6765 Insn1: push {pc} Write address of STR instruction + offset on stack
6766 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6767 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6768 = addr(Insn1) + offset - addr(Insn3) - 8
6770 Insn4: add r4, r4, #8 r4 = offset - 8
6771 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6773 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6775 Otherwise we don't know what value to write for PC, since the offset is
6776 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6777 of this can be found in Section "Saving from r15" in
6778 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6780 dsc
->cleanup
= load
? &cleanup_load
: &cleanup_store
;
6785 thumb2_copy_load_literal (struct gdbarch
*gdbarch
, uint16_t insn1
,
6786 uint16_t insn2
, struct regcache
*regs
,
6787 struct displaced_step_closure
*dsc
, int size
)
6789 unsigned int u_bit
= bit (insn1
, 7);
6790 unsigned int rt
= bits (insn2
, 12, 15);
6791 int imm12
= bits (insn2
, 0, 11);
6794 if (debug_displaced
)
6795 fprintf_unfiltered (gdb_stdlog
,
6796 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
6797 (unsigned int) dsc
->insn_addr
, rt
, u_bit
? '+' : '-',
6803 /* Rewrite instruction LDR Rt imm12 into:
6805 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6809 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6812 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6813 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
6814 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
6816 pc_val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
6818 pc_val
= pc_val
& 0xfffffffc;
6820 displaced_write_reg (regs
, dsc
, 2, pc_val
, CANNOT_WRITE_PC
);
6821 displaced_write_reg (regs
, dsc
, 3, imm12
, CANNOT_WRITE_PC
);
6825 dsc
->u
.ldst
.xfersize
= size
;
6826 dsc
->u
.ldst
.immed
= 0;
6827 dsc
->u
.ldst
.writeback
= 0;
6828 dsc
->u
.ldst
.restore_r4
= 0;
6830 /* LDR R0, R2, R3 */
6831 dsc
->modinsn
[0] = 0xf852;
6832 dsc
->modinsn
[1] = 0x3;
6835 dsc
->cleanup
= &cleanup_load
;
6841 thumb2_copy_load_reg_imm (struct gdbarch
*gdbarch
, uint16_t insn1
,
6842 uint16_t insn2
, struct regcache
*regs
,
6843 struct displaced_step_closure
*dsc
,
6844 int writeback
, int immed
)
6846 unsigned int rt
= bits (insn2
, 12, 15);
6847 unsigned int rn
= bits (insn1
, 0, 3);
6848 unsigned int rm
= bits (insn2
, 0, 3); /* Only valid if !immed. */
6849 /* In LDR (register), there is also a register Rm, which is not allowed to
6850 be PC, so we don't have to check it. */
6852 if (rt
!= ARM_PC_REGNUM
&& rn
!= ARM_PC_REGNUM
)
6853 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "load",
6856 if (debug_displaced
)
6857 fprintf_unfiltered (gdb_stdlog
,
6858 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
6859 rt
, rn
, insn1
, insn2
);
6861 install_load_store (gdbarch
, regs
, dsc
, 1, immed
, writeback
, 4,
6864 dsc
->u
.ldst
.restore_r4
= 0;
6867 /* ldr[b]<cond> rt, [rn, #imm], etc.
6869 ldr[b]<cond> r0, [r2, #imm]. */
6871 dsc
->modinsn
[0] = (insn1
& 0xfff0) | 0x2;
6872 dsc
->modinsn
[1] = insn2
& 0x0fff;
6875 /* ldr[b]<cond> rt, [rn, rm], etc.
6877 ldr[b]<cond> r0, [r2, r3]. */
6879 dsc
->modinsn
[0] = (insn1
& 0xfff0) | 0x2;
6880 dsc
->modinsn
[1] = (insn2
& 0x0ff0) | 0x3;
6890 arm_copy_ldr_str_ldrb_strb (struct gdbarch
*gdbarch
, uint32_t insn
,
6891 struct regcache
*regs
,
6892 struct displaced_step_closure
*dsc
,
6893 int load
, int size
, int usermode
)
6895 int immed
= !bit (insn
, 25);
6896 int writeback
= (bit (insn
, 24) == 0 || bit (insn
, 21) != 0);
6897 unsigned int rt
= bits (insn
, 12, 15);
6898 unsigned int rn
= bits (insn
, 16, 19);
6899 unsigned int rm
= bits (insn
, 0, 3); /* Only valid if !immed. */
6901 if (!insn_references_pc (insn
, 0x000ff00ful
))
6902 return arm_copy_unmodified (gdbarch
, insn
, "load/store", dsc
);
6904 if (debug_displaced
)
6905 fprintf_unfiltered (gdb_stdlog
,
6906 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
6907 load
? (size
== 1 ? "ldrb" : "ldr")
6908 : (size
== 1 ? "strb" : "str"), usermode
? "t" : "",
6910 (unsigned long) insn
);
6912 install_load_store (gdbarch
, regs
, dsc
, load
, immed
, writeback
, size
,
6913 usermode
, rt
, rm
, rn
);
6915 if (load
|| rt
!= ARM_PC_REGNUM
)
6917 dsc
->u
.ldst
.restore_r4
= 0;
6920 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6922 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6923 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x20000;
6925 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6927 {ldr,str}[b]<cond> r0, [r2, r3]. */
6928 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x20003;
6932 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6933 dsc
->u
.ldst
.restore_r4
= 1;
6934 dsc
->modinsn
[0] = 0xe92d8000; /* push {pc} */
6935 dsc
->modinsn
[1] = 0xe8bd0010; /* pop {r4} */
6936 dsc
->modinsn
[2] = 0xe044400f; /* sub r4, r4, pc. */
6937 dsc
->modinsn
[3] = 0xe2844008; /* add r4, r4, #8. */
6938 dsc
->modinsn
[4] = 0xe0800004; /* add r0, r0, r4. */
6942 dsc
->modinsn
[5] = (insn
& 0xfff00fff) | 0x20000;
6944 dsc
->modinsn
[5] = (insn
& 0xfff00ff0) | 0x20003;
6949 dsc
->cleanup
= load
? &cleanup_load
: &cleanup_store
;
6954 /* Cleanup LDM instructions with fully-populated register list. This is an
6955 unfortunate corner case: it's impossible to implement correctly by modifying
6956 the instruction. The issue is as follows: we have an instruction,
6960 which we must rewrite to avoid loading PC. A possible solution would be to
6961 do the load in two halves, something like (with suitable cleanup
6965 ldm[id][ab] r8!, {r0-r7}
6967 ldm[id][ab] r8, {r7-r14}
6970 but at present there's no suitable place for <temp>, since the scratch space
6971 is overwritten before the cleanup routine is called. For now, we simply
6972 emulate the instruction. */
6975 cleanup_block_load_all (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6976 struct displaced_step_closure
*dsc
)
6978 int inc
= dsc
->u
.block
.increment
;
6979 int bump_before
= dsc
->u
.block
.before
? (inc
? 4 : -4) : 0;
6980 int bump_after
= dsc
->u
.block
.before
? 0 : (inc
? 4 : -4);
6981 uint32_t regmask
= dsc
->u
.block
.regmask
;
6982 int regno
= inc
? 0 : 15;
6983 CORE_ADDR xfer_addr
= dsc
->u
.block
.xfer_addr
;
6984 int exception_return
= dsc
->u
.block
.load
&& dsc
->u
.block
.user
6985 && (regmask
& 0x8000) != 0;
6986 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
6987 int do_transfer
= condition_true (dsc
->u
.block
.cond
, status
);
6988 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
6993 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6994 sensible we can do here. Complain loudly. */
6995 if (exception_return
)
6996 error (_("Cannot single-step exception return"));
6998 /* We don't handle any stores here for now. */
6999 gdb_assert (dsc
->u
.block
.load
!= 0);
7001 if (debug_displaced
)
7002 fprintf_unfiltered (gdb_stdlog
, "displaced: emulating block transfer: "
7003 "%s %s %s\n", dsc
->u
.block
.load
? "ldm" : "stm",
7004 dsc
->u
.block
.increment
? "inc" : "dec",
7005 dsc
->u
.block
.before
? "before" : "after");
7012 while (regno
<= ARM_PC_REGNUM
&& (regmask
& (1 << regno
)) == 0)
7015 while (regno
>= 0 && (regmask
& (1 << regno
)) == 0)
7018 xfer_addr
+= bump_before
;
7020 memword
= read_memory_unsigned_integer (xfer_addr
, 4, byte_order
);
7021 displaced_write_reg (regs
, dsc
, regno
, memword
, LOAD_WRITE_PC
);
7023 xfer_addr
+= bump_after
;
7025 regmask
&= ~(1 << regno
);
7028 if (dsc
->u
.block
.writeback
)
7029 displaced_write_reg (regs
, dsc
, dsc
->u
.block
.rn
, xfer_addr
,
7033 /* Clean up an STM which included the PC in the register list. */
/* Cleanup for an STM whose register list included the PC: the displaced copy
   stored the scratch-area PC value, so read that word back, derive the
   architecture-dependent store offset, and rewrite the correct PC.  */
7036 cleanup_block_store_pc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7037 struct displaced_step_closure
*dsc
)
7039 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
7040 int store_executed
= condition_true (dsc
->u
.block
.cond
, status
);
/* NOTE(review): transferred_regs counts all bits of regmask — presumably
   including PC; the address arithmetic below depends on this — confirm.  */
7041 CORE_ADDR pc_stored_at
, transferred_regs
= bitcount (dsc
->u
.block
.regmask
);
7042 CORE_ADDR stm_insn_addr
;
7045 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
7047 /* If condition code fails, there's nothing else to do.  */
7048 if (!store_executed
)
/* Locate the address where PC was stored, which differs by
   increment/decrement and before/after addressing modes.  */
7051 if (dsc
->u
.block
.increment
)
7053 pc_stored_at
= dsc
->u
.block
.xfer_addr
+ 4 * transferred_regs
;
7055 if (dsc
->u
.block
.before
)
7060 pc_stored_at
= dsc
->u
.block
.xfer_addr
;
7062 if (dsc
->u
.block
.before
)
/* Read back the (scratch) PC that was stored, and compute the
   architecture-dependent offset relative to the copied insn.  */
7066 pc_val
= read_memory_unsigned_integer (pc_stored_at
, 4, byte_order
);
7067 stm_insn_addr
= dsc
->scratch_base
;
7068 offset
= pc_val
- stm_insn_addr
;
7070 if (debug_displaced
)
7071 fprintf_unfiltered (gdb_stdlog
, "displaced: detected PC offset %.8lx for "
7072 "STM instruction\n", offset
);
7074 /* Rewrite the stored PC to the proper value for the non-displaced original
7076 write_memory_unsigned_integer (pc_stored_at
, 4, byte_order
,
7077 dsc
->insn_addr
+ offset
);
7080 /* Clean up an LDM which includes the PC in the register list. We clumped all
7081 the registers in the transferred list into a contiguous range r0...rX (to
7082 avoid loading PC directly and losing control of the debugged program), so we
7083 must undo that here. */
7086 cleanup_block_load_pc (struct gdbarch
*gdbarch
,
7087 struct regcache
*regs
,
7088 struct displaced_step_closure
*dsc
)
7090 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
7091 int load_executed
= condition_true (dsc
->u
.block
.cond
, status
);
7092 unsigned int mask
= dsc
->u
.block
.regmask
, write_reg
= ARM_PC_REGNUM
;
7093 unsigned int regs_loaded
= bitcount (mask
);
7094 unsigned int num_to_shuffle
= regs_loaded
, clobbered
;
7096 /* The method employed here will fail if the register list is fully populated
7097 (we need to avoid loading PC directly). */
7098 gdb_assert (num_to_shuffle
< 16);
7103 clobbered
= (1 << num_to_shuffle
) - 1;
7105 while (num_to_shuffle
> 0)
7107 if ((mask
& (1 << write_reg
)) != 0)
7109 unsigned int read_reg
= num_to_shuffle
- 1;
7111 if (read_reg
!= write_reg
)
7113 ULONGEST rval
= displaced_read_reg (regs
, dsc
, read_reg
);
7114 displaced_write_reg (regs
, dsc
, write_reg
, rval
, LOAD_WRITE_PC
);
7115 if (debug_displaced
)
7116 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: move "
7117 "loaded register r%d to r%d\n"), read_reg
,
7120 else if (debug_displaced
)
7121 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: register "
7122 "r%d already in the right place\n"),
7125 clobbered
&= ~(1 << write_reg
);
7133 /* Restore any registers we scribbled over. */
7134 for (write_reg
= 0; clobbered
!= 0; write_reg
++)
7136 if ((clobbered
& (1 << write_reg
)) != 0)
7138 displaced_write_reg (regs
, dsc
, write_reg
, dsc
->tmp
[write_reg
],
7140 if (debug_displaced
)
7141 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: restored "
7142 "clobbered register r%d\n"), write_reg
);
7143 clobbered
&= ~(1 << write_reg
);
7147 /* Perform register writeback manually. */
7148 if (dsc
->u
.block
.writeback
)
7150 ULONGEST new_rn_val
= dsc
->u
.block
.xfer_addr
;
7152 if (dsc
->u
.block
.increment
)
7153 new_rn_val
+= regs_loaded
* 4;
7155 new_rn_val
-= regs_loaded
* 4;
7157 displaced_write_reg (regs
, dsc
, dsc
->u
.block
.rn
, new_rn_val
,
7162 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
7163 in user-level code (in particular exception return, ldm rn, {...pc}^). */
7166 arm_copy_block_xfer (struct gdbarch
*gdbarch
, uint32_t insn
,
7167 struct regcache
*regs
,
7168 struct displaced_step_closure
*dsc
)
7170 int load
= bit (insn
, 20);
7171 int user
= bit (insn
, 22);
7172 int increment
= bit (insn
, 23);
7173 int before
= bit (insn
, 24);
7174 int writeback
= bit (insn
, 21);
7175 int rn
= bits (insn
, 16, 19);
7177 /* Block transfers which don't mention PC can be run directly
7179 if (rn
!= ARM_PC_REGNUM
&& (insn
& 0x8000) == 0)
7180 return arm_copy_unmodified (gdbarch
, insn
, "ldm/stm", dsc
);
7182 if (rn
== ARM_PC_REGNUM
)
7184 warning (_("displaced: Unpredictable LDM or STM with "
7185 "base register r15"));
7186 return arm_copy_unmodified (gdbarch
, insn
, "unpredictable ldm/stm", dsc
);
7189 if (debug_displaced
)
7190 fprintf_unfiltered (gdb_stdlog
, "displaced: copying block transfer insn "
7191 "%.8lx\n", (unsigned long) insn
);
7193 dsc
->u
.block
.xfer_addr
= displaced_read_reg (regs
, dsc
, rn
);
7194 dsc
->u
.block
.rn
= rn
;
7196 dsc
->u
.block
.load
= load
;
7197 dsc
->u
.block
.user
= user
;
7198 dsc
->u
.block
.increment
= increment
;
7199 dsc
->u
.block
.before
= before
;
7200 dsc
->u
.block
.writeback
= writeback
;
7201 dsc
->u
.block
.cond
= bits (insn
, 28, 31);
7203 dsc
->u
.block
.regmask
= insn
& 0xffff;
7207 if ((insn
& 0xffff) == 0xffff)
7209 /* LDM with a fully-populated register list. This case is
7210 particularly tricky. Implement for now by fully emulating the
7211 instruction (which might not behave perfectly in all cases, but
7212 these instructions should be rare enough for that not to matter
7214 dsc
->modinsn
[0] = ARM_NOP
;
7216 dsc
->cleanup
= &cleanup_block_load_all
;
7220 /* LDM of a list of registers which includes PC. Implement by
7221 rewriting the list of registers to be transferred into a
7222 contiguous chunk r0...rX before doing the transfer, then shuffling
7223 registers into the correct places in the cleanup routine. */
7224 unsigned int regmask
= insn
& 0xffff;
7225 unsigned int num_in_list
= bitcount (regmask
), new_regmask
, bit
= 1;
7226 unsigned int to
= 0, from
= 0, i
, new_rn
;
7228 for (i
= 0; i
< num_in_list
; i
++)
7229 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
7231 /* Writeback makes things complicated. We need to avoid clobbering
7232 the base register with one of the registers in our modified
7233 register list, but just using a different register can't work in
7236 ldm r14!, {r0-r13,pc}
7238 which would need to be rewritten as:
7242 but that can't work, because there's no free register for N.
7244 Solve this by turning off the writeback bit, and emulating
7245 writeback manually in the cleanup routine. */
7250 new_regmask
= (1 << num_in_list
) - 1;
7252 if (debug_displaced
)
7253 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM r%d%s, "
7254 "{..., pc}: original reg list %.4x, modified "
7255 "list %.4x\n"), rn
, writeback
? "!" : "",
7256 (int) insn
& 0xffff, new_regmask
);
7258 dsc
->modinsn
[0] = (insn
& ~0xffff) | (new_regmask
& 0xffff);
7260 dsc
->cleanup
= &cleanup_block_load_pc
;
7265 /* STM of a list of registers which includes PC. Run the instruction
7266 as-is, but out of line: this will store the wrong value for the PC,
7267 so we must manually fix up the memory in the cleanup routine.
7268 Doing things this way has the advantage that we can auto-detect
7269 the offset of the PC write (which is architecture-dependent) in
7270 the cleanup routine. */
7271 dsc
->modinsn
[0] = insn
;
7273 dsc
->cleanup
= &cleanup_block_store_pc
;
7280 thumb2_copy_block_xfer (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
7281 struct regcache
*regs
,
7282 struct displaced_step_closure
*dsc
)
7284 int rn
= bits (insn1
, 0, 3);
7285 int load
= bit (insn1
, 4);
7286 int writeback
= bit (insn1
, 5);
7288 /* Block transfers which don't mention PC can be run directly
7290 if (rn
!= ARM_PC_REGNUM
&& (insn2
& 0x8000) == 0)
7291 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "ldm/stm", dsc
);
7293 if (rn
== ARM_PC_REGNUM
)
7295 warning (_("displaced: Unpredictable LDM or STM with "
7296 "base register r15"));
7297 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7298 "unpredictable ldm/stm", dsc
);
7301 if (debug_displaced
)
7302 fprintf_unfiltered (gdb_stdlog
, "displaced: copying block transfer insn "
7303 "%.4x%.4x\n", insn1
, insn2
);
7305 /* Clear bit 13, since it should be always zero. */
7306 dsc
->u
.block
.regmask
= (insn2
& 0xdfff);
7307 dsc
->u
.block
.rn
= rn
;
7309 dsc
->u
.block
.load
= load
;
7310 dsc
->u
.block
.user
= 0;
7311 dsc
->u
.block
.increment
= bit (insn1
, 7);
7312 dsc
->u
.block
.before
= bit (insn1
, 8);
7313 dsc
->u
.block
.writeback
= writeback
;
7314 dsc
->u
.block
.cond
= INST_AL
;
7315 dsc
->u
.block
.xfer_addr
= displaced_read_reg (regs
, dsc
, rn
);
7319 if (dsc
->u
.block
.regmask
== 0xffff)
7321 /* This branch is impossible to happen. */
7326 unsigned int regmask
= dsc
->u
.block
.regmask
;
7327 unsigned int num_in_list
= bitcount (regmask
), new_regmask
, bit
= 1;
7328 unsigned int to
= 0, from
= 0, i
, new_rn
;
7330 for (i
= 0; i
< num_in_list
; i
++)
7331 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
7336 new_regmask
= (1 << num_in_list
) - 1;
7338 if (debug_displaced
)
7339 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM r%d%s, "
7340 "{..., pc}: original reg list %.4x, modified "
7341 "list %.4x\n"), rn
, writeback
? "!" : "",
7342 (int) dsc
->u
.block
.regmask
, new_regmask
);
7344 dsc
->modinsn
[0] = insn1
;
7345 dsc
->modinsn
[1] = (new_regmask
& 0xffff);
7348 dsc
->cleanup
= &cleanup_block_load_pc
;
7353 dsc
->modinsn
[0] = insn1
;
7354 dsc
->modinsn
[1] = insn2
;
7356 dsc
->cleanup
= &cleanup_block_store_pc
;
7361 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7362 for Linux, where some SVC instructions must be treated specially. */
7365 cleanup_svc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7366 struct displaced_step_closure
*dsc
)
7368 CORE_ADDR resume_addr
= dsc
->insn_addr
+ dsc
->insn_size
;
7370 if (debug_displaced
)
7371 fprintf_unfiltered (gdb_stdlog
, "displaced: cleanup for svc, resume at "
7372 "%.8lx\n", (unsigned long) resume_addr
);
7374 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, resume_addr
, BRANCH_WRITE_PC
);
7378 /* Common copy routine for svc instruciton. */
7381 install_svc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7382 struct displaced_step_closure
*dsc
)
7384 /* Preparation: none.
7385 Insn: unmodified svc.
7386 Cleanup: pc <- insn_addr + insn_size. */
7388 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7390 dsc
->wrote_to_pc
= 1;
7392 /* Allow OS-specific code to override SVC handling. */
7393 if (dsc
->u
.svc
.copy_svc_os
)
7394 return dsc
->u
.svc
.copy_svc_os (gdbarch
, regs
, dsc
);
7397 dsc
->cleanup
= &cleanup_svc
;
7403 arm_copy_svc (struct gdbarch
*gdbarch
, uint32_t insn
,
7404 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
7407 if (debug_displaced
)
7408 fprintf_unfiltered (gdb_stdlog
, "displaced: copying svc insn %.8lx\n",
7409 (unsigned long) insn
);
7411 dsc
->modinsn
[0] = insn
;
7413 return install_svc (gdbarch
, regs
, dsc
);
7417 thumb_copy_svc (struct gdbarch
*gdbarch
, uint16_t insn
,
7418 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
7421 if (debug_displaced
)
7422 fprintf_unfiltered (gdb_stdlog
, "displaced: copying svc insn %.4x\n",
7425 dsc
->modinsn
[0] = insn
;
7427 return install_svc (gdbarch
, regs
, dsc
);
7430 /* Copy undefined instructions. */
7433 arm_copy_undef (struct gdbarch
*gdbarch
, uint32_t insn
,
7434 struct displaced_step_closure
*dsc
)
7436 if (debug_displaced
)
7437 fprintf_unfiltered (gdb_stdlog
,
7438 "displaced: copying undefined insn %.8lx\n",
7439 (unsigned long) insn
);
7441 dsc
->modinsn
[0] = insn
;
7447 thumb_32bit_copy_undef (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
7448 struct displaced_step_closure
*dsc
)
7451 if (debug_displaced
)
7452 fprintf_unfiltered (gdb_stdlog
, "displaced: copying undefined insn "
7453 "%.4x %.4x\n", (unsigned short) insn1
,
7454 (unsigned short) insn2
);
7456 dsc
->modinsn
[0] = insn1
;
7457 dsc
->modinsn
[1] = insn2
;
7463 /* Copy unpredictable instructions. */
7466 arm_copy_unpred (struct gdbarch
*gdbarch
, uint32_t insn
,
7467 struct displaced_step_closure
*dsc
)
7469 if (debug_displaced
)
7470 fprintf_unfiltered (gdb_stdlog
, "displaced: copying unpredictable insn "
7471 "%.8lx\n", (unsigned long) insn
);
7473 dsc
->modinsn
[0] = insn
;
/* The decode_* functions are instruction decoding helpers.  They mostly follow
   the presentation in the ARM ARM.  */

static int
arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
			      struct regcache *regs,
			      struct displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
  unsigned int rn = bits (insn, 16, 19);

  if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
    return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
  else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
    return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
  else if ((op1 & 0x60) == 0x20)
    return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
  else if ((op1 & 0x71) == 0x40)
    return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
				dsc);
  else if ((op1 & 0x77) == 0x41)
    return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
  else if ((op1 & 0x77) == 0x45)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
  else if ((op1 & 0x77) == 0x51)
    {
      if (rn != 0xf)
	return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
      else
	return arm_copy_unpred (gdbarch, insn, dsc);
    }
  else if ((op1 & 0x77) == 0x55)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
  else if (op1 == 0x57)
    switch (op2)
      {
      case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
      case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
      case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
      case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
      default: return arm_copy_unpred (gdbarch, insn, dsc);
      }
  else if ((op1 & 0x63) == 0x43)
    return arm_copy_unpred (gdbarch, insn, dsc);
  else if ((op2 & 0x1) == 0x0)
    switch (op1 & ~0x80)
      {
      case 0x61:
	return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
      case 0x65:
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
      case 0x71: case 0x75:
	/* pld/pldw reg.  */
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
      case 0x63: case 0x67: case 0x73: case 0x77:
	return arm_copy_unpred (gdbarch, insn, dsc);
      default:
	return arm_copy_undef (gdbarch, insn, dsc);
      }
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Probably unreachable.  */
}
/* Decode the unconditional (cond == 0b1111) ARM instruction space and
   dispatch to the appropriate copy routine.  */

static int
arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  if (bit (insn, 27) == 0)
    return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
  /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx.  */
  else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
    {
    case 0x0: case 0x2:
      return arm_copy_unmodified (gdbarch, insn, "srs", dsc);

    case 0x1: case 0x3:
      return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);

    case 0x4: case 0x5: case 0x6: case 0x7:
      return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);

    case 0x8:
      switch ((insn & 0xe00000) >> 21)
	{
	case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
	  /* stc/stc2.  */
	  return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	case 0x2:
	  return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);

	default:
	  return arm_copy_undef (gdbarch, insn, dsc);
	}

    case 0x9:
      {
	int rn_f = (bits (insn, 16, 19) == 0xf);

	switch ((insn & 0xe00000) >> 21)
	  {
	  case 0x1: case 0x3:
	    /* ldc/ldc2 imm (undefined for rn == pc).  */
	    return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
			: arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	  case 0x2:
	    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);

	  case 0x4: case 0x5: case 0x6: case 0x7:
	    /* ldc/ldc2 lit (undefined for rn != pc).  */
	    return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
			: arm_copy_undef (gdbarch, insn, dsc);

	  default:
	    return arm_copy_undef (gdbarch, insn, dsc);
	  }
      }

    case 0xa:
      return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);

    case 0xb:
      if (bits (insn, 16, 19) == 0xf)
	/* ldc/ldc2 lit.  */
	return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0xc:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    case 0xd:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
/* Decode miscellaneous instructions in dp/misc encoding space.  */

static int
arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  unsigned int op2 = bits (insn, 4, 6);
  unsigned int op = bits (insn, 21, 22);
  unsigned int op1 = bits (insn, 16, 19);

  switch (op2)
    {
    case 0x0:
      return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);

    case 0x1:
      if (op == 0x1)  /* bx.  */
	return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
      else if (op == 0x3)
	return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x2:
      if (op == 0x1)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x3:
      if (op == 0x1)
	return arm_copy_bx_blx_reg (gdbarch, insn,
				    regs, dsc);  /* blx register.  */
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x5:
      return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);

    case 0x7:
      if (op == 0x1)
	return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
      else if (op == 0x3)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
      /* Fall through for other OP values.  */

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
/* Decode the data-processing / miscellaneous ARM instruction space and
   dispatch to the matching copy routine.  */

static int
arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
		    struct regcache *regs,
		    struct displaced_step_closure *dsc)
{
  if (bit (insn, 25))
    switch (bits (insn, 20, 24))
      {
      case 0x10:
	return arm_copy_unmodified (gdbarch, insn, "movw", dsc);

      case 0x14:
	return arm_copy_unmodified (gdbarch, insn, "movt", dsc);

      case 0x12: case 0x16:
	return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);

      default:
	return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
      }
  else
    {
      uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);

      if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
	return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
	return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
	return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
	return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
      else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
      else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
      else if (op2 == 0xb || (op2 & 0xd) == 0xd)
	/* 2nd arg means "unprivileged".  */
	return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
				     dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode load/store of word or unsigned byte and dispatch to
   arm_copy_ldr_str_ldrb_strb with the appropriate (load, size, usermode)
   flags.  A is bit 25 (register offset form), B is bit 4.  */

static int
arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
			     struct regcache *regs,
			     struct displaced_step_closure *dsc)
{
  int a = bit (insn, 25), b = bit (insn, 4);
  uint32_t op1 = bits (insn, 20, 24);

  if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
      || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x02)
	   || (a && (op1 & 0x17) == 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
	   || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x03)
	   || (a && (op1 & 0x17) == 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
	   || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x06)
	   || (a && (op1 & 0x17) == 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
  else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
	   || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x07)
	   || (a && (op1 & 0x17) == 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);

  /* Should be unreachable.  */
  return 1;
}
/* Decode the ARM media instruction space (parallel add/sub, pack/unpack,
   usad8/usada8, bitfield insns).  None of these can touch PC, so they all
   run unmodified or are undefined.  */

static int
arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
		  struct displaced_step_closure *dsc)
{
  switch (bits (insn, 20, 24))
    {
    case 0x00: case 0x01: case 0x02: case 0x03:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed",
				  dsc);

    case 0x04: case 0x05: case 0x06: case 0x07:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned",
				  dsc);

    case 0x08: case 0x09: case 0x0a: case 0x0b:
    case 0x0c: case 0x0d: case 0x0e: case 0x0f:
      return arm_copy_unmodified (gdbarch, insn,
				  "decode/pack/unpack/saturate/reverse", dsc);

    case 0x18:
      if (bits (insn, 5, 7) == 0)  /* op2.  */
	{
	  if (bits (insn, 12, 15) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1a: case 0x1b:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1c: case 0x1d:
      if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
	{
	  if (bits (insn, 0, 3) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1e: case 0x1f:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode the branch / block-transfer space: bit 25 selects B/BL/BLX versus
   LDM/STM.  */

static int
arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
			struct regcache *regs,
			struct displaced_step_closure *dsc)
{
  if (bit (insn, 25))
    return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
  else
    return arm_copy_block_xfer (gdbarch, insn, regs, dsc);
}
/* Decode VFP/Neon extension register load/store instructions.  */

static int
arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn, 20, 24);

  switch (opcode)
    {
    case 0x04: case 0x05:  /* VFP/Neon mrrc/mcrr.  */
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);

    case 0x08: case 0x0a: case 0x0c: case 0x0e:
    case 0x12: case 0x16:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0b: case 0x0d: case 0x0f:
    case 0x13: case 0x17:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* Note: no writeback for these instructions.  Bit 25 will always be
	 zero though (via caller), so the following works OK.  */
      return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode shifted register instructions.  */

static int
thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
			    uint16_t insn2, struct regcache *regs,
			    struct displaced_step_closure *dsc)
{
  /* PC is only allowed to be used in instruction MOV.  */

  unsigned int op = bits (insn1, 5, 8);
  unsigned int rn = bits (insn1, 0, 3);

  if (op == 0x2 && rn == 0xf)  /* MOV */
    return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					"dp (shift reg)", dsc);
}
/* Decode extension register load/store.  Exactly the same as
   arm_decode_ext_reg_ld_st.  */

static int
thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
			     uint16_t insn2, struct regcache *regs,
			     struct displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn1, 4, 8);

  switch (opcode)
    {
    case 0x04: case 0x05:
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vmov", dsc);

    case 0x08: case 0x0c: /* 01x00 */
    case 0x0a: case 0x0e: /* 01x10 */
    case 0x12: case 0x16: /* 10x10 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0d: /* 01x01 */
    case 0x0b: case 0x0f: /* 01x11 */
    case 0x13: case 0x17: /* 10x11 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vstr", dsc);
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
7919 arm_decode_svc_copro (struct gdbarch
*gdbarch
, uint32_t insn
, CORE_ADDR to
,
7920 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
7922 unsigned int op1
= bits (insn
, 20, 25);
7923 int op
= bit (insn
, 4);
7924 unsigned int coproc
= bits (insn
, 8, 11);
7925 unsigned int rn
= bits (insn
, 16, 19);
7927 if ((op1
& 0x20) == 0x00 && (op1
& 0x3a) != 0x00 && (coproc
& 0xe) == 0xa)
7928 return arm_decode_ext_reg_ld_st (gdbarch
, insn
, regs
, dsc
);
7929 else if ((op1
& 0x21) == 0x00 && (op1
& 0x3a) != 0x00
7930 && (coproc
& 0xe) != 0xa)
7932 return arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
);
7933 else if ((op1
& 0x21) == 0x01 && (op1
& 0x3a) != 0x00
7934 && (coproc
& 0xe) != 0xa)
7935 /* ldc/ldc2 imm/lit. */
7936 return arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
);
7937 else if ((op1
& 0x3e) == 0x00)
7938 return arm_copy_undef (gdbarch
, insn
, dsc
);
7939 else if ((op1
& 0x3e) == 0x04 && (coproc
& 0xe) == 0xa)
7940 return arm_copy_unmodified (gdbarch
, insn
, "neon 64bit xfer", dsc
);
7941 else if (op1
== 0x04 && (coproc
& 0xe) != 0xa)
7942 return arm_copy_unmodified (gdbarch
, insn
, "mcrr/mcrr2", dsc
);
7943 else if (op1
== 0x05 && (coproc
& 0xe) != 0xa)
7944 return arm_copy_unmodified (gdbarch
, insn
, "mrrc/mrrc2", dsc
);
7945 else if ((op1
& 0x30) == 0x20 && !op
)
7947 if ((coproc
& 0xe) == 0xa)
7948 return arm_copy_unmodified (gdbarch
, insn
, "vfp dataproc", dsc
);
7950 return arm_copy_unmodified (gdbarch
, insn
, "cdp/cdp2", dsc
);
7952 else if ((op1
& 0x30) == 0x20 && op
)
7953 return arm_copy_unmodified (gdbarch
, insn
, "neon 8/16/32 bit xfer", dsc
);
7954 else if ((op1
& 0x31) == 0x20 && op
&& (coproc
& 0xe) != 0xa)
7955 return arm_copy_unmodified (gdbarch
, insn
, "mcr/mcr2", dsc
);
7956 else if ((op1
& 0x31) == 0x21 && op
&& (coproc
& 0xe) != 0xa)
7957 return arm_copy_unmodified (gdbarch
, insn
, "mrc/mrc2", dsc
);
7958 else if ((op1
& 0x30) == 0x30)
7959 return arm_copy_svc (gdbarch
, insn
, regs
, dsc
);
7961 return arm_copy_undef (gdbarch
, insn
, dsc
); /* Possibly unreachable. */
/* Decode the Thumb-2 coprocessor / SIMD / VFP instruction space and
   dispatch to the matching copy routine.  */

static int
thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
			 uint16_t insn2, struct regcache *regs,
			 struct displaced_step_closure *dsc)
{
  unsigned int coproc = bits (insn2, 8, 11);
  unsigned int op1 = bits (insn1, 4, 9);
  unsigned int bit_5_8 = bits (insn1, 5, 8);
  unsigned int bit_9 = bit (insn1, 9);
  unsigned int bit_4 = bit (insn1, 4);
  unsigned int rn = bits (insn1, 0, 3);

  if (bit_9 == 0)
    {
      if (bit_5_8 == 2)
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
					    dsc);
      else if (bit_5_8 == 0) /* UNDEFINED.  */
	return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      else
	{
	  /* coproc is 101x.  SIMD/VFP, ext registers load/store.  */
	  if ((coproc & 0xe) == 0xa)
	    return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
						dsc);
	  else /* coproc is not 101x.  */
	    {
	      if (bit_4 == 0) /* STC/STC2.  */
		return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						    "stc/stc2", dsc);
	      else /* LDC/LDC2 {literal, immediate}.  */
		return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
						     regs, dsc);
	    }
	}
    }
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);

  return 0;
}
8008 install_pc_relative (struct gdbarch
*gdbarch
, struct regcache
*regs
,
8009 struct displaced_step_closure
*dsc
, int rd
)
8015 Preparation: Rd <- PC
8021 int val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
8022 displaced_write_reg (regs
, dsc
, rd
, val
, CANNOT_WRITE_PC
);
8026 thumb_copy_pc_relative_16bit (struct gdbarch
*gdbarch
, struct regcache
*regs
,
8027 struct displaced_step_closure
*dsc
,
8028 int rd
, unsigned int imm
)
8031 /* Encoding T2: ADDS Rd, #imm */
8032 dsc
->modinsn
[0] = (0x3000 | (rd
<< 8) | imm
);
8034 install_pc_relative (gdbarch
, regs
, dsc
, rd
);
8040 thumb_decode_pc_relative_16bit (struct gdbarch
*gdbarch
, uint16_t insn
,
8041 struct regcache
*regs
,
8042 struct displaced_step_closure
*dsc
)
8044 unsigned int rd
= bits (insn
, 8, 10);
8045 unsigned int imm8
= bits (insn
, 0, 7);
8047 if (debug_displaced
)
8048 fprintf_unfiltered (gdb_stdlog
,
8049 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
8052 return thumb_copy_pc_relative_16bit (gdbarch
, regs
, dsc
, rd
, imm8
);
8056 thumb_copy_pc_relative_32bit (struct gdbarch
*gdbarch
, uint16_t insn1
,
8057 uint16_t insn2
, struct regcache
*regs
,
8058 struct displaced_step_closure
*dsc
)
8060 unsigned int rd
= bits (insn2
, 8, 11);
8061 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
8062 extract raw immediate encoding rather than computing immediate. When
8063 generating ADD or SUB instruction, we can simply perform OR operation to
8064 set immediate into ADD. */
8065 unsigned int imm_3_8
= insn2
& 0x70ff;
8066 unsigned int imm_i
= insn1
& 0x0400; /* Clear all bits except bit 10. */
8068 if (debug_displaced
)
8069 fprintf_unfiltered (gdb_stdlog
,
8070 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
8071 rd
, imm_i
, imm_3_8
, insn1
, insn2
);
8073 if (bit (insn1
, 7)) /* Encoding T2 */
8075 /* Encoding T3: SUB Rd, Rd, #imm */
8076 dsc
->modinsn
[0] = (0xf1a0 | rd
| imm_i
);
8077 dsc
->modinsn
[1] = ((rd
<< 8) | imm_3_8
);
8079 else /* Encoding T3 */
8081 /* Encoding T3: ADD Rd, Rd, #imm */
8082 dsc
->modinsn
[0] = (0xf100 | rd
| imm_i
);
8083 dsc
->modinsn
[1] = ((rd
<< 8) | imm_3_8
);
8087 install_pc_relative (gdbarch
, regs
, dsc
, rd
);
8093 thumb_copy_16bit_ldr_literal (struct gdbarch
*gdbarch
, unsigned short insn1
,
8094 struct regcache
*regs
,
8095 struct displaced_step_closure
*dsc
)
8097 unsigned int rt
= bits (insn1
, 8, 10);
8099 int imm8
= (bits (insn1
, 0, 7) << 2);
8100 CORE_ADDR from
= dsc
->insn_addr
;
8106 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8108 Insn: LDR R0, [R2, R3];
8109 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
8111 if (debug_displaced
)
8112 fprintf_unfiltered (gdb_stdlog
,
8113 "displaced: copying thumb ldr r%d [pc #%d]\n"
8116 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
8117 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
8118 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
8119 pc
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
8120 /* The assembler calculates the required value of the offset from the
8121 Align(PC,4) value of this instruction to the label. */
8122 pc
= pc
& 0xfffffffc;
8124 displaced_write_reg (regs
, dsc
, 2, pc
, CANNOT_WRITE_PC
);
8125 displaced_write_reg (regs
, dsc
, 3, imm8
, CANNOT_WRITE_PC
);
8128 dsc
->u
.ldst
.xfersize
= 4;
8130 dsc
->u
.ldst
.immed
= 0;
8131 dsc
->u
.ldst
.writeback
= 0;
8132 dsc
->u
.ldst
.restore_r4
= 0;
8134 dsc
->modinsn
[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8136 dsc
->cleanup
= &cleanup_load
;
8141 /* Copy Thumb cbnz/cbz insruction. */
8144 thumb_copy_cbnz_cbz (struct gdbarch
*gdbarch
, uint16_t insn1
,
8145 struct regcache
*regs
,
8146 struct displaced_step_closure
*dsc
)
8148 int non_zero
= bit (insn1
, 11);
8149 unsigned int imm5
= (bit (insn1
, 9) << 6) | (bits (insn1
, 3, 7) << 1);
8150 CORE_ADDR from
= dsc
->insn_addr
;
8151 int rn
= bits (insn1
, 0, 2);
8152 int rn_val
= displaced_read_reg (regs
, dsc
, rn
);
8154 dsc
->u
.branch
.cond
= (rn_val
&& non_zero
) || (!rn_val
&& !non_zero
);
8155 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
8156 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
8157 condition is false, let it be, cleanup_branch will do nothing. */
8158 if (dsc
->u
.branch
.cond
)
8160 dsc
->u
.branch
.cond
= INST_AL
;
8161 dsc
->u
.branch
.dest
= from
+ 4 + imm5
;
8164 dsc
->u
.branch
.dest
= from
+ 2;
8166 dsc
->u
.branch
.link
= 0;
8167 dsc
->u
.branch
.exchange
= 0;
8169 if (debug_displaced
)
8170 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %s [r%d = 0x%x]"
8171 " insn %.4x to %.8lx\n", non_zero
? "cbnz" : "cbz",
8172 rn
, rn_val
, insn1
, dsc
->u
.branch
.dest
);
8174 dsc
->modinsn
[0] = THUMB_NOP
;
8176 dsc
->cleanup
= &cleanup_branch
;
8180 /* Copy Table Branch Byte/Halfword */
8182 thumb2_copy_table_branch (struct gdbarch
*gdbarch
, uint16_t insn1
,
8183 uint16_t insn2
, struct regcache
*regs
,
8184 struct displaced_step_closure
*dsc
)
8186 ULONGEST rn_val
, rm_val
;
8187 int is_tbh
= bit (insn2
, 4);
8188 CORE_ADDR halfwords
= 0;
8189 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
8191 rn_val
= displaced_read_reg (regs
, dsc
, bits (insn1
, 0, 3));
8192 rm_val
= displaced_read_reg (regs
, dsc
, bits (insn2
, 0, 3));
8198 target_read_memory (rn_val
+ 2 * rm_val
, buf
, 2);
8199 halfwords
= extract_unsigned_integer (buf
, 2, byte_order
);
8205 target_read_memory (rn_val
+ rm_val
, buf
, 1);
8206 halfwords
= extract_unsigned_integer (buf
, 1, byte_order
);
8209 if (debug_displaced
)
8210 fprintf_unfiltered (gdb_stdlog
, "displaced: %s base 0x%x offset 0x%x"
8211 " offset 0x%x\n", is_tbh
? "tbh" : "tbb",
8212 (unsigned int) rn_val
, (unsigned int) rm_val
,
8213 (unsigned int) halfwords
);
8215 dsc
->u
.branch
.cond
= INST_AL
;
8216 dsc
->u
.branch
.link
= 0;
8217 dsc
->u
.branch
.exchange
= 0;
8218 dsc
->u
.branch
.dest
= dsc
->insn_addr
+ 4 + 2 * halfwords
;
8220 dsc
->cleanup
= &cleanup_branch
;
8226 cleanup_pop_pc_16bit_all (struct gdbarch
*gdbarch
, struct regcache
*regs
,
8227 struct displaced_step_closure
*dsc
)
8230 int val
= displaced_read_reg (regs
, dsc
, 7);
8231 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, val
, BX_WRITE_PC
);
8234 val
= displaced_read_reg (regs
, dsc
, 8);
8235 displaced_write_reg (regs
, dsc
, 7, val
, CANNOT_WRITE_PC
);
8238 displaced_write_reg (regs
, dsc
, 8, dsc
->tmp
[0], CANNOT_WRITE_PC
);
/* Copy a 16-bit Thumb POP instruction whose register list includes PC
   into the displaced-stepping scratch space, rewriting it so the PC is
   never written directly by the stepped instruction.  Returns 0.  */

static int
thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
                         struct regcache *regs,
                         struct displaced_step_closure *dsc)
{
  dsc->u.block.regmask = insn1 & 0x00ff;

  /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
     to:

     (1) register list is full, that is, r0-r7 are used.
     Prepare: tmp[0] <- r8

     POP {r0, r1, ...., r6, r7}; remove PC from reglist
     MOV r8, r7; Move value of r7 to r8;
     POP {r7}; Store PC value into r7.

     Cleanup: PC <- r7, r7 <- r8, r8 <- tmp[0]

     (2) register list is not full, supposing there are N registers in
     register list (except PC, 0 <= N <= 7).
     Prepare: for each i, 0 - N, tmp[i] <- ri.

     POP {r0, r1, ...., rN};

     Cleanup: Set registers in original reglist from r0 - rN.  Restore r0 - rN
     from tmp[] properly.  */
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
                        "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
                        dsc->u.block.regmask, insn1);

  if (dsc->u.block.regmask == 0xff)
    {
      /* Case (1): full list.  Save r8 so cleanup can restore it.  */
      dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);

      dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
      dsc->modinsn[1] = 0x46b8;           /* MOV r8, r7 */
      dsc->modinsn[2] = 0xbc80;           /* POP {r7} */

      dsc->numinsns = 3;
      dsc->cleanup = &cleanup_pop_pc_16bit_all;
    }
  else
    {
      /* Case (2): partial list.  Pop into the low registers r0..rN and
         let cleanup_block_load_pc redistribute the values.  */
      unsigned int num_in_list = bitcount (dsc->u.block.regmask);
      unsigned int new_regmask, bit = 1;
      unsigned int to = 0, from = 0, i, new_rn;

      /* Save r0..rN (one extra slot for the PC value).  */
      for (i = 0; i < num_in_list + 1; i++)
        dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

      new_regmask = (1 << (num_in_list + 1)) - 1;

      if (debug_displaced)
        fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
                                          "{..., pc}: original reg list %.4x,"
                                          " modified list %.4x\n"),
                            (int) dsc->u.block.regmask, new_regmask);

      /* Record the original transfer (with PC) for the cleanup routine.  */
      dsc->u.block.regmask |= 0x8000;
      dsc->u.block.writeback = 0;
      dsc->u.block.cond = INST_AL;

      dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);

      dsc->cleanup = &cleanup_block_load_pc;
    }

  return 0;
}
/* Decode a 16-bit Thumb instruction INSN1 for displaced stepping and
   dispatch to the appropriate copy routine.  Instructions that do not
   reference the PC are copied unmodified.  Calls internal_error on a
   decode failure.  */

static void
thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
                                    struct regcache *regs,
                                    struct displaced_step_closure *dsc)
{
  unsigned short op_bit_12_15 = bits (insn1, 12, 15);
  unsigned short op_bit_10_11 = bits (insn1, 10, 11);
  int err = 0;

  /* 16-bit thumb instructions.  */
  switch (op_bit_12_15)
    {
      /* Shift (immediate), add, subtract, move and compare.  */
    case 0: case 1: case 2: case 3:
      err = thumb_copy_unmodified_16bit (gdbarch, insn1,
                                         "shift/add/sub/mov/cmp",
                                         dsc);
      break;

    case 4:
      switch (op_bit_10_11)
        {
        case 0: /* Data-processing */
          err = thumb_copy_unmodified_16bit (gdbarch, insn1,
                                             "data-processing",
                                             dsc);
          break;
        case 1: /* Special data instructions and branch and exchange.  */
          {
            unsigned short op = bits (insn1, 7, 9);
            if (op == 6 || op == 7) /* BX or BLX */
              err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
            else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers.  */
              err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
            else
              err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
                                                 dsc);
          }
          break;
        default: /* LDR (literal) */
          err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
        }
      break;

    case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
      break;

    case 10:
      if (op_bit_10_11 < 2) /* Generate PC-relative address */
        err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
      else /* Generate SP-relative address */
        err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
      break;

    case 11: /* Misc 16-bit instructions */
      {
        switch (bits (insn1, 8, 11))
          {
          case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
            err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
            break;
          case 12: case 13: /* POP */
            if (bit (insn1, 8)) /* PC is in register list.  */
              err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
            else
              err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
            break;
          case 15: /* If-Then, and hints */
            if (bits (insn1, 0, 3))
              /* If-Then makes up to four following instructions conditional.
                 IT instruction itself is not conditional, so handle it as a
                 common unmodified instruction.  */
              err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
                                                 dsc);
            else
              err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
            break;
          default:
            err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
          }
      }
      break;

    case 12:
      if (op_bit_10_11 < 2) /* Store multiple registers */
        err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
      else /* Load multiple registers */
        err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
      break;

    case 13: /* Conditional branch and supervisor call */
      if (bits (insn1, 9, 11) != 7) /* conditional branch */
        err = thumb_copy_b (gdbarch, insn1, dsc);
      else
        err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
      break;

    case 14: /* Unconditional branch */
      err = thumb_copy_b (gdbarch, insn1, dsc);
      break;

    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
                    _("thumb_process_displaced_16bit_insn: Instruction decode error"));
}
/* Decode a 32-bit Thumb-2 load / memory-hint instruction (INSN1:INSN2)
   for displaced stepping and dispatch to the matching copy routine.
   Returns the result of the chosen copy routine (0 on success).  */

static int
decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
                                 uint16_t insn1, uint16_t insn2,
                                 struct regcache *regs,
                                 struct displaced_step_closure *dsc)
{
  int rt = bits (insn2, 12, 15);
  int rn = bits (insn1, 0, 3);
  int op1 = bits (insn1, 7, 8);

  switch (bits (insn1, 5, 6))
    {
    case 0: /* Load byte and memory hints */
      if (rt == 0xf) /* PLD/PLI */
        {
          if (rn == 0xf)
            /* PLD literal or Encoding T3 of PLI(immediate, literal).  */
            return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
          else
            return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                "pli/pld", dsc);
        }
      else
        {
          if (rn == 0xf) /* LDRB/LDRSB (literal) */
            return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
                                             1);
          else
            return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                "ldrb{reg, immediate}/ldrbt",
                                                dsc);
        }
      break;

    case 1: /* Load halfword and memory hints.  */
      if (rt == 0xf) /* PLD{W} and Unalloc memory hint.  */
        return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                            "pld/unalloc memhint", dsc);
      else
        {
          if (rn == 0xf) /* LDRH/LDRSH (literal) */
            return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
                                             2);
          else
            return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                "ldrh/ldrht", dsc);
        }
      break;

    case 2: /* Load word */
      {
        int insn2_bit_8_11 = bits (insn2, 8, 11);

        if (rn == 0xf) /* LDR (literal) */
          return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
        else if (op1 == 0x1) /* Encoding T3 */
          return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
                                           0, 1);
        else /* op1 == 0x0 */
          {
            if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
              /* LDR (immediate) */
              return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
                                               dsc, bit (insn2, 8), 1);
            else if (insn2_bit_8_11 == 0xe) /* LDRT */
              return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                  "ldrt", dsc);
            else
              /* LDR (register) */
              return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
                                               dsc, 0, 0);
          }
        break;
      }

    default:
      return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      break;
    }

  return 0;
}
/* Decode a 32-bit Thumb-2 instruction (INSN1:INSN2) for displaced
   stepping and dispatch to the appropriate copy routine.  Calls
   internal_error on a decode failure.  */

static void
thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
                                    uint16_t insn2, struct regcache *regs,
                                    struct displaced_step_closure *dsc)
{
  int err = 0;
  unsigned short op = bit (insn2, 15);
  unsigned int op1 = bits (insn1, 11, 12);

  switch (op1)
    {
    case 1:
      {
        switch (bits (insn1, 9, 10))
          {
          case 0:
            if (bit (insn1, 6))
              {
                /* Load/store {dual, exclusive}, table branch.  */
                if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
                    && bits (insn2, 5, 7) == 0)
                  err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
                                                  dsc);
                else
                  /* PC is not allowed to use in load/store {dual, exclusive}
                     instructions.  */
                  err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                     "load/store dual/ex", dsc);
              }
            else /* load/store multiple */
              {
                switch (bits (insn1, 7, 8))
                  {
                  case 0: case 3: /* SRS, RFE */
                    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                       "srs/rfe", dsc);
                    break;
                  case 1: case 2: /* LDM/STM/PUSH/POP */
                    err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
                    break;
                  }
              }
            break;
          case 1:
            /* Data-processing (shift register).  */
            err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
                                              dsc);
            break;
          default: /* Coprocessor instructions.  */
            err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
            break;
          }
        break;
      }

    case 2: /* op1 = 2 */
      if (op) /* Branch and misc control.  */
        {
          if (bit (insn2, 14)  /* BLX/BL */
              || bit (insn2, 12) /* Unconditional branch */
              || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
            err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
          else
            err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                               "misc ctrl", dsc);
        }
      else
        {
          if (bit (insn1, 9)) /* Data processing (plain binary imm).  */
            {
              int op = bits (insn1, 4, 8);
              int rn = bits (insn1, 0, 3);
              /* ADR and ADDW/SUBW with Rn == PC are PC-relative.  */
              if ((op == 0 || op == 0xa) && rn == 0xf)
                err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
                                                    regs, dsc);
              else
                err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                   "dp/pb", dsc);
            }
          else /* Data processing (modified immediate) */
            err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                               "dp/mi", dsc);
        }
      break;

    case 3: /* op1 = 3 */
      switch (bits (insn1, 9, 10))
        {
        case 0:
          if (bit (insn1, 4))
            err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
                                                   regs, dsc);
          else /* NEON Load/Store and Store single data item */
            err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                               "neon elt/struct load/store",
                                               dsc);
          break;
        case 1: /* op1 = 3, bits (9, 10) == 1 */
          switch (bits (insn1, 7, 8))
            {
            case 0: case 1: /* Data processing (register) */
              err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                 "dp(reg)", dsc);
              break;
            case 2: /* Multiply and absolute difference */
              err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                 "mul/mua/diff", dsc);
              break;
            case 3: /* Long multiply and divide */
              err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                 "lmul/lmua", dsc);
              break;
            }
          break;
        default: /* Coprocessor instructions */
          err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
          break;
        }
      break;

    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
                    _("thumb_process_displaced_32bit_insn: Instruction decode error"));
}
/* Prepare displaced stepping of the Thumb instruction at FROM: read it
   from memory, record its size in DSC, and hand off to the 16-bit or
   32-bit decoder as appropriate.  */

static void
thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
                              CORE_ADDR to, struct regcache *regs,
                              struct displaced_step_closure *dsc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  uint16_t insn1
    = read_memory_unsigned_integer (from, 2, byte_order_for_code);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
                        "at %.8lx\n", insn1, (unsigned long) from);

  dsc->is_thumb = 1;
  dsc->insn_size = thumb_insn_size (insn1);
  if (thumb_insn_size (insn1) == 4)
    {
      /* 32-bit Thumb-2 encoding: fetch the second halfword.  */
      uint16_t insn2
        = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
      thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
    }
  else
    thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
}
/* Decode the instruction at FROM for displaced stepping, filling in DSC
   with the (possibly modified) instruction(s) to execute at TO and any
   cleanup callback.  Dispatches to the Thumb path when the inferior is
   not in ARM mode.  Calls internal_error on a decode failure.  */

void
arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
                            CORE_ADDR to, struct regcache *regs,
                            struct displaced_step_closure *dsc)
{
  int err = 0;
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  uint32_t insn;

  /* Most displaced instructions use a 1-instruction scratch space, so set this
     here and override below if/when necessary.  */
  dsc->numinsns = 1;
  dsc->insn_addr = from;
  dsc->scratch_base = to;
  dsc->cleanup = NULL;
  dsc->wrote_to_pc = 0;

  if (!displaced_in_arm_mode (regs))
    return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);

  dsc->is_thumb = 0;
  dsc->insn_size = 4;
  insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
                        "at %.8lx\n", (unsigned long) insn,
                        (unsigned long) from);

  /* Dispatch on the major opcode fields (cond == 0xf is the
     unconditional space; otherwise bits 25-27 plus bit 4).  */
  if ((insn & 0xf0000000) == 0xf0000000)
    err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
  else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
    {
    case 0x0: case 0x1: case 0x2: case 0x3:
      err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
      break;

    case 0x4: case 0x5: case 0x6:
      err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
      break;

    case 0x7:
      err = arm_decode_media (gdbarch, insn, dsc);
      break;

    case 0x8: case 0x9: case 0xa: case 0xb:
      err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
      break;

    case 0xc: case 0xd: case 0xe: case 0xf:
      err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
      break;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
                    _("arm_process_displaced_insn: Instruction decode error"));
}
/* Actually set up the scratch space for a displaced instruction: write
   the modified instruction(s) recorded in DSC to the scratch area at TO,
   followed by a breakpoint instruction so the inferior stops after the
   single step.  */

void
arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
                            CORE_ADDR to, struct displaced_step_closure *dsc)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  unsigned int i, len, offset;
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  /* Thumb modinsn entries are halfwords; ARM entries are words.  */
  int size = dsc->is_thumb ? 2 : 4;
  const gdb_byte *bkp_insn;

  offset = 0;
  /* Poke modified instruction(s).  */
  for (i = 0; i < dsc->numinsns; i++)
    {
      if (debug_displaced)
        {
          fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
          if (size == 4)
            fprintf_unfiltered (gdb_stdlog, "%.8lx",
                                dsc->modinsn[i]);
          else if (size == 2)
            fprintf_unfiltered (gdb_stdlog, "%.4x",
                                (unsigned short) dsc->modinsn[i]);

          fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
                              (unsigned long) to + offset);
        }
      write_memory_unsigned_integer (to + offset, size,
                                     byte_order_for_code,
                                     dsc->modinsn[i]);
      offset += size;
    }

  /* Choose the correct breakpoint instruction.  */
  if (dsc->is_thumb)
    {
      bkp_insn = tdep->thumb_breakpoint;
      len = tdep->thumb_breakpoint_size;
    }
  else
    {
      bkp_insn = tdep->arm_breakpoint;
      len = tdep->arm_breakpoint_size;
    }

  /* Put breakpoint afterwards.  */
  write_memory (to + offset, bkp_insn, len);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
                        paddress (gdbarch, from), paddress (gdbarch, to));
}
8766 /* Entry point for copying an instruction into scratch space for displaced
8769 struct displaced_step_closure
*
8770 arm_displaced_step_copy_insn (struct gdbarch
*gdbarch
,
8771 CORE_ADDR from
, CORE_ADDR to
,
8772 struct regcache
*regs
)
8774 struct displaced_step_closure
*dsc
8775 = xmalloc (sizeof (struct displaced_step_closure
));
8776 arm_process_displaced_insn (gdbarch
, from
, to
, regs
, dsc
);
8777 arm_displaced_init_closure (gdbarch
, from
, to
, dsc
);
/* Entry point for cleaning things up after a displaced instruction has been
   single-stepped: run the instruction-specific cleanup callback (if any),
   and if nothing wrote to the PC, advance it past the original
   instruction.  */

void
arm_displaced_step_fixup (struct gdbarch *gdbarch,
                          struct displaced_step_closure *dsc,
                          CORE_ADDR from, CORE_ADDR to,
                          struct regcache *regs)
{
  if (dsc->cleanup)
    dsc->cleanup (gdbarch, regs, dsc);

  /* Fall-through case: resume at the instruction following the original.  */
  if (!dsc->wrote_to_pc)
    regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
                                    dsc->insn_addr + dsc->insn_size);
}
8800 #include "bfd-in2.h"
8801 #include "libcoff.h"
/* Disassemble the instruction at MEMADDR, switching the opcodes
   disassembler into Thumb mode when the address is recognized as Thumb
   code.  Returns the length consumed, as print_insn_{big,little}_arm
   does.  */

static int
gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
{
  struct gdbarch *gdbarch = info->application_data;

  if (arm_pc_is_thumb (gdbarch, memaddr))
    {
      static asymbol *asym;
      static combined_entry_type ce;
      static struct coff_symbol_struct csym;
      static struct bfd fake_bfd;
      static bfd_target fake_target;

      if (csym.native == NULL)
        {
          /* Create a fake symbol vector containing a Thumb symbol.
             This is solely so that the code in print_insn_little_arm()
             and print_insn_big_arm() in opcodes/arm-dis.c will detect
             the presence of a Thumb symbol and switch to decoding
             Thumb instructions.  */

          fake_target.flavour = bfd_target_coff_flavour;
          fake_bfd.xvec = &fake_target;
          ce.u.syment.n_sclass = C_THUMBEXTFUNC;
          csym.native = &ce;
          csym.symbol.the_bfd = &fake_bfd;
          csym.symbol.name = "fake";
          asym = (asymbol *) & csym;
        }

      /* Strip the Thumb bit so the disassembler sees the real address.  */
      memaddr = UNMAKE_THUMB_ADDR (memaddr);
      info->symbols = &asym;
    }
  else
    info->symbols = NULL;

  if (info->endian == BFD_ENDIAN_BIG)
    return print_insn_big_arm (memaddr, info);
  else
    return print_insn_little_arm (memaddr, info);
}
8845 /* The following define instruction sequences that will cause ARM
8846 cpu's to take an undefined instruction trap. These are used to
8847 signal a breakpoint to GDB.
8849 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8850 modes. A different instruction is required for each mode. The ARM
8851 cpu's can also be big or little endian. Thus four different
8852 instructions are needed to support all cases.
8854 Note: ARMv4 defines several new instructions that will take the
8855 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8856 not in fact add the new instructions. The new undefined
8857 instructions in ARMv4 are all instructions that had no defined
8858 behaviour in earlier chips. There is no guarantee that they will
8859 raise an exception, but may be treated as NOP's. In practice, it
8860 may only be safe to rely on instructions matching:
8862 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8863 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8864 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8866 Even this may only be true if the condition predicate is true. The
8867 following use a condition predicate of ALWAYS so it is always TRUE.
8869 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8870 and NetBSD all use a software interrupt rather than an undefined
8871 instruction to force a trap. This can be handled by the
8872 abi-specific code during establishment of the gdbarch vector. */
/* Undefined-instruction encodings used as breakpoints (see the comment
   above): one per mode (ARM/Thumb) and endianness.  Note the Thumb
   encoding is endian-symmetric, so LE and BE are identical.  */
#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
#define THUMB_BE_BREAKPOINT {0xbe,0xbe}

/* Default breakpoint byte sequences installed into the gdbarch tdep.  */
static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8884 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
8885 the program counter value to determine whether a 16-bit or 32-bit
8886 breakpoint should be used. It returns a pointer to a string of
8887 bytes that encode a breakpoint instruction, stores the length of
8888 the string to *lenptr, and adjusts the program counter (if
8889 necessary) to point to the actual memory location where the
8890 breakpoint should be inserted. */
/* Determine the type and size of breakpoint to insert at PCPTR (see the
   comment above): returns the breakpoint bytes, stores their length in
   *LENPTR, and strips the Thumb bit from *PCPTR when needed.  */

static const unsigned char *
arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);

  if (arm_pc_is_thumb (gdbarch, *pcptr))
    {
      *pcptr = UNMAKE_THUMB_ADDR (*pcptr);

      /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
         check whether we are replacing a 32-bit instruction.  */
      if (tdep->thumb2_breakpoint != NULL)
        {
          gdb_byte buf[2];

          if (target_read_memory (*pcptr, buf, 2) == 0)
            {
              unsigned short inst1;

              inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
              if (thumb_insn_size (inst1) == 4)
                {
                  *lenptr = tdep->thumb2_breakpoint_size;
                  return tdep->thumb2_breakpoint;
                }
            }
        }

      *lenptr = tdep->thumb_breakpoint_size;
      return tdep->thumb_breakpoint;
    }
  else
    {
      *lenptr = tdep->arm_breakpoint_size;
      return tdep->arm_breakpoint;
    }
}
/* Implement the remote-protocol breakpoint "kind" for ARM: compute the
   breakpoint length via arm_breakpoint_from_pc, then translate a 4-byte
   Thumb-2 breakpoint into the documented kind value 3.  */

static void
arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
                               int *kindptr)
{
  arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);

  if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
    /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
       that this is not confused with a 32-bit ARM breakpoint.  */
    *kindptr = 3;
}
/* Extract from an array REGBUF containing the (raw) register state a
   function return value of type TYPE, and copy that, in virtual
   format, into VALBUF.  */

static void
arm_extract_return_value (struct type *type, struct regcache *regs,
                          gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = get_regcache_arch (regs);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE_FLT == TYPE_CODE (type))
    {
      switch (gdbarch_tdep (gdbarch)->fp_model)
        {
        case ARM_FLOAT_FPA:
          {
            /* The value is in register F0 in internal format.  We need to
               extract the raw value and then convert it to the desired
               internal type.  */
            bfd_byte tmpbuf[FP_REGISTER_SIZE];

            regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
            convert_from_extended (floatformat_from_type (type), tmpbuf,
                                   valbuf, gdbarch_byte_order (gdbarch));
          }
          break;

        case ARM_FLOAT_SOFT_FPA:
        case ARM_FLOAT_SOFT_VFP:
          /* ARM_FLOAT_VFP can arise if this is a variadic function so
             not using the VFP ABI code.  */
        case ARM_FLOAT_VFP:
          /* Soft-float / variadic-VFP: value is in r0 (and r1 for
             doubles).  */
          regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
          if (TYPE_LENGTH (type) > 4)
            regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
                                  valbuf + INT_REGISTER_SIZE);
          break;

        default:
          internal_error (__FILE__, __LINE__,
                          _("arm_extract_return_value: "
                            "Floating point model not supported"));
          break;
        }
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
           || TYPE_CODE (type) == TYPE_CODE_CHAR
           || TYPE_CODE (type) == TYPE_CODE_BOOL
           || TYPE_CODE (type) == TYPE_CODE_PTR
           || TYPE_CODE (type) == TYPE_CODE_REF
           || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      /* If the type is a plain integer, then the access is
         straight-forward.  Otherwise we have to play around a bit
         more.  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      ULONGEST tmp;

      while (len > 0)
        {
          /* By using store_unsigned_integer we avoid having to do
             anything special for small big-endian values.  */
          regcache_cooked_read_unsigned (regs, regno++, &tmp);
          store_unsigned_integer (valbuf,
                                  (len > INT_REGISTER_SIZE
                                   ? INT_REGISTER_SIZE : len),
                                  byte_order, tmp);
          len -= INT_REGISTER_SIZE;
          valbuf += INT_REGISTER_SIZE;
        }
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
         been stored to word-aligned memory and then loaded into
         registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[INT_REGISTER_SIZE];

      while (len > 0)
        {
          regcache_cooked_read (regs, regno++, tmpbuf);
          memcpy (valbuf, tmpbuf,
                  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
          len -= INT_REGISTER_SIZE;
          valbuf += INT_REGISTER_SIZE;
        }
    }
}
/* Will a function return an aggregate type in memory or in a
   register?  Return 0 if an aggregate type can be returned in a
   register, 1 if it must be returned in memory.  */

static int
arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
{
  int nRc;
  enum type_code code;

  CHECK_TYPEDEF (type);

  /* In the ARM ABI, "integer" like aggregate types are returned in
     registers.  For an aggregate type to be integer like, its size
     must be less than or equal to INT_REGISTER_SIZE and the
     offset of each addressable subfield must be zero.  Note that bit
     fields are not addressable, and all addressable subfields of
     unions always start at offset zero.

     This function is based on the behaviour of GCC 2.95.1.
     See: gcc/arm.c: arm_return_in_memory() for details.

     Note: All versions of GCC before GCC 2.95.2 do not set up the
     parameters correctly for a function returning the following
     structure: struct { float f;}; This should be returned in memory,
     not a register.  Richard Earnshaw sent me a patch, but I do not
     know of any way to detect if a function like the above has been
     compiled with the correct calling convention.  */

  /* All aggregate types that won't fit in a register must be returned
     in memory.  */
  if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
    {
      return 1;
    }

  /* The AAPCS says all aggregates not larger than a word are returned
     in a register.  */
  if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
    return 0;

  /* The only aggregate types that can be returned in a register are
     structs and unions.  Arrays must be returned in memory.  */
  code = TYPE_CODE (type);
  if ((TYPE_CODE_STRUCT != code) && (TYPE_CODE_UNION != code))
    {
      return 1;
    }

  /* Assume all other aggregate types can be returned in a register.
     Run a check for structures, unions and arrays.  */
  nRc = 0;

  if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
    {
      int i;

      /* Need to check if this struct/union is "integer" like.  For
         this to be true, its size must be less than or equal to
         INT_REGISTER_SIZE and the offset of each addressable
         subfield must be zero.  Note that bit fields are not
         addressable, and unions always start at offset zero.  If any
         of the subfields is a floating point type, the struct/union
         cannot be an integer type.  */

      /* For each field in the object, check:
         1) Is it FP? --> yes, nRc = 1;
         2) Is it addressable (bitpos != 0) and
         not packed (bitsize == 0)?
         --> yes, nRc = 1  */

      for (i = 0; i < TYPE_NFIELDS (type); i++)
        {
          enum type_code field_type_code;

          field_type_code = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
                                                                       i)));

          /* Is it a floating point type field?  */
          if (field_type_code == TYPE_CODE_FLT)
            {
              nRc = 1;
              break;
            }

          /* If bitpos != 0, then we have to care about it.  */
          if (TYPE_FIELD_BITPOS (type, i) != 0)
            {
              /* Bitfields are not addressable.  If the field bitsize is
                 zero, then the field is not packed.  Hence it cannot be
                 a bitfield or any other packed type.  */
              if (TYPE_FIELD_BITSIZE (type, i) == 0)
                {
                  nRc = 1;
                  break;
                }
            }
        }
    }

  return nRc;
}
/* Write into appropriate registers a function return value of type
   TYPE, given in virtual format.  */

static void
arm_store_return_value (struct type *type, struct regcache *regs,
                        const gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = get_regcache_arch (regs);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE (type) == TYPE_CODE_FLT)
    {
      gdb_byte buf[MAX_REGISTER_SIZE];

      switch (gdbarch_tdep (gdbarch)->fp_model)
        {
        case ARM_FLOAT_FPA:
          /* Convert to the FPA extended format held in F0.  */
          convert_to_extended (floatformat_from_type (type), buf, valbuf,
                               gdbarch_byte_order (gdbarch));
          regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
          break;

        case ARM_FLOAT_SOFT_FPA:
        case ARM_FLOAT_SOFT_VFP:
          /* ARM_FLOAT_VFP can arise if this is a variadic function so
             not using the VFP ABI code.  */
        case ARM_FLOAT_VFP:
          regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
          if (TYPE_LENGTH (type) > 4)
            regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
                                   valbuf + INT_REGISTER_SIZE);
          break;

        default:
          internal_error (__FILE__, __LINE__,
                          _("arm_store_return_value: Floating "
                            "point model not supported"));
          break;
        }
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
           || TYPE_CODE (type) == TYPE_CODE_CHAR
           || TYPE_CODE (type) == TYPE_CODE_BOOL
           || TYPE_CODE (type) == TYPE_CODE_PTR
           || TYPE_CODE (type) == TYPE_CODE_REF
           || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      if (TYPE_LENGTH (type) <= 4)
        {
          /* Values of one word or less are zero/sign-extended and
             returned in r0.  */
          bfd_byte tmpbuf[INT_REGISTER_SIZE];
          LONGEST val = unpack_long (type, valbuf);

          store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
          regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
        }
      else
        {
          /* Integral values greater than one word are stored in consecutive
             registers starting with r0.  This will always be a multiple of
             the register size.  */
          int len = TYPE_LENGTH (type);
          int regno = ARM_A1_REGNUM;

          while (len > 0)
            {
              regcache_cooked_write (regs, regno++, valbuf);
              len -= INT_REGISTER_SIZE;
              valbuf += INT_REGISTER_SIZE;
            }
        }
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
         been stored to word-aligned memory and then loaded into
         registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[INT_REGISTER_SIZE];

      while (len > 0)
        {
          memcpy (tmpbuf, valbuf,
                  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
          regcache_cooked_write (regs, regno++, tmpbuf);
          len -= INT_REGISTER_SIZE;
          valbuf += INT_REGISTER_SIZE;
        }
    }
}
/* Handle function return values: decide between register and memory
   (struct) return conventions, and read/write the value via READBUF /
   WRITEBUF when the register convention applies.  Supports the VFP
   "composite procedure-call" ABI when the function qualifies.  */

static enum return_value_convention
arm_return_value (struct gdbarch *gdbarch, struct value *function,
                  struct type *valtype, struct regcache *regcache,
                  gdb_byte *readbuf, const gdb_byte *writebuf)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  struct type *func_type = function ? value_type (function) : NULL;
  enum arm_vfp_cprc_base_type vfp_base_type;
  int vfp_base_count;

  /* VFP ABI: homogeneous FP/vector aggregates go in s/d/q registers.  */
  if (arm_vfp_abi_for_function (gdbarch, func_type)
      && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
    {
      int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
      int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
      int i;

      for (i = 0; i < vfp_base_count; i++)
        {
          if (reg_char == 'q')
            {
              /* Quad registers are accessed as pairs of doubles.  */
              if (writebuf)
                arm_neon_quad_write (gdbarch, regcache, i,
                                     writebuf + i * unit_length);

              if (readbuf)
                arm_neon_quad_read (gdbarch, regcache, i,
                                    readbuf + i * unit_length);
            }
          else
            {
              char name_buf[4];
              int regnum;

              xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
              regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
                                                    strlen (name_buf));
              if (writebuf)
                regcache_cooked_write (regcache, regnum,
                                       writebuf + i * unit_length);
              if (readbuf)
                regcache_cooked_read (regcache, regnum,
                                      readbuf + i * unit_length);
            }
        }
      return RETURN_VALUE_REGISTER_CONVENTION;
    }

  if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
      || TYPE_CODE (valtype) == TYPE_CODE_UNION
      || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
    {
      if (tdep->struct_return == pcc_struct_return
          || arm_return_in_memory (gdbarch, valtype))
        return RETURN_VALUE_STRUCT_CONVENTION;
    }

  /* AAPCS returns complex types longer than a register in memory.  */
  if (tdep->arm_abi != ARM_ABI_APCS
      && TYPE_CODE (valtype) == TYPE_CODE_COMPLEX
      && TYPE_LENGTH (valtype) > INT_REGISTER_SIZE)
    return RETURN_VALUE_STRUCT_CONVENTION;

  if (writebuf)
    arm_store_return_value (valtype, regcache, writebuf);

  if (readbuf)
    arm_extract_return_value (valtype, regcache, readbuf);

  return RETURN_VALUE_REGISTER_CONVENTION;
}
/* Determine the target PC of a longjmp call: read the saved PC slot
   out of the jmp_buf whose address is in r0 (the first argument
   register).  Returns 1 and sets *PC on success, 0 on a memory read
   failure.  */

static int
arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
{
  struct gdbarch *gdbarch = get_frame_arch (frame);
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  CORE_ADDR jb_addr;
  gdb_byte buf[INT_REGISTER_SIZE];

  jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);

  /* jb_pc/jb_elt_size are ABI-specific layout parameters from the tdep.  */
  if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
                          INT_REGISTER_SIZE))
    return 0;

  *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
  return 1;
}
/* Recognize GCC and GNU ld's trampolines.  If we are in a trampoline,
   return the target PC.  Otherwise return 0.  */

static CORE_ADDR
arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
{
  const char *name;
  int namelen;
  CORE_ADDR start_addr;

  /* Find the starting address and name of the function containing the PC.  */
  if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
    {
      /* Trampoline 'bx reg' doesn't belong to any functions.  Do the
         check here.  */
      start_addr = arm_skip_bx_reg (frame, pc);
      if (start_addr != 0)
        return start_addr;

      return 0;
    }

  /* If PC is in a Thumb call or return stub, return the address of the
     target PC, which is in a register.  The thunk functions are called
     _call_via_xx, where x is the register name.  The possible names
     are r0-r9, sl, fp, ip, sp, and lr.  ARM RealView has similar
     functions, named __ARM_call_via_r[0-7].  */
  if (strncmp (name, "_call_via_", 10) == 0
      || strncmp (name, "__ARM_call_via_", strlen ("__ARM_call_via_")) == 0)
    {
      /* Use the name suffix to determine which register contains the
         target PC.  */
      static char *table[15] =
      {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
       "r8", "r9", "sl", "fp", "ip", "sp", "lr"
      };
      int regno;
      int offset = strlen (name) - 2;

      for (regno = 0; regno <= 14; regno++)
        if (strcmp (&name[offset], table[regno]) == 0)
          return get_frame_register_unsigned (frame, regno);
    }

  /* GNU ld generates __foo_from_arm or __foo_from_thumb for
     non-interworking calls to foo.  We could decode the stubs
     to find the target but it's easier to use the symbol table.  */
  namelen = strlen (name);
  if (name[0] == '_' && name[1] == '_'
      && ((namelen > 2 + strlen ("_from_thumb")
           && strncmp (name + namelen - strlen ("_from_thumb"), "_from_thumb",
                       strlen ("_from_thumb")) == 0)
          || (namelen > 2 + strlen ("_from_arm")
              && strncmp (name + namelen - strlen ("_from_arm"), "_from_arm",
                          strlen ("_from_arm")) == 0)))
    {
      char *target_name;
      int target_len = namelen - 2;
      struct bound_minimal_symbol minsym;
      struct objfile *objfile;
      struct obj_section *sec;

      /* Strip the "__" prefix and the "_from_*" suffix to recover the
         target symbol name.  */
      if (name[namelen - 1] == 'b')
        target_len -= strlen ("_from_thumb");
      else
        target_len -= strlen ("_from_arm");

      target_name = alloca (target_len + 1);
      memcpy (target_name, name + 2, target_len);
      target_name[target_len] = '\0';

      sec = find_pc_section (pc);
      objfile = (sec == NULL) ? NULL : sec->objfile;
      minsym = lookup_minimal_symbol (target_name, NULL, objfile);
      if (minsym.minsym != NULL)
        return BMSYMBOL_VALUE_ADDRESS (minsym);
      else
        return 0;
    }

  return 0;                     /* not a stub */
}
9409 set_arm_command (char *args
, int from_tty
)
9411 printf_unfiltered (_("\
9412 \"set arm\" must be followed by an apporpriate subcommand.\n"));
9413 help_list (setarmcmdlist
, "set arm ", all_commands
, gdb_stdout
);
9417 show_arm_command (char *args
, int from_tty
)
9419 cmd_show_list (showarmcmdlist
, from_tty
, "");
9423 arm_update_current_architecture (void)
9425 struct gdbarch_info info
;
9427 /* If the current architecture is not ARM, we have nothing to do. */
9428 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch
!= bfd_arch_arm
)
9431 /* Update the architecture. */
9432 gdbarch_info_init (&info
);
9434 if (!gdbarch_update_p (info
))
9435 internal_error (__FILE__
, __LINE__
, _("could not update architecture"));
9439 set_fp_model_sfunc (char *args
, int from_tty
,
9440 struct cmd_list_element
*c
)
9442 enum arm_float_model fp_model
;
9444 for (fp_model
= ARM_FLOAT_AUTO
; fp_model
!= ARM_FLOAT_LAST
; fp_model
++)
9445 if (strcmp (current_fp_model
, fp_model_strings
[fp_model
]) == 0)
9447 arm_fp_model
= fp_model
;
9451 if (fp_model
== ARM_FLOAT_LAST
)
9452 internal_error (__FILE__
, __LINE__
, _("Invalid fp model accepted: %s."),
9455 arm_update_current_architecture ();
9459 show_fp_model (struct ui_file
*file
, int from_tty
,
9460 struct cmd_list_element
*c
, const char *value
)
9462 struct gdbarch_tdep
*tdep
= gdbarch_tdep (target_gdbarch ());
9464 if (arm_fp_model
== ARM_FLOAT_AUTO
9465 && gdbarch_bfd_arch_info (target_gdbarch ())->arch
== bfd_arch_arm
)
9466 fprintf_filtered (file
, _("\
9467 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9468 fp_model_strings
[tdep
->fp_model
]);
9470 fprintf_filtered (file
, _("\
9471 The current ARM floating point model is \"%s\".\n"),
9472 fp_model_strings
[arm_fp_model
]);
9476 arm_set_abi (char *args
, int from_tty
,
9477 struct cmd_list_element
*c
)
9479 enum arm_abi_kind arm_abi
;
9481 for (arm_abi
= ARM_ABI_AUTO
; arm_abi
!= ARM_ABI_LAST
; arm_abi
++)
9482 if (strcmp (arm_abi_string
, arm_abi_strings
[arm_abi
]) == 0)
9484 arm_abi_global
= arm_abi
;
9488 if (arm_abi
== ARM_ABI_LAST
)
9489 internal_error (__FILE__
, __LINE__
, _("Invalid ABI accepted: %s."),
9492 arm_update_current_architecture ();
9496 arm_show_abi (struct ui_file
*file
, int from_tty
,
9497 struct cmd_list_element
*c
, const char *value
)
9499 struct gdbarch_tdep
*tdep
= gdbarch_tdep (target_gdbarch ());
9501 if (arm_abi_global
== ARM_ABI_AUTO
9502 && gdbarch_bfd_arch_info (target_gdbarch ())->arch
== bfd_arch_arm
)
9503 fprintf_filtered (file
, _("\
9504 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9505 arm_abi_strings
[tdep
->arm_abi
]);
9507 fprintf_filtered (file
, _("The current ARM ABI is \"%s\".\n"),
9512 arm_show_fallback_mode (struct ui_file
*file
, int from_tty
,
9513 struct cmd_list_element
*c
, const char *value
)
9515 fprintf_filtered (file
,
9516 _("The current execution mode assumed "
9517 "(when symbols are unavailable) is \"%s\".\n"),
9518 arm_fallback_mode_string
);
9522 arm_show_force_mode (struct ui_file
*file
, int from_tty
,
9523 struct cmd_list_element
*c
, const char *value
)
9525 struct gdbarch_tdep
*tdep
= gdbarch_tdep (target_gdbarch ());
9527 fprintf_filtered (file
,
9528 _("The current execution mode assumed "
9529 "(even when symbols are available) is \"%s\".\n"),
9530 arm_force_mode_string
);
/* If the user changes the register disassembly style used for info
   register and other commands, we have to also switch the style used
   in opcodes for disassembly output.  This function is run in the "set
   arm disassembly" command, and does that.  */

static void
set_disassembly_style_sfunc (char *args, int from_tty,
			     struct cmd_list_element *c)
{
  set_disassembly_style ();
}
9545 /* Return the ARM register name corresponding to register I. */
9547 arm_register_name (struct gdbarch
*gdbarch
, int i
)
9549 const int num_regs
= gdbarch_num_regs (gdbarch
);
9551 if (gdbarch_tdep (gdbarch
)->have_vfp_pseudos
9552 && i
>= num_regs
&& i
< num_regs
+ 32)
9554 static const char *const vfp_pseudo_names
[] = {
9555 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9556 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9557 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9558 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9561 return vfp_pseudo_names
[i
- num_regs
];
9564 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
9565 && i
>= num_regs
+ 32 && i
< num_regs
+ 32 + 16)
9567 static const char *const neon_pseudo_names
[] = {
9568 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9569 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9572 return neon_pseudo_names
[i
- num_regs
- 32];
9575 if (i
>= ARRAY_SIZE (arm_register_names
))
9576 /* These registers are only supported on targets which supply
9577 an XML description. */
9580 return arm_register_names
[i
];
9584 set_disassembly_style (void)
9588 /* Find the style that the user wants. */
9589 for (current
= 0; current
< num_disassembly_options
; current
++)
9590 if (disassembly_style
== valid_disassembly_styles
[current
])
9592 gdb_assert (current
< num_disassembly_options
);
9594 /* Synchronize the disassembler. */
9595 set_arm_regname_option (current
);
9598 /* Test whether the coff symbol specific value corresponds to a Thumb
9602 coff_sym_is_thumb (int val
)
9604 return (val
== C_THUMBEXT
9605 || val
== C_THUMBSTAT
9606 || val
== C_THUMBEXTFUNC
9607 || val
== C_THUMBSTATFUNC
9608 || val
== C_THUMBLABEL
);
9611 /* arm_coff_make_msymbol_special()
9612 arm_elf_make_msymbol_special()
9614 These functions test whether the COFF or ELF symbol corresponds to
9615 an address in thumb code, and set a "special" bit in a minimal
9616 symbol to indicate that it does. */
9619 arm_elf_make_msymbol_special(asymbol
*sym
, struct minimal_symbol
*msym
)
9621 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type
*)sym
)->internal_elf_sym
)
9622 == ST_BRANCH_TO_THUMB
)
9623 MSYMBOL_SET_SPECIAL (msym
);
/* COFF counterpart of arm_elf_make_msymbol_special: mark MSYM as
   Thumb when the COFF symbol class says so.  */

static void
arm_coff_make_msymbol_special (int val, struct minimal_symbol *msym)
{
  if (coff_sym_is_thumb (val))
    MSYMBOL_SET_SPECIAL (msym);
}
9634 arm_objfile_data_free (struct objfile
*objfile
, void *arg
)
9636 struct arm_per_objfile
*data
= arg
;
9639 for (i
= 0; i
< objfile
->obfd
->section_count
; i
++)
9640 VEC_free (arm_mapping_symbol_s
, data
->section_maps
[i
]);
9644 arm_record_special_symbol (struct gdbarch
*gdbarch
, struct objfile
*objfile
,
9647 const char *name
= bfd_asymbol_name (sym
);
9648 struct arm_per_objfile
*data
;
9649 VEC(arm_mapping_symbol_s
) **map_p
;
9650 struct arm_mapping_symbol new_map_sym
;
9652 gdb_assert (name
[0] == '$');
9653 if (name
[1] != 'a' && name
[1] != 't' && name
[1] != 'd')
9656 data
= objfile_data (objfile
, arm_objfile_data_key
);
9659 data
= OBSTACK_ZALLOC (&objfile
->objfile_obstack
,
9660 struct arm_per_objfile
);
9661 set_objfile_data (objfile
, arm_objfile_data_key
, data
);
9662 data
->section_maps
= OBSTACK_CALLOC (&objfile
->objfile_obstack
,
9663 objfile
->obfd
->section_count
,
9664 VEC(arm_mapping_symbol_s
) *);
9666 map_p
= &data
->section_maps
[bfd_get_section (sym
)->index
];
9668 new_map_sym
.value
= sym
->value
;
9669 new_map_sym
.type
= name
[1];
9671 /* Assume that most mapping symbols appear in order of increasing
9672 value. If they were randomly distributed, it would be faster to
9673 always push here and then sort at first use. */
9674 if (!VEC_empty (arm_mapping_symbol_s
, *map_p
))
9676 struct arm_mapping_symbol
*prev_map_sym
;
9678 prev_map_sym
= VEC_last (arm_mapping_symbol_s
, *map_p
);
9679 if (prev_map_sym
->value
>= sym
->value
)
9682 idx
= VEC_lower_bound (arm_mapping_symbol_s
, *map_p
, &new_map_sym
,
9683 arm_compare_mapping_symbols
);
9684 VEC_safe_insert (arm_mapping_symbol_s
, *map_p
, idx
, &new_map_sym
);
9689 VEC_safe_push (arm_mapping_symbol_s
, *map_p
, &new_map_sym
);
9693 arm_write_pc (struct regcache
*regcache
, CORE_ADDR pc
)
9695 struct gdbarch
*gdbarch
= get_regcache_arch (regcache
);
9696 regcache_cooked_write_unsigned (regcache
, ARM_PC_REGNUM
, pc
);
9698 /* If necessary, set the T bit. */
9701 ULONGEST val
, t_bit
;
9702 regcache_cooked_read_unsigned (regcache
, ARM_PS_REGNUM
, &val
);
9703 t_bit
= arm_psr_thumb_bit (gdbarch
);
9704 if (arm_pc_is_thumb (gdbarch
, pc
))
9705 regcache_cooked_write_unsigned (regcache
, ARM_PS_REGNUM
,
9708 regcache_cooked_write_unsigned (regcache
, ARM_PS_REGNUM
,
9713 /* Read the contents of a NEON quad register, by reading from two
9714 double registers. This is used to implement the quad pseudo
9715 registers, and for argument passing in case the quad registers are
9716 missing; vectors are passed in quad registers when using the VFP
9717 ABI, even if a NEON unit is not present. REGNUM is the index of
9718 the quad register, in [0, 15]. */
9720 static enum register_status
9721 arm_neon_quad_read (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
9722 int regnum
, gdb_byte
*buf
)
9725 gdb_byte reg_buf
[8];
9726 int offset
, double_regnum
;
9727 enum register_status status
;
9729 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
<< 1);
9730 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9733 /* d0 is always the least significant half of q0. */
9734 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
9739 status
= regcache_raw_read (regcache
, double_regnum
, reg_buf
);
9740 if (status
!= REG_VALID
)
9742 memcpy (buf
+ offset
, reg_buf
, 8);
9744 offset
= 8 - offset
;
9745 status
= regcache_raw_read (regcache
, double_regnum
+ 1, reg_buf
);
9746 if (status
!= REG_VALID
)
9748 memcpy (buf
+ offset
, reg_buf
, 8);
9753 static enum register_status
9754 arm_pseudo_read (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
9755 int regnum
, gdb_byte
*buf
)
9757 const int num_regs
= gdbarch_num_regs (gdbarch
);
9759 gdb_byte reg_buf
[8];
9760 int offset
, double_regnum
;
9762 gdb_assert (regnum
>= num_regs
);
9765 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
&& regnum
>= 32 && regnum
< 48)
9766 /* Quad-precision register. */
9767 return arm_neon_quad_read (gdbarch
, regcache
, regnum
- 32, buf
);
9770 enum register_status status
;
9772 /* Single-precision register. */
9773 gdb_assert (regnum
< 32);
9775 /* s0 is always the least significant half of d0. */
9776 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
9777 offset
= (regnum
& 1) ? 0 : 4;
9779 offset
= (regnum
& 1) ? 4 : 0;
9781 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
>> 1);
9782 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9785 status
= regcache_raw_read (regcache
, double_regnum
, reg_buf
);
9786 if (status
== REG_VALID
)
9787 memcpy (buf
, reg_buf
+ offset
, 4);
9792 /* Store the contents of BUF to a NEON quad register, by writing to
9793 two double registers. This is used to implement the quad pseudo
9794 registers, and for argument passing in case the quad registers are
9795 missing; vectors are passed in quad registers when using the VFP
9796 ABI, even if a NEON unit is not present. REGNUM is the index
9797 of the quad register, in [0, 15]. */
9800 arm_neon_quad_write (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
9801 int regnum
, const gdb_byte
*buf
)
9804 int offset
, double_regnum
;
9806 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
<< 1);
9807 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9810 /* d0 is always the least significant half of q0. */
9811 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
9816 regcache_raw_write (regcache
, double_regnum
, buf
+ offset
);
9817 offset
= 8 - offset
;
9818 regcache_raw_write (regcache
, double_regnum
+ 1, buf
+ offset
);
9822 arm_pseudo_write (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
9823 int regnum
, const gdb_byte
*buf
)
9825 const int num_regs
= gdbarch_num_regs (gdbarch
);
9827 gdb_byte reg_buf
[8];
9828 int offset
, double_regnum
;
9830 gdb_assert (regnum
>= num_regs
);
9833 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
&& regnum
>= 32 && regnum
< 48)
9834 /* Quad-precision register. */
9835 arm_neon_quad_write (gdbarch
, regcache
, regnum
- 32, buf
);
9838 /* Single-precision register. */
9839 gdb_assert (regnum
< 32);
9841 /* s0 is always the least significant half of d0. */
9842 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
9843 offset
= (regnum
& 1) ? 0 : 4;
9845 offset
= (regnum
& 1) ? 4 : 0;
9847 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
>> 1);
9848 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9851 regcache_raw_read (regcache
, double_regnum
, reg_buf
);
9852 memcpy (reg_buf
+ offset
, buf
, 4);
9853 regcache_raw_write (regcache
, double_regnum
, reg_buf
);
/* user-reg read callback: BATON is a pointer to the register number
   the alias resolves to.  */

static struct value *
value_of_arm_user_reg (struct frame_info *frame, const void *baton)
{
  const int *reg_p = baton;

  return value_of_register (*reg_p, frame);
}
9864 static enum gdb_osabi
9865 arm_elf_osabi_sniffer (bfd
*abfd
)
9867 unsigned int elfosabi
;
9868 enum gdb_osabi osabi
= GDB_OSABI_UNKNOWN
;
9870 elfosabi
= elf_elfheader (abfd
)->e_ident
[EI_OSABI
];
9872 if (elfosabi
== ELFOSABI_ARM
)
9873 /* GNU tools use this value. Check note sections in this case,
9875 bfd_map_over_sections (abfd
,
9876 generic_elf_osabi_sniff_abi_tag_sections
,
9879 /* Anything else will be handled by the generic ELF sniffer. */
9884 arm_register_reggroup_p (struct gdbarch
*gdbarch
, int regnum
,
9885 struct reggroup
*group
)
9887 /* FPS register's type is INT, but belongs to float_reggroup. Beside
9888 this, FPS register belongs to save_regroup, restore_reggroup, and
9889 all_reggroup, of course. */
9890 if (regnum
== ARM_FPS_REGNUM
)
9891 return (group
== float_reggroup
9892 || group
== save_reggroup
9893 || group
== restore_reggroup
9894 || group
== all_reggroup
);
9896 return default_register_reggroup_p (gdbarch
, regnum
, group
);
9900 /* For backward-compatibility we allow two 'g' packet lengths with
9901 the remote protocol depending on whether FPA registers are
9902 supplied. M-profile targets do not have FPA registers, but some
9903 stubs already exist in the wild which use a 'g' packet which
9904 supplies them albeit with dummy values. The packet format which
9905 includes FPA registers should be considered deprecated for
9906 M-profile targets. */
9909 arm_register_g_packet_guesses (struct gdbarch
*gdbarch
)
9911 if (gdbarch_tdep (gdbarch
)->is_m
)
9913 /* If we know from the executable this is an M-profile target,
9914 cater for remote targets whose register set layout is the
9915 same as the FPA layout. */
9916 register_remote_g_packet_guess (gdbarch
,
9917 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
9918 (16 * INT_REGISTER_SIZE
)
9919 + (8 * FP_REGISTER_SIZE
)
9920 + (2 * INT_REGISTER_SIZE
),
9921 tdesc_arm_with_m_fpa_layout
);
9923 /* The regular M-profile layout. */
9924 register_remote_g_packet_guess (gdbarch
,
9925 /* r0-r12,sp,lr,pc; xpsr */
9926 (16 * INT_REGISTER_SIZE
)
9927 + INT_REGISTER_SIZE
,
9930 /* M-profile plus M4F VFP. */
9931 register_remote_g_packet_guess (gdbarch
,
9932 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
9933 (16 * INT_REGISTER_SIZE
)
9934 + (16 * VFP_REGISTER_SIZE
)
9935 + (2 * INT_REGISTER_SIZE
),
9936 tdesc_arm_with_m_vfp_d16
);
9939 /* Otherwise we don't have a useful guess. */
9943 /* Initialize the current architecture based on INFO. If possible,
9944 re-use an architecture from ARCHES, which is a list of
9945 architectures already created during this debugging session.
9947 Called e.g. at program startup, when reading a core file, and when
9948 reading a binary file. */
9950 static struct gdbarch
*
9951 arm_gdbarch_init (struct gdbarch_info info
, struct gdbarch_list
*arches
)
9953 struct gdbarch_tdep
*tdep
;
9954 struct gdbarch
*gdbarch
;
9955 struct gdbarch_list
*best_arch
;
9956 enum arm_abi_kind arm_abi
= arm_abi_global
;
9957 enum arm_float_model fp_model
= arm_fp_model
;
9958 struct tdesc_arch_data
*tdesc_data
= NULL
;
9960 int have_vfp_registers
= 0, have_vfp_pseudos
= 0, have_neon_pseudos
= 0;
9962 int have_fpa_registers
= 1;
9963 const struct target_desc
*tdesc
= info
.target_desc
;
9965 /* If we have an object to base this architecture on, try to determine
9968 if (arm_abi
== ARM_ABI_AUTO
&& info
.abfd
!= NULL
)
9970 int ei_osabi
, e_flags
;
9972 switch (bfd_get_flavour (info
.abfd
))
9974 case bfd_target_aout_flavour
:
9975 /* Assume it's an old APCS-style ABI. */
9976 arm_abi
= ARM_ABI_APCS
;
9979 case bfd_target_coff_flavour
:
9980 /* Assume it's an old APCS-style ABI. */
9982 arm_abi
= ARM_ABI_APCS
;
9985 case bfd_target_elf_flavour
:
9986 ei_osabi
= elf_elfheader (info
.abfd
)->e_ident
[EI_OSABI
];
9987 e_flags
= elf_elfheader (info
.abfd
)->e_flags
;
9989 if (ei_osabi
== ELFOSABI_ARM
)
9991 /* GNU tools used to use this value, but do not for EABI
9992 objects. There's nowhere to tag an EABI version
9993 anyway, so assume APCS. */
9994 arm_abi
= ARM_ABI_APCS
;
9996 else if (ei_osabi
== ELFOSABI_NONE
)
9998 int eabi_ver
= EF_ARM_EABI_VERSION (e_flags
);
9999 int attr_arch
, attr_profile
;
10003 case EF_ARM_EABI_UNKNOWN
:
10004 /* Assume GNU tools. */
10005 arm_abi
= ARM_ABI_APCS
;
10008 case EF_ARM_EABI_VER4
:
10009 case EF_ARM_EABI_VER5
:
10010 arm_abi
= ARM_ABI_AAPCS
;
10011 /* EABI binaries default to VFP float ordering.
10012 They may also contain build attributes that can
10013 be used to identify if the VFP argument-passing
10015 if (fp_model
== ARM_FLOAT_AUTO
)
10018 switch (bfd_elf_get_obj_attr_int (info
.abfd
,
10023 /* "The user intended FP parameter/result
10024 passing to conform to AAPCS, base
10026 fp_model
= ARM_FLOAT_SOFT_VFP
;
10029 /* "The user intended FP parameter/result
10030 passing to conform to AAPCS, VFP
10032 fp_model
= ARM_FLOAT_VFP
;
10035 /* "The user intended FP parameter/result
10036 passing to conform to tool chain-specific
10037 conventions" - we don't know any such
10038 conventions, so leave it as "auto". */
10041 /* Attribute value not mentioned in the
10042 October 2008 ABI, so leave it as
10047 fp_model
= ARM_FLOAT_SOFT_VFP
;
10053 /* Leave it as "auto". */
10054 warning (_("unknown ARM EABI version 0x%x"), eabi_ver
);
10059 /* Detect M-profile programs. This only works if the
10060 executable file includes build attributes; GCC does
10061 copy them to the executable, but e.g. RealView does
10063 attr_arch
= bfd_elf_get_obj_attr_int (info
.abfd
, OBJ_ATTR_PROC
,
10065 attr_profile
= bfd_elf_get_obj_attr_int (info
.abfd
,
10067 Tag_CPU_arch_profile
);
10068 /* GCC specifies the profile for v6-M; RealView only
10069 specifies the profile for architectures starting with
10070 V7 (as opposed to architectures with a tag
10071 numerically greater than TAG_CPU_ARCH_V7). */
10072 if (!tdesc_has_registers (tdesc
)
10073 && (attr_arch
== TAG_CPU_ARCH_V6_M
10074 || attr_arch
== TAG_CPU_ARCH_V6S_M
10075 || attr_profile
== 'M'))
10080 if (fp_model
== ARM_FLOAT_AUTO
)
10082 int e_flags
= elf_elfheader (info
.abfd
)->e_flags
;
10084 switch (e_flags
& (EF_ARM_SOFT_FLOAT
| EF_ARM_VFP_FLOAT
))
10087 /* Leave it as "auto". Strictly speaking this case
10088 means FPA, but almost nobody uses that now, and
10089 many toolchains fail to set the appropriate bits
10090 for the floating-point model they use. */
10092 case EF_ARM_SOFT_FLOAT
:
10093 fp_model
= ARM_FLOAT_SOFT_FPA
;
10095 case EF_ARM_VFP_FLOAT
:
10096 fp_model
= ARM_FLOAT_VFP
;
10098 case EF_ARM_SOFT_FLOAT
| EF_ARM_VFP_FLOAT
:
10099 fp_model
= ARM_FLOAT_SOFT_VFP
;
10104 if (e_flags
& EF_ARM_BE8
)
10105 info
.byte_order_for_code
= BFD_ENDIAN_LITTLE
;
10110 /* Leave it as "auto". */
10115 /* Check any target description for validity. */
10116 if (tdesc_has_registers (tdesc
))
10118 /* For most registers we require GDB's default names; but also allow
10119 the numeric names for sp / lr / pc, as a convenience. */
10120 static const char *const arm_sp_names
[] = { "r13", "sp", NULL
};
10121 static const char *const arm_lr_names
[] = { "r14", "lr", NULL
};
10122 static const char *const arm_pc_names
[] = { "r15", "pc", NULL
};
10124 const struct tdesc_feature
*feature
;
10127 feature
= tdesc_find_feature (tdesc
,
10128 "org.gnu.gdb.arm.core");
10129 if (feature
== NULL
)
10131 feature
= tdesc_find_feature (tdesc
,
10132 "org.gnu.gdb.arm.m-profile");
10133 if (feature
== NULL
)
10139 tdesc_data
= tdesc_data_alloc ();
10142 for (i
= 0; i
< ARM_SP_REGNUM
; i
++)
10143 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
, i
,
10144 arm_register_names
[i
]);
10145 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
10148 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
10151 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
10155 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
10156 ARM_PS_REGNUM
, "xpsr");
10158 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
10159 ARM_PS_REGNUM
, "cpsr");
10163 tdesc_data_cleanup (tdesc_data
);
10167 feature
= tdesc_find_feature (tdesc
,
10168 "org.gnu.gdb.arm.fpa");
10169 if (feature
!= NULL
)
10172 for (i
= ARM_F0_REGNUM
; i
<= ARM_FPS_REGNUM
; i
++)
10173 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
, i
,
10174 arm_register_names
[i
]);
10177 tdesc_data_cleanup (tdesc_data
);
10182 have_fpa_registers
= 0;
10184 feature
= tdesc_find_feature (tdesc
,
10185 "org.gnu.gdb.xscale.iwmmxt");
10186 if (feature
!= NULL
)
10188 static const char *const iwmmxt_names
[] = {
10189 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10190 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10191 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10192 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10196 for (i
= ARM_WR0_REGNUM
; i
<= ARM_WR15_REGNUM
; i
++)
10198 &= tdesc_numbered_register (feature
, tdesc_data
, i
,
10199 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
10201 /* Check for the control registers, but do not fail if they
10203 for (i
= ARM_WC0_REGNUM
; i
<= ARM_WCASF_REGNUM
; i
++)
10204 tdesc_numbered_register (feature
, tdesc_data
, i
,
10205 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
10207 for (i
= ARM_WCGR0_REGNUM
; i
<= ARM_WCGR3_REGNUM
; i
++)
10209 &= tdesc_numbered_register (feature
, tdesc_data
, i
,
10210 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
10214 tdesc_data_cleanup (tdesc_data
);
10219 /* If we have a VFP unit, check whether the single precision registers
10220 are present. If not, then we will synthesize them as pseudo
10222 feature
= tdesc_find_feature (tdesc
,
10223 "org.gnu.gdb.arm.vfp");
10224 if (feature
!= NULL
)
10226 static const char *const vfp_double_names
[] = {
10227 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10228 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10229 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10230 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10233 /* Require the double precision registers. There must be either
10236 for (i
= 0; i
< 32; i
++)
10238 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
10240 vfp_double_names
[i
]);
10244 if (!valid_p
&& i
== 16)
10247 /* Also require FPSCR. */
10248 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
10249 ARM_FPSCR_REGNUM
, "fpscr");
10252 tdesc_data_cleanup (tdesc_data
);
10256 if (tdesc_unnumbered_register (feature
, "s0") == 0)
10257 have_vfp_pseudos
= 1;
10259 have_vfp_registers
= 1;
10261 /* If we have VFP, also check for NEON. The architecture allows
10262 NEON without VFP (integer vector operations only), but GDB
10263 does not support that. */
10264 feature
= tdesc_find_feature (tdesc
,
10265 "org.gnu.gdb.arm.neon");
10266 if (feature
!= NULL
)
10268 /* NEON requires 32 double-precision registers. */
10271 tdesc_data_cleanup (tdesc_data
);
10275 /* If there are quad registers defined by the stub, use
10276 their type; otherwise (normally) provide them with
10277 the default type. */
10278 if (tdesc_unnumbered_register (feature
, "q0") == 0)
10279 have_neon_pseudos
= 1;
10286 /* If there is already a candidate, use it. */
10287 for (best_arch
= gdbarch_list_lookup_by_info (arches
, &info
);
10289 best_arch
= gdbarch_list_lookup_by_info (best_arch
->next
, &info
))
10291 if (arm_abi
!= ARM_ABI_AUTO
10292 && arm_abi
!= gdbarch_tdep (best_arch
->gdbarch
)->arm_abi
)
10295 if (fp_model
!= ARM_FLOAT_AUTO
10296 && fp_model
!= gdbarch_tdep (best_arch
->gdbarch
)->fp_model
)
10299 /* There are various other properties in tdep that we do not
10300 need to check here: those derived from a target description,
10301 since gdbarches with a different target description are
10302 automatically disqualified. */
10304 /* Do check is_m, though, since it might come from the binary. */
10305 if (is_m
!= gdbarch_tdep (best_arch
->gdbarch
)->is_m
)
10308 /* Found a match. */
10312 if (best_arch
!= NULL
)
10314 if (tdesc_data
!= NULL
)
10315 tdesc_data_cleanup (tdesc_data
);
10316 return best_arch
->gdbarch
;
10319 tdep
= xcalloc (1, sizeof (struct gdbarch_tdep
));
10320 gdbarch
= gdbarch_alloc (&info
, tdep
);
10322 /* Record additional information about the architecture we are defining.
10323 These are gdbarch discriminators, like the OSABI. */
10324 tdep
->arm_abi
= arm_abi
;
10325 tdep
->fp_model
= fp_model
;
10327 tdep
->have_fpa_registers
= have_fpa_registers
;
10328 tdep
->have_vfp_registers
= have_vfp_registers
;
10329 tdep
->have_vfp_pseudos
= have_vfp_pseudos
;
10330 tdep
->have_neon_pseudos
= have_neon_pseudos
;
10331 tdep
->have_neon
= have_neon
;
10333 arm_register_g_packet_guesses (gdbarch
);
10336 switch (info
.byte_order_for_code
)
10338 case BFD_ENDIAN_BIG
:
10339 tdep
->arm_breakpoint
= arm_default_arm_be_breakpoint
;
10340 tdep
->arm_breakpoint_size
= sizeof (arm_default_arm_be_breakpoint
);
10341 tdep
->thumb_breakpoint
= arm_default_thumb_be_breakpoint
;
10342 tdep
->thumb_breakpoint_size
= sizeof (arm_default_thumb_be_breakpoint
);
10346 case BFD_ENDIAN_LITTLE
:
10347 tdep
->arm_breakpoint
= arm_default_arm_le_breakpoint
;
10348 tdep
->arm_breakpoint_size
= sizeof (arm_default_arm_le_breakpoint
);
10349 tdep
->thumb_breakpoint
= arm_default_thumb_le_breakpoint
;
10350 tdep
->thumb_breakpoint_size
= sizeof (arm_default_thumb_le_breakpoint
);
10355 internal_error (__FILE__
, __LINE__
,
10356 _("arm_gdbarch_init: bad byte order for float format"));
10359 /* On ARM targets char defaults to unsigned. */
10360 set_gdbarch_char_signed (gdbarch
, 0);
10362 /* Note: for displaced stepping, this includes the breakpoint, and one word
10363 of additional scratch space. This setting isn't used for anything beside
10364 displaced stepping at present. */
10365 set_gdbarch_max_insn_length (gdbarch
, 4 * DISPLACED_MODIFIED_INSNS
);
10367 /* This should be low enough for everything. */
10368 tdep
->lowest_pc
= 0x20;
10369 tdep
->jb_pc
= -1; /* Longjump support not enabled by default. */
10371 /* The default, for both APCS and AAPCS, is to return small
10372 structures in registers. */
10373 tdep
->struct_return
= reg_struct_return
;
10375 set_gdbarch_push_dummy_call (gdbarch
, arm_push_dummy_call
);
10376 set_gdbarch_frame_align (gdbarch
, arm_frame_align
);
10378 set_gdbarch_write_pc (gdbarch
, arm_write_pc
);
10380 /* Frame handling. */
10381 set_gdbarch_dummy_id (gdbarch
, arm_dummy_id
);
10382 set_gdbarch_unwind_pc (gdbarch
, arm_unwind_pc
);
10383 set_gdbarch_unwind_sp (gdbarch
, arm_unwind_sp
);
10385 frame_base_set_default (gdbarch
, &arm_normal_base
);
10387 /* Address manipulation. */
10388 set_gdbarch_addr_bits_remove (gdbarch
, arm_addr_bits_remove
);
10390 /* Advance PC across function entry code. */
10391 set_gdbarch_skip_prologue (gdbarch
, arm_skip_prologue
);
10393 /* Detect whether PC is in function epilogue. */
10394 set_gdbarch_in_function_epilogue_p (gdbarch
, arm_in_function_epilogue_p
);
10396 /* Skip trampolines. */
10397 set_gdbarch_skip_trampoline_code (gdbarch
, arm_skip_stub
);
10399 /* The stack grows downward. */
10400 set_gdbarch_inner_than (gdbarch
, core_addr_lessthan
);
10402 /* Breakpoint manipulation. */
10403 set_gdbarch_breakpoint_from_pc (gdbarch
, arm_breakpoint_from_pc
);
10404 set_gdbarch_remote_breakpoint_from_pc (gdbarch
,
10405 arm_remote_breakpoint_from_pc
);
10407 /* Information about registers, etc. */
10408 set_gdbarch_sp_regnum (gdbarch
, ARM_SP_REGNUM
);
10409 set_gdbarch_pc_regnum (gdbarch
, ARM_PC_REGNUM
);
10410 set_gdbarch_num_regs (gdbarch
, ARM_NUM_REGS
);
10411 set_gdbarch_register_type (gdbarch
, arm_register_type
);
10412 set_gdbarch_register_reggroup_p (gdbarch
, arm_register_reggroup_p
);
10414 /* This "info float" is FPA-specific. Use the generic version if we
10415 do not have FPA. */
10416 if (gdbarch_tdep (gdbarch
)->have_fpa_registers
)
10417 set_gdbarch_print_float_info (gdbarch
, arm_print_float_info
);
10419 /* Internal <-> external register number maps. */
10420 set_gdbarch_dwarf2_reg_to_regnum (gdbarch
, arm_dwarf_reg_to_regnum
);
10421 set_gdbarch_register_sim_regno (gdbarch
, arm_register_sim_regno
);
10423 set_gdbarch_register_name (gdbarch
, arm_register_name
);
10425 /* Returning results. */
10426 set_gdbarch_return_value (gdbarch
, arm_return_value
);
10429 set_gdbarch_print_insn (gdbarch
, gdb_print_insn_arm
);
10431 /* Minsymbol frobbing. */
10432 set_gdbarch_elf_make_msymbol_special (gdbarch
, arm_elf_make_msymbol_special
);
10433 set_gdbarch_coff_make_msymbol_special (gdbarch
,
10434 arm_coff_make_msymbol_special
);
10435 set_gdbarch_record_special_symbol (gdbarch
, arm_record_special_symbol
);
10437 /* Thumb-2 IT block support. */
10438 set_gdbarch_adjust_breakpoint_address (gdbarch
,
10439 arm_adjust_breakpoint_address
);
10441 /* Virtual tables. */
10442 set_gdbarch_vbit_in_delta (gdbarch
, 1);
10444 /* Hook in the ABI-specific overrides, if they have been registered. */
10445 gdbarch_init_osabi (info
, gdbarch
);
10447 dwarf2_frame_set_init_reg (gdbarch
, arm_dwarf2_frame_init_reg
);
10449 /* Add some default predicates. */
10451 frame_unwind_append_unwinder (gdbarch
, &arm_m_exception_unwind
);
10452 frame_unwind_append_unwinder (gdbarch
, &arm_stub_unwind
);
10453 dwarf2_append_unwinders (gdbarch
);
10454 frame_unwind_append_unwinder (gdbarch
, &arm_exidx_unwind
);
10455 frame_unwind_append_unwinder (gdbarch
, &arm_prologue_unwind
);
10457 /* Now we have tuned the configuration, set a few final things,
10458 based on what the OS ABI has told us. */
10460 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10461 binaries are always marked. */
10462 if (tdep
->arm_abi
== ARM_ABI_AUTO
)
10463 tdep
->arm_abi
= ARM_ABI_APCS
;
10465 /* Watchpoints are not steppable. */
10466 set_gdbarch_have_nonsteppable_watchpoint (gdbarch
, 1);
10468 /* We used to default to FPA for generic ARM, but almost nobody
10469 uses that now, and we now provide a way for the user to force
10470 the model. So default to the most useful variant. */
10471 if (tdep
->fp_model
== ARM_FLOAT_AUTO
)
10472 tdep
->fp_model
= ARM_FLOAT_SOFT_FPA
;
10474 if (tdep
->jb_pc
>= 0)
10475 set_gdbarch_get_longjmp_target (gdbarch
, arm_get_longjmp_target
);
10477 /* Floating point sizes and format. */
10478 set_gdbarch_float_format (gdbarch
, floatformats_ieee_single
);
10479 if (tdep
->fp_model
== ARM_FLOAT_SOFT_FPA
|| tdep
->fp_model
== ARM_FLOAT_FPA
)
10481 set_gdbarch_double_format
10482 (gdbarch
, floatformats_ieee_double_littlebyte_bigword
);
10483 set_gdbarch_long_double_format
10484 (gdbarch
, floatformats_ieee_double_littlebyte_bigword
);
10488 set_gdbarch_double_format (gdbarch
, floatformats_ieee_double
);
10489 set_gdbarch_long_double_format (gdbarch
, floatformats_ieee_double
);
10492 if (have_vfp_pseudos
)
10494 /* NOTE: These are the only pseudo registers used by
10495 the ARM target at the moment. If more are added, a
10496 little more care in numbering will be needed. */
10498 int num_pseudos
= 32;
10499 if (have_neon_pseudos
)
10501 set_gdbarch_num_pseudo_regs (gdbarch
, num_pseudos
);
10502 set_gdbarch_pseudo_register_read (gdbarch
, arm_pseudo_read
);
10503 set_gdbarch_pseudo_register_write (gdbarch
, arm_pseudo_write
);
10508 set_tdesc_pseudo_register_name (gdbarch
, arm_register_name
);
10510 tdesc_use_registers (gdbarch
, tdesc
, tdesc_data
);
10512 /* Override tdesc_register_type to adjust the types of VFP
10513 registers for NEON. */
10514 set_gdbarch_register_type (gdbarch
, arm_register_type
);
10517 /* Add standard register aliases. We add aliases even for those
10518 nanes which are used by the current architecture - it's simpler,
10519 and does no harm, since nothing ever lists user registers. */
10520 for (i
= 0; i
< ARRAY_SIZE (arm_register_aliases
); i
++)
10521 user_reg_add (gdbarch
, arm_register_aliases
[i
].name
,
10522 value_of_arm_user_reg
, &arm_register_aliases
[i
].regnum
);
10528 arm_dump_tdep (struct gdbarch
*gdbarch
, struct ui_file
*file
)
10530 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
10535 fprintf_unfiltered (file
, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10536 (unsigned long) tdep
->lowest_pc
);
10539 extern initialize_file_ftype _initialize_arm_tdep
; /* -Wmissing-prototypes */
10542 _initialize_arm_tdep (void)
10544 struct ui_file
*stb
;
10546 struct cmd_list_element
*new_set
, *new_show
;
10547 const char *setname
;
10548 const char *setdesc
;
10549 const char *const *regnames
;
10551 static char *helptext
;
10552 char regdesc
[1024], *rdptr
= regdesc
;
10553 size_t rest
= sizeof (regdesc
);
10555 gdbarch_register (bfd_arch_arm
, arm_gdbarch_init
, arm_dump_tdep
);
10557 arm_objfile_data_key
10558 = register_objfile_data_with_cleanup (NULL
, arm_objfile_data_free
);
10560 /* Add ourselves to objfile event chain. */
10561 observer_attach_new_objfile (arm_exidx_new_objfile
);
10563 = register_objfile_data_with_cleanup (NULL
, arm_exidx_data_free
);
10565 /* Register an ELF OS ABI sniffer for ARM binaries. */
10566 gdbarch_register_osabi_sniffer (bfd_arch_arm
,
10567 bfd_target_elf_flavour
,
10568 arm_elf_osabi_sniffer
);
10570 /* Initialize the standard target descriptions. */
10571 initialize_tdesc_arm_with_m ();
10572 initialize_tdesc_arm_with_m_fpa_layout ();
10573 initialize_tdesc_arm_with_m_vfp_d16 ();
10574 initialize_tdesc_arm_with_iwmmxt ();
10575 initialize_tdesc_arm_with_vfpv2 ();
10576 initialize_tdesc_arm_with_vfpv3 ();
10577 initialize_tdesc_arm_with_neon ();
10579 /* Get the number of possible sets of register names defined in opcodes. */
10580 num_disassembly_options
= get_arm_regname_num_options ();
10582 /* Add root prefix command for all "set arm"/"show arm" commands. */
10583 add_prefix_cmd ("arm", no_class
, set_arm_command
,
10584 _("Various ARM-specific commands."),
10585 &setarmcmdlist
, "set arm ", 0, &setlist
);
10587 add_prefix_cmd ("arm", no_class
, show_arm_command
,
10588 _("Various ARM-specific commands."),
10589 &showarmcmdlist
, "show arm ", 0, &showlist
);
10591 /* Sync the opcode insn printer with our register viewer. */
10592 parse_arm_disassembler_option ("reg-names-std");
10594 /* Initialize the array that will be passed to
10595 add_setshow_enum_cmd(). */
10596 valid_disassembly_styles
10597 = xmalloc ((num_disassembly_options
+ 1) * sizeof (char *));
10598 for (i
= 0; i
< num_disassembly_options
; i
++)
10600 numregs
= get_arm_regnames (i
, &setname
, &setdesc
, ®names
);
10601 valid_disassembly_styles
[i
] = setname
;
10602 length
= snprintf (rdptr
, rest
, "%s - %s\n", setname
, setdesc
);
10605 /* When we find the default names, tell the disassembler to use
10607 if (!strcmp (setname
, "std"))
10609 disassembly_style
= setname
;
10610 set_arm_regname_option (i
);
10613 /* Mark the end of valid options. */
10614 valid_disassembly_styles
[num_disassembly_options
] = NULL
;
10616 /* Create the help text. */
10617 stb
= mem_fileopen ();
10618 fprintf_unfiltered (stb
, "%s%s%s",
10619 _("The valid values are:\n"),
10621 _("The default is \"std\"."));
10622 helptext
= ui_file_xstrdup (stb
, NULL
);
10623 ui_file_delete (stb
);
10625 add_setshow_enum_cmd("disassembler", no_class
,
10626 valid_disassembly_styles
, &disassembly_style
,
10627 _("Set the disassembly style."),
10628 _("Show the disassembly style."),
10630 set_disassembly_style_sfunc
,
10631 NULL
, /* FIXME: i18n: The disassembly style is
10633 &setarmcmdlist
, &showarmcmdlist
);
10635 add_setshow_boolean_cmd ("apcs32", no_class
, &arm_apcs_32
,
10636 _("Set usage of ARM 32-bit mode."),
10637 _("Show usage of ARM 32-bit mode."),
10638 _("When off, a 26-bit PC will be used."),
10640 NULL
, /* FIXME: i18n: Usage of ARM 32-bit
10642 &setarmcmdlist
, &showarmcmdlist
);
10644 /* Add a command to allow the user to force the FPU model. */
10645 add_setshow_enum_cmd ("fpu", no_class
, fp_model_strings
, ¤t_fp_model
,
10646 _("Set the floating point type."),
10647 _("Show the floating point type."),
10648 _("auto - Determine the FP typefrom the OS-ABI.\n\
10649 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10650 fpa - FPA co-processor (GCC compiled).\n\
10651 softvfp - Software FP with pure-endian doubles.\n\
10652 vfp - VFP co-processor."),
10653 set_fp_model_sfunc
, show_fp_model
,
10654 &setarmcmdlist
, &showarmcmdlist
);
10656 /* Add a command to allow the user to force the ABI. */
10657 add_setshow_enum_cmd ("abi", class_support
, arm_abi_strings
, &arm_abi_string
,
10659 _("Show the ABI."),
10660 NULL
, arm_set_abi
, arm_show_abi
,
10661 &setarmcmdlist
, &showarmcmdlist
);
10663 /* Add two commands to allow the user to force the assumed
10665 add_setshow_enum_cmd ("fallback-mode", class_support
,
10666 arm_mode_strings
, &arm_fallback_mode_string
,
10667 _("Set the mode assumed when symbols are unavailable."),
10668 _("Show the mode assumed when symbols are unavailable."),
10669 NULL
, NULL
, arm_show_fallback_mode
,
10670 &setarmcmdlist
, &showarmcmdlist
);
10671 add_setshow_enum_cmd ("force-mode", class_support
,
10672 arm_mode_strings
, &arm_force_mode_string
,
10673 _("Set the mode assumed even when symbols are available."),
10674 _("Show the mode assumed even when symbols are available."),
10675 NULL
, NULL
, arm_show_force_mode
,
10676 &setarmcmdlist
, &showarmcmdlist
);
10678 /* Debugging flag. */
10679 add_setshow_boolean_cmd ("arm", class_maintenance
, &arm_debug
,
10680 _("Set ARM debugging."),
10681 _("Show ARM debugging."),
10682 _("When on, arm-specific debugging is enabled."),
10684 NULL
, /* FIXME: i18n: "ARM debugging is %s. */
10685 &setdebuglist
, &showdebuglist
);
/* ARM-reversible process record data structures.  */

#define ARM_INSN_SIZE_BYTES 4
#define THUMB_INSN_SIZE_BYTES 2
#define THUMB2_INSN_SIZE_BYTES 4


/* Position of the bit within a 32-bit ARM instruction
   that defines whether the instruction is a load or store.  */
#define INSN_S_L_BIT_NUM 20

/* Allocate a uint32_t array of LENGTH entries into REGS and copy the
   recorded register numbers out of RECORD_BUF.  No-op when LENGTH is
   zero, leaving REGS untouched.  */
#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int reg_len = LENGTH; \
            if (reg_len) \
              { \
                REGS = XNEWVEC (uint32_t, reg_len); \
                memcpy (&REGS[0], &RECORD_BUF[0], sizeof (uint32_t) * LENGTH); \
              } \
          } \
        while (0)

/* Allocate LENGTH 'struct arm_mem_r' entries into MEMS and copy the
   recorded length/address pairs out of RECORD_BUF.  No-op when LENGTH
   is zero.  NOTE(review): the copy relies on RECORD_BUF holding
   uint32_t pairs laid out exactly like struct arm_mem_r (len first,
   then addr) — confirm if arm_mem_r ever changes.  */
#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int mem_len = LENGTH; \
            if (mem_len) \
              { \
                MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
                memcpy (&MEMS->len, &RECORD_BUF[0], \
                        sizeof (struct arm_mem_r) * LENGTH); \
              } \
          } \
        while (0)

/* Checks whether insn is already recorded or yet to be decoded. (boolean expression).  */
#define INSN_RECORDED(ARM_RECORD) \
        (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
10728 /* ARM memory record structure. */
10731 uint32_t len
; /* Record length. */
10732 uint32_t addr
; /* Memory address. */
10735 /* ARM instruction record contains opcode of current insn
10736 and execution state (before entry to decode_insn()),
10737 contains list of to-be-modified registers and
10738 memory blocks (on return from decode_insn()). */
10740 typedef struct insn_decode_record_t
10742 struct gdbarch
*gdbarch
;
10743 struct regcache
*regcache
;
10744 CORE_ADDR this_addr
; /* Address of the insn being decoded. */
10745 uint32_t arm_insn
; /* Should accommodate thumb. */
10746 uint32_t cond
; /* Condition code. */
10747 uint32_t opcode
; /* Insn opcode. */
10748 uint32_t decode
; /* Insn decode bits. */
10749 uint32_t mem_rec_count
; /* No of mem records. */
10750 uint32_t reg_rec_count
; /* No of reg records. */
10751 uint32_t *arm_regs
; /* Registers to be saved for this record. */
10752 struct arm_mem_r
*arm_mems
; /* Memory to be saved for this record. */
10753 } insn_decode_record
;
/* Checks ARM SBZ and SBO mandatory fields.  Returns non-zero when the
   LEN-bit field of INSN starting at bit BIT_NUM (1-based) satisfies the
   should-be-one (SBO != 0) or should-be-zero (SBO == 0) constraint.
   NOTE(review): the loop tests the low bit of the (possibly inverted)
   field on each shift, so every bit of the field must match — confirm
   against the ARM ARM encoding tables when touching this.  */

static int
sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
{
  uint32_t ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));

  if (!len)
    return 1;

  /* For a should-be-zero field, invert so the check below can uniformly
     require ones.  */
  if (!sbo)
    ones = ~ones;

  while (ones)
    {
      if (!(ones & sbo))
	return 0;
      ones = ones >> 1;
    }

  return 1;
}

/* Overall outcome of recording one instruction.  */
enum arm_record_result
{
  ARM_RECORD_SUCCESS = 0,
  ARM_RECORD_FAILURE = 1
};

/* Which flavour of misc store arm_record_strx is recording.  */
typedef enum
{
  ARM_RECORD_STRH = 1,
  ARM_RECORD_STRD
} arm_record_strx_t;
10801 arm_record_strx (insn_decode_record
*arm_insn_r
, uint32_t *record_buf
,
10802 uint32_t *record_buf_mem
, arm_record_strx_t str_type
)
10805 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
10806 ULONGEST u_regval
[2]= {0};
10808 uint32_t reg_src1
= 0, reg_src2
= 0;
10809 uint32_t immed_high
= 0, immed_low
= 0,offset_8
= 0, tgt_mem_addr
= 0;
10810 uint32_t opcode1
= 0;
10812 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
10813 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
10814 opcode1
= bits (arm_insn_r
->arm_insn
, 20, 24);
10817 if (14 == arm_insn_r
->opcode
|| 10 == arm_insn_r
->opcode
)
10819 /* 1) Handle misc store, immediate offset. */
10820 immed_low
= bits (arm_insn_r
->arm_insn
, 0, 3);
10821 immed_high
= bits (arm_insn_r
->arm_insn
, 8, 11);
10822 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
10823 regcache_raw_read_unsigned (reg_cache
, reg_src1
,
10825 if (ARM_PC_REGNUM
== reg_src1
)
10827 /* If R15 was used as Rn, hence current PC+8. */
10828 u_regval
[0] = u_regval
[0] + 8;
10830 offset_8
= (immed_high
<< 4) | immed_low
;
10831 /* Calculate target store address. */
10832 if (14 == arm_insn_r
->opcode
)
10834 tgt_mem_addr
= u_regval
[0] + offset_8
;
10838 tgt_mem_addr
= u_regval
[0] - offset_8
;
10840 if (ARM_RECORD_STRH
== str_type
)
10842 record_buf_mem
[0] = 2;
10843 record_buf_mem
[1] = tgt_mem_addr
;
10844 arm_insn_r
->mem_rec_count
= 1;
10846 else if (ARM_RECORD_STRD
== str_type
)
10848 record_buf_mem
[0] = 4;
10849 record_buf_mem
[1] = tgt_mem_addr
;
10850 record_buf_mem
[2] = 4;
10851 record_buf_mem
[3] = tgt_mem_addr
+ 4;
10852 arm_insn_r
->mem_rec_count
= 2;
10855 else if (12 == arm_insn_r
->opcode
|| 8 == arm_insn_r
->opcode
)
10857 /* 2) Store, register offset. */
10859 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
10861 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
10862 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
10863 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
10864 if (15 == reg_src2
)
10866 /* If R15 was used as Rn, hence current PC+8. */
10867 u_regval
[0] = u_regval
[0] + 8;
10869 /* Calculate target store address, Rn +/- Rm, register offset. */
10870 if (12 == arm_insn_r
->opcode
)
10872 tgt_mem_addr
= u_regval
[0] + u_regval
[1];
10876 tgt_mem_addr
= u_regval
[1] - u_regval
[0];
10878 if (ARM_RECORD_STRH
== str_type
)
10880 record_buf_mem
[0] = 2;
10881 record_buf_mem
[1] = tgt_mem_addr
;
10882 arm_insn_r
->mem_rec_count
= 1;
10884 else if (ARM_RECORD_STRD
== str_type
)
10886 record_buf_mem
[0] = 4;
10887 record_buf_mem
[1] = tgt_mem_addr
;
10888 record_buf_mem
[2] = 4;
10889 record_buf_mem
[3] = tgt_mem_addr
+ 4;
10890 arm_insn_r
->mem_rec_count
= 2;
10893 else if (11 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
10894 || 2 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
)
10896 /* 3) Store, immediate pre-indexed. */
10897 /* 5) Store, immediate post-indexed. */
10898 immed_low
= bits (arm_insn_r
->arm_insn
, 0, 3);
10899 immed_high
= bits (arm_insn_r
->arm_insn
, 8, 11);
10900 offset_8
= (immed_high
<< 4) | immed_low
;
10901 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
10902 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
10903 /* Calculate target store address, Rn +/- Rm, register offset. */
10904 if (15 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
)
10906 tgt_mem_addr
= u_regval
[0] + offset_8
;
10910 tgt_mem_addr
= u_regval
[0] - offset_8
;
10912 if (ARM_RECORD_STRH
== str_type
)
10914 record_buf_mem
[0] = 2;
10915 record_buf_mem
[1] = tgt_mem_addr
;
10916 arm_insn_r
->mem_rec_count
= 1;
10918 else if (ARM_RECORD_STRD
== str_type
)
10920 record_buf_mem
[0] = 4;
10921 record_buf_mem
[1] = tgt_mem_addr
;
10922 record_buf_mem
[2] = 4;
10923 record_buf_mem
[3] = tgt_mem_addr
+ 4;
10924 arm_insn_r
->mem_rec_count
= 2;
10926 /* Record Rn also as it changes. */
10927 *(record_buf
) = bits (arm_insn_r
->arm_insn
, 16, 19);
10928 arm_insn_r
->reg_rec_count
= 1;
10930 else if (9 == arm_insn_r
->opcode
|| 13 == arm_insn_r
->opcode
10931 || 0 == arm_insn_r
->opcode
|| 4 == arm_insn_r
->opcode
)
10933 /* 4) Store, register pre-indexed. */
10934 /* 6) Store, register post -indexed. */
10935 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
10936 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
10937 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
10938 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
10939 /* Calculate target store address, Rn +/- Rm, register offset. */
10940 if (13 == arm_insn_r
->opcode
|| 4 == arm_insn_r
->opcode
)
10942 tgt_mem_addr
= u_regval
[0] + u_regval
[1];
10946 tgt_mem_addr
= u_regval
[1] - u_regval
[0];
10948 if (ARM_RECORD_STRH
== str_type
)
10950 record_buf_mem
[0] = 2;
10951 record_buf_mem
[1] = tgt_mem_addr
;
10952 arm_insn_r
->mem_rec_count
= 1;
10954 else if (ARM_RECORD_STRD
== str_type
)
10956 record_buf_mem
[0] = 4;
10957 record_buf_mem
[1] = tgt_mem_addr
;
10958 record_buf_mem
[2] = 4;
10959 record_buf_mem
[3] = tgt_mem_addr
+ 4;
10960 arm_insn_r
->mem_rec_count
= 2;
10962 /* Record Rn also as it changes. */
10963 *(record_buf
) = bits (arm_insn_r
->arm_insn
, 16, 19);
10964 arm_insn_r
->reg_rec_count
= 1;
10969 /* Handling ARM extension space insns. */
10972 arm_record_extension_space (insn_decode_record
*arm_insn_r
)
10974 uint32_t ret
= 0; /* Return value: -1:record failure ; 0:success */
10975 uint32_t opcode1
= 0, opcode2
= 0, insn_op1
= 0;
10976 uint32_t record_buf
[8], record_buf_mem
[8];
10977 uint32_t reg_src1
= 0;
10978 uint32_t immed_high
= 0, immed_low
= 0,offset_8
= 0, tgt_mem_addr
= 0;
10979 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
10980 ULONGEST u_regval
= 0;
10982 gdb_assert (!INSN_RECORDED(arm_insn_r
));
10983 /* Handle unconditional insn extension space. */
10985 opcode1
= bits (arm_insn_r
->arm_insn
, 20, 27);
10986 opcode2
= bits (arm_insn_r
->arm_insn
, 4, 7);
10987 if (arm_insn_r
->cond
)
10989 /* PLD has no affect on architectural state, it just affects
10991 if (5 == ((opcode1
& 0xE0) >> 5))
10994 record_buf
[0] = ARM_PS_REGNUM
;
10995 record_buf
[1] = ARM_LR_REGNUM
;
10996 arm_insn_r
->reg_rec_count
= 2;
10998 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
11002 opcode1
= bits (arm_insn_r
->arm_insn
, 25, 27);
11003 if (3 == opcode1
&& bit (arm_insn_r
->arm_insn
, 4))
11006 /* Undefined instruction on ARM V5; need to handle if later
11007 versions define it. */
11010 opcode1
= bits (arm_insn_r
->arm_insn
, 24, 27);
11011 opcode2
= bits (arm_insn_r
->arm_insn
, 4, 7);
11012 insn_op1
= bits (arm_insn_r
->arm_insn
, 20, 23);
11014 /* Handle arithmetic insn extension space. */
11015 if (!opcode1
&& 9 == opcode2
&& 1 != arm_insn_r
->cond
11016 && !INSN_RECORDED(arm_insn_r
))
11018 /* Handle MLA(S) and MUL(S). */
11019 if (0 <= insn_op1
&& 3 >= insn_op1
)
11021 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11022 record_buf
[1] = ARM_PS_REGNUM
;
11023 arm_insn_r
->reg_rec_count
= 2;
11025 else if (4 <= insn_op1
&& 15 >= insn_op1
)
11027 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
11028 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 16, 19);
11029 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 12, 15);
11030 record_buf
[2] = ARM_PS_REGNUM
;
11031 arm_insn_r
->reg_rec_count
= 3;
11035 opcode1
= bits (arm_insn_r
->arm_insn
, 26, 27);
11036 opcode2
= bits (arm_insn_r
->arm_insn
, 23, 24);
11037 insn_op1
= bits (arm_insn_r
->arm_insn
, 21, 22);
11039 /* Handle control insn extension space. */
11041 if (!opcode1
&& 2 == opcode2
&& !bit (arm_insn_r
->arm_insn
, 20)
11042 && 1 != arm_insn_r
->cond
&& !INSN_RECORDED(arm_insn_r
))
11044 if (!bit (arm_insn_r
->arm_insn
,25))
11046 if (!bits (arm_insn_r
->arm_insn
, 4, 7))
11048 if ((0 == insn_op1
) || (2 == insn_op1
))
11051 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11052 arm_insn_r
->reg_rec_count
= 1;
11054 else if (1 == insn_op1
)
11056 /* CSPR is going to be changed. */
11057 record_buf
[0] = ARM_PS_REGNUM
;
11058 arm_insn_r
->reg_rec_count
= 1;
11060 else if (3 == insn_op1
)
11062 /* SPSR is going to be changed. */
11063 /* We need to get SPSR value, which is yet to be done. */
11064 printf_unfiltered (_("Process record does not support "
11065 "instruction 0x%0x at address %s.\n"),
11066 arm_insn_r
->arm_insn
,
11067 paddress (arm_insn_r
->gdbarch
,
11068 arm_insn_r
->this_addr
));
11072 else if (1 == bits (arm_insn_r
->arm_insn
, 4, 7))
11077 record_buf
[0] = ARM_PS_REGNUM
;
11078 arm_insn_r
->reg_rec_count
= 1;
11080 else if (3 == insn_op1
)
11083 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11084 arm_insn_r
->reg_rec_count
= 1;
11087 else if (3 == bits (arm_insn_r
->arm_insn
, 4, 7))
11090 record_buf
[0] = ARM_PS_REGNUM
;
11091 record_buf
[1] = ARM_LR_REGNUM
;
11092 arm_insn_r
->reg_rec_count
= 2;
11094 else if (5 == bits (arm_insn_r
->arm_insn
, 4, 7))
11096 /* QADD, QSUB, QDADD, QDSUB */
11097 record_buf
[0] = ARM_PS_REGNUM
;
11098 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 12, 15);
11099 arm_insn_r
->reg_rec_count
= 2;
11101 else if (7 == bits (arm_insn_r
->arm_insn
, 4, 7))
11104 record_buf
[0] = ARM_PS_REGNUM
;
11105 record_buf
[1] = ARM_LR_REGNUM
;
11106 arm_insn_r
->reg_rec_count
= 2;
11108 /* Save SPSR also;how? */
11109 printf_unfiltered (_("Process record does not support "
11110 "instruction 0x%0x at address %s.\n"),
11111 arm_insn_r
->arm_insn
,
11112 paddress (arm_insn_r
->gdbarch
, arm_insn_r
->this_addr
));
11115 else if(8 == bits (arm_insn_r
->arm_insn
, 4, 7)
11116 || 10 == bits (arm_insn_r
->arm_insn
, 4, 7)
11117 || 12 == bits (arm_insn_r
->arm_insn
, 4, 7)
11118 || 14 == bits (arm_insn_r
->arm_insn
, 4, 7)
11121 if (0 == insn_op1
|| 1 == insn_op1
)
11123 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11124 /* We dont do optimization for SMULW<y> where we
11126 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11127 record_buf
[1] = ARM_PS_REGNUM
;
11128 arm_insn_r
->reg_rec_count
= 2;
11130 else if (2 == insn_op1
)
11133 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11134 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
11135 arm_insn_r
->reg_rec_count
= 2;
11137 else if (3 == insn_op1
)
11140 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11141 arm_insn_r
->reg_rec_count
= 1;
11147 /* MSR : immediate form. */
11150 /* CSPR is going to be changed. */
11151 record_buf
[0] = ARM_PS_REGNUM
;
11152 arm_insn_r
->reg_rec_count
= 1;
11154 else if (3 == insn_op1
)
11156 /* SPSR is going to be changed. */
11157 /* we need to get SPSR value, which is yet to be done */
11158 printf_unfiltered (_("Process record does not support "
11159 "instruction 0x%0x at address %s.\n"),
11160 arm_insn_r
->arm_insn
,
11161 paddress (arm_insn_r
->gdbarch
,
11162 arm_insn_r
->this_addr
));
11168 opcode1
= bits (arm_insn_r
->arm_insn
, 25, 27);
11169 opcode2
= bits (arm_insn_r
->arm_insn
, 20, 24);
11170 insn_op1
= bits (arm_insn_r
->arm_insn
, 5, 6);
11172 /* Handle load/store insn extension space. */
11174 if (!opcode1
&& bit (arm_insn_r
->arm_insn
, 7)
11175 && bit (arm_insn_r
->arm_insn
, 4) && 1 != arm_insn_r
->cond
11176 && !INSN_RECORDED(arm_insn_r
))
11181 /* These insn, changes register and memory as well. */
11182 /* SWP or SWPB insn. */
11183 /* Get memory address given by Rn. */
11184 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
11185 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
11186 /* SWP insn ?, swaps word. */
11187 if (8 == arm_insn_r
->opcode
)
11189 record_buf_mem
[0] = 4;
11193 /* SWPB insn, swaps only byte. */
11194 record_buf_mem
[0] = 1;
11196 record_buf_mem
[1] = u_regval
;
11197 arm_insn_r
->mem_rec_count
= 1;
11198 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11199 arm_insn_r
->reg_rec_count
= 1;
11201 else if (1 == insn_op1
&& !bit (arm_insn_r
->arm_insn
, 20))
11204 arm_record_strx(arm_insn_r
, &record_buf
[0], &record_buf_mem
[0],
11207 else if (2 == insn_op1
&& !bit (arm_insn_r
->arm_insn
, 20))
11210 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11211 record_buf
[1] = record_buf
[0] + 1;
11212 arm_insn_r
->reg_rec_count
= 2;
11214 else if (3 == insn_op1
&& !bit (arm_insn_r
->arm_insn
, 20))
11217 arm_record_strx(arm_insn_r
, &record_buf
[0], &record_buf_mem
[0],
11220 else if (bit (arm_insn_r
->arm_insn
, 20) && insn_op1
<= 3)
11222 /* LDRH, LDRSB, LDRSH. */
11223 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11224 arm_insn_r
->reg_rec_count
= 1;
11229 opcode1
= bits (arm_insn_r
->arm_insn
, 23, 27);
11230 if (24 == opcode1
&& bit (arm_insn_r
->arm_insn
, 21)
11231 && !INSN_RECORDED(arm_insn_r
))
11234 /* Handle coprocessor insn extension space. */
11237 /* To be done for ARMv5 and later; as of now we return -1. */
11239 printf_unfiltered (_("Process record does not support instruction x%0x "
11240 "at address %s.\n"),arm_insn_r
->arm_insn
,
11241 paddress (arm_insn_r
->gdbarch
, arm_insn_r
->this_addr
));
11244 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11245 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11250 /* Handling opcode 000 insns. */
11253 arm_record_data_proc_misc_ld_str (insn_decode_record
*arm_insn_r
)
11255 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11256 uint32_t record_buf
[8], record_buf_mem
[8];
11257 ULONGEST u_regval
[2] = {0};
11259 uint32_t reg_src1
= 0, reg_src2
= 0, reg_dest
= 0;
11260 uint32_t immed_high
= 0, immed_low
= 0, offset_8
= 0, tgt_mem_addr
= 0;
11261 uint32_t opcode1
= 0;
11263 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
11264 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
11265 opcode1
= bits (arm_insn_r
->arm_insn
, 20, 24);
11267 /* Data processing insn /multiply insn. */
11268 if (9 == arm_insn_r
->decode
11269 && ((4 <= arm_insn_r
->opcode
&& 7 >= arm_insn_r
->opcode
)
11270 || (0 == arm_insn_r
->opcode
|| 1 == arm_insn_r
->opcode
)))
11272 /* Handle multiply instructions. */
11273 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11274 if (0 == arm_insn_r
->opcode
|| 1 == arm_insn_r
->opcode
)
11276 /* Handle MLA and MUL. */
11277 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 16, 19);
11278 record_buf
[1] = ARM_PS_REGNUM
;
11279 arm_insn_r
->reg_rec_count
= 2;
11281 else if (4 <= arm_insn_r
->opcode
&& 7 >= arm_insn_r
->opcode
)
11283 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11284 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 16, 19);
11285 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 12, 15);
11286 record_buf
[2] = ARM_PS_REGNUM
;
11287 arm_insn_r
->reg_rec_count
= 3;
11290 else if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
)
11291 && (11 == arm_insn_r
->decode
|| 13 == arm_insn_r
->decode
))
11293 /* Handle misc load insns, as 20th bit (L = 1). */
11294 /* LDR insn has a capability to do branching, if
11295 MOV LR, PC is precceded by LDR insn having Rn as R15
11296 in that case, it emulates branch and link insn, and hence we
11297 need to save CSPR and PC as well. I am not sure this is right
11298 place; as opcode = 010 LDR insn make this happen, if R15 was
11300 reg_dest
= bits (arm_insn_r
->arm_insn
, 12, 15);
11301 if (15 != reg_dest
)
11303 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11304 arm_insn_r
->reg_rec_count
= 1;
11308 record_buf
[0] = reg_dest
;
11309 record_buf
[1] = ARM_PS_REGNUM
;
11310 arm_insn_r
->reg_rec_count
= 2;
11313 else if ((9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
)
11314 && sbo_sbz (arm_insn_r
->arm_insn
, 5, 12, 0)
11315 && sbo_sbz (arm_insn_r
->arm_insn
, 13, 4, 1)
11316 && 2 == bits (arm_insn_r
->arm_insn
, 20, 21))
11318 /* Handle MSR insn. */
11319 if (9 == arm_insn_r
->opcode
)
11321 /* CSPR is going to be changed. */
11322 record_buf
[0] = ARM_PS_REGNUM
;
11323 arm_insn_r
->reg_rec_count
= 1;
11327 /* SPSR is going to be changed. */
11328 /* How to read SPSR value? */
11329 printf_unfiltered (_("Process record does not support instruction "
11330 "0x%0x at address %s.\n"),
11331 arm_insn_r
->arm_insn
,
11332 paddress (arm_insn_r
->gdbarch
, arm_insn_r
->this_addr
));
11336 else if (9 == arm_insn_r
->decode
11337 && (8 == arm_insn_r
->opcode
|| 10 == arm_insn_r
->opcode
)
11338 && !bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
11340 /* Handling SWP, SWPB. */
11341 /* These insn, changes register and memory as well. */
11342 /* SWP or SWPB insn. */
11344 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
11345 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
11346 /* SWP insn ?, swaps word. */
11347 if (8 == arm_insn_r
->opcode
)
11349 record_buf_mem
[0] = 4;
11353 /* SWPB insn, swaps only byte. */
11354 record_buf_mem
[0] = 1;
11356 record_buf_mem
[1] = u_regval
[0];
11357 arm_insn_r
->mem_rec_count
= 1;
11358 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11359 arm_insn_r
->reg_rec_count
= 1;
11361 else if (3 == arm_insn_r
->decode
&& 0x12 == opcode1
11362 && sbo_sbz (arm_insn_r
->arm_insn
, 9, 12, 1))
11364 /* Handle BLX, branch and link/exchange. */
11365 if (9 == arm_insn_r
->opcode
)
11367 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
11368 and R14 stores the return address. */
11369 record_buf
[0] = ARM_PS_REGNUM
;
11370 record_buf
[1] = ARM_LR_REGNUM
;
11371 arm_insn_r
->reg_rec_count
= 2;
11374 else if (7 == arm_insn_r
->decode
&& 0x12 == opcode1
)
11376 /* Handle enhanced software breakpoint insn, BKPT. */
11377 /* CPSR is changed to be executed in ARM state, disabling normal
11378 interrupts, entering abort mode. */
11379 /* According to high vector configuration PC is set. */
11380 /* user hit breakpoint and type reverse, in
11381 that case, we need to go back with previous CPSR and
11382 Program Counter. */
11383 record_buf
[0] = ARM_PS_REGNUM
;
11384 record_buf
[1] = ARM_LR_REGNUM
;
11385 arm_insn_r
->reg_rec_count
= 2;
11387 /* Save SPSR also; how? */
11388 printf_unfiltered (_("Process record does not support instruction "
11389 "0x%0x at address %s.\n"),arm_insn_r
->arm_insn
,
11390 paddress (arm_insn_r
->gdbarch
,
11391 arm_insn_r
->this_addr
));
11394 else if (11 == arm_insn_r
->decode
11395 && !bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
11397 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
11399 /* Handle str(x) insn */
11400 arm_record_strx(arm_insn_r
, &record_buf
[0], &record_buf_mem
[0],
11403 else if (1 == arm_insn_r
->decode
&& 0x12 == opcode1
11404 && sbo_sbz (arm_insn_r
->arm_insn
, 9, 12, 1))
11406 /* Handle BX, branch and link/exchange. */
11407 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
11408 record_buf
[0] = ARM_PS_REGNUM
;
11409 arm_insn_r
->reg_rec_count
= 1;
11411 else if (1 == arm_insn_r
->decode
&& 0x16 == opcode1
11412 && sbo_sbz (arm_insn_r
->arm_insn
, 9, 4, 1)
11413 && sbo_sbz (arm_insn_r
->arm_insn
, 17, 4, 1))
11415 /* Count leading zeros: CLZ. */
11416 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11417 arm_insn_r
->reg_rec_count
= 1;
11419 else if (!bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
)
11420 && (8 == arm_insn_r
->opcode
|| 10 == arm_insn_r
->opcode
)
11421 && sbo_sbz (arm_insn_r
->arm_insn
, 17, 4, 1)
11422 && sbo_sbz (arm_insn_r
->arm_insn
, 1, 12, 0)
11425 /* Handle MRS insn. */
11426 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11427 arm_insn_r
->reg_rec_count
= 1;
11429 else if (arm_insn_r
->opcode
<= 15)
11431 /* Normal data processing insns. */
11432 /* Out of 11 shifter operands mode, all the insn modifies destination
11433 register, which is specified by 13-16 decode. */
11434 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11435 record_buf
[1] = ARM_PS_REGNUM
;
11436 arm_insn_r
->reg_rec_count
= 2;
11443 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11444 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11448 /* Handling opcode 001 insns. */
11451 arm_record_data_proc_imm (insn_decode_record
*arm_insn_r
)
11453 uint32_t record_buf
[8], record_buf_mem
[8];
11455 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
11456 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
11458 if ((9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
)
11459 && 2 == bits (arm_insn_r
->arm_insn
, 20, 21)
11460 && sbo_sbz (arm_insn_r
->arm_insn
, 13, 4, 1)
11463 /* Handle MSR insn. */
11464 if (9 == arm_insn_r
->opcode
)
11466 /* CSPR is going to be changed. */
11467 record_buf
[0] = ARM_PS_REGNUM
;
11468 arm_insn_r
->reg_rec_count
= 1;
11472 /* SPSR is going to be changed. */
11475 else if (arm_insn_r
->opcode
<= 15)
11477 /* Normal data processing insns. */
11478 /* Out of 11 shifter operands mode, all the insn modifies destination
11479 register, which is specified by 13-16 decode. */
11480 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11481 record_buf
[1] = ARM_PS_REGNUM
;
11482 arm_insn_r
->reg_rec_count
= 2;
11489 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11490 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11494 /* Handle ARM mode instructions with opcode 010. */
11497 arm_record_ld_st_imm_offset (insn_decode_record
*arm_insn_r
)
11499 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11501 uint32_t reg_base
, reg_dest
;
11502 uint32_t offset_12
, tgt_mem_addr
;
11503 uint32_t record_buf
[8], record_buf_mem
[8];
11504 unsigned char wback
;
11507 /* Calculate wback. */
11508 wback
= (bit (arm_insn_r
->arm_insn
, 24) == 0)
11509 || (bit (arm_insn_r
->arm_insn
, 21) == 1);
11511 arm_insn_r
->reg_rec_count
= 0;
11512 reg_base
= bits (arm_insn_r
->arm_insn
, 16, 19);
11514 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
11516 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
11519 reg_dest
= bits (arm_insn_r
->arm_insn
, 12, 15);
11520 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_dest
;
11522 /* The LDR instruction is capable of doing branching. If MOV LR, PC
11523 preceeds a LDR instruction having R15 as reg_base, it
11524 emulates a branch and link instruction, and hence we need to save
11525 CPSR and PC as well. */
11526 if (ARM_PC_REGNUM
== reg_dest
)
11527 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_PS_REGNUM
;
11529 /* If wback is true, also save the base register, which is going to be
11532 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
11536 /* STR (immediate), STRB (immediate), STRBT and STRT. */
11538 offset_12
= bits (arm_insn_r
->arm_insn
, 0, 11);
11539 regcache_raw_read_unsigned (reg_cache
, reg_base
, &u_regval
);
11541 /* Handle bit U. */
11542 if (bit (arm_insn_r
->arm_insn
, 23))
11544 /* U == 1: Add the offset. */
11545 tgt_mem_addr
= (uint32_t) u_regval
+ offset_12
;
11549 /* U == 0: subtract the offset. */
11550 tgt_mem_addr
= (uint32_t) u_regval
- offset_12
;
11553 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
11555 if (bit (arm_insn_r
->arm_insn
, 22))
11557 /* STRB and STRBT: 1 byte. */
11558 record_buf_mem
[0] = 1;
11562 /* STR and STRT: 4 bytes. */
11563 record_buf_mem
[0] = 4;
11566 /* Handle bit P. */
11567 if (bit (arm_insn_r
->arm_insn
, 24))
11568 record_buf_mem
[1] = tgt_mem_addr
;
11570 record_buf_mem
[1] = (uint32_t) u_regval
;
11572 arm_insn_r
->mem_rec_count
= 1;
11574 /* If wback is true, also save the base register, which is going to be
11577 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
11580 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11581 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11585 /* Handling opcode 011 insns. */
11588 arm_record_ld_st_reg_offset (insn_decode_record
*arm_insn_r
)
11590 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11592 uint32_t shift_imm
= 0;
11593 uint32_t reg_src1
= 0, reg_src2
= 0, reg_dest
= 0;
11594 uint32_t offset_12
= 0, tgt_mem_addr
= 0;
11595 uint32_t record_buf
[8], record_buf_mem
[8];
11598 ULONGEST u_regval
[2];
11600 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
11601 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
11603 /* Handle enhanced store insns and LDRD DSP insn,
11604 order begins according to addressing modes for store insns
11608 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
11610 reg_dest
= bits (arm_insn_r
->arm_insn
, 12, 15);
11611 /* LDR insn has a capability to do branching, if
11612 MOV LR, PC is precedded by LDR insn having Rn as R15
11613 in that case, it emulates branch and link insn, and hence we
11614 need to save CSPR and PC as well. */
11615 if (15 != reg_dest
)
11617 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11618 arm_insn_r
->reg_rec_count
= 1;
11622 record_buf
[0] = reg_dest
;
11623 record_buf
[1] = ARM_PS_REGNUM
;
11624 arm_insn_r
->reg_rec_count
= 2;
11629 if (! bits (arm_insn_r
->arm_insn
, 4, 11))
11631 /* Store insn, register offset and register pre-indexed,
11632 register post-indexed. */
11634 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
11636 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
11637 regcache_raw_read_unsigned (reg_cache
, reg_src1
11639 regcache_raw_read_unsigned (reg_cache
, reg_src2
11641 if (15 == reg_src2
)
11643 /* If R15 was used as Rn, hence current PC+8. */
11644 /* Pre-indexed mode doesnt reach here ; illegal insn. */
11645 u_regval
[0] = u_regval
[0] + 8;
11647 /* Calculate target store address, Rn +/- Rm, register offset. */
11649 if (bit (arm_insn_r
->arm_insn
, 23))
11651 tgt_mem_addr
= u_regval
[0] + u_regval
[1];
11655 tgt_mem_addr
= u_regval
[1] - u_regval
[0];
11658 switch (arm_insn_r
->opcode
)
11672 record_buf_mem
[0] = 4;
11687 record_buf_mem
[0] = 1;
11691 gdb_assert_not_reached ("no decoding pattern found");
11694 record_buf_mem
[1] = tgt_mem_addr
;
11695 arm_insn_r
->mem_rec_count
= 1;
11697 if (9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
11698 || 13 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
11699 || 0 == arm_insn_r
->opcode
|| 2 == arm_insn_r
->opcode
11700 || 4 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
11701 || 1 == arm_insn_r
->opcode
|| 3 == arm_insn_r
->opcode
11702 || 5 == arm_insn_r
->opcode
|| 7 == arm_insn_r
->opcode
11705 /* Rn is going to be changed in pre-indexed mode and
11706 post-indexed mode as well. */
11707 record_buf
[0] = reg_src2
;
11708 arm_insn_r
->reg_rec_count
= 1;
11713 /* Store insn, scaled register offset; scaled pre-indexed. */
11714 offset_12
= bits (arm_insn_r
->arm_insn
, 5, 6);
11716 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
11718 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
11719 /* Get shift_imm. */
11720 shift_imm
= bits (arm_insn_r
->arm_insn
, 7, 11);
11721 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
11722 regcache_raw_read_signed (reg_cache
, reg_src1
, &s_word
);
11723 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
11724 /* Offset_12 used as shift. */
11728 /* Offset_12 used as index. */
11729 offset_12
= u_regval
[0] << shift_imm
;
11733 offset_12
= (!shift_imm
)?0:u_regval
[0] >> shift_imm
;
11739 if (bit (u_regval
[0], 31))
11741 offset_12
= 0xFFFFFFFF;
11750 /* This is arithmetic shift. */
11751 offset_12
= s_word
>> shift_imm
;
11758 regcache_raw_read_unsigned (reg_cache
, ARM_PS_REGNUM
,
11760 /* Get C flag value and shift it by 31. */
11761 offset_12
= (((bit (u_regval
[1], 29)) << 31) \
11762 | (u_regval
[0]) >> 1);
11766 offset_12
= (u_regval
[0] >> shift_imm
) \
11768 (sizeof(uint32_t) - shift_imm
));
11773 gdb_assert_not_reached ("no decoding pattern found");
11777 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
11779 if (bit (arm_insn_r
->arm_insn
, 23))
11781 tgt_mem_addr
= u_regval
[1] + offset_12
;
11785 tgt_mem_addr
= u_regval
[1] - offset_12
;
11788 switch (arm_insn_r
->opcode
)
11802 record_buf_mem
[0] = 4;
11817 record_buf_mem
[0] = 1;
11821 gdb_assert_not_reached ("no decoding pattern found");
11824 record_buf_mem
[1] = tgt_mem_addr
;
11825 arm_insn_r
->mem_rec_count
= 1;
11827 if (9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
11828 || 13 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
11829 || 0 == arm_insn_r
->opcode
|| 2 == arm_insn_r
->opcode
11830 || 4 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
11831 || 1 == arm_insn_r
->opcode
|| 3 == arm_insn_r
->opcode
11832 || 5 == arm_insn_r
->opcode
|| 7 == arm_insn_r
->opcode
11835 /* Rn is going to be changed in register scaled pre-indexed
11836 mode,and scaled post indexed mode. */
11837 record_buf
[0] = reg_src2
;
11838 arm_insn_r
->reg_rec_count
= 1;
11843 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11844 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11848 /* Handle ARM mode instructions with opcode 100. */
11851 arm_record_ld_st_multiple (insn_decode_record
*arm_insn_r
)
11853 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11854 uint32_t register_count
= 0, register_bits
;
11855 uint32_t reg_base
, addr_mode
;
11856 uint32_t record_buf
[24], record_buf_mem
[48];
11860 /* Fetch the list of registers. */
11861 register_bits
= bits (arm_insn_r
->arm_insn
, 0, 15);
11862 arm_insn_r
->reg_rec_count
= 0;
11864 /* Fetch the base register that contains the address we are loading data
11866 reg_base
= bits (arm_insn_r
->arm_insn
, 16, 19);
11868 /* Calculate wback. */
11869 wback
= (bit (arm_insn_r
->arm_insn
, 21) == 1);
11871 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
11873 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11875 /* Find out which registers are going to be loaded from memory. */
11876 while (register_bits
)
11878 if (register_bits
& 0x00000001)
11879 record_buf
[arm_insn_r
->reg_rec_count
++] = register_count
;
11880 register_bits
= register_bits
>> 1;
11885 /* If wback is true, also save the base register, which is going to be
11888 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
11890 /* Save the CPSR register. */
11891 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_PS_REGNUM
;
11895 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11897 addr_mode
= bits (arm_insn_r
->arm_insn
, 23, 24);
11899 regcache_raw_read_unsigned (reg_cache
, reg_base
, &u_regval
);
11901 /* Find out how many registers are going to be stored to memory. */
11902 while (register_bits
)
11904 if (register_bits
& 0x00000001)
11906 register_bits
= register_bits
>> 1;
11911 /* STMDA (STMED): Decrement after. */
11913 record_buf_mem
[1] = (uint32_t) u_regval
11914 - register_count
* INT_REGISTER_SIZE
+ 4;
11916 /* STM (STMIA, STMEA): Increment after. */
11918 record_buf_mem
[1] = (uint32_t) u_regval
;
11920 /* STMDB (STMFD): Decrement before. */
11922 record_buf_mem
[1] = (uint32_t) u_regval
11923 - register_count
* INT_REGISTER_SIZE
;
11925 /* STMIB (STMFA): Increment before. */
11927 record_buf_mem
[1] = (uint32_t) u_regval
+ INT_REGISTER_SIZE
;
11930 gdb_assert_not_reached ("no decoding pattern found");
11934 record_buf_mem
[0] = register_count
* INT_REGISTER_SIZE
;
11935 arm_insn_r
->mem_rec_count
= 1;
11937 /* If wback is true, also save the base register, which is going to be
11940 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
11943 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11944 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11948 /* Handling opcode 101 insns. */
11951 arm_record_b_bl (insn_decode_record
*arm_insn_r
)
11953 uint32_t record_buf
[8];
11955 /* Handle B, BL, BLX(1) insns. */
11956 /* B simply branches so we do nothing here. */
11957 /* Note: BLX(1) doesnt fall here but instead it falls into
11958 extension space. */
11959 if (bit (arm_insn_r
->arm_insn
, 24))
11961 record_buf
[0] = ARM_LR_REGNUM
;
11962 arm_insn_r
->reg_rec_count
= 1;
11965 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11970 /* Handling opcode 110 insns. */
11973 arm_record_unsupported_insn (insn_decode_record
*arm_insn_r
)
11975 printf_unfiltered (_("Process record does not support instruction "
11976 "0x%0x at address %s.\n"),arm_insn_r
->arm_insn
,
11977 paddress (arm_insn_r
->gdbarch
, arm_insn_r
->this_addr
));
11982 /* Record handler for vector data transfer instructions. */
11985 arm_record_vdata_transfer_insn (insn_decode_record
*arm_insn_r
)
11987 uint32_t bits_a
, bit_c
, bit_l
, reg_t
, reg_v
;
11988 uint32_t record_buf
[4];
11990 const int num_regs
= gdbarch_num_regs (arm_insn_r
->gdbarch
);
11991 reg_t
= bits (arm_insn_r
->arm_insn
, 12, 15);
11992 reg_v
= bits (arm_insn_r
->arm_insn
, 21, 23);
11993 bits_a
= bits (arm_insn_r
->arm_insn
, 21, 23);
11994 bit_l
= bit (arm_insn_r
->arm_insn
, 20);
11995 bit_c
= bit (arm_insn_r
->arm_insn
, 8);
11997 /* Handle VMOV instruction. */
11998 if (bit_l
&& bit_c
)
12000 record_buf
[0] = reg_t
;
12001 arm_insn_r
->reg_rec_count
= 1;
12003 else if (bit_l
&& !bit_c
)
12005 /* Handle VMOV instruction. */
12006 if (bits_a
== 0x00)
12008 if (bit (arm_insn_r
->arm_insn
, 20))
12009 record_buf
[0] = reg_t
;
12011 record_buf
[0] = num_regs
+ (bit (arm_insn_r
->arm_insn
, 7) |
12014 arm_insn_r
->reg_rec_count
= 1;
12016 /* Handle VMRS instruction. */
12017 else if (bits_a
== 0x07)
12020 reg_t
= ARM_PS_REGNUM
;
12022 record_buf
[0] = reg_t
;
12023 arm_insn_r
->reg_rec_count
= 1;
12026 else if (!bit_l
&& !bit_c
)
12028 /* Handle VMOV instruction. */
12029 if (bits_a
== 0x00)
12031 if (bit (arm_insn_r
->arm_insn
, 20))
12032 record_buf
[0] = reg_t
;
12034 record_buf
[0] = num_regs
+ (bit (arm_insn_r
->arm_insn
, 7) |
12037 arm_insn_r
->reg_rec_count
= 1;
12039 /* Handle VMSR instruction. */
12040 else if (bits_a
== 0x07)
12042 record_buf
[0] = ARM_FPSCR_REGNUM
;
12043 arm_insn_r
->reg_rec_count
= 1;
12046 else if (!bit_l
&& bit_c
)
12048 /* Handle VMOV instruction. */
12049 if (!(bits_a
& 0x04))
12051 record_buf
[0] = (reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4))
12053 arm_insn_r
->reg_rec_count
= 1;
12055 /* Handle VDUP instruction. */
12058 if (bit (arm_insn_r
->arm_insn
, 21))
12060 reg_v
= reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4);
12061 record_buf
[0] = reg_v
+ ARM_D0_REGNUM
;
12062 record_buf
[1] = reg_v
+ ARM_D0_REGNUM
+ 1;
12063 arm_insn_r
->reg_rec_count
= 2;
12067 reg_v
= reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4);
12068 record_buf
[0] = reg_v
+ ARM_D0_REGNUM
;
12069 arm_insn_r
->reg_rec_count
= 1;
12074 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
12078 /* Record handler for extension register load/store instructions. */
12081 arm_record_exreg_ld_st_insn (insn_decode_record
*arm_insn_r
)
12083 uint32_t opcode
, single_reg
;
12084 uint8_t op_vldm_vstm
;
12085 uint32_t record_buf
[8], record_buf_mem
[128];
12086 ULONGEST u_regval
= 0;
12088 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
12089 const int num_regs
= gdbarch_num_regs (arm_insn_r
->gdbarch
);
12091 opcode
= bits (arm_insn_r
->arm_insn
, 20, 24);
12092 single_reg
= bit (arm_insn_r
->arm_insn
, 8);
12093 op_vldm_vstm
= opcode
& 0x1b;
12095 /* Handle VMOV instructions. */
12096 if ((opcode
& 0x1e) == 0x04)
12098 if (bit (arm_insn_r
->arm_insn
, 4))
12100 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
12101 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
12102 arm_insn_r
->reg_rec_count
= 2;
12106 uint8_t reg_m
= (bits (arm_insn_r
->arm_insn
, 0, 3) << 1)
12107 | bit (arm_insn_r
->arm_insn
, 5);
12111 record_buf
[0] = num_regs
+ reg_m
;
12112 record_buf
[1] = num_regs
+ reg_m
+ 1;
12113 arm_insn_r
->reg_rec_count
= 2;
12117 record_buf
[0] = reg_m
+ ARM_D0_REGNUM
;
12118 arm_insn_r
->reg_rec_count
= 1;
12122 /* Handle VSTM and VPUSH instructions. */
12123 else if (op_vldm_vstm
== 0x08 || op_vldm_vstm
== 0x0a
12124 || op_vldm_vstm
== 0x12)
12126 uint32_t start_address
, reg_rn
, imm_off32
, imm_off8
, memory_count
;
12127 uint32_t memory_index
= 0;
12129 reg_rn
= bits (arm_insn_r
->arm_insn
, 16, 19);
12130 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
12131 imm_off8
= bits (arm_insn_r
->arm_insn
, 0, 7);
12132 imm_off32
= imm_off8
<< 24;
12133 memory_count
= imm_off8
;
12135 if (bit (arm_insn_r
->arm_insn
, 23))
12136 start_address
= u_regval
;
12138 start_address
= u_regval
- imm_off32
;
12140 if (bit (arm_insn_r
->arm_insn
, 21))
12142 record_buf
[0] = reg_rn
;
12143 arm_insn_r
->reg_rec_count
= 1;
12146 while (memory_count
> 0)
12150 record_buf_mem
[memory_index
] = start_address
;
12151 record_buf_mem
[memory_index
+ 1] = 4;
12152 start_address
= start_address
+ 4;
12153 memory_index
= memory_index
+ 2;
12157 record_buf_mem
[memory_index
] = start_address
;
12158 record_buf_mem
[memory_index
+ 1] = 4;
12159 record_buf_mem
[memory_index
+ 2] = start_address
+ 4;
12160 record_buf_mem
[memory_index
+ 3] = 4;
12161 start_address
= start_address
+ 8;
12162 memory_index
= memory_index
+ 4;
12166 arm_insn_r
->mem_rec_count
= (memory_index
>> 1);
12168 /* Handle VLDM instructions. */
12169 else if (op_vldm_vstm
== 0x09 || op_vldm_vstm
== 0x0b
12170 || op_vldm_vstm
== 0x13)
12172 uint32_t reg_count
, reg_vd
;
12173 uint32_t reg_index
= 0;
12175 reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
12176 reg_count
= bits (arm_insn_r
->arm_insn
, 0, 7);
12179 reg_vd
= reg_vd
| (bit (arm_insn_r
->arm_insn
, 22) << 4);
12181 reg_vd
= (reg_vd
<< 1) | bit (arm_insn_r
->arm_insn
, 22);
12183 if (bit (arm_insn_r
->arm_insn
, 21))
12184 record_buf
[reg_index
++] = bits (arm_insn_r
->arm_insn
, 16, 19);
12186 while (reg_count
> 0)
12189 record_buf
[reg_index
++] = num_regs
+ reg_vd
+ reg_count
- 1;
12191 record_buf
[reg_index
++] = ARM_D0_REGNUM
+ reg_vd
+ reg_count
- 1;
12195 arm_insn_r
->reg_rec_count
= reg_index
;
12197 /* VSTR Vector store register. */
12198 else if ((opcode
& 0x13) == 0x10)
12200 uint32_t start_address
, reg_rn
, imm_off32
, imm_off8
, memory_count
;
12201 uint32_t memory_index
= 0;
12203 reg_rn
= bits (arm_insn_r
->arm_insn
, 16, 19);
12204 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
12205 imm_off8
= bits (arm_insn_r
->arm_insn
, 0, 7);
12206 imm_off32
= imm_off8
<< 24;
12207 memory_count
= imm_off8
;
12209 if (bit (arm_insn_r
->arm_insn
, 23))
12210 start_address
= u_regval
+ imm_off32
;
12212 start_address
= u_regval
- imm_off32
;
12216 record_buf_mem
[memory_index
] = start_address
;
12217 record_buf_mem
[memory_index
+ 1] = 4;
12218 arm_insn_r
->mem_rec_count
= 1;
12222 record_buf_mem
[memory_index
] = start_address
;
12223 record_buf_mem
[memory_index
+ 1] = 4;
12224 record_buf_mem
[memory_index
+ 2] = start_address
+ 4;
12225 record_buf_mem
[memory_index
+ 3] = 4;
12226 arm_insn_r
->mem_rec_count
= 2;
12229 /* VLDR Vector load register. */
12230 else if ((opcode
& 0x13) == 0x11)
12232 uint32_t reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
12236 reg_vd
= reg_vd
| (bit (arm_insn_r
->arm_insn
, 22) << 4);
12237 record_buf
[0] = ARM_D0_REGNUM
+ reg_vd
;
12241 reg_vd
= (reg_vd
<< 1) | bit (arm_insn_r
->arm_insn
, 22);
12242 record_buf
[0] = num_regs
+ reg_vd
;
12244 arm_insn_r
->reg_rec_count
= 1;
12247 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
12248 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
12252 /* Record handler for arm/thumb mode VFP data processing instructions. */
12255 arm_record_vfp_data_proc_insn (insn_decode_record
*arm_insn_r
)
12257 uint32_t opc1
, opc2
, opc3
, dp_op_sz
, bit_d
, reg_vd
;
12258 uint32_t record_buf
[4];
12259 enum insn_types
{INSN_T0
, INSN_T1
, INSN_T2
, INSN_T3
, INSN_INV
};
12260 enum insn_types curr_insn_type
= INSN_INV
;
12262 reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
12263 opc1
= bits (arm_insn_r
->arm_insn
, 20, 23);
12264 opc2
= bits (arm_insn_r
->arm_insn
, 16, 19);
12265 opc3
= bits (arm_insn_r
->arm_insn
, 6, 7);
12266 dp_op_sz
= bit (arm_insn_r
->arm_insn
, 8);
12267 bit_d
= bit (arm_insn_r
->arm_insn
, 22);
12268 opc1
= opc1
& 0x04;
12270 /* Handle VMLA, VMLS. */
12273 if (bit (arm_insn_r
->arm_insn
, 10))
12275 if (bit (arm_insn_r
->arm_insn
, 6))
12276 curr_insn_type
= INSN_T0
;
12278 curr_insn_type
= INSN_T1
;
12283 curr_insn_type
= INSN_T1
;
12285 curr_insn_type
= INSN_T2
;
12288 /* Handle VNMLA, VNMLS, VNMUL. */
12289 else if (opc1
== 0x01)
12292 curr_insn_type
= INSN_T1
;
12294 curr_insn_type
= INSN_T2
;
12297 else if (opc1
== 0x02 && !(opc3
& 0x01))
12299 if (bit (arm_insn_r
->arm_insn
, 10))
12301 if (bit (arm_insn_r
->arm_insn
, 6))
12302 curr_insn_type
= INSN_T0
;
12304 curr_insn_type
= INSN_T1
;
12309 curr_insn_type
= INSN_T1
;
12311 curr_insn_type
= INSN_T2
;
12314 /* Handle VADD, VSUB. */
12315 else if (opc1
== 0x03)
12317 if (!bit (arm_insn_r
->arm_insn
, 9))
12319 if (bit (arm_insn_r
->arm_insn
, 6))
12320 curr_insn_type
= INSN_T0
;
12322 curr_insn_type
= INSN_T1
;
12327 curr_insn_type
= INSN_T1
;
12329 curr_insn_type
= INSN_T2
;
12333 else if (opc1
== 0x0b)
12336 curr_insn_type
= INSN_T1
;
12338 curr_insn_type
= INSN_T2
;
12340 /* Handle all other vfp data processing instructions. */
12341 else if (opc1
== 0x0b)
12344 if (!(opc3
& 0x01) || (opc2
== 0x00 && opc3
== 0x01))
12346 if (bit (arm_insn_r
->arm_insn
, 4))
12348 if (bit (arm_insn_r
->arm_insn
, 6))
12349 curr_insn_type
= INSN_T0
;
12351 curr_insn_type
= INSN_T1
;
12356 curr_insn_type
= INSN_T1
;
12358 curr_insn_type
= INSN_T2
;
12361 /* Handle VNEG and VABS. */
12362 else if ((opc2
== 0x01 && opc3
== 0x01)
12363 || (opc2
== 0x00 && opc3
== 0x03))
12365 if (!bit (arm_insn_r
->arm_insn
, 11))
12367 if (bit (arm_insn_r
->arm_insn
, 6))
12368 curr_insn_type
= INSN_T0
;
12370 curr_insn_type
= INSN_T1
;
12375 curr_insn_type
= INSN_T1
;
12377 curr_insn_type
= INSN_T2
;
12380 /* Handle VSQRT. */
12381 else if (opc2
== 0x01 && opc3
== 0x03)
12384 curr_insn_type
= INSN_T1
;
12386 curr_insn_type
= INSN_T2
;
12389 else if (opc2
== 0x07 && opc3
== 0x03)
12392 curr_insn_type
= INSN_T1
;
12394 curr_insn_type
= INSN_T2
;
12396 else if (opc3
& 0x01)
12399 if ((opc2
== 0x08) || (opc2
& 0x0e) == 0x0c)
12401 if (!bit (arm_insn_r
->arm_insn
, 18))
12402 curr_insn_type
= INSN_T2
;
12406 curr_insn_type
= INSN_T1
;
12408 curr_insn_type
= INSN_T2
;
12412 else if ((opc2
& 0x0e) == 0x0a || (opc2
& 0x0e) == 0x0e)
12415 curr_insn_type
= INSN_T1
;
12417 curr_insn_type
= INSN_T2
;
12419 /* Handle VCVTB, VCVTT. */
12420 else if ((opc2
& 0x0e) == 0x02)
12421 curr_insn_type
= INSN_T2
;
12422 /* Handle VCMP, VCMPE. */
12423 else if ((opc2
& 0x0e) == 0x04)
12424 curr_insn_type
= INSN_T3
;
12428 switch (curr_insn_type
)
12431 reg_vd
= reg_vd
| (bit_d
<< 4);
12432 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
12433 record_buf
[1] = reg_vd
+ ARM_D0_REGNUM
+ 1;
12434 arm_insn_r
->reg_rec_count
= 2;
12438 reg_vd
= reg_vd
| (bit_d
<< 4);
12439 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
12440 arm_insn_r
->reg_rec_count
= 1;
12444 reg_vd
= (reg_vd
<< 1) | bit_d
;
12445 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
12446 arm_insn_r
->reg_rec_count
= 1;
12450 record_buf
[0] = ARM_FPSCR_REGNUM
;
12451 arm_insn_r
->reg_rec_count
= 1;
12455 gdb_assert_not_reached ("no decoding pattern found");
12459 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
12463 /* Handling opcode 110 insns. */
12466 arm_record_asimd_vfp_coproc (insn_decode_record
*arm_insn_r
)
12468 uint32_t op
, op1
, op1_sbit
, op1_ebit
, coproc
;
12470 coproc
= bits (arm_insn_r
->arm_insn
, 8, 11);
12471 op1
= bits (arm_insn_r
->arm_insn
, 20, 25);
12472 op1_ebit
= bit (arm_insn_r
->arm_insn
, 20);
12474 if ((coproc
& 0x0e) == 0x0a)
12476 /* Handle extension register ld/st instructions. */
12478 return arm_record_exreg_ld_st_insn (arm_insn_r
);
12480 /* 64-bit transfers between arm core and extension registers. */
12481 if ((op1
& 0x3e) == 0x04)
12482 return arm_record_exreg_ld_st_insn (arm_insn_r
);
12486 /* Handle coprocessor ld/st instructions. */
12491 return arm_record_unsupported_insn (arm_insn_r
);
12494 return arm_record_unsupported_insn (arm_insn_r
);
12497 /* Move to coprocessor from two arm core registers. */
12499 return arm_record_unsupported_insn (arm_insn_r
);
12501 /* Move to two arm core registers from coprocessor. */
12506 reg_t
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
12507 reg_t
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
12508 arm_insn_r
->reg_rec_count
= 2;
12510 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, reg_t
);
12514 return arm_record_unsupported_insn (arm_insn_r
);
12517 /* Handling opcode 111 insns. */
12520 arm_record_coproc_data_proc (insn_decode_record
*arm_insn_r
)
12522 uint32_t op
, op1_sbit
, op1_ebit
, coproc
;
12523 struct gdbarch_tdep
*tdep
= gdbarch_tdep (arm_insn_r
->gdbarch
);
12524 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
12525 ULONGEST u_regval
= 0;
12527 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 24, 27);
12528 coproc
= bits (arm_insn_r
->arm_insn
, 8, 11);
12529 op1_sbit
= bit (arm_insn_r
->arm_insn
, 24);
12530 op1_ebit
= bit (arm_insn_r
->arm_insn
, 20);
12531 op
= bit (arm_insn_r
->arm_insn
, 4);
12533 /* Handle arm SWI/SVC system call instructions. */
12536 if (tdep
->arm_syscall_record
!= NULL
)
12538 ULONGEST svc_operand
, svc_number
;
12540 svc_operand
= (0x00ffffff & arm_insn_r
->arm_insn
);
12542 if (svc_operand
) /* OABI. */
12543 svc_number
= svc_operand
- 0x900000;
12545 regcache_raw_read_unsigned (reg_cache
, 7, &svc_number
);
12547 return tdep
->arm_syscall_record (reg_cache
, svc_number
);
12551 printf_unfiltered (_("no syscall record support\n"));
12556 if ((coproc
& 0x0e) == 0x0a)
12558 /* VFP data-processing instructions. */
12559 if (!op1_sbit
&& !op
)
12560 return arm_record_vfp_data_proc_insn (arm_insn_r
);
12562 /* Advanced SIMD, VFP instructions. */
12563 if (!op1_sbit
&& op
)
12564 return arm_record_vdata_transfer_insn (arm_insn_r
);
12568 /* Coprocessor data operations. */
12569 if (!op1_sbit
&& !op
)
12570 return arm_record_unsupported_insn (arm_insn_r
);
12572 /* Move to Coprocessor from ARM core register. */
12573 if (!op1_sbit
&& !op1_ebit
&& op
)
12574 return arm_record_unsupported_insn (arm_insn_r
);
12576 /* Move to arm core register from coprocessor. */
12577 if (!op1_sbit
&& op1_ebit
&& op
)
12579 uint32_t record_buf
[1];
12581 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
12582 if (record_buf
[0] == 15)
12583 record_buf
[0] = ARM_PS_REGNUM
;
12585 arm_insn_r
->reg_rec_count
= 1;
12586 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
,
12592 return arm_record_unsupported_insn (arm_insn_r
);
12595 /* Handling opcode 000 insns. */
12598 thumb_record_shift_add_sub (insn_decode_record
*thumb_insn_r
)
12600 uint32_t record_buf
[8];
12601 uint32_t reg_src1
= 0;
12603 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12605 record_buf
[0] = ARM_PS_REGNUM
;
12606 record_buf
[1] = reg_src1
;
12607 thumb_insn_r
->reg_rec_count
= 2;
12609 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12615 /* Handling opcode 001 insns. */
12618 thumb_record_add_sub_cmp_mov (insn_decode_record
*thumb_insn_r
)
12620 uint32_t record_buf
[8];
12621 uint32_t reg_src1
= 0;
12623 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12625 record_buf
[0] = ARM_PS_REGNUM
;
12626 record_buf
[1] = reg_src1
;
12627 thumb_insn_r
->reg_rec_count
= 2;
12629 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12634 /* Handling opcode 010 insns. */
12637 thumb_record_ld_st_reg_offset (insn_decode_record
*thumb_insn_r
)
12639 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12640 uint32_t record_buf
[8], record_buf_mem
[8];
12642 uint32_t reg_src1
= 0, reg_src2
= 0;
12643 uint32_t opcode1
= 0, opcode2
= 0, opcode3
= 0;
12645 ULONGEST u_regval
[2] = {0};
12647 opcode1
= bits (thumb_insn_r
->arm_insn
, 10, 12);
12649 if (bit (thumb_insn_r
->arm_insn
, 12))
12651 /* Handle load/store register offset. */
12652 opcode2
= bits (thumb_insn_r
->arm_insn
, 9, 10);
12653 if (opcode2
>= 12 && opcode2
<= 15)
12655 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
12656 reg_src1
= bits (thumb_insn_r
->arm_insn
,0, 2);
12657 record_buf
[0] = reg_src1
;
12658 thumb_insn_r
->reg_rec_count
= 1;
12660 else if (opcode2
>= 8 && opcode2
<= 10)
12662 /* STR(2), STRB(2), STRH(2) . */
12663 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
12664 reg_src2
= bits (thumb_insn_r
->arm_insn
, 6, 8);
12665 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
12666 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
12668 record_buf_mem
[0] = 4; /* STR (2). */
12669 else if (10 == opcode2
)
12670 record_buf_mem
[0] = 1; /* STRB (2). */
12671 else if (9 == opcode2
)
12672 record_buf_mem
[0] = 2; /* STRH (2). */
12673 record_buf_mem
[1] = u_regval
[0] + u_regval
[1];
12674 thumb_insn_r
->mem_rec_count
= 1;
12677 else if (bit (thumb_insn_r
->arm_insn
, 11))
12679 /* Handle load from literal pool. */
12681 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12682 record_buf
[0] = reg_src1
;
12683 thumb_insn_r
->reg_rec_count
= 1;
12687 opcode2
= bits (thumb_insn_r
->arm_insn
, 8, 9);
12688 opcode3
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12689 if ((3 == opcode2
) && (!opcode3
))
12691 /* Branch with exchange. */
12692 record_buf
[0] = ARM_PS_REGNUM
;
12693 thumb_insn_r
->reg_rec_count
= 1;
12697 /* Format 8; special data processing insns. */
12698 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12699 record_buf
[0] = ARM_PS_REGNUM
;
12700 record_buf
[1] = reg_src1
;
12701 thumb_insn_r
->reg_rec_count
= 2;
12706 /* Format 5; data processing insns. */
12707 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12708 if (bit (thumb_insn_r
->arm_insn
, 7))
12710 reg_src1
= reg_src1
+ 8;
12712 record_buf
[0] = ARM_PS_REGNUM
;
12713 record_buf
[1] = reg_src1
;
12714 thumb_insn_r
->reg_rec_count
= 2;
12717 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12718 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12724 /* Handling opcode 001 insns. */
12727 thumb_record_ld_st_imm_offset (insn_decode_record
*thumb_insn_r
)
12729 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12730 uint32_t record_buf
[8], record_buf_mem
[8];
12732 uint32_t reg_src1
= 0;
12733 uint32_t opcode
= 0, immed_5
= 0;
12735 ULONGEST u_regval
= 0;
12737 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12742 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12743 record_buf
[0] = reg_src1
;
12744 thumb_insn_r
->reg_rec_count
= 1;
12749 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
12750 immed_5
= bits (thumb_insn_r
->arm_insn
, 6, 10);
12751 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
12752 record_buf_mem
[0] = 4;
12753 record_buf_mem
[1] = u_regval
+ (immed_5
* 4);
12754 thumb_insn_r
->mem_rec_count
= 1;
12757 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12758 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12764 /* Handling opcode 100 insns. */
12767 thumb_record_ld_st_stack (insn_decode_record
*thumb_insn_r
)
12769 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12770 uint32_t record_buf
[8], record_buf_mem
[8];
12772 uint32_t reg_src1
= 0;
12773 uint32_t opcode
= 0, immed_8
= 0, immed_5
= 0;
12775 ULONGEST u_regval
= 0;
12777 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12782 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12783 record_buf
[0] = reg_src1
;
12784 thumb_insn_r
->reg_rec_count
= 1;
12786 else if (1 == opcode
)
12789 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12790 record_buf
[0] = reg_src1
;
12791 thumb_insn_r
->reg_rec_count
= 1;
12793 else if (2 == opcode
)
12796 immed_8
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12797 regcache_raw_read_unsigned (reg_cache
, ARM_SP_REGNUM
, &u_regval
);
12798 record_buf_mem
[0] = 4;
12799 record_buf_mem
[1] = u_regval
+ (immed_8
* 4);
12800 thumb_insn_r
->mem_rec_count
= 1;
12802 else if (0 == opcode
)
12805 immed_5
= bits (thumb_insn_r
->arm_insn
, 6, 10);
12806 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
12807 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
12808 record_buf_mem
[0] = 2;
12809 record_buf_mem
[1] = u_regval
+ (immed_5
* 2);
12810 thumb_insn_r
->mem_rec_count
= 1;
12813 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12814 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12820 /* Handling opcode 101 insns. */
12823 thumb_record_misc (insn_decode_record
*thumb_insn_r
)
12825 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12827 uint32_t opcode
= 0, opcode1
= 0, opcode2
= 0;
12828 uint32_t register_bits
= 0, register_count
= 0;
12829 uint32_t register_list
[8] = {0}, index
= 0, start_address
= 0;
12830 uint32_t record_buf
[24], record_buf_mem
[48];
12833 ULONGEST u_regval
= 0;
12835 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12836 opcode1
= bits (thumb_insn_r
->arm_insn
, 8, 12);
12837 opcode2
= bits (thumb_insn_r
->arm_insn
, 9, 12);
12842 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12843 while (register_bits
)
12845 if (register_bits
& 0x00000001)
12846 record_buf
[index
++] = register_count
;
12847 register_bits
= register_bits
>> 1;
12850 record_buf
[index
++] = ARM_PS_REGNUM
;
12851 record_buf
[index
++] = ARM_SP_REGNUM
;
12852 thumb_insn_r
->reg_rec_count
= index
;
12854 else if (10 == opcode2
)
12857 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12858 regcache_raw_read_unsigned (reg_cache
, ARM_SP_REGNUM
, &u_regval
);
12859 while (register_bits
)
12861 if (register_bits
& 0x00000001)
12863 register_bits
= register_bits
>> 1;
12865 start_address
= u_regval
- \
12866 (4 * (bit (thumb_insn_r
->arm_insn
, 8) + register_count
));
12867 thumb_insn_r
->mem_rec_count
= register_count
;
12868 while (register_count
)
12870 record_buf_mem
[(register_count
* 2) - 1] = start_address
;
12871 record_buf_mem
[(register_count
* 2) - 2] = 4;
12872 start_address
= start_address
+ 4;
12875 record_buf
[0] = ARM_SP_REGNUM
;
12876 thumb_insn_r
->reg_rec_count
= 1;
12878 else if (0x1E == opcode1
)
12881 /* Handle enhanced software breakpoint insn, BKPT. */
12882 /* CPSR is changed to be executed in ARM state, disabling normal
12883 interrupts, entering abort mode. */
12884 /* According to high vector configuration PC is set. */
12885 /* User hits breakpoint and type reverse, in that case, we need to go back with
12886 previous CPSR and Program Counter. */
12887 record_buf
[0] = ARM_PS_REGNUM
;
12888 record_buf
[1] = ARM_LR_REGNUM
;
12889 thumb_insn_r
->reg_rec_count
= 2;
12890 /* We need to save SPSR value, which is not yet done. */
12891 printf_unfiltered (_("Process record does not support instruction "
12892 "0x%0x at address %s.\n"),
12893 thumb_insn_r
->arm_insn
,
12894 paddress (thumb_insn_r
->gdbarch
,
12895 thumb_insn_r
->this_addr
));
12898 else if ((0 == opcode
) || (1 == opcode
))
12900 /* ADD(5), ADD(6). */
12901 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12902 record_buf
[0] = reg_src1
;
12903 thumb_insn_r
->reg_rec_count
= 1;
12905 else if (2 == opcode
)
12907 /* ADD(7), SUB(4). */
12908 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12909 record_buf
[0] = ARM_SP_REGNUM
;
12910 thumb_insn_r
->reg_rec_count
= 1;
12913 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12914 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12920 /* Handling opcode 110 insns. */
12923 thumb_record_ldm_stm_swi (insn_decode_record
*thumb_insn_r
)
12925 struct gdbarch_tdep
*tdep
= gdbarch_tdep (thumb_insn_r
->gdbarch
);
12926 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12928 uint32_t ret
= 0; /* function return value: -1:record failure ; 0:success */
12929 uint32_t reg_src1
= 0;
12930 uint32_t opcode1
= 0, opcode2
= 0, register_bits
= 0, register_count
= 0;
12931 uint32_t register_list
[8] = {0}, index
= 0, start_address
= 0;
12932 uint32_t record_buf
[24], record_buf_mem
[48];
12934 ULONGEST u_regval
= 0;
12936 opcode1
= bits (thumb_insn_r
->arm_insn
, 8, 12);
12937 opcode2
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12943 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12945 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12946 while (register_bits
)
12948 if (register_bits
& 0x00000001)
12949 record_buf
[index
++] = register_count
;
12950 register_bits
= register_bits
>> 1;
12953 record_buf
[index
++] = reg_src1
;
12954 thumb_insn_r
->reg_rec_count
= index
;
12956 else if (0 == opcode2
)
12958 /* It handles both STMIA. */
12959 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12961 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12962 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
12963 while (register_bits
)
12965 if (register_bits
& 0x00000001)
12967 register_bits
= register_bits
>> 1;
12969 start_address
= u_regval
;
12970 thumb_insn_r
->mem_rec_count
= register_count
;
12971 while (register_count
)
12973 record_buf_mem
[(register_count
* 2) - 1] = start_address
;
12974 record_buf_mem
[(register_count
* 2) - 2] = 4;
12975 start_address
= start_address
+ 4;
12979 else if (0x1F == opcode1
)
12981 /* Handle arm syscall insn. */
12982 if (tdep
->arm_syscall_record
!= NULL
)
12984 regcache_raw_read_unsigned (reg_cache
, 7, &u_regval
);
12985 ret
= tdep
->arm_syscall_record (reg_cache
, u_regval
);
12989 printf_unfiltered (_("no syscall record support\n"));
12994 /* B (1), conditional branch is automatically taken care in process_record,
12995 as PC is saved there. */
12997 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12998 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
13004 /* Handling opcode 111 insns. */
13007 thumb_record_branch (insn_decode_record
*thumb_insn_r
)
13009 uint32_t record_buf
[8];
13010 uint32_t bits_h
= 0;
13012 bits_h
= bits (thumb_insn_r
->arm_insn
, 11, 12);
13014 if (2 == bits_h
|| 3 == bits_h
)
13017 record_buf
[0] = ARM_LR_REGNUM
;
13018 thumb_insn_r
->reg_rec_count
= 1;
13020 else if (1 == bits_h
)
13023 record_buf
[0] = ARM_PS_REGNUM
;
13024 record_buf
[1] = ARM_LR_REGNUM
;
13025 thumb_insn_r
->reg_rec_count
= 2;
13028 /* B(2) is automatically taken care in process_record, as PC is
13031 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
13036 /* Handler for thumb2 load/store multiple instructions. */
13039 thumb2_record_ld_st_multiple (insn_decode_record
*thumb2_insn_r
)
13041 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
13043 uint32_t reg_rn
, op
;
13044 uint32_t register_bits
= 0, register_count
= 0;
13045 uint32_t index
= 0, start_address
= 0;
13046 uint32_t record_buf
[24], record_buf_mem
[48];
13048 ULONGEST u_regval
= 0;
13050 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
13051 op
= bits (thumb2_insn_r
->arm_insn
, 23, 24);
13053 if (0 == op
|| 3 == op
)
13055 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
13057 /* Handle RFE instruction. */
13058 record_buf
[0] = ARM_PS_REGNUM
;
13059 thumb2_insn_r
->reg_rec_count
= 1;
13063 /* Handle SRS instruction after reading banked SP. */
13064 return arm_record_unsupported_insn (thumb2_insn_r
);
13067 else if (1 == op
|| 2 == op
)
13069 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
13071 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
13072 register_bits
= bits (thumb2_insn_r
->arm_insn
, 0, 15);
13073 while (register_bits
)
13075 if (register_bits
& 0x00000001)
13076 record_buf
[index
++] = register_count
;
13079 register_bits
= register_bits
>> 1;
13081 record_buf
[index
++] = reg_rn
;
13082 record_buf
[index
++] = ARM_PS_REGNUM
;
13083 thumb2_insn_r
->reg_rec_count
= index
;
13087 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
13088 register_bits
= bits (thumb2_insn_r
->arm_insn
, 0, 15);
13089 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
13090 while (register_bits
)
13092 if (register_bits
& 0x00000001)
13095 register_bits
= register_bits
>> 1;
13100 /* Start address calculation for LDMDB/LDMEA. */
13101 start_address
= u_regval
;
13105 /* Start address calculation for LDMDB/LDMEA. */
13106 start_address
= u_regval
- register_count
* 4;
13109 thumb2_insn_r
->mem_rec_count
= register_count
;
13110 while (register_count
)
13112 record_buf_mem
[register_count
* 2 - 1] = start_address
;
13113 record_buf_mem
[register_count
* 2 - 2] = 4;
13114 start_address
= start_address
+ 4;
13117 record_buf
[0] = reg_rn
;
13118 record_buf
[1] = ARM_PS_REGNUM
;
13119 thumb2_insn_r
->reg_rec_count
= 2;
13123 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
13125 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13127 return ARM_RECORD_SUCCESS
;
13130 /* Handler for thumb2 load/store (dual/exclusive) and table branch
13134 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record
*thumb2_insn_r
)
13136 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
13138 uint32_t reg_rd
, reg_rn
, offset_imm
;
13139 uint32_t reg_dest1
, reg_dest2
;
13140 uint32_t address
, offset_addr
;
13141 uint32_t record_buf
[8], record_buf_mem
[8];
13142 uint32_t op1
, op2
, op3
;
13145 ULONGEST u_regval
[2];
13147 op1
= bits (thumb2_insn_r
->arm_insn
, 23, 24);
13148 op2
= bits (thumb2_insn_r
->arm_insn
, 20, 21);
13149 op3
= bits (thumb2_insn_r
->arm_insn
, 4, 7);
13151 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
13153 if(!(1 == op1
&& 1 == op2
&& (0 == op3
|| 1 == op3
)))
13155 reg_dest1
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
13156 record_buf
[0] = reg_dest1
;
13157 record_buf
[1] = ARM_PS_REGNUM
;
13158 thumb2_insn_r
->reg_rec_count
= 2;
13161 if (3 == op2
|| (op1
& 2) || (1 == op1
&& 1 == op2
&& 7 == op3
))
13163 reg_dest2
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
13164 record_buf
[2] = reg_dest2
;
13165 thumb2_insn_r
->reg_rec_count
= 3;
13170 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
13171 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
[0]);
13173 if (0 == op1
&& 0 == op2
)
13175 /* Handle STREX. */
13176 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
13177 address
= u_regval
[0] + (offset_imm
* 4);
13178 record_buf_mem
[0] = 4;
13179 record_buf_mem
[1] = address
;
13180 thumb2_insn_r
->mem_rec_count
= 1;
13181 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
13182 record_buf
[0] = reg_rd
;
13183 thumb2_insn_r
->reg_rec_count
= 1;
13185 else if (1 == op1
&& 0 == op2
)
13187 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
13188 record_buf
[0] = reg_rd
;
13189 thumb2_insn_r
->reg_rec_count
= 1;
13190 address
= u_regval
[0];
13191 record_buf_mem
[1] = address
;
13195 /* Handle STREXB. */
13196 record_buf_mem
[0] = 1;
13197 thumb2_insn_r
->mem_rec_count
= 1;
13201 /* Handle STREXH. */
13202 record_buf_mem
[0] = 2 ;
13203 thumb2_insn_r
->mem_rec_count
= 1;
13207 /* Handle STREXD. */
13208 address
= u_regval
[0];
13209 record_buf_mem
[0] = 4;
13210 record_buf_mem
[2] = 4;
13211 record_buf_mem
[3] = address
+ 4;
13212 thumb2_insn_r
->mem_rec_count
= 2;
13217 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
13219 if (bit (thumb2_insn_r
->arm_insn
, 24))
13221 if (bit (thumb2_insn_r
->arm_insn
, 23))
13222 offset_addr
= u_regval
[0] + (offset_imm
* 4);
13224 offset_addr
= u_regval
[0] - (offset_imm
* 4);
13226 address
= offset_addr
;
13229 address
= u_regval
[0];
13231 record_buf_mem
[0] = 4;
13232 record_buf_mem
[1] = address
;
13233 record_buf_mem
[2] = 4;
13234 record_buf_mem
[3] = address
+ 4;
13235 thumb2_insn_r
->mem_rec_count
= 2;
13236 record_buf
[0] = reg_rn
;
13237 thumb2_insn_r
->reg_rec_count
= 1;
13241 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13243 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
13245 return ARM_RECORD_SUCCESS
;
13248 /* Handler for thumb2 data processing (shift register and modified immediate)
13252 thumb2_record_data_proc_sreg_mimm (insn_decode_record
*thumb2_insn_r
)
13254 uint32_t reg_rd
, op
;
13255 uint32_t record_buf
[8];
13257 op
= bits (thumb2_insn_r
->arm_insn
, 21, 24);
13258 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
13260 if ((0 == op
|| 4 == op
|| 8 == op
|| 13 == op
) && 15 == reg_rd
)
13262 record_buf
[0] = ARM_PS_REGNUM
;
13263 thumb2_insn_r
->reg_rec_count
= 1;
13267 record_buf
[0] = reg_rd
;
13268 record_buf
[1] = ARM_PS_REGNUM
;
13269 thumb2_insn_r
->reg_rec_count
= 2;
13272 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13274 return ARM_RECORD_SUCCESS
;
13277 /* Generic handler for thumb2 instructions which effect destination and PS
13281 thumb2_record_ps_dest_generic (insn_decode_record
*thumb2_insn_r
)
13284 uint32_t record_buf
[8];
13286 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
13288 record_buf
[0] = reg_rd
;
13289 record_buf
[1] = ARM_PS_REGNUM
;
13290 thumb2_insn_r
->reg_rec_count
= 2;
13292 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13294 return ARM_RECORD_SUCCESS
;
13297 /* Handler for thumb2 branch and miscellaneous control instructions. */
13300 thumb2_record_branch_misc_cntrl (insn_decode_record
*thumb2_insn_r
)
13302 uint32_t op
, op1
, op2
;
13303 uint32_t record_buf
[8];
13305 op
= bits (thumb2_insn_r
->arm_insn
, 20, 26);
13306 op1
= bits (thumb2_insn_r
->arm_insn
, 12, 14);
13307 op2
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
13309 /* Handle MSR insn. */
13310 if (!(op1
& 0x2) && 0x38 == op
)
13314 /* CPSR is going to be changed. */
13315 record_buf
[0] = ARM_PS_REGNUM
;
13316 thumb2_insn_r
->reg_rec_count
= 1;
13320 arm_record_unsupported_insn(thumb2_insn_r
);
13324 else if (4 == (op1
& 0x5) || 5 == (op1
& 0x5))
13327 record_buf
[0] = ARM_PS_REGNUM
;
13328 record_buf
[1] = ARM_LR_REGNUM
;
13329 thumb2_insn_r
->reg_rec_count
= 2;
13332 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13334 return ARM_RECORD_SUCCESS
;
13337 /* Handler for thumb2 store single data item instructions. */
13340 thumb2_record_str_single_data (insn_decode_record
*thumb2_insn_r
)
13342 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
13344 uint32_t reg_rn
, reg_rm
, offset_imm
, shift_imm
;
13345 uint32_t address
, offset_addr
;
13346 uint32_t record_buf
[8], record_buf_mem
[8];
13349 ULONGEST u_regval
[2];
13351 op1
= bits (thumb2_insn_r
->arm_insn
, 21, 23);
13352 op2
= bits (thumb2_insn_r
->arm_insn
, 6, 11);
13353 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
13354 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
[0]);
13356 if (bit (thumb2_insn_r
->arm_insn
, 23))
13359 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 11);
13360 offset_addr
= u_regval
[0] + offset_imm
;
13361 address
= offset_addr
;
13366 if ((0 == op1
|| 1 == op1
|| 2 == op1
) && !(op2
& 0x20))
13368 /* Handle STRB (register). */
13369 reg_rm
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
13370 regcache_raw_read_unsigned (reg_cache
, reg_rm
, &u_regval
[1]);
13371 shift_imm
= bits (thumb2_insn_r
->arm_insn
, 4, 5);
13372 offset_addr
= u_regval
[1] << shift_imm
;
13373 address
= u_regval
[0] + offset_addr
;
13377 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
13378 if (bit (thumb2_insn_r
->arm_insn
, 10))
13380 if (bit (thumb2_insn_r
->arm_insn
, 9))
13381 offset_addr
= u_regval
[0] + offset_imm
;
13383 offset_addr
= u_regval
[0] - offset_imm
;
13385 address
= offset_addr
;
13388 address
= u_regval
[0];
13394 /* Store byte instructions. */
13397 record_buf_mem
[0] = 1;
13399 /* Store half word instructions. */
13402 record_buf_mem
[0] = 2;
13404 /* Store word instructions. */
13407 record_buf_mem
[0] = 4;
13411 gdb_assert_not_reached ("no decoding pattern found");
13415 record_buf_mem
[1] = address
;
13416 thumb2_insn_r
->mem_rec_count
= 1;
13417 record_buf
[0] = reg_rn
;
13418 thumb2_insn_r
->reg_rec_count
= 1;
13420 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13422 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
13424 return ARM_RECORD_SUCCESS
;
13427 /* Handler for thumb2 load memory hints instructions. */
13430 thumb2_record_ld_mem_hints (insn_decode_record
*thumb2_insn_r
)
13432 uint32_t record_buf
[8];
13433 uint32_t reg_rt
, reg_rn
;
13435 reg_rt
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
13436 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
13438 if (ARM_PC_REGNUM
!= reg_rt
)
13440 record_buf
[0] = reg_rt
;
13441 record_buf
[1] = reg_rn
;
13442 record_buf
[2] = ARM_PS_REGNUM
;
13443 thumb2_insn_r
->reg_rec_count
= 3;
13445 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13447 return ARM_RECORD_SUCCESS
;
13450 return ARM_RECORD_FAILURE
;
13453 /* Handler for thumb2 load word instructions. */
13456 thumb2_record_ld_word (insn_decode_record
*thumb2_insn_r
)
13458 uint32_t opcode1
= 0, opcode2
= 0;
13459 uint32_t record_buf
[8];
13461 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
13462 record_buf
[1] = ARM_PS_REGNUM
;
13463 thumb2_insn_r
->reg_rec_count
= 2;
13465 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13467 return ARM_RECORD_SUCCESS
;
13470 /* Handler for thumb2 long multiply, long multiply accumulate, and
13471 divide instructions. */
13474 thumb2_record_lmul_lmla_div (insn_decode_record
*thumb2_insn_r
)
13476 uint32_t opcode1
= 0, opcode2
= 0;
13477 uint32_t record_buf
[8];
13478 uint32_t reg_src1
= 0;
13480 opcode1
= bits (thumb2_insn_r
->arm_insn
, 20, 22);
13481 opcode2
= bits (thumb2_insn_r
->arm_insn
, 4, 7);
13483 if (0 == opcode1
|| 2 == opcode1
|| (opcode1
>= 4 && opcode1
<= 6))
13485 /* Handle SMULL, UMULL, SMULAL. */
13486 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
13487 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 16, 19);
13488 record_buf
[1] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
13489 record_buf
[2] = ARM_PS_REGNUM
;
13490 thumb2_insn_r
->reg_rec_count
= 3;
13492 else if (1 == opcode1
|| 3 == opcode2
)
13494 /* Handle SDIV and UDIV. */
13495 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 16, 19);
13496 record_buf
[1] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
13497 record_buf
[2] = ARM_PS_REGNUM
;
13498 thumb2_insn_r
->reg_rec_count
= 3;
13501 return ARM_RECORD_FAILURE
;
13503 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13505 return ARM_RECORD_SUCCESS
;
13508 /* Record handler for thumb32 coprocessor instructions. */
13511 thumb2_record_coproc_insn (insn_decode_record
*thumb2_insn_r
)
13513 if (bit (thumb2_insn_r
->arm_insn
, 25))
13514 return arm_record_coproc_data_proc (thumb2_insn_r
);
13516 return arm_record_asimd_vfp_coproc (thumb2_insn_r
);
13519 /* Record handler for advance SIMD structure load/store instructions. */
13522 thumb2_record_asimd_struct_ld_st (insn_decode_record
*thumb2_insn_r
)
13524 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
13525 uint32_t l_bit
, a_bit
, b_bits
;
13526 uint32_t record_buf
[128], record_buf_mem
[128];
13527 uint32_t reg_rn
, reg_vd
, address
, f_esize
, f_elem
;
13528 uint32_t index_r
= 0, index_e
= 0, bf_regs
= 0, index_m
= 0, loop_t
= 0;
13531 l_bit
= bit (thumb2_insn_r
->arm_insn
, 21);
13532 a_bit
= bit (thumb2_insn_r
->arm_insn
, 23);
13533 b_bits
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
13534 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
13535 reg_vd
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
13536 reg_vd
= (bit (thumb2_insn_r
->arm_insn
, 22) << 4) | reg_vd
;
13537 f_ebytes
= (1 << bits (thumb2_insn_r
->arm_insn
, 6, 7));
13538 f_esize
= 8 * f_ebytes
;
13539 f_elem
= 8 / f_ebytes
;
13543 ULONGEST u_regval
= 0;
13544 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
13545 address
= u_regval
;
13550 if (b_bits
== 0x02 || b_bits
== 0x0a || (b_bits
& 0x0e) == 0x06)
13552 if (b_bits
== 0x07)
13554 else if (b_bits
== 0x0a)
13556 else if (b_bits
== 0x06)
13558 else if (b_bits
== 0x02)
13563 for (index_r
= 0; index_r
< bf_regs
; index_r
++)
13565 for (index_e
= 0; index_e
< f_elem
; index_e
++)
13567 record_buf_mem
[index_m
++] = f_ebytes
;
13568 record_buf_mem
[index_m
++] = address
;
13569 address
= address
+ f_ebytes
;
13570 thumb2_insn_r
->mem_rec_count
+= 1;
13575 else if (b_bits
== 0x03 || (b_bits
& 0x0e) == 0x08)
13577 if (b_bits
== 0x09 || b_bits
== 0x08)
13579 else if (b_bits
== 0x03)
13584 for (index_r
= 0; index_r
< bf_regs
; index_r
++)
13585 for (index_e
= 0; index_e
< f_elem
; index_e
++)
13587 for (loop_t
= 0; loop_t
< 2; loop_t
++)
13589 record_buf_mem
[index_m
++] = f_ebytes
;
13590 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
13591 thumb2_insn_r
->mem_rec_count
+= 1;
13593 address
= address
+ (2 * f_ebytes
);
13597 else if ((b_bits
& 0x0e) == 0x04)
13599 for (index_e
= 0; index_e
< f_elem
; index_e
++)
13601 for (loop_t
= 0; loop_t
< 3; loop_t
++)
13603 record_buf_mem
[index_m
++] = f_ebytes
;
13604 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
13605 thumb2_insn_r
->mem_rec_count
+= 1;
13607 address
= address
+ (3 * f_ebytes
);
13611 else if (!(b_bits
& 0x0e))
13613 for (index_e
= 0; index_e
< f_elem
; index_e
++)
13615 for (loop_t
= 0; loop_t
< 4; loop_t
++)
13617 record_buf_mem
[index_m
++] = f_ebytes
;
13618 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
13619 thumb2_insn_r
->mem_rec_count
+= 1;
13621 address
= address
+ (4 * f_ebytes
);
13627 uint8_t bft_size
= bits (thumb2_insn_r
->arm_insn
, 10, 11);
13629 if (bft_size
== 0x00)
13631 else if (bft_size
== 0x01)
13633 else if (bft_size
== 0x02)
13639 if (!(b_bits
& 0x0b) || b_bits
== 0x08)
13640 thumb2_insn_r
->mem_rec_count
= 1;
13642 else if ((b_bits
& 0x0b) == 0x01 || b_bits
== 0x09)
13643 thumb2_insn_r
->mem_rec_count
= 2;
13645 else if ((b_bits
& 0x0b) == 0x02 || b_bits
== 0x0a)
13646 thumb2_insn_r
->mem_rec_count
= 3;
13648 else if ((b_bits
& 0x0b) == 0x03 || b_bits
== 0x0b)
13649 thumb2_insn_r
->mem_rec_count
= 4;
13651 for (index_m
= 0; index_m
< thumb2_insn_r
->mem_rec_count
; index_m
++)
13653 record_buf_mem
[index_m
] = f_ebytes
;
13654 record_buf_mem
[index_m
] = address
+ (index_m
* f_ebytes
);
13663 if (b_bits
== 0x02 || b_bits
== 0x0a || (b_bits
& 0x0e) == 0x06)
13664 thumb2_insn_r
->reg_rec_count
= 1;
13666 else if (b_bits
== 0x03 || (b_bits
& 0x0e) == 0x08)
13667 thumb2_insn_r
->reg_rec_count
= 2;
13669 else if ((b_bits
& 0x0e) == 0x04)
13670 thumb2_insn_r
->reg_rec_count
= 3;
13672 else if (!(b_bits
& 0x0e))
13673 thumb2_insn_r
->reg_rec_count
= 4;
13678 if (!(b_bits
& 0x0b) || b_bits
== 0x08 || b_bits
== 0x0c)
13679 thumb2_insn_r
->reg_rec_count
= 1;
13681 else if ((b_bits
& 0x0b) == 0x01 || b_bits
== 0x09 || b_bits
== 0x0d)
13682 thumb2_insn_r
->reg_rec_count
= 2;
13684 else if ((b_bits
& 0x0b) == 0x02 || b_bits
== 0x0a || b_bits
== 0x0e)
13685 thumb2_insn_r
->reg_rec_count
= 3;
13687 else if ((b_bits
& 0x0b) == 0x03 || b_bits
== 0x0b || b_bits
== 0x0f)
13688 thumb2_insn_r
->reg_rec_count
= 4;
13690 for (index_r
= 0; index_r
< thumb2_insn_r
->reg_rec_count
; index_r
++)
13691 record_buf
[index_r
] = reg_vd
+ ARM_D0_REGNUM
+ index_r
;
13695 if (bits (thumb2_insn_r
->arm_insn
, 0, 3) != 15)
13697 record_buf
[index_r
] = reg_rn
;
13698 thumb2_insn_r
->reg_rec_count
+= 1;
13701 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13703 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
13708 /* Decodes thumb2 instruction type and invokes its record handler. */
13710 static unsigned int
13711 thumb2_record_decode_insn_handler (insn_decode_record
*thumb2_insn_r
)
13713 uint32_t op
, op1
, op2
;
13715 op
= bit (thumb2_insn_r
->arm_insn
, 15);
13716 op1
= bits (thumb2_insn_r
->arm_insn
, 27, 28);
13717 op2
= bits (thumb2_insn_r
->arm_insn
, 20, 26);
13721 if (!(op2
& 0x64 ))
13723 /* Load/store multiple instruction. */
13724 return thumb2_record_ld_st_multiple (thumb2_insn_r
);
13726 else if (!((op2
& 0x64) ^ 0x04))
13728 /* Load/store (dual/exclusive) and table branch instruction. */
13729 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r
);
13731 else if (!((op2
& 0x20) ^ 0x20))
13733 /* Data-processing (shifted register). */
13734 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r
);
13736 else if (op2
& 0x40)
13738 /* Co-processor instructions. */
13739 return thumb2_record_coproc_insn (thumb2_insn_r
);
13742 else if (op1
== 0x02)
13746 /* Branches and miscellaneous control instructions. */
13747 return thumb2_record_branch_misc_cntrl (thumb2_insn_r
);
13749 else if (op2
& 0x20)
13751 /* Data-processing (plain binary immediate) instruction. */
13752 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
13756 /* Data-processing (modified immediate). */
13757 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r
);
13760 else if (op1
== 0x03)
13762 if (!(op2
& 0x71 ))
13764 /* Store single data item. */
13765 return thumb2_record_str_single_data (thumb2_insn_r
);
13767 else if (!((op2
& 0x71) ^ 0x10))
13769 /* Advanced SIMD or structure load/store instructions. */
13770 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r
);
13772 else if (!((op2
& 0x67) ^ 0x01))
13774 /* Load byte, memory hints instruction. */
13775 return thumb2_record_ld_mem_hints (thumb2_insn_r
);
13777 else if (!((op2
& 0x67) ^ 0x03))
13779 /* Load halfword, memory hints instruction. */
13780 return thumb2_record_ld_mem_hints (thumb2_insn_r
);
13782 else if (!((op2
& 0x67) ^ 0x05))
13784 /* Load word instruction. */
13785 return thumb2_record_ld_word (thumb2_insn_r
);
13787 else if (!((op2
& 0x70) ^ 0x20))
13789 /* Data-processing (register) instruction. */
13790 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
13792 else if (!((op2
& 0x78) ^ 0x30))
13794 /* Multiply, multiply accumulate, abs diff instruction. */
13795 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
13797 else if (!((op2
& 0x78) ^ 0x38))
13799 /* Long multiply, long multiply accumulate, and divide. */
13800 return thumb2_record_lmul_lmla_div (thumb2_insn_r
);
13802 else if (op2
& 0x40)
13804 /* Co-processor instructions. */
13805 return thumb2_record_coproc_insn (thumb2_insn_r
);
13812 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13813 and positive val on fauilure. */
13816 extract_arm_insn (insn_decode_record
*insn_record
, uint32_t insn_size
)
13818 gdb_byte buf
[insn_size
];
13820 memset (&buf
[0], 0, insn_size
);
13822 if (target_read_memory (insn_record
->this_addr
, &buf
[0], insn_size
))
13824 insn_record
->arm_insn
= (uint32_t) extract_unsigned_integer (&buf
[0],
13826 gdbarch_byte_order_for_code (insn_record
->gdbarch
));
13830 typedef int (*sti_arm_hdl_fp_t
) (insn_decode_record
*);
13832 /* Decode arm/thumb insn depending on condition cods and opcodes; and
13836 decode_insn (insn_decode_record
*arm_record
, record_type_t record_type
,
13837 uint32_t insn_size
)
13840 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm instruction. */
13841 static const sti_arm_hdl_fp_t
const arm_handle_insn
[8] =
13843 arm_record_data_proc_misc_ld_str
, /* 000. */
13844 arm_record_data_proc_imm
, /* 001. */
13845 arm_record_ld_st_imm_offset
, /* 010. */
13846 arm_record_ld_st_reg_offset
, /* 011. */
13847 arm_record_ld_st_multiple
, /* 100. */
13848 arm_record_b_bl
, /* 101. */
13849 arm_record_asimd_vfp_coproc
, /* 110. */
13850 arm_record_coproc_data_proc
/* 111. */
13853 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb instruction. */
13854 static const sti_arm_hdl_fp_t
const thumb_handle_insn
[8] =
13856 thumb_record_shift_add_sub
, /* 000. */
13857 thumb_record_add_sub_cmp_mov
, /* 001. */
13858 thumb_record_ld_st_reg_offset
, /* 010. */
13859 thumb_record_ld_st_imm_offset
, /* 011. */
13860 thumb_record_ld_st_stack
, /* 100. */
13861 thumb_record_misc
, /* 101. */
13862 thumb_record_ldm_stm_swi
, /* 110. */
13863 thumb_record_branch
/* 111. */
13866 uint32_t ret
= 0; /* return value: negative:failure 0:success. */
13867 uint32_t insn_id
= 0;
13869 if (extract_arm_insn (arm_record
, insn_size
))
13873 printf_unfiltered (_("Process record: error reading memory at "
13874 "addr %s len = %d.\n"),
13875 paddress (arm_record
->gdbarch
, arm_record
->this_addr
), insn_size
);
13879 else if (ARM_RECORD
== record_type
)
13881 arm_record
->cond
= bits (arm_record
->arm_insn
, 28, 31);
13882 insn_id
= bits (arm_record
->arm_insn
, 25, 27);
13883 ret
= arm_record_extension_space (arm_record
);
13884 /* If this insn has fallen into extension space
13885 then we need not decode it anymore. */
13886 if (ret
!= -1 && !INSN_RECORDED(arm_record
))
13888 ret
= arm_handle_insn
[insn_id
] (arm_record
);
13891 else if (THUMB_RECORD
== record_type
)
13893 /* As thumb does not have condition codes, we set negative. */
13894 arm_record
->cond
= -1;
13895 insn_id
= bits (arm_record
->arm_insn
, 13, 15);
13896 ret
= thumb_handle_insn
[insn_id
] (arm_record
);
13898 else if (THUMB2_RECORD
== record_type
)
13900 /* As thumb does not have condition codes, we set negative. */
13901 arm_record
->cond
= -1;
13903 /* Swap first half of 32bit thumb instruction with second half. */
13904 arm_record
->arm_insn
13905 = (arm_record
->arm_insn
>> 16) | (arm_record
->arm_insn
<< 16);
13907 insn_id
= thumb2_record_decode_insn_handler (arm_record
);
13909 if (insn_id
!= ARM_RECORD_SUCCESS
)
13911 arm_record_unsupported_insn (arm_record
);
13917 /* Throw assertion. */
13918 gdb_assert_not_reached ("not a valid instruction, could not decode");
13925 /* Cleans up local record registers and memory allocations. */
13928 deallocate_reg_mem (insn_decode_record
*record
)
13930 xfree (record
->arm_regs
);
13931 xfree (record
->arm_mems
);
13935 /* Parse the current instruction and record the values of the registers and
13936 memory that will be changed in current instruction to record_arch_list".
13937 Return -1 if something is wrong. */
13940 arm_process_record (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
13941 CORE_ADDR insn_addr
)
13944 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
13945 uint32_t no_of_rec
= 0;
13946 uint32_t ret
= 0; /* return value: -1:record failure ; 0:success */
13947 ULONGEST t_bit
= 0, insn_id
= 0;
13949 ULONGEST u_regval
= 0;
13951 insn_decode_record arm_record
;
13953 memset (&arm_record
, 0, sizeof (insn_decode_record
));
13954 arm_record
.regcache
= regcache
;
13955 arm_record
.this_addr
= insn_addr
;
13956 arm_record
.gdbarch
= gdbarch
;
13959 if (record_debug
> 1)
13961 fprintf_unfiltered (gdb_stdlog
, "Process record: arm_process_record "
13963 paddress (gdbarch
, arm_record
.this_addr
));
13966 if (extract_arm_insn (&arm_record
, 2))
13970 printf_unfiltered (_("Process record: error reading memory at "
13971 "addr %s len = %d.\n"),
13972 paddress (arm_record
.gdbarch
,
13973 arm_record
.this_addr
), 2);
13978 /* Check the insn, whether it is thumb or arm one. */
13980 t_bit
= arm_psr_thumb_bit (arm_record
.gdbarch
);
13981 regcache_raw_read_unsigned (arm_record
.regcache
, ARM_PS_REGNUM
, &u_regval
);
13984 if (!(u_regval
& t_bit
))
13986 /* We are decoding arm insn. */
13987 ret
= decode_insn (&arm_record
, ARM_RECORD
, ARM_INSN_SIZE_BYTES
);
13991 insn_id
= bits (arm_record
.arm_insn
, 11, 15);
13992 /* is it thumb2 insn? */
13993 if ((0x1D == insn_id
) || (0x1E == insn_id
) || (0x1F == insn_id
))
13995 ret
= decode_insn (&arm_record
, THUMB2_RECORD
,
13996 THUMB2_INSN_SIZE_BYTES
);
14000 /* We are decoding thumb insn. */
14001 ret
= decode_insn (&arm_record
, THUMB_RECORD
, THUMB_INSN_SIZE_BYTES
);
14007 /* Record registers. */
14008 record_full_arch_list_add_reg (arm_record
.regcache
, ARM_PC_REGNUM
);
14009 if (arm_record
.arm_regs
)
14011 for (no_of_rec
= 0; no_of_rec
< arm_record
.reg_rec_count
; no_of_rec
++)
14013 if (record_full_arch_list_add_reg
14014 (arm_record
.regcache
, arm_record
.arm_regs
[no_of_rec
]))
14018 /* Record memories. */
14019 if (arm_record
.arm_mems
)
14021 for (no_of_rec
= 0; no_of_rec
< arm_record
.mem_rec_count
; no_of_rec
++)
14023 if (record_full_arch_list_add_mem
14024 ((CORE_ADDR
)arm_record
.arm_mems
[no_of_rec
].addr
,
14025 arm_record
.arm_mems
[no_of_rec
].len
))
14030 if (record_full_arch_list_add_end ())
14035 deallocate_reg_mem (&arm_record
);