1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2020 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
32 #include "reggroups.h"
33 #include "target-float.h"
35 #include "arch-utils.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
41 #include "dwarf2/frame.h"
43 #include "prologue-value.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
48 #include "count-one-bits.h"
51 #include "arch/arm-get-next-pcs.h"
53 #include "gdb/sim-arm.h"
56 #include "coff/internal.h"
60 #include "record-full.h"
66 #include "gdbsupport/selftest.h"
69 static bool arm_debug
;
71 /* Macros for setting and testing a bit in a minimal symbol that marks
72 it as Thumb function. The MSB of the minimal symbol's "info" field
73 is used for this purpose.
75 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
76 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
78 #define MSYMBOL_SET_SPECIAL(msym) \
79 MSYMBOL_TARGET_FLAG_1 (msym) = 1
81 #define MSYMBOL_IS_SPECIAL(msym) \
82 MSYMBOL_TARGET_FLAG_1 (msym)
84 struct arm_mapping_symbol
89 bool operator< (const arm_mapping_symbol
&other
) const
90 { return this->value
< other
.value
; }
93 typedef std::vector
<arm_mapping_symbol
> arm_mapping_symbol_vec
;
97 explicit arm_per_bfd (size_t num_sections
)
98 : section_maps (new arm_mapping_symbol_vec
[num_sections
]),
99 section_maps_sorted (new bool[num_sections
] ())
102 DISABLE_COPY_AND_ASSIGN (arm_per_bfd
);
104 /* Information about mapping symbols ($a, $d, $t) in the objfile.
106 The format is an array of vectors of arm_mapping_symbols, there is one
107 vector for each section of the objfile (the array is index by BFD section
110 For each section, the vector of arm_mapping_symbol is sorted by
111 symbol value (address). */
112 std::unique_ptr
<arm_mapping_symbol_vec
[]> section_maps
;
114 /* For each corresponding element of section_maps above, is this vector
116 std::unique_ptr
<bool[]> section_maps_sorted
;
119 /* Per-bfd data used for mapping symbols. */
120 static bfd_key
<arm_per_bfd
> arm_bfd_data_key
;
122 /* The list of available "set arm ..." and "show arm ..." commands. */
123 static struct cmd_list_element
*setarmcmdlist
= NULL
;
124 static struct cmd_list_element
*showarmcmdlist
= NULL
;
126 /* The type of floating-point to use. Keep this in sync with enum
127 arm_float_model, and the help string in _initialize_arm_tdep. */
128 static const char *const fp_model_strings
[] =
138 /* A variable that can be configured by the user. */
139 static enum arm_float_model arm_fp_model
= ARM_FLOAT_AUTO
;
140 static const char *current_fp_model
= "auto";
142 /* The ABI to use. Keep this in sync with arm_abi_kind. */
143 static const char *const arm_abi_strings
[] =
151 /* A variable that can be configured by the user. */
152 static enum arm_abi_kind arm_abi_global
= ARM_ABI_AUTO
;
153 static const char *arm_abi_string
= "auto";
155 /* The execution mode to assume. */
156 static const char *const arm_mode_strings
[] =
164 static const char *arm_fallback_mode_string
= "auto";
165 static const char *arm_force_mode_string
= "auto";
167 /* The standard register names, and all the valid aliases for them. Note
168 that `fp', `sp' and `pc' are not added in this alias list, because they
169 have been added as builtin user registers in
170 std-regs.c:_initialize_frame_reg. */
175 } arm_register_aliases
[] = {
176 /* Basic register numbers. */
193 /* Synonyms (argument and variable registers). */
206 /* Other platform-specific names for r9. */
212 /* Names used by GCC (not listed in the ARM EABI). */
214 /* A special name from the older ATPCS. */
218 static const char *const arm_register_names
[] =
219 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
220 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
221 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
222 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
223 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
224 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
225 "fps", "cpsr" }; /* 24 25 */
227 /* Holds the current set of options to be passed to the disassembler. */
228 static char *arm_disassembler_options
;
230 /* Valid register name styles. */
231 static const char **valid_disassembly_styles
;
233 /* Disassembly style to use. Default to "std" register names. */
234 static const char *disassembly_style
;
236 /* All possible arm target descriptors. */
237 static struct target_desc
*tdesc_arm_list
[ARM_FP_TYPE_INVALID
];
238 static struct target_desc
*tdesc_arm_mprofile_list
[ARM_M_TYPE_INVALID
];
240 /* This is used to keep the bfd arch_info in sync with the disassembly
242 static void set_disassembly_style_sfunc (const char *, int,
243 struct cmd_list_element
*);
244 static void show_disassembly_style_sfunc (struct ui_file
*, int,
245 struct cmd_list_element
*,
248 static enum register_status
arm_neon_quad_read (struct gdbarch
*gdbarch
,
249 readable_regcache
*regcache
,
250 int regnum
, gdb_byte
*buf
);
251 static void arm_neon_quad_write (struct gdbarch
*gdbarch
,
252 struct regcache
*regcache
,
253 int regnum
, const gdb_byte
*buf
);
256 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs
*self
);
259 /* get_next_pcs operations. */
260 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops
= {
261 arm_get_next_pcs_read_memory_unsigned_integer
,
262 arm_get_next_pcs_syscall_next_pc
,
263 arm_get_next_pcs_addr_bits_remove
,
264 arm_get_next_pcs_is_thumb
,
268 struct arm_prologue_cache
270 /* The stack pointer at the time this frame was created; i.e. the
271 caller's stack pointer when this function was called. It is used
272 to identify this frame. */
275 /* The frame base for this frame is just prev_sp - frame size.
276 FRAMESIZE is the distance from the frame pointer to the
277 initial stack pointer. */
281 /* The register used to hold the frame pointer for this frame. */
284 /* Saved register offsets. */
285 struct trad_frame_saved_reg
*saved_regs
;
288 static CORE_ADDR
arm_analyze_prologue (struct gdbarch
*gdbarch
,
289 CORE_ADDR prologue_start
,
290 CORE_ADDR prologue_end
,
291 struct arm_prologue_cache
*cache
);
293 /* Architecture version for displaced stepping. This effects the behaviour of
294 certain instructions, and really should not be hard-wired. */
296 #define DISPLACED_STEPPING_ARCH_VERSION 5
298 /* See arm-tdep.h. */
300 bool arm_apcs_32
= true;
302 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
305 arm_psr_thumb_bit (struct gdbarch
*gdbarch
)
307 if (gdbarch_tdep (gdbarch
)->is_m
)
313 /* Determine if the processor is currently executing in Thumb mode. */
316 arm_is_thumb (struct regcache
*regcache
)
319 ULONGEST t_bit
= arm_psr_thumb_bit (regcache
->arch ());
321 cpsr
= regcache_raw_get_unsigned (regcache
, ARM_PS_REGNUM
);
323 return (cpsr
& t_bit
) != 0;
326 /* Determine if FRAME is executing in Thumb mode. */
329 arm_frame_is_thumb (struct frame_info
*frame
)
332 ULONGEST t_bit
= arm_psr_thumb_bit (get_frame_arch (frame
));
334 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
335 directly (from a signal frame or dummy frame) or by interpreting
336 the saved LR (from a prologue or DWARF frame). So consult it and
337 trust the unwinders. */
338 cpsr
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
340 return (cpsr
& t_bit
) != 0;
343 /* Search for the mapping symbol covering MEMADDR. If one is found,
344 return its type. Otherwise, return 0. If START is non-NULL,
345 set *START to the location of the mapping symbol. */
348 arm_find_mapping_symbol (CORE_ADDR memaddr
, CORE_ADDR
*start
)
350 struct obj_section
*sec
;
352 /* If there are mapping symbols, consult them. */
353 sec
= find_pc_section (memaddr
);
356 arm_per_bfd
*data
= arm_bfd_data_key
.get (sec
->objfile
->obfd
);
359 unsigned int section_idx
= sec
->the_bfd_section
->index
;
360 arm_mapping_symbol_vec
&map
361 = data
->section_maps
[section_idx
];
363 /* Sort the vector on first use. */
364 if (!data
->section_maps_sorted
[section_idx
])
366 std::sort (map
.begin (), map
.end ());
367 data
->section_maps_sorted
[section_idx
] = true;
370 struct arm_mapping_symbol map_key
371 = { memaddr
- obj_section_addr (sec
), 0 };
372 arm_mapping_symbol_vec::const_iterator it
373 = std::lower_bound (map
.begin (), map
.end (), map_key
);
375 /* std::lower_bound finds the earliest ordered insertion
376 point. If the symbol at this position starts at this exact
377 address, we use that; otherwise, the preceding
378 mapping symbol covers this address. */
381 if (it
->value
== map_key
.value
)
384 *start
= it
->value
+ obj_section_addr (sec
);
389 if (it
> map
.begin ())
391 arm_mapping_symbol_vec::const_iterator prev_it
395 *start
= prev_it
->value
+ obj_section_addr (sec
);
396 return prev_it
->type
;
404 /* Determine if the program counter specified in MEMADDR is in a Thumb
405 function. This function should be called for addresses unrelated to
406 any executing frame; otherwise, prefer arm_frame_is_thumb. */
409 arm_pc_is_thumb (struct gdbarch
*gdbarch
, CORE_ADDR memaddr
)
411 struct bound_minimal_symbol sym
;
413 arm_displaced_step_closure
*dsc
414 = ((arm_displaced_step_closure
* )
415 get_displaced_step_closure_by_addr (memaddr
));
417 /* If checking the mode of displaced instruction in copy area, the mode
418 should be determined by instruction on the original address. */
421 displaced_debug_printf ("check mode of %.8lx instead of %.8lx",
422 (unsigned long) dsc
->insn_addr
,
423 (unsigned long) memaddr
);
424 memaddr
= dsc
->insn_addr
;
427 /* If bit 0 of the address is set, assume this is a Thumb address. */
428 if (IS_THUMB_ADDR (memaddr
))
431 /* If the user wants to override the symbol table, let him. */
432 if (strcmp (arm_force_mode_string
, "arm") == 0)
434 if (strcmp (arm_force_mode_string
, "thumb") == 0)
437 /* ARM v6-M and v7-M are always in Thumb mode. */
438 if (gdbarch_tdep (gdbarch
)->is_m
)
441 /* If there are mapping symbols, consult them. */
442 type
= arm_find_mapping_symbol (memaddr
, NULL
);
446 /* Thumb functions have a "special" bit set in minimal symbols. */
447 sym
= lookup_minimal_symbol_by_pc (memaddr
);
449 return (MSYMBOL_IS_SPECIAL (sym
.minsym
));
451 /* If the user wants to override the fallback mode, let them. */
452 if (strcmp (arm_fallback_mode_string
, "arm") == 0)
454 if (strcmp (arm_fallback_mode_string
, "thumb") == 0)
457 /* If we couldn't find any symbol, but we're talking to a running
458 target, then trust the current value of $cpsr. This lets
459 "display/i $pc" always show the correct mode (though if there is
460 a symbol table we will not reach here, so it still may not be
461 displayed in the mode it will be executed). */
462 if (target_has_registers ())
463 return arm_frame_is_thumb (get_current_frame ());
465 /* Otherwise we're out of luck; we assume ARM. */
469 /* Determine if the address specified equals any of these magic return
470 values, called EXC_RETURN, defined by the ARM v6-M, v7-M and v8-M
473 From ARMv6-M Reference Manual B1.5.8
474 Table B1-5 Exception return behavior
476 EXC_RETURN Return To Return Stack
477 0xFFFFFFF1 Handler mode Main
478 0xFFFFFFF9 Thread mode Main
479 0xFFFFFFFD Thread mode Process
481 From ARMv7-M Reference Manual B1.5.8
482 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
484 EXC_RETURN Return To Return Stack
485 0xFFFFFFF1 Handler mode Main
486 0xFFFFFFF9 Thread mode Main
487 0xFFFFFFFD Thread mode Process
489 Table B1-9 EXC_RETURN definition of exception return behavior, with
492 EXC_RETURN Return To Return Stack Frame Type
493 0xFFFFFFE1 Handler mode Main Extended
494 0xFFFFFFE9 Thread mode Main Extended
495 0xFFFFFFED Thread mode Process Extended
496 0xFFFFFFF1 Handler mode Main Basic
497 0xFFFFFFF9 Thread mode Main Basic
498 0xFFFFFFFD Thread mode Process Basic
500 For more details see "B1.5.8 Exception return behavior"
501 in both ARMv6-M and ARMv7-M Architecture Reference Manuals.
503 In the ARMv8-M Architecture Technical Reference also adds
504 for implementations without the Security Extension:
507 0xFFFFFFB0 Return to Handler mode.
508 0xFFFFFFB8 Return to Thread mode using the main stack.
509 0xFFFFFFBC Return to Thread mode using the process stack. */
512 arm_m_addr_is_magic (CORE_ADDR addr
)
516 /* Values from ARMv8-M Architecture Technical Reference. */
520 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
521 the exception return behavior. */
528 /* Address is magic. */
532 /* Address is not magic. */
537 /* Remove useless bits from addresses in a running program. */
539 arm_addr_bits_remove (struct gdbarch
*gdbarch
, CORE_ADDR val
)
541 /* On M-profile devices, do not strip the low bit from EXC_RETURN
542 (the magic exception return address). */
543 if (gdbarch_tdep (gdbarch
)->is_m
544 && arm_m_addr_is_magic (val
))
548 return UNMAKE_THUMB_ADDR (val
);
550 return (val
& 0x03fffffc);
553 /* Return 1 if PC is the start of a compiler helper function which
554 can be safely ignored during prologue skipping. IS_THUMB is true
555 if the function is known to be a Thumb function due to the way it
558 skip_prologue_function (struct gdbarch
*gdbarch
, CORE_ADDR pc
, int is_thumb
)
560 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
561 struct bound_minimal_symbol msym
;
563 msym
= lookup_minimal_symbol_by_pc (pc
);
564 if (msym
.minsym
!= NULL
565 && BMSYMBOL_VALUE_ADDRESS (msym
) == pc
566 && msym
.minsym
->linkage_name () != NULL
)
568 const char *name
= msym
.minsym
->linkage_name ();
570 /* The GNU linker's Thumb call stub to foo is named
572 if (strstr (name
, "_from_thumb") != NULL
)
575 /* On soft-float targets, __truncdfsf2 is called to convert promoted
576 arguments to their argument types in non-prototyped
578 if (startswith (name
, "__truncdfsf2"))
580 if (startswith (name
, "__aeabi_d2f"))
583 /* Internal functions related to thread-local storage. */
584 if (startswith (name
, "__tls_get_addr"))
586 if (startswith (name
, "__aeabi_read_tp"))
591 /* If we run against a stripped glibc, we may be unable to identify
592 special functions by name. Check for one important case,
593 __aeabi_read_tp, by comparing the *code* against the default
594 implementation (this is hand-written ARM assembler in glibc). */
597 && read_code_unsigned_integer (pc
, 4, byte_order_for_code
)
598 == 0xe3e00a0f /* mov r0, #0xffff0fff */
599 && read_code_unsigned_integer (pc
+ 4, 4, byte_order_for_code
)
600 == 0xe240f01f) /* sub pc, r0, #31 */
607 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
608 the first 16-bit of instruction, and INSN2 is the second 16-bit of
610 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
611 ((bits ((insn1), 0, 3) << 12) \
612 | (bits ((insn1), 10, 10) << 11) \
613 | (bits ((insn2), 12, 14) << 8) \
614 | bits ((insn2), 0, 7))
616 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
617 the 32-bit instruction. */
618 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
619 ((bits ((insn), 16, 19) << 12) \
620 | bits ((insn), 0, 11))
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.

   IMM is a 12-bit modified immediate (i:imm3:imm8).  When the top four
   bits select one of the fixed replication patterns (count < 8), the
   low byte is replicated into the indicated byte lanes; otherwise the
   value 0x80|imm[6:0] is rotated right by imm[11:7] (expressed here as
   a left shift by 32 - count).  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int count = imm >> 7;

  if (count < 8)
    switch (count / 2)
      {
      case 0:
	/* 00000000 00000000 00000000 abcdefgh  */
	return imm & 0xff;
      case 1:
	/* 00000000 abcdefgh 00000000 abcdefgh  */
	return (imm & 0xff) | ((imm & 0xff) << 16);
      case 2:
	/* abcdefgh 00000000 abcdefgh 00000000  */
	return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
      case 3:
	/* abcdefgh abcdefgh abcdefgh abcdefgh  */
	return (imm & 0xff) | ((imm & 0xff) << 8)
	  | ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
      }

  /* 1bcdefgh rotated right by COUNT bit positions.  */
  return (0x80 | (imm & 0x7f)) << (32 - count);
}
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  if (insn == 0x46bd)			/* mov sp, r7 */
    return 1;
  if ((insn & 0xff80) == 0xb000)	/* add sp, imm */
    return 1;
  if ((insn & 0xfe00) == 0xbc00)	/* pop <registers> */
    return 1;
  return 0;
}
657 /* Analyze a Thumb prologue, looking for a recognizable stack frame
658 and frame pointer. Scan until we encounter a store that could
659 clobber the stack frame unexpectedly, or an unknown instruction.
660 Return the last address which is definitely safe to skip for an
661 initial breakpoint. */
664 thumb_analyze_prologue (struct gdbarch
*gdbarch
,
665 CORE_ADDR start
, CORE_ADDR limit
,
666 struct arm_prologue_cache
*cache
)
668 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
669 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
673 CORE_ADDR unrecognized_pc
= 0;
675 for (i
= 0; i
< 16; i
++)
676 regs
[i
] = pv_register (i
, 0);
677 pv_area
stack (ARM_SP_REGNUM
, gdbarch_addr_bit (gdbarch
));
679 while (start
< limit
)
683 insn
= read_code_unsigned_integer (start
, 2, byte_order_for_code
);
685 if ((insn
& 0xfe00) == 0xb400) /* push { rlist } */
690 if (stack
.store_would_trash (regs
[ARM_SP_REGNUM
]))
693 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
694 whether to save LR (R14). */
695 mask
= (insn
& 0xff) | ((insn
& 0x100) << 6);
697 /* Calculate offsets of saved R0-R7 and LR. */
698 for (regno
= ARM_LR_REGNUM
; regno
>= 0; regno
--)
699 if (mask
& (1 << regno
))
701 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
],
703 stack
.store (regs
[ARM_SP_REGNUM
], 4, regs
[regno
]);
706 else if ((insn
& 0xff80) == 0xb080) /* sub sp, #imm */
708 offset
= (insn
& 0x7f) << 2; /* get scaled offset */
709 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
],
712 else if (thumb_instruction_restores_sp (insn
))
714 /* Don't scan past the epilogue. */
717 else if ((insn
& 0xf800) == 0xa800) /* add Rd, sp, #imm */
718 regs
[bits (insn
, 8, 10)] = pv_add_constant (regs
[ARM_SP_REGNUM
],
720 else if ((insn
& 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
721 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
))
722 regs
[bits (insn
, 0, 2)] = pv_add_constant (regs
[bits (insn
, 3, 5)],
724 else if ((insn
& 0xf800) == 0x3000 /* add Rd, #imm */
725 && pv_is_register (regs
[bits (insn
, 8, 10)], ARM_SP_REGNUM
))
726 regs
[bits (insn
, 8, 10)] = pv_add_constant (regs
[bits (insn
, 8, 10)],
728 else if ((insn
& 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
729 && pv_is_register (regs
[bits (insn
, 6, 8)], ARM_SP_REGNUM
)
730 && pv_is_constant (regs
[bits (insn
, 3, 5)]))
731 regs
[bits (insn
, 0, 2)] = pv_add (regs
[bits (insn
, 3, 5)],
732 regs
[bits (insn
, 6, 8)]);
733 else if ((insn
& 0xff00) == 0x4400 /* add Rd, Rm */
734 && pv_is_constant (regs
[bits (insn
, 3, 6)]))
736 int rd
= (bit (insn
, 7) << 3) + bits (insn
, 0, 2);
737 int rm
= bits (insn
, 3, 6);
738 regs
[rd
] = pv_add (regs
[rd
], regs
[rm
]);
740 else if ((insn
& 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
742 int dst_reg
= (insn
& 0x7) + ((insn
& 0x80) >> 4);
743 int src_reg
= (insn
& 0x78) >> 3;
744 regs
[dst_reg
] = regs
[src_reg
];
746 else if ((insn
& 0xf800) == 0x9000) /* str rd, [sp, #off] */
748 /* Handle stores to the stack. Normally pushes are used,
749 but with GCC -mtpcs-frame, there may be other stores
750 in the prologue to create the frame. */
751 int regno
= (insn
>> 8) & 0x7;
754 offset
= (insn
& 0xff) << 2;
755 addr
= pv_add_constant (regs
[ARM_SP_REGNUM
], offset
);
757 if (stack
.store_would_trash (addr
))
760 stack
.store (addr
, 4, regs
[regno
]);
762 else if ((insn
& 0xf800) == 0x6000) /* str rd, [rn, #off] */
764 int rd
= bits (insn
, 0, 2);
765 int rn
= bits (insn
, 3, 5);
768 offset
= bits (insn
, 6, 10) << 2;
769 addr
= pv_add_constant (regs
[rn
], offset
);
771 if (stack
.store_would_trash (addr
))
774 stack
.store (addr
, 4, regs
[rd
]);
776 else if (((insn
& 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
777 || (insn
& 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
778 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
))
779 /* Ignore stores of argument registers to the stack. */
781 else if ((insn
& 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
782 && pv_is_register (regs
[bits (insn
, 8, 10)], ARM_SP_REGNUM
))
783 /* Ignore block loads from the stack, potentially copying
784 parameters from memory. */
786 else if ((insn
& 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
787 || ((insn
& 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
788 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
)))
789 /* Similarly ignore single loads from the stack. */
791 else if ((insn
& 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
792 || (insn
& 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
793 /* Skip register copies, i.e. saves to another register
794 instead of the stack. */
796 else if ((insn
& 0xf800) == 0x2000) /* movs Rd, #imm */
797 /* Recognize constant loads; even with small stacks these are necessary
799 regs
[bits (insn
, 8, 10)] = pv_constant (bits (insn
, 0, 7));
800 else if ((insn
& 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
802 /* Constant pool loads, for the same reason. */
803 unsigned int constant
;
806 loc
= start
+ 4 + bits (insn
, 0, 7) * 4;
807 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
808 regs
[bits (insn
, 8, 10)] = pv_constant (constant
);
810 else if (thumb_insn_size (insn
) == 4) /* 32-bit Thumb-2 instructions. */
812 unsigned short inst2
;
814 inst2
= read_code_unsigned_integer (start
+ 2, 2,
815 byte_order_for_code
);
817 if ((insn
& 0xf800) == 0xf000 && (inst2
& 0xe800) == 0xe800)
819 /* BL, BLX. Allow some special function calls when
820 skipping the prologue; GCC generates these before
821 storing arguments to the stack. */
823 int j1
, j2
, imm1
, imm2
;
825 imm1
= sbits (insn
, 0, 10);
826 imm2
= bits (inst2
, 0, 10);
827 j1
= bit (inst2
, 13);
828 j2
= bit (inst2
, 11);
830 offset
= ((imm1
<< 12) + (imm2
<< 1));
831 offset
^= ((!j2
) << 22) | ((!j1
) << 23);
833 nextpc
= start
+ 4 + offset
;
834 /* For BLX make sure to clear the low bits. */
835 if (bit (inst2
, 12) == 0)
836 nextpc
= nextpc
& 0xfffffffc;
838 if (!skip_prologue_function (gdbarch
, nextpc
,
839 bit (inst2
, 12) != 0))
843 else if ((insn
& 0xffd0) == 0xe900 /* stmdb Rn{!},
845 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
847 pv_t addr
= regs
[bits (insn
, 0, 3)];
850 if (stack
.store_would_trash (addr
))
853 /* Calculate offsets of saved registers. */
854 for (regno
= ARM_LR_REGNUM
; regno
>= 0; regno
--)
855 if (inst2
& (1 << regno
))
857 addr
= pv_add_constant (addr
, -4);
858 stack
.store (addr
, 4, regs
[regno
]);
862 regs
[bits (insn
, 0, 3)] = addr
;
865 else if ((insn
& 0xff50) == 0xe940 /* strd Rt, Rt2,
867 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
869 int regno1
= bits (inst2
, 12, 15);
870 int regno2
= bits (inst2
, 8, 11);
871 pv_t addr
= regs
[bits (insn
, 0, 3)];
873 offset
= inst2
& 0xff;
875 addr
= pv_add_constant (addr
, offset
);
877 addr
= pv_add_constant (addr
, -offset
);
879 if (stack
.store_would_trash (addr
))
882 stack
.store (addr
, 4, regs
[regno1
]);
883 stack
.store (pv_add_constant (addr
, 4),
887 regs
[bits (insn
, 0, 3)] = addr
;
890 else if ((insn
& 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
891 && (inst2
& 0x0c00) == 0x0c00
892 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
894 int regno
= bits (inst2
, 12, 15);
895 pv_t addr
= regs
[bits (insn
, 0, 3)];
897 offset
= inst2
& 0xff;
899 addr
= pv_add_constant (addr
, offset
);
901 addr
= pv_add_constant (addr
, -offset
);
903 if (stack
.store_would_trash (addr
))
906 stack
.store (addr
, 4, regs
[regno
]);
909 regs
[bits (insn
, 0, 3)] = addr
;
912 else if ((insn
& 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
913 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
915 int regno
= bits (inst2
, 12, 15);
918 offset
= inst2
& 0xfff;
919 addr
= pv_add_constant (regs
[bits (insn
, 0, 3)], offset
);
921 if (stack
.store_would_trash (addr
))
924 stack
.store (addr
, 4, regs
[regno
]);
927 else if ((insn
& 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
928 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
929 /* Ignore stores of argument registers to the stack. */
932 else if ((insn
& 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
933 && (inst2
& 0x0d00) == 0x0c00
934 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
935 /* Ignore stores of argument registers to the stack. */
938 else if ((insn
& 0xffd0) == 0xe890 /* ldmia Rn[!],
940 && (inst2
& 0x8000) == 0x0000
941 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
942 /* Ignore block loads from the stack, potentially copying
943 parameters from memory. */
946 else if ((insn
& 0xff70) == 0xe950 /* ldrd Rt, Rt2,
948 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
949 /* Similarly ignore dual loads from the stack. */
952 else if ((insn
& 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
953 && (inst2
& 0x0d00) == 0x0c00
954 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
955 /* Similarly ignore single loads from the stack. */
958 else if ((insn
& 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
959 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
960 /* Similarly ignore single loads from the stack. */
963 else if ((insn
& 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
964 && (inst2
& 0x8000) == 0x0000)
966 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
967 | (bits (inst2
, 12, 14) << 8)
968 | bits (inst2
, 0, 7));
970 regs
[bits (inst2
, 8, 11)]
971 = pv_add_constant (regs
[bits (insn
, 0, 3)],
972 thumb_expand_immediate (imm
));
975 else if ((insn
& 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
976 && (inst2
& 0x8000) == 0x0000)
978 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
979 | (bits (inst2
, 12, 14) << 8)
980 | bits (inst2
, 0, 7));
982 regs
[bits (inst2
, 8, 11)]
983 = pv_add_constant (regs
[bits (insn
, 0, 3)], imm
);
986 else if ((insn
& 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
987 && (inst2
& 0x8000) == 0x0000)
989 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
990 | (bits (inst2
, 12, 14) << 8)
991 | bits (inst2
, 0, 7));
993 regs
[bits (inst2
, 8, 11)]
994 = pv_add_constant (regs
[bits (insn
, 0, 3)],
995 - (CORE_ADDR
) thumb_expand_immediate (imm
));
998 else if ((insn
& 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
999 && (inst2
& 0x8000) == 0x0000)
1001 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1002 | (bits (inst2
, 12, 14) << 8)
1003 | bits (inst2
, 0, 7));
1005 regs
[bits (inst2
, 8, 11)]
1006 = pv_add_constant (regs
[bits (insn
, 0, 3)], - (CORE_ADDR
) imm
);
1009 else if ((insn
& 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1011 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1012 | (bits (inst2
, 12, 14) << 8)
1013 | bits (inst2
, 0, 7));
1015 regs
[bits (inst2
, 8, 11)]
1016 = pv_constant (thumb_expand_immediate (imm
));
1019 else if ((insn
& 0xfbf0) == 0xf240) /* movw Rd, #const */
1022 = EXTRACT_MOVW_MOVT_IMM_T (insn
, inst2
);
1024 regs
[bits (inst2
, 8, 11)] = pv_constant (imm
);
1027 else if (insn
== 0xea5f /* mov.w Rd,Rm */
1028 && (inst2
& 0xf0f0) == 0)
1030 int dst_reg
= (inst2
& 0x0f00) >> 8;
1031 int src_reg
= inst2
& 0xf;
1032 regs
[dst_reg
] = regs
[src_reg
];
1035 else if ((insn
& 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1037 /* Constant pool loads. */
1038 unsigned int constant
;
1041 offset
= bits (inst2
, 0, 11);
1043 loc
= start
+ 4 + offset
;
1045 loc
= start
+ 4 - offset
;
1047 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
1048 regs
[bits (inst2
, 12, 15)] = pv_constant (constant
);
1051 else if ((insn
& 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1053 /* Constant pool loads. */
1054 unsigned int constant
;
1057 offset
= bits (inst2
, 0, 7) << 2;
1059 loc
= start
+ 4 + offset
;
1061 loc
= start
+ 4 - offset
;
1063 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
1064 regs
[bits (inst2
, 12, 15)] = pv_constant (constant
);
1066 constant
= read_memory_unsigned_integer (loc
+ 4, 4, byte_order
);
1067 regs
[bits (inst2
, 8, 11)] = pv_constant (constant
);
1070 else if (thumb2_instruction_changes_pc (insn
, inst2
))
1072 /* Don't scan past anything that might change control flow. */
1077 /* The optimizer might shove anything into the prologue,
1078 so we just skip what we don't recognize. */
1079 unrecognized_pc
= start
;
1084 else if (thumb_instruction_changes_pc (insn
))
1086 /* Don't scan past anything that might change control flow. */
1091 /* The optimizer might shove anything into the prologue,
1092 so we just skip what we don't recognize. */
1093 unrecognized_pc
= start
;
1100 fprintf_unfiltered (gdb_stdlog
, "Prologue scan stopped at %s\n",
1101 paddress (gdbarch
, start
));
1103 if (unrecognized_pc
== 0)
1104 unrecognized_pc
= start
;
1107 return unrecognized_pc
;
1109 if (pv_is_register (regs
[ARM_FP_REGNUM
], ARM_SP_REGNUM
))
1111 /* Frame pointer is fp. Frame size is constant. */
1112 cache
->framereg
= ARM_FP_REGNUM
;
1113 cache
->framesize
= -regs
[ARM_FP_REGNUM
].k
;
1115 else if (pv_is_register (regs
[THUMB_FP_REGNUM
], ARM_SP_REGNUM
))
1117 /* Frame pointer is r7. Frame size is constant. */
1118 cache
->framereg
= THUMB_FP_REGNUM
;
1119 cache
->framesize
= -regs
[THUMB_FP_REGNUM
].k
;
1123 /* Try the stack pointer... this is a bit desperate. */
1124 cache
->framereg
= ARM_SP_REGNUM
;
1125 cache
->framesize
= -regs
[ARM_SP_REGNUM
].k
;
1128 for (i
= 0; i
< 16; i
++)
1129 if (stack
.find_reg (gdbarch
, i
, &offset
))
1130 cache
->saved_regs
[i
].addr
= offset
;
1132 return unrecognized_pc
;
1136 /* Try to analyze the instructions starting from PC, which load symbol
1137 __stack_chk_guard. Return the address of instruction after loading this
1138 symbol, set the dest register number to *BASEREG, and set the size of
1139 instructions for loading symbol in OFFSET. Return 0 if instructions are
1143 arm_analyze_load_stack_chk_guard(CORE_ADDR pc
, struct gdbarch
*gdbarch
,
1144 unsigned int *destreg
, int *offset
)
1146 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1147 int is_thumb
= arm_pc_is_thumb (gdbarch
, pc
);
1148 unsigned int low
, high
, address
;
1153 unsigned short insn1
1154 = read_code_unsigned_integer (pc
, 2, byte_order_for_code
);
1156 if ((insn1
& 0xf800) == 0x4800) /* ldr Rd, #immed */
1158 *destreg
= bits (insn1
, 8, 10);
1160 address
= (pc
& 0xfffffffc) + 4 + (bits (insn1
, 0, 7) << 2);
1161 address
= read_memory_unsigned_integer (address
, 4,
1162 byte_order_for_code
);
1164 else if ((insn1
& 0xfbf0) == 0xf240) /* movw Rd, #const */
1166 unsigned short insn2
1167 = read_code_unsigned_integer (pc
+ 2, 2, byte_order_for_code
);
1169 low
= EXTRACT_MOVW_MOVT_IMM_T (insn1
, insn2
);
1172 = read_code_unsigned_integer (pc
+ 4, 2, byte_order_for_code
);
1174 = read_code_unsigned_integer (pc
+ 6, 2, byte_order_for_code
);
1176 /* movt Rd, #const */
1177 if ((insn1
& 0xfbc0) == 0xf2c0)
1179 high
= EXTRACT_MOVW_MOVT_IMM_T (insn1
, insn2
);
1180 *destreg
= bits (insn2
, 8, 11);
1182 address
= (high
<< 16 | low
);
1189 = read_code_unsigned_integer (pc
, 4, byte_order_for_code
);
1191 if ((insn
& 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1193 address
= bits (insn
, 0, 11) + pc
+ 8;
1194 address
= read_memory_unsigned_integer (address
, 4,
1195 byte_order_for_code
);
1197 *destreg
= bits (insn
, 12, 15);
1200 else if ((insn
& 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1202 low
= EXTRACT_MOVW_MOVT_IMM_A (insn
);
1205 = read_code_unsigned_integer (pc
+ 4, 4, byte_order_for_code
);
1207 if ((insn
& 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1209 high
= EXTRACT_MOVW_MOVT_IMM_A (insn
);
1210 *destreg
= bits (insn
, 12, 15);
1212 address
= (high
<< 16 | low
);
1220 /* Try to skip a sequence of instructions used for stack protector. If PC
1221 points to the first instruction of this sequence, return the address of
1222 first instruction after this sequence, otherwise, return original PC.
1224 On arm, this sequence of instructions is composed of mainly three steps,
1225 Step 1: load symbol __stack_chk_guard,
1226 Step 2: load from address of __stack_chk_guard,
1227 Step 3: store it to somewhere else.
1229 Usually, instructions on step 2 and step 3 are the same on various ARM
1230 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1231 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1232 instructions in step 1 vary from different ARM architectures. On ARMv7,
1235 movw Rn, #:lower16:__stack_chk_guard
1236 movt Rn, #:upper16:__stack_chk_guard
1243 .word __stack_chk_guard
1245 Since ldr/str is a very popular instruction, we can't use them as
1246 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1247 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1248 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1251 arm_skip_stack_protector(CORE_ADDR pc
, struct gdbarch
*gdbarch
)
1253 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1254 unsigned int basereg
;
1255 struct bound_minimal_symbol stack_chk_guard
;
1257 int is_thumb
= arm_pc_is_thumb (gdbarch
, pc
);
1260 /* Try to parse the instructions in Step 1. */
1261 addr
= arm_analyze_load_stack_chk_guard (pc
, gdbarch
,
1266 stack_chk_guard
= lookup_minimal_symbol_by_pc (addr
);
1267 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1268 Otherwise, this sequence cannot be for stack protector. */
1269 if (stack_chk_guard
.minsym
== NULL
1270 || !startswith (stack_chk_guard
.minsym
->linkage_name (), "__stack_chk_guard"))
1275 unsigned int destreg
;
1277 = read_code_unsigned_integer (pc
+ offset
, 2, byte_order_for_code
);
1279 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1280 if ((insn
& 0xf800) != 0x6800)
1282 if (bits (insn
, 3, 5) != basereg
)
1284 destreg
= bits (insn
, 0, 2);
1286 insn
= read_code_unsigned_integer (pc
+ offset
+ 2, 2,
1287 byte_order_for_code
);
1288 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1289 if ((insn
& 0xf800) != 0x6000)
1291 if (destreg
!= bits (insn
, 0, 2))
1296 unsigned int destreg
;
1298 = read_code_unsigned_integer (pc
+ offset
, 4, byte_order_for_code
);
1300 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1301 if ((insn
& 0x0e500000) != 0x04100000)
1303 if (bits (insn
, 16, 19) != basereg
)
1305 destreg
= bits (insn
, 12, 15);
1306 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1307 insn
= read_code_unsigned_integer (pc
+ offset
+ 4,
1308 4, byte_order_for_code
);
1309 if ((insn
& 0x0e500000) != 0x04000000)
1311 if (bits (insn
, 12, 15) != destreg
)
1314 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1317 return pc
+ offset
+ 4;
1319 return pc
+ offset
+ 8;
1322 /* Advance the PC across any function entry prologue instructions to
1323 reach some "real" code.
1325 The APCS (ARM Procedure Call Standard) defines the following
1329 [stmfd sp!, {a1,a2,a3,a4}]
1330 stmfd sp!, {...,fp,ip,lr,pc}
1331 [stfe f7, [sp, #-12]!]
1332 [stfe f6, [sp, #-12]!]
1333 [stfe f5, [sp, #-12]!]
1334 [stfe f4, [sp, #-12]!]
1335 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1338 arm_skip_prologue (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
1340 CORE_ADDR func_addr
, limit_pc
;
1342 /* See if we can determine the end of the prologue via the symbol table.
1343 If so, then return either PC, or the PC after the prologue, whichever
1345 if (find_pc_partial_function (pc
, NULL
, &func_addr
, NULL
))
1347 CORE_ADDR post_prologue_pc
1348 = skip_prologue_using_sal (gdbarch
, func_addr
);
1349 struct compunit_symtab
*cust
= find_pc_compunit_symtab (func_addr
);
1351 if (post_prologue_pc
)
1353 = arm_skip_stack_protector (post_prologue_pc
, gdbarch
);
1356 /* GCC always emits a line note before the prologue and another
1357 one after, even if the two are at the same address or on the
1358 same line. Take advantage of this so that we do not need to
1359 know every instruction that might appear in the prologue. We
1360 will have producer information for most binaries; if it is
1361 missing (e.g. for -gstabs), assuming the GNU tools. */
1362 if (post_prologue_pc
1364 || COMPUNIT_PRODUCER (cust
) == NULL
1365 || startswith (COMPUNIT_PRODUCER (cust
), "GNU ")
1366 || producer_is_llvm (COMPUNIT_PRODUCER (cust
))))
1367 return post_prologue_pc
;
1369 if (post_prologue_pc
!= 0)
1371 CORE_ADDR analyzed_limit
;
1373 /* For non-GCC compilers, make sure the entire line is an
1374 acceptable prologue; GDB will round this function's
1375 return value up to the end of the following line so we
1376 can not skip just part of a line (and we do not want to).
1378 RealView does not treat the prologue specially, but does
1379 associate prologue code with the opening brace; so this
1380 lets us skip the first line if we think it is the opening
1382 if (arm_pc_is_thumb (gdbarch
, func_addr
))
1383 analyzed_limit
= thumb_analyze_prologue (gdbarch
, func_addr
,
1384 post_prologue_pc
, NULL
);
1386 analyzed_limit
= arm_analyze_prologue (gdbarch
, func_addr
,
1387 post_prologue_pc
, NULL
);
1389 if (analyzed_limit
!= post_prologue_pc
)
1392 return post_prologue_pc
;
1396 /* Can't determine prologue from the symbol table, need to examine
1399 /* Find an upper limit on the function prologue using the debug
1400 information. If the debug information could not be used to provide
1401 that bound, then use an arbitrary large number as the upper bound. */
1402 /* Like arm_scan_prologue, stop no later than pc + 64. */
1403 limit_pc
= skip_prologue_using_sal (gdbarch
, pc
);
1405 limit_pc
= pc
+ 64; /* Magic. */
1408 /* Check if this is Thumb code. */
1409 if (arm_pc_is_thumb (gdbarch
, pc
))
1410 return thumb_analyze_prologue (gdbarch
, pc
, limit_pc
, NULL
);
1412 return arm_analyze_prologue (gdbarch
, pc
, limit_pc
, NULL
);
1416 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1417 This function decodes a Thumb function prologue to determine:
1418 1) the size of the stack frame
1419 2) which registers are saved on it
1420 3) the offsets of saved regs
1421 4) the offset from the stack pointer to the frame pointer
1423 A typical Thumb function prologue would create this stack frame
1424 (offsets relative to FP)
1425 old SP -> 24 stack parameters
1428 R7 -> 0 local variables (16 bytes)
1429 SP -> -12 additional stack space (12 bytes)
1430 The frame size would thus be 36 bytes, and the frame offset would be
1431 12 bytes. The frame register is R7.
1433 The comments for thumb_skip_prolog() describe the algorithm we use
1434 to detect the end of the prolog. */
1438 thumb_scan_prologue (struct gdbarch
*gdbarch
, CORE_ADDR prev_pc
,
1439 CORE_ADDR block_addr
, struct arm_prologue_cache
*cache
)
1441 CORE_ADDR prologue_start
;
1442 CORE_ADDR prologue_end
;
1444 if (find_pc_partial_function (block_addr
, NULL
, &prologue_start
,
1447 /* See comment in arm_scan_prologue for an explanation of
1449 if (prologue_end
> prologue_start
+ 64)
1451 prologue_end
= prologue_start
+ 64;
1455 /* We're in the boondocks: we have no idea where the start of the
1459 prologue_end
= std::min (prologue_end
, prev_pc
);
1461 thumb_analyze_prologue (gdbarch
, prologue_start
, prologue_end
, cache
);
1464 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1468 arm_instruction_restores_sp (unsigned int insn
)
1470 if (bits (insn
, 28, 31) != INST_NV
)
1472 if ((insn
& 0x0df0f000) == 0x0080d000
1473 /* ADD SP (register or immediate). */
1474 || (insn
& 0x0df0f000) == 0x0040d000
1475 /* SUB SP (register or immediate). */
1476 || (insn
& 0x0ffffff0) == 0x01a0d000
1478 || (insn
& 0x0fff0000) == 0x08bd0000
1480 || (insn
& 0x0fff0000) == 0x049d0000)
1481 /* POP of a single register. */
1488 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1489 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1490 fill it in. Return the first address not recognized as a prologue
1493 We recognize all the instructions typically found in ARM prologues,
1494 plus harmless instructions which can be skipped (either for analysis
1495 purposes, or a more restrictive set that can be skipped when finding
1496 the end of the prologue). */
1499 arm_analyze_prologue (struct gdbarch
*gdbarch
,
1500 CORE_ADDR prologue_start
, CORE_ADDR prologue_end
,
1501 struct arm_prologue_cache
*cache
)
1503 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1505 CORE_ADDR offset
, current_pc
;
1506 pv_t regs
[ARM_FPS_REGNUM
];
1507 CORE_ADDR unrecognized_pc
= 0;
1509 /* Search the prologue looking for instructions that set up the
1510 frame pointer, adjust the stack pointer, and save registers.
1512 Be careful, however, and if it doesn't look like a prologue,
1513 don't try to scan it. If, for instance, a frameless function
1514 begins with stmfd sp!, then we will tell ourselves there is
1515 a frame, which will confuse stack traceback, as well as "finish"
1516 and other operations that rely on a knowledge of the stack
1519 for (regno
= 0; regno
< ARM_FPS_REGNUM
; regno
++)
1520 regs
[regno
] = pv_register (regno
, 0);
1521 pv_area
stack (ARM_SP_REGNUM
, gdbarch_addr_bit (gdbarch
));
1523 for (current_pc
= prologue_start
;
1524 current_pc
< prologue_end
;
1528 = read_code_unsigned_integer (current_pc
, 4, byte_order_for_code
);
1530 if (insn
== 0xe1a0c00d) /* mov ip, sp */
1532 regs
[ARM_IP_REGNUM
] = regs
[ARM_SP_REGNUM
];
1535 else if ((insn
& 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1536 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1538 unsigned imm
= insn
& 0xff; /* immediate value */
1539 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1540 int rd
= bits (insn
, 12, 15);
1541 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1542 regs
[rd
] = pv_add_constant (regs
[bits (insn
, 16, 19)], imm
);
1545 else if ((insn
& 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1546 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1548 unsigned imm
= insn
& 0xff; /* immediate value */
1549 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1550 int rd
= bits (insn
, 12, 15);
1551 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1552 regs
[rd
] = pv_add_constant (regs
[bits (insn
, 16, 19)], -imm
);
1555 else if ((insn
& 0xffff0fff) == 0xe52d0004) /* str Rd,
1558 if (stack
.store_would_trash (regs
[ARM_SP_REGNUM
]))
1560 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -4);
1561 stack
.store (regs
[ARM_SP_REGNUM
], 4,
1562 regs
[bits (insn
, 12, 15)]);
1565 else if ((insn
& 0xffff0000) == 0xe92d0000)
1566 /* stmfd sp!, {..., fp, ip, lr, pc}
1568 stmfd sp!, {a1, a2, a3, a4} */
1570 int mask
= insn
& 0xffff;
1572 if (stack
.store_would_trash (regs
[ARM_SP_REGNUM
]))
1575 /* Calculate offsets of saved registers. */
1576 for (regno
= ARM_PC_REGNUM
; regno
>= 0; regno
--)
1577 if (mask
& (1 << regno
))
1580 = pv_add_constant (regs
[ARM_SP_REGNUM
], -4);
1581 stack
.store (regs
[ARM_SP_REGNUM
], 4, regs
[regno
]);
1584 else if ((insn
& 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1585 || (insn
& 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1586 || (insn
& 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1588 /* No need to add this to saved_regs -- it's just an arg reg. */
1591 else if ((insn
& 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1592 || (insn
& 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1593 || (insn
& 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1595 /* No need to add this to saved_regs -- it's just an arg reg. */
1598 else if ((insn
& 0xfff00000) == 0xe8800000 /* stm Rn,
1600 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1602 /* No need to add this to saved_regs -- it's just arg regs. */
1605 else if ((insn
& 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1607 unsigned imm
= insn
& 0xff; /* immediate value */
1608 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1609 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1610 regs
[ARM_FP_REGNUM
] = pv_add_constant (regs
[ARM_IP_REGNUM
], -imm
);
1612 else if ((insn
& 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1614 unsigned imm
= insn
& 0xff; /* immediate value */
1615 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1616 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1617 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -imm
);
1619 else if ((insn
& 0xffff7fff) == 0xed6d0103 /* stfe f?,
1621 && gdbarch_tdep (gdbarch
)->have_fpa_registers
)
1623 if (stack
.store_would_trash (regs
[ARM_SP_REGNUM
]))
1626 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -12);
1627 regno
= ARM_F0_REGNUM
+ ((insn
>> 12) & 0x07);
1628 stack
.store (regs
[ARM_SP_REGNUM
], 12, regs
[regno
]);
1630 else if ((insn
& 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1632 && gdbarch_tdep (gdbarch
)->have_fpa_registers
)
1634 int n_saved_fp_regs
;
1635 unsigned int fp_start_reg
, fp_bound_reg
;
1637 if (stack
.store_would_trash (regs
[ARM_SP_REGNUM
]))
1640 if ((insn
& 0x800) == 0x800) /* N0 is set */
1642 if ((insn
& 0x40000) == 0x40000) /* N1 is set */
1643 n_saved_fp_regs
= 3;
1645 n_saved_fp_regs
= 1;
1649 if ((insn
& 0x40000) == 0x40000) /* N1 is set */
1650 n_saved_fp_regs
= 2;
1652 n_saved_fp_regs
= 4;
1655 fp_start_reg
= ARM_F0_REGNUM
+ ((insn
>> 12) & 0x7);
1656 fp_bound_reg
= fp_start_reg
+ n_saved_fp_regs
;
1657 for (; fp_start_reg
< fp_bound_reg
; fp_start_reg
++)
1659 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -12);
1660 stack
.store (regs
[ARM_SP_REGNUM
], 12,
1661 regs
[fp_start_reg
++]);
1664 else if ((insn
& 0xff000000) == 0xeb000000 && cache
== NULL
) /* bl */
1666 /* Allow some special function calls when skipping the
1667 prologue; GCC generates these before storing arguments to
1669 CORE_ADDR dest
= BranchDest (current_pc
, insn
);
1671 if (skip_prologue_function (gdbarch
, dest
, 0))
1676 else if ((insn
& 0xf0000000) != 0xe0000000)
1677 break; /* Condition not true, exit early. */
1678 else if (arm_instruction_changes_pc (insn
))
1679 /* Don't scan past anything that might change control flow. */
1681 else if (arm_instruction_restores_sp (insn
))
1683 /* Don't scan past the epilogue. */
1686 else if ((insn
& 0xfe500000) == 0xe8100000 /* ldm */
1687 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1688 /* Ignore block loads from the stack, potentially copying
1689 parameters from memory. */
1691 else if ((insn
& 0xfc500000) == 0xe4100000
1692 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1693 /* Similarly ignore single loads from the stack. */
1695 else if ((insn
& 0xffff0ff0) == 0xe1a00000)
1696 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1697 register instead of the stack. */
1701 /* The optimizer might shove anything into the prologue, if
1702 we build up cache (cache != NULL) from scanning prologue,
1703 we just skip what we don't recognize and scan further to
1704 make cache as complete as possible. However, if we skip
1705 prologue, we'll stop immediately on unrecognized
1707 unrecognized_pc
= current_pc
;
1715 if (unrecognized_pc
== 0)
1716 unrecognized_pc
= current_pc
;
1720 int framereg
, framesize
;
1722 /* The frame size is just the distance from the frame register
1723 to the original stack pointer. */
1724 if (pv_is_register (regs
[ARM_FP_REGNUM
], ARM_SP_REGNUM
))
1726 /* Frame pointer is fp. */
1727 framereg
= ARM_FP_REGNUM
;
1728 framesize
= -regs
[ARM_FP_REGNUM
].k
;
1732 /* Try the stack pointer... this is a bit desperate. */
1733 framereg
= ARM_SP_REGNUM
;
1734 framesize
= -regs
[ARM_SP_REGNUM
].k
;
1737 cache
->framereg
= framereg
;
1738 cache
->framesize
= framesize
;
1740 for (regno
= 0; regno
< ARM_FPS_REGNUM
; regno
++)
1741 if (stack
.find_reg (gdbarch
, regno
, &offset
))
1742 cache
->saved_regs
[regno
].addr
= offset
;
1746 fprintf_unfiltered (gdb_stdlog
, "Prologue scan stopped at %s\n",
1747 paddress (gdbarch
, unrecognized_pc
));
1749 return unrecognized_pc
;
1753 arm_scan_prologue (struct frame_info
*this_frame
,
1754 struct arm_prologue_cache
*cache
)
1756 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
1757 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
1758 CORE_ADDR prologue_start
, prologue_end
;
1759 CORE_ADDR prev_pc
= get_frame_pc (this_frame
);
1760 CORE_ADDR block_addr
= get_frame_address_in_block (this_frame
);
1762 /* Assume there is no frame until proven otherwise. */
1763 cache
->framereg
= ARM_SP_REGNUM
;
1764 cache
->framesize
= 0;
1766 /* Check for Thumb prologue. */
1767 if (arm_frame_is_thumb (this_frame
))
1769 thumb_scan_prologue (gdbarch
, prev_pc
, block_addr
, cache
);
1773 /* Find the function prologue. If we can't find the function in
1774 the symbol table, peek in the stack frame to find the PC. */
1775 if (find_pc_partial_function (block_addr
, NULL
, &prologue_start
,
1778 /* One way to find the end of the prologue (which works well
1779 for unoptimized code) is to do the following:
1781 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1784 prologue_end = prev_pc;
1785 else if (sal.end < prologue_end)
1786 prologue_end = sal.end;
1788 This mechanism is very accurate so long as the optimizer
1789 doesn't move any instructions from the function body into the
1790 prologue. If this happens, sal.end will be the last
1791 instruction in the first hunk of prologue code just before
1792 the first instruction that the scheduler has moved from
1793 the body to the prologue.
1795 In order to make sure that we scan all of the prologue
1796 instructions, we use a slightly less accurate mechanism which
1797 may scan more than necessary. To help compensate for this
1798 lack of accuracy, the prologue scanning loop below contains
1799 several clauses which'll cause the loop to terminate early if
1800 an implausible prologue instruction is encountered.
1806 is a suitable endpoint since it accounts for the largest
1807 possible prologue plus up to five instructions inserted by
1810 if (prologue_end
> prologue_start
+ 64)
1812 prologue_end
= prologue_start
+ 64; /* See above. */
1817 /* We have no symbol information. Our only option is to assume this
1818 function has a standard stack frame and the normal frame register.
1819 Then, we can find the value of our frame pointer on entrance to
1820 the callee (or at the present moment if this is the innermost frame).
1821 The value stored there should be the address of the stmfd + 8. */
1822 CORE_ADDR frame_loc
;
1823 ULONGEST return_value
;
1825 /* AAPCS does not use a frame register, so we can abort here. */
1826 if (gdbarch_tdep (gdbarch
)->arm_abi
== ARM_ABI_AAPCS
)
1829 frame_loc
= get_frame_register_unsigned (this_frame
, ARM_FP_REGNUM
);
1830 if (!safe_read_memory_unsigned_integer (frame_loc
, 4, byte_order
,
1835 prologue_start
= gdbarch_addr_bits_remove
1836 (gdbarch
, return_value
) - 8;
1837 prologue_end
= prologue_start
+ 64; /* See above. */
1841 if (prev_pc
< prologue_end
)
1842 prologue_end
= prev_pc
;
1844 arm_analyze_prologue (gdbarch
, prologue_start
, prologue_end
, cache
);
1847 static struct arm_prologue_cache
*
1848 arm_make_prologue_cache (struct frame_info
*this_frame
)
1851 struct arm_prologue_cache
*cache
;
1852 CORE_ADDR unwound_fp
;
1854 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
1855 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
1857 arm_scan_prologue (this_frame
, cache
);
1859 unwound_fp
= get_frame_register_unsigned (this_frame
, cache
->framereg
);
1860 if (unwound_fp
== 0)
1863 cache
->prev_sp
= unwound_fp
+ cache
->framesize
;
1865 /* Calculate actual addresses of saved registers using offsets
1866 determined by arm_scan_prologue. */
1867 for (reg
= 0; reg
< gdbarch_num_regs (get_frame_arch (this_frame
)); reg
++)
1868 if (trad_frame_addr_p (cache
->saved_regs
, reg
))
1869 cache
->saved_regs
[reg
].addr
+= cache
->prev_sp
;
1874 /* Implementation of the stop_reason hook for arm_prologue frames. */
1876 static enum unwind_stop_reason
1877 arm_prologue_unwind_stop_reason (struct frame_info
*this_frame
,
1880 struct arm_prologue_cache
*cache
;
1883 if (*this_cache
== NULL
)
1884 *this_cache
= arm_make_prologue_cache (this_frame
);
1885 cache
= (struct arm_prologue_cache
*) *this_cache
;
1887 /* This is meant to halt the backtrace at "_start". */
1888 pc
= get_frame_pc (this_frame
);
1889 if (pc
<= gdbarch_tdep (get_frame_arch (this_frame
))->lowest_pc
)
1890 return UNWIND_OUTERMOST
;
1892 /* If we've hit a wall, stop. */
1893 if (cache
->prev_sp
== 0)
1894 return UNWIND_OUTERMOST
;
1896 return UNWIND_NO_REASON
;
1899 /* Our frame ID for a normal frame is the current function's starting PC
1900 and the caller's SP when we were called. */
1903 arm_prologue_this_id (struct frame_info
*this_frame
,
1905 struct frame_id
*this_id
)
1907 struct arm_prologue_cache
*cache
;
1911 if (*this_cache
== NULL
)
1912 *this_cache
= arm_make_prologue_cache (this_frame
);
1913 cache
= (struct arm_prologue_cache
*) *this_cache
;
1915 /* Use function start address as part of the frame ID. If we cannot
1916 identify the start address (due to missing symbol information),
1917 fall back to just using the current PC. */
1918 pc
= get_frame_pc (this_frame
);
1919 func
= get_frame_func (this_frame
);
1923 id
= frame_id_build (cache
->prev_sp
, func
);
1927 static struct value
*
1928 arm_prologue_prev_register (struct frame_info
*this_frame
,
1932 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
1933 struct arm_prologue_cache
*cache
;
1935 if (*this_cache
== NULL
)
1936 *this_cache
= arm_make_prologue_cache (this_frame
);
1937 cache
= (struct arm_prologue_cache
*) *this_cache
;
1939 /* If we are asked to unwind the PC, then we need to return the LR
1940 instead. The prologue may save PC, but it will point into this
1941 frame's prologue, not the next frame's resume location. Also
1942 strip the saved T bit. A valid LR may have the low bit set, but
1943 a valid PC never does. */
1944 if (prev_regnum
== ARM_PC_REGNUM
)
1948 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
1949 return frame_unwind_got_constant (this_frame
, prev_regnum
,
1950 arm_addr_bits_remove (gdbarch
, lr
));
1953 /* SP is generally not saved to the stack, but this frame is
1954 identified by the next frame's stack pointer at the time of the call.
1955 The value was already reconstructed into PREV_SP. */
1956 if (prev_regnum
== ARM_SP_REGNUM
)
1957 return frame_unwind_got_constant (this_frame
, prev_regnum
, cache
->prev_sp
);
1959 /* The CPSR may have been changed by the call instruction and by the
1960 called function. The only bit we can reconstruct is the T bit,
1961 by checking the low bit of LR as of the call. This is a reliable
1962 indicator of Thumb-ness except for some ARM v4T pre-interworking
1963 Thumb code, which could get away with a clear low bit as long as
1964 the called function did not use bx. Guess that all other
1965 bits are unchanged; the condition flags are presumably lost,
1966 but the processor status is likely valid. */
1967 if (prev_regnum
== ARM_PS_REGNUM
)
1970 ULONGEST t_bit
= arm_psr_thumb_bit (gdbarch
);
1972 cpsr
= get_frame_register_unsigned (this_frame
, prev_regnum
);
1973 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
1974 if (IS_THUMB_ADDR (lr
))
1978 return frame_unwind_got_constant (this_frame
, prev_regnum
, cpsr
);
1981 return trad_frame_get_prev_register (this_frame
, cache
->saved_regs
,
1985 struct frame_unwind arm_prologue_unwind
= {
1987 arm_prologue_unwind_stop_reason
,
1988 arm_prologue_this_id
,
1989 arm_prologue_prev_register
,
1991 default_frame_sniffer
1994 /* Maintain a list of ARM exception table entries per objfile, similar to the
1995 list of mapping symbols. We only cache entries for standard ARM-defined
1996 personality routines; the cache will contain only the frame unwinding
1997 instructions associated with the entry (not the descriptors). */
1999 struct arm_exidx_entry
2004 bool operator< (const arm_exidx_entry
&other
) const
2006 return addr
< other
.addr
;
2010 struct arm_exidx_data
2012 std::vector
<std::vector
<arm_exidx_entry
>> section_maps
;
2015 /* Per-BFD key to store exception handling information. */
2016 static const struct bfd_key
<arm_exidx_data
> arm_exidx_data_key
;
2018 static struct obj_section
*
2019 arm_obj_section_from_vma (struct objfile
*objfile
, bfd_vma vma
)
2021 struct obj_section
*osect
;
2023 ALL_OBJFILE_OSECTIONS (objfile
, osect
)
2024 if (bfd_section_flags (osect
->the_bfd_section
) & SEC_ALLOC
)
2026 bfd_vma start
, size
;
2027 start
= bfd_section_vma (osect
->the_bfd_section
);
2028 size
= bfd_section_size (osect
->the_bfd_section
);
2030 if (start
<= vma
&& vma
< start
+ size
)
2037 /* Parse contents of exception table and exception index sections
2038 of OBJFILE, and fill in the exception table entry cache.
2040 For each entry that refers to a standard ARM-defined personality
2041 routine, extract the frame unwinding instructions (from either
2042 the index or the table section). The unwinding instructions
2044 - extracting them from the rest of the table data
2045 - converting to host endianness
2046 - appending the implicit 0xb0 ("Finish") code
2048 The extracted and normalized instructions are stored for later
2049 retrieval by the arm_find_exidx_entry routine. */
2052 arm_exidx_new_objfile (struct objfile
*objfile
)
2054 struct arm_exidx_data
*data
;
2055 asection
*exidx
, *extab
;
2056 bfd_vma exidx_vma
= 0, extab_vma
= 0;
2059 /* If we've already touched this file, do nothing. */
2060 if (!objfile
|| arm_exidx_data_key
.get (objfile
->obfd
) != NULL
)
2063 /* Read contents of exception table and index. */
2064 exidx
= bfd_get_section_by_name (objfile
->obfd
, ELF_STRING_ARM_unwind
);
2065 gdb::byte_vector exidx_data
;
2068 exidx_vma
= bfd_section_vma (exidx
);
2069 exidx_data
.resize (bfd_section_size (exidx
));
2071 if (!bfd_get_section_contents (objfile
->obfd
, exidx
,
2072 exidx_data
.data (), 0,
2073 exidx_data
.size ()))
2077 extab
= bfd_get_section_by_name (objfile
->obfd
, ".ARM.extab");
2078 gdb::byte_vector extab_data
;
2081 extab_vma
= bfd_section_vma (extab
);
2082 extab_data
.resize (bfd_section_size (extab
));
2084 if (!bfd_get_section_contents (objfile
->obfd
, extab
,
2085 extab_data
.data (), 0,
2086 extab_data
.size ()))
2090 /* Allocate exception table data structure. */
2091 data
= arm_exidx_data_key
.emplace (objfile
->obfd
);
2092 data
->section_maps
.resize (objfile
->obfd
->section_count
);
2094 /* Fill in exception table. */
2095 for (i
= 0; i
< exidx_data
.size () / 8; i
++)
2097 struct arm_exidx_entry new_exidx_entry
;
2098 bfd_vma idx
= bfd_h_get_32 (objfile
->obfd
, exidx_data
.data () + i
* 8);
2099 bfd_vma val
= bfd_h_get_32 (objfile
->obfd
,
2100 exidx_data
.data () + i
* 8 + 4);
2101 bfd_vma addr
= 0, word
= 0;
2102 int n_bytes
= 0, n_words
= 0;
2103 struct obj_section
*sec
;
2104 gdb_byte
*entry
= NULL
;
2106 /* Extract address of start of function. */
2107 idx
= ((idx
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2108 idx
+= exidx_vma
+ i
* 8;
2110 /* Find section containing function and compute section offset. */
2111 sec
= arm_obj_section_from_vma (objfile
, idx
);
2114 idx
-= bfd_section_vma (sec
->the_bfd_section
);
2116 /* Determine address of exception table entry. */
2119 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2121 else if ((val
& 0xff000000) == 0x80000000)
2123 /* Exception table entry embedded in .ARM.exidx
2124 -- must be short form. */
2128 else if (!(val
& 0x80000000))
2130 /* Exception table entry in .ARM.extab. */
2131 addr
= ((val
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2132 addr
+= exidx_vma
+ i
* 8 + 4;
2134 if (addr
>= extab_vma
&& addr
+ 4 <= extab_vma
+ extab_data
.size ())
2136 word
= bfd_h_get_32 (objfile
->obfd
,
2137 extab_data
.data () + addr
- extab_vma
);
2140 if ((word
& 0xff000000) == 0x80000000)
2145 else if ((word
& 0xff000000) == 0x81000000
2146 || (word
& 0xff000000) == 0x82000000)
2150 n_words
= ((word
>> 16) & 0xff);
2152 else if (!(word
& 0x80000000))
2155 struct obj_section
*pers_sec
;
2156 int gnu_personality
= 0;
2158 /* Custom personality routine. */
2159 pers
= ((word
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2160 pers
= UNMAKE_THUMB_ADDR (pers
+ addr
- 4);
2162 /* Check whether we've got one of the variants of the
2163 GNU personality routines. */
2164 pers_sec
= arm_obj_section_from_vma (objfile
, pers
);
2167 static const char *personality
[] =
2169 "__gcc_personality_v0",
2170 "__gxx_personality_v0",
2171 "__gcj_personality_v0",
2172 "__gnu_objc_personality_v0",
2176 CORE_ADDR pc
= pers
+ obj_section_offset (pers_sec
);
2179 for (k
= 0; personality
[k
]; k
++)
2180 if (lookup_minimal_symbol_by_pc_name
2181 (pc
, personality
[k
], objfile
))
2183 gnu_personality
= 1;
2188 /* If so, the next word contains a word count in the high
2189 byte, followed by the same unwind instructions as the
2190 pre-defined forms. */
2192 && addr
+ 4 <= extab_vma
+ extab_data
.size ())
2194 word
= bfd_h_get_32 (objfile
->obfd
,
2196 + addr
- extab_vma
));
2199 n_words
= ((word
>> 24) & 0xff);
2205 /* Sanity check address. */
2207 if (addr
< extab_vma
2208 || addr
+ 4 * n_words
> extab_vma
+ extab_data
.size ())
2209 n_words
= n_bytes
= 0;
2211 /* The unwind instructions reside in WORD (only the N_BYTES least
2212 significant bytes are valid), followed by N_WORDS words in the
2213 extab section starting at ADDR. */
2214 if (n_bytes
|| n_words
)
2217 = (gdb_byte
*) obstack_alloc (&objfile
->objfile_obstack
,
2218 n_bytes
+ n_words
* 4 + 1);
2221 *p
++ = (gdb_byte
) ((word
>> (8 * n_bytes
)) & 0xff);
2225 word
= bfd_h_get_32 (objfile
->obfd
,
2226 extab_data
.data () + addr
- extab_vma
);
2229 *p
++ = (gdb_byte
) ((word
>> 24) & 0xff);
2230 *p
++ = (gdb_byte
) ((word
>> 16) & 0xff);
2231 *p
++ = (gdb_byte
) ((word
>> 8) & 0xff);
2232 *p
++ = (gdb_byte
) (word
& 0xff);
2235 /* Implied "Finish" to terminate the list. */
2239 /* Push entry onto vector. They are guaranteed to always
2240 appear in order of increasing addresses. */
2241 new_exidx_entry
.addr
= idx
;
2242 new_exidx_entry
.entry
= entry
;
2243 data
->section_maps
[sec
->the_bfd_section
->index
].push_back
2248 /* Search for the exception table entry covering MEMADDR. If one is found,
2249 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2250 set *START to the start of the region covered by this entry. */
2253 arm_find_exidx_entry (CORE_ADDR memaddr
, CORE_ADDR
*start
)
2255 struct obj_section
*sec
;
2257 sec
= find_pc_section (memaddr
);
2260 struct arm_exidx_data
*data
;
2261 struct arm_exidx_entry map_key
= { memaddr
- obj_section_addr (sec
), 0 };
2263 data
= arm_exidx_data_key
.get (sec
->objfile
->obfd
);
2266 std::vector
<arm_exidx_entry
> &map
2267 = data
->section_maps
[sec
->the_bfd_section
->index
];
2270 auto idx
= std::lower_bound (map
.begin (), map
.end (), map_key
);
2272 /* std::lower_bound finds the earliest ordered insertion
2273 point. If the following symbol starts at this exact
2274 address, we use that; otherwise, the preceding
2275 exception table entry covers this address. */
2276 if (idx
< map
.end ())
2278 if (idx
->addr
== map_key
.addr
)
2281 *start
= idx
->addr
+ obj_section_addr (sec
);
2286 if (idx
> map
.begin ())
2290 *start
= idx
->addr
+ obj_section_addr (sec
);
2300 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2301 instruction list from the ARM exception table entry ENTRY, allocate and
2302 return a prologue cache structure describing how to unwind this frame.
2304 Return NULL if the unwinding instruction list contains a "spare",
2305 "reserved" or "refuse to unwind" instruction as defined in section
2306 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2307 for the ARM Architecture" document. */
2309 static struct arm_prologue_cache
*
2310 arm_exidx_fill_cache (struct frame_info
*this_frame
, gdb_byte
*entry
)
2315 struct arm_prologue_cache
*cache
;
2316 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2317 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2323 /* Whenever we reload SP, we actually have to retrieve its
2324 actual value in the current frame. */
2327 if (trad_frame_realreg_p (cache
->saved_regs
, ARM_SP_REGNUM
))
2329 int reg
= cache
->saved_regs
[ARM_SP_REGNUM
].realreg
;
2330 vsp
= get_frame_register_unsigned (this_frame
, reg
);
2334 CORE_ADDR addr
= cache
->saved_regs
[ARM_SP_REGNUM
].addr
;
2335 vsp
= get_frame_memory_unsigned (this_frame
, addr
, 4);
2341 /* Decode next unwind instruction. */
2344 if ((insn
& 0xc0) == 0)
2346 int offset
= insn
& 0x3f;
2347 vsp
+= (offset
<< 2) + 4;
2349 else if ((insn
& 0xc0) == 0x40)
2351 int offset
= insn
& 0x3f;
2352 vsp
-= (offset
<< 2) + 4;
2354 else if ((insn
& 0xf0) == 0x80)
2356 int mask
= ((insn
& 0xf) << 8) | *entry
++;
2359 /* The special case of an all-zero mask identifies
2360 "Refuse to unwind". We return NULL to fall back
2361 to the prologue analyzer. */
2365 /* Pop registers r4..r15 under mask. */
2366 for (i
= 0; i
< 12; i
++)
2367 if (mask
& (1 << i
))
2369 cache
->saved_regs
[4 + i
].addr
= vsp
;
2373 /* Special-case popping SP -- we need to reload vsp. */
2374 if (mask
& (1 << (ARM_SP_REGNUM
- 4)))
2377 else if ((insn
& 0xf0) == 0x90)
2379 int reg
= insn
& 0xf;
2381 /* Reserved cases. */
2382 if (reg
== ARM_SP_REGNUM
|| reg
== ARM_PC_REGNUM
)
2385 /* Set SP from another register and mark VSP for reload. */
2386 cache
->saved_regs
[ARM_SP_REGNUM
] = cache
->saved_regs
[reg
];
2389 else if ((insn
& 0xf0) == 0xa0)
2391 int count
= insn
& 0x7;
2392 int pop_lr
= (insn
& 0x8) != 0;
2395 /* Pop r4..r[4+count]. */
2396 for (i
= 0; i
<= count
; i
++)
2398 cache
->saved_regs
[4 + i
].addr
= vsp
;
2402 /* If indicated by flag, pop LR as well. */
2405 cache
->saved_regs
[ARM_LR_REGNUM
].addr
= vsp
;
2409 else if (insn
== 0xb0)
2411 /* We could only have updated PC by popping into it; if so, it
2412 will show up as address. Otherwise, copy LR into PC. */
2413 if (!trad_frame_addr_p (cache
->saved_regs
, ARM_PC_REGNUM
))
2414 cache
->saved_regs
[ARM_PC_REGNUM
]
2415 = cache
->saved_regs
[ARM_LR_REGNUM
];
2420 else if (insn
== 0xb1)
2422 int mask
= *entry
++;
2425 /* All-zero mask and mask >= 16 is "spare". */
2426 if (mask
== 0 || mask
>= 16)
2429 /* Pop r0..r3 under mask. */
2430 for (i
= 0; i
< 4; i
++)
2431 if (mask
& (1 << i
))
2433 cache
->saved_regs
[i
].addr
= vsp
;
2437 else if (insn
== 0xb2)
2439 ULONGEST offset
= 0;
2444 offset
|= (*entry
& 0x7f) << shift
;
2447 while (*entry
++ & 0x80);
2449 vsp
+= 0x204 + (offset
<< 2);
2451 else if (insn
== 0xb3)
2453 int start
= *entry
>> 4;
2454 int count
= (*entry
++) & 0xf;
2457 /* Only registers D0..D15 are valid here. */
2458 if (start
+ count
>= 16)
2461 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2462 for (i
= 0; i
<= count
; i
++)
2464 cache
->saved_regs
[ARM_D0_REGNUM
+ start
+ i
].addr
= vsp
;
2468 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2471 else if ((insn
& 0xf8) == 0xb8)
2473 int count
= insn
& 0x7;
2476 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2477 for (i
= 0; i
<= count
; i
++)
2479 cache
->saved_regs
[ARM_D0_REGNUM
+ 8 + i
].addr
= vsp
;
2483 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2486 else if (insn
== 0xc6)
2488 int start
= *entry
>> 4;
2489 int count
= (*entry
++) & 0xf;
2492 /* Only registers WR0..WR15 are valid. */
2493 if (start
+ count
>= 16)
2496 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2497 for (i
= 0; i
<= count
; i
++)
2499 cache
->saved_regs
[ARM_WR0_REGNUM
+ start
+ i
].addr
= vsp
;
2503 else if (insn
== 0xc7)
2505 int mask
= *entry
++;
2508 /* All-zero mask and mask >= 16 is "spare". */
2509 if (mask
== 0 || mask
>= 16)
2512 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2513 for (i
= 0; i
< 4; i
++)
2514 if (mask
& (1 << i
))
2516 cache
->saved_regs
[ARM_WCGR0_REGNUM
+ i
].addr
= vsp
;
2520 else if ((insn
& 0xf8) == 0xc0)
2522 int count
= insn
& 0x7;
2525 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2526 for (i
= 0; i
<= count
; i
++)
2528 cache
->saved_regs
[ARM_WR0_REGNUM
+ 10 + i
].addr
= vsp
;
2532 else if (insn
== 0xc8)
2534 int start
= *entry
>> 4;
2535 int count
= (*entry
++) & 0xf;
2538 /* Only registers D0..D31 are valid. */
2539 if (start
+ count
>= 16)
2542 /* Pop VFP double-precision registers
2543 D[16+start]..D[16+start+count]. */
2544 for (i
= 0; i
<= count
; i
++)
2546 cache
->saved_regs
[ARM_D0_REGNUM
+ 16 + start
+ i
].addr
= vsp
;
2550 else if (insn
== 0xc9)
2552 int start
= *entry
>> 4;
2553 int count
= (*entry
++) & 0xf;
2556 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2557 for (i
= 0; i
<= count
; i
++)
2559 cache
->saved_regs
[ARM_D0_REGNUM
+ start
+ i
].addr
= vsp
;
2563 else if ((insn
& 0xf8) == 0xd0)
2565 int count
= insn
& 0x7;
2568 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2569 for (i
= 0; i
<= count
; i
++)
2571 cache
->saved_regs
[ARM_D0_REGNUM
+ 8 + i
].addr
= vsp
;
2577 /* Everything else is "spare". */
2582 /* If we restore SP from a register, assume this was the frame register.
2583 Otherwise just fall back to SP as frame register. */
2584 if (trad_frame_realreg_p (cache
->saved_regs
, ARM_SP_REGNUM
))
2585 cache
->framereg
= cache
->saved_regs
[ARM_SP_REGNUM
].realreg
;
2587 cache
->framereg
= ARM_SP_REGNUM
;
2589 /* Determine offset to previous frame. */
2591 = vsp
- get_frame_register_unsigned (this_frame
, cache
->framereg
);
2593 /* We already got the previous SP. */
2594 cache
->prev_sp
= vsp
;
2599 /* Unwinding via ARM exception table entries. Note that the sniffer
2600 already computes a filled-in prologue cache, which is then used
2601 with the same arm_prologue_this_id and arm_prologue_prev_register
2602 routines also used for prologue-parsing based unwinding. */
2605 arm_exidx_unwind_sniffer (const struct frame_unwind
*self
,
2606 struct frame_info
*this_frame
,
2607 void **this_prologue_cache
)
2609 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
2610 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
2611 CORE_ADDR addr_in_block
, exidx_region
, func_start
;
2612 struct arm_prologue_cache
*cache
;
2615 /* See if we have an ARM exception table entry covering this address. */
2616 addr_in_block
= get_frame_address_in_block (this_frame
);
2617 entry
= arm_find_exidx_entry (addr_in_block
, &exidx_region
);
2621 /* The ARM exception table does not describe unwind information
2622 for arbitrary PC values, but is guaranteed to be correct only
2623 at call sites. We have to decide here whether we want to use
2624 ARM exception table information for this frame, or fall back
2625 to using prologue parsing. (Note that if we have DWARF CFI,
2626 this sniffer isn't even called -- CFI is always preferred.)
2628 Before we make this decision, however, we check whether we
2629 actually have *symbol* information for the current frame.
2630 If not, prologue parsing would not work anyway, so we might
2631 as well use the exception table and hope for the best. */
2632 if (find_pc_partial_function (addr_in_block
, NULL
, &func_start
, NULL
))
2636 /* If the next frame is "normal", we are at a call site in this
2637 frame, so exception information is guaranteed to be valid. */
2638 if (get_next_frame (this_frame
)
2639 && get_frame_type (get_next_frame (this_frame
)) == NORMAL_FRAME
)
2642 /* We also assume exception information is valid if we're currently
2643 blocked in a system call. The system library is supposed to
2644 ensure this, so that e.g. pthread cancellation works. */
2645 if (arm_frame_is_thumb (this_frame
))
2649 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame
) - 2,
2650 2, byte_order_for_code
, &insn
)
2651 && (insn
& 0xff00) == 0xdf00 /* svc */)
2658 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame
) - 4,
2659 4, byte_order_for_code
, &insn
)
2660 && (insn
& 0x0f000000) == 0x0f000000 /* svc */)
2664 /* Bail out if we don't know that exception information is valid. */
2668 /* The ARM exception index does not mark the *end* of the region
2669 covered by the entry, and some functions will not have any entry.
2670 To correctly recognize the end of the covered region, the linker
2671 should have inserted dummy records with a CANTUNWIND marker.
2673 Unfortunately, current versions of GNU ld do not reliably do
2674 this, and thus we may have found an incorrect entry above.
2675 As a (temporary) sanity check, we only use the entry if it
2676 lies *within* the bounds of the function. Note that this check
2677 might reject perfectly valid entries that just happen to cover
2678 multiple functions; therefore this check ought to be removed
2679 once the linker is fixed. */
2680 if (func_start
> exidx_region
)
2684 /* Decode the list of unwinding instructions into a prologue cache.
2685 Note that this may fail due to e.g. a "refuse to unwind" code. */
2686 cache
= arm_exidx_fill_cache (this_frame
, entry
);
2690 *this_prologue_cache
= cache
;
2694 struct frame_unwind arm_exidx_unwind
= {
2696 default_frame_unwind_stop_reason
,
2697 arm_prologue_this_id
,
2698 arm_prologue_prev_register
,
2700 arm_exidx_unwind_sniffer
2703 static struct arm_prologue_cache
*
2704 arm_make_epilogue_frame_cache (struct frame_info
*this_frame
)
2706 struct arm_prologue_cache
*cache
;
2709 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2710 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2712 /* Still rely on the offset calculated from prologue. */
2713 arm_scan_prologue (this_frame
, cache
);
2715 /* Since we are in epilogue, the SP has been restored. */
2716 cache
->prev_sp
= get_frame_register_unsigned (this_frame
, ARM_SP_REGNUM
);
2718 /* Calculate actual addresses of saved registers using offsets
2719 determined by arm_scan_prologue. */
2720 for (reg
= 0; reg
< gdbarch_num_regs (get_frame_arch (this_frame
)); reg
++)
2721 if (trad_frame_addr_p (cache
->saved_regs
, reg
))
2722 cache
->saved_regs
[reg
].addr
+= cache
->prev_sp
;
2727 /* Implementation of function hook 'this_id' in
2728 'struct frame_uwnind' for epilogue unwinder. */
2731 arm_epilogue_frame_this_id (struct frame_info
*this_frame
,
2733 struct frame_id
*this_id
)
2735 struct arm_prologue_cache
*cache
;
2738 if (*this_cache
== NULL
)
2739 *this_cache
= arm_make_epilogue_frame_cache (this_frame
);
2740 cache
= (struct arm_prologue_cache
*) *this_cache
;
2742 /* Use function start address as part of the frame ID. If we cannot
2743 identify the start address (due to missing symbol information),
2744 fall back to just using the current PC. */
2745 pc
= get_frame_pc (this_frame
);
2746 func
= get_frame_func (this_frame
);
2750 (*this_id
) = frame_id_build (cache
->prev_sp
, pc
);
2753 /* Implementation of function hook 'prev_register' in
2754 'struct frame_uwnind' for epilogue unwinder. */
2756 static struct value
*
2757 arm_epilogue_frame_prev_register (struct frame_info
*this_frame
,
2758 void **this_cache
, int regnum
)
2760 if (*this_cache
== NULL
)
2761 *this_cache
= arm_make_epilogue_frame_cache (this_frame
);
2763 return arm_prologue_prev_register (this_frame
, this_cache
, regnum
);
2766 static int arm_stack_frame_destroyed_p_1 (struct gdbarch
*gdbarch
,
2768 static int thumb_stack_frame_destroyed_p (struct gdbarch
*gdbarch
,
2771 /* Implementation of function hook 'sniffer' in
2772 'struct frame_uwnind' for epilogue unwinder. */
2775 arm_epilogue_frame_sniffer (const struct frame_unwind
*self
,
2776 struct frame_info
*this_frame
,
2777 void **this_prologue_cache
)
2779 if (frame_relative_level (this_frame
) == 0)
2781 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
2782 CORE_ADDR pc
= get_frame_pc (this_frame
);
2784 if (arm_frame_is_thumb (this_frame
))
2785 return thumb_stack_frame_destroyed_p (gdbarch
, pc
);
2787 return arm_stack_frame_destroyed_p_1 (gdbarch
, pc
);
2793 /* Frame unwinder from epilogue. */
2795 static const struct frame_unwind arm_epilogue_frame_unwind
=
2798 default_frame_unwind_stop_reason
,
2799 arm_epilogue_frame_this_id
,
2800 arm_epilogue_frame_prev_register
,
2802 arm_epilogue_frame_sniffer
,
2805 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2806 trampoline, return the target PC. Otherwise return 0.
2808 void call0a (char c, short s, int i, long l) {}
2812 (*pointer_to_call0a) (c, s, i, l);
2815 Instead of calling a stub library function _call_via_xx (xx is
2816 the register name), GCC may inline the trampoline in the object
2817 file as below (register r2 has the address of call0a).
2820 .type main, %function
2829 The trampoline 'bx r2' doesn't belong to main. */
2832 arm_skip_bx_reg (struct frame_info
*frame
, CORE_ADDR pc
)
2834 /* The heuristics of recognizing such trampoline is that FRAME is
2835 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2836 if (arm_frame_is_thumb (frame
))
2840 if (target_read_memory (pc
, buf
, 2) == 0)
2842 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
2843 enum bfd_endian byte_order_for_code
2844 = gdbarch_byte_order_for_code (gdbarch
);
2846 = extract_unsigned_integer (buf
, 2, byte_order_for_code
);
2848 if ((insn
& 0xff80) == 0x4700) /* bx <Rm> */
2851 = get_frame_register_unsigned (frame
, bits (insn
, 3, 6));
2853 /* Clear the LSB so that gdb core sets step-resume
2854 breakpoint at the right address. */
2855 return UNMAKE_THUMB_ADDR (dest
);
2863 static struct arm_prologue_cache
*
2864 arm_make_stub_cache (struct frame_info
*this_frame
)
2866 struct arm_prologue_cache
*cache
;
2868 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2869 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2871 cache
->prev_sp
= get_frame_register_unsigned (this_frame
, ARM_SP_REGNUM
);
2876 /* Our frame ID for a stub frame is the current SP and LR. */
2879 arm_stub_this_id (struct frame_info
*this_frame
,
2881 struct frame_id
*this_id
)
2883 struct arm_prologue_cache
*cache
;
2885 if (*this_cache
== NULL
)
2886 *this_cache
= arm_make_stub_cache (this_frame
);
2887 cache
= (struct arm_prologue_cache
*) *this_cache
;
2889 *this_id
= frame_id_build (cache
->prev_sp
, get_frame_pc (this_frame
));
2893 arm_stub_unwind_sniffer (const struct frame_unwind
*self
,
2894 struct frame_info
*this_frame
,
2895 void **this_prologue_cache
)
2897 CORE_ADDR addr_in_block
;
2899 CORE_ADDR pc
, start_addr
;
2902 addr_in_block
= get_frame_address_in_block (this_frame
);
2903 pc
= get_frame_pc (this_frame
);
2904 if (in_plt_section (addr_in_block
)
2905 /* We also use the stub winder if the target memory is unreadable
2906 to avoid having the prologue unwinder trying to read it. */
2907 || target_read_memory (pc
, dummy
, 4) != 0)
2910 if (find_pc_partial_function (pc
, &name
, &start_addr
, NULL
) == 0
2911 && arm_skip_bx_reg (this_frame
, pc
) != 0)
2917 struct frame_unwind arm_stub_unwind
= {
2919 default_frame_unwind_stop_reason
,
2921 arm_prologue_prev_register
,
2923 arm_stub_unwind_sniffer
2926 /* Put here the code to store, into CACHE->saved_regs, the addresses
2927 of the saved registers of frame described by THIS_FRAME. CACHE is
2930 static struct arm_prologue_cache
*
2931 arm_m_exception_cache (struct frame_info
*this_frame
)
2933 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
2934 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
2935 struct arm_prologue_cache
*cache
;
2938 CORE_ADDR unwound_sp
;
2940 uint32_t exc_return
;
2941 uint32_t process_stack_used
;
2942 uint32_t extended_frame_used
;
2943 uint32_t secure_stack_used
;
2945 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2946 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2948 /* ARMv7-M Architecture Reference "B1.5.6 Exception entry behavior"
2949 describes which bits in LR that define which stack was used prior
2950 to the exception and if FPU is used (causing extended stack frame). */
2952 lr
= get_frame_register_unsigned (this_frame
, ARM_LR_REGNUM
);
2953 sp
= get_frame_register_unsigned (this_frame
, ARM_SP_REGNUM
);
2955 /* Check EXC_RETURN indicator bits. */
2956 exc_return
= (((lr
>> 28) & 0xf) == 0xf);
2958 /* Check EXC_RETURN bit SPSEL if Main or Thread (process) stack used. */
2959 process_stack_used
= ((lr
& (1 << 2)) != 0);
2960 if (exc_return
&& process_stack_used
)
2962 /* Thread (process) stack used.
2963 Potentially this could be other register defined by target, but PSP
2964 can be considered a standard name for the "Process Stack Pointer".
2965 To be fully aware of system registers like MSP and PSP, these could
2966 be added to a separate XML arm-m-system-profile that is valid for
2967 ARMv6-M and ARMv7-M architectures. Also to be able to debug eg a
2968 corefile off-line, then these registers must be defined by GDB,
2969 and also be included in the corefile regsets. */
2971 int psp_regnum
= user_reg_map_name_to_regnum (gdbarch
, "psp", -1);
2972 if (psp_regnum
== -1)
2974 /* Thread (process) stack could not be fetched,
2975 give warning and exit. */
2977 warning (_("no PSP thread stack unwinding supported."));
2979 /* Terminate any further stack unwinding by refer to self. */
2980 cache
->prev_sp
= sp
;
2985 /* Thread (process) stack used, use PSP as SP. */
2986 unwound_sp
= get_frame_register_unsigned (this_frame
, psp_regnum
);
2991 /* Main stack used, use MSP as SP. */
2995 /* The hardware saves eight 32-bit words, comprising xPSR,
2996 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2997 "B1.5.6 Exception entry behavior" in
2998 "ARMv7-M Architecture Reference Manual". */
2999 cache
->saved_regs
[0].addr
= unwound_sp
;
3000 cache
->saved_regs
[1].addr
= unwound_sp
+ 4;
3001 cache
->saved_regs
[2].addr
= unwound_sp
+ 8;
3002 cache
->saved_regs
[3].addr
= unwound_sp
+ 12;
3003 cache
->saved_regs
[ARM_IP_REGNUM
].addr
= unwound_sp
+ 16;
3004 cache
->saved_regs
[ARM_LR_REGNUM
].addr
= unwound_sp
+ 20;
3005 cache
->saved_regs
[ARM_PC_REGNUM
].addr
= unwound_sp
+ 24;
3006 cache
->saved_regs
[ARM_PS_REGNUM
].addr
= unwound_sp
+ 28;
3008 /* Check EXC_RETURN bit FTYPE if extended stack frame (FPU regs stored)
3010 extended_frame_used
= ((lr
& (1 << 4)) == 0);
3011 if (exc_return
&& extended_frame_used
)
3014 int fpu_regs_stack_offset
;
3016 /* This code does not take into account the lazy stacking, see "Lazy
3017 context save of FP state", in B1.5.7, also ARM AN298, supported
3018 by Cortex-M4F architecture.
3019 To fully handle this the FPCCR register (Floating-point Context
3020 Control Register) needs to be read out and the bits ASPEN and LSPEN
3021 could be checked to setup correct lazy stacked FP registers.
3022 This register is located at address 0xE000EF34. */
3024 /* Extended stack frame type used. */
3025 fpu_regs_stack_offset
= unwound_sp
+ 0x20;
3026 for (i
= 0; i
< 16; i
++)
3028 cache
->saved_regs
[ARM_D0_REGNUM
+ i
].addr
= fpu_regs_stack_offset
;
3029 fpu_regs_stack_offset
+= 4;
3031 cache
->saved_regs
[ARM_FPSCR_REGNUM
].addr
= unwound_sp
+ 0x60;
3033 /* Offset 0x64 is reserved. */
3034 cache
->prev_sp
= unwound_sp
+ 0x68;
3038 /* Standard stack frame type used. */
3039 cache
->prev_sp
= unwound_sp
+ 0x20;
3042 /* Check EXC_RETURN bit S if Secure or Non-secure stack used. */
3043 secure_stack_used
= ((lr
& (1 << 6)) != 0);
3044 if (exc_return
&& secure_stack_used
)
3046 /* ARMv8-M Exception and interrupt handling is not considered here.
3047 In the ARMv8-M architecture also EXC_RETURN bit S is controlling if
3048 the Secure or Non-secure stack was used. To separate Secure and
3049 Non-secure stacks, processors that are based on the ARMv8-M
3050 architecture support 4 stack pointers: MSP_S, PSP_S, MSP_NS, PSP_NS.
3051 In addition, a stack limit feature is provided using stack limit
3052 registers (accessible using MSR and MRS instructions) in Privileged
3056 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3057 aligner between the top of the 32-byte stack frame and the
3058 previous context's stack pointer. */
3059 if (safe_read_memory_integer (unwound_sp
+ 28, 4, byte_order
, &xpsr
)
3060 && (xpsr
& (1 << 9)) != 0)
3061 cache
->prev_sp
+= 4;
3066 /* Implementation of function hook 'this_id' in
3067 'struct frame_uwnind'. */
3070 arm_m_exception_this_id (struct frame_info
*this_frame
,
3072 struct frame_id
*this_id
)
3074 struct arm_prologue_cache
*cache
;
3076 if (*this_cache
== NULL
)
3077 *this_cache
= arm_m_exception_cache (this_frame
);
3078 cache
= (struct arm_prologue_cache
*) *this_cache
;
3080 /* Our frame ID for a stub frame is the current SP and LR. */
3081 *this_id
= frame_id_build (cache
->prev_sp
,
3082 get_frame_pc (this_frame
));
3085 /* Implementation of function hook 'prev_register' in
3086 'struct frame_uwnind'. */
3088 static struct value
*
3089 arm_m_exception_prev_register (struct frame_info
*this_frame
,
3093 struct arm_prologue_cache
*cache
;
3095 if (*this_cache
== NULL
)
3096 *this_cache
= arm_m_exception_cache (this_frame
);
3097 cache
= (struct arm_prologue_cache
*) *this_cache
;
3099 /* The value was already reconstructed into PREV_SP. */
3100 if (prev_regnum
== ARM_SP_REGNUM
)
3101 return frame_unwind_got_constant (this_frame
, prev_regnum
,
3104 return trad_frame_get_prev_register (this_frame
, cache
->saved_regs
,
3108 /* Implementation of function hook 'sniffer' in
3109 'struct frame_uwnind'. */
3112 arm_m_exception_unwind_sniffer (const struct frame_unwind
*self
,
3113 struct frame_info
*this_frame
,
3114 void **this_prologue_cache
)
3116 CORE_ADDR this_pc
= get_frame_pc (this_frame
);
3118 /* No need to check is_m; this sniffer is only registered for
3119 M-profile architectures. */
3121 /* Check if exception frame returns to a magic PC value. */
3122 return arm_m_addr_is_magic (this_pc
);
3125 /* Frame unwinder for M-profile exceptions. */
3127 struct frame_unwind arm_m_exception_unwind
=
3130 default_frame_unwind_stop_reason
,
3131 arm_m_exception_this_id
,
3132 arm_m_exception_prev_register
,
3134 arm_m_exception_unwind_sniffer
3138 arm_normal_frame_base (struct frame_info
*this_frame
, void **this_cache
)
3140 struct arm_prologue_cache
*cache
;
3142 if (*this_cache
== NULL
)
3143 *this_cache
= arm_make_prologue_cache (this_frame
);
3144 cache
= (struct arm_prologue_cache
*) *this_cache
;
3146 return cache
->prev_sp
- cache
->framesize
;
3149 struct frame_base arm_normal_base
= {
3150 &arm_prologue_unwind
,
3151 arm_normal_frame_base
,
3152 arm_normal_frame_base
,
3153 arm_normal_frame_base
3156 static struct value
*
3157 arm_dwarf2_prev_register (struct frame_info
*this_frame
, void **this_cache
,
3160 struct gdbarch
* gdbarch
= get_frame_arch (this_frame
);
3162 ULONGEST t_bit
= arm_psr_thumb_bit (gdbarch
);
3167 /* The PC is normally copied from the return column, which
3168 describes saves of LR. However, that version may have an
3169 extra bit set to indicate Thumb state. The bit is not
3171 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
3172 return frame_unwind_got_constant (this_frame
, regnum
,
3173 arm_addr_bits_remove (gdbarch
, lr
));
3176 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3177 cpsr
= get_frame_register_unsigned (this_frame
, regnum
);
3178 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
3179 if (IS_THUMB_ADDR (lr
))
3183 return frame_unwind_got_constant (this_frame
, regnum
, cpsr
);
3186 internal_error (__FILE__
, __LINE__
,
3187 _("Unexpected register %d"), regnum
);
3192 arm_dwarf2_frame_init_reg (struct gdbarch
*gdbarch
, int regnum
,
3193 struct dwarf2_frame_state_reg
*reg
,
3194 struct frame_info
*this_frame
)
3200 reg
->how
= DWARF2_FRAME_REG_FN
;
3201 reg
->loc
.fn
= arm_dwarf2_prev_register
;
3204 reg
->how
= DWARF2_FRAME_REG_CFA
;
3209 /* Implement the stack_frame_destroyed_p gdbarch method. */
3212 thumb_stack_frame_destroyed_p (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3214 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
3215 unsigned int insn
, insn2
;
3216 int found_return
= 0, found_stack_adjust
= 0;
3217 CORE_ADDR func_start
, func_end
;
3221 if (!find_pc_partial_function (pc
, NULL
, &func_start
, &func_end
))
3224 /* The epilogue is a sequence of instructions along the following lines:
3226 - add stack frame size to SP or FP
3227 - [if frame pointer used] restore SP from FP
3228 - restore registers from SP [may include PC]
3229 - a return-type instruction [if PC wasn't already restored]
3231 In a first pass, we scan forward from the current PC and verify the
3232 instructions we find as compatible with this sequence, ending in a
3235 However, this is not sufficient to distinguish indirect function calls
3236 within a function from indirect tail calls in the epilogue in some cases.
3237 Therefore, if we didn't already find any SP-changing instruction during
3238 forward scan, we add a backward scanning heuristic to ensure we actually
3239 are in the epilogue. */
3242 while (scan_pc
< func_end
&& !found_return
)
3244 if (target_read_memory (scan_pc
, buf
, 2))
3248 insn
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3250 if ((insn
& 0xff80) == 0x4700) /* bx <Rm> */
3252 else if (insn
== 0x46f7) /* mov pc, lr */
3254 else if (thumb_instruction_restores_sp (insn
))
3256 if ((insn
& 0xff00) == 0xbd00) /* pop <registers, PC> */
3259 else if (thumb_insn_size (insn
) == 4) /* 32-bit Thumb-2 instruction */
3261 if (target_read_memory (scan_pc
, buf
, 2))
3265 insn2
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3267 if (insn
== 0xe8bd) /* ldm.w sp!, <registers> */
3269 if (insn2
& 0x8000) /* <registers> include PC. */
3272 else if (insn
== 0xf85d /* ldr.w <Rt>, [sp], #4 */
3273 && (insn2
& 0x0fff) == 0x0b04)
3275 if ((insn2
& 0xf000) == 0xf000) /* <Rt> is PC. */
3278 else if ((insn
& 0xffbf) == 0xecbd /* vldm sp!, <list> */
3279 && (insn2
& 0x0e00) == 0x0a00)
3291 /* Since any instruction in the epilogue sequence, with the possible
3292 exception of return itself, updates the stack pointer, we need to
3293 scan backwards for at most one instruction. Try either a 16-bit or
3294 a 32-bit instruction. This is just a heuristic, so we do not worry
3295 too much about false positives. */
3297 if (pc
- 4 < func_start
)
3299 if (target_read_memory (pc
- 4, buf
, 4))
3302 insn
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3303 insn2
= extract_unsigned_integer (buf
+ 2, 2, byte_order_for_code
);
3305 if (thumb_instruction_restores_sp (insn2
))
3306 found_stack_adjust
= 1;
3307 else if (insn
== 0xe8bd) /* ldm.w sp!, <registers> */
3308 found_stack_adjust
= 1;
3309 else if (insn
== 0xf85d /* ldr.w <Rt>, [sp], #4 */
3310 && (insn2
& 0x0fff) == 0x0b04)
3311 found_stack_adjust
= 1;
3312 else if ((insn
& 0xffbf) == 0xecbd /* vldm sp!, <list> */
3313 && (insn2
& 0x0e00) == 0x0a00)
3314 found_stack_adjust
= 1;
3316 return found_stack_adjust
;
3320 arm_stack_frame_destroyed_p_1 (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3322 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
3325 CORE_ADDR func_start
, func_end
;
3327 if (!find_pc_partial_function (pc
, NULL
, &func_start
, &func_end
))
3330 /* We are in the epilogue if the previous instruction was a stack
3331 adjustment and the next instruction is a possible return (bx, mov
3332 pc, or pop). We could have to scan backwards to find the stack
3333 adjustment, or forwards to find the return, but this is a decent
3334 approximation. First scan forwards. */
3337 insn
= read_memory_unsigned_integer (pc
, 4, byte_order_for_code
);
3338 if (bits (insn
, 28, 31) != INST_NV
)
3340 if ((insn
& 0x0ffffff0) == 0x012fff10)
3343 else if ((insn
& 0x0ffffff0) == 0x01a0f000)
3346 else if ((insn
& 0x0fff0000) == 0x08bd0000
3347 && (insn
& 0x0000c000) != 0)
3348 /* POP (LDMIA), including PC or LR. */
3355 /* Scan backwards. This is just a heuristic, so do not worry about
3356 false positives from mode changes. */
3358 if (pc
< func_start
+ 4)
3361 insn
= read_memory_unsigned_integer (pc
- 4, 4, byte_order_for_code
);
3362 if (arm_instruction_restores_sp (insn
))
3368 /* Implement the stack_frame_destroyed_p gdbarch method. */
3371 arm_stack_frame_destroyed_p (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3373 if (arm_pc_is_thumb (gdbarch
, pc
))
3374 return thumb_stack_frame_destroyed_p (gdbarch
, pc
);
3376 return arm_stack_frame_destroyed_p_1 (gdbarch
, pc
);
3379 /* When arguments must be pushed onto the stack, they go on in reverse
3380 order. The code below implements a FILO (stack) to do this. */
3385 struct stack_item
*prev
;
3389 static struct stack_item
*
3390 push_stack_item (struct stack_item
*prev
, const gdb_byte
*contents
, int len
)
3392 struct stack_item
*si
;
3393 si
= XNEW (struct stack_item
);
3394 si
->data
= (gdb_byte
*) xmalloc (len
);
3397 memcpy (si
->data
, contents
, len
);
3401 static struct stack_item
*
3402 pop_stack_item (struct stack_item
*si
)
3404 struct stack_item
*dead
= si
;
3411 /* Implement the gdbarch type alignment method, overrides the generic
3412 alignment algorithm for anything that is arm specific. */
3415 arm_type_align (gdbarch
*gdbarch
, struct type
*t
)
3417 t
= check_typedef (t
);
3418 if (t
->code () == TYPE_CODE_ARRAY
&& t
->is_vector ())
3420 /* Use the natural alignment for vector types (the same for
3421 scalar type), but the maximum alignment is 64-bit. */
3422 if (TYPE_LENGTH (t
) > 8)
3425 return TYPE_LENGTH (t
);
3428 /* Allow the common code to calculate the alignment. */
/* Possible base types for a candidate for passing and returning in
   VFP registers.  */

enum arm_vfp_cprc_base_type
{
  VFP_CPRC_UNKNOWN,
  VFP_CPRC_SINGLE,
  VFP_CPRC_DOUBLE,
  VFP_CPRC_VEC64,
  VFP_CPRC_VEC128
};
3444 /* The length of one element of base type B. */
3447 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b
)
3451 case VFP_CPRC_SINGLE
:
3453 case VFP_CPRC_DOUBLE
:
3455 case VFP_CPRC_VEC64
:
3457 case VFP_CPRC_VEC128
:
3460 internal_error (__FILE__
, __LINE__
, _("Invalid VFP CPRC type: %d."),
3465 /* The character ('s', 'd' or 'q') for the type of VFP register used
3466 for passing base type B. */
3469 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b
)
3473 case VFP_CPRC_SINGLE
:
3475 case VFP_CPRC_DOUBLE
:
3477 case VFP_CPRC_VEC64
:
3479 case VFP_CPRC_VEC128
:
3482 internal_error (__FILE__
, __LINE__
, _("Invalid VFP CPRC type: %d."),
3487 /* Determine whether T may be part of a candidate for passing and
3488 returning in VFP registers, ignoring the limit on the total number
3489 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3490 classification of the first valid component found; if it is not
3491 VFP_CPRC_UNKNOWN, all components must have the same classification
3492 as *BASE_TYPE. If it is found that T contains a type not permitted
3493 for passing and returning in VFP registers, a type differently
3494 classified from *BASE_TYPE, or two types differently classified
3495 from each other, return -1, otherwise return the total number of
3496 base-type elements found (possibly 0 in an empty structure or
3497 array). Vector types are not currently supported, matching the
3498 generic AAPCS support. */
3501 arm_vfp_cprc_sub_candidate (struct type
*t
,
3502 enum arm_vfp_cprc_base_type
*base_type
)
3504 t
= check_typedef (t
);
3508 switch (TYPE_LENGTH (t
))
3511 if (*base_type
== VFP_CPRC_UNKNOWN
)
3512 *base_type
= VFP_CPRC_SINGLE
;
3513 else if (*base_type
!= VFP_CPRC_SINGLE
)
3518 if (*base_type
== VFP_CPRC_UNKNOWN
)
3519 *base_type
= VFP_CPRC_DOUBLE
;
3520 else if (*base_type
!= VFP_CPRC_DOUBLE
)
3529 case TYPE_CODE_COMPLEX
:
3530 /* Arguments of complex T where T is one of the types float or
3531 double get treated as if they are implemented as:
3540 switch (TYPE_LENGTH (t
))
3543 if (*base_type
== VFP_CPRC_UNKNOWN
)
3544 *base_type
= VFP_CPRC_SINGLE
;
3545 else if (*base_type
!= VFP_CPRC_SINGLE
)
3550 if (*base_type
== VFP_CPRC_UNKNOWN
)
3551 *base_type
= VFP_CPRC_DOUBLE
;
3552 else if (*base_type
!= VFP_CPRC_DOUBLE
)
3561 case TYPE_CODE_ARRAY
:
3563 if (t
->is_vector ())
3565 /* A 64-bit or 128-bit containerized vector type are VFP
3567 switch (TYPE_LENGTH (t
))
3570 if (*base_type
== VFP_CPRC_UNKNOWN
)
3571 *base_type
= VFP_CPRC_VEC64
;
3574 if (*base_type
== VFP_CPRC_UNKNOWN
)
3575 *base_type
= VFP_CPRC_VEC128
;
3586 count
= arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t
),
3590 if (TYPE_LENGTH (t
) == 0)
3592 gdb_assert (count
== 0);
3595 else if (count
== 0)
3597 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3598 gdb_assert ((TYPE_LENGTH (t
) % unitlen
) == 0);
3599 return TYPE_LENGTH (t
) / unitlen
;
3604 case TYPE_CODE_STRUCT
:
3609 for (i
= 0; i
< t
->num_fields (); i
++)
3613 if (!field_is_static (&t
->field (i
)))
3614 sub_count
= arm_vfp_cprc_sub_candidate (t
->field (i
).type (),
3616 if (sub_count
== -1)
3620 if (TYPE_LENGTH (t
) == 0)
3622 gdb_assert (count
== 0);
3625 else if (count
== 0)
3627 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3628 if (TYPE_LENGTH (t
) != unitlen
* count
)
3633 case TYPE_CODE_UNION
:
3638 for (i
= 0; i
< t
->num_fields (); i
++)
3640 int sub_count
= arm_vfp_cprc_sub_candidate (t
->field (i
).type (),
3642 if (sub_count
== -1)
3644 count
= (count
> sub_count
? count
: sub_count
);
3646 if (TYPE_LENGTH (t
) == 0)
3648 gdb_assert (count
== 0);
3651 else if (count
== 0)
3653 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3654 if (TYPE_LENGTH (t
) != unitlen
* count
)
3666 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3667 if passed to or returned from a non-variadic function with the VFP
3668 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3669 *BASE_TYPE to the base type for T and *COUNT to the number of
3670 elements of that base type before returning. */
3673 arm_vfp_call_candidate (struct type
*t
, enum arm_vfp_cprc_base_type
*base_type
,
3676 enum arm_vfp_cprc_base_type b
= VFP_CPRC_UNKNOWN
;
3677 int c
= arm_vfp_cprc_sub_candidate (t
, &b
);
3678 if (c
<= 0 || c
> 4)
3685 /* Return 1 if the VFP ABI should be used for passing arguments to and
3686 returning values from a function of type FUNC_TYPE, 0
3690 arm_vfp_abi_for_function (struct gdbarch
*gdbarch
, struct type
*func_type
)
3692 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
3693 /* Variadic functions always use the base ABI. Assume that functions
3694 without debug info are not variadic. */
3695 if (func_type
&& check_typedef (func_type
)->has_varargs ())
3697 /* The VFP ABI is only supported as a variant of AAPCS. */
3698 if (tdep
->arm_abi
!= ARM_ABI_AAPCS
)
3700 return gdbarch_tdep (gdbarch
)->fp_model
== ARM_FLOAT_VFP
;
3703 /* We currently only support passing parameters in integer registers, which
3704 conforms with GCC's default model, and VFP argument passing following
3705 the VFP variant of AAPCS. Several other variants exist and
3706 we should probably support some of them based on the selected ABI. */
3709 arm_push_dummy_call (struct gdbarch
*gdbarch
, struct value
*function
,
3710 struct regcache
*regcache
, CORE_ADDR bp_addr
, int nargs
,
3711 struct value
**args
, CORE_ADDR sp
,
3712 function_call_return_method return_method
,
3713 CORE_ADDR struct_addr
)
3715 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
3719 struct stack_item
*si
= NULL
;
3722 unsigned vfp_regs_free
= (1 << 16) - 1;
3724 /* Determine the type of this function and whether the VFP ABI
3726 ftype
= check_typedef (value_type (function
));
3727 if (ftype
->code () == TYPE_CODE_PTR
)
3728 ftype
= check_typedef (TYPE_TARGET_TYPE (ftype
));
3729 use_vfp_abi
= arm_vfp_abi_for_function (gdbarch
, ftype
);
3731 /* Set the return address. For the ARM, the return breakpoint is
3732 always at BP_ADDR. */
3733 if (arm_pc_is_thumb (gdbarch
, bp_addr
))
3735 regcache_cooked_write_unsigned (regcache
, ARM_LR_REGNUM
, bp_addr
);
3737 /* Walk through the list of args and determine how large a temporary
3738 stack is required. Need to take care here as structs may be
3739 passed on the stack, and we have to push them. */
3742 argreg
= ARM_A1_REGNUM
;
3745 /* The struct_return pointer occupies the first parameter
3746 passing register. */
3747 if (return_method
== return_method_struct
)
3750 fprintf_unfiltered (gdb_stdlog
, "struct return in %s = %s\n",
3751 gdbarch_register_name (gdbarch
, argreg
),
3752 paddress (gdbarch
, struct_addr
));
3753 regcache_cooked_write_unsigned (regcache
, argreg
, struct_addr
);
3757 for (argnum
= 0; argnum
< nargs
; argnum
++)
3760 struct type
*arg_type
;
3761 struct type
*target_type
;
3762 enum type_code typecode
;
3763 const bfd_byte
*val
;
3765 enum arm_vfp_cprc_base_type vfp_base_type
;
3767 int may_use_core_reg
= 1;
3769 arg_type
= check_typedef (value_type (args
[argnum
]));
3770 len
= TYPE_LENGTH (arg_type
);
3771 target_type
= TYPE_TARGET_TYPE (arg_type
);
3772 typecode
= arg_type
->code ();
3773 val
= value_contents (args
[argnum
]);
3775 align
= type_align (arg_type
);
3776 /* Round alignment up to a whole number of words. */
3777 align
= (align
+ ARM_INT_REGISTER_SIZE
- 1)
3778 & ~(ARM_INT_REGISTER_SIZE
- 1);
3779 /* Different ABIs have different maximum alignments. */
3780 if (gdbarch_tdep (gdbarch
)->arm_abi
== ARM_ABI_APCS
)
3782 /* The APCS ABI only requires word alignment. */
3783 align
= ARM_INT_REGISTER_SIZE
;
3787 /* The AAPCS requires at most doubleword alignment. */
3788 if (align
> ARM_INT_REGISTER_SIZE
* 2)
3789 align
= ARM_INT_REGISTER_SIZE
* 2;
3793 && arm_vfp_call_candidate (arg_type
, &vfp_base_type
,
3801 /* Because this is a CPRC it cannot go in a core register or
3802 cause a core register to be skipped for alignment.
3803 Either it goes in VFP registers and the rest of this loop
3804 iteration is skipped for this argument, or it goes on the
3805 stack (and the stack alignment code is correct for this
3807 may_use_core_reg
= 0;
3809 unit_length
= arm_vfp_cprc_unit_length (vfp_base_type
);
3810 shift
= unit_length
/ 4;
3811 mask
= (1 << (shift
* vfp_base_count
)) - 1;
3812 for (regno
= 0; regno
< 16; regno
+= shift
)
3813 if (((vfp_regs_free
>> regno
) & mask
) == mask
)
3822 vfp_regs_free
&= ~(mask
<< regno
);
3823 reg_scaled
= regno
/ shift
;
3824 reg_char
= arm_vfp_cprc_reg_char (vfp_base_type
);
3825 for (i
= 0; i
< vfp_base_count
; i
++)
3829 if (reg_char
== 'q')
3830 arm_neon_quad_write (gdbarch
, regcache
, reg_scaled
+ i
,
3831 val
+ i
* unit_length
);
3834 xsnprintf (name_buf
, sizeof (name_buf
), "%c%d",
3835 reg_char
, reg_scaled
+ i
);
3836 regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
3838 regcache
->cooked_write (regnum
, val
+ i
* unit_length
);
3845 /* This CPRC could not go in VFP registers, so all VFP
3846 registers are now marked as used. */
3851 /* Push stack padding for doubleword alignment. */
3852 if (nstack
& (align
- 1))
3854 si
= push_stack_item (si
, val
, ARM_INT_REGISTER_SIZE
);
3855 nstack
+= ARM_INT_REGISTER_SIZE
;
3858 /* Doubleword aligned quantities must go in even register pairs. */
3859 if (may_use_core_reg
3860 && argreg
<= ARM_LAST_ARG_REGNUM
3861 && align
> ARM_INT_REGISTER_SIZE
3865 /* If the argument is a pointer to a function, and it is a
3866 Thumb function, create a LOCAL copy of the value and set
3867 the THUMB bit in it. */
3868 if (TYPE_CODE_PTR
== typecode
3869 && target_type
!= NULL
3870 && TYPE_CODE_FUNC
== check_typedef (target_type
)->code ())
3872 CORE_ADDR regval
= extract_unsigned_integer (val
, len
, byte_order
);
3873 if (arm_pc_is_thumb (gdbarch
, regval
))
3875 bfd_byte
*copy
= (bfd_byte
*) alloca (len
);
3876 store_unsigned_integer (copy
, len
, byte_order
,
3877 MAKE_THUMB_ADDR (regval
));
3882 /* Copy the argument to general registers or the stack in
3883 register-sized pieces. Large arguments are split between
3884 registers and stack. */
3887 int partial_len
= len
< ARM_INT_REGISTER_SIZE
3888 ? len
: ARM_INT_REGISTER_SIZE
;
3890 = extract_unsigned_integer (val
, partial_len
, byte_order
);
3892 if (may_use_core_reg
&& argreg
<= ARM_LAST_ARG_REGNUM
)
3894 /* The argument is being passed in a general purpose
3896 if (byte_order
== BFD_ENDIAN_BIG
)
3897 regval
<<= (ARM_INT_REGISTER_SIZE
- partial_len
) * 8;
3899 fprintf_unfiltered (gdb_stdlog
, "arg %d in %s = 0x%s\n",
3901 gdbarch_register_name
3903 phex (regval
, ARM_INT_REGISTER_SIZE
));
3904 regcache_cooked_write_unsigned (regcache
, argreg
, regval
);
3909 gdb_byte buf
[ARM_INT_REGISTER_SIZE
];
3911 memset (buf
, 0, sizeof (buf
));
3912 store_unsigned_integer (buf
, partial_len
, byte_order
, regval
);
3914 /* Push the arguments onto the stack. */
3916 fprintf_unfiltered (gdb_stdlog
, "arg %d @ sp + %d\n",
3918 si
= push_stack_item (si
, buf
, ARM_INT_REGISTER_SIZE
);
3919 nstack
+= ARM_INT_REGISTER_SIZE
;
3926 /* If we have an odd number of words to push, then decrement the stack
3927 by one word now, so first stack argument will be dword aligned. */
3934 write_memory (sp
, si
->data
, si
->len
);
3935 si
= pop_stack_item (si
);
3938 /* Finally, update teh SP register. */
3939 regcache_cooked_write_unsigned (regcache
, ARM_SP_REGNUM
, sp
);
3945 /* Always align the frame to an 8-byte boundary. This is required on
3946 some platforms and harmless on the rest. */
3949 arm_frame_align (struct gdbarch
*gdbarch
, CORE_ADDR sp
)
3951 /* Align the stack to eight bytes. */
3952 return sp
& ~ (CORE_ADDR
) 7;
3956 print_fpu_flags (struct ui_file
*file
, int flags
)
3958 if (flags
& (1 << 0))
3959 fputs_filtered ("IVO ", file
);
3960 if (flags
& (1 << 1))
3961 fputs_filtered ("DVZ ", file
);
3962 if (flags
& (1 << 2))
3963 fputs_filtered ("OFL ", file
);
3964 if (flags
& (1 << 3))
3965 fputs_filtered ("UFL ", file
);
3966 if (flags
& (1 << 4))
3967 fputs_filtered ("INX ", file
);
3968 fputc_filtered ('\n', file
);
3971 /* Print interesting information about the floating point processor
3972 (if present) or emulator. */
3974 arm_print_float_info (struct gdbarch
*gdbarch
, struct ui_file
*file
,
3975 struct frame_info
*frame
, const char *args
)
3977 unsigned long status
= get_frame_register_unsigned (frame
, ARM_FPS_REGNUM
);
3980 type
= (status
>> 24) & 127;
3981 if (status
& (1 << 31))
3982 fprintf_filtered (file
, _("Hardware FPU type %d\n"), type
);
3984 fprintf_filtered (file
, _("Software FPU type %d\n"), type
);
3985 /* i18n: [floating point unit] mask */
3986 fputs_filtered (_("mask: "), file
);
3987 print_fpu_flags (file
, status
>> 16);
3988 /* i18n: [floating point unit] flags */
3989 fputs_filtered (_("flags: "), file
);
3990 print_fpu_flags (file
, status
);
3993 /* Construct the ARM extended floating point type. */
3994 static struct type
*
3995 arm_ext_type (struct gdbarch
*gdbarch
)
3997 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
3999 if (!tdep
->arm_ext_type
)
4001 = arch_float_type (gdbarch
, -1, "builtin_type_arm_ext",
4002 floatformats_arm_ext
);
4004 return tdep
->arm_ext_type
;
4007 static struct type
*
4008 arm_neon_double_type (struct gdbarch
*gdbarch
)
4010 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
4012 if (tdep
->neon_double_type
== NULL
)
4014 struct type
*t
, *elem
;
4016 t
= arch_composite_type (gdbarch
, "__gdb_builtin_type_neon_d",
4018 elem
= builtin_type (gdbarch
)->builtin_uint8
;
4019 append_composite_type_field (t
, "u8", init_vector_type (elem
, 8));
4020 elem
= builtin_type (gdbarch
)->builtin_uint16
;
4021 append_composite_type_field (t
, "u16", init_vector_type (elem
, 4));
4022 elem
= builtin_type (gdbarch
)->builtin_uint32
;
4023 append_composite_type_field (t
, "u32", init_vector_type (elem
, 2));
4024 elem
= builtin_type (gdbarch
)->builtin_uint64
;
4025 append_composite_type_field (t
, "u64", elem
);
4026 elem
= builtin_type (gdbarch
)->builtin_float
;
4027 append_composite_type_field (t
, "f32", init_vector_type (elem
, 2));
4028 elem
= builtin_type (gdbarch
)->builtin_double
;
4029 append_composite_type_field (t
, "f64", elem
);
4031 t
->set_is_vector (true);
4032 t
->set_name ("neon_d");
4033 tdep
->neon_double_type
= t
;
4036 return tdep
->neon_double_type
;
4039 /* FIXME: The vector types are not correctly ordered on big-endian
4040 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4041 bits of d0 - regardless of what unit size is being held in d0. So
4042 the offset of the first uint8 in d0 is 7, but the offset of the
4043 first float is 4. This code works as-is for little-endian
4046 static struct type
*
4047 arm_neon_quad_type (struct gdbarch
*gdbarch
)
4049 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
4051 if (tdep
->neon_quad_type
== NULL
)
4053 struct type
*t
, *elem
;
4055 t
= arch_composite_type (gdbarch
, "__gdb_builtin_type_neon_q",
4057 elem
= builtin_type (gdbarch
)->builtin_uint8
;
4058 append_composite_type_field (t
, "u8", init_vector_type (elem
, 16));
4059 elem
= builtin_type (gdbarch
)->builtin_uint16
;
4060 append_composite_type_field (t
, "u16", init_vector_type (elem
, 8));
4061 elem
= builtin_type (gdbarch
)->builtin_uint32
;
4062 append_composite_type_field (t
, "u32", init_vector_type (elem
, 4));
4063 elem
= builtin_type (gdbarch
)->builtin_uint64
;
4064 append_composite_type_field (t
, "u64", init_vector_type (elem
, 2));
4065 elem
= builtin_type (gdbarch
)->builtin_float
;
4066 append_composite_type_field (t
, "f32", init_vector_type (elem
, 4));
4067 elem
= builtin_type (gdbarch
)->builtin_double
;
4068 append_composite_type_field (t
, "f64", init_vector_type (elem
, 2));
4070 t
->set_is_vector (true);
4071 t
->set_name ("neon_q");
4072 tdep
->neon_quad_type
= t
;
4075 return tdep
->neon_quad_type
;
4078 /* Return the GDB type object for the "standard" data type of data in
4081 static struct type
*
4082 arm_register_type (struct gdbarch
*gdbarch
, int regnum
)
4084 int num_regs
= gdbarch_num_regs (gdbarch
);
4086 if (gdbarch_tdep (gdbarch
)->have_vfp_pseudos
4087 && regnum
>= num_regs
&& regnum
< num_regs
+ 32)
4088 return builtin_type (gdbarch
)->builtin_float
;
4090 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
4091 && regnum
>= num_regs
+ 32 && regnum
< num_regs
+ 32 + 16)
4092 return arm_neon_quad_type (gdbarch
);
4094 /* If the target description has register information, we are only
4095 in this function so that we can override the types of
4096 double-precision registers for NEON. */
4097 if (tdesc_has_registers (gdbarch_target_desc (gdbarch
)))
4099 struct type
*t
= tdesc_register_type (gdbarch
, regnum
);
4101 if (regnum
>= ARM_D0_REGNUM
&& regnum
< ARM_D0_REGNUM
+ 32
4102 && t
->code () == TYPE_CODE_FLT
4103 && gdbarch_tdep (gdbarch
)->have_neon
)
4104 return arm_neon_double_type (gdbarch
);
4109 if (regnum
>= ARM_F0_REGNUM
&& regnum
< ARM_F0_REGNUM
+ NUM_FREGS
)
4111 if (!gdbarch_tdep (gdbarch
)->have_fpa_registers
)
4112 return builtin_type (gdbarch
)->builtin_void
;
4114 return arm_ext_type (gdbarch
);
4116 else if (regnum
== ARM_SP_REGNUM
)
4117 return builtin_type (gdbarch
)->builtin_data_ptr
;
4118 else if (regnum
== ARM_PC_REGNUM
)
4119 return builtin_type (gdbarch
)->builtin_func_ptr
;
4120 else if (regnum
>= ARRAY_SIZE (arm_register_names
))
4121 /* These registers are only supported on targets which supply
4122 an XML description. */
4123 return builtin_type (gdbarch
)->builtin_int0
;
4125 return builtin_type (gdbarch
)->builtin_uint32
;
4128 /* Map a DWARF register REGNUM onto the appropriate GDB register
4132 arm_dwarf_reg_to_regnum (struct gdbarch
*gdbarch
, int reg
)
4134 /* Core integer regs. */
4135 if (reg
>= 0 && reg
<= 15)
4138 /* Legacy FPA encoding. These were once used in a way which
4139 overlapped with VFP register numbering, so their use is
4140 discouraged, but GDB doesn't support the ARM toolchain
4141 which used them for VFP. */
4142 if (reg
>= 16 && reg
<= 23)
4143 return ARM_F0_REGNUM
+ reg
- 16;
4145 /* New assignments for the FPA registers. */
4146 if (reg
>= 96 && reg
<= 103)
4147 return ARM_F0_REGNUM
+ reg
- 96;
4149 /* WMMX register assignments. */
4150 if (reg
>= 104 && reg
<= 111)
4151 return ARM_WCGR0_REGNUM
+ reg
- 104;
4153 if (reg
>= 112 && reg
<= 127)
4154 return ARM_WR0_REGNUM
+ reg
- 112;
4156 if (reg
>= 192 && reg
<= 199)
4157 return ARM_WC0_REGNUM
+ reg
- 192;
4159 /* VFP v2 registers. A double precision value is actually
4160 in d1 rather than s2, but the ABI only defines numbering
4161 for the single precision registers. This will "just work"
4162 in GDB for little endian targets (we'll read eight bytes,
4163 starting in s0 and then progressing to s1), but will be
4164 reversed on big endian targets with VFP. This won't
4165 be a problem for the new Neon quad registers; you're supposed
4166 to use DW_OP_piece for those. */
4167 if (reg
>= 64 && reg
<= 95)
4171 xsnprintf (name_buf
, sizeof (name_buf
), "s%d", reg
- 64);
4172 return user_reg_map_name_to_regnum (gdbarch
, name_buf
,
4176 /* VFP v3 / Neon registers. This range is also used for VFP v2
4177 registers, except that it now describes d0 instead of s0. */
4178 if (reg
>= 256 && reg
<= 287)
4182 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", reg
- 256);
4183 return user_reg_map_name_to_regnum (gdbarch
, name_buf
,
4190 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4192 arm_register_sim_regno (struct gdbarch
*gdbarch
, int regnum
)
4195 gdb_assert (reg
>= 0 && reg
< gdbarch_num_regs (gdbarch
));
4197 if (regnum
>= ARM_WR0_REGNUM
&& regnum
<= ARM_WR15_REGNUM
)
4198 return regnum
- ARM_WR0_REGNUM
+ SIM_ARM_IWMMXT_COP0R0_REGNUM
;
4200 if (regnum
>= ARM_WC0_REGNUM
&& regnum
<= ARM_WC7_REGNUM
)
4201 return regnum
- ARM_WC0_REGNUM
+ SIM_ARM_IWMMXT_COP1R0_REGNUM
;
4203 if (regnum
>= ARM_WCGR0_REGNUM
&& regnum
<= ARM_WCGR7_REGNUM
)
4204 return regnum
- ARM_WCGR0_REGNUM
+ SIM_ARM_IWMMXT_COP1R8_REGNUM
;
4206 if (reg
< NUM_GREGS
)
4207 return SIM_ARM_R0_REGNUM
+ reg
;
4210 if (reg
< NUM_FREGS
)
4211 return SIM_ARM_FP0_REGNUM
+ reg
;
4214 if (reg
< NUM_SREGS
)
4215 return SIM_ARM_FPS_REGNUM
+ reg
;
4218 internal_error (__FILE__
, __LINE__
, _("Bad REGNUM %d"), regnum
);
4221 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4222 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4223 NULL if an error occurs. BUF is freed. */
4226 extend_buffer_earlier (gdb_byte
*buf
, CORE_ADDR endaddr
,
4227 int old_len
, int new_len
)
4230 int bytes_to_read
= new_len
- old_len
;
4232 new_buf
= (gdb_byte
*) xmalloc (new_len
);
4233 memcpy (new_buf
+ bytes_to_read
, buf
, old_len
);
4235 if (target_read_code (endaddr
- new_len
, new_buf
, bytes_to_read
) != 0)
4243 /* An IT block is at most the 2-byte IT instruction followed by
4244 four 4-byte instructions. The furthest back we must search to
4245 find an IT block that affects the current instruction is thus
4246 2 + 3 * 4 == 14 bytes. */
4247 #define MAX_IT_BLOCK_PREFIX 14
4249 /* Use a quick scan if there are more than this many bytes of
4251 #define IT_SCAN_THRESHOLD 32
4253 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4254 A breakpoint in an IT block may not be hit, depending on the
4257 arm_adjust_breakpoint_address (struct gdbarch
*gdbarch
, CORE_ADDR bpaddr
)
4261 CORE_ADDR boundary
, func_start
;
4263 enum bfd_endian order
= gdbarch_byte_order_for_code (gdbarch
);
4264 int i
, any
, last_it
, last_it_count
;
4266 /* If we are using BKPT breakpoints, none of this is necessary. */
4267 if (gdbarch_tdep (gdbarch
)->thumb2_breakpoint
== NULL
)
4270 /* ARM mode does not have this problem. */
4271 if (!arm_pc_is_thumb (gdbarch
, bpaddr
))
4274 /* We are setting a breakpoint in Thumb code that could potentially
4275 contain an IT block. The first step is to find how much Thumb
4276 code there is; we do not need to read outside of known Thumb
4278 map_type
= arm_find_mapping_symbol (bpaddr
, &boundary
);
4280 /* Thumb-2 code must have mapping symbols to have a chance. */
4283 bpaddr
= gdbarch_addr_bits_remove (gdbarch
, bpaddr
);
4285 if (find_pc_partial_function (bpaddr
, NULL
, &func_start
, NULL
)
4286 && func_start
> boundary
)
4287 boundary
= func_start
;
4289 /* Search for a candidate IT instruction. We have to do some fancy
4290 footwork to distinguish a real IT instruction from the second
4291 half of a 32-bit instruction, but there is no need for that if
4292 there's no candidate. */
4293 buf_len
= std::min (bpaddr
- boundary
, (CORE_ADDR
) MAX_IT_BLOCK_PREFIX
);
4295 /* No room for an IT instruction. */
4298 buf
= (gdb_byte
*) xmalloc (buf_len
);
4299 if (target_read_code (bpaddr
- buf_len
, buf
, buf_len
) != 0)
4302 for (i
= 0; i
< buf_len
; i
+= 2)
4304 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
4305 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
4318 /* OK, the code bytes before this instruction contain at least one
4319 halfword which resembles an IT instruction. We know that it's
4320 Thumb code, but there are still two possibilities. Either the
4321 halfword really is an IT instruction, or it is the second half of
4322 a 32-bit Thumb instruction. The only way we can tell is to
4323 scan forwards from a known instruction boundary. */
4324 if (bpaddr
- boundary
> IT_SCAN_THRESHOLD
)
4328 /* There's a lot of code before this instruction. Start with an
4329 optimistic search; it's easy to recognize halfwords that can
4330 not be the start of a 32-bit instruction, and use that to
4331 lock on to the instruction boundaries. */
4332 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
, IT_SCAN_THRESHOLD
);
4335 buf_len
= IT_SCAN_THRESHOLD
;
4338 for (i
= 0; i
< buf_len
- sizeof (buf
) && ! definite
; i
+= 2)
4340 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
4341 if (thumb_insn_size (inst1
) == 2)
4348 /* At this point, if DEFINITE, BUF[I] is the first place we
4349 are sure that we know the instruction boundaries, and it is far
4350 enough from BPADDR that we could not miss an IT instruction
4351 affecting BPADDR. If ! DEFINITE, give up - start from a
4355 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
,
4359 buf_len
= bpaddr
- boundary
;
4365 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
, bpaddr
- boundary
);
4368 buf_len
= bpaddr
- boundary
;
4372 /* Scan forwards. Find the last IT instruction before BPADDR. */
4377 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
4379 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
4384 else if (inst1
& 0x0002)
4386 else if (inst1
& 0x0004)
4391 i
+= thumb_insn_size (inst1
);
4397 /* There wasn't really an IT instruction after all. */
4400 if (last_it_count
< 1)
4401 /* It was too far away. */
4404 /* This really is a trouble spot. Move the breakpoint to the IT
4406 return bpaddr
- buf_len
+ last_it
;
4409 /* ARM displaced stepping support.
4411 Generally ARM displaced stepping works as follows:
4413 1. When an instruction is to be single-stepped, it is first decoded by
4414 arm_process_displaced_insn. Depending on the type of instruction, it is
4415 then copied to a scratch location, possibly in a modified form. The
4416 copy_* set of functions performs such modification, as necessary. A
4417 breakpoint is placed after the modified instruction in the scratch space
4418 to return control to GDB. Note in particular that instructions which
4419 modify the PC will no longer do so after modification.
4421 2. The instruction is single-stepped, by setting the PC to the scratch
4422 location address, and resuming. Control returns to GDB when the
4425 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4426 function used for the current instruction. This function's job is to
4427 put the CPU/memory state back to what it would have been if the
4428 instruction had been executed unmodified in its original location. */
4430 /* NOP instruction (mov r0, r0). */
4431 #define ARM_NOP 0xe1a00000
4432 #define THUMB_NOP 0x4600
4434 /* Helper for register reads for displaced stepping. In particular, this
4435 returns the PC as it would be seen by the instruction at its original
4439 displaced_read_reg (struct regcache
*regs
, arm_displaced_step_closure
*dsc
,
4443 CORE_ADDR from
= dsc
->insn_addr
;
4445 if (regno
== ARM_PC_REGNUM
)
4447 /* Compute pipeline offset:
4448 - When executing an ARM instruction, PC reads as the address of the
4449 current instruction plus 8.
4450 - When executing a Thumb instruction, PC reads as the address of the
4451 current instruction plus 4. */
4458 displaced_debug_printf ("read pc value %.8lx",
4459 (unsigned long) from
);
4460 return (ULONGEST
) from
;
4464 regcache_cooked_read_unsigned (regs
, regno
, &ret
);
4466 displaced_debug_printf ("read r%d value %.8lx",
4467 regno
, (unsigned long) ret
);
4474 displaced_in_arm_mode (struct regcache
*regs
)
4477 ULONGEST t_bit
= arm_psr_thumb_bit (regs
->arch ());
4479 regcache_cooked_read_unsigned (regs
, ARM_PS_REGNUM
, &ps
);
4481 return (ps
& t_bit
) == 0;
4484 /* Write to the PC as from a branch instruction. */
4487 branch_write_pc (struct regcache
*regs
, arm_displaced_step_closure
*dsc
,
4491 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4492 architecture versions < 6. */
4493 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
4494 val
& ~(ULONGEST
) 0x3);
4496 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
4497 val
& ~(ULONGEST
) 0x1);
4500 /* Write to the PC as from a branch-exchange instruction. */
4503 bx_write_pc (struct regcache
*regs
, ULONGEST val
)
4506 ULONGEST t_bit
= arm_psr_thumb_bit (regs
->arch ());
4508 regcache_cooked_read_unsigned (regs
, ARM_PS_REGNUM
, &ps
);
4512 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
| t_bit
);
4513 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
& 0xfffffffe);
4515 else if ((val
& 2) == 0)
4517 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
& ~t_bit
);
4518 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
);
4522 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4523 mode, align dest to 4 bytes). */
4524 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4525 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
& ~t_bit
);
4526 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
& 0xfffffffc);
4530 /* Write to the PC as if from a load instruction. */
4533 load_write_pc (struct regcache
*regs
, arm_displaced_step_closure
*dsc
,
4536 if (DISPLACED_STEPPING_ARCH_VERSION
>= 5)
4537 bx_write_pc (regs
, val
);
4539 branch_write_pc (regs
, dsc
, val
);
4542 /* Write to the PC as if from an ALU instruction. */
4545 alu_write_pc (struct regcache
*regs
, arm_displaced_step_closure
*dsc
,
4548 if (DISPLACED_STEPPING_ARCH_VERSION
>= 7 && !dsc
->is_thumb
)
4549 bx_write_pc (regs
, val
);
4551 branch_write_pc (regs
, dsc
, val
);
4554 /* Helper for writing to registers for displaced stepping. Writing to the PC
4555 has a varying effects depending on the instruction which does the write:
4556 this is controlled by the WRITE_PC argument. */
4559 displaced_write_reg (struct regcache
*regs
, arm_displaced_step_closure
*dsc
,
4560 int regno
, ULONGEST val
, enum pc_write_style write_pc
)
4562 if (regno
== ARM_PC_REGNUM
)
4564 displaced_debug_printf ("writing pc %.8lx", (unsigned long) val
);
4568 case BRANCH_WRITE_PC
:
4569 branch_write_pc (regs
, dsc
, val
);
4573 bx_write_pc (regs
, val
);
4577 load_write_pc (regs
, dsc
, val
);
4581 alu_write_pc (regs
, dsc
, val
);
4584 case CANNOT_WRITE_PC
:
4585 warning (_("Instruction wrote to PC in an unexpected way when "
4586 "single-stepping"));
4590 internal_error (__FILE__
, __LINE__
,
4591 _("Invalid argument to displaced_write_reg"));
4594 dsc
->wrote_to_pc
= 1;
4598 displaced_debug_printf ("writing r%d value %.8lx",
4599 regno
, (unsigned long) val
);
4600 regcache_cooked_write_unsigned (regs
, regno
, val
);
/* This function is used to concisely determine if an instruction INSN
   references PC.  Register fields of interest in INSN should have the
   corresponding fields of BITMASK set to 0b1111.  The function
   returns return 1 if any of these fields in INSN reference the PC
   (also 0b1111, r15), else it returns 0.  */

static int
insn_references_pc (uint32_t insn, uint32_t bitmask)
{
  uint32_t lowbit = 1;

  while (bitmask != 0)
    {
      uint32_t mask;

      /* Advance to the lowest set bit of the remaining bitmask.  */
      for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
	;

      if (!lowbit)
	break;

      /* A register field is the nibble starting at that bit.  */
      mask = lowbit * 0xf;

      if ((insn & mask) == mask)
	return 1;

      bitmask &= ~mask;
    }

  return 0;
}
4636 /* The simplest copy function. Many instructions have the same effect no
4637 matter what address they are executed at: in those cases, use this. */
4640 arm_copy_unmodified (struct gdbarch
*gdbarch
, uint32_t insn
,
4641 const char *iname
, arm_displaced_step_closure
*dsc
)
4643 displaced_debug_printf ("copying insn %.8lx, opcode/class '%s' unmodified",
4644 (unsigned long) insn
, iname
);
4646 dsc
->modinsn
[0] = insn
;
4652 thumb_copy_unmodified_32bit (struct gdbarch
*gdbarch
, uint16_t insn1
,
4653 uint16_t insn2
, const char *iname
,
4654 arm_displaced_step_closure
*dsc
)
4656 displaced_debug_printf ("copying insn %.4x %.4x, opcode/class '%s' "
4657 "unmodified", insn1
, insn2
, iname
);
4659 dsc
->modinsn
[0] = insn1
;
4660 dsc
->modinsn
[1] = insn2
;
4666 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
4669 thumb_copy_unmodified_16bit (struct gdbarch
*gdbarch
, uint16_t insn
,
4671 arm_displaced_step_closure
*dsc
)
4673 displaced_debug_printf ("copying insn %.4x, opcode/class '%s' unmodified",
4676 dsc
->modinsn
[0] = insn
;
4681 /* Preload instructions with immediate offset. */
4684 cleanup_preload (struct gdbarch
*gdbarch
,
4685 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
4687 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
4688 if (!dsc
->u
.preload
.immed
)
4689 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
4693 install_preload (struct gdbarch
*gdbarch
, struct regcache
*regs
,
4694 arm_displaced_step_closure
*dsc
, unsigned int rn
)
4697 /* Preload instructions:
4699 {pli/pld} [rn, #+/-imm]
4701 {pli/pld} [r0, #+/-imm]. */
4703 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
4704 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
4705 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
4706 dsc
->u
.preload
.immed
= 1;
4708 dsc
->cleanup
= &cleanup_preload
;
4712 arm_copy_preload (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
4713 arm_displaced_step_closure
*dsc
)
4715 unsigned int rn
= bits (insn
, 16, 19);
4717 if (!insn_references_pc (insn
, 0x000f0000ul
))
4718 return arm_copy_unmodified (gdbarch
, insn
, "preload", dsc
);
4720 displaced_debug_printf ("copying preload insn %.8lx", (unsigned long) insn
);
4722 dsc
->modinsn
[0] = insn
& 0xfff0ffff;
4724 install_preload (gdbarch
, regs
, dsc
, rn
);
4730 thumb2_copy_preload (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
4731 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
4733 unsigned int rn
= bits (insn1
, 0, 3);
4734 unsigned int u_bit
= bit (insn1
, 7);
4735 int imm12
= bits (insn2
, 0, 11);
4738 if (rn
!= ARM_PC_REGNUM
)
4739 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "preload", dsc
);
4741 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
4742 PLD (literal) Encoding T1. */
4743 displaced_debug_printf ("copying pld/pli pc (0x%x) %c imm12 %.4x",
4744 (unsigned int) dsc
->insn_addr
, u_bit
? '+' : '-',
4750 /* Rewrite instruction {pli/pld} PC imm12 into:
4751 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4755 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4757 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
4758 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
4760 pc_val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
4762 displaced_write_reg (regs
, dsc
, 0, pc_val
, CANNOT_WRITE_PC
);
4763 displaced_write_reg (regs
, dsc
, 1, imm12
, CANNOT_WRITE_PC
);
4764 dsc
->u
.preload
.immed
= 0;
4766 /* {pli/pld} [r0, r1] */
4767 dsc
->modinsn
[0] = insn1
& 0xfff0;
4768 dsc
->modinsn
[1] = 0xf001;
4771 dsc
->cleanup
= &cleanup_preload
;
4775 /* Preload instructions with register offset. */
4778 install_preload_reg(struct gdbarch
*gdbarch
, struct regcache
*regs
,
4779 arm_displaced_step_closure
*dsc
, unsigned int rn
,
4782 ULONGEST rn_val
, rm_val
;
4784 /* Preload register-offset instructions:
4786 {pli/pld} [rn, rm {, shift}]
4788 {pli/pld} [r0, r1 {, shift}]. */
4790 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
4791 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
4792 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
4793 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
4794 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
4795 displaced_write_reg (regs
, dsc
, 1, rm_val
, CANNOT_WRITE_PC
);
4796 dsc
->u
.preload
.immed
= 0;
4798 dsc
->cleanup
= &cleanup_preload
;
4802 arm_copy_preload_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
4803 struct regcache
*regs
,
4804 arm_displaced_step_closure
*dsc
)
4806 unsigned int rn
= bits (insn
, 16, 19);
4807 unsigned int rm
= bits (insn
, 0, 3);
4810 if (!insn_references_pc (insn
, 0x000f000ful
))
4811 return arm_copy_unmodified (gdbarch
, insn
, "preload reg", dsc
);
4813 displaced_debug_printf ("copying preload insn %.8lx",
4814 (unsigned long) insn
);
4816 dsc
->modinsn
[0] = (insn
& 0xfff0fff0) | 0x1;
4818 install_preload_reg (gdbarch
, regs
, dsc
, rn
, rm
);
4822 /* Copy/cleanup coprocessor load and store instructions. */
4825 cleanup_copro_load_store (struct gdbarch
*gdbarch
,
4826 struct regcache
*regs
,
4827 arm_displaced_step_closure
*dsc
)
4829 ULONGEST rn_val
= displaced_read_reg (regs
, dsc
, 0);
4831 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
4833 if (dsc
->u
.ldst
.writeback
)
4834 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, LOAD_WRITE_PC
);
4838 install_copro_load_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
4839 arm_displaced_step_closure
*dsc
,
4840 int writeback
, unsigned int rn
)
4844 /* Coprocessor load/store instructions:
4846 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4848 {stc/stc2} [r0, #+/-imm].
4850 ldc/ldc2 are handled identically. */
4852 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
4853 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
4854 /* PC should be 4-byte aligned. */
4855 rn_val
= rn_val
& 0xfffffffc;
4856 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
4858 dsc
->u
.ldst
.writeback
= writeback
;
4859 dsc
->u
.ldst
.rn
= rn
;
4861 dsc
->cleanup
= &cleanup_copro_load_store
;
4865 arm_copy_copro_load_store (struct gdbarch
*gdbarch
, uint32_t insn
,
4866 struct regcache
*regs
,
4867 arm_displaced_step_closure
*dsc
)
4869 unsigned int rn
= bits (insn
, 16, 19);
4871 if (!insn_references_pc (insn
, 0x000f0000ul
))
4872 return arm_copy_unmodified (gdbarch
, insn
, "copro load/store", dsc
);
4874 displaced_debug_printf ("copying coprocessor load/store insn %.8lx",
4875 (unsigned long) insn
);
4877 dsc
->modinsn
[0] = insn
& 0xfff0ffff;
4879 install_copro_load_store (gdbarch
, regs
, dsc
, bit (insn
, 25), rn
);
4885 thumb2_copy_copro_load_store (struct gdbarch
*gdbarch
, uint16_t insn1
,
4886 uint16_t insn2
, struct regcache
*regs
,
4887 arm_displaced_step_closure
*dsc
)
4889 unsigned int rn
= bits (insn1
, 0, 3);
4891 if (rn
!= ARM_PC_REGNUM
)
4892 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
4893 "copro load/store", dsc
);
4895 displaced_debug_printf ("copying coprocessor load/store insn %.4x%.4x",
4898 dsc
->modinsn
[0] = insn1
& 0xfff0;
4899 dsc
->modinsn
[1] = insn2
;
4902 /* This function is called for copying instruction LDC/LDC2/VLDR, which
4903 doesn't support writeback, so pass 0. */
4904 install_copro_load_store (gdbarch
, regs
, dsc
, 0, rn
);
4909 /* Clean up branch instructions (actually perform the branch, by setting
4913 cleanup_branch (struct gdbarch
*gdbarch
, struct regcache
*regs
,
4914 arm_displaced_step_closure
*dsc
)
4916 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
4917 int branch_taken
= condition_true (dsc
->u
.branch
.cond
, status
);
4918 enum pc_write_style write_pc
= dsc
->u
.branch
.exchange
4919 ? BX_WRITE_PC
: BRANCH_WRITE_PC
;
4924 if (dsc
->u
.branch
.link
)
4926 /* The value of LR should be the next insn of current one. In order
4927 not to confuse logic handling later insn `bx lr', if current insn mode
4928 is Thumb, the bit 0 of LR value should be set to 1. */
4929 ULONGEST next_insn_addr
= dsc
->insn_addr
+ dsc
->insn_size
;
4932 next_insn_addr
|= 0x1;
4934 displaced_write_reg (regs
, dsc
, ARM_LR_REGNUM
, next_insn_addr
,
4938 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, dsc
->u
.branch
.dest
, write_pc
);
4941 /* Copy B/BL/BLX instructions with immediate destinations. */
4944 install_b_bl_blx (struct gdbarch
*gdbarch
, struct regcache
*regs
,
4945 arm_displaced_step_closure
*dsc
,
4946 unsigned int cond
, int exchange
, int link
, long offset
)
4948 /* Implement "BL<cond> <label>" as:
4950 Preparation: cond <- instruction condition
4951 Insn: mov r0, r0 (nop)
4952 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4954 B<cond> similar, but don't set r14 in cleanup. */
4956 dsc
->u
.branch
.cond
= cond
;
4957 dsc
->u
.branch
.link
= link
;
4958 dsc
->u
.branch
.exchange
= exchange
;
4960 dsc
->u
.branch
.dest
= dsc
->insn_addr
;
4961 if (link
&& exchange
)
4962 /* For BLX, offset is computed from the Align (PC, 4). */
4963 dsc
->u
.branch
.dest
= dsc
->u
.branch
.dest
& 0xfffffffc;
4966 dsc
->u
.branch
.dest
+= 4 + offset
;
4968 dsc
->u
.branch
.dest
+= 8 + offset
;
4970 dsc
->cleanup
= &cleanup_branch
;
4973 arm_copy_b_bl_blx (struct gdbarch
*gdbarch
, uint32_t insn
,
4974 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
4976 unsigned int cond
= bits (insn
, 28, 31);
4977 int exchange
= (cond
== 0xf);
4978 int link
= exchange
|| bit (insn
, 24);
4981 displaced_debug_printf ("copying %s immediate insn %.8lx",
4982 (exchange
) ? "blx" : (link
) ? "bl" : "b",
4983 (unsigned long) insn
);
4985 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4986 then arrange the switch into Thumb mode. */
4987 offset
= (bits (insn
, 0, 23) << 2) | (bit (insn
, 24) << 1) | 1;
4989 offset
= bits (insn
, 0, 23) << 2;
4991 if (bit (offset
, 25))
4992 offset
= offset
| ~0x3ffffff;
4994 dsc
->modinsn
[0] = ARM_NOP
;
4996 install_b_bl_blx (gdbarch
, regs
, dsc
, cond
, exchange
, link
, offset
);
5001 thumb2_copy_b_bl_blx (struct gdbarch
*gdbarch
, uint16_t insn1
,
5002 uint16_t insn2
, struct regcache
*regs
,
5003 arm_displaced_step_closure
*dsc
)
5005 int link
= bit (insn2
, 14);
5006 int exchange
= link
&& !bit (insn2
, 12);
5009 int j1
= bit (insn2
, 13);
5010 int j2
= bit (insn2
, 11);
5011 int s
= sbits (insn1
, 10, 10);
5012 int i1
= !(j1
^ bit (insn1
, 10));
5013 int i2
= !(j2
^ bit (insn1
, 10));
5015 if (!link
&& !exchange
) /* B */
5017 offset
= (bits (insn2
, 0, 10) << 1);
5018 if (bit (insn2
, 12)) /* Encoding T4 */
5020 offset
|= (bits (insn1
, 0, 9) << 12)
5026 else /* Encoding T3 */
5028 offset
|= (bits (insn1
, 0, 5) << 12)
5032 cond
= bits (insn1
, 6, 9);
5037 offset
= (bits (insn1
, 0, 9) << 12);
5038 offset
|= ((i2
<< 22) | (i1
<< 23) | (s
<< 24));
5039 offset
|= exchange
?
5040 (bits (insn2
, 1, 10) << 2) : (bits (insn2
, 0, 10) << 1);
5043 displaced_debug_printf ("copying %s insn %.4x %.4x with offset %.8lx",
5044 link
? (exchange
) ? "blx" : "bl" : "b",
5045 insn1
, insn2
, offset
);
5047 dsc
->modinsn
[0] = THUMB_NOP
;
5049 install_b_bl_blx (gdbarch
, regs
, dsc
, cond
, exchange
, link
, offset
);
5053 /* Copy B Thumb instructions. */
5055 thumb_copy_b (struct gdbarch
*gdbarch
, uint16_t insn
,
5056 arm_displaced_step_closure
*dsc
)
5058 unsigned int cond
= 0;
5060 unsigned short bit_12_15
= bits (insn
, 12, 15);
5061 CORE_ADDR from
= dsc
->insn_addr
;
5063 if (bit_12_15
== 0xd)
5065 /* offset = SignExtend (imm8:0, 32) */
5066 offset
= sbits ((insn
<< 1), 0, 8);
5067 cond
= bits (insn
, 8, 11);
5069 else if (bit_12_15
== 0xe) /* Encoding T2 */
5071 offset
= sbits ((insn
<< 1), 0, 11);
5075 displaced_debug_printf ("copying b immediate insn %.4x with offset %d",
5078 dsc
->u
.branch
.cond
= cond
;
5079 dsc
->u
.branch
.link
= 0;
5080 dsc
->u
.branch
.exchange
= 0;
5081 dsc
->u
.branch
.dest
= from
+ 4 + offset
;
5083 dsc
->modinsn
[0] = THUMB_NOP
;
5085 dsc
->cleanup
= &cleanup_branch
;
5090 /* Copy BX/BLX with register-specified destinations. */
5093 install_bx_blx_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5094 arm_displaced_step_closure
*dsc
, int link
,
5095 unsigned int cond
, unsigned int rm
)
5097 /* Implement {BX,BLX}<cond> <reg>" as:
5099 Preparation: cond <- instruction condition
5100 Insn: mov r0, r0 (nop)
5101 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5103 Don't set r14 in cleanup for BX. */
5105 dsc
->u
.branch
.dest
= displaced_read_reg (regs
, dsc
, rm
);
5107 dsc
->u
.branch
.cond
= cond
;
5108 dsc
->u
.branch
.link
= link
;
5110 dsc
->u
.branch
.exchange
= 1;
5112 dsc
->cleanup
= &cleanup_branch
;
5116 arm_copy_bx_blx_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
5117 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
5119 unsigned int cond
= bits (insn
, 28, 31);
5122 int link
= bit (insn
, 5);
5123 unsigned int rm
= bits (insn
, 0, 3);
5125 displaced_debug_printf ("copying insn %.8lx", (unsigned long) insn
);
5127 dsc
->modinsn
[0] = ARM_NOP
;
5129 install_bx_blx_reg (gdbarch
, regs
, dsc
, link
, cond
, rm
);
5134 thumb_copy_bx_blx_reg (struct gdbarch
*gdbarch
, uint16_t insn
,
5135 struct regcache
*regs
,
5136 arm_displaced_step_closure
*dsc
)
5138 int link
= bit (insn
, 7);
5139 unsigned int rm
= bits (insn
, 3, 6);
5141 displaced_debug_printf ("copying insn %.4x", (unsigned short) insn
);
5143 dsc
->modinsn
[0] = THUMB_NOP
;
5145 install_bx_blx_reg (gdbarch
, regs
, dsc
, link
, INST_AL
, rm
);
5151 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5154 cleanup_alu_imm (struct gdbarch
*gdbarch
,
5155 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
5157 ULONGEST rd_val
= displaced_read_reg (regs
, dsc
, 0);
5158 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5159 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
5160 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
5164 arm_copy_alu_imm (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
5165 arm_displaced_step_closure
*dsc
)
5167 unsigned int rn
= bits (insn
, 16, 19);
5168 unsigned int rd
= bits (insn
, 12, 15);
5169 unsigned int op
= bits (insn
, 21, 24);
5170 int is_mov
= (op
== 0xd);
5171 ULONGEST rd_val
, rn_val
;
5173 if (!insn_references_pc (insn
, 0x000ff000ul
))
5174 return arm_copy_unmodified (gdbarch
, insn
, "ALU immediate", dsc
);
5176 displaced_debug_printf ("copying immediate %s insn %.8lx",
5177 is_mov
? "move" : "ALU",
5178 (unsigned long) insn
);
5180 /* Instruction is of form:
5182 <op><cond> rd, [rn,] #imm
5186 Preparation: tmp1, tmp2 <- r0, r1;
5188 Insn: <op><cond> r0, r1, #imm
5189 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5192 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5193 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5194 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5195 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
5196 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
5197 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
5201 dsc
->modinsn
[0] = insn
& 0xfff00fff;
5203 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x10000;
5205 dsc
->cleanup
= &cleanup_alu_imm
;
5211 thumb2_copy_alu_imm (struct gdbarch
*gdbarch
, uint16_t insn1
,
5212 uint16_t insn2
, struct regcache
*regs
,
5213 arm_displaced_step_closure
*dsc
)
5215 unsigned int op
= bits (insn1
, 5, 8);
5216 unsigned int rn
, rm
, rd
;
5217 ULONGEST rd_val
, rn_val
;
5219 rn
= bits (insn1
, 0, 3); /* Rn */
5220 rm
= bits (insn2
, 0, 3); /* Rm */
5221 rd
= bits (insn2
, 8, 11); /* Rd */
5223 /* This routine is only called for instruction MOV. */
5224 gdb_assert (op
== 0x2 && rn
== 0xf);
5226 if (rm
!= ARM_PC_REGNUM
&& rd
!= ARM_PC_REGNUM
)
5227 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "ALU imm", dsc
);
5229 displaced_debug_printf ("copying reg %s insn %.4x%.4x", "ALU", insn1
, insn2
);
5231 /* Instruction is of form:
5233 <op><cond> rd, [rn,] #imm
5237 Preparation: tmp1, tmp2 <- r0, r1;
5239 Insn: <op><cond> r0, r1, #imm
5240 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5243 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5244 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5245 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5246 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
5247 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
5248 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
5251 dsc
->modinsn
[0] = insn1
;
5252 dsc
->modinsn
[1] = ((insn2
& 0xf0f0) | 0x1);
5255 dsc
->cleanup
= &cleanup_alu_imm
;
5260 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5263 cleanup_alu_reg (struct gdbarch
*gdbarch
,
5264 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
5269 rd_val
= displaced_read_reg (regs
, dsc
, 0);
5271 for (i
= 0; i
< 3; i
++)
5272 displaced_write_reg (regs
, dsc
, i
, dsc
->tmp
[i
], CANNOT_WRITE_PC
);
5274 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
5278 install_alu_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5279 arm_displaced_step_closure
*dsc
,
5280 unsigned int rd
, unsigned int rn
, unsigned int rm
)
5282 ULONGEST rd_val
, rn_val
, rm_val
;
5284 /* Instruction is of form:
5286 <op><cond> rd, [rn,] rm [, <shift>]
5290 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5291 r0, r1, r2 <- rd, rn, rm
5292 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5293 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5296 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5297 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5298 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
5299 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
5300 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5301 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5302 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
5303 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
5304 displaced_write_reg (regs
, dsc
, 2, rm_val
, CANNOT_WRITE_PC
);
5307 dsc
->cleanup
= &cleanup_alu_reg
;
5311 arm_copy_alu_reg (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
5312 arm_displaced_step_closure
*dsc
)
5314 unsigned int op
= bits (insn
, 21, 24);
5315 int is_mov
= (op
== 0xd);
5317 if (!insn_references_pc (insn
, 0x000ff00ful
))
5318 return arm_copy_unmodified (gdbarch
, insn
, "ALU reg", dsc
);
5320 displaced_debug_printf ("copying reg %s insn %.8lx",
5321 is_mov
? "move" : "ALU", (unsigned long) insn
);
5324 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x2;
5326 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x10002;
5328 install_alu_reg (gdbarch
, regs
, dsc
, bits (insn
, 12, 15), bits (insn
, 16, 19),
5334 thumb_copy_alu_reg (struct gdbarch
*gdbarch
, uint16_t insn
,
5335 struct regcache
*regs
,
5336 arm_displaced_step_closure
*dsc
)
5340 rm
= bits (insn
, 3, 6);
5341 rd
= (bit (insn
, 7) << 3) | bits (insn
, 0, 2);
5343 if (rd
!= ARM_PC_REGNUM
&& rm
!= ARM_PC_REGNUM
)
5344 return thumb_copy_unmodified_16bit (gdbarch
, insn
, "ALU reg", dsc
);
5346 displaced_debug_printf ("copying ALU reg insn %.4x", (unsigned short) insn
);
5348 dsc
->modinsn
[0] = ((insn
& 0xff00) | 0x10);
5350 install_alu_reg (gdbarch
, regs
, dsc
, rd
, rd
, rm
);
5355 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5358 cleanup_alu_shifted_reg (struct gdbarch
*gdbarch
,
5359 struct regcache
*regs
,
5360 arm_displaced_step_closure
*dsc
)
5362 ULONGEST rd_val
= displaced_read_reg (regs
, dsc
, 0);
5365 for (i
= 0; i
< 4; i
++)
5366 displaced_write_reg (regs
, dsc
, i
, dsc
->tmp
[i
], CANNOT_WRITE_PC
);
5368 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
5372 install_alu_shifted_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5373 arm_displaced_step_closure
*dsc
,
5374 unsigned int rd
, unsigned int rn
, unsigned int rm
,
5378 ULONGEST rd_val
, rn_val
, rm_val
, rs_val
;
5380 /* Instruction is of form:
5382 <op><cond> rd, [rn,] rm, <shift> rs
5386 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5387 r0, r1, r2, r3 <- rd, rn, rm, rs
5388 Insn: <op><cond> r0, r1, r2, <shift> r3
5390 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5394 for (i
= 0; i
< 4; i
++)
5395 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
5397 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
5398 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5399 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5400 rs_val
= displaced_read_reg (regs
, dsc
, rs
);
5401 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
5402 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
5403 displaced_write_reg (regs
, dsc
, 2, rm_val
, CANNOT_WRITE_PC
);
5404 displaced_write_reg (regs
, dsc
, 3, rs_val
, CANNOT_WRITE_PC
);
5406 dsc
->cleanup
= &cleanup_alu_shifted_reg
;
5410 arm_copy_alu_shifted_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
5411 struct regcache
*regs
,
5412 arm_displaced_step_closure
*dsc
)
5414 unsigned int op
= bits (insn
, 21, 24);
5415 int is_mov
= (op
== 0xd);
5416 unsigned int rd
, rn
, rm
, rs
;
5418 if (!insn_references_pc (insn
, 0x000fff0ful
))
5419 return arm_copy_unmodified (gdbarch
, insn
, "ALU shifted reg", dsc
);
5421 displaced_debug_printf ("copying shifted reg %s insn %.8lx",
5422 is_mov
? "move" : "ALU",
5423 (unsigned long) insn
);
5425 rn
= bits (insn
, 16, 19);
5426 rm
= bits (insn
, 0, 3);
5427 rs
= bits (insn
, 8, 11);
5428 rd
= bits (insn
, 12, 15);
5431 dsc
->modinsn
[0] = (insn
& 0xfff000f0) | 0x302;
5433 dsc
->modinsn
[0] = (insn
& 0xfff000f0) | 0x10302;
5435 install_alu_shifted_reg (gdbarch
, regs
, dsc
, rd
, rn
, rm
, rs
);
5440 /* Clean up load instructions. */
5443 cleanup_load (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5444 arm_displaced_step_closure
*dsc
)
5446 ULONGEST rt_val
, rt_val2
= 0, rn_val
;
5448 rt_val
= displaced_read_reg (regs
, dsc
, 0);
5449 if (dsc
->u
.ldst
.xfersize
== 8)
5450 rt_val2
= displaced_read_reg (regs
, dsc
, 1);
5451 rn_val
= displaced_read_reg (regs
, dsc
, 2);
5453 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5454 if (dsc
->u
.ldst
.xfersize
> 4)
5455 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
5456 displaced_write_reg (regs
, dsc
, 2, dsc
->tmp
[2], CANNOT_WRITE_PC
);
5457 if (!dsc
->u
.ldst
.immed
)
5458 displaced_write_reg (regs
, dsc
, 3, dsc
->tmp
[3], CANNOT_WRITE_PC
);
5460 /* Handle register writeback. */
5461 if (dsc
->u
.ldst
.writeback
)
5462 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, CANNOT_WRITE_PC
);
5463 /* Put result in right place. */
5464 displaced_write_reg (regs
, dsc
, dsc
->rd
, rt_val
, LOAD_WRITE_PC
);
5465 if (dsc
->u
.ldst
.xfersize
== 8)
5466 displaced_write_reg (regs
, dsc
, dsc
->rd
+ 1, rt_val2
, LOAD_WRITE_PC
);
5469 /* Clean up store instructions. */
5472 cleanup_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5473 arm_displaced_step_closure
*dsc
)
5475 ULONGEST rn_val
= displaced_read_reg (regs
, dsc
, 2);
5477 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5478 if (dsc
->u
.ldst
.xfersize
> 4)
5479 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
5480 displaced_write_reg (regs
, dsc
, 2, dsc
->tmp
[2], CANNOT_WRITE_PC
);
5481 if (!dsc
->u
.ldst
.immed
)
5482 displaced_write_reg (regs
, dsc
, 3, dsc
->tmp
[3], CANNOT_WRITE_PC
);
5483 if (!dsc
->u
.ldst
.restore_r4
)
5484 displaced_write_reg (regs
, dsc
, 4, dsc
->tmp
[4], CANNOT_WRITE_PC
);
5487 if (dsc
->u
.ldst
.writeback
)
5488 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, CANNOT_WRITE_PC
);
5491 /* Copy "extra" load/store instructions. These are halfword/doubleword
5492 transfers, which have a different encoding to byte/word transfers. */
5495 arm_copy_extra_ld_st (struct gdbarch
*gdbarch
, uint32_t insn
, int unprivileged
,
5496 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
5498 unsigned int op1
= bits (insn
, 20, 24);
5499 unsigned int op2
= bits (insn
, 5, 6);
5500 unsigned int rt
= bits (insn
, 12, 15);
5501 unsigned int rn
= bits (insn
, 16, 19);
5502 unsigned int rm
= bits (insn
, 0, 3);
5503 char load
[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5504 char bytesize
[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5505 int immed
= (op1
& 0x4) != 0;
5507 ULONGEST rt_val
, rt_val2
= 0, rn_val
, rm_val
= 0;
5509 if (!insn_references_pc (insn
, 0x000ff00ful
))
5510 return arm_copy_unmodified (gdbarch
, insn
, "extra load/store", dsc
);
5512 displaced_debug_printf ("copying %sextra load/store insn %.8lx",
5513 unprivileged
? "unprivileged " : "",
5514 (unsigned long) insn
);
5516 opcode
= ((op2
<< 2) | (op1
& 0x1) | ((op1
& 0x4) >> 1)) - 4;
5519 internal_error (__FILE__
, __LINE__
,
5520 _("copy_extra_ld_st: instruction decode error"));
5522 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5523 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5524 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
5526 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
5528 rt_val
= displaced_read_reg (regs
, dsc
, rt
);
5529 if (bytesize
[opcode
] == 8)
5530 rt_val2
= displaced_read_reg (regs
, dsc
, rt
+ 1);
5531 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5533 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5535 displaced_write_reg (regs
, dsc
, 0, rt_val
, CANNOT_WRITE_PC
);
5536 if (bytesize
[opcode
] == 8)
5537 displaced_write_reg (regs
, dsc
, 1, rt_val2
, CANNOT_WRITE_PC
);
5538 displaced_write_reg (regs
, dsc
, 2, rn_val
, CANNOT_WRITE_PC
);
5540 displaced_write_reg (regs
, dsc
, 3, rm_val
, CANNOT_WRITE_PC
);
5543 dsc
->u
.ldst
.xfersize
= bytesize
[opcode
];
5544 dsc
->u
.ldst
.rn
= rn
;
5545 dsc
->u
.ldst
.immed
= immed
;
5546 dsc
->u
.ldst
.writeback
= bit (insn
, 24) == 0 || bit (insn
, 21) != 0;
5547 dsc
->u
.ldst
.restore_r4
= 0;
5550 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5552 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5553 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x20000;
5555 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5557 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5558 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x20003;
5560 dsc
->cleanup
= load
[opcode
] ? &cleanup_load
: &cleanup_store
;
5565 /* Copy byte/half word/word loads and stores. */
5568 install_load_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5569 arm_displaced_step_closure
*dsc
, int load
,
5570 int immed
, int writeback
, int size
, int usermode
,
5571 int rt
, int rm
, int rn
)
5573 ULONGEST rt_val
, rn_val
, rm_val
= 0;
5575 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5576 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
5578 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
5580 dsc
->tmp
[4] = displaced_read_reg (regs
, dsc
, 4);
5582 rt_val
= displaced_read_reg (regs
, dsc
, rt
);
5583 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5585 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5587 displaced_write_reg (regs
, dsc
, 0, rt_val
, CANNOT_WRITE_PC
);
5588 displaced_write_reg (regs
, dsc
, 2, rn_val
, CANNOT_WRITE_PC
);
5590 displaced_write_reg (regs
, dsc
, 3, rm_val
, CANNOT_WRITE_PC
);
5592 dsc
->u
.ldst
.xfersize
= size
;
5593 dsc
->u
.ldst
.rn
= rn
;
5594 dsc
->u
.ldst
.immed
= immed
;
5595 dsc
->u
.ldst
.writeback
= writeback
;
5597 /* To write PC we can do:
5599 Before this sequence of instructions:
5600 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
5601 r2 is the Rn value got from displaced_read_reg.
5603 Insn1: push {pc} Write address of STR instruction + offset on stack
5604 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5605 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5606 = addr(Insn1) + offset - addr(Insn3) - 8
5608 Insn4: add r4, r4, #8 r4 = offset - 8
5609 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5611 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5613 Otherwise we don't know what value to write for PC, since the offset is
5614 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5615 of this can be found in Section "Saving from r15" in
5616 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
5618 dsc
->cleanup
= load
? &cleanup_load
: &cleanup_store
;
5623 thumb2_copy_load_literal (struct gdbarch
*gdbarch
, uint16_t insn1
,
5624 uint16_t insn2
, struct regcache
*regs
,
5625 arm_displaced_step_closure
*dsc
, int size
)
5627 unsigned int u_bit
= bit (insn1
, 7);
5628 unsigned int rt
= bits (insn2
, 12, 15);
5629 int imm12
= bits (insn2
, 0, 11);
5632 displaced_debug_printf ("copying ldr pc (0x%x) R%d %c imm12 %.4x",
5633 (unsigned int) dsc
->insn_addr
, rt
, u_bit
? '+' : '-',
5639 /* Rewrite instruction LDR Rt imm12 into:
5641 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5645 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5648 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5649 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
5650 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
5652 pc_val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
5654 pc_val
= pc_val
& 0xfffffffc;
5656 displaced_write_reg (regs
, dsc
, 2, pc_val
, CANNOT_WRITE_PC
);
5657 displaced_write_reg (regs
, dsc
, 3, imm12
, CANNOT_WRITE_PC
);
5661 dsc
->u
.ldst
.xfersize
= size
;
5662 dsc
->u
.ldst
.immed
= 0;
5663 dsc
->u
.ldst
.writeback
= 0;
5664 dsc
->u
.ldst
.restore_r4
= 0;
5666 /* LDR R0, R2, R3 */
5667 dsc
->modinsn
[0] = 0xf852;
5668 dsc
->modinsn
[1] = 0x3;
5671 dsc
->cleanup
= &cleanup_load
;
5677 thumb2_copy_load_reg_imm (struct gdbarch
*gdbarch
, uint16_t insn1
,
5678 uint16_t insn2
, struct regcache
*regs
,
5679 arm_displaced_step_closure
*dsc
,
5680 int writeback
, int immed
)
5682 unsigned int rt
= bits (insn2
, 12, 15);
5683 unsigned int rn
= bits (insn1
, 0, 3);
5684 unsigned int rm
= bits (insn2
, 0, 3); /* Only valid if !immed. */
5685 /* In LDR (register), there is also a register Rm, which is not allowed to
5686 be PC, so we don't have to check it. */
5688 if (rt
!= ARM_PC_REGNUM
&& rn
!= ARM_PC_REGNUM
)
5689 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "load",
5692 displaced_debug_printf ("copying ldr r%d [r%d] insn %.4x%.4x",
5693 rt
, rn
, insn1
, insn2
);
5695 install_load_store (gdbarch
, regs
, dsc
, 1, immed
, writeback
, 4,
5698 dsc
->u
.ldst
.restore_r4
= 0;
5701 /* ldr[b]<cond> rt, [rn, #imm], etc.
5703 ldr[b]<cond> r0, [r2, #imm]. */
5705 dsc
->modinsn
[0] = (insn1
& 0xfff0) | 0x2;
5706 dsc
->modinsn
[1] = insn2
& 0x0fff;
5709 /* ldr[b]<cond> rt, [rn, rm], etc.
5711 ldr[b]<cond> r0, [r2, r3]. */
5713 dsc
->modinsn
[0] = (insn1
& 0xfff0) | 0x2;
5714 dsc
->modinsn
[1] = (insn2
& 0x0ff0) | 0x3;
5724 arm_copy_ldr_str_ldrb_strb (struct gdbarch
*gdbarch
, uint32_t insn
,
5725 struct regcache
*regs
,
5726 arm_displaced_step_closure
*dsc
,
5727 int load
, int size
, int usermode
)
5729 int immed
= !bit (insn
, 25);
5730 int writeback
= (bit (insn
, 24) == 0 || bit (insn
, 21) != 0);
5731 unsigned int rt
= bits (insn
, 12, 15);
5732 unsigned int rn
= bits (insn
, 16, 19);
5733 unsigned int rm
= bits (insn
, 0, 3); /* Only valid if !immed. */
5735 if (!insn_references_pc (insn
, 0x000ff00ful
))
5736 return arm_copy_unmodified (gdbarch
, insn
, "load/store", dsc
);
5738 displaced_debug_printf ("copying %s%s r%d [r%d] insn %.8lx",
5739 load
? (size
== 1 ? "ldrb" : "ldr")
5740 : (size
== 1 ? "strb" : "str"),
5741 usermode
? "t" : "",
5743 (unsigned long) insn
);
5745 install_load_store (gdbarch
, regs
, dsc
, load
, immed
, writeback
, size
,
5746 usermode
, rt
, rm
, rn
);
5748 if (load
|| rt
!= ARM_PC_REGNUM
)
5750 dsc
->u
.ldst
.restore_r4
= 0;
5753 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5755 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5756 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x20000;
5758 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5760 {ldr,str}[b]<cond> r0, [r2, r3]. */
5761 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x20003;
5765 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5766 dsc
->u
.ldst
.restore_r4
= 1;
5767 dsc
->modinsn
[0] = 0xe92d8000; /* push {pc} */
5768 dsc
->modinsn
[1] = 0xe8bd0010; /* pop {r4} */
5769 dsc
->modinsn
[2] = 0xe044400f; /* sub r4, r4, pc. */
5770 dsc
->modinsn
[3] = 0xe2844008; /* add r4, r4, #8. */
5771 dsc
->modinsn
[4] = 0xe0800004; /* add r0, r0, r4. */
5775 dsc
->modinsn
[5] = (insn
& 0xfff00fff) | 0x20000;
5777 dsc
->modinsn
[5] = (insn
& 0xfff00ff0) | 0x20003;
5782 dsc
->cleanup
= load
? &cleanup_load
: &cleanup_store
;
5787 /* Cleanup LDM instructions with fully-populated register list. This is an
5788 unfortunate corner case: it's impossible to implement correctly by modifying
5789 the instruction. The issue is as follows: we have an instruction,
5793 which we must rewrite to avoid loading PC. A possible solution would be to
5794 do the load in two halves, something like (with suitable cleanup
5798 ldm[id][ab] r8!, {r0-r7}
5800 ldm[id][ab] r8, {r7-r14}
5803 but at present there's no suitable place for <temp>, since the scratch space
5804 is overwritten before the cleanup routine is called. For now, we simply
5805 emulate the instruction. */
5808 cleanup_block_load_all (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5809 arm_displaced_step_closure
*dsc
)
5811 int inc
= dsc
->u
.block
.increment
;
5812 int bump_before
= dsc
->u
.block
.before
? (inc
? 4 : -4) : 0;
5813 int bump_after
= dsc
->u
.block
.before
? 0 : (inc
? 4 : -4);
5814 uint32_t regmask
= dsc
->u
.block
.regmask
;
5815 int regno
= inc
? 0 : 15;
5816 CORE_ADDR xfer_addr
= dsc
->u
.block
.xfer_addr
;
5817 int exception_return
= dsc
->u
.block
.load
&& dsc
->u
.block
.user
5818 && (regmask
& 0x8000) != 0;
5819 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
5820 int do_transfer
= condition_true (dsc
->u
.block
.cond
, status
);
5821 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
5826 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5827 sensible we can do here. Complain loudly. */
5828 if (exception_return
)
5829 error (_("Cannot single-step exception return"));
5831 /* We don't handle any stores here for now. */
5832 gdb_assert (dsc
->u
.block
.load
!= 0);
5834 displaced_debug_printf ("emulating block transfer: %s %s %s",
5835 dsc
->u
.block
.load
? "ldm" : "stm",
5836 dsc
->u
.block
.increment
? "inc" : "dec",
5837 dsc
->u
.block
.before
? "before" : "after");
5844 while (regno
<= ARM_PC_REGNUM
&& (regmask
& (1 << regno
)) == 0)
5847 while (regno
>= 0 && (regmask
& (1 << regno
)) == 0)
5850 xfer_addr
+= bump_before
;
5852 memword
= read_memory_unsigned_integer (xfer_addr
, 4, byte_order
);
5853 displaced_write_reg (regs
, dsc
, regno
, memword
, LOAD_WRITE_PC
);
5855 xfer_addr
+= bump_after
;
5857 regmask
&= ~(1 << regno
);
5860 if (dsc
->u
.block
.writeback
)
5861 displaced_write_reg (regs
, dsc
, dsc
->u
.block
.rn
, xfer_addr
,
5865 /* Clean up an STM which included the PC in the register list. */
5868 cleanup_block_store_pc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5869 arm_displaced_step_closure
*dsc
)
5871 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
5872 int store_executed
= condition_true (dsc
->u
.block
.cond
, status
);
5873 CORE_ADDR pc_stored_at
, transferred_regs
5874 = count_one_bits (dsc
->u
.block
.regmask
);
5875 CORE_ADDR stm_insn_addr
;
5878 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
5880 /* If condition code fails, there's nothing else to do. */
5881 if (!store_executed
)
5884 if (dsc
->u
.block
.increment
)
5886 pc_stored_at
= dsc
->u
.block
.xfer_addr
+ 4 * transferred_regs
;
5888 if (dsc
->u
.block
.before
)
5893 pc_stored_at
= dsc
->u
.block
.xfer_addr
;
5895 if (dsc
->u
.block
.before
)
5899 pc_val
= read_memory_unsigned_integer (pc_stored_at
, 4, byte_order
);
5900 stm_insn_addr
= dsc
->scratch_base
;
5901 offset
= pc_val
- stm_insn_addr
;
5903 displaced_debug_printf ("detected PC offset %.8lx for STM instruction",
5906 /* Rewrite the stored PC to the proper value for the non-displaced original
5908 write_memory_unsigned_integer (pc_stored_at
, 4, byte_order
,
5909 dsc
->insn_addr
+ offset
);
5912 /* Clean up an LDM which includes the PC in the register list. We clumped all
5913 the registers in the transferred list into a contiguous range r0...rX (to
5914 avoid loading PC directly and losing control of the debugged program), so we
5915 must undo that here. */
5918 cleanup_block_load_pc (struct gdbarch
*gdbarch
,
5919 struct regcache
*regs
,
5920 arm_displaced_step_closure
*dsc
)
5922 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
5923 int load_executed
= condition_true (dsc
->u
.block
.cond
, status
);
5924 unsigned int mask
= dsc
->u
.block
.regmask
, write_reg
= ARM_PC_REGNUM
;
5925 unsigned int regs_loaded
= count_one_bits (mask
);
5926 unsigned int num_to_shuffle
= regs_loaded
, clobbered
;
5928 /* The method employed here will fail if the register list is fully populated
5929 (we need to avoid loading PC directly). */
5930 gdb_assert (num_to_shuffle
< 16);
5935 clobbered
= (1 << num_to_shuffle
) - 1;
5937 while (num_to_shuffle
> 0)
5939 if ((mask
& (1 << write_reg
)) != 0)
5941 unsigned int read_reg
= num_to_shuffle
- 1;
5943 if (read_reg
!= write_reg
)
5945 ULONGEST rval
= displaced_read_reg (regs
, dsc
, read_reg
);
5946 displaced_write_reg (regs
, dsc
, write_reg
, rval
, LOAD_WRITE_PC
);
5947 displaced_debug_printf ("LDM: move loaded register r%d to r%d",
5948 read_reg
, write_reg
);
5951 displaced_debug_printf ("LDM: register r%d already in the right "
5952 "place", write_reg
);
5954 clobbered
&= ~(1 << write_reg
);
5962 /* Restore any registers we scribbled over. */
5963 for (write_reg
= 0; clobbered
!= 0; write_reg
++)
5965 if ((clobbered
& (1 << write_reg
)) != 0)
5967 displaced_write_reg (regs
, dsc
, write_reg
, dsc
->tmp
[write_reg
],
5969 displaced_debug_printf ("LDM: restored clobbered register r%d",
5971 clobbered
&= ~(1 << write_reg
);
5975 /* Perform register writeback manually. */
5976 if (dsc
->u
.block
.writeback
)
5978 ULONGEST new_rn_val
= dsc
->u
.block
.xfer_addr
;
5980 if (dsc
->u
.block
.increment
)
5981 new_rn_val
+= regs_loaded
* 4;
5983 new_rn_val
-= regs_loaded
* 4;
5985 displaced_write_reg (regs
, dsc
, dsc
->u
.block
.rn
, new_rn_val
,
5990 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
5991 in user-level code (in particular exception return, ldm rn, {...pc}^). */
5994 arm_copy_block_xfer (struct gdbarch
*gdbarch
, uint32_t insn
,
5995 struct regcache
*regs
,
5996 arm_displaced_step_closure
*dsc
)
5998 int load
= bit (insn
, 20);
5999 int user
= bit (insn
, 22);
6000 int increment
= bit (insn
, 23);
6001 int before
= bit (insn
, 24);
6002 int writeback
= bit (insn
, 21);
6003 int rn
= bits (insn
, 16, 19);
6005 /* Block transfers which don't mention PC can be run directly
6007 if (rn
!= ARM_PC_REGNUM
&& (insn
& 0x8000) == 0)
6008 return arm_copy_unmodified (gdbarch
, insn
, "ldm/stm", dsc
);
6010 if (rn
== ARM_PC_REGNUM
)
6012 warning (_("displaced: Unpredictable LDM or STM with "
6013 "base register r15"));
6014 return arm_copy_unmodified (gdbarch
, insn
, "unpredictable ldm/stm", dsc
);
6017 displaced_debug_printf ("copying block transfer insn %.8lx",
6018 (unsigned long) insn
);
6020 dsc
->u
.block
.xfer_addr
= displaced_read_reg (regs
, dsc
, rn
);
6021 dsc
->u
.block
.rn
= rn
;
6023 dsc
->u
.block
.load
= load
;
6024 dsc
->u
.block
.user
= user
;
6025 dsc
->u
.block
.increment
= increment
;
6026 dsc
->u
.block
.before
= before
;
6027 dsc
->u
.block
.writeback
= writeback
;
6028 dsc
->u
.block
.cond
= bits (insn
, 28, 31);
6030 dsc
->u
.block
.regmask
= insn
& 0xffff;
6034 if ((insn
& 0xffff) == 0xffff)
6036 /* LDM with a fully-populated register list. This case is
6037 particularly tricky. Implement for now by fully emulating the
6038 instruction (which might not behave perfectly in all cases, but
6039 these instructions should be rare enough for that not to matter
6041 dsc
->modinsn
[0] = ARM_NOP
;
6043 dsc
->cleanup
= &cleanup_block_load_all
;
6047 /* LDM of a list of registers which includes PC. Implement by
6048 rewriting the list of registers to be transferred into a
6049 contiguous chunk r0...rX before doing the transfer, then shuffling
6050 registers into the correct places in the cleanup routine. */
6051 unsigned int regmask
= insn
& 0xffff;
6052 unsigned int num_in_list
= count_one_bits (regmask
), new_regmask
;
6055 for (i
= 0; i
< num_in_list
; i
++)
6056 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
6058 /* Writeback makes things complicated. We need to avoid clobbering
6059 the base register with one of the registers in our modified
6060 register list, but just using a different register can't work in
6063 ldm r14!, {r0-r13,pc}
6065 which would need to be rewritten as:
6069 but that can't work, because there's no free register for N.
6071 Solve this by turning off the writeback bit, and emulating
6072 writeback manually in the cleanup routine. */
6077 new_regmask
= (1 << num_in_list
) - 1;
6079 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
6080 "%.4x, modified list %.4x",
6081 rn
, writeback
? "!" : "",
6082 (int) insn
& 0xffff, new_regmask
);
6084 dsc
->modinsn
[0] = (insn
& ~0xffff) | (new_regmask
& 0xffff);
6086 dsc
->cleanup
= &cleanup_block_load_pc
;
6091 /* STM of a list of registers which includes PC. Run the instruction
6092 as-is, but out of line: this will store the wrong value for the PC,
6093 so we must manually fix up the memory in the cleanup routine.
6094 Doing things this way has the advantage that we can auto-detect
6095 the offset of the PC write (which is architecture-dependent) in
6096 the cleanup routine. */
6097 dsc
->modinsn
[0] = insn
;
6099 dsc
->cleanup
= &cleanup_block_store_pc
;
6106 thumb2_copy_block_xfer (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
6107 struct regcache
*regs
,
6108 arm_displaced_step_closure
*dsc
)
6110 int rn
= bits (insn1
, 0, 3);
6111 int load
= bit (insn1
, 4);
6112 int writeback
= bit (insn1
, 5);
6114 /* Block transfers which don't mention PC can be run directly
6116 if (rn
!= ARM_PC_REGNUM
&& (insn2
& 0x8000) == 0)
6117 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "ldm/stm", dsc
);
6119 if (rn
== ARM_PC_REGNUM
)
6121 warning (_("displaced: Unpredictable LDM or STM with "
6122 "base register r15"));
6123 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6124 "unpredictable ldm/stm", dsc
);
6127 displaced_debug_printf ("copying block transfer insn %.4x%.4x",
6130 /* Clear bit 13, since it should be always zero. */
6131 dsc
->u
.block
.regmask
= (insn2
& 0xdfff);
6132 dsc
->u
.block
.rn
= rn
;
6134 dsc
->u
.block
.load
= load
;
6135 dsc
->u
.block
.user
= 0;
6136 dsc
->u
.block
.increment
= bit (insn1
, 7);
6137 dsc
->u
.block
.before
= bit (insn1
, 8);
6138 dsc
->u
.block
.writeback
= writeback
;
6139 dsc
->u
.block
.cond
= INST_AL
;
6140 dsc
->u
.block
.xfer_addr
= displaced_read_reg (regs
, dsc
, rn
);
6144 if (dsc
->u
.block
.regmask
== 0xffff)
6146 /* This branch is impossible to happen. */
6151 unsigned int regmask
= dsc
->u
.block
.regmask
;
6152 unsigned int num_in_list
= count_one_bits (regmask
), new_regmask
;
6155 for (i
= 0; i
< num_in_list
; i
++)
6156 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
6161 new_regmask
= (1 << num_in_list
) - 1;
6163 displaced_debug_printf ("LDM r%d%s, {..., pc}: original reg list "
6164 "%.4x, modified list %.4x",
6165 rn
, writeback
? "!" : "",
6166 (int) dsc
->u
.block
.regmask
, new_regmask
);
6168 dsc
->modinsn
[0] = insn1
;
6169 dsc
->modinsn
[1] = (new_regmask
& 0xffff);
6172 dsc
->cleanup
= &cleanup_block_load_pc
;
6177 dsc
->modinsn
[0] = insn1
;
6178 dsc
->modinsn
[1] = insn2
;
6180 dsc
->cleanup
= &cleanup_block_store_pc
;
6185 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6186 This is used to avoid a dependency on BFD's bfd_endian enum. */
6189 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr
, int len
,
6192 return read_memory_unsigned_integer (memaddr
, len
,
6193 (enum bfd_endian
) byte_order
);
6196 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6199 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs
*self
,
6202 return gdbarch_addr_bits_remove (self
->regcache
->arch (), val
);
6205 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6208 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs
*self
)
6213 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6216 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs
*self
)
6218 return arm_is_thumb (self
->regcache
);
6221 /* single_step() is called just before we want to resume the inferior,
6222 if we want to single-step it but there is no hardware or kernel
6223 single-step support. We find the target of the coming instructions
6224 and breakpoint them. */
6226 std::vector
<CORE_ADDR
>
6227 arm_software_single_step (struct regcache
*regcache
)
6229 struct gdbarch
*gdbarch
= regcache
->arch ();
6230 struct arm_get_next_pcs next_pcs_ctx
;
6232 arm_get_next_pcs_ctor (&next_pcs_ctx
,
6233 &arm_get_next_pcs_ops
,
6234 gdbarch_byte_order (gdbarch
),
6235 gdbarch_byte_order_for_code (gdbarch
),
6239 std::vector
<CORE_ADDR
> next_pcs
= arm_get_next_pcs (&next_pcs_ctx
);
6241 for (CORE_ADDR
&pc_ref
: next_pcs
)
6242 pc_ref
= gdbarch_addr_bits_remove (gdbarch
, pc_ref
);
6247 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6248 for Linux, where some SVC instructions must be treated specially. */
6251 cleanup_svc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6252 arm_displaced_step_closure
*dsc
)
6254 CORE_ADDR resume_addr
= dsc
->insn_addr
+ dsc
->insn_size
;
6256 displaced_debug_printf ("cleanup for svc, resume at %.8lx",
6257 (unsigned long) resume_addr
);
6259 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, resume_addr
, BRANCH_WRITE_PC
);
6263 /* Common copy routine for svc instruction. */
6266 install_svc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6267 arm_displaced_step_closure
*dsc
)
6269 /* Preparation: none.
6270 Insn: unmodified svc.
6271 Cleanup: pc <- insn_addr + insn_size. */
6273 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6275 dsc
->wrote_to_pc
= 1;
6277 /* Allow OS-specific code to override SVC handling. */
6278 if (dsc
->u
.svc
.copy_svc_os
)
6279 return dsc
->u
.svc
.copy_svc_os (gdbarch
, regs
, dsc
);
6282 dsc
->cleanup
= &cleanup_svc
;
6288 arm_copy_svc (struct gdbarch
*gdbarch
, uint32_t insn
,
6289 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
6292 displaced_debug_printf ("copying svc insn %.8lx",
6293 (unsigned long) insn
);
6295 dsc
->modinsn
[0] = insn
;
6297 return install_svc (gdbarch
, regs
, dsc
);
6301 thumb_copy_svc (struct gdbarch
*gdbarch
, uint16_t insn
,
6302 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
6305 displaced_debug_printf ("copying svc insn %.4x", insn
);
6307 dsc
->modinsn
[0] = insn
;
6309 return install_svc (gdbarch
, regs
, dsc
);
6312 /* Copy undefined instructions. */
6315 arm_copy_undef (struct gdbarch
*gdbarch
, uint32_t insn
,
6316 arm_displaced_step_closure
*dsc
)
6318 displaced_debug_printf ("copying undefined insn %.8lx",
6319 (unsigned long) insn
);
6321 dsc
->modinsn
[0] = insn
;
6327 thumb_32bit_copy_undef (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
6328 arm_displaced_step_closure
*dsc
)
6331 displaced_debug_printf ("copying undefined insn %.4x %.4x",
6332 (unsigned short) insn1
, (unsigned short) insn2
);
6334 dsc
->modinsn
[0] = insn1
;
6335 dsc
->modinsn
[1] = insn2
;
6341 /* Copy unpredictable instructions. */
6344 arm_copy_unpred (struct gdbarch
*gdbarch
, uint32_t insn
,
6345 arm_displaced_step_closure
*dsc
)
6347 displaced_debug_printf ("copying unpredictable insn %.8lx",
6348 (unsigned long) insn
);
6350 dsc
->modinsn
[0] = insn
;
6355 /* The decode_* functions are instruction decoding helpers. They mostly follow
6356 the presentation in the ARM ARM. */
6359 arm_decode_misc_memhint_neon (struct gdbarch
*gdbarch
, uint32_t insn
,
6360 struct regcache
*regs
,
6361 arm_displaced_step_closure
*dsc
)
6363 unsigned int op1
= bits (insn
, 20, 26), op2
= bits (insn
, 4, 7);
6364 unsigned int rn
= bits (insn
, 16, 19);
6366 if (op1
== 0x10 && (op2
& 0x2) == 0x0 && (rn
& 0x1) == 0x0)
6367 return arm_copy_unmodified (gdbarch
, insn
, "cps", dsc
);
6368 else if (op1
== 0x10 && op2
== 0x0 && (rn
& 0x1) == 0x1)
6369 return arm_copy_unmodified (gdbarch
, insn
, "setend", dsc
);
6370 else if ((op1
& 0x60) == 0x20)
6371 return arm_copy_unmodified (gdbarch
, insn
, "neon dataproc", dsc
);
6372 else if ((op1
& 0x71) == 0x40)
6373 return arm_copy_unmodified (gdbarch
, insn
, "neon elt/struct load/store",
6375 else if ((op1
& 0x77) == 0x41)
6376 return arm_copy_unmodified (gdbarch
, insn
, "unallocated mem hint", dsc
);
6377 else if ((op1
& 0x77) == 0x45)
6378 return arm_copy_preload (gdbarch
, insn
, regs
, dsc
); /* pli. */
6379 else if ((op1
& 0x77) == 0x51)
6382 return arm_copy_preload (gdbarch
, insn
, regs
, dsc
); /* pld/pldw. */
6384 return arm_copy_unpred (gdbarch
, insn
, dsc
);
6386 else if ((op1
& 0x77) == 0x55)
6387 return arm_copy_preload (gdbarch
, insn
, regs
, dsc
); /* pld/pldw. */
6388 else if (op1
== 0x57)
6391 case 0x1: return arm_copy_unmodified (gdbarch
, insn
, "clrex", dsc
);
6392 case 0x4: return arm_copy_unmodified (gdbarch
, insn
, "dsb", dsc
);
6393 case 0x5: return arm_copy_unmodified (gdbarch
, insn
, "dmb", dsc
);
6394 case 0x6: return arm_copy_unmodified (gdbarch
, insn
, "isb", dsc
);
6395 default: return arm_copy_unpred (gdbarch
, insn
, dsc
);
6397 else if ((op1
& 0x63) == 0x43)
6398 return arm_copy_unpred (gdbarch
, insn
, dsc
);
6399 else if ((op2
& 0x1) == 0x0)
6400 switch (op1
& ~0x80)
6403 return arm_copy_unmodified (gdbarch
, insn
, "unallocated mem hint", dsc
);
6405 return arm_copy_preload_reg (gdbarch
, insn
, regs
, dsc
); /* pli reg. */
6406 case 0x71: case 0x75:
6408 return arm_copy_preload_reg (gdbarch
, insn
, regs
, dsc
);
6409 case 0x63: case 0x67: case 0x73: case 0x77:
6410 return arm_copy_unpred (gdbarch
, insn
, dsc
);
6412 return arm_copy_undef (gdbarch
, insn
, dsc
);
6415 return arm_copy_undef (gdbarch
, insn
, dsc
); /* Probably unreachable. */
6419 arm_decode_unconditional (struct gdbarch
*gdbarch
, uint32_t insn
,
6420 struct regcache
*regs
,
6421 arm_displaced_step_closure
*dsc
)
6423 if (bit (insn
, 27) == 0)
6424 return arm_decode_misc_memhint_neon (gdbarch
, insn
, regs
, dsc
);
6425 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6426 else switch (((insn
& 0x7000000) >> 23) | ((insn
& 0x100000) >> 20))
6429 return arm_copy_unmodified (gdbarch
, insn
, "srs", dsc
);
6432 return arm_copy_unmodified (gdbarch
, insn
, "rfe", dsc
);
6434 case 0x4: case 0x5: case 0x6: case 0x7:
6435 return arm_copy_b_bl_blx (gdbarch
, insn
, regs
, dsc
);
6438 switch ((insn
& 0xe00000) >> 21)
6440 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6442 return arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
);
6445 return arm_copy_unmodified (gdbarch
, insn
, "mcrr/mcrr2", dsc
);
6448 return arm_copy_undef (gdbarch
, insn
, dsc
);
6453 int rn_f
= (bits (insn
, 16, 19) == 0xf);
6454 switch ((insn
& 0xe00000) >> 21)
6457 /* ldc/ldc2 imm (undefined for rn == pc). */
6458 return rn_f
? arm_copy_undef (gdbarch
, insn
, dsc
)
6459 : arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
);
6462 return arm_copy_unmodified (gdbarch
, insn
, "mrrc/mrrc2", dsc
);
6464 case 0x4: case 0x5: case 0x6: case 0x7:
6465 /* ldc/ldc2 lit (undefined for rn != pc). */
6466 return rn_f
? arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
)
6467 : arm_copy_undef (gdbarch
, insn
, dsc
);
6470 return arm_copy_undef (gdbarch
, insn
, dsc
);
6475 return arm_copy_unmodified (gdbarch
, insn
, "stc/stc2", dsc
);
6478 if (bits (insn
, 16, 19) == 0xf)
6480 return arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
);
6482 return arm_copy_undef (gdbarch
, insn
, dsc
);
6486 return arm_copy_unmodified (gdbarch
, insn
, "mcr/mcr2", dsc
);
6488 return arm_copy_unmodified (gdbarch
, insn
, "cdp/cdp2", dsc
);
6492 return arm_copy_unmodified (gdbarch
, insn
, "mrc/mrc2", dsc
);
6494 return arm_copy_unmodified (gdbarch
, insn
, "cdp/cdp2", dsc
);
6497 return arm_copy_undef (gdbarch
, insn
, dsc
);
6501 /* Decode miscellaneous instructions in dp/misc encoding space. */
6504 arm_decode_miscellaneous (struct gdbarch
*gdbarch
, uint32_t insn
,
6505 struct regcache
*regs
,
6506 arm_displaced_step_closure
*dsc
)
6508 unsigned int op2
= bits (insn
, 4, 6);
6509 unsigned int op
= bits (insn
, 21, 22);
6514 return arm_copy_unmodified (gdbarch
, insn
, "mrs/msr", dsc
);
6517 if (op
== 0x1) /* bx. */
6518 return arm_copy_bx_blx_reg (gdbarch
, insn
, regs
, dsc
);
6520 return arm_copy_unmodified (gdbarch
, insn
, "clz", dsc
);
6522 return arm_copy_undef (gdbarch
, insn
, dsc
);
6526 /* Not really supported. */
6527 return arm_copy_unmodified (gdbarch
, insn
, "bxj", dsc
);
6529 return arm_copy_undef (gdbarch
, insn
, dsc
);
6533 return arm_copy_bx_blx_reg (gdbarch
, insn
,
6534 regs
, dsc
); /* blx register. */
6536 return arm_copy_undef (gdbarch
, insn
, dsc
);
6539 return arm_copy_unmodified (gdbarch
, insn
, "saturating add/sub", dsc
);
6543 return arm_copy_unmodified (gdbarch
, insn
, "bkpt", dsc
);
6545 /* Not really supported. */
6546 return arm_copy_unmodified (gdbarch
, insn
, "smc", dsc
);
6550 return arm_copy_undef (gdbarch
, insn
, dsc
);
6555 arm_decode_dp_misc (struct gdbarch
*gdbarch
, uint32_t insn
,
6556 struct regcache
*regs
,
6557 arm_displaced_step_closure
*dsc
)
6560 switch (bits (insn
, 20, 24))
6563 return arm_copy_unmodified (gdbarch
, insn
, "movw", dsc
);
6566 return arm_copy_unmodified (gdbarch
, insn
, "movt", dsc
);
6568 case 0x12: case 0x16:
6569 return arm_copy_unmodified (gdbarch
, insn
, "msr imm", dsc
);
6572 return arm_copy_alu_imm (gdbarch
, insn
, regs
, dsc
);
6576 uint32_t op1
= bits (insn
, 20, 24), op2
= bits (insn
, 4, 7);
6578 if ((op1
& 0x19) != 0x10 && (op2
& 0x1) == 0x0)
6579 return arm_copy_alu_reg (gdbarch
, insn
, regs
, dsc
);
6580 else if ((op1
& 0x19) != 0x10 && (op2
& 0x9) == 0x1)
6581 return arm_copy_alu_shifted_reg (gdbarch
, insn
, regs
, dsc
);
6582 else if ((op1
& 0x19) == 0x10 && (op2
& 0x8) == 0x0)
6583 return arm_decode_miscellaneous (gdbarch
, insn
, regs
, dsc
);
6584 else if ((op1
& 0x19) == 0x10 && (op2
& 0x9) == 0x8)
6585 return arm_copy_unmodified (gdbarch
, insn
, "halfword mul/mla", dsc
);
6586 else if ((op1
& 0x10) == 0x00 && op2
== 0x9)
6587 return arm_copy_unmodified (gdbarch
, insn
, "mul/mla", dsc
);
6588 else if ((op1
& 0x10) == 0x10 && op2
== 0x9)
6589 return arm_copy_unmodified (gdbarch
, insn
, "synch", dsc
);
6590 else if (op2
== 0xb || (op2
& 0xd) == 0xd)
6591 /* 2nd arg means "unprivileged". */
6592 return arm_copy_extra_ld_st (gdbarch
, insn
, (op1
& 0x12) == 0x02, regs
,
6596 /* Should be unreachable. */
6601 arm_decode_ld_st_word_ubyte (struct gdbarch
*gdbarch
, uint32_t insn
,
6602 struct regcache
*regs
,
6603 arm_displaced_step_closure
*dsc
)
6605 int a
= bit (insn
, 25), b
= bit (insn
, 4);
6606 uint32_t op1
= bits (insn
, 20, 24);
6608 if ((!a
&& (op1
& 0x05) == 0x00 && (op1
& 0x17) != 0x02)
6609 || (a
&& (op1
& 0x05) == 0x00 && (op1
& 0x17) != 0x02 && !b
))
6610 return arm_copy_ldr_str_ldrb_strb (gdbarch
, insn
, regs
, dsc
, 0, 4, 0);
6611 else if ((!a
&& (op1
& 0x17) == 0x02)
6612 || (a
&& (op1
& 0x17) == 0x02 && !b
))
6613 return arm_copy_ldr_str_ldrb_strb (gdbarch
, insn
, regs
, dsc
, 0, 4, 1);
6614 else if ((!a
&& (op1
& 0x05) == 0x01 && (op1
& 0x17) != 0x03)
6615 || (a
&& (op1
& 0x05) == 0x01 && (op1
& 0x17) != 0x03 && !b
))
6616 return arm_copy_ldr_str_ldrb_strb (gdbarch
, insn
, regs
, dsc
, 1, 4, 0);
6617 else if ((!a
&& (op1
& 0x17) == 0x03)
6618 || (a
&& (op1
& 0x17) == 0x03 && !b
))
6619 return arm_copy_ldr_str_ldrb_strb (gdbarch
, insn
, regs
, dsc
, 1, 4, 1);
6620 else if ((!a
&& (op1
& 0x05) == 0x04 && (op1
& 0x17) != 0x06)
6621 || (a
&& (op1
& 0x05) == 0x04 && (op1
& 0x17) != 0x06 && !b
))
6622 return arm_copy_ldr_str_ldrb_strb (gdbarch
, insn
, regs
, dsc
, 0, 1, 0);
6623 else if ((!a
&& (op1
& 0x17) == 0x06)
6624 || (a
&& (op1
& 0x17) == 0x06 && !b
))
6625 return arm_copy_ldr_str_ldrb_strb (gdbarch
, insn
, regs
, dsc
, 0, 1, 1);
6626 else if ((!a
&& (op1
& 0x05) == 0x05 && (op1
& 0x17) != 0x07)
6627 || (a
&& (op1
& 0x05) == 0x05 && (op1
& 0x17) != 0x07 && !b
))
6628 return arm_copy_ldr_str_ldrb_strb (gdbarch
, insn
, regs
, dsc
, 1, 1, 0);
6629 else if ((!a
&& (op1
& 0x17) == 0x07)
6630 || (a
&& (op1
& 0x17) == 0x07 && !b
))
6631 return arm_copy_ldr_str_ldrb_strb (gdbarch
, insn
, regs
, dsc
, 1, 1, 1);
6633 /* Should be unreachable. */
6638 arm_decode_media (struct gdbarch
*gdbarch
, uint32_t insn
,
6639 arm_displaced_step_closure
*dsc
)
6641 switch (bits (insn
, 20, 24))
6643 case 0x00: case 0x01: case 0x02: case 0x03:
6644 return arm_copy_unmodified (gdbarch
, insn
, "parallel add/sub signed", dsc
);
6646 case 0x04: case 0x05: case 0x06: case 0x07:
6647 return arm_copy_unmodified (gdbarch
, insn
, "parallel add/sub unsigned", dsc
);
6649 case 0x08: case 0x09: case 0x0a: case 0x0b:
6650 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6651 return arm_copy_unmodified (gdbarch
, insn
,
6652 "decode/pack/unpack/saturate/reverse", dsc
);
6655 if (bits (insn
, 5, 7) == 0) /* op2. */
6657 if (bits (insn
, 12, 15) == 0xf)
6658 return arm_copy_unmodified (gdbarch
, insn
, "usad8", dsc
);
6660 return arm_copy_unmodified (gdbarch
, insn
, "usada8", dsc
);
6663 return arm_copy_undef (gdbarch
, insn
, dsc
);
6665 case 0x1a: case 0x1b:
6666 if (bits (insn
, 5, 6) == 0x2) /* op2[1:0]. */
6667 return arm_copy_unmodified (gdbarch
, insn
, "sbfx", dsc
);
6669 return arm_copy_undef (gdbarch
, insn
, dsc
);
6671 case 0x1c: case 0x1d:
6672 if (bits (insn
, 5, 6) == 0x0) /* op2[1:0]. */
6674 if (bits (insn
, 0, 3) == 0xf)
6675 return arm_copy_unmodified (gdbarch
, insn
, "bfc", dsc
);
6677 return arm_copy_unmodified (gdbarch
, insn
, "bfi", dsc
);
6680 return arm_copy_undef (gdbarch
, insn
, dsc
);
6682 case 0x1e: case 0x1f:
6683 if (bits (insn
, 5, 6) == 0x2) /* op2[1:0]. */
6684 return arm_copy_unmodified (gdbarch
, insn
, "ubfx", dsc
);
6686 return arm_copy_undef (gdbarch
, insn
, dsc
);
6689 /* Should be unreachable. */
6694 arm_decode_b_bl_ldmstm (struct gdbarch
*gdbarch
, uint32_t insn
,
6695 struct regcache
*regs
,
6696 arm_displaced_step_closure
*dsc
)
6699 return arm_copy_b_bl_blx (gdbarch
, insn
, regs
, dsc
);
6701 return arm_copy_block_xfer (gdbarch
, insn
, regs
, dsc
);
6705 arm_decode_ext_reg_ld_st (struct gdbarch
*gdbarch
, uint32_t insn
,
6706 struct regcache
*regs
,
6707 arm_displaced_step_closure
*dsc
)
6709 unsigned int opcode
= bits (insn
, 20, 24);
6713 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6714 return arm_copy_unmodified (gdbarch
, insn
, "vfp/neon mrrc/mcrr", dsc
);
6716 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6717 case 0x12: case 0x16:
6718 return arm_copy_unmodified (gdbarch
, insn
, "vfp/neon vstm/vpush", dsc
);
6720 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6721 case 0x13: case 0x17:
6722 return arm_copy_unmodified (gdbarch
, insn
, "vfp/neon vldm/vpop", dsc
);
6724 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6725 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6726 /* Note: no writeback for these instructions. Bit 25 will always be
6727 zero though (via caller), so the following works OK. */
6728 return arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
);
6731 /* Should be unreachable. */
6735 /* Decode shifted register instructions. */
6738 thumb2_decode_dp_shift_reg (struct gdbarch
*gdbarch
, uint16_t insn1
,
6739 uint16_t insn2
, struct regcache
*regs
,
6740 arm_displaced_step_closure
*dsc
)
6742 /* PC is only allowed to be used in instruction MOV. */
6744 unsigned int op
= bits (insn1
, 5, 8);
6745 unsigned int rn
= bits (insn1
, 0, 3);
6747 if (op
== 0x2 && rn
== 0xf) /* MOV */
6748 return thumb2_copy_alu_imm (gdbarch
, insn1
, insn2
, regs
, dsc
);
6750 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6751 "dp (shift reg)", dsc
);
6755 /* Decode extension register load/store. Exactly the same as
6756 arm_decode_ext_reg_ld_st. */
6759 thumb2_decode_ext_reg_ld_st (struct gdbarch
*gdbarch
, uint16_t insn1
,
6760 uint16_t insn2
, struct regcache
*regs
,
6761 arm_displaced_step_closure
*dsc
)
6763 unsigned int opcode
= bits (insn1
, 4, 8);
6767 case 0x04: case 0x05:
6768 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6769 "vfp/neon vmov", dsc
);
6771 case 0x08: case 0x0c: /* 01x00 */
6772 case 0x0a: case 0x0e: /* 01x10 */
6773 case 0x12: case 0x16: /* 10x10 */
6774 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6775 "vfp/neon vstm/vpush", dsc
);
6777 case 0x09: case 0x0d: /* 01x01 */
6778 case 0x0b: case 0x0f: /* 01x11 */
6779 case 0x13: case 0x17: /* 10x11 */
6780 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6781 "vfp/neon vldm/vpop", dsc
);
6783 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6784 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6786 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6787 return thumb2_copy_copro_load_store (gdbarch
, insn1
, insn2
, regs
, dsc
);
6790 /* Should be unreachable. */
6795 arm_decode_svc_copro (struct gdbarch
*gdbarch
, uint32_t insn
,
6796 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
6798 unsigned int op1
= bits (insn
, 20, 25);
6799 int op
= bit (insn
, 4);
6800 unsigned int coproc
= bits (insn
, 8, 11);
6802 if ((op1
& 0x20) == 0x00 && (op1
& 0x3a) != 0x00 && (coproc
& 0xe) == 0xa)
6803 return arm_decode_ext_reg_ld_st (gdbarch
, insn
, regs
, dsc
);
6804 else if ((op1
& 0x21) == 0x00 && (op1
& 0x3a) != 0x00
6805 && (coproc
& 0xe) != 0xa)
6807 return arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
);
6808 else if ((op1
& 0x21) == 0x01 && (op1
& 0x3a) != 0x00
6809 && (coproc
& 0xe) != 0xa)
6810 /* ldc/ldc2 imm/lit. */
6811 return arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
);
6812 else if ((op1
& 0x3e) == 0x00)
6813 return arm_copy_undef (gdbarch
, insn
, dsc
);
6814 else if ((op1
& 0x3e) == 0x04 && (coproc
& 0xe) == 0xa)
6815 return arm_copy_unmodified (gdbarch
, insn
, "neon 64bit xfer", dsc
);
6816 else if (op1
== 0x04 && (coproc
& 0xe) != 0xa)
6817 return arm_copy_unmodified (gdbarch
, insn
, "mcrr/mcrr2", dsc
);
6818 else if (op1
== 0x05 && (coproc
& 0xe) != 0xa)
6819 return arm_copy_unmodified (gdbarch
, insn
, "mrrc/mrrc2", dsc
);
6820 else if ((op1
& 0x30) == 0x20 && !op
)
6822 if ((coproc
& 0xe) == 0xa)
6823 return arm_copy_unmodified (gdbarch
, insn
, "vfp dataproc", dsc
);
6825 return arm_copy_unmodified (gdbarch
, insn
, "cdp/cdp2", dsc
);
6827 else if ((op1
& 0x30) == 0x20 && op
)
6828 return arm_copy_unmodified (gdbarch
, insn
, "neon 8/16/32 bit xfer", dsc
);
6829 else if ((op1
& 0x31) == 0x20 && op
&& (coproc
& 0xe) != 0xa)
6830 return arm_copy_unmodified (gdbarch
, insn
, "mcr/mcr2", dsc
);
6831 else if ((op1
& 0x31) == 0x21 && op
&& (coproc
& 0xe) != 0xa)
6832 return arm_copy_unmodified (gdbarch
, insn
, "mrc/mrc2", dsc
);
6833 else if ((op1
& 0x30) == 0x30)
6834 return arm_copy_svc (gdbarch
, insn
, regs
, dsc
);
6836 return arm_copy_undef (gdbarch
, insn
, dsc
); /* Possibly unreachable. */
6840 thumb2_decode_svc_copro (struct gdbarch
*gdbarch
, uint16_t insn1
,
6841 uint16_t insn2
, struct regcache
*regs
,
6842 arm_displaced_step_closure
*dsc
)
6844 unsigned int coproc
= bits (insn2
, 8, 11);
6845 unsigned int bit_5_8
= bits (insn1
, 5, 8);
6846 unsigned int bit_9
= bit (insn1
, 9);
6847 unsigned int bit_4
= bit (insn1
, 4);
6852 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6853 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6855 else if (bit_5_8
== 0) /* UNDEFINED. */
6856 return thumb_32bit_copy_undef (gdbarch
, insn1
, insn2
, dsc
);
6859 /*coproc is 101x. SIMD/VFP, ext registers load/store. */
6860 if ((coproc
& 0xe) == 0xa)
6861 return thumb2_decode_ext_reg_ld_st (gdbarch
, insn1
, insn2
, regs
,
6863 else /* coproc is not 101x. */
6865 if (bit_4
== 0) /* STC/STC2. */
6866 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6868 else /* LDC/LDC2 {literal, immediate}. */
6869 return thumb2_copy_copro_load_store (gdbarch
, insn1
, insn2
,
6875 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "coproc", dsc
);
6881 install_pc_relative (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6882 arm_displaced_step_closure
*dsc
, int rd
)
6888 Preparation: Rd <- PC
6894 int val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
6895 displaced_write_reg (regs
, dsc
, rd
, val
, CANNOT_WRITE_PC
);
6899 thumb_copy_pc_relative_16bit (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6900 arm_displaced_step_closure
*dsc
,
6901 int rd
, unsigned int imm
)
6904 /* Encoding T2: ADDS Rd, #imm */
6905 dsc
->modinsn
[0] = (0x3000 | (rd
<< 8) | imm
);
6907 install_pc_relative (gdbarch
, regs
, dsc
, rd
);
6913 thumb_decode_pc_relative_16bit (struct gdbarch
*gdbarch
, uint16_t insn
,
6914 struct regcache
*regs
,
6915 arm_displaced_step_closure
*dsc
)
6917 unsigned int rd
= bits (insn
, 8, 10);
6918 unsigned int imm8
= bits (insn
, 0, 7);
6920 displaced_debug_printf ("copying thumb adr r%d, #%d insn %.4x",
6923 return thumb_copy_pc_relative_16bit (gdbarch
, regs
, dsc
, rd
, imm8
);
6927 thumb_copy_pc_relative_32bit (struct gdbarch
*gdbarch
, uint16_t insn1
,
6928 uint16_t insn2
, struct regcache
*regs
,
6929 arm_displaced_step_closure
*dsc
)
6931 unsigned int rd
= bits (insn2
, 8, 11);
6932 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
6933 extract raw immediate encoding rather than computing immediate. When
6934 generating ADD or SUB instruction, we can simply perform OR operation to
6935 set immediate into ADD. */
6936 unsigned int imm_3_8
= insn2
& 0x70ff;
6937 unsigned int imm_i
= insn1
& 0x0400; /* Clear all bits except bit 10. */
6939 displaced_debug_printf ("copying thumb adr r%d, #%d:%d insn %.4x%.4x",
6940 rd
, imm_i
, imm_3_8
, insn1
, insn2
);
6942 if (bit (insn1
, 7)) /* Encoding T2 */
6944 /* Encoding T3: SUB Rd, Rd, #imm */
6945 dsc
->modinsn
[0] = (0xf1a0 | rd
| imm_i
);
6946 dsc
->modinsn
[1] = ((rd
<< 8) | imm_3_8
);
6948 else /* Encoding T3 */
6950 /* Encoding T3: ADD Rd, Rd, #imm */
6951 dsc
->modinsn
[0] = (0xf100 | rd
| imm_i
);
6952 dsc
->modinsn
[1] = ((rd
<< 8) | imm_3_8
);
6956 install_pc_relative (gdbarch
, regs
, dsc
, rd
);
6962 thumb_copy_16bit_ldr_literal (struct gdbarch
*gdbarch
, uint16_t insn1
,
6963 struct regcache
*regs
,
6964 arm_displaced_step_closure
*dsc
)
6966 unsigned int rt
= bits (insn1
, 8, 10);
6968 int imm8
= (bits (insn1
, 0, 7) << 2);
6974 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
6976 Insn: LDR R0, [R2, R3];
6977 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
6979 displaced_debug_printf ("copying thumb ldr r%d [pc #%d]", rt
, imm8
);
6981 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6982 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
6983 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
6984 pc
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
6985 /* The assembler calculates the required value of the offset from the
6986 Align(PC,4) value of this instruction to the label. */
6987 pc
= pc
& 0xfffffffc;
6989 displaced_write_reg (regs
, dsc
, 2, pc
, CANNOT_WRITE_PC
);
6990 displaced_write_reg (regs
, dsc
, 3, imm8
, CANNOT_WRITE_PC
);
6993 dsc
->u
.ldst
.xfersize
= 4;
6995 dsc
->u
.ldst
.immed
= 0;
6996 dsc
->u
.ldst
.writeback
= 0;
6997 dsc
->u
.ldst
.restore_r4
= 0;
6999 dsc
->modinsn
[0] = 0x58d0; /* ldr r0, [r2, r3]*/
7001 dsc
->cleanup
= &cleanup_load
;
7006 /* Copy Thumb cbnz/cbz instruction. */
7009 thumb_copy_cbnz_cbz (struct gdbarch
*gdbarch
, uint16_t insn1
,
7010 struct regcache
*regs
,
7011 arm_displaced_step_closure
*dsc
)
7013 int non_zero
= bit (insn1
, 11);
7014 unsigned int imm5
= (bit (insn1
, 9) << 6) | (bits (insn1
, 3, 7) << 1);
7015 CORE_ADDR from
= dsc
->insn_addr
;
7016 int rn
= bits (insn1
, 0, 2);
7017 int rn_val
= displaced_read_reg (regs
, dsc
, rn
);
7019 dsc
->u
.branch
.cond
= (rn_val
&& non_zero
) || (!rn_val
&& !non_zero
);
7020 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
7021 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
7022 condition is false, let it be, cleanup_branch will do nothing. */
7023 if (dsc
->u
.branch
.cond
)
7025 dsc
->u
.branch
.cond
= INST_AL
;
7026 dsc
->u
.branch
.dest
= from
+ 4 + imm5
;
7029 dsc
->u
.branch
.dest
= from
+ 2;
7031 dsc
->u
.branch
.link
= 0;
7032 dsc
->u
.branch
.exchange
= 0;
7034 displaced_debug_printf ("copying %s [r%d = 0x%x] insn %.4x to %.8lx",
7035 non_zero
? "cbnz" : "cbz",
7036 rn
, rn_val
, insn1
, dsc
->u
.branch
.dest
);
7038 dsc
->modinsn
[0] = THUMB_NOP
;
7040 dsc
->cleanup
= &cleanup_branch
;
7044 /* Copy Table Branch Byte/Halfword */
7046 thumb2_copy_table_branch (struct gdbarch
*gdbarch
, uint16_t insn1
,
7047 uint16_t insn2
, struct regcache
*regs
,
7048 arm_displaced_step_closure
*dsc
)
7050 ULONGEST rn_val
, rm_val
;
7051 int is_tbh
= bit (insn2
, 4);
7052 CORE_ADDR halfwords
= 0;
7053 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
7055 rn_val
= displaced_read_reg (regs
, dsc
, bits (insn1
, 0, 3));
7056 rm_val
= displaced_read_reg (regs
, dsc
, bits (insn2
, 0, 3));
7062 target_read_memory (rn_val
+ 2 * rm_val
, buf
, 2);
7063 halfwords
= extract_unsigned_integer (buf
, 2, byte_order
);
7069 target_read_memory (rn_val
+ rm_val
, buf
, 1);
7070 halfwords
= extract_unsigned_integer (buf
, 1, byte_order
);
7073 displaced_debug_printf ("%s base 0x%x offset 0x%x offset 0x%x",
7074 is_tbh
? "tbh" : "tbb",
7075 (unsigned int) rn_val
, (unsigned int) rm_val
,
7076 (unsigned int) halfwords
);
7078 dsc
->u
.branch
.cond
= INST_AL
;
7079 dsc
->u
.branch
.link
= 0;
7080 dsc
->u
.branch
.exchange
= 0;
7081 dsc
->u
.branch
.dest
= dsc
->insn_addr
+ 4 + 2 * halfwords
;
7083 dsc
->cleanup
= &cleanup_branch
;
7089 cleanup_pop_pc_16bit_all (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7090 arm_displaced_step_closure
*dsc
)
7093 int val
= displaced_read_reg (regs
, dsc
, 7);
7094 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, val
, BX_WRITE_PC
);
7097 val
= displaced_read_reg (regs
, dsc
, 8);
7098 displaced_write_reg (regs
, dsc
, 7, val
, CANNOT_WRITE_PC
);
7101 displaced_write_reg (regs
, dsc
, 8, dsc
->tmp
[0], CANNOT_WRITE_PC
);
7106 thumb_copy_pop_pc_16bit (struct gdbarch
*gdbarch
, uint16_t insn1
,
7107 struct regcache
*regs
,
7108 arm_displaced_step_closure
*dsc
)
7110 dsc
->u
.block
.regmask
= insn1
& 0x00ff;
7112 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
7115 (1) register list is full, that is, r0-r7 are used.
7116 Prepare: tmp[0] <- r8
7118 POP {r0, r1, ...., r6, r7}; remove PC from reglist
7119 MOV r8, r7; Move value of r7 to r8;
7120 POP {r7}; Store PC value into r7.
7122 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
7124 (2) register list is not full, supposing there are N registers in
7125 register list (except PC, 0 <= N <= 7).
7126 Prepare: for each i, 0 - N, tmp[i] <- ri.
7128 POP {r0, r1, ...., rN};
7130 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
7131 from tmp[] properly.
7133 displaced_debug_printf ("copying thumb pop {%.8x, pc} insn %.4x",
7134 dsc
->u
.block
.regmask
, insn1
);
7136 if (dsc
->u
.block
.regmask
== 0xff)
7138 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 8);
7140 dsc
->modinsn
[0] = (insn1
& 0xfeff); /* POP {r0,r1,...,r6, r7} */
7141 dsc
->modinsn
[1] = 0x46b8; /* MOV r8, r7 */
7142 dsc
->modinsn
[2] = 0xbc80; /* POP {r7} */
7145 dsc
->cleanup
= &cleanup_pop_pc_16bit_all
;
7149 unsigned int num_in_list
= count_one_bits (dsc
->u
.block
.regmask
);
7151 unsigned int new_regmask
;
7153 for (i
= 0; i
< num_in_list
+ 1; i
++)
7154 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
7156 new_regmask
= (1 << (num_in_list
+ 1)) - 1;
7158 displaced_debug_printf ("POP {..., pc}: original reg list %.4x, "
7159 "modified list %.4x",
7160 (int) dsc
->u
.block
.regmask
, new_regmask
);
7162 dsc
->u
.block
.regmask
|= 0x8000;
7163 dsc
->u
.block
.writeback
= 0;
7164 dsc
->u
.block
.cond
= INST_AL
;
7166 dsc
->modinsn
[0] = (insn1
& ~0x1ff) | (new_regmask
& 0xff);
7168 dsc
->cleanup
= &cleanup_block_load_pc
;
7175 thumb_process_displaced_16bit_insn (struct gdbarch
*gdbarch
, uint16_t insn1
,
7176 struct regcache
*regs
,
7177 arm_displaced_step_closure
*dsc
)
7179 unsigned short op_bit_12_15
= bits (insn1
, 12, 15);
7180 unsigned short op_bit_10_11
= bits (insn1
, 10, 11);
7183 /* 16-bit thumb instructions. */
7184 switch (op_bit_12_15
)
7186 /* Shift (imme), add, subtract, move and compare. */
7187 case 0: case 1: case 2: case 3:
7188 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
,
7189 "shift/add/sub/mov/cmp",
7193 switch (op_bit_10_11
)
7195 case 0: /* Data-processing */
7196 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
,
7200 case 1: /* Special data instructions and branch and exchange. */
7202 unsigned short op
= bits (insn1
, 7, 9);
7203 if (op
== 6 || op
== 7) /* BX or BLX */
7204 err
= thumb_copy_bx_blx_reg (gdbarch
, insn1
, regs
, dsc
);
7205 else if (bits (insn1
, 6, 7) != 0) /* ADD/MOV/CMP high registers. */
7206 err
= thumb_copy_alu_reg (gdbarch
, insn1
, regs
, dsc
);
7208 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "special data",
7212 default: /* LDR (literal) */
7213 err
= thumb_copy_16bit_ldr_literal (gdbarch
, insn1
, regs
, dsc
);
7216 case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
7217 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "ldr/str", dsc
);
7220 if (op_bit_10_11
< 2) /* Generate PC-relative address */
7221 err
= thumb_decode_pc_relative_16bit (gdbarch
, insn1
, regs
, dsc
);
7222 else /* Generate SP-relative address */
7223 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "sp-relative", dsc
);
7225 case 11: /* Misc 16-bit instructions */
7227 switch (bits (insn1
, 8, 11))
7229 case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
7230 err
= thumb_copy_cbnz_cbz (gdbarch
, insn1
, regs
, dsc
);
7232 case 12: case 13: /* POP */
7233 if (bit (insn1
, 8)) /* PC is in register list. */
7234 err
= thumb_copy_pop_pc_16bit (gdbarch
, insn1
, regs
, dsc
);
7236 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "pop", dsc
);
7238 case 15: /* If-Then, and hints */
7239 if (bits (insn1
, 0, 3))
7240 /* If-Then makes up to four following instructions conditional.
7241 IT instruction itself is not conditional, so handle it as a
7242 common unmodified instruction. */
7243 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "If-Then",
7246 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "hints", dsc
);
7249 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "misc", dsc
);
7254 if (op_bit_10_11
< 2) /* Store multiple registers */
7255 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "stm", dsc
);
7256 else /* Load multiple registers */
7257 err
= thumb_copy_unmodified_16bit (gdbarch
, insn1
, "ldm", dsc
);
7259 case 13: /* Conditional branch and supervisor call */
7260 if (bits (insn1
, 9, 11) != 7) /* conditional branch */
7261 err
= thumb_copy_b (gdbarch
, insn1
, dsc
);
7263 err
= thumb_copy_svc (gdbarch
, insn1
, regs
, dsc
);
7265 case 14: /* Unconditional branch */
7266 err
= thumb_copy_b (gdbarch
, insn1
, dsc
);
7273 internal_error (__FILE__
, __LINE__
,
7274 _("thumb_process_displaced_16bit_insn: Instruction decode error"));
7278 decode_thumb_32bit_ld_mem_hints (struct gdbarch
*gdbarch
,
7279 uint16_t insn1
, uint16_t insn2
,
7280 struct regcache
*regs
,
7281 arm_displaced_step_closure
*dsc
)
7283 int rt
= bits (insn2
, 12, 15);
7284 int rn
= bits (insn1
, 0, 3);
7285 int op1
= bits (insn1
, 7, 8);
7287 switch (bits (insn1
, 5, 6))
7289 case 0: /* Load byte and memory hints */
7290 if (rt
== 0xf) /* PLD/PLI */
7293 /* PLD literal or Encoding T3 of PLI(immediate, literal). */
7294 return thumb2_copy_preload (gdbarch
, insn1
, insn2
, regs
, dsc
);
7296 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7301 if (rn
== 0xf) /* LDRB/LDRSB (literal) */
7302 return thumb2_copy_load_literal (gdbarch
, insn1
, insn2
, regs
, dsc
,
7305 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7306 "ldrb{reg, immediate}/ldrbt",
7311 case 1: /* Load halfword and memory hints. */
7312 if (rt
== 0xf) /* PLD{W} and Unalloc memory hint. */
7313 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7314 "pld/unalloc memhint", dsc
);
7318 return thumb2_copy_load_literal (gdbarch
, insn1
, insn2
, regs
, dsc
,
7321 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7325 case 2: /* Load word */
7327 int insn2_bit_8_11
= bits (insn2
, 8, 11);
7330 return thumb2_copy_load_literal (gdbarch
, insn1
, insn2
, regs
, dsc
, 4);
7331 else if (op1
== 0x1) /* Encoding T3 */
7332 return thumb2_copy_load_reg_imm (gdbarch
, insn1
, insn2
, regs
, dsc
,
7334 else /* op1 == 0x0 */
7336 if (insn2_bit_8_11
== 0xc || (insn2_bit_8_11
& 0x9) == 0x9)
7337 /* LDR (immediate) */
7338 return thumb2_copy_load_reg_imm (gdbarch
, insn1
, insn2
, regs
,
7339 dsc
, bit (insn2
, 8), 1);
7340 else if (insn2_bit_8_11
== 0xe) /* LDRT */
7341 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7344 /* LDR (register) */
7345 return thumb2_copy_load_reg_imm (gdbarch
, insn1
, insn2
, regs
,
7351 return thumb_32bit_copy_undef (gdbarch
, insn1
, insn2
, dsc
);
7358 thumb_process_displaced_32bit_insn (struct gdbarch
*gdbarch
, uint16_t insn1
,
7359 uint16_t insn2
, struct regcache
*regs
,
7360 arm_displaced_step_closure
*dsc
)
7363 unsigned short op
= bit (insn2
, 15);
7364 unsigned int op1
= bits (insn1
, 11, 12);
7370 switch (bits (insn1
, 9, 10))
7375 /* Load/store {dual, exclusive}, table branch. */
7376 if (bits (insn1
, 7, 8) == 1 && bits (insn1
, 4, 5) == 1
7377 && bits (insn2
, 5, 7) == 0)
7378 err
= thumb2_copy_table_branch (gdbarch
, insn1
, insn2
, regs
,
7381 /* PC is not allowed to use in load/store {dual, exclusive}
7383 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7384 "load/store dual/ex", dsc
);
7386 else /* load/store multiple */
7388 switch (bits (insn1
, 7, 8))
7390 case 0: case 3: /* SRS, RFE */
7391 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7394 case 1: case 2: /* LDM/STM/PUSH/POP */
7395 err
= thumb2_copy_block_xfer (gdbarch
, insn1
, insn2
, regs
, dsc
);
7402 /* Data-processing (shift register). */
7403 err
= thumb2_decode_dp_shift_reg (gdbarch
, insn1
, insn2
, regs
,
7406 default: /* Coprocessor instructions. */
7407 err
= thumb2_decode_svc_copro (gdbarch
, insn1
, insn2
, regs
, dsc
);
7412 case 2: /* op1 = 2 */
7413 if (op
) /* Branch and misc control. */
7415 if (bit (insn2
, 14) /* BLX/BL */
7416 || bit (insn2
, 12) /* Unconditional branch */
7417 || (bits (insn1
, 7, 9) != 0x7)) /* Conditional branch */
7418 err
= thumb2_copy_b_bl_blx (gdbarch
, insn1
, insn2
, regs
, dsc
);
7420 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7425 if (bit (insn1
, 9)) /* Data processing (plain binary imm). */
7427 int dp_op
= bits (insn1
, 4, 8);
7428 int rn
= bits (insn1
, 0, 3);
7429 if ((dp_op
== 0 || dp_op
== 0xa) && rn
== 0xf)
7430 err
= thumb_copy_pc_relative_32bit (gdbarch
, insn1
, insn2
,
7433 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7436 else /* Data processing (modified immediate) */
7437 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7441 case 3: /* op1 = 3 */
7442 switch (bits (insn1
, 9, 10))
7446 err
= decode_thumb_32bit_ld_mem_hints (gdbarch
, insn1
, insn2
,
7448 else /* NEON Load/Store and Store single data item */
7449 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7450 "neon elt/struct load/store",
7453 case 1: /* op1 = 3, bits (9, 10) == 1 */
7454 switch (bits (insn1
, 7, 8))
7456 case 0: case 1: /* Data processing (register) */
7457 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7460 case 2: /* Multiply and absolute difference */
7461 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7462 "mul/mua/diff", dsc
);
7464 case 3: /* Long multiply and divide */
7465 err
= thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7470 default: /* Coprocessor instructions */
7471 err
= thumb2_decode_svc_copro (gdbarch
, insn1
, insn2
, regs
, dsc
);
7480 internal_error (__FILE__
, __LINE__
,
7481 _("thumb_process_displaced_32bit_insn: Instruction decode error"));
7486 thumb_process_displaced_insn (struct gdbarch
*gdbarch
, CORE_ADDR from
,
7487 struct regcache
*regs
,
7488 arm_displaced_step_closure
*dsc
)
7490 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
7492 = read_memory_unsigned_integer (from
, 2, byte_order_for_code
);
7494 displaced_debug_printf ("process thumb insn %.4x at %.8lx",
7495 insn1
, (unsigned long) from
);
7498 dsc
->insn_size
= thumb_insn_size (insn1
);
7499 if (thumb_insn_size (insn1
) == 4)
7502 = read_memory_unsigned_integer (from
+ 2, 2, byte_order_for_code
);
7503 thumb_process_displaced_32bit_insn (gdbarch
, insn1
, insn2
, regs
, dsc
);
7506 thumb_process_displaced_16bit_insn (gdbarch
, insn1
, regs
, dsc
);
7510 arm_process_displaced_insn (struct gdbarch
*gdbarch
, CORE_ADDR from
,
7511 CORE_ADDR to
, struct regcache
*regs
,
7512 arm_displaced_step_closure
*dsc
)
7515 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
7518 /* Most displaced instructions use a 1-instruction scratch space, so set this
7519 here and override below if/when necessary. */
7521 dsc
->insn_addr
= from
;
7522 dsc
->scratch_base
= to
;
7523 dsc
->cleanup
= NULL
;
7524 dsc
->wrote_to_pc
= 0;
7526 if (!displaced_in_arm_mode (regs
))
7527 return thumb_process_displaced_insn (gdbarch
, from
, regs
, dsc
);
7531 insn
= read_memory_unsigned_integer (from
, 4, byte_order_for_code
);
7532 displaced_debug_printf ("stepping insn %.8lx at %.8lx",
7533 (unsigned long) insn
, (unsigned long) from
);
7535 if ((insn
& 0xf0000000) == 0xf0000000)
7536 err
= arm_decode_unconditional (gdbarch
, insn
, regs
, dsc
);
7537 else switch (((insn
& 0x10) >> 4) | ((insn
& 0xe000000) >> 24))
7539 case 0x0: case 0x1: case 0x2: case 0x3:
7540 err
= arm_decode_dp_misc (gdbarch
, insn
, regs
, dsc
);
7543 case 0x4: case 0x5: case 0x6:
7544 err
= arm_decode_ld_st_word_ubyte (gdbarch
, insn
, regs
, dsc
);
7548 err
= arm_decode_media (gdbarch
, insn
, dsc
);
7551 case 0x8: case 0x9: case 0xa: case 0xb:
7552 err
= arm_decode_b_bl_ldmstm (gdbarch
, insn
, regs
, dsc
);
7555 case 0xc: case 0xd: case 0xe: case 0xf:
7556 err
= arm_decode_svc_copro (gdbarch
, insn
, regs
, dsc
);
7561 internal_error (__FILE__
, __LINE__
,
7562 _("arm_process_displaced_insn: Instruction decode error"));
7565 /* Actually set up the scratch space for a displaced instruction. */
7568 arm_displaced_init_closure (struct gdbarch
*gdbarch
, CORE_ADDR from
,
7569 CORE_ADDR to
, arm_displaced_step_closure
*dsc
)
7571 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
7572 unsigned int i
, len
, offset
;
7573 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
7574 int size
= dsc
->is_thumb
? 2 : 4;
7575 const gdb_byte
*bkp_insn
;
7578 /* Poke modified instruction(s). */
7579 for (i
= 0; i
< dsc
->numinsns
; i
++)
7582 displaced_debug_printf ("writing insn %.8lx at %.8lx",
7583 dsc
->modinsn
[i
], (unsigned long) to
+ offset
);
7585 displaced_debug_printf ("writing insn %.4x at %.8lx",
7586 (unsigned short) dsc
->modinsn
[i
],
7587 (unsigned long) to
+ offset
);
7589 write_memory_unsigned_integer (to
+ offset
, size
,
7590 byte_order_for_code
,
7595 /* Choose the correct breakpoint instruction. */
7598 bkp_insn
= tdep
->thumb_breakpoint
;
7599 len
= tdep
->thumb_breakpoint_size
;
7603 bkp_insn
= tdep
->arm_breakpoint
;
7604 len
= tdep
->arm_breakpoint_size
;
7607 /* Put breakpoint afterwards. */
7608 write_memory (to
+ offset
, bkp_insn
, len
);
7610 displaced_debug_printf ("copy %s->%s", paddress (gdbarch
, from
),
7611 paddress (gdbarch
, to
));
7614 /* Entry point for cleaning things up after a displaced instruction has been
7618 arm_displaced_step_fixup (struct gdbarch
*gdbarch
,
7619 struct displaced_step_closure
*dsc_
,
7620 CORE_ADDR from
, CORE_ADDR to
,
7621 struct regcache
*regs
)
7623 arm_displaced_step_closure
*dsc
= (arm_displaced_step_closure
*) dsc_
;
7626 dsc
->cleanup (gdbarch
, regs
, dsc
);
7628 if (!dsc
->wrote_to_pc
)
7629 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
7630 dsc
->insn_addr
+ dsc
->insn_size
);
7634 #include "bfd-in2.h"
7635 #include "libcoff.h"
7638 gdb_print_insn_arm (bfd_vma memaddr
, disassemble_info
*info
)
7640 gdb_disassembler
*di
7641 = static_cast<gdb_disassembler
*>(info
->application_data
);
7642 struct gdbarch
*gdbarch
= di
->arch ();
7644 if (arm_pc_is_thumb (gdbarch
, memaddr
))
7646 static asymbol
*asym
;
7647 static combined_entry_type ce
;
7648 static struct coff_symbol_struct csym
;
7649 static struct bfd fake_bfd
;
7650 static bfd_target fake_target
;
7652 if (csym
.native
== NULL
)
7654 /* Create a fake symbol vector containing a Thumb symbol.
7655 This is solely so that the code in print_insn_little_arm()
7656 and print_insn_big_arm() in opcodes/arm-dis.c will detect
7657 the presence of a Thumb symbol and switch to decoding
7658 Thumb instructions. */
7660 fake_target
.flavour
= bfd_target_coff_flavour
;
7661 fake_bfd
.xvec
= &fake_target
;
7662 ce
.u
.syment
.n_sclass
= C_THUMBEXTFUNC
;
7664 csym
.symbol
.the_bfd
= &fake_bfd
;
7665 csym
.symbol
.name
= "fake";
7666 asym
= (asymbol
*) & csym
;
7669 memaddr
= UNMAKE_THUMB_ADDR (memaddr
);
7670 info
->symbols
= &asym
;
7673 info
->symbols
= NULL
;
7675 /* GDB is able to get bfd_mach from the exe_bfd, info->mach is
7676 accurate, so mark USER_SPECIFIED_MACHINE_TYPE bit. Otherwise,
7677 opcodes/arm-dis.c:print_insn reset info->mach, and it will trigger
7678 the assert on the mismatch of info->mach and
7679 bfd_get_mach (current_program_space->exec_bfd ()) in
7680 default_print_insn. */
7681 if (current_program_space
->exec_bfd () != NULL
)
7682 info
->flags
|= USER_SPECIFIED_MACHINE_TYPE
;
7684 return default_print_insn (memaddr
, info
);
7687 /* The following define instruction sequences that will cause ARM
7688 cpu's to take an undefined instruction trap. These are used to
7689 signal a breakpoint to GDB.
7691 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7692 modes. A different instruction is required for each mode. The ARM
7693 cpu's can also be big or little endian. Thus four different
7694 instructions are needed to support all cases.
7696 Note: ARMv4 defines several new instructions that will take the
7697 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7698 not in fact add the new instructions. The new undefined
7699 instructions in ARMv4 are all instructions that had no defined
7700 behaviour in earlier chips. There is no guarantee that they will
7701 raise an exception, but may be treated as NOP's. In practice, it
7702 may only be safe to rely on instructions matching:
7704 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7705 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7706 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7708 Even this may only be true if the condition predicate is true. The
7709 following use a condition predicate of ALWAYS so it is always TRUE.
7711 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7712 and NetBSD all use a software interrupt rather than an undefined
7713 instruction to force a trap. This can be handled by the
7714 abi-specific code during establishment of the gdbarch vector. */
#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
#define THUMB_BE_BREAKPOINT {0xbe,0xbe}

/* Default software-breakpoint instruction patterns for each mode and
   byte order (see the comment above for the rationale).  NOTE(review):
   presumably these are the values installed into the gdbarch_tdep
   breakpoint fields during gdbarch initialisation — confirm at the
   tdep setup code, which is outside this chunk.  */
static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
7726 /* Implement the breakpoint_kind_from_pc gdbarch method. */
7729 arm_breakpoint_kind_from_pc (struct gdbarch
*gdbarch
, CORE_ADDR
*pcptr
)
7731 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
7732 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
7734 if (arm_pc_is_thumb (gdbarch
, *pcptr
))
7736 *pcptr
= UNMAKE_THUMB_ADDR (*pcptr
);
7738 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
7739 check whether we are replacing a 32-bit instruction. */
7740 if (tdep
->thumb2_breakpoint
!= NULL
)
7744 if (target_read_memory (*pcptr
, buf
, 2) == 0)
7746 unsigned short inst1
;
7748 inst1
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
7749 if (thumb_insn_size (inst1
) == 4)
7750 return ARM_BP_KIND_THUMB2
;
7754 return ARM_BP_KIND_THUMB
;
7757 return ARM_BP_KIND_ARM
;
7761 /* Implement the sw_breakpoint_from_kind gdbarch method. */
7763 static const gdb_byte
*
7764 arm_sw_breakpoint_from_kind (struct gdbarch
*gdbarch
, int kind
, int *size
)
7766 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
7770 case ARM_BP_KIND_ARM
:
7771 *size
= tdep
->arm_breakpoint_size
;
7772 return tdep
->arm_breakpoint
;
7773 case ARM_BP_KIND_THUMB
:
7774 *size
= tdep
->thumb_breakpoint_size
;
7775 return tdep
->thumb_breakpoint
;
7776 case ARM_BP_KIND_THUMB2
:
7777 *size
= tdep
->thumb2_breakpoint_size
;
7778 return tdep
->thumb2_breakpoint
;
7780 gdb_assert_not_reached ("unexpected arm breakpoint kind");
7784 /* Implement the breakpoint_kind_from_current_state gdbarch method. */
7787 arm_breakpoint_kind_from_current_state (struct gdbarch
*gdbarch
,
7788 struct regcache
*regcache
,
7793 /* Check the memory pointed by PC is readable. */
7794 if (target_read_memory (regcache_read_pc (regcache
), buf
, 4) == 0)
7796 struct arm_get_next_pcs next_pcs_ctx
;
7798 arm_get_next_pcs_ctor (&next_pcs_ctx
,
7799 &arm_get_next_pcs_ops
,
7800 gdbarch_byte_order (gdbarch
),
7801 gdbarch_byte_order_for_code (gdbarch
),
7805 std::vector
<CORE_ADDR
> next_pcs
= arm_get_next_pcs (&next_pcs_ctx
);
7807 /* If MEMADDR is the next instruction of current pc, do the
7808 software single step computation, and get the thumb mode by
7809 the destination address. */
7810 for (CORE_ADDR pc
: next_pcs
)
7812 if (UNMAKE_THUMB_ADDR (pc
) == *pcptr
)
7814 if (IS_THUMB_ADDR (pc
))
7816 *pcptr
= MAKE_THUMB_ADDR (*pcptr
);
7817 return arm_breakpoint_kind_from_pc (gdbarch
, pcptr
);
7820 return ARM_BP_KIND_ARM
;
7825 return arm_breakpoint_kind_from_pc (gdbarch
, pcptr
);
7828 /* Extract from an array REGBUF containing the (raw) register state a
7829 function return value of type TYPE, and copy that, in virtual
7830 format, into VALBUF. */
7833 arm_extract_return_value (struct type
*type
, struct regcache
*regs
,
7836 struct gdbarch
*gdbarch
= regs
->arch ();
7837 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
7839 if (TYPE_CODE_FLT
== type
->code ())
7841 switch (gdbarch_tdep (gdbarch
)->fp_model
)
7845 /* The value is in register F0 in internal format. We need to
7846 extract the raw value and then convert it to the desired
7848 bfd_byte tmpbuf
[ARM_FP_REGISTER_SIZE
];
7850 regs
->cooked_read (ARM_F0_REGNUM
, tmpbuf
);
7851 target_float_convert (tmpbuf
, arm_ext_type (gdbarch
),
7856 case ARM_FLOAT_SOFT_FPA
:
7857 case ARM_FLOAT_SOFT_VFP
:
7858 /* ARM_FLOAT_VFP can arise if this is a variadic function so
7859 not using the VFP ABI code. */
7861 regs
->cooked_read (ARM_A1_REGNUM
, valbuf
);
7862 if (TYPE_LENGTH (type
) > 4)
7863 regs
->cooked_read (ARM_A1_REGNUM
+ 1,
7864 valbuf
+ ARM_INT_REGISTER_SIZE
);
7868 internal_error (__FILE__
, __LINE__
,
7869 _("arm_extract_return_value: "
7870 "Floating point model not supported"));
7874 else if (type
->code () == TYPE_CODE_INT
7875 || type
->code () == TYPE_CODE_CHAR
7876 || type
->code () == TYPE_CODE_BOOL
7877 || type
->code () == TYPE_CODE_PTR
7878 || TYPE_IS_REFERENCE (type
)
7879 || type
->code () == TYPE_CODE_ENUM
)
7881 /* If the type is a plain integer, then the access is
7882 straight-forward. Otherwise we have to play around a bit
7884 int len
= TYPE_LENGTH (type
);
7885 int regno
= ARM_A1_REGNUM
;
7890 /* By using store_unsigned_integer we avoid having to do
7891 anything special for small big-endian values. */
7892 regcache_cooked_read_unsigned (regs
, regno
++, &tmp
);
7893 store_unsigned_integer (valbuf
,
7894 (len
> ARM_INT_REGISTER_SIZE
7895 ? ARM_INT_REGISTER_SIZE
: len
),
7897 len
-= ARM_INT_REGISTER_SIZE
;
7898 valbuf
+= ARM_INT_REGISTER_SIZE
;
7903 /* For a structure or union the behaviour is as if the value had
7904 been stored to word-aligned memory and then loaded into
7905 registers with 32-bit load instruction(s). */
7906 int len
= TYPE_LENGTH (type
);
7907 int regno
= ARM_A1_REGNUM
;
7908 bfd_byte tmpbuf
[ARM_INT_REGISTER_SIZE
];
7912 regs
->cooked_read (regno
++, tmpbuf
);
7913 memcpy (valbuf
, tmpbuf
,
7914 len
> ARM_INT_REGISTER_SIZE
? ARM_INT_REGISTER_SIZE
: len
);
7915 len
-= ARM_INT_REGISTER_SIZE
;
7916 valbuf
+= ARM_INT_REGISTER_SIZE
;
7922 /* Will a function return an aggregate type in memory or in a
7923 register? Return 0 if an aggregate type can be returned in a
7924 register, 1 if it must be returned in memory. */
7927 arm_return_in_memory (struct gdbarch
*gdbarch
, struct type
*type
)
7929 enum type_code code
;
7931 type
= check_typedef (type
);
7933 /* Simple, non-aggregate types (ie not including vectors and
7934 complex) are always returned in a register (or registers). */
7935 code
= type
->code ();
7936 if (TYPE_CODE_STRUCT
!= code
&& TYPE_CODE_UNION
!= code
7937 && TYPE_CODE_ARRAY
!= code
&& TYPE_CODE_COMPLEX
!= code
)
7940 if (TYPE_CODE_ARRAY
== code
&& type
->is_vector ())
7942 /* Vector values should be returned using ARM registers if they
7943 are not over 16 bytes. */
7944 return (TYPE_LENGTH (type
) > 16);
7947 if (gdbarch_tdep (gdbarch
)->arm_abi
!= ARM_ABI_APCS
)
7949 /* The AAPCS says all aggregates not larger than a word are returned
7951 if (TYPE_LENGTH (type
) <= ARM_INT_REGISTER_SIZE
)
7960 /* All aggregate types that won't fit in a register must be returned
7962 if (TYPE_LENGTH (type
) > ARM_INT_REGISTER_SIZE
)
7965 /* In the ARM ABI, "integer" like aggregate types are returned in
7966 registers. For an aggregate type to be integer like, its size
7967 must be less than or equal to ARM_INT_REGISTER_SIZE and the
7968 offset of each addressable subfield must be zero. Note that bit
7969 fields are not addressable, and all addressable subfields of
7970 unions always start at offset zero.
7972 This function is based on the behaviour of GCC 2.95.1.
7973 See: gcc/arm.c: arm_return_in_memory() for details.
7975 Note: All versions of GCC before GCC 2.95.2 do not set up the
7976 parameters correctly for a function returning the following
7977 structure: struct { float f;}; This should be returned in memory,
7978 not a register. Richard Earnshaw sent me a patch, but I do not
7979 know of any way to detect if a function like the above has been
7980 compiled with the correct calling convention. */
7982 /* Assume all other aggregate types can be returned in a register.
7983 Run a check for structures, unions and arrays. */
7986 if ((TYPE_CODE_STRUCT
== code
) || (TYPE_CODE_UNION
== code
))
7989 /* Need to check if this struct/union is "integer" like. For
7990 this to be true, its size must be less than or equal to
7991 ARM_INT_REGISTER_SIZE and the offset of each addressable
7992 subfield must be zero. Note that bit fields are not
7993 addressable, and unions always start at offset zero. If any
7994 of the subfields is a floating point type, the struct/union
7995 cannot be an integer type. */
7997 /* For each field in the object, check:
7998 1) Is it FP? --> yes, nRc = 1;
7999 2) Is it addressable (bitpos != 0) and
8000 not packed (bitsize == 0)?
8004 for (i
= 0; i
< type
->num_fields (); i
++)
8006 enum type_code field_type_code
;
8009 = check_typedef (type
->field (i
).type ())->code ();
8011 /* Is it a floating point type field? */
8012 if (field_type_code
== TYPE_CODE_FLT
)
8018 /* If bitpos != 0, then we have to care about it. */
8019 if (TYPE_FIELD_BITPOS (type
, i
) != 0)
8021 /* Bitfields are not addressable. If the field bitsize is
8022 zero, then the field is not packed. Hence it cannot be
8023 a bitfield or any other packed type. */
8024 if (TYPE_FIELD_BITSIZE (type
, i
) == 0)
8037 /* Write into appropriate registers a function return value of type
8038 TYPE, given in virtual format. */
8041 arm_store_return_value (struct type
*type
, struct regcache
*regs
,
8042 const gdb_byte
*valbuf
)
8044 struct gdbarch
*gdbarch
= regs
->arch ();
8045 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
8047 if (type
->code () == TYPE_CODE_FLT
)
8049 gdb_byte buf
[ARM_FP_REGISTER_SIZE
];
8051 switch (gdbarch_tdep (gdbarch
)->fp_model
)
8055 target_float_convert (valbuf
, type
, buf
, arm_ext_type (gdbarch
));
8056 regs
->cooked_write (ARM_F0_REGNUM
, buf
);
8059 case ARM_FLOAT_SOFT_FPA
:
8060 case ARM_FLOAT_SOFT_VFP
:
8061 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8062 not using the VFP ABI code. */
8064 regs
->cooked_write (ARM_A1_REGNUM
, valbuf
);
8065 if (TYPE_LENGTH (type
) > 4)
8066 regs
->cooked_write (ARM_A1_REGNUM
+ 1,
8067 valbuf
+ ARM_INT_REGISTER_SIZE
);
8071 internal_error (__FILE__
, __LINE__
,
8072 _("arm_store_return_value: Floating "
8073 "point model not supported"));
8077 else if (type
->code () == TYPE_CODE_INT
8078 || type
->code () == TYPE_CODE_CHAR
8079 || type
->code () == TYPE_CODE_BOOL
8080 || type
->code () == TYPE_CODE_PTR
8081 || TYPE_IS_REFERENCE (type
)
8082 || type
->code () == TYPE_CODE_ENUM
)
8084 if (TYPE_LENGTH (type
) <= 4)
8086 /* Values of one word or less are zero/sign-extended and
8088 bfd_byte tmpbuf
[ARM_INT_REGISTER_SIZE
];
8089 LONGEST val
= unpack_long (type
, valbuf
);
8091 store_signed_integer (tmpbuf
, ARM_INT_REGISTER_SIZE
, byte_order
, val
);
8092 regs
->cooked_write (ARM_A1_REGNUM
, tmpbuf
);
8096 /* Integral values greater than one word are stored in consecutive
8097 registers starting with r0. This will always be a multiple of
8098 the regiser size. */
8099 int len
= TYPE_LENGTH (type
);
8100 int regno
= ARM_A1_REGNUM
;
8104 regs
->cooked_write (regno
++, valbuf
);
8105 len
-= ARM_INT_REGISTER_SIZE
;
8106 valbuf
+= ARM_INT_REGISTER_SIZE
;
8112 /* For a structure or union the behaviour is as if the value had
8113 been stored to word-aligned memory and then loaded into
8114 registers with 32-bit load instruction(s). */
8115 int len
= TYPE_LENGTH (type
);
8116 int regno
= ARM_A1_REGNUM
;
8117 bfd_byte tmpbuf
[ARM_INT_REGISTER_SIZE
];
8121 memcpy (tmpbuf
, valbuf
,
8122 len
> ARM_INT_REGISTER_SIZE
? ARM_INT_REGISTER_SIZE
: len
);
8123 regs
->cooked_write (regno
++, tmpbuf
);
8124 len
-= ARM_INT_REGISTER_SIZE
;
8125 valbuf
+= ARM_INT_REGISTER_SIZE
;
8131 /* Handle function return values. */
8133 static enum return_value_convention
8134 arm_return_value (struct gdbarch
*gdbarch
, struct value
*function
,
8135 struct type
*valtype
, struct regcache
*regcache
,
8136 gdb_byte
*readbuf
, const gdb_byte
*writebuf
)
8138 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
8139 struct type
*func_type
= function
? value_type (function
) : NULL
;
8140 enum arm_vfp_cprc_base_type vfp_base_type
;
8143 if (arm_vfp_abi_for_function (gdbarch
, func_type
)
8144 && arm_vfp_call_candidate (valtype
, &vfp_base_type
, &vfp_base_count
))
8146 int reg_char
= arm_vfp_cprc_reg_char (vfp_base_type
);
8147 int unit_length
= arm_vfp_cprc_unit_length (vfp_base_type
);
8149 for (i
= 0; i
< vfp_base_count
; i
++)
8151 if (reg_char
== 'q')
8154 arm_neon_quad_write (gdbarch
, regcache
, i
,
8155 writebuf
+ i
* unit_length
);
8158 arm_neon_quad_read (gdbarch
, regcache
, i
,
8159 readbuf
+ i
* unit_length
);
8166 xsnprintf (name_buf
, sizeof (name_buf
), "%c%d", reg_char
, i
);
8167 regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
8170 regcache
->cooked_write (regnum
, writebuf
+ i
* unit_length
);
8172 regcache
->cooked_read (regnum
, readbuf
+ i
* unit_length
);
8175 return RETURN_VALUE_REGISTER_CONVENTION
;
8178 if (valtype
->code () == TYPE_CODE_STRUCT
8179 || valtype
->code () == TYPE_CODE_UNION
8180 || valtype
->code () == TYPE_CODE_ARRAY
)
8182 if (tdep
->struct_return
== pcc_struct_return
8183 || arm_return_in_memory (gdbarch
, valtype
))
8184 return RETURN_VALUE_STRUCT_CONVENTION
;
8186 else if (valtype
->code () == TYPE_CODE_COMPLEX
)
8188 if (arm_return_in_memory (gdbarch
, valtype
))
8189 return RETURN_VALUE_STRUCT_CONVENTION
;
8193 arm_store_return_value (valtype
, regcache
, writebuf
);
8196 arm_extract_return_value (valtype
, regcache
, readbuf
);
8198 return RETURN_VALUE_REGISTER_CONVENTION
;
8203 arm_get_longjmp_target (struct frame_info
*frame
, CORE_ADDR
*pc
)
8205 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
8206 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
8207 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
8209 gdb_byte buf
[ARM_INT_REGISTER_SIZE
];
8211 jb_addr
= get_frame_register_unsigned (frame
, ARM_A1_REGNUM
);
8213 if (target_read_memory (jb_addr
+ tdep
->jb_pc
* tdep
->jb_elt_size
, buf
,
8214 ARM_INT_REGISTER_SIZE
))
8217 *pc
= extract_unsigned_integer (buf
, ARM_INT_REGISTER_SIZE
, byte_order
);
8220 /* A call to cmse secure entry function "foo" at "a" is modified by
8227 b) bl yyyy <__acle_se_foo>
8229 section .gnu.sgstubs:
8231 yyyy: sg // secure gateway
8232 b.w xxxx <__acle_se_foo> // original_branch_dest
8237 When the control at "b", the pc contains "yyyy" (sg address) which is a
8238 trampoline and does not exist in source code. This function returns the
8239 target pc "xxxx". For more details please refer to section 5.4
8240 (Entry functions) and section 3.4.4 (C level development flow of secure code)
8241 of "armv8-m-security-extensions-requirements-on-development-tools-engineering-specification"
8242 document on www.developer.arm.com. */
8245 arm_skip_cmse_entry (CORE_ADDR pc
, const char *name
, struct objfile
*objfile
)
8247 int target_len
= strlen (name
) + strlen ("__acle_se_") + 1;
8248 char *target_name
= (char *) alloca (target_len
);
8249 xsnprintf (target_name
, target_len
, "%s%s", "__acle_se_", name
);
8251 struct bound_minimal_symbol minsym
8252 = lookup_minimal_symbol (target_name
, NULL
, objfile
);
8254 if (minsym
.minsym
!= nullptr)
8255 return BMSYMBOL_VALUE_ADDRESS (minsym
);
8260 /* Return true when SEC points to ".gnu.sgstubs" section. */
8263 arm_is_sgstubs_section (struct obj_section
*sec
)
8265 return (sec
!= nullptr
8266 && sec
->the_bfd_section
!= nullptr
8267 && sec
->the_bfd_section
->name
!= nullptr
8268 && streq (sec
->the_bfd_section
->name
, ".gnu.sgstubs"));
8271 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8272 return the target PC. Otherwise return 0. */
8275 arm_skip_stub (struct frame_info
*frame
, CORE_ADDR pc
)
8279 CORE_ADDR start_addr
;
8281 /* Find the starting address and name of the function containing the PC. */
8282 if (find_pc_partial_function (pc
, &name
, &start_addr
, NULL
) == 0)
8284 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8286 start_addr
= arm_skip_bx_reg (frame
, pc
);
8287 if (start_addr
!= 0)
8293 /* If PC is in a Thumb call or return stub, return the address of the
8294 target PC, which is in a register. The thunk functions are called
8295 _call_via_xx, where x is the register name. The possible names
8296 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8297 functions, named __ARM_call_via_r[0-7]. */
8298 if (startswith (name
, "_call_via_")
8299 || startswith (name
, "__ARM_call_via_"))
8301 /* Use the name suffix to determine which register contains the
8303 static const char *table
[15] =
8304 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8305 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8308 int offset
= strlen (name
) - 2;
8310 for (regno
= 0; regno
<= 14; regno
++)
8311 if (strcmp (&name
[offset
], table
[regno
]) == 0)
8312 return get_frame_register_unsigned (frame
, regno
);
8315 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8316 non-interworking calls to foo. We could decode the stubs
8317 to find the target but it's easier to use the symbol table. */
8318 namelen
= strlen (name
);
8319 if (name
[0] == '_' && name
[1] == '_'
8320 && ((namelen
> 2 + strlen ("_from_thumb")
8321 && startswith (name
+ namelen
- strlen ("_from_thumb"), "_from_thumb"))
8322 || (namelen
> 2 + strlen ("_from_arm")
8323 && startswith (name
+ namelen
- strlen ("_from_arm"), "_from_arm"))))
8326 int target_len
= namelen
- 2;
8327 struct bound_minimal_symbol minsym
;
8328 struct objfile
*objfile
;
8329 struct obj_section
*sec
;
8331 if (name
[namelen
- 1] == 'b')
8332 target_len
-= strlen ("_from_thumb");
8334 target_len
-= strlen ("_from_arm");
8336 target_name
= (char *) alloca (target_len
+ 1);
8337 memcpy (target_name
, name
+ 2, target_len
);
8338 target_name
[target_len
] = '\0';
8340 sec
= find_pc_section (pc
);
8341 objfile
= (sec
== NULL
) ? NULL
: sec
->objfile
;
8342 minsym
= lookup_minimal_symbol (target_name
, NULL
, objfile
);
8343 if (minsym
.minsym
!= NULL
)
8344 return BMSYMBOL_VALUE_ADDRESS (minsym
);
8349 struct obj_section
*section
= find_pc_section (pc
);
8351 /* Check whether SECTION points to the ".gnu.sgstubs" section. */
8352 if (arm_is_sgstubs_section (section
))
8353 return arm_skip_cmse_entry (pc
, name
, section
->objfile
);
8355 return 0; /* not a stub */
8359 arm_update_current_architecture (void)
8361 struct gdbarch_info info
;
8363 /* If the current architecture is not ARM, we have nothing to do. */
8364 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch
!= bfd_arch_arm
)
8367 /* Update the architecture. */
8368 gdbarch_info_init (&info
);
8370 if (!gdbarch_update_p (info
))
8371 internal_error (__FILE__
, __LINE__
, _("could not update architecture"));
8375 set_fp_model_sfunc (const char *args
, int from_tty
,
8376 struct cmd_list_element
*c
)
8380 for (fp_model
= ARM_FLOAT_AUTO
; fp_model
!= ARM_FLOAT_LAST
; fp_model
++)
8381 if (strcmp (current_fp_model
, fp_model_strings
[fp_model
]) == 0)
8383 arm_fp_model
= (enum arm_float_model
) fp_model
;
8387 if (fp_model
== ARM_FLOAT_LAST
)
8388 internal_error (__FILE__
, __LINE__
, _("Invalid fp model accepted: %s."),
8391 arm_update_current_architecture ();
8395 show_fp_model (struct ui_file
*file
, int from_tty
,
8396 struct cmd_list_element
*c
, const char *value
)
8398 struct gdbarch_tdep
*tdep
= gdbarch_tdep (target_gdbarch ());
8400 if (arm_fp_model
== ARM_FLOAT_AUTO
8401 && gdbarch_bfd_arch_info (target_gdbarch ())->arch
== bfd_arch_arm
)
8402 fprintf_filtered (file
, _("\
8403 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8404 fp_model_strings
[tdep
->fp_model
]);
8406 fprintf_filtered (file
, _("\
8407 The current ARM floating point model is \"%s\".\n"),
8408 fp_model_strings
[arm_fp_model
]);
8412 arm_set_abi (const char *args
, int from_tty
,
8413 struct cmd_list_element
*c
)
8417 for (arm_abi
= ARM_ABI_AUTO
; arm_abi
!= ARM_ABI_LAST
; arm_abi
++)
8418 if (strcmp (arm_abi_string
, arm_abi_strings
[arm_abi
]) == 0)
8420 arm_abi_global
= (enum arm_abi_kind
) arm_abi
;
8424 if (arm_abi
== ARM_ABI_LAST
)
8425 internal_error (__FILE__
, __LINE__
, _("Invalid ABI accepted: %s."),
8428 arm_update_current_architecture ();
8432 arm_show_abi (struct ui_file
*file
, int from_tty
,
8433 struct cmd_list_element
*c
, const char *value
)
8435 struct gdbarch_tdep
*tdep
= gdbarch_tdep (target_gdbarch ());
8437 if (arm_abi_global
== ARM_ABI_AUTO
8438 && gdbarch_bfd_arch_info (target_gdbarch ())->arch
== bfd_arch_arm
)
8439 fprintf_filtered (file
, _("\
8440 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8441 arm_abi_strings
[tdep
->arm_abi
]);
8443 fprintf_filtered (file
, _("The current ARM ABI is \"%s\".\n"),
8448 arm_show_fallback_mode (struct ui_file
*file
, int from_tty
,
8449 struct cmd_list_element
*c
, const char *value
)
8451 fprintf_filtered (file
,
8452 _("The current execution mode assumed "
8453 "(when symbols are unavailable) is \"%s\".\n"),
8454 arm_fallback_mode_string
);
8458 arm_show_force_mode (struct ui_file
*file
, int from_tty
,
8459 struct cmd_list_element
*c
, const char *value
)
8461 fprintf_filtered (file
,
8462 _("The current execution mode assumed "
8463 "(even when symbols are available) is \"%s\".\n"),
8464 arm_force_mode_string
);
8467 /* If the user changes the register disassembly style used for info
8468 register and other commands, we have to also switch the style used
8469 in opcodes for disassembly output. This function is run in the "set
8470 arm disassembly" command, and does that. */
8473 set_disassembly_style_sfunc (const char *args
, int from_tty
,
8474 struct cmd_list_element
*c
)
8476 /* Convert the short style name into the long style name (eg, reg-names-*)
8477 before calling the generic set_disassembler_options() function. */
8478 std::string long_name
= std::string ("reg-names-") + disassembly_style
;
8479 set_disassembler_options (&long_name
[0]);
8483 show_disassembly_style_sfunc (struct ui_file
*file
, int from_tty
,
8484 struct cmd_list_element
*c
, const char *value
)
8486 struct gdbarch
*gdbarch
= get_current_arch ();
8487 char *options
= get_disassembler_options (gdbarch
);
8488 const char *style
= "";
8492 FOR_EACH_DISASSEMBLER_OPTION (opt
, options
)
8493 if (CONST_STRNEQ (opt
, "reg-names-"))
8495 style
= &opt
[strlen ("reg-names-")];
8496 len
= strcspn (style
, ",");
8499 fprintf_unfiltered (file
, "The disassembly style is \"%.*s\".\n", len
, style
);
8502 /* Return the ARM register name corresponding to register I. */
8504 arm_register_name (struct gdbarch
*gdbarch
, int i
)
8506 const int num_regs
= gdbarch_num_regs (gdbarch
);
8508 if (gdbarch_tdep (gdbarch
)->have_vfp_pseudos
8509 && i
>= num_regs
&& i
< num_regs
+ 32)
8511 static const char *const vfp_pseudo_names
[] = {
8512 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8513 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8514 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8515 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8518 return vfp_pseudo_names
[i
- num_regs
];
8521 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
8522 && i
>= num_regs
+ 32 && i
< num_regs
+ 32 + 16)
8524 static const char *const neon_pseudo_names
[] = {
8525 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8526 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8529 return neon_pseudo_names
[i
- num_regs
- 32];
8532 if (i
>= ARRAY_SIZE (arm_register_names
))
8533 /* These registers are only supported on targets which supply
8534 an XML description. */
8537 return arm_register_names
[i
];
8540 /* Test whether the coff symbol specific value corresponds to a Thumb
8544 coff_sym_is_thumb (int val
)
8546 return (val
== C_THUMBEXT
8547 || val
== C_THUMBSTAT
8548 || val
== C_THUMBEXTFUNC
8549 || val
== C_THUMBSTATFUNC
8550 || val
== C_THUMBLABEL
);
8553 /* arm_coff_make_msymbol_special()
8554 arm_elf_make_msymbol_special()
8556 These functions test whether the COFF or ELF symbol corresponds to
8557 an address in thumb code, and set a "special" bit in a minimal
8558 symbol to indicate that it does. */
8561 arm_elf_make_msymbol_special(asymbol
*sym
, struct minimal_symbol
*msym
)
8563 elf_symbol_type
*elfsym
= (elf_symbol_type
*) sym
;
8565 if (ARM_GET_SYM_BRANCH_TYPE (elfsym
->internal_elf_sym
.st_target_internal
)
8566 == ST_BRANCH_TO_THUMB
)
8567 MSYMBOL_SET_SPECIAL (msym
);
8571 arm_coff_make_msymbol_special(int val
, struct minimal_symbol
*msym
)
8573 if (coff_sym_is_thumb (val
))
8574 MSYMBOL_SET_SPECIAL (msym
);
8578 arm_record_special_symbol (struct gdbarch
*gdbarch
, struct objfile
*objfile
,
8581 const char *name
= bfd_asymbol_name (sym
);
8582 struct arm_per_bfd
*data
;
8583 struct arm_mapping_symbol new_map_sym
;
8585 gdb_assert (name
[0] == '$');
8586 if (name
[1] != 'a' && name
[1] != 't' && name
[1] != 'd')
8589 data
= arm_bfd_data_key
.get (objfile
->obfd
);
8591 data
= arm_bfd_data_key
.emplace (objfile
->obfd
,
8592 objfile
->obfd
->section_count
);
8593 arm_mapping_symbol_vec
&map
8594 = data
->section_maps
[bfd_asymbol_section (sym
)->index
];
8596 new_map_sym
.value
= sym
->value
;
8597 new_map_sym
.type
= name
[1];
8599 /* Insert at the end, the vector will be sorted on first use. */
8600 map
.push_back (new_map_sym
);
8604 arm_write_pc (struct regcache
*regcache
, CORE_ADDR pc
)
8606 struct gdbarch
*gdbarch
= regcache
->arch ();
8607 regcache_cooked_write_unsigned (regcache
, ARM_PC_REGNUM
, pc
);
8609 /* If necessary, set the T bit. */
8612 ULONGEST val
, t_bit
;
8613 regcache_cooked_read_unsigned (regcache
, ARM_PS_REGNUM
, &val
);
8614 t_bit
= arm_psr_thumb_bit (gdbarch
);
8615 if (arm_pc_is_thumb (gdbarch
, pc
))
8616 regcache_cooked_write_unsigned (regcache
, ARM_PS_REGNUM
,
8619 regcache_cooked_write_unsigned (regcache
, ARM_PS_REGNUM
,
8624 /* Read the contents of a NEON quad register, by reading from two
8625 double registers. This is used to implement the quad pseudo
8626 registers, and for argument passing in case the quad registers are
8627 missing; vectors are passed in quad registers when using the VFP
8628 ABI, even if a NEON unit is not present. REGNUM is the index of
8629 the quad register, in [0, 15]. */
8631 static enum register_status
8632 arm_neon_quad_read (struct gdbarch
*gdbarch
, readable_regcache
*regcache
,
8633 int regnum
, gdb_byte
*buf
)
8636 gdb_byte reg_buf
[8];
8637 int offset
, double_regnum
;
8638 enum register_status status
;
8640 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
<< 1);
8641 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
8644 /* d0 is always the least significant half of q0. */
8645 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
8650 status
= regcache
->raw_read (double_regnum
, reg_buf
);
8651 if (status
!= REG_VALID
)
8653 memcpy (buf
+ offset
, reg_buf
, 8);
8655 offset
= 8 - offset
;
8656 status
= regcache
->raw_read (double_regnum
+ 1, reg_buf
);
8657 if (status
!= REG_VALID
)
8659 memcpy (buf
+ offset
, reg_buf
, 8);
8664 static enum register_status
8665 arm_pseudo_read (struct gdbarch
*gdbarch
, readable_regcache
*regcache
,
8666 int regnum
, gdb_byte
*buf
)
8668 const int num_regs
= gdbarch_num_regs (gdbarch
);
8670 gdb_byte reg_buf
[8];
8671 int offset
, double_regnum
;
8673 gdb_assert (regnum
>= num_regs
);
8676 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
&& regnum
>= 32 && regnum
< 48)
8677 /* Quad-precision register. */
8678 return arm_neon_quad_read (gdbarch
, regcache
, regnum
- 32, buf
);
8681 enum register_status status
;
8683 /* Single-precision register. */
8684 gdb_assert (regnum
< 32);
8686 /* s0 is always the least significant half of d0. */
8687 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
8688 offset
= (regnum
& 1) ? 0 : 4;
8690 offset
= (regnum
& 1) ? 4 : 0;
8692 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
>> 1);
8693 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
8696 status
= regcache
->raw_read (double_regnum
, reg_buf
);
8697 if (status
== REG_VALID
)
8698 memcpy (buf
, reg_buf
+ offset
, 4);
8703 /* Store the contents of BUF to a NEON quad register, by writing to
8704 two double registers. This is used to implement the quad pseudo
8705 registers, and for argument passing in case the quad registers are
8706 missing; vectors are passed in quad registers when using the VFP
8707 ABI, even if a NEON unit is not present. REGNUM is the index
8708 of the quad register, in [0, 15]. */
8711 arm_neon_quad_write (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
8712 int regnum
, const gdb_byte
*buf
)
8715 int offset
, double_regnum
;
8717 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
<< 1);
8718 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
8721 /* d0 is always the least significant half of q0. */
8722 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
8727 regcache
->raw_write (double_regnum
, buf
+ offset
);
8728 offset
= 8 - offset
;
8729 regcache
->raw_write (double_regnum
+ 1, buf
+ offset
);
8733 arm_pseudo_write (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
8734 int regnum
, const gdb_byte
*buf
)
8736 const int num_regs
= gdbarch_num_regs (gdbarch
);
8738 gdb_byte reg_buf
[8];
8739 int offset
, double_regnum
;
8741 gdb_assert (regnum
>= num_regs
);
8744 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
&& regnum
>= 32 && regnum
< 48)
8745 /* Quad-precision register. */
8746 arm_neon_quad_write (gdbarch
, regcache
, regnum
- 32, buf
);
8749 /* Single-precision register. */
8750 gdb_assert (regnum
< 32);
8752 /* s0 is always the least significant half of d0. */
8753 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
8754 offset
= (regnum
& 1) ? 0 : 4;
8756 offset
= (regnum
& 1) ? 4 : 0;
8758 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
>> 1);
8759 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
8762 regcache
->raw_read (double_regnum
, reg_buf
);
8763 memcpy (reg_buf
+ offset
, buf
, 4);
8764 regcache
->raw_write (double_regnum
, reg_buf
);
8768 static struct value
*
8769 value_of_arm_user_reg (struct frame_info
*frame
, const void *baton
)
8771 const int *reg_p
= (const int *) baton
;
8772 return value_of_register (*reg_p
, frame
);
8775 static enum gdb_osabi
8776 arm_elf_osabi_sniffer (bfd
*abfd
)
8778 unsigned int elfosabi
;
8779 enum gdb_osabi osabi
= GDB_OSABI_UNKNOWN
;
8781 elfosabi
= elf_elfheader (abfd
)->e_ident
[EI_OSABI
];
8783 if (elfosabi
== ELFOSABI_ARM
)
8784 /* GNU tools use this value. Check note sections in this case,
8787 for (asection
*sect
: gdb_bfd_sections (abfd
))
8788 generic_elf_osabi_sniff_abi_tag_sections (abfd
, sect
, &osabi
);
8791 /* Anything else will be handled by the generic ELF sniffer. */
8796 arm_register_reggroup_p (struct gdbarch
*gdbarch
, int regnum
,
8797 struct reggroup
*group
)
8799 /* FPS register's type is INT, but belongs to float_reggroup. Beside
8800 this, FPS register belongs to save_regroup, restore_reggroup, and
8801 all_reggroup, of course. */
8802 if (regnum
== ARM_FPS_REGNUM
)
8803 return (group
== float_reggroup
8804 || group
== save_reggroup
8805 || group
== restore_reggroup
8806 || group
== all_reggroup
);
8808 return default_register_reggroup_p (gdbarch
, regnum
, group
);
8811 /* For backward-compatibility we allow two 'g' packet lengths with
8812 the remote protocol depending on whether FPA registers are
8813 supplied. M-profile targets do not have FPA registers, but some
8814 stubs already exist in the wild which use a 'g' packet which
8815 supplies them albeit with dummy values. The packet format which
8816 includes FPA registers should be considered deprecated for
8817 M-profile targets. */
8820 arm_register_g_packet_guesses (struct gdbarch
*gdbarch
)
8822 if (gdbarch_tdep (gdbarch
)->is_m
)
8824 const target_desc
*tdesc
;
8826 /* If we know from the executable this is an M-profile target,
8827 cater for remote targets whose register set layout is the
8828 same as the FPA layout. */
8829 tdesc
= arm_read_mprofile_description (ARM_M_TYPE_WITH_FPA
);
8830 register_remote_g_packet_guess (gdbarch
,
8831 ARM_CORE_REGS_SIZE
+ ARM_FP_REGS_SIZE
,
8834 /* The regular M-profile layout. */
8835 tdesc
= arm_read_mprofile_description (ARM_M_TYPE_M_PROFILE
);
8836 register_remote_g_packet_guess (gdbarch
, ARM_CORE_REGS_SIZE
,
8839 /* M-profile plus M4F VFP. */
8840 tdesc
= arm_read_mprofile_description (ARM_M_TYPE_VFP_D16
);
8841 register_remote_g_packet_guess (gdbarch
,
8842 ARM_CORE_REGS_SIZE
+ ARM_VFP2_REGS_SIZE
,
8846 /* Otherwise we don't have a useful guess. */
8849 /* Implement the code_of_frame_writable gdbarch method. */
8852 arm_code_of_frame_writable (struct gdbarch
*gdbarch
, struct frame_info
*frame
)
8854 if (gdbarch_tdep (gdbarch
)->is_m
8855 && get_frame_type (frame
) == SIGTRAMP_FRAME
)
8857 /* M-profile exception frames return to some magic PCs, where
8858 isn't writable at all. */
8865 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
8866 to be postfixed by a version (eg armv7hl). */
8869 arm_gnu_triplet_regexp (struct gdbarch
*gdbarch
)
8871 if (strcmp (gdbarch_bfd_arch_info (gdbarch
)->arch_name
, "arm") == 0)
8872 return "arm(v[^- ]*)?";
8873 return gdbarch_bfd_arch_info (gdbarch
)->arch_name
;
8876 /* Initialize the current architecture based on INFO. If possible,
8877 re-use an architecture from ARCHES, which is a list of
8878 architectures already created during this debugging session.
8880 Called e.g. at program startup, when reading a core file, and when
8881 reading a binary file. */
8883 static struct gdbarch
*
8884 arm_gdbarch_init (struct gdbarch_info info
, struct gdbarch_list
*arches
)
8886 struct gdbarch_tdep
*tdep
;
8887 struct gdbarch
*gdbarch
;
8888 struct gdbarch_list
*best_arch
;
8889 enum arm_abi_kind arm_abi
= arm_abi_global
;
8890 enum arm_float_model fp_model
= arm_fp_model
;
8891 tdesc_arch_data_up tdesc_data
;
8894 int vfp_register_count
= 0;
8895 bool have_vfp_pseudos
= false, have_neon_pseudos
= false;
8896 bool have_wmmx_registers
= false;
8897 bool have_neon
= false;
8898 bool have_fpa_registers
= true;
8899 const struct target_desc
*tdesc
= info
.target_desc
;
8901 /* If we have an object to base this architecture on, try to determine
8904 if (arm_abi
== ARM_ABI_AUTO
&& info
.abfd
!= NULL
)
8906 int ei_osabi
, e_flags
;
8908 switch (bfd_get_flavour (info
.abfd
))
8910 case bfd_target_coff_flavour
:
8911 /* Assume it's an old APCS-style ABI. */
8913 arm_abi
= ARM_ABI_APCS
;
8916 case bfd_target_elf_flavour
:
8917 ei_osabi
= elf_elfheader (info
.abfd
)->e_ident
[EI_OSABI
];
8918 e_flags
= elf_elfheader (info
.abfd
)->e_flags
;
8920 if (ei_osabi
== ELFOSABI_ARM
)
8922 /* GNU tools used to use this value, but do not for EABI
8923 objects. There's nowhere to tag an EABI version
8924 anyway, so assume APCS. */
8925 arm_abi
= ARM_ABI_APCS
;
8927 else if (ei_osabi
== ELFOSABI_NONE
|| ei_osabi
== ELFOSABI_GNU
)
8929 int eabi_ver
= EF_ARM_EABI_VERSION (e_flags
);
8933 case EF_ARM_EABI_UNKNOWN
:
8934 /* Assume GNU tools. */
8935 arm_abi
= ARM_ABI_APCS
;
8938 case EF_ARM_EABI_VER4
:
8939 case EF_ARM_EABI_VER5
:
8940 arm_abi
= ARM_ABI_AAPCS
;
8941 /* EABI binaries default to VFP float ordering.
8942 They may also contain build attributes that can
8943 be used to identify if the VFP argument-passing
8945 if (fp_model
== ARM_FLOAT_AUTO
)
8948 switch (bfd_elf_get_obj_attr_int (info
.abfd
,
8952 case AEABI_VFP_args_base
:
8953 /* "The user intended FP parameter/result
8954 passing to conform to AAPCS, base
8956 fp_model
= ARM_FLOAT_SOFT_VFP
;
8958 case AEABI_VFP_args_vfp
:
8959 /* "The user intended FP parameter/result
8960 passing to conform to AAPCS, VFP
8962 fp_model
= ARM_FLOAT_VFP
;
8964 case AEABI_VFP_args_toolchain
:
8965 /* "The user intended FP parameter/result
8966 passing to conform to tool chain-specific
8967 conventions" - we don't know any such
8968 conventions, so leave it as "auto". */
8970 case AEABI_VFP_args_compatible
:
8971 /* "Code is compatible with both the base
8972 and VFP variants; the user did not permit
8973 non-variadic functions to pass FP
8974 parameters/results" - leave it as
8978 /* Attribute value not mentioned in the
8979 November 2012 ABI, so leave it as
8984 fp_model
= ARM_FLOAT_SOFT_VFP
;
8990 /* Leave it as "auto". */
8991 warning (_("unknown ARM EABI version 0x%x"), eabi_ver
);
8996 /* Detect M-profile programs. This only works if the
8997 executable file includes build attributes; GCC does
8998 copy them to the executable, but e.g. RealView does
9001 = bfd_elf_get_obj_attr_int (info
.abfd
, OBJ_ATTR_PROC
,
9004 = bfd_elf_get_obj_attr_int (info
.abfd
, OBJ_ATTR_PROC
,
9005 Tag_CPU_arch_profile
);
9007 /* GCC specifies the profile for v6-M; RealView only
9008 specifies the profile for architectures starting with
9009 V7 (as opposed to architectures with a tag
9010 numerically greater than TAG_CPU_ARCH_V7). */
9011 if (!tdesc_has_registers (tdesc
)
9012 && (attr_arch
== TAG_CPU_ARCH_V6_M
9013 || attr_arch
== TAG_CPU_ARCH_V6S_M
9014 || attr_profile
== 'M'))
9019 if (fp_model
== ARM_FLOAT_AUTO
)
9021 switch (e_flags
& (EF_ARM_SOFT_FLOAT
| EF_ARM_VFP_FLOAT
))
9024 /* Leave it as "auto". Strictly speaking this case
9025 means FPA, but almost nobody uses that now, and
9026 many toolchains fail to set the appropriate bits
9027 for the floating-point model they use. */
9029 case EF_ARM_SOFT_FLOAT
:
9030 fp_model
= ARM_FLOAT_SOFT_FPA
;
9032 case EF_ARM_VFP_FLOAT
:
9033 fp_model
= ARM_FLOAT_VFP
;
9035 case EF_ARM_SOFT_FLOAT
| EF_ARM_VFP_FLOAT
:
9036 fp_model
= ARM_FLOAT_SOFT_VFP
;
9041 if (e_flags
& EF_ARM_BE8
)
9042 info
.byte_order_for_code
= BFD_ENDIAN_LITTLE
;
9047 /* Leave it as "auto". */
9052 /* Check any target description for validity. */
9053 if (tdesc_has_registers (tdesc
))
9055 /* For most registers we require GDB's default names; but also allow
9056 the numeric names for sp / lr / pc, as a convenience. */
9057 static const char *const arm_sp_names
[] = { "r13", "sp", NULL
};
9058 static const char *const arm_lr_names
[] = { "r14", "lr", NULL
};
9059 static const char *const arm_pc_names
[] = { "r15", "pc", NULL
};
9061 const struct tdesc_feature
*feature
;
9064 feature
= tdesc_find_feature (tdesc
,
9065 "org.gnu.gdb.arm.core");
9066 if (feature
== NULL
)
9068 feature
= tdesc_find_feature (tdesc
,
9069 "org.gnu.gdb.arm.m-profile");
9070 if (feature
== NULL
)
9076 tdesc_data
= tdesc_data_alloc ();
9079 for (i
= 0; i
< ARM_SP_REGNUM
; i
++)
9080 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
.get (), i
,
9081 arm_register_names
[i
]);
9082 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
.get (),
9085 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
.get (),
9088 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
.get (),
9092 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
.get (),
9093 ARM_PS_REGNUM
, "xpsr");
9095 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
.get (),
9096 ARM_PS_REGNUM
, "cpsr");
9101 feature
= tdesc_find_feature (tdesc
,
9102 "org.gnu.gdb.arm.fpa");
9103 if (feature
!= NULL
)
9106 for (i
= ARM_F0_REGNUM
; i
<= ARM_FPS_REGNUM
; i
++)
9107 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
.get (), i
,
9108 arm_register_names
[i
]);
9113 have_fpa_registers
= false;
9115 feature
= tdesc_find_feature (tdesc
,
9116 "org.gnu.gdb.xscale.iwmmxt");
9117 if (feature
!= NULL
)
9119 static const char *const iwmmxt_names
[] = {
9120 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9121 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9122 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9123 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9127 for (i
= ARM_WR0_REGNUM
; i
<= ARM_WR15_REGNUM
; i
++)
9129 &= tdesc_numbered_register (feature
, tdesc_data
.get (), i
,
9130 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
9132 /* Check for the control registers, but do not fail if they
9134 for (i
= ARM_WC0_REGNUM
; i
<= ARM_WCASF_REGNUM
; i
++)
9135 tdesc_numbered_register (feature
, tdesc_data
.get (), i
,
9136 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
9138 for (i
= ARM_WCGR0_REGNUM
; i
<= ARM_WCGR3_REGNUM
; i
++)
9140 &= tdesc_numbered_register (feature
, tdesc_data
.get (), i
,
9141 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
9146 have_wmmx_registers
= true;
9149 /* If we have a VFP unit, check whether the single precision registers
9150 are present. If not, then we will synthesize them as pseudo
9152 feature
= tdesc_find_feature (tdesc
,
9153 "org.gnu.gdb.arm.vfp");
9154 if (feature
!= NULL
)
9156 static const char *const vfp_double_names
[] = {
9157 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9158 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9159 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9160 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9163 /* Require the double precision registers. There must be either
9166 for (i
= 0; i
< 32; i
++)
9168 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
.get (),
9170 vfp_double_names
[i
]);
9174 if (!valid_p
&& i
== 16)
9177 /* Also require FPSCR. */
9178 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
.get (),
9179 ARM_FPSCR_REGNUM
, "fpscr");
9183 if (tdesc_unnumbered_register (feature
, "s0") == 0)
9184 have_vfp_pseudos
= true;
9186 vfp_register_count
= i
;
9188 /* If we have VFP, also check for NEON. The architecture allows
9189 NEON without VFP (integer vector operations only), but GDB
9190 does not support that. */
9191 feature
= tdesc_find_feature (tdesc
,
9192 "org.gnu.gdb.arm.neon");
9193 if (feature
!= NULL
)
9195 /* NEON requires 32 double-precision registers. */
9199 /* If there are quad registers defined by the stub, use
9200 their type; otherwise (normally) provide them with
9201 the default type. */
9202 if (tdesc_unnumbered_register (feature
, "q0") == 0)
9203 have_neon_pseudos
= true;
9210 /* If there is already a candidate, use it. */
9211 for (best_arch
= gdbarch_list_lookup_by_info (arches
, &info
);
9213 best_arch
= gdbarch_list_lookup_by_info (best_arch
->next
, &info
))
9215 if (arm_abi
!= ARM_ABI_AUTO
9216 && arm_abi
!= gdbarch_tdep (best_arch
->gdbarch
)->arm_abi
)
9219 if (fp_model
!= ARM_FLOAT_AUTO
9220 && fp_model
!= gdbarch_tdep (best_arch
->gdbarch
)->fp_model
)
9223 /* There are various other properties in tdep that we do not
9224 need to check here: those derived from a target description,
9225 since gdbarches with a different target description are
9226 automatically disqualified. */
9228 /* Do check is_m, though, since it might come from the binary. */
9229 if (is_m
!= gdbarch_tdep (best_arch
->gdbarch
)->is_m
)
9232 /* Found a match. */
9236 if (best_arch
!= NULL
)
9237 return best_arch
->gdbarch
;
9239 tdep
= XCNEW (struct gdbarch_tdep
);
9240 gdbarch
= gdbarch_alloc (&info
, tdep
);
9242 /* Record additional information about the architecture we are defining.
9243 These are gdbarch discriminators, like the OSABI. */
9244 tdep
->arm_abi
= arm_abi
;
9245 tdep
->fp_model
= fp_model
;
9247 tdep
->have_fpa_registers
= have_fpa_registers
;
9248 tdep
->have_wmmx_registers
= have_wmmx_registers
;
9249 gdb_assert (vfp_register_count
== 0
9250 || vfp_register_count
== 16
9251 || vfp_register_count
== 32);
9252 tdep
->vfp_register_count
= vfp_register_count
;
9253 tdep
->have_vfp_pseudos
= have_vfp_pseudos
;
9254 tdep
->have_neon_pseudos
= have_neon_pseudos
;
9255 tdep
->have_neon
= have_neon
;
9257 arm_register_g_packet_guesses (gdbarch
);
9260 switch (info
.byte_order_for_code
)
9262 case BFD_ENDIAN_BIG
:
9263 tdep
->arm_breakpoint
= arm_default_arm_be_breakpoint
;
9264 tdep
->arm_breakpoint_size
= sizeof (arm_default_arm_be_breakpoint
);
9265 tdep
->thumb_breakpoint
= arm_default_thumb_be_breakpoint
;
9266 tdep
->thumb_breakpoint_size
= sizeof (arm_default_thumb_be_breakpoint
);
9270 case BFD_ENDIAN_LITTLE
:
9271 tdep
->arm_breakpoint
= arm_default_arm_le_breakpoint
;
9272 tdep
->arm_breakpoint_size
= sizeof (arm_default_arm_le_breakpoint
);
9273 tdep
->thumb_breakpoint
= arm_default_thumb_le_breakpoint
;
9274 tdep
->thumb_breakpoint_size
= sizeof (arm_default_thumb_le_breakpoint
);
9279 internal_error (__FILE__
, __LINE__
,
9280 _("arm_gdbarch_init: bad byte order for float format"));
9283 /* On ARM targets char defaults to unsigned. */
9284 set_gdbarch_char_signed (gdbarch
, 0);
9286 /* wchar_t is unsigned under the AAPCS. */
9287 if (tdep
->arm_abi
== ARM_ABI_AAPCS
)
9288 set_gdbarch_wchar_signed (gdbarch
, 0);
9290 set_gdbarch_wchar_signed (gdbarch
, 1);
9292 /* Compute type alignment. */
9293 set_gdbarch_type_align (gdbarch
, arm_type_align
);
9295 /* Note: for displaced stepping, this includes the breakpoint, and one word
9296 of additional scratch space. This setting isn't used for anything beside
9297 displaced stepping at present. */
9298 set_gdbarch_max_insn_length (gdbarch
, 4 * ARM_DISPLACED_MODIFIED_INSNS
);
9300 /* This should be low enough for everything. */
9301 tdep
->lowest_pc
= 0x20;
9302 tdep
->jb_pc
= -1; /* Longjump support not enabled by default. */
9304 /* The default, for both APCS and AAPCS, is to return small
9305 structures in registers. */
9306 tdep
->struct_return
= reg_struct_return
;
9308 set_gdbarch_push_dummy_call (gdbarch
, arm_push_dummy_call
);
9309 set_gdbarch_frame_align (gdbarch
, arm_frame_align
);
9312 set_gdbarch_code_of_frame_writable (gdbarch
, arm_code_of_frame_writable
);
9314 set_gdbarch_write_pc (gdbarch
, arm_write_pc
);
9316 frame_base_set_default (gdbarch
, &arm_normal_base
);
9318 /* Address manipulation. */
9319 set_gdbarch_addr_bits_remove (gdbarch
, arm_addr_bits_remove
);
9321 /* Advance PC across function entry code. */
9322 set_gdbarch_skip_prologue (gdbarch
, arm_skip_prologue
);
9324 /* Detect whether PC is at a point where the stack has been destroyed. */
9325 set_gdbarch_stack_frame_destroyed_p (gdbarch
, arm_stack_frame_destroyed_p
);
9327 /* Skip trampolines. */
9328 set_gdbarch_skip_trampoline_code (gdbarch
, arm_skip_stub
);
9330 /* The stack grows downward. */
9331 set_gdbarch_inner_than (gdbarch
, core_addr_lessthan
);
9333 /* Breakpoint manipulation. */
9334 set_gdbarch_breakpoint_kind_from_pc (gdbarch
, arm_breakpoint_kind_from_pc
);
9335 set_gdbarch_sw_breakpoint_from_kind (gdbarch
, arm_sw_breakpoint_from_kind
);
9336 set_gdbarch_breakpoint_kind_from_current_state (gdbarch
,
9337 arm_breakpoint_kind_from_current_state
);
9339 /* Information about registers, etc. */
9340 set_gdbarch_sp_regnum (gdbarch
, ARM_SP_REGNUM
);
9341 set_gdbarch_pc_regnum (gdbarch
, ARM_PC_REGNUM
);
9342 set_gdbarch_num_regs (gdbarch
, ARM_NUM_REGS
);
9343 set_gdbarch_register_type (gdbarch
, arm_register_type
);
9344 set_gdbarch_register_reggroup_p (gdbarch
, arm_register_reggroup_p
);
9346 /* This "info float" is FPA-specific. Use the generic version if we
9348 if (gdbarch_tdep (gdbarch
)->have_fpa_registers
)
9349 set_gdbarch_print_float_info (gdbarch
, arm_print_float_info
);
9351 /* Internal <-> external register number maps. */
9352 set_gdbarch_dwarf2_reg_to_regnum (gdbarch
, arm_dwarf_reg_to_regnum
);
9353 set_gdbarch_register_sim_regno (gdbarch
, arm_register_sim_regno
);
9355 set_gdbarch_register_name (gdbarch
, arm_register_name
);
9357 /* Returning results. */
9358 set_gdbarch_return_value (gdbarch
, arm_return_value
);
9361 set_gdbarch_print_insn (gdbarch
, gdb_print_insn_arm
);
9363 /* Minsymbol frobbing. */
9364 set_gdbarch_elf_make_msymbol_special (gdbarch
, arm_elf_make_msymbol_special
);
9365 set_gdbarch_coff_make_msymbol_special (gdbarch
,
9366 arm_coff_make_msymbol_special
);
9367 set_gdbarch_record_special_symbol (gdbarch
, arm_record_special_symbol
);
9369 /* Thumb-2 IT block support. */
9370 set_gdbarch_adjust_breakpoint_address (gdbarch
,
9371 arm_adjust_breakpoint_address
);
9373 /* Virtual tables. */
9374 set_gdbarch_vbit_in_delta (gdbarch
, 1);
9376 /* Hook in the ABI-specific overrides, if they have been registered. */
9377 gdbarch_init_osabi (info
, gdbarch
);
9379 dwarf2_frame_set_init_reg (gdbarch
, arm_dwarf2_frame_init_reg
);
9381 /* Add some default predicates. */
9383 frame_unwind_append_unwinder (gdbarch
, &arm_m_exception_unwind
);
9384 frame_unwind_append_unwinder (gdbarch
, &arm_stub_unwind
);
9385 dwarf2_append_unwinders (gdbarch
);
9386 frame_unwind_append_unwinder (gdbarch
, &arm_exidx_unwind
);
9387 frame_unwind_append_unwinder (gdbarch
, &arm_epilogue_frame_unwind
);
9388 frame_unwind_append_unwinder (gdbarch
, &arm_prologue_unwind
);
9390 /* Now we have tuned the configuration, set a few final things,
9391 based on what the OS ABI has told us. */
9393 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9394 binaries are always marked. */
9395 if (tdep
->arm_abi
== ARM_ABI_AUTO
)
9396 tdep
->arm_abi
= ARM_ABI_APCS
;
9398 /* Watchpoints are not steppable. */
9399 set_gdbarch_have_nonsteppable_watchpoint (gdbarch
, 1);
9401 /* We used to default to FPA for generic ARM, but almost nobody
9402 uses that now, and we now provide a way for the user to force
9403 the model. So default to the most useful variant. */
9404 if (tdep
->fp_model
== ARM_FLOAT_AUTO
)
9405 tdep
->fp_model
= ARM_FLOAT_SOFT_FPA
;
9407 if (tdep
->jb_pc
>= 0)
9408 set_gdbarch_get_longjmp_target (gdbarch
, arm_get_longjmp_target
);
9410 /* Floating point sizes and format. */
9411 set_gdbarch_float_format (gdbarch
, floatformats_ieee_single
);
9412 if (tdep
->fp_model
== ARM_FLOAT_SOFT_FPA
|| tdep
->fp_model
== ARM_FLOAT_FPA
)
9414 set_gdbarch_double_format
9415 (gdbarch
, floatformats_ieee_double_littlebyte_bigword
);
9416 set_gdbarch_long_double_format
9417 (gdbarch
, floatformats_ieee_double_littlebyte_bigword
);
9421 set_gdbarch_double_format (gdbarch
, floatformats_ieee_double
);
9422 set_gdbarch_long_double_format (gdbarch
, floatformats_ieee_double
);
9425 if (have_vfp_pseudos
)
9427 /* NOTE: These are the only pseudo registers used by
9428 the ARM target at the moment. If more are added, a
9429 little more care in numbering will be needed. */
9431 int num_pseudos
= 32;
9432 if (have_neon_pseudos
)
9434 set_gdbarch_num_pseudo_regs (gdbarch
, num_pseudos
);
9435 set_gdbarch_pseudo_register_read (gdbarch
, arm_pseudo_read
);
9436 set_gdbarch_pseudo_register_write (gdbarch
, arm_pseudo_write
);
9439 if (tdesc_data
!= nullptr)
9441 set_tdesc_pseudo_register_name (gdbarch
, arm_register_name
);
9443 tdesc_use_registers (gdbarch
, tdesc
, std::move (tdesc_data
));
9445 /* Override tdesc_register_type to adjust the types of VFP
9446 registers for NEON. */
9447 set_gdbarch_register_type (gdbarch
, arm_register_type
);
9450 /* Add standard register aliases. We add aliases even for those
9451 names which are used by the current architecture - it's simpler,
9452 and does no harm, since nothing ever lists user registers. */
9453 for (i
= 0; i
< ARRAY_SIZE (arm_register_aliases
); i
++)
9454 user_reg_add (gdbarch
, arm_register_aliases
[i
].name
,
9455 value_of_arm_user_reg
, &arm_register_aliases
[i
].regnum
);
9457 set_gdbarch_disassembler_options (gdbarch
, &arm_disassembler_options
);
9458 set_gdbarch_valid_disassembler_options (gdbarch
, disassembler_options_arm ());
9460 set_gdbarch_gnu_triplet_regexp (gdbarch
, arm_gnu_triplet_regexp
);
9466 arm_dump_tdep (struct gdbarch
*gdbarch
, struct ui_file
*file
)
9468 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
9473 fprintf_unfiltered (file
, _("arm_dump_tdep: fp_model = %i\n"),
9474 (int) tdep
->fp_model
);
9475 fprintf_unfiltered (file
, _("arm_dump_tdep: have_fpa_registers = %i\n"),
9476 (int) tdep
->have_fpa_registers
);
9477 fprintf_unfiltered (file
, _("arm_dump_tdep: have_wmmx_registers = %i\n"),
9478 (int) tdep
->have_wmmx_registers
);
9479 fprintf_unfiltered (file
, _("arm_dump_tdep: vfp_register_count = %i\n"),
9480 (int) tdep
->vfp_register_count
);
9481 fprintf_unfiltered (file
, _("arm_dump_tdep: have_vfp_pseudos = %i\n"),
9482 (int) tdep
->have_vfp_pseudos
);
9483 fprintf_unfiltered (file
, _("arm_dump_tdep: have_neon_pseudos = %i\n"),
9484 (int) tdep
->have_neon_pseudos
);
9485 fprintf_unfiltered (file
, _("arm_dump_tdep: have_neon = %i\n"),
9486 (int) tdep
->have_neon
);
9487 fprintf_unfiltered (file
, _("arm_dump_tdep: Lowest pc = 0x%lx\n"),
9488 (unsigned long) tdep
->lowest_pc
);
9494 static void arm_record_test (void);
9498 void _initialize_arm_tdep ();
9500 _initialize_arm_tdep ()
9504 char regdesc
[1024], *rdptr
= regdesc
;
9505 size_t rest
= sizeof (regdesc
);
9507 gdbarch_register (bfd_arch_arm
, arm_gdbarch_init
, arm_dump_tdep
);
9509 /* Add ourselves to objfile event chain. */
9510 gdb::observers::new_objfile
.attach (arm_exidx_new_objfile
);
9512 /* Register an ELF OS ABI sniffer for ARM binaries. */
9513 gdbarch_register_osabi_sniffer (bfd_arch_arm
,
9514 bfd_target_elf_flavour
,
9515 arm_elf_osabi_sniffer
);
9517 /* Add root prefix command for all "set arm"/"show arm" commands. */
9518 add_basic_prefix_cmd ("arm", no_class
,
9519 _("Various ARM-specific commands."),
9520 &setarmcmdlist
, "set arm ", 0, &setlist
);
9522 add_show_prefix_cmd ("arm", no_class
,
9523 _("Various ARM-specific commands."),
9524 &showarmcmdlist
, "show arm ", 0, &showlist
);
9527 arm_disassembler_options
= xstrdup ("reg-names-std");
9528 const disasm_options_t
*disasm_options
9529 = &disassembler_options_arm ()->options
;
9530 int num_disassembly_styles
= 0;
9531 for (i
= 0; disasm_options
->name
[i
] != NULL
; i
++)
9532 if (CONST_STRNEQ (disasm_options
->name
[i
], "reg-names-"))
9533 num_disassembly_styles
++;
9535 /* Initialize the array that will be passed to add_setshow_enum_cmd(). */
9536 valid_disassembly_styles
= XNEWVEC (const char *,
9537 num_disassembly_styles
+ 1);
9538 for (i
= j
= 0; disasm_options
->name
[i
] != NULL
; i
++)
9539 if (CONST_STRNEQ (disasm_options
->name
[i
], "reg-names-"))
9541 size_t offset
= strlen ("reg-names-");
9542 const char *style
= disasm_options
->name
[i
];
9543 valid_disassembly_styles
[j
++] = &style
[offset
];
9544 length
= snprintf (rdptr
, rest
, "%s - %s\n", &style
[offset
],
9545 disasm_options
->description
[i
]);
9549 /* Mark the end of valid options. */
9550 valid_disassembly_styles
[num_disassembly_styles
] = NULL
;
9552 /* Create the help text. */
9553 std::string helptext
= string_printf ("%s%s%s",
9554 _("The valid values are:\n"),
9556 _("The default is \"std\"."));
9558 add_setshow_enum_cmd("disassembler", no_class
,
9559 valid_disassembly_styles
, &disassembly_style
,
9560 _("Set the disassembly style."),
9561 _("Show the disassembly style."),
9563 set_disassembly_style_sfunc
,
9564 show_disassembly_style_sfunc
,
9565 &setarmcmdlist
, &showarmcmdlist
);
9567 add_setshow_boolean_cmd ("apcs32", no_class
, &arm_apcs_32
,
9568 _("Set usage of ARM 32-bit mode."),
9569 _("Show usage of ARM 32-bit mode."),
9570 _("When off, a 26-bit PC will be used."),
9572 NULL
, /* FIXME: i18n: Usage of ARM 32-bit
9574 &setarmcmdlist
, &showarmcmdlist
);
9576 /* Add a command to allow the user to force the FPU model. */
9577 add_setshow_enum_cmd ("fpu", no_class
, fp_model_strings
, ¤t_fp_model
,
9578 _("Set the floating point type."),
9579 _("Show the floating point type."),
9580 _("auto - Determine the FP typefrom the OS-ABI.\n\
9581 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
9582 fpa - FPA co-processor (GCC compiled).\n\
9583 softvfp - Software FP with pure-endian doubles.\n\
9584 vfp - VFP co-processor."),
9585 set_fp_model_sfunc
, show_fp_model
,
9586 &setarmcmdlist
, &showarmcmdlist
);
9588 /* Add a command to allow the user to force the ABI. */
9589 add_setshow_enum_cmd ("abi", class_support
, arm_abi_strings
, &arm_abi_string
,
9592 NULL
, arm_set_abi
, arm_show_abi
,
9593 &setarmcmdlist
, &showarmcmdlist
);
9595 /* Add two commands to allow the user to force the assumed
9597 add_setshow_enum_cmd ("fallback-mode", class_support
,
9598 arm_mode_strings
, &arm_fallback_mode_string
,
9599 _("Set the mode assumed when symbols are unavailable."),
9600 _("Show the mode assumed when symbols are unavailable."),
9601 NULL
, NULL
, arm_show_fallback_mode
,
9602 &setarmcmdlist
, &showarmcmdlist
);
9603 add_setshow_enum_cmd ("force-mode", class_support
,
9604 arm_mode_strings
, &arm_force_mode_string
,
9605 _("Set the mode assumed even when symbols are available."),
9606 _("Show the mode assumed even when symbols are available."),
9607 NULL
, NULL
, arm_show_force_mode
,
9608 &setarmcmdlist
, &showarmcmdlist
);
9610 /* Debugging flag. */
9611 add_setshow_boolean_cmd ("arm", class_maintenance
, &arm_debug
,
9612 _("Set ARM debugging."),
9613 _("Show ARM debugging."),
9614 _("When on, arm-specific debugging is enabled."),
9616 NULL
, /* FIXME: i18n: "ARM debugging is %s. */
9617 &setdebuglist
, &showdebuglist
);
9620 selftests::register_test ("arm-record", selftests::arm_record_test
);
/* ARM-reversible process record data structures.  */

#define ARM_INSN_SIZE_BYTES 4
#define THUMB_INSN_SIZE_BYTES 2
#define THUMB2_INSN_SIZE_BYTES 4


/* Position of the bit within a 32-bit ARM instruction
   that defines whether the instruction is a load or store.  */
#define INSN_S_L_BIT_NUM 20

/* Copy LENGTH register numbers from RECORD_BUF into a freshly
   allocated array assigned to REGS.  Does nothing when LENGTH is 0.  */
#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
	do  \
	  { \
	    unsigned int reg_len = LENGTH; \
	    if (reg_len) \
	      { \
		REGS = XNEWVEC (uint32_t, reg_len); \
		memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
	      } \
	  } \
	while (0)

/* Copy LENGTH arm_mem_r records (length/address pairs) from RECORD_BUF
   into a freshly allocated array assigned to MEMS.  Does nothing when
   LENGTH is 0.  */
#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
	do  \
	  { \
	    unsigned int mem_len = LENGTH; \
	    if (mem_len) \
	      { \
		MEMS =  XNEWVEC (struct arm_mem_r, mem_len);  \
		memcpy(&MEMS->len, &RECORD_BUF[0], \
		       sizeof(struct arm_mem_r) * LENGTH); \
	      } \
	  } \
	while (0)

/* Checks whether insn is already recorded or yet to be decoded. (boolean expression).  */
#define INSN_RECORDED(ARM_RECORD) \
	(0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
9665 /* ARM memory record structure. */
9668 uint32_t len
; /* Record length. */
9669 uint32_t addr
; /* Memory address. */
9672 /* ARM instruction record contains opcode of current insn
9673 and execution state (before entry to decode_insn()),
9674 contains list of to-be-modified registers and
9675 memory blocks (on return from decode_insn()). */
9677 typedef struct insn_decode_record_t
9679 struct gdbarch
*gdbarch
;
9680 struct regcache
*regcache
;
9681 CORE_ADDR this_addr
; /* Address of the insn being decoded. */
9682 uint32_t arm_insn
; /* Should accommodate thumb. */
9683 uint32_t cond
; /* Condition code. */
9684 uint32_t opcode
; /* Insn opcode. */
9685 uint32_t decode
; /* Insn decode bits. */
9686 uint32_t mem_rec_count
; /* No of mem records. */
9687 uint32_t reg_rec_count
; /* No of reg records. */
9688 uint32_t *arm_regs
; /* Registers to be saved for this record. */
9689 struct arm_mem_r
*arm_mems
; /* Memory to be saved for this record. */
9690 } insn_decode_record
;
/* Checks ARM SBZ and SBO mandatory fields.  Examines LEN bits of INSN
   starting at bit BIT_NUM (1-based).  When SBO is non-zero the bits
   must all be one ("should be one"); when SBO is zero they must all be
   zero ("should be zero").  Returns 1 when the field is valid, 0
   otherwise.  A zero LEN trivially succeeds.  */

static int
sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
{
  uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));

  if (!len)
    return 1;

  /* For the SBZ case invert the field so both cases reduce to
     "every extracted bit must be set".  */
  if (!sbo)
    ones = ~ones;

  while (ones)
    {
      if (!(ones & sbo))
	return 0;
      ones = ones >> 1;
    }

  return 1;
}
/* Result codes returned by the ARM process-record decoders.  */
enum arm_record_result
{
  ARM_RECORD_SUCCESS = 0,
  ARM_RECORD_FAILURE = 1
};

/* Variant selector for arm_record_strx: halfword (STRH) versus
   doubleword (STRD) store.  */
typedef enum
{
  ARM_RECORD_STRH = 1,
  ARM_RECORD_STRD
} arm_record_strx_t;
9738 arm_record_strx (insn_decode_record
*arm_insn_r
, uint32_t *record_buf
,
9739 uint32_t *record_buf_mem
, arm_record_strx_t str_type
)
9742 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
9743 ULONGEST u_regval
[2]= {0};
9745 uint32_t reg_src1
= 0, reg_src2
= 0;
9746 uint32_t immed_high
= 0, immed_low
= 0,offset_8
= 0, tgt_mem_addr
= 0;
9748 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
9749 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
9751 if (14 == arm_insn_r
->opcode
|| 10 == arm_insn_r
->opcode
)
9753 /* 1) Handle misc store, immediate offset. */
9754 immed_low
= bits (arm_insn_r
->arm_insn
, 0, 3);
9755 immed_high
= bits (arm_insn_r
->arm_insn
, 8, 11);
9756 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
9757 regcache_raw_read_unsigned (reg_cache
, reg_src1
,
9759 if (ARM_PC_REGNUM
== reg_src1
)
9761 /* If R15 was used as Rn, hence current PC+8. */
9762 u_regval
[0] = u_regval
[0] + 8;
9764 offset_8
= (immed_high
<< 4) | immed_low
;
9765 /* Calculate target store address. */
9766 if (14 == arm_insn_r
->opcode
)
9768 tgt_mem_addr
= u_regval
[0] + offset_8
;
9772 tgt_mem_addr
= u_regval
[0] - offset_8
;
9774 if (ARM_RECORD_STRH
== str_type
)
9776 record_buf_mem
[0] = 2;
9777 record_buf_mem
[1] = tgt_mem_addr
;
9778 arm_insn_r
->mem_rec_count
= 1;
9780 else if (ARM_RECORD_STRD
== str_type
)
9782 record_buf_mem
[0] = 4;
9783 record_buf_mem
[1] = tgt_mem_addr
;
9784 record_buf_mem
[2] = 4;
9785 record_buf_mem
[3] = tgt_mem_addr
+ 4;
9786 arm_insn_r
->mem_rec_count
= 2;
9789 else if (12 == arm_insn_r
->opcode
|| 8 == arm_insn_r
->opcode
)
9791 /* 2) Store, register offset. */
9793 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
9795 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
9796 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
9797 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
9800 /* If R15 was used as Rn, hence current PC+8. */
9801 u_regval
[0] = u_regval
[0] + 8;
9803 /* Calculate target store address, Rn +/- Rm, register offset. */
9804 if (12 == arm_insn_r
->opcode
)
9806 tgt_mem_addr
= u_regval
[0] + u_regval
[1];
9810 tgt_mem_addr
= u_regval
[1] - u_regval
[0];
9812 if (ARM_RECORD_STRH
== str_type
)
9814 record_buf_mem
[0] = 2;
9815 record_buf_mem
[1] = tgt_mem_addr
;
9816 arm_insn_r
->mem_rec_count
= 1;
9818 else if (ARM_RECORD_STRD
== str_type
)
9820 record_buf_mem
[0] = 4;
9821 record_buf_mem
[1] = tgt_mem_addr
;
9822 record_buf_mem
[2] = 4;
9823 record_buf_mem
[3] = tgt_mem_addr
+ 4;
9824 arm_insn_r
->mem_rec_count
= 2;
9827 else if (11 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
9828 || 2 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
)
9830 /* 3) Store, immediate pre-indexed. */
9831 /* 5) Store, immediate post-indexed. */
9832 immed_low
= bits (arm_insn_r
->arm_insn
, 0, 3);
9833 immed_high
= bits (arm_insn_r
->arm_insn
, 8, 11);
9834 offset_8
= (immed_high
<< 4) | immed_low
;
9835 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
9836 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
9837 /* Calculate target store address, Rn +/- Rm, register offset. */
9838 if (15 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
)
9840 tgt_mem_addr
= u_regval
[0] + offset_8
;
9844 tgt_mem_addr
= u_regval
[0] - offset_8
;
9846 if (ARM_RECORD_STRH
== str_type
)
9848 record_buf_mem
[0] = 2;
9849 record_buf_mem
[1] = tgt_mem_addr
;
9850 arm_insn_r
->mem_rec_count
= 1;
9852 else if (ARM_RECORD_STRD
== str_type
)
9854 record_buf_mem
[0] = 4;
9855 record_buf_mem
[1] = tgt_mem_addr
;
9856 record_buf_mem
[2] = 4;
9857 record_buf_mem
[3] = tgt_mem_addr
+ 4;
9858 arm_insn_r
->mem_rec_count
= 2;
9860 /* Record Rn also as it changes. */
9861 *(record_buf
) = bits (arm_insn_r
->arm_insn
, 16, 19);
9862 arm_insn_r
->reg_rec_count
= 1;
9864 else if (9 == arm_insn_r
->opcode
|| 13 == arm_insn_r
->opcode
9865 || 0 == arm_insn_r
->opcode
|| 4 == arm_insn_r
->opcode
)
9867 /* 4) Store, register pre-indexed. */
9868 /* 6) Store, register post -indexed. */
9869 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
9870 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
9871 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
9872 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
9873 /* Calculate target store address, Rn +/- Rm, register offset. */
9874 if (13 == arm_insn_r
->opcode
|| 4 == arm_insn_r
->opcode
)
9876 tgt_mem_addr
= u_regval
[0] + u_regval
[1];
9880 tgt_mem_addr
= u_regval
[1] - u_regval
[0];
9882 if (ARM_RECORD_STRH
== str_type
)
9884 record_buf_mem
[0] = 2;
9885 record_buf_mem
[1] = tgt_mem_addr
;
9886 arm_insn_r
->mem_rec_count
= 1;
9888 else if (ARM_RECORD_STRD
== str_type
)
9890 record_buf_mem
[0] = 4;
9891 record_buf_mem
[1] = tgt_mem_addr
;
9892 record_buf_mem
[2] = 4;
9893 record_buf_mem
[3] = tgt_mem_addr
+ 4;
9894 arm_insn_r
->mem_rec_count
= 2;
9896 /* Record Rn also as it changes. */
9897 *(record_buf
) = bits (arm_insn_r
->arm_insn
, 16, 19);
9898 arm_insn_r
->reg_rec_count
= 1;
9903 /* Handling ARM extension space insns. */
9906 arm_record_extension_space (insn_decode_record
*arm_insn_r
)
9908 int ret
= 0; /* Return value: -1:record failure ; 0:success */
9909 uint32_t opcode1
= 0, opcode2
= 0, insn_op1
= 0;
9910 uint32_t record_buf
[8], record_buf_mem
[8];
9911 uint32_t reg_src1
= 0;
9912 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
9913 ULONGEST u_regval
= 0;
9915 gdb_assert (!INSN_RECORDED(arm_insn_r
));
9916 /* Handle unconditional insn extension space. */
9918 opcode1
= bits (arm_insn_r
->arm_insn
, 20, 27);
9919 opcode2
= bits (arm_insn_r
->arm_insn
, 4, 7);
9920 if (arm_insn_r
->cond
)
9922 /* PLD has no affect on architectural state, it just affects
9924 if (5 == ((opcode1
& 0xE0) >> 5))
9927 record_buf
[0] = ARM_PS_REGNUM
;
9928 record_buf
[1] = ARM_LR_REGNUM
;
9929 arm_insn_r
->reg_rec_count
= 2;
9931 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
9935 opcode1
= bits (arm_insn_r
->arm_insn
, 25, 27);
9936 if (3 == opcode1
&& bit (arm_insn_r
->arm_insn
, 4))
9939 /* Undefined instruction on ARM V5; need to handle if later
9940 versions define it. */
9943 opcode1
= bits (arm_insn_r
->arm_insn
, 24, 27);
9944 opcode2
= bits (arm_insn_r
->arm_insn
, 4, 7);
9945 insn_op1
= bits (arm_insn_r
->arm_insn
, 20, 23);
9947 /* Handle arithmetic insn extension space. */
9948 if (!opcode1
&& 9 == opcode2
&& 1 != arm_insn_r
->cond
9949 && !INSN_RECORDED(arm_insn_r
))
9951 /* Handle MLA(S) and MUL(S). */
9952 if (in_inclusive_range (insn_op1
, 0U, 3U))
9954 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
9955 record_buf
[1] = ARM_PS_REGNUM
;
9956 arm_insn_r
->reg_rec_count
= 2;
9958 else if (in_inclusive_range (insn_op1
, 4U, 15U))
9960 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
9961 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 16, 19);
9962 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 12, 15);
9963 record_buf
[2] = ARM_PS_REGNUM
;
9964 arm_insn_r
->reg_rec_count
= 3;
9968 opcode1
= bits (arm_insn_r
->arm_insn
, 26, 27);
9969 opcode2
= bits (arm_insn_r
->arm_insn
, 23, 24);
9970 insn_op1
= bits (arm_insn_r
->arm_insn
, 21, 22);
9972 /* Handle control insn extension space. */
9974 if (!opcode1
&& 2 == opcode2
&& !bit (arm_insn_r
->arm_insn
, 20)
9975 && 1 != arm_insn_r
->cond
&& !INSN_RECORDED(arm_insn_r
))
9977 if (!bit (arm_insn_r
->arm_insn
,25))
9979 if (!bits (arm_insn_r
->arm_insn
, 4, 7))
9981 if ((0 == insn_op1
) || (2 == insn_op1
))
9984 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
9985 arm_insn_r
->reg_rec_count
= 1;
9987 else if (1 == insn_op1
)
9989 /* CSPR is going to be changed. */
9990 record_buf
[0] = ARM_PS_REGNUM
;
9991 arm_insn_r
->reg_rec_count
= 1;
9993 else if (3 == insn_op1
)
9995 /* SPSR is going to be changed. */
9996 /* We need to get SPSR value, which is yet to be done. */
10000 else if (1 == bits (arm_insn_r
->arm_insn
, 4, 7))
10005 record_buf
[0] = ARM_PS_REGNUM
;
10006 arm_insn_r
->reg_rec_count
= 1;
10008 else if (3 == insn_op1
)
10011 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10012 arm_insn_r
->reg_rec_count
= 1;
10015 else if (3 == bits (arm_insn_r
->arm_insn
, 4, 7))
10018 record_buf
[0] = ARM_PS_REGNUM
;
10019 record_buf
[1] = ARM_LR_REGNUM
;
10020 arm_insn_r
->reg_rec_count
= 2;
10022 else if (5 == bits (arm_insn_r
->arm_insn
, 4, 7))
10024 /* QADD, QSUB, QDADD, QDSUB */
10025 record_buf
[0] = ARM_PS_REGNUM
;
10026 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 12, 15);
10027 arm_insn_r
->reg_rec_count
= 2;
10029 else if (7 == bits (arm_insn_r
->arm_insn
, 4, 7))
10032 record_buf
[0] = ARM_PS_REGNUM
;
10033 record_buf
[1] = ARM_LR_REGNUM
;
10034 arm_insn_r
->reg_rec_count
= 2;
10036 /* Save SPSR also;how? */
10039 else if(8 == bits (arm_insn_r
->arm_insn
, 4, 7)
10040 || 10 == bits (arm_insn_r
->arm_insn
, 4, 7)
10041 || 12 == bits (arm_insn_r
->arm_insn
, 4, 7)
10042 || 14 == bits (arm_insn_r
->arm_insn
, 4, 7)
10045 if (0 == insn_op1
|| 1 == insn_op1
)
10047 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
10048 /* We dont do optimization for SMULW<y> where we
10050 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10051 record_buf
[1] = ARM_PS_REGNUM
;
10052 arm_insn_r
->reg_rec_count
= 2;
10054 else if (2 == insn_op1
)
10057 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10058 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
10059 arm_insn_r
->reg_rec_count
= 2;
10061 else if (3 == insn_op1
)
10064 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10065 arm_insn_r
->reg_rec_count
= 1;
10071 /* MSR : immediate form. */
10074 /* CSPR is going to be changed. */
10075 record_buf
[0] = ARM_PS_REGNUM
;
10076 arm_insn_r
->reg_rec_count
= 1;
10078 else if (3 == insn_op1
)
10080 /* SPSR is going to be changed. */
10081 /* we need to get SPSR value, which is yet to be done */
10087 opcode1
= bits (arm_insn_r
->arm_insn
, 25, 27);
10088 opcode2
= bits (arm_insn_r
->arm_insn
, 20, 24);
10089 insn_op1
= bits (arm_insn_r
->arm_insn
, 5, 6);
10091 /* Handle load/store insn extension space. */
10093 if (!opcode1
&& bit (arm_insn_r
->arm_insn
, 7)
10094 && bit (arm_insn_r
->arm_insn
, 4) && 1 != arm_insn_r
->cond
10095 && !INSN_RECORDED(arm_insn_r
))
10100 /* These insn, changes register and memory as well. */
10101 /* SWP or SWPB insn. */
10102 /* Get memory address given by Rn. */
10103 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
10104 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
10105 /* SWP insn ?, swaps word. */
10106 if (8 == arm_insn_r
->opcode
)
10108 record_buf_mem
[0] = 4;
10112 /* SWPB insn, swaps only byte. */
10113 record_buf_mem
[0] = 1;
10115 record_buf_mem
[1] = u_regval
;
10116 arm_insn_r
->mem_rec_count
= 1;
10117 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10118 arm_insn_r
->reg_rec_count
= 1;
10120 else if (1 == insn_op1
&& !bit (arm_insn_r
->arm_insn
, 20))
10123 arm_record_strx(arm_insn_r
, &record_buf
[0], &record_buf_mem
[0],
10126 else if (2 == insn_op1
&& !bit (arm_insn_r
->arm_insn
, 20))
10129 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10130 record_buf
[1] = record_buf
[0] + 1;
10131 arm_insn_r
->reg_rec_count
= 2;
10133 else if (3 == insn_op1
&& !bit (arm_insn_r
->arm_insn
, 20))
10136 arm_record_strx(arm_insn_r
, &record_buf
[0], &record_buf_mem
[0],
10139 else if (bit (arm_insn_r
->arm_insn
, 20) && insn_op1
<= 3)
10141 /* LDRH, LDRSB, LDRSH. */
10142 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10143 arm_insn_r
->reg_rec_count
= 1;
10148 opcode1
= bits (arm_insn_r
->arm_insn
, 23, 27);
10149 if (24 == opcode1
&& bit (arm_insn_r
->arm_insn
, 21)
10150 && !INSN_RECORDED(arm_insn_r
))
10153 /* Handle coprocessor insn extension space. */
10156 /* To be done for ARMv5 and later; as of now we return -1. */
10160 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
10161 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
10166 /* Handling opcode 000 insns. */
10169 arm_record_data_proc_misc_ld_str (insn_decode_record
*arm_insn_r
)
10171 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
10172 uint32_t record_buf
[8], record_buf_mem
[8];
10173 ULONGEST u_regval
[2] = {0};
10175 uint32_t reg_src1
= 0;
10176 uint32_t opcode1
= 0;
10178 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
10179 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
10180 opcode1
= bits (arm_insn_r
->arm_insn
, 20, 24);
10182 if (!((opcode1
& 0x19) == 0x10))
10184 /* Data-processing (register) and Data-processing (register-shifted
10186 /* Out of 11 shifter operands mode, all the insn modifies destination
10187 register, which is specified by 13-16 decode. */
10188 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10189 record_buf
[1] = ARM_PS_REGNUM
;
10190 arm_insn_r
->reg_rec_count
= 2;
10192 else if ((arm_insn_r
->decode
< 8) && ((opcode1
& 0x19) == 0x10))
10194 /* Miscellaneous instructions */
10196 if (3 == arm_insn_r
->decode
&& 0x12 == opcode1
10197 && sbo_sbz (arm_insn_r
->arm_insn
, 9, 12, 1))
10199 /* Handle BLX, branch and link/exchange. */
10200 if (9 == arm_insn_r
->opcode
)
10202 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
10203 and R14 stores the return address. */
10204 record_buf
[0] = ARM_PS_REGNUM
;
10205 record_buf
[1] = ARM_LR_REGNUM
;
10206 arm_insn_r
->reg_rec_count
= 2;
10209 else if (7 == arm_insn_r
->decode
&& 0x12 == opcode1
)
10211 /* Handle enhanced software breakpoint insn, BKPT. */
10212 /* CPSR is changed to be executed in ARM state, disabling normal
10213 interrupts, entering abort mode. */
10214 /* According to high vector configuration PC is set. */
10215 /* user hit breakpoint and type reverse, in
10216 that case, we need to go back with previous CPSR and
10217 Program Counter. */
10218 record_buf
[0] = ARM_PS_REGNUM
;
10219 record_buf
[1] = ARM_LR_REGNUM
;
10220 arm_insn_r
->reg_rec_count
= 2;
10222 /* Save SPSR also; how? */
10225 else if (1 == arm_insn_r
->decode
&& 0x12 == opcode1
10226 && sbo_sbz (arm_insn_r
->arm_insn
, 9, 12, 1))
10228 /* Handle BX, branch and link/exchange. */
10229 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
10230 record_buf
[0] = ARM_PS_REGNUM
;
10231 arm_insn_r
->reg_rec_count
= 1;
10233 else if (1 == arm_insn_r
->decode
&& 0x16 == opcode1
10234 && sbo_sbz (arm_insn_r
->arm_insn
, 9, 4, 1)
10235 && sbo_sbz (arm_insn_r
->arm_insn
, 17, 4, 1))
10237 /* Count leading zeros: CLZ. */
10238 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10239 arm_insn_r
->reg_rec_count
= 1;
10241 else if (!bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
)
10242 && (8 == arm_insn_r
->opcode
|| 10 == arm_insn_r
->opcode
)
10243 && sbo_sbz (arm_insn_r
->arm_insn
, 17, 4, 1)
10244 && sbo_sbz (arm_insn_r
->arm_insn
, 1, 12, 0))
10246 /* Handle MRS insn. */
10247 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10248 arm_insn_r
->reg_rec_count
= 1;
10251 else if (9 == arm_insn_r
->decode
&& opcode1
< 0x10)
10253 /* Multiply and multiply-accumulate */
10255 /* Handle multiply instructions. */
10256 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
10257 if (0 == arm_insn_r
->opcode
|| 1 == arm_insn_r
->opcode
)
10259 /* Handle MLA and MUL. */
10260 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 16, 19);
10261 record_buf
[1] = ARM_PS_REGNUM
;
10262 arm_insn_r
->reg_rec_count
= 2;
10264 else if (4 <= arm_insn_r
->opcode
&& 7 >= arm_insn_r
->opcode
)
10266 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
10267 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 16, 19);
10268 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 12, 15);
10269 record_buf
[2] = ARM_PS_REGNUM
;
10270 arm_insn_r
->reg_rec_count
= 3;
10273 else if (9 == arm_insn_r
->decode
&& opcode1
> 0x10)
10275 /* Synchronization primitives */
10277 /* Handling SWP, SWPB. */
10278 /* These insn, changes register and memory as well. */
10279 /* SWP or SWPB insn. */
10281 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
10282 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
10283 /* SWP insn ?, swaps word. */
10284 if (8 == arm_insn_r
->opcode
)
10286 record_buf_mem
[0] = 4;
10290 /* SWPB insn, swaps only byte. */
10291 record_buf_mem
[0] = 1;
10293 record_buf_mem
[1] = u_regval
[0];
10294 arm_insn_r
->mem_rec_count
= 1;
10295 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10296 arm_insn_r
->reg_rec_count
= 1;
10298 else if (11 == arm_insn_r
->decode
|| 13 == arm_insn_r
->decode
10299 || 15 == arm_insn_r
->decode
)
10301 if ((opcode1
& 0x12) == 2)
10303 /* Extra load/store (unprivileged) */
10308 /* Extra load/store */
10309 switch (bits (arm_insn_r
->arm_insn
, 5, 6))
10312 if ((opcode1
& 0x05) == 0x0 || (opcode1
& 0x05) == 0x4)
10314 /* STRH (register), STRH (immediate) */
10315 arm_record_strx (arm_insn_r
, &record_buf
[0],
10316 &record_buf_mem
[0], ARM_RECORD_STRH
);
10318 else if ((opcode1
& 0x05) == 0x1)
10320 /* LDRH (register) */
10321 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10322 arm_insn_r
->reg_rec_count
= 1;
10324 if (bit (arm_insn_r
->arm_insn
, 21))
10326 /* Write back to Rn. */
10327 record_buf
[arm_insn_r
->reg_rec_count
++]
10328 = bits (arm_insn_r
->arm_insn
, 16, 19);
10331 else if ((opcode1
& 0x05) == 0x5)
10333 /* LDRH (immediate), LDRH (literal) */
10334 int rn
= bits (arm_insn_r
->arm_insn
, 16, 19);
10336 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10337 arm_insn_r
->reg_rec_count
= 1;
10341 /*LDRH (immediate) */
10342 if (bit (arm_insn_r
->arm_insn
, 21))
10344 /* Write back to Rn. */
10345 record_buf
[arm_insn_r
->reg_rec_count
++] = rn
;
10353 if ((opcode1
& 0x05) == 0x0)
10355 /* LDRD (register) */
10356 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10357 record_buf
[1] = record_buf
[0] + 1;
10358 arm_insn_r
->reg_rec_count
= 2;
10360 if (bit (arm_insn_r
->arm_insn
, 21))
10362 /* Write back to Rn. */
10363 record_buf
[arm_insn_r
->reg_rec_count
++]
10364 = bits (arm_insn_r
->arm_insn
, 16, 19);
10367 else if ((opcode1
& 0x05) == 0x1)
10369 /* LDRSB (register) */
10370 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10371 arm_insn_r
->reg_rec_count
= 1;
10373 if (bit (arm_insn_r
->arm_insn
, 21))
10375 /* Write back to Rn. */
10376 record_buf
[arm_insn_r
->reg_rec_count
++]
10377 = bits (arm_insn_r
->arm_insn
, 16, 19);
10380 else if ((opcode1
& 0x05) == 0x4 || (opcode1
& 0x05) == 0x5)
10382 /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
10384 int rn
= bits (arm_insn_r
->arm_insn
, 16, 19);
10386 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10387 arm_insn_r
->reg_rec_count
= 1;
10391 /*LDRD (immediate), LDRSB (immediate) */
10392 if (bit (arm_insn_r
->arm_insn
, 21))
10394 /* Write back to Rn. */
10395 record_buf
[arm_insn_r
->reg_rec_count
++] = rn
;
10403 if ((opcode1
& 0x05) == 0x0)
10405 /* STRD (register) */
10406 arm_record_strx (arm_insn_r
, &record_buf
[0],
10407 &record_buf_mem
[0], ARM_RECORD_STRD
);
10409 else if ((opcode1
& 0x05) == 0x1)
10411 /* LDRSH (register) */
10412 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10413 arm_insn_r
->reg_rec_count
= 1;
10415 if (bit (arm_insn_r
->arm_insn
, 21))
10417 /* Write back to Rn. */
10418 record_buf
[arm_insn_r
->reg_rec_count
++]
10419 = bits (arm_insn_r
->arm_insn
, 16, 19);
10422 else if ((opcode1
& 0x05) == 0x4)
10424 /* STRD (immediate) */
10425 arm_record_strx (arm_insn_r
, &record_buf
[0],
10426 &record_buf_mem
[0], ARM_RECORD_STRD
);
10428 else if ((opcode1
& 0x05) == 0x5)
10430 /* LDRSH (immediate), LDRSH (literal) */
10431 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10432 arm_insn_r
->reg_rec_count
= 1;
10434 if (bit (arm_insn_r
->arm_insn
, 21))
10436 /* Write back to Rn. */
10437 record_buf
[arm_insn_r
->reg_rec_count
++]
10438 = bits (arm_insn_r
->arm_insn
, 16, 19);
10454 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
10455 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
10459 /* Handling opcode 001 insns. */
10462 arm_record_data_proc_imm (insn_decode_record
*arm_insn_r
)
10464 uint32_t record_buf
[8], record_buf_mem
[8];
10466 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
10467 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
10469 if ((9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
)
10470 && 2 == bits (arm_insn_r
->arm_insn
, 20, 21)
10471 && sbo_sbz (arm_insn_r
->arm_insn
, 13, 4, 1)
10474 /* Handle MSR insn. */
10475 if (9 == arm_insn_r
->opcode
)
10477 /* CSPR is going to be changed. */
10478 record_buf
[0] = ARM_PS_REGNUM
;
10479 arm_insn_r
->reg_rec_count
= 1;
10483 /* SPSR is going to be changed. */
10486 else if (arm_insn_r
->opcode
<= 15)
10488 /* Normal data processing insns. */
10489 /* Out of 11 shifter operands mode, all the insn modifies destination
10490 register, which is specified by 13-16 decode. */
10491 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10492 record_buf
[1] = ARM_PS_REGNUM
;
10493 arm_insn_r
->reg_rec_count
= 2;
10500 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
10501 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
10506 arm_record_media (insn_decode_record
*arm_insn_r
)
10508 uint32_t record_buf
[8];
10510 switch (bits (arm_insn_r
->arm_insn
, 22, 24))
10513 /* Parallel addition and subtraction, signed */
10515 /* Parallel addition and subtraction, unsigned */
10518 /* Packing, unpacking, saturation and reversal */
10520 int rd
= bits (arm_insn_r
->arm_insn
, 12, 15);
10522 record_buf
[arm_insn_r
->reg_rec_count
++] = rd
;
10528 /* Signed multiplies */
10530 int rd
= bits (arm_insn_r
->arm_insn
, 16, 19);
10531 unsigned int op1
= bits (arm_insn_r
->arm_insn
, 20, 22);
10533 record_buf
[arm_insn_r
->reg_rec_count
++] = rd
;
10535 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_PS_REGNUM
;
10536 else if (op1
== 0x4)
10537 record_buf
[arm_insn_r
->reg_rec_count
++]
10538 = bits (arm_insn_r
->arm_insn
, 12, 15);
10544 if (bit (arm_insn_r
->arm_insn
, 21)
10545 && bits (arm_insn_r
->arm_insn
, 5, 6) == 0x2)
10548 record_buf
[arm_insn_r
->reg_rec_count
++]
10549 = bits (arm_insn_r
->arm_insn
, 12, 15);
10551 else if (bits (arm_insn_r
->arm_insn
, 20, 21) == 0x0
10552 && bits (arm_insn_r
->arm_insn
, 5, 7) == 0x0)
10554 /* USAD8 and USADA8 */
10555 record_buf
[arm_insn_r
->reg_rec_count
++]
10556 = bits (arm_insn_r
->arm_insn
, 16, 19);
10563 if (bits (arm_insn_r
->arm_insn
, 20, 21) == 0x3
10564 && bits (arm_insn_r
->arm_insn
, 5, 7) == 0x7)
10566 /* Permanently UNDEFINED */
10571 /* BFC, BFI and UBFX */
10572 record_buf
[arm_insn_r
->reg_rec_count
++]
10573 = bits (arm_insn_r
->arm_insn
, 12, 15);
10582 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
10587 /* Handle ARM mode instructions with opcode 010. */
10590 arm_record_ld_st_imm_offset (insn_decode_record
*arm_insn_r
)
10592 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
10594 uint32_t reg_base
, reg_dest
;
10595 uint32_t offset_12
, tgt_mem_addr
;
10596 uint32_t record_buf
[8], record_buf_mem
[8];
10597 unsigned char wback
;
10600 /* Calculate wback. */
10601 wback
= (bit (arm_insn_r
->arm_insn
, 24) == 0)
10602 || (bit (arm_insn_r
->arm_insn
, 21) == 1);
10604 arm_insn_r
->reg_rec_count
= 0;
10605 reg_base
= bits (arm_insn_r
->arm_insn
, 16, 19);
10607 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
10609 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10612 reg_dest
= bits (arm_insn_r
->arm_insn
, 12, 15);
10613 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_dest
;
10615 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10616 preceeds a LDR instruction having R15 as reg_base, it
10617 emulates a branch and link instruction, and hence we need to save
10618 CPSR and PC as well. */
10619 if (ARM_PC_REGNUM
== reg_dest
)
10620 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_PS_REGNUM
;
10622 /* If wback is true, also save the base register, which is going to be
10625 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
10629 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10631 offset_12
= bits (arm_insn_r
->arm_insn
, 0, 11);
10632 regcache_raw_read_unsigned (reg_cache
, reg_base
, &u_regval
);
10634 /* Handle bit U. */
10635 if (bit (arm_insn_r
->arm_insn
, 23))
10637 /* U == 1: Add the offset. */
10638 tgt_mem_addr
= (uint32_t) u_regval
+ offset_12
;
10642 /* U == 0: subtract the offset. */
10643 tgt_mem_addr
= (uint32_t) u_regval
- offset_12
;
10646 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10648 if (bit (arm_insn_r
->arm_insn
, 22))
10650 /* STRB and STRBT: 1 byte. */
10651 record_buf_mem
[0] = 1;
10655 /* STR and STRT: 4 bytes. */
10656 record_buf_mem
[0] = 4;
10659 /* Handle bit P. */
10660 if (bit (arm_insn_r
->arm_insn
, 24))
10661 record_buf_mem
[1] = tgt_mem_addr
;
10663 record_buf_mem
[1] = (uint32_t) u_regval
;
10665 arm_insn_r
->mem_rec_count
= 1;
10667 /* If wback is true, also save the base register, which is going to be
10670 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
10673 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
10674 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
10678 /* Handling opcode 011 insns. */
10681 arm_record_ld_st_reg_offset (insn_decode_record
*arm_insn_r
)
10683 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
10685 uint32_t shift_imm
= 0;
10686 uint32_t reg_src1
= 0, reg_src2
= 0, reg_dest
= 0;
10687 uint32_t offset_12
= 0, tgt_mem_addr
= 0;
10688 uint32_t record_buf
[8], record_buf_mem
[8];
10691 ULONGEST u_regval
[2];
10693 if (bit (arm_insn_r
->arm_insn
, 4))
10694 return arm_record_media (arm_insn_r
);
10696 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
10697 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
10699 /* Handle enhanced store insns and LDRD DSP insn,
10700 order begins according to addressing modes for store insns
10704 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
10706 reg_dest
= bits (arm_insn_r
->arm_insn
, 12, 15);
10707 /* LDR insn has a capability to do branching, if
10708 MOV LR, PC is preceded by LDR insn having Rn as R15
10709 in that case, it emulates branch and link insn, and hence we
10710 need to save CSPR and PC as well. */
10711 if (15 != reg_dest
)
10713 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10714 arm_insn_r
->reg_rec_count
= 1;
10718 record_buf
[0] = reg_dest
;
10719 record_buf
[1] = ARM_PS_REGNUM
;
10720 arm_insn_r
->reg_rec_count
= 2;
10725 if (! bits (arm_insn_r
->arm_insn
, 4, 11))
10727 /* Store insn, register offset and register pre-indexed,
10728 register post-indexed. */
10730 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
10732 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
10733 regcache_raw_read_unsigned (reg_cache
, reg_src1
10735 regcache_raw_read_unsigned (reg_cache
, reg_src2
10737 if (15 == reg_src2
)
10739 /* If R15 was used as Rn, hence current PC+8. */
10740 /* Pre-indexed mode doesnt reach here ; illegal insn. */
10741 u_regval
[0] = u_regval
[0] + 8;
10743 /* Calculate target store address, Rn +/- Rm, register offset. */
10745 if (bit (arm_insn_r
->arm_insn
, 23))
10747 tgt_mem_addr
= u_regval
[0] + u_regval
[1];
10751 tgt_mem_addr
= u_regval
[1] - u_regval
[0];
10754 switch (arm_insn_r
->opcode
)
10768 record_buf_mem
[0] = 4;
10783 record_buf_mem
[0] = 1;
10787 gdb_assert_not_reached ("no decoding pattern found");
10790 record_buf_mem
[1] = tgt_mem_addr
;
10791 arm_insn_r
->mem_rec_count
= 1;
10793 if (9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
10794 || 13 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
10795 || 0 == arm_insn_r
->opcode
|| 2 == arm_insn_r
->opcode
10796 || 4 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
10797 || 1 == arm_insn_r
->opcode
|| 3 == arm_insn_r
->opcode
10798 || 5 == arm_insn_r
->opcode
|| 7 == arm_insn_r
->opcode
10801 /* Rn is going to be changed in pre-indexed mode and
10802 post-indexed mode as well. */
10803 record_buf
[0] = reg_src2
;
10804 arm_insn_r
->reg_rec_count
= 1;
10809 /* Store insn, scaled register offset; scaled pre-indexed. */
10810 offset_12
= bits (arm_insn_r
->arm_insn
, 5, 6);
10812 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
10814 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
10815 /* Get shift_imm. */
10816 shift_imm
= bits (arm_insn_r
->arm_insn
, 7, 11);
10817 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
10818 regcache_raw_read_signed (reg_cache
, reg_src1
, &s_word
);
10819 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
10820 /* Offset_12 used as shift. */
10824 /* Offset_12 used as index. */
10825 offset_12
= u_regval
[0] << shift_imm
;
10829 offset_12
= (!shift_imm
)?0:u_regval
[0] >> shift_imm
;
10835 if (bit (u_regval
[0], 31))
10837 offset_12
= 0xFFFFFFFF;
10846 /* This is arithmetic shift. */
10847 offset_12
= s_word
>> shift_imm
;
10854 regcache_raw_read_unsigned (reg_cache
, ARM_PS_REGNUM
,
10856 /* Get C flag value and shift it by 31. */
10857 offset_12
= (((bit (u_regval
[1], 29)) << 31) \
10858 | (u_regval
[0]) >> 1);
10862 offset_12
= (u_regval
[0] >> shift_imm
) \
10864 (sizeof(uint32_t) - shift_imm
));
10869 gdb_assert_not_reached ("no decoding pattern found");
10873 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
10875 if (bit (arm_insn_r
->arm_insn
, 23))
10877 tgt_mem_addr
= u_regval
[1] + offset_12
;
10881 tgt_mem_addr
= u_regval
[1] - offset_12
;
10884 switch (arm_insn_r
->opcode
)
10898 record_buf_mem
[0] = 4;
10913 record_buf_mem
[0] = 1;
10917 gdb_assert_not_reached ("no decoding pattern found");
10920 record_buf_mem
[1] = tgt_mem_addr
;
10921 arm_insn_r
->mem_rec_count
= 1;
10923 if (9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
10924 || 13 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
10925 || 0 == arm_insn_r
->opcode
|| 2 == arm_insn_r
->opcode
10926 || 4 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
10927 || 1 == arm_insn_r
->opcode
|| 3 == arm_insn_r
->opcode
10928 || 5 == arm_insn_r
->opcode
|| 7 == arm_insn_r
->opcode
10931 /* Rn is going to be changed in register scaled pre-indexed
10932 mode,and scaled post indexed mode. */
10933 record_buf
[0] = reg_src2
;
10934 arm_insn_r
->reg_rec_count
= 1;
10939 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
10940 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
10944 /* Handle ARM mode instructions with opcode 100. */
10947 arm_record_ld_st_multiple (insn_decode_record
*arm_insn_r
)
10949 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
10950 uint32_t register_count
= 0, register_bits
;
10951 uint32_t reg_base
, addr_mode
;
10952 uint32_t record_buf
[24], record_buf_mem
[48];
10956 /* Fetch the list of registers. */
10957 register_bits
= bits (arm_insn_r
->arm_insn
, 0, 15);
10958 arm_insn_r
->reg_rec_count
= 0;
10960 /* Fetch the base register that contains the address we are loading data
10962 reg_base
= bits (arm_insn_r
->arm_insn
, 16, 19);
10964 /* Calculate wback. */
10965 wback
= (bit (arm_insn_r
->arm_insn
, 21) == 1);
10967 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
10969 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10971 /* Find out which registers are going to be loaded from memory. */
10972 while (register_bits
)
10974 if (register_bits
& 0x00000001)
10975 record_buf
[arm_insn_r
->reg_rec_count
++] = register_count
;
10976 register_bits
= register_bits
>> 1;
10981 /* If wback is true, also save the base register, which is going to be
10984 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
10986 /* Save the CPSR register. */
10987 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_PS_REGNUM
;
10991 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10993 addr_mode
= bits (arm_insn_r
->arm_insn
, 23, 24);
10995 regcache_raw_read_unsigned (reg_cache
, reg_base
, &u_regval
);
10997 /* Find out how many registers are going to be stored to memory. */
10998 while (register_bits
)
11000 if (register_bits
& 0x00000001)
11002 register_bits
= register_bits
>> 1;
11007 /* STMDA (STMED): Decrement after. */
11009 record_buf_mem
[1] = (uint32_t) u_regval
11010 - register_count
* ARM_INT_REGISTER_SIZE
+ 4;
11012 /* STM (STMIA, STMEA): Increment after. */
11014 record_buf_mem
[1] = (uint32_t) u_regval
;
11016 /* STMDB (STMFD): Decrement before. */
11018 record_buf_mem
[1] = (uint32_t) u_regval
11019 - register_count
* ARM_INT_REGISTER_SIZE
;
11021 /* STMIB (STMFA): Increment before. */
11023 record_buf_mem
[1] = (uint32_t) u_regval
+ ARM_INT_REGISTER_SIZE
;
11026 gdb_assert_not_reached ("no decoding pattern found");
11030 record_buf_mem
[0] = register_count
* ARM_INT_REGISTER_SIZE
;
11031 arm_insn_r
->mem_rec_count
= 1;
11033 /* If wback is true, also save the base register, which is going to be
11036 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
11039 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11040 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11044 /* Handling opcode 101 insns. */
11047 arm_record_b_bl (insn_decode_record
*arm_insn_r
)
11049 uint32_t record_buf
[8];
11051 /* Handle B, BL, BLX(1) insns. */
11052 /* B simply branches so we do nothing here. */
11053 /* Note: BLX(1) doesnt fall here but instead it falls into
11054 extension space. */
11055 if (bit (arm_insn_r
->arm_insn
, 24))
11057 record_buf
[0] = ARM_LR_REGNUM
;
11058 arm_insn_r
->reg_rec_count
= 1;
11061 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11067 arm_record_unsupported_insn (insn_decode_record
*arm_insn_r
)
11069 printf_unfiltered (_("Process record does not support instruction "
11070 "0x%0x at address %s.\n"),arm_insn_r
->arm_insn
,
11071 paddress (arm_insn_r
->gdbarch
, arm_insn_r
->this_addr
));
11076 /* Record handler for vector data transfer instructions. */
11079 arm_record_vdata_transfer_insn (insn_decode_record
*arm_insn_r
)
11081 uint32_t bits_a
, bit_c
, bit_l
, reg_t
, reg_v
;
11082 uint32_t record_buf
[4];
11084 reg_t
= bits (arm_insn_r
->arm_insn
, 12, 15);
11085 reg_v
= bits (arm_insn_r
->arm_insn
, 21, 23);
11086 bits_a
= bits (arm_insn_r
->arm_insn
, 21, 23);
11087 bit_l
= bit (arm_insn_r
->arm_insn
, 20);
11088 bit_c
= bit (arm_insn_r
->arm_insn
, 8);
11090 /* Handle VMOV instruction. */
11091 if (bit_l
&& bit_c
)
11093 record_buf
[0] = reg_t
;
11094 arm_insn_r
->reg_rec_count
= 1;
11096 else if (bit_l
&& !bit_c
)
11098 /* Handle VMOV instruction. */
11099 if (bits_a
== 0x00)
11101 record_buf
[0] = reg_t
;
11102 arm_insn_r
->reg_rec_count
= 1;
11104 /* Handle VMRS instruction. */
11105 else if (bits_a
== 0x07)
11108 reg_t
= ARM_PS_REGNUM
;
11110 record_buf
[0] = reg_t
;
11111 arm_insn_r
->reg_rec_count
= 1;
11114 else if (!bit_l
&& !bit_c
)
11116 /* Handle VMOV instruction. */
11117 if (bits_a
== 0x00)
11119 record_buf
[0] = ARM_D0_REGNUM
+ reg_v
;
11121 arm_insn_r
->reg_rec_count
= 1;
11123 /* Handle VMSR instruction. */
11124 else if (bits_a
== 0x07)
11126 record_buf
[0] = ARM_FPSCR_REGNUM
;
11127 arm_insn_r
->reg_rec_count
= 1;
11130 else if (!bit_l
&& bit_c
)
11132 /* Handle VMOV instruction. */
11133 if (!(bits_a
& 0x04))
11135 record_buf
[0] = (reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4))
11137 arm_insn_r
->reg_rec_count
= 1;
11139 /* Handle VDUP instruction. */
11142 if (bit (arm_insn_r
->arm_insn
, 21))
11144 reg_v
= reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4);
11145 record_buf
[0] = reg_v
+ ARM_D0_REGNUM
;
11146 record_buf
[1] = reg_v
+ ARM_D0_REGNUM
+ 1;
11147 arm_insn_r
->reg_rec_count
= 2;
11151 reg_v
= reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4);
11152 record_buf
[0] = reg_v
+ ARM_D0_REGNUM
;
11153 arm_insn_r
->reg_rec_count
= 1;
11158 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11162 /* Record handler for extension register load/store instructions. */
11165 arm_record_exreg_ld_st_insn (insn_decode_record
*arm_insn_r
)
11167 uint32_t opcode
, single_reg
;
11168 uint8_t op_vldm_vstm
;
11169 uint32_t record_buf
[8], record_buf_mem
[128];
11170 ULONGEST u_regval
= 0;
11172 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11174 opcode
= bits (arm_insn_r
->arm_insn
, 20, 24);
11175 single_reg
= !bit (arm_insn_r
->arm_insn
, 8);
11176 op_vldm_vstm
= opcode
& 0x1b;
11178 /* Handle VMOV instructions. */
11179 if ((opcode
& 0x1e) == 0x04)
11181 if (bit (arm_insn_r
->arm_insn
, 20)) /* to_arm_registers bit 20? */
11183 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11184 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
11185 arm_insn_r
->reg_rec_count
= 2;
11189 uint8_t reg_m
= bits (arm_insn_r
->arm_insn
, 0, 3);
11190 uint8_t bit_m
= bit (arm_insn_r
->arm_insn
, 5);
11194 /* The first S register number m is REG_M:M (M is bit 5),
11195 the corresponding D register number is REG_M:M / 2, which
11197 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_D0_REGNUM
+ reg_m
;
11198 /* The second S register number is REG_M:M + 1, the
11199 corresponding D register number is (REG_M:M + 1) / 2.
11200 IOW, if bit M is 1, the first and second S registers
11201 are mapped to different D registers, otherwise, they are
11202 in the same D register. */
11205 record_buf
[arm_insn_r
->reg_rec_count
++]
11206 = ARM_D0_REGNUM
+ reg_m
+ 1;
11211 record_buf
[0] = ((bit_m
<< 4) + reg_m
+ ARM_D0_REGNUM
);
11212 arm_insn_r
->reg_rec_count
= 1;
11216 /* Handle VSTM and VPUSH instructions. */
11217 else if (op_vldm_vstm
== 0x08 || op_vldm_vstm
== 0x0a
11218 || op_vldm_vstm
== 0x12)
11220 uint32_t start_address
, reg_rn
, imm_off32
, imm_off8
, memory_count
;
11221 uint32_t memory_index
= 0;
11223 reg_rn
= bits (arm_insn_r
->arm_insn
, 16, 19);
11224 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
11225 imm_off8
= bits (arm_insn_r
->arm_insn
, 0, 7);
11226 imm_off32
= imm_off8
<< 2;
11227 memory_count
= imm_off8
;
11229 if (bit (arm_insn_r
->arm_insn
, 23))
11230 start_address
= u_regval
;
11232 start_address
= u_regval
- imm_off32
;
11234 if (bit (arm_insn_r
->arm_insn
, 21))
11236 record_buf
[0] = reg_rn
;
11237 arm_insn_r
->reg_rec_count
= 1;
11240 while (memory_count
> 0)
11244 record_buf_mem
[memory_index
] = 4;
11245 record_buf_mem
[memory_index
+ 1] = start_address
;
11246 start_address
= start_address
+ 4;
11247 memory_index
= memory_index
+ 2;
11251 record_buf_mem
[memory_index
] = 4;
11252 record_buf_mem
[memory_index
+ 1] = start_address
;
11253 record_buf_mem
[memory_index
+ 2] = 4;
11254 record_buf_mem
[memory_index
+ 3] = start_address
+ 4;
11255 start_address
= start_address
+ 8;
11256 memory_index
= memory_index
+ 4;
11260 arm_insn_r
->mem_rec_count
= (memory_index
>> 1);
11262 /* Handle VLDM instructions. */
11263 else if (op_vldm_vstm
== 0x09 || op_vldm_vstm
== 0x0b
11264 || op_vldm_vstm
== 0x13)
11266 uint32_t reg_count
, reg_vd
;
11267 uint32_t reg_index
= 0;
11268 uint32_t bit_d
= bit (arm_insn_r
->arm_insn
, 22);
11270 reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
11271 reg_count
= bits (arm_insn_r
->arm_insn
, 0, 7);
11273 /* REG_VD is the first D register number. If the instruction
11274 loads memory to S registers (SINGLE_REG is TRUE), the register
11275 number is (REG_VD << 1 | bit D), so the corresponding D
11276 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11278 reg_vd
= reg_vd
| (bit_d
<< 4);
11280 if (bit (arm_insn_r
->arm_insn
, 21) /* write back */)
11281 record_buf
[reg_index
++] = bits (arm_insn_r
->arm_insn
, 16, 19);
11283 /* If the instruction loads memory to D register, REG_COUNT should
11284 be divided by 2, according to the ARM Architecture Reference
11285 Manual. If the instruction loads memory to S register, divide by
11286 2 as well because two S registers are mapped to D register. */
11287 reg_count
= reg_count
/ 2;
11288 if (single_reg
&& bit_d
)
11290 /* Increase the register count if S register list starts from
11291 an odd number (bit d is one). */
11295 while (reg_count
> 0)
11297 record_buf
[reg_index
++] = ARM_D0_REGNUM
+ reg_vd
+ reg_count
- 1;
11300 arm_insn_r
->reg_rec_count
= reg_index
;
11302 /* VSTR Vector store register. */
11303 else if ((opcode
& 0x13) == 0x10)
11305 uint32_t start_address
, reg_rn
, imm_off32
, imm_off8
;
11306 uint32_t memory_index
= 0;
11308 reg_rn
= bits (arm_insn_r
->arm_insn
, 16, 19);
11309 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
11310 imm_off8
= bits (arm_insn_r
->arm_insn
, 0, 7);
11311 imm_off32
= imm_off8
<< 2;
11313 if (bit (arm_insn_r
->arm_insn
, 23))
11314 start_address
= u_regval
+ imm_off32
;
11316 start_address
= u_regval
- imm_off32
;
11320 record_buf_mem
[memory_index
] = 4;
11321 record_buf_mem
[memory_index
+ 1] = start_address
;
11322 arm_insn_r
->mem_rec_count
= 1;
11326 record_buf_mem
[memory_index
] = 4;
11327 record_buf_mem
[memory_index
+ 1] = start_address
;
11328 record_buf_mem
[memory_index
+ 2] = 4;
11329 record_buf_mem
[memory_index
+ 3] = start_address
+ 4;
11330 arm_insn_r
->mem_rec_count
= 2;
11333 /* VLDR Vector load register. */
11334 else if ((opcode
& 0x13) == 0x11)
11336 uint32_t reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
11340 reg_vd
= reg_vd
| (bit (arm_insn_r
->arm_insn
, 22) << 4);
11341 record_buf
[0] = ARM_D0_REGNUM
+ reg_vd
;
11345 reg_vd
= (reg_vd
<< 1) | bit (arm_insn_r
->arm_insn
, 22);
11346 /* Record register D rather than pseudo register S. */
11347 record_buf
[0] = ARM_D0_REGNUM
+ reg_vd
/ 2;
11349 arm_insn_r
->reg_rec_count
= 1;
11352 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11353 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11357 /* Record handler for arm/thumb mode VFP data processing instructions. */
11360 arm_record_vfp_data_proc_insn (insn_decode_record
*arm_insn_r
)
11362 uint32_t opc1
, opc2
, opc3
, dp_op_sz
, bit_d
, reg_vd
;
11363 uint32_t record_buf
[4];
11364 enum insn_types
{INSN_T0
, INSN_T1
, INSN_T2
, INSN_T3
, INSN_INV
};
11365 enum insn_types curr_insn_type
= INSN_INV
;
11367 reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
11368 opc1
= bits (arm_insn_r
->arm_insn
, 20, 23);
11369 opc2
= bits (arm_insn_r
->arm_insn
, 16, 19);
11370 opc3
= bits (arm_insn_r
->arm_insn
, 6, 7);
11371 dp_op_sz
= bit (arm_insn_r
->arm_insn
, 8);
11372 bit_d
= bit (arm_insn_r
->arm_insn
, 22);
11373 /* Mask off the "D" bit. */
11374 opc1
= opc1
& ~0x04;
11376 /* Handle VMLA, VMLS. */
11379 if (bit (arm_insn_r
->arm_insn
, 10))
11381 if (bit (arm_insn_r
->arm_insn
, 6))
11382 curr_insn_type
= INSN_T0
;
11384 curr_insn_type
= INSN_T1
;
11389 curr_insn_type
= INSN_T1
;
11391 curr_insn_type
= INSN_T2
;
11394 /* Handle VNMLA, VNMLS, VNMUL. */
11395 else if (opc1
== 0x01)
11398 curr_insn_type
= INSN_T1
;
11400 curr_insn_type
= INSN_T2
;
11403 else if (opc1
== 0x02 && !(opc3
& 0x01))
11405 if (bit (arm_insn_r
->arm_insn
, 10))
11407 if (bit (arm_insn_r
->arm_insn
, 6))
11408 curr_insn_type
= INSN_T0
;
11410 curr_insn_type
= INSN_T1
;
11415 curr_insn_type
= INSN_T1
;
11417 curr_insn_type
= INSN_T2
;
11420 /* Handle VADD, VSUB. */
11421 else if (opc1
== 0x03)
11423 if (!bit (arm_insn_r
->arm_insn
, 9))
11425 if (bit (arm_insn_r
->arm_insn
, 6))
11426 curr_insn_type
= INSN_T0
;
11428 curr_insn_type
= INSN_T1
;
11433 curr_insn_type
= INSN_T1
;
11435 curr_insn_type
= INSN_T2
;
11439 else if (opc1
== 0x08)
11442 curr_insn_type
= INSN_T1
;
11444 curr_insn_type
= INSN_T2
;
11446 /* Handle all other vfp data processing instructions. */
11447 else if (opc1
== 0x0b)
11450 if (!(opc3
& 0x01) || (opc2
== 0x00 && opc3
== 0x01))
11452 if (bit (arm_insn_r
->arm_insn
, 4))
11454 if (bit (arm_insn_r
->arm_insn
, 6))
11455 curr_insn_type
= INSN_T0
;
11457 curr_insn_type
= INSN_T1
;
11462 curr_insn_type
= INSN_T1
;
11464 curr_insn_type
= INSN_T2
;
11467 /* Handle VNEG and VABS. */
11468 else if ((opc2
== 0x01 && opc3
== 0x01)
11469 || (opc2
== 0x00 && opc3
== 0x03))
11471 if (!bit (arm_insn_r
->arm_insn
, 11))
11473 if (bit (arm_insn_r
->arm_insn
, 6))
11474 curr_insn_type
= INSN_T0
;
11476 curr_insn_type
= INSN_T1
;
11481 curr_insn_type
= INSN_T1
;
11483 curr_insn_type
= INSN_T2
;
11486 /* Handle VSQRT. */
11487 else if (opc2
== 0x01 && opc3
== 0x03)
11490 curr_insn_type
= INSN_T1
;
11492 curr_insn_type
= INSN_T2
;
11495 else if (opc2
== 0x07 && opc3
== 0x03)
11498 curr_insn_type
= INSN_T1
;
11500 curr_insn_type
= INSN_T2
;
11502 else if (opc3
& 0x01)
11505 if ((opc2
== 0x08) || (opc2
& 0x0e) == 0x0c)
11507 if (!bit (arm_insn_r
->arm_insn
, 18))
11508 curr_insn_type
= INSN_T2
;
11512 curr_insn_type
= INSN_T1
;
11514 curr_insn_type
= INSN_T2
;
11518 else if ((opc2
& 0x0e) == 0x0a || (opc2
& 0x0e) == 0x0e)
11521 curr_insn_type
= INSN_T1
;
11523 curr_insn_type
= INSN_T2
;
11525 /* Handle VCVTB, VCVTT. */
11526 else if ((opc2
& 0x0e) == 0x02)
11527 curr_insn_type
= INSN_T2
;
11528 /* Handle VCMP, VCMPE. */
11529 else if ((opc2
& 0x0e) == 0x04)
11530 curr_insn_type
= INSN_T3
;
11534 switch (curr_insn_type
)
11537 reg_vd
= reg_vd
| (bit_d
<< 4);
11538 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
11539 record_buf
[1] = reg_vd
+ ARM_D0_REGNUM
+ 1;
11540 arm_insn_r
->reg_rec_count
= 2;
11544 reg_vd
= reg_vd
| (bit_d
<< 4);
11545 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
11546 arm_insn_r
->reg_rec_count
= 1;
11550 reg_vd
= (reg_vd
<< 1) | bit_d
;
11551 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
11552 arm_insn_r
->reg_rec_count
= 1;
11556 record_buf
[0] = ARM_FPSCR_REGNUM
;
11557 arm_insn_r
->reg_rec_count
= 1;
11561 gdb_assert_not_reached ("no decoding pattern found");
11565 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11569 /* Handling opcode 110 insns. */
11572 arm_record_asimd_vfp_coproc (insn_decode_record
*arm_insn_r
)
11574 uint32_t op1
, op1_ebit
, coproc
;
11576 coproc
= bits (arm_insn_r
->arm_insn
, 8, 11);
11577 op1
= bits (arm_insn_r
->arm_insn
, 20, 25);
11578 op1_ebit
= bit (arm_insn_r
->arm_insn
, 20);
11580 if ((coproc
& 0x0e) == 0x0a)
11582 /* Handle extension register ld/st instructions. */
11584 return arm_record_exreg_ld_st_insn (arm_insn_r
);
11586 /* 64-bit transfers between arm core and extension registers. */
11587 if ((op1
& 0x3e) == 0x04)
11588 return arm_record_exreg_ld_st_insn (arm_insn_r
);
11592 /* Handle coprocessor ld/st instructions. */
11597 return arm_record_unsupported_insn (arm_insn_r
);
11600 return arm_record_unsupported_insn (arm_insn_r
);
11603 /* Move to coprocessor from two arm core registers. */
11605 return arm_record_unsupported_insn (arm_insn_r
);
11607 /* Move to two arm core registers from coprocessor. */
11612 reg_t
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11613 reg_t
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
11614 arm_insn_r
->reg_rec_count
= 2;
11616 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, reg_t
);
11620 return arm_record_unsupported_insn (arm_insn_r
);
11623 /* Handling opcode 111 insns. */
11626 arm_record_coproc_data_proc (insn_decode_record
*arm_insn_r
)
11628 uint32_t op
, op1_ebit
, coproc
, bits_24_25
;
11629 struct gdbarch_tdep
*tdep
= gdbarch_tdep (arm_insn_r
->gdbarch
);
11630 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11632 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 24, 27);
11633 coproc
= bits (arm_insn_r
->arm_insn
, 8, 11);
11634 op1_ebit
= bit (arm_insn_r
->arm_insn
, 20);
11635 op
= bit (arm_insn_r
->arm_insn
, 4);
11636 bits_24_25
= bits (arm_insn_r
->arm_insn
, 24, 25);
11638 /* Handle arm SWI/SVC system call instructions. */
11639 if (bits_24_25
== 0x3)
11641 if (tdep
->arm_syscall_record
!= NULL
)
11643 ULONGEST svc_operand
, svc_number
;
11645 svc_operand
= (0x00ffffff & arm_insn_r
->arm_insn
);
11647 if (svc_operand
) /* OABI. */
11648 svc_number
= svc_operand
- 0x900000;
11650 regcache_raw_read_unsigned (reg_cache
, 7, &svc_number
);
11652 return tdep
->arm_syscall_record (reg_cache
, svc_number
);
11656 printf_unfiltered (_("no syscall record support\n"));
11660 else if (bits_24_25
== 0x02)
11664 if ((coproc
& 0x0e) == 0x0a)
11666 /* 8, 16, and 32-bit transfer */
11667 return arm_record_vdata_transfer_insn (arm_insn_r
);
11674 uint32_t record_buf
[1];
11676 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11677 if (record_buf
[0] == 15)
11678 record_buf
[0] = ARM_PS_REGNUM
;
11680 arm_insn_r
->reg_rec_count
= 1;
11681 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
,
11694 if ((coproc
& 0x0e) == 0x0a)
11696 /* VFP data-processing instructions. */
11697 return arm_record_vfp_data_proc_insn (arm_insn_r
);
11708 unsigned int op1
= bits (arm_insn_r
->arm_insn
, 20, 25);
11712 if ((coproc
& 0x0e) != 0x0a)
11718 else if (op1
== 4 || op1
== 5)
11720 if ((coproc
& 0x0e) == 0x0a)
11722 /* 64-bit transfers between ARM core and extension */
11731 else if (op1
== 0 || op1
== 1)
11738 if ((coproc
& 0x0e) == 0x0a)
11740 /* Extension register load/store */
11744 /* STC, STC2, LDC, LDC2 */
11753 /* Handling opcode 000 insns. */
11756 thumb_record_shift_add_sub (insn_decode_record
*thumb_insn_r
)
11758 uint32_t record_buf
[8];
11759 uint32_t reg_src1
= 0;
11761 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
11763 record_buf
[0] = ARM_PS_REGNUM
;
11764 record_buf
[1] = reg_src1
;
11765 thumb_insn_r
->reg_rec_count
= 2;
11767 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
11773 /* Handling opcode 001 insns. */
11776 thumb_record_add_sub_cmp_mov (insn_decode_record
*thumb_insn_r
)
11778 uint32_t record_buf
[8];
11779 uint32_t reg_src1
= 0;
11781 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
11783 record_buf
[0] = ARM_PS_REGNUM
;
11784 record_buf
[1] = reg_src1
;
11785 thumb_insn_r
->reg_rec_count
= 2;
11787 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
11792 /* Handling opcode 010 insns. */
11795 thumb_record_ld_st_reg_offset (insn_decode_record
*thumb_insn_r
)
11797 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
11798 uint32_t record_buf
[8], record_buf_mem
[8];
11800 uint32_t reg_src1
= 0, reg_src2
= 0;
11801 uint32_t opcode1
= 0, opcode2
= 0, opcode3
= 0;
11803 ULONGEST u_regval
[2] = {0};
11805 opcode1
= bits (thumb_insn_r
->arm_insn
, 10, 12);
11807 if (bit (thumb_insn_r
->arm_insn
, 12))
11809 /* Handle load/store register offset. */
11810 uint32_t opB
= bits (thumb_insn_r
->arm_insn
, 9, 11);
11812 if (in_inclusive_range (opB
, 4U, 7U))
11814 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11815 reg_src1
= bits (thumb_insn_r
->arm_insn
,0, 2);
11816 record_buf
[0] = reg_src1
;
11817 thumb_insn_r
->reg_rec_count
= 1;
11819 else if (in_inclusive_range (opB
, 0U, 2U))
11821 /* STR(2), STRB(2), STRH(2) . */
11822 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
11823 reg_src2
= bits (thumb_insn_r
->arm_insn
, 6, 8);
11824 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
11825 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
11827 record_buf_mem
[0] = 4; /* STR (2). */
11829 record_buf_mem
[0] = 1; /* STRB (2). */
11831 record_buf_mem
[0] = 2; /* STRH (2). */
11832 record_buf_mem
[1] = u_regval
[0] + u_regval
[1];
11833 thumb_insn_r
->mem_rec_count
= 1;
11836 else if (bit (thumb_insn_r
->arm_insn
, 11))
11838 /* Handle load from literal pool. */
11840 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
11841 record_buf
[0] = reg_src1
;
11842 thumb_insn_r
->reg_rec_count
= 1;
11846 /* Special data instructions and branch and exchange */
11847 opcode2
= bits (thumb_insn_r
->arm_insn
, 8, 9);
11848 opcode3
= bits (thumb_insn_r
->arm_insn
, 0, 2);
11849 if ((3 == opcode2
) && (!opcode3
))
11851 /* Branch with exchange. */
11852 record_buf
[0] = ARM_PS_REGNUM
;
11853 thumb_insn_r
->reg_rec_count
= 1;
11857 /* Format 8; special data processing insns. */
11858 record_buf
[0] = ARM_PS_REGNUM
;
11859 record_buf
[1] = (bit (thumb_insn_r
->arm_insn
, 7) << 3
11860 | bits (thumb_insn_r
->arm_insn
, 0, 2));
11861 thumb_insn_r
->reg_rec_count
= 2;
11866 /* Format 5; data processing insns. */
11867 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
11868 if (bit (thumb_insn_r
->arm_insn
, 7))
11870 reg_src1
= reg_src1
+ 8;
11872 record_buf
[0] = ARM_PS_REGNUM
;
11873 record_buf
[1] = reg_src1
;
11874 thumb_insn_r
->reg_rec_count
= 2;
11877 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
11878 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
11884 /* Handling opcode 001 insns. */
11887 thumb_record_ld_st_imm_offset (insn_decode_record
*thumb_insn_r
)
11889 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
11890 uint32_t record_buf
[8], record_buf_mem
[8];
11892 uint32_t reg_src1
= 0;
11893 uint32_t opcode
= 0, immed_5
= 0;
11895 ULONGEST u_regval
= 0;
11897 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
11902 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
11903 record_buf
[0] = reg_src1
;
11904 thumb_insn_r
->reg_rec_count
= 1;
11909 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
11910 immed_5
= bits (thumb_insn_r
->arm_insn
, 6, 10);
11911 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
11912 record_buf_mem
[0] = 4;
11913 record_buf_mem
[1] = u_regval
+ (immed_5
* 4);
11914 thumb_insn_r
->mem_rec_count
= 1;
11917 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
11918 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
11924 /* Handling opcode 100 insns. */
11927 thumb_record_ld_st_stack (insn_decode_record
*thumb_insn_r
)
11929 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
11930 uint32_t record_buf
[8], record_buf_mem
[8];
11932 uint32_t reg_src1
= 0;
11933 uint32_t opcode
= 0, immed_8
= 0, immed_5
= 0;
11935 ULONGEST u_regval
= 0;
11937 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
11942 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
11943 record_buf
[0] = reg_src1
;
11944 thumb_insn_r
->reg_rec_count
= 1;
11946 else if (1 == opcode
)
11949 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
11950 record_buf
[0] = reg_src1
;
11951 thumb_insn_r
->reg_rec_count
= 1;
11953 else if (2 == opcode
)
11956 immed_8
= bits (thumb_insn_r
->arm_insn
, 0, 7);
11957 regcache_raw_read_unsigned (reg_cache
, ARM_SP_REGNUM
, &u_regval
);
11958 record_buf_mem
[0] = 4;
11959 record_buf_mem
[1] = u_regval
+ (immed_8
* 4);
11960 thumb_insn_r
->mem_rec_count
= 1;
11962 else if (0 == opcode
)
11965 immed_5
= bits (thumb_insn_r
->arm_insn
, 6, 10);
11966 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
11967 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
11968 record_buf_mem
[0] = 2;
11969 record_buf_mem
[1] = u_regval
+ (immed_5
* 2);
11970 thumb_insn_r
->mem_rec_count
= 1;
11973 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
11974 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
11980 /* Handling opcode 101 insns. */
11983 thumb_record_misc (insn_decode_record
*thumb_insn_r
)
11985 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
11987 uint32_t opcode
= 0;
11988 uint32_t register_bits
= 0, register_count
= 0;
11989 uint32_t index
= 0, start_address
= 0;
11990 uint32_t record_buf
[24], record_buf_mem
[48];
11993 ULONGEST u_regval
= 0;
11995 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
11997 if (opcode
== 0 || opcode
== 1)
11999 /* ADR and ADD (SP plus immediate) */
12001 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12002 record_buf
[0] = reg_src1
;
12003 thumb_insn_r
->reg_rec_count
= 1;
12007 /* Miscellaneous 16-bit instructions */
12008 uint32_t opcode2
= bits (thumb_insn_r
->arm_insn
, 8, 11);
12013 /* SETEND and CPS */
12016 /* ADD/SUB (SP plus immediate) */
12017 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12018 record_buf
[0] = ARM_SP_REGNUM
;
12019 thumb_insn_r
->reg_rec_count
= 1;
12021 case 1: /* fall through */
12022 case 3: /* fall through */
12023 case 9: /* fall through */
12028 /* SXTH, SXTB, UXTH, UXTB */
12029 record_buf
[0] = bits (thumb_insn_r
->arm_insn
, 0, 2);
12030 thumb_insn_r
->reg_rec_count
= 1;
12032 case 4: /* fall through */
12035 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12036 regcache_raw_read_unsigned (reg_cache
, ARM_SP_REGNUM
, &u_regval
);
12037 while (register_bits
)
12039 if (register_bits
& 0x00000001)
12041 register_bits
= register_bits
>> 1;
12043 start_address
= u_regval
- \
12044 (4 * (bit (thumb_insn_r
->arm_insn
, 8) + register_count
));
12045 thumb_insn_r
->mem_rec_count
= register_count
;
12046 while (register_count
)
12048 record_buf_mem
[(register_count
* 2) - 1] = start_address
;
12049 record_buf_mem
[(register_count
* 2) - 2] = 4;
12050 start_address
= start_address
+ 4;
12053 record_buf
[0] = ARM_SP_REGNUM
;
12054 thumb_insn_r
->reg_rec_count
= 1;
12057 /* REV, REV16, REVSH */
12058 record_buf
[0] = bits (thumb_insn_r
->arm_insn
, 0, 2);
12059 thumb_insn_r
->reg_rec_count
= 1;
12061 case 12: /* fall through */
12064 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12065 while (register_bits
)
12067 if (register_bits
& 0x00000001)
12068 record_buf
[index
++] = register_count
;
12069 register_bits
= register_bits
>> 1;
12072 record_buf
[index
++] = ARM_PS_REGNUM
;
12073 record_buf
[index
++] = ARM_SP_REGNUM
;
12074 thumb_insn_r
->reg_rec_count
= index
;
12078 /* Handle enhanced software breakpoint insn, BKPT. */
12079 /* CPSR is changed to be executed in ARM state, disabling normal
12080 interrupts, entering abort mode. */
12081 /* According to high vector configuration PC is set. */
12082 /* User hits breakpoint and type reverse, in that case, we need to go back with
12083 previous CPSR and Program Counter. */
12084 record_buf
[0] = ARM_PS_REGNUM
;
12085 record_buf
[1] = ARM_LR_REGNUM
;
12086 thumb_insn_r
->reg_rec_count
= 2;
12087 /* We need to save SPSR value, which is not yet done. */
12088 printf_unfiltered (_("Process record does not support instruction "
12089 "0x%0x at address %s.\n"),
12090 thumb_insn_r
->arm_insn
,
12091 paddress (thumb_insn_r
->gdbarch
,
12092 thumb_insn_r
->this_addr
));
12096 /* If-Then, and hints */
12103 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12104 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12110 /* Handling opcode 110 insns. */
12113 thumb_record_ldm_stm_swi (insn_decode_record
*thumb_insn_r
)
12115 struct gdbarch_tdep
*tdep
= gdbarch_tdep (thumb_insn_r
->gdbarch
);
12116 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12118 uint32_t ret
= 0; /* function return value: -1:record failure ; 0:success */
12119 uint32_t reg_src1
= 0;
12120 uint32_t opcode1
= 0, opcode2
= 0, register_bits
= 0, register_count
= 0;
12121 uint32_t index
= 0, start_address
= 0;
12122 uint32_t record_buf
[24], record_buf_mem
[48];
12124 ULONGEST u_regval
= 0;
12126 opcode1
= bits (thumb_insn_r
->arm_insn
, 8, 12);
12127 opcode2
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12133 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12135 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12136 while (register_bits
)
12138 if (register_bits
& 0x00000001)
12139 record_buf
[index
++] = register_count
;
12140 register_bits
= register_bits
>> 1;
12143 record_buf
[index
++] = reg_src1
;
12144 thumb_insn_r
->reg_rec_count
= index
;
12146 else if (0 == opcode2
)
12148 /* It handles both STMIA. */
12149 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12151 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12152 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
12153 while (register_bits
)
12155 if (register_bits
& 0x00000001)
12157 register_bits
= register_bits
>> 1;
12159 start_address
= u_regval
;
12160 thumb_insn_r
->mem_rec_count
= register_count
;
12161 while (register_count
)
12163 record_buf_mem
[(register_count
* 2) - 1] = start_address
;
12164 record_buf_mem
[(register_count
* 2) - 2] = 4;
12165 start_address
= start_address
+ 4;
12169 else if (0x1F == opcode1
)
12171 /* Handle arm syscall insn. */
12172 if (tdep
->arm_syscall_record
!= NULL
)
12174 regcache_raw_read_unsigned (reg_cache
, 7, &u_regval
);
12175 ret
= tdep
->arm_syscall_record (reg_cache
, u_regval
);
12179 printf_unfiltered (_("no syscall record support\n"));
12184 /* B (1), conditional branch is automatically taken care in process_record,
12185 as PC is saved there. */
12187 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12188 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12194 /* Handling opcode 111 insns. */
12197 thumb_record_branch (insn_decode_record
*thumb_insn_r
)
12199 uint32_t record_buf
[8];
12200 uint32_t bits_h
= 0;
12202 bits_h
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12204 if (2 == bits_h
|| 3 == bits_h
)
12207 record_buf
[0] = ARM_LR_REGNUM
;
12208 thumb_insn_r
->reg_rec_count
= 1;
12210 else if (1 == bits_h
)
12213 record_buf
[0] = ARM_PS_REGNUM
;
12214 record_buf
[1] = ARM_LR_REGNUM
;
12215 thumb_insn_r
->reg_rec_count
= 2;
12218 /* B(2) is automatically taken care in process_record, as PC is
12221 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12226 /* Handler for thumb2 load/store multiple instructions. */
12229 thumb2_record_ld_st_multiple (insn_decode_record
*thumb2_insn_r
)
12231 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
12233 uint32_t reg_rn
, op
;
12234 uint32_t register_bits
= 0, register_count
= 0;
12235 uint32_t index
= 0, start_address
= 0;
12236 uint32_t record_buf
[24], record_buf_mem
[48];
12238 ULONGEST u_regval
= 0;
12240 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12241 op
= bits (thumb2_insn_r
->arm_insn
, 23, 24);
12243 if (0 == op
|| 3 == op
)
12245 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
12247 /* Handle RFE instruction. */
12248 record_buf
[0] = ARM_PS_REGNUM
;
12249 thumb2_insn_r
->reg_rec_count
= 1;
12253 /* Handle SRS instruction after reading banked SP. */
12254 return arm_record_unsupported_insn (thumb2_insn_r
);
12257 else if (1 == op
|| 2 == op
)
12259 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
12261 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12262 register_bits
= bits (thumb2_insn_r
->arm_insn
, 0, 15);
12263 while (register_bits
)
12265 if (register_bits
& 0x00000001)
12266 record_buf
[index
++] = register_count
;
12269 register_bits
= register_bits
>> 1;
12271 record_buf
[index
++] = reg_rn
;
12272 record_buf
[index
++] = ARM_PS_REGNUM
;
12273 thumb2_insn_r
->reg_rec_count
= index
;
12277 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12278 register_bits
= bits (thumb2_insn_r
->arm_insn
, 0, 15);
12279 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
12280 while (register_bits
)
12282 if (register_bits
& 0x00000001)
12285 register_bits
= register_bits
>> 1;
12290 /* Start address calculation for LDMDB/LDMEA. */
12291 start_address
= u_regval
;
12295 /* Start address calculation for LDMDB/LDMEA. */
12296 start_address
= u_regval
- register_count
* 4;
12299 thumb2_insn_r
->mem_rec_count
= register_count
;
12300 while (register_count
)
12302 record_buf_mem
[register_count
* 2 - 1] = start_address
;
12303 record_buf_mem
[register_count
* 2 - 2] = 4;
12304 start_address
= start_address
+ 4;
12307 record_buf
[0] = reg_rn
;
12308 record_buf
[1] = ARM_PS_REGNUM
;
12309 thumb2_insn_r
->reg_rec_count
= 2;
12313 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
12315 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12317 return ARM_RECORD_SUCCESS
;
12320 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12324 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record
*thumb2_insn_r
)
12326 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
12328 uint32_t reg_rd
, reg_rn
, offset_imm
;
12329 uint32_t reg_dest1
, reg_dest2
;
12330 uint32_t address
, offset_addr
;
12331 uint32_t record_buf
[8], record_buf_mem
[8];
12332 uint32_t op1
, op2
, op3
;
12334 ULONGEST u_regval
[2];
12336 op1
= bits (thumb2_insn_r
->arm_insn
, 23, 24);
12337 op2
= bits (thumb2_insn_r
->arm_insn
, 20, 21);
12338 op3
= bits (thumb2_insn_r
->arm_insn
, 4, 7);
12340 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
12342 if(!(1 == op1
&& 1 == op2
&& (0 == op3
|| 1 == op3
)))
12344 reg_dest1
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
12345 record_buf
[0] = reg_dest1
;
12346 record_buf
[1] = ARM_PS_REGNUM
;
12347 thumb2_insn_r
->reg_rec_count
= 2;
12350 if (3 == op2
|| (op1
& 2) || (1 == op1
&& 1 == op2
&& 7 == op3
))
12352 reg_dest2
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
12353 record_buf
[2] = reg_dest2
;
12354 thumb2_insn_r
->reg_rec_count
= 3;
12359 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12360 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
[0]);
12362 if (0 == op1
&& 0 == op2
)
12364 /* Handle STREX. */
12365 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
12366 address
= u_regval
[0] + (offset_imm
* 4);
12367 record_buf_mem
[0] = 4;
12368 record_buf_mem
[1] = address
;
12369 thumb2_insn_r
->mem_rec_count
= 1;
12370 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
12371 record_buf
[0] = reg_rd
;
12372 thumb2_insn_r
->reg_rec_count
= 1;
12374 else if (1 == op1
&& 0 == op2
)
12376 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
12377 record_buf
[0] = reg_rd
;
12378 thumb2_insn_r
->reg_rec_count
= 1;
12379 address
= u_regval
[0];
12380 record_buf_mem
[1] = address
;
12384 /* Handle STREXB. */
12385 record_buf_mem
[0] = 1;
12386 thumb2_insn_r
->mem_rec_count
= 1;
12390 /* Handle STREXH. */
12391 record_buf_mem
[0] = 2 ;
12392 thumb2_insn_r
->mem_rec_count
= 1;
12396 /* Handle STREXD. */
12397 address
= u_regval
[0];
12398 record_buf_mem
[0] = 4;
12399 record_buf_mem
[2] = 4;
12400 record_buf_mem
[3] = address
+ 4;
12401 thumb2_insn_r
->mem_rec_count
= 2;
12406 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
12408 if (bit (thumb2_insn_r
->arm_insn
, 24))
12410 if (bit (thumb2_insn_r
->arm_insn
, 23))
12411 offset_addr
= u_regval
[0] + (offset_imm
* 4);
12413 offset_addr
= u_regval
[0] - (offset_imm
* 4);
12415 address
= offset_addr
;
12418 address
= u_regval
[0];
12420 record_buf_mem
[0] = 4;
12421 record_buf_mem
[1] = address
;
12422 record_buf_mem
[2] = 4;
12423 record_buf_mem
[3] = address
+ 4;
12424 thumb2_insn_r
->mem_rec_count
= 2;
12425 record_buf
[0] = reg_rn
;
12426 thumb2_insn_r
->reg_rec_count
= 1;
12430 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12432 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
12434 return ARM_RECORD_SUCCESS
;
12437 /* Handler for thumb2 data processing (shift register and modified immediate)
12441 thumb2_record_data_proc_sreg_mimm (insn_decode_record
*thumb2_insn_r
)
12443 uint32_t reg_rd
, op
;
12444 uint32_t record_buf
[8];
12446 op
= bits (thumb2_insn_r
->arm_insn
, 21, 24);
12447 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
12449 if ((0 == op
|| 4 == op
|| 8 == op
|| 13 == op
) && 15 == reg_rd
)
12451 record_buf
[0] = ARM_PS_REGNUM
;
12452 thumb2_insn_r
->reg_rec_count
= 1;
12456 record_buf
[0] = reg_rd
;
12457 record_buf
[1] = ARM_PS_REGNUM
;
12458 thumb2_insn_r
->reg_rec_count
= 2;
12461 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12463 return ARM_RECORD_SUCCESS
;
12466 /* Generic handler for thumb2 instructions which effect destination and PS
12470 thumb2_record_ps_dest_generic (insn_decode_record
*thumb2_insn_r
)
12473 uint32_t record_buf
[8];
12475 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
12477 record_buf
[0] = reg_rd
;
12478 record_buf
[1] = ARM_PS_REGNUM
;
12479 thumb2_insn_r
->reg_rec_count
= 2;
12481 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12483 return ARM_RECORD_SUCCESS
;
12486 /* Handler for thumb2 branch and miscellaneous control instructions. */
12489 thumb2_record_branch_misc_cntrl (insn_decode_record
*thumb2_insn_r
)
12491 uint32_t op
, op1
, op2
;
12492 uint32_t record_buf
[8];
12494 op
= bits (thumb2_insn_r
->arm_insn
, 20, 26);
12495 op1
= bits (thumb2_insn_r
->arm_insn
, 12, 14);
12496 op2
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
12498 /* Handle MSR insn. */
12499 if (!(op1
& 0x2) && 0x38 == op
)
12503 /* CPSR is going to be changed. */
12504 record_buf
[0] = ARM_PS_REGNUM
;
12505 thumb2_insn_r
->reg_rec_count
= 1;
12509 arm_record_unsupported_insn(thumb2_insn_r
);
12513 else if (4 == (op1
& 0x5) || 5 == (op1
& 0x5))
12516 record_buf
[0] = ARM_PS_REGNUM
;
12517 record_buf
[1] = ARM_LR_REGNUM
;
12518 thumb2_insn_r
->reg_rec_count
= 2;
12521 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12523 return ARM_RECORD_SUCCESS
;
12526 /* Handler for thumb2 store single data item instructions. */
12529 thumb2_record_str_single_data (insn_decode_record
*thumb2_insn_r
)
12531 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
12533 uint32_t reg_rn
, reg_rm
, offset_imm
, shift_imm
;
12534 uint32_t address
, offset_addr
;
12535 uint32_t record_buf
[8], record_buf_mem
[8];
12538 ULONGEST u_regval
[2];
12540 op1
= bits (thumb2_insn_r
->arm_insn
, 21, 23);
12541 op2
= bits (thumb2_insn_r
->arm_insn
, 6, 11);
12542 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12543 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
[0]);
12545 if (bit (thumb2_insn_r
->arm_insn
, 23))
12548 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 11);
12549 offset_addr
= u_regval
[0] + offset_imm
;
12550 address
= offset_addr
;
12555 if ((0 == op1
|| 1 == op1
|| 2 == op1
) && !(op2
& 0x20))
12557 /* Handle STRB (register). */
12558 reg_rm
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
12559 regcache_raw_read_unsigned (reg_cache
, reg_rm
, &u_regval
[1]);
12560 shift_imm
= bits (thumb2_insn_r
->arm_insn
, 4, 5);
12561 offset_addr
= u_regval
[1] << shift_imm
;
12562 address
= u_regval
[0] + offset_addr
;
12566 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
12567 if (bit (thumb2_insn_r
->arm_insn
, 10))
12569 if (bit (thumb2_insn_r
->arm_insn
, 9))
12570 offset_addr
= u_regval
[0] + offset_imm
;
12572 offset_addr
= u_regval
[0] - offset_imm
;
12574 address
= offset_addr
;
12577 address
= u_regval
[0];
12583 /* Store byte instructions. */
12586 record_buf_mem
[0] = 1;
12588 /* Store half word instructions. */
12591 record_buf_mem
[0] = 2;
12593 /* Store word instructions. */
12596 record_buf_mem
[0] = 4;
12600 gdb_assert_not_reached ("no decoding pattern found");
12604 record_buf_mem
[1] = address
;
12605 thumb2_insn_r
->mem_rec_count
= 1;
12606 record_buf
[0] = reg_rn
;
12607 thumb2_insn_r
->reg_rec_count
= 1;
12609 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12611 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
12613 return ARM_RECORD_SUCCESS
;
12616 /* Handler for thumb2 load memory hints instructions. */
12619 thumb2_record_ld_mem_hints (insn_decode_record
*thumb2_insn_r
)
12621 uint32_t record_buf
[8];
12622 uint32_t reg_rt
, reg_rn
;
12624 reg_rt
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
12625 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12627 if (ARM_PC_REGNUM
!= reg_rt
)
12629 record_buf
[0] = reg_rt
;
12630 record_buf
[1] = reg_rn
;
12631 record_buf
[2] = ARM_PS_REGNUM
;
12632 thumb2_insn_r
->reg_rec_count
= 3;
12634 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12636 return ARM_RECORD_SUCCESS
;
12639 return ARM_RECORD_FAILURE
;
12642 /* Handler for thumb2 load word instructions. */
12645 thumb2_record_ld_word (insn_decode_record
*thumb2_insn_r
)
12647 uint32_t record_buf
[8];
12649 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
12650 record_buf
[1] = ARM_PS_REGNUM
;
12651 thumb2_insn_r
->reg_rec_count
= 2;
12653 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12655 return ARM_RECORD_SUCCESS
;
12658 /* Handler for thumb2 long multiply, long multiply accumulate, and
12659 divide instructions. */
12662 thumb2_record_lmul_lmla_div (insn_decode_record
*thumb2_insn_r
)
12664 uint32_t opcode1
= 0, opcode2
= 0;
12665 uint32_t record_buf
[8];
12667 opcode1
= bits (thumb2_insn_r
->arm_insn
, 20, 22);
12668 opcode2
= bits (thumb2_insn_r
->arm_insn
, 4, 7);
12670 if (0 == opcode1
|| 2 == opcode1
|| (opcode1
>= 4 && opcode1
<= 6))
12672 /* Handle SMULL, UMULL, SMULAL. */
12673 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12674 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 16, 19);
12675 record_buf
[1] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
12676 record_buf
[2] = ARM_PS_REGNUM
;
12677 thumb2_insn_r
->reg_rec_count
= 3;
12679 else if (1 == opcode1
|| 3 == opcode2
)
12681 /* Handle SDIV and UDIV. */
12682 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 16, 19);
12683 record_buf
[1] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
12684 record_buf
[2] = ARM_PS_REGNUM
;
12685 thumb2_insn_r
->reg_rec_count
= 3;
12688 return ARM_RECORD_FAILURE
;
12690 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12692 return ARM_RECORD_SUCCESS
;
12695 /* Record handler for thumb32 coprocessor instructions. */
12698 thumb2_record_coproc_insn (insn_decode_record
*thumb2_insn_r
)
12700 if (bit (thumb2_insn_r
->arm_insn
, 25))
12701 return arm_record_coproc_data_proc (thumb2_insn_r
);
12703 return arm_record_asimd_vfp_coproc (thumb2_insn_r
);
12706 /* Record handler for advance SIMD structure load/store instructions. */
12709 thumb2_record_asimd_struct_ld_st (insn_decode_record
*thumb2_insn_r
)
12711 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
12712 uint32_t l_bit
, a_bit
, b_bits
;
12713 uint32_t record_buf
[128], record_buf_mem
[128];
12714 uint32_t reg_rn
, reg_vd
, address
, f_elem
;
12715 uint32_t index_r
= 0, index_e
= 0, bf_regs
= 0, index_m
= 0, loop_t
= 0;
12718 l_bit
= bit (thumb2_insn_r
->arm_insn
, 21);
12719 a_bit
= bit (thumb2_insn_r
->arm_insn
, 23);
12720 b_bits
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
12721 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12722 reg_vd
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
12723 reg_vd
= (bit (thumb2_insn_r
->arm_insn
, 22) << 4) | reg_vd
;
12724 f_ebytes
= (1 << bits (thumb2_insn_r
->arm_insn
, 6, 7));
12725 f_elem
= 8 / f_ebytes
;
12729 ULONGEST u_regval
= 0;
12730 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
12731 address
= u_regval
;
12736 if (b_bits
== 0x02 || b_bits
== 0x0a || (b_bits
& 0x0e) == 0x06)
12738 if (b_bits
== 0x07)
12740 else if (b_bits
== 0x0a)
12742 else if (b_bits
== 0x06)
12744 else if (b_bits
== 0x02)
12749 for (index_r
= 0; index_r
< bf_regs
; index_r
++)
12751 for (index_e
= 0; index_e
< f_elem
; index_e
++)
12753 record_buf_mem
[index_m
++] = f_ebytes
;
12754 record_buf_mem
[index_m
++] = address
;
12755 address
= address
+ f_ebytes
;
12756 thumb2_insn_r
->mem_rec_count
+= 1;
12761 else if (b_bits
== 0x03 || (b_bits
& 0x0e) == 0x08)
12763 if (b_bits
== 0x09 || b_bits
== 0x08)
12765 else if (b_bits
== 0x03)
12770 for (index_r
= 0; index_r
< bf_regs
; index_r
++)
12771 for (index_e
= 0; index_e
< f_elem
; index_e
++)
12773 for (loop_t
= 0; loop_t
< 2; loop_t
++)
12775 record_buf_mem
[index_m
++] = f_ebytes
;
12776 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
12777 thumb2_insn_r
->mem_rec_count
+= 1;
12779 address
= address
+ (2 * f_ebytes
);
12783 else if ((b_bits
& 0x0e) == 0x04)
12785 for (index_e
= 0; index_e
< f_elem
; index_e
++)
12787 for (loop_t
= 0; loop_t
< 3; loop_t
++)
12789 record_buf_mem
[index_m
++] = f_ebytes
;
12790 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
12791 thumb2_insn_r
->mem_rec_count
+= 1;
12793 address
= address
+ (3 * f_ebytes
);
12797 else if (!(b_bits
& 0x0e))
12799 for (index_e
= 0; index_e
< f_elem
; index_e
++)
12801 for (loop_t
= 0; loop_t
< 4; loop_t
++)
12803 record_buf_mem
[index_m
++] = f_ebytes
;
12804 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
12805 thumb2_insn_r
->mem_rec_count
+= 1;
12807 address
= address
+ (4 * f_ebytes
);
12813 uint8_t bft_size
= bits (thumb2_insn_r
->arm_insn
, 10, 11);
12815 if (bft_size
== 0x00)
12817 else if (bft_size
== 0x01)
12819 else if (bft_size
== 0x02)
12825 if (!(b_bits
& 0x0b) || b_bits
== 0x08)
12826 thumb2_insn_r
->mem_rec_count
= 1;
12828 else if ((b_bits
& 0x0b) == 0x01 || b_bits
== 0x09)
12829 thumb2_insn_r
->mem_rec_count
= 2;
12831 else if ((b_bits
& 0x0b) == 0x02 || b_bits
== 0x0a)
12832 thumb2_insn_r
->mem_rec_count
= 3;
12834 else if ((b_bits
& 0x0b) == 0x03 || b_bits
== 0x0b)
12835 thumb2_insn_r
->mem_rec_count
= 4;
12837 for (index_m
= 0; index_m
< thumb2_insn_r
->mem_rec_count
; index_m
++)
12839 record_buf_mem
[index_m
] = f_ebytes
;
12840 record_buf_mem
[index_m
] = address
+ (index_m
* f_ebytes
);
12849 if (b_bits
== 0x02 || b_bits
== 0x0a || (b_bits
& 0x0e) == 0x06)
12850 thumb2_insn_r
->reg_rec_count
= 1;
12852 else if (b_bits
== 0x03 || (b_bits
& 0x0e) == 0x08)
12853 thumb2_insn_r
->reg_rec_count
= 2;
12855 else if ((b_bits
& 0x0e) == 0x04)
12856 thumb2_insn_r
->reg_rec_count
= 3;
12858 else if (!(b_bits
& 0x0e))
12859 thumb2_insn_r
->reg_rec_count
= 4;
12864 if (!(b_bits
& 0x0b) || b_bits
== 0x08 || b_bits
== 0x0c)
12865 thumb2_insn_r
->reg_rec_count
= 1;
12867 else if ((b_bits
& 0x0b) == 0x01 || b_bits
== 0x09 || b_bits
== 0x0d)
12868 thumb2_insn_r
->reg_rec_count
= 2;
12870 else if ((b_bits
& 0x0b) == 0x02 || b_bits
== 0x0a || b_bits
== 0x0e)
12871 thumb2_insn_r
->reg_rec_count
= 3;
12873 else if ((b_bits
& 0x0b) == 0x03 || b_bits
== 0x0b || b_bits
== 0x0f)
12874 thumb2_insn_r
->reg_rec_count
= 4;
12876 for (index_r
= 0; index_r
< thumb2_insn_r
->reg_rec_count
; index_r
++)
12877 record_buf
[index_r
] = reg_vd
+ ARM_D0_REGNUM
+ index_r
;
12881 if (bits (thumb2_insn_r
->arm_insn
, 0, 3) != 15)
12883 record_buf
[index_r
] = reg_rn
;
12884 thumb2_insn_r
->reg_rec_count
+= 1;
12887 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12889 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
12894 /* Decodes thumb2 instruction type and invokes its record handler. */
12896 static unsigned int
12897 thumb2_record_decode_insn_handler (insn_decode_record
*thumb2_insn_r
)
12899 uint32_t op
, op1
, op2
;
12901 op
= bit (thumb2_insn_r
->arm_insn
, 15);
12902 op1
= bits (thumb2_insn_r
->arm_insn
, 27, 28);
12903 op2
= bits (thumb2_insn_r
->arm_insn
, 20, 26);
12907 if (!(op2
& 0x64 ))
12909 /* Load/store multiple instruction. */
12910 return thumb2_record_ld_st_multiple (thumb2_insn_r
);
12912 else if ((op2
& 0x64) == 0x4)
12914 /* Load/store (dual/exclusive) and table branch instruction. */
12915 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r
);
12917 else if ((op2
& 0x60) == 0x20)
12919 /* Data-processing (shifted register). */
12920 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r
);
12922 else if (op2
& 0x40)
12924 /* Co-processor instructions. */
12925 return thumb2_record_coproc_insn (thumb2_insn_r
);
12928 else if (op1
== 0x02)
12932 /* Branches and miscellaneous control instructions. */
12933 return thumb2_record_branch_misc_cntrl (thumb2_insn_r
);
12935 else if (op2
& 0x20)
12937 /* Data-processing (plain binary immediate) instruction. */
12938 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
12942 /* Data-processing (modified immediate). */
12943 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r
);
12946 else if (op1
== 0x03)
12948 if (!(op2
& 0x71 ))
12950 /* Store single data item. */
12951 return thumb2_record_str_single_data (thumb2_insn_r
);
12953 else if (!((op2
& 0x71) ^ 0x10))
12955 /* Advanced SIMD or structure load/store instructions. */
12956 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r
);
12958 else if (!((op2
& 0x67) ^ 0x01))
12960 /* Load byte, memory hints instruction. */
12961 return thumb2_record_ld_mem_hints (thumb2_insn_r
);
12963 else if (!((op2
& 0x67) ^ 0x03))
12965 /* Load halfword, memory hints instruction. */
12966 return thumb2_record_ld_mem_hints (thumb2_insn_r
);
12968 else if (!((op2
& 0x67) ^ 0x05))
12970 /* Load word instruction. */
12971 return thumb2_record_ld_word (thumb2_insn_r
);
12973 else if (!((op2
& 0x70) ^ 0x20))
12975 /* Data-processing (register) instruction. */
12976 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
12978 else if (!((op2
& 0x78) ^ 0x30))
12980 /* Multiply, multiply accumulate, abs diff instruction. */
12981 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
12983 else if (!((op2
& 0x78) ^ 0x38))
12985 /* Long multiply, long multiply accumulate, and divide. */
12986 return thumb2_record_lmul_lmla_div (thumb2_insn_r
);
12988 else if (op2
& 0x40)
12990 /* Co-processor instructions. */
12991 return thumb2_record_coproc_insn (thumb2_insn_r
);
12999 /* Abstract memory reader. */
13001 class abstract_memory_reader
13004 /* Read LEN bytes of target memory at address MEMADDR, placing the
13005 results in GDB's memory at BUF. Return true on success. */
13007 virtual bool read (CORE_ADDR memaddr
, gdb_byte
*buf
, const size_t len
) = 0;
13010 /* Instruction reader from real target. */
13012 class instruction_reader
: public abstract_memory_reader
13015 bool read (CORE_ADDR memaddr
, gdb_byte
*buf
, const size_t len
) override
13017 if (target_read_memory (memaddr
, buf
, len
))
13026 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13027 and positive val on failure. */
13030 extract_arm_insn (abstract_memory_reader
& reader
,
13031 insn_decode_record
*insn_record
, uint32_t insn_size
)
13033 gdb_byte buf
[insn_size
];
13035 memset (&buf
[0], 0, insn_size
);
13037 if (!reader
.read (insn_record
->this_addr
, buf
, insn_size
))
13039 insn_record
->arm_insn
= (uint32_t) extract_unsigned_integer (&buf
[0],
13041 gdbarch_byte_order_for_code (insn_record
->gdbarch
));
13045 typedef int (*sti_arm_hdl_fp_t
) (insn_decode_record
*);
13047 /* Decode arm/thumb insn depending on condition cods and opcodes; and
13051 decode_insn (abstract_memory_reader
&reader
, insn_decode_record
*arm_record
,
13052 record_type_t record_type
, uint32_t insn_size
)
13055 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm
13057 static const sti_arm_hdl_fp_t arm_handle_insn
[8] =
13059 arm_record_data_proc_misc_ld_str
, /* 000. */
13060 arm_record_data_proc_imm
, /* 001. */
13061 arm_record_ld_st_imm_offset
, /* 010. */
13062 arm_record_ld_st_reg_offset
, /* 011. */
13063 arm_record_ld_st_multiple
, /* 100. */
13064 arm_record_b_bl
, /* 101. */
13065 arm_record_asimd_vfp_coproc
, /* 110. */
13066 arm_record_coproc_data_proc
/* 111. */
13069 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb
13071 static const sti_arm_hdl_fp_t thumb_handle_insn
[8] =
13073 thumb_record_shift_add_sub
, /* 000. */
13074 thumb_record_add_sub_cmp_mov
, /* 001. */
13075 thumb_record_ld_st_reg_offset
, /* 010. */
13076 thumb_record_ld_st_imm_offset
, /* 011. */
13077 thumb_record_ld_st_stack
, /* 100. */
13078 thumb_record_misc
, /* 101. */
13079 thumb_record_ldm_stm_swi
, /* 110. */
13080 thumb_record_branch
/* 111. */
13083 uint32_t ret
= 0; /* return value: negative:failure 0:success. */
13084 uint32_t insn_id
= 0;
13086 if (extract_arm_insn (reader
, arm_record
, insn_size
))
13090 printf_unfiltered (_("Process record: error reading memory at "
13091 "addr %s len = %d.\n"),
13092 paddress (arm_record
->gdbarch
,
13093 arm_record
->this_addr
), insn_size
);
13097 else if (ARM_RECORD
== record_type
)
13099 arm_record
->cond
= bits (arm_record
->arm_insn
, 28, 31);
13100 insn_id
= bits (arm_record
->arm_insn
, 25, 27);
13102 if (arm_record
->cond
== 0xf)
13103 ret
= arm_record_extension_space (arm_record
);
13106 /* If this insn has fallen into extension space
13107 then we need not decode it anymore. */
13108 ret
= arm_handle_insn
[insn_id
] (arm_record
);
13110 if (ret
!= ARM_RECORD_SUCCESS
)
13112 arm_record_unsupported_insn (arm_record
);
13116 else if (THUMB_RECORD
== record_type
)
13118 /* As thumb does not have condition codes, we set negative. */
13119 arm_record
->cond
= -1;
13120 insn_id
= bits (arm_record
->arm_insn
, 13, 15);
13121 ret
= thumb_handle_insn
[insn_id
] (arm_record
);
13122 if (ret
!= ARM_RECORD_SUCCESS
)
13124 arm_record_unsupported_insn (arm_record
);
13128 else if (THUMB2_RECORD
== record_type
)
13130 /* As thumb does not have condition codes, we set negative. */
13131 arm_record
->cond
= -1;
13133 /* Swap first half of 32bit thumb instruction with second half. */
13134 arm_record
->arm_insn
13135 = (arm_record
->arm_insn
>> 16) | (arm_record
->arm_insn
<< 16);
13137 ret
= thumb2_record_decode_insn_handler (arm_record
);
13139 if (ret
!= ARM_RECORD_SUCCESS
)
13141 arm_record_unsupported_insn (arm_record
);
13147 /* Throw assertion. */
13148 gdb_assert_not_reached ("not a valid instruction, could not decode");
13155 namespace selftests
{
13157 /* Provide both 16-bit and 32-bit thumb instructions. */
13159 class instruction_reader_thumb
: public abstract_memory_reader
13162 template<size_t SIZE
>
13163 instruction_reader_thumb (enum bfd_endian endian
,
13164 const uint16_t (&insns
)[SIZE
])
13165 : m_endian (endian
), m_insns (insns
), m_insns_size (SIZE
)
13168 bool read (CORE_ADDR memaddr
, gdb_byte
*buf
, const size_t len
) override
13170 SELF_CHECK (len
== 4 || len
== 2);
13171 SELF_CHECK (memaddr
% 2 == 0);
13172 SELF_CHECK ((memaddr
/ 2) < m_insns_size
);
13174 store_unsigned_integer (buf
, 2, m_endian
, m_insns
[memaddr
/ 2]);
13177 store_unsigned_integer (&buf
[2], 2, m_endian
,
13178 m_insns
[memaddr
/ 2 + 1]);
13184 enum bfd_endian m_endian
;
13185 const uint16_t *m_insns
;
13186 size_t m_insns_size
;
13190 arm_record_test (void)
13192 struct gdbarch_info info
;
13193 gdbarch_info_init (&info
);
13194 info
.bfd_arch_info
= bfd_scan_arch ("arm");
13196 struct gdbarch
*gdbarch
= gdbarch_find_by_info (info
);
13198 SELF_CHECK (gdbarch
!= NULL
);
13200 /* 16-bit Thumb instructions. */
13202 insn_decode_record arm_record
;
13204 memset (&arm_record
, 0, sizeof (insn_decode_record
));
13205 arm_record
.gdbarch
= gdbarch
;
13207 static const uint16_t insns
[] = {
13208 /* db b2 uxtb r3, r3 */
13210 /* cd 58 ldr r5, [r1, r3] */
13214 enum bfd_endian endian
= gdbarch_byte_order_for_code (arm_record
.gdbarch
);
13215 instruction_reader_thumb
reader (endian
, insns
);
13216 int ret
= decode_insn (reader
, &arm_record
, THUMB_RECORD
,
13217 THUMB_INSN_SIZE_BYTES
);
13219 SELF_CHECK (ret
== 0);
13220 SELF_CHECK (arm_record
.mem_rec_count
== 0);
13221 SELF_CHECK (arm_record
.reg_rec_count
== 1);
13222 SELF_CHECK (arm_record
.arm_regs
[0] == 3);
13224 arm_record
.this_addr
+= 2;
13225 ret
= decode_insn (reader
, &arm_record
, THUMB_RECORD
,
13226 THUMB_INSN_SIZE_BYTES
);
13228 SELF_CHECK (ret
== 0);
13229 SELF_CHECK (arm_record
.mem_rec_count
== 0);
13230 SELF_CHECK (arm_record
.reg_rec_count
== 1);
13231 SELF_CHECK (arm_record
.arm_regs
[0] == 5);
13234 /* 32-bit Thumb-2 instructions. */
13236 insn_decode_record arm_record
;
13238 memset (&arm_record
, 0, sizeof (insn_decode_record
));
13239 arm_record
.gdbarch
= gdbarch
;
13241 static const uint16_t insns
[] = {
13242 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13246 enum bfd_endian endian
= gdbarch_byte_order_for_code (arm_record
.gdbarch
);
13247 instruction_reader_thumb
reader (endian
, insns
);
13248 int ret
= decode_insn (reader
, &arm_record
, THUMB2_RECORD
,
13249 THUMB2_INSN_SIZE_BYTES
);
13251 SELF_CHECK (ret
== 0);
13252 SELF_CHECK (arm_record
.mem_rec_count
== 0);
13253 SELF_CHECK (arm_record
.reg_rec_count
== 1);
13254 SELF_CHECK (arm_record
.arm_regs
[0] == 7);
13257 } // namespace selftests
13258 #endif /* GDB_SELF_TEST */
13260 /* Cleans up local record registers and memory allocations. */
13263 deallocate_reg_mem (insn_decode_record
*record
)
13265 xfree (record
->arm_regs
);
13266 xfree (record
->arm_mems
);
13270 /* Parse the current instruction and record the values of the registers and
13271 memory that will be changed in current instruction to record_arch_list".
13272 Return -1 if something is wrong. */
13275 arm_process_record (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
13276 CORE_ADDR insn_addr
)
13279 uint32_t no_of_rec
= 0;
13280 uint32_t ret
= 0; /* return value: -1:record failure ; 0:success */
13281 ULONGEST t_bit
= 0, insn_id
= 0;
13283 ULONGEST u_regval
= 0;
13285 insn_decode_record arm_record
;
13287 memset (&arm_record
, 0, sizeof (insn_decode_record
));
13288 arm_record
.regcache
= regcache
;
13289 arm_record
.this_addr
= insn_addr
;
13290 arm_record
.gdbarch
= gdbarch
;
13293 if (record_debug
> 1)
13295 fprintf_unfiltered (gdb_stdlog
, "Process record: arm_process_record "
13297 paddress (gdbarch
, arm_record
.this_addr
));
13300 instruction_reader reader
;
13301 if (extract_arm_insn (reader
, &arm_record
, 2))
13305 printf_unfiltered (_("Process record: error reading memory at "
13306 "addr %s len = %d.\n"),
13307 paddress (arm_record
.gdbarch
,
13308 arm_record
.this_addr
), 2);
13313 /* Check the insn, whether it is thumb or arm one. */
13315 t_bit
= arm_psr_thumb_bit (arm_record
.gdbarch
);
13316 regcache_raw_read_unsigned (arm_record
.regcache
, ARM_PS_REGNUM
, &u_regval
);
13319 if (!(u_regval
& t_bit
))
13321 /* We are decoding arm insn. */
13322 ret
= decode_insn (reader
, &arm_record
, ARM_RECORD
, ARM_INSN_SIZE_BYTES
);
13326 insn_id
= bits (arm_record
.arm_insn
, 11, 15);
13327 /* is it thumb2 insn? */
13328 if ((0x1D == insn_id
) || (0x1E == insn_id
) || (0x1F == insn_id
))
13330 ret
= decode_insn (reader
, &arm_record
, THUMB2_RECORD
,
13331 THUMB2_INSN_SIZE_BYTES
);
13335 /* We are decoding thumb insn. */
13336 ret
= decode_insn (reader
, &arm_record
, THUMB_RECORD
,
13337 THUMB_INSN_SIZE_BYTES
);
13343 /* Record registers. */
13344 record_full_arch_list_add_reg (arm_record
.regcache
, ARM_PC_REGNUM
);
13345 if (arm_record
.arm_regs
)
13347 for (no_of_rec
= 0; no_of_rec
< arm_record
.reg_rec_count
; no_of_rec
++)
13349 if (record_full_arch_list_add_reg
13350 (arm_record
.regcache
, arm_record
.arm_regs
[no_of_rec
]))
13354 /* Record memories. */
13355 if (arm_record
.arm_mems
)
13357 for (no_of_rec
= 0; no_of_rec
< arm_record
.mem_rec_count
; no_of_rec
++)
13359 if (record_full_arch_list_add_mem
13360 ((CORE_ADDR
)arm_record
.arm_mems
[no_of_rec
].addr
,
13361 arm_record
.arm_mems
[no_of_rec
].len
))
13366 if (record_full_arch_list_add_end ())
13371 deallocate_reg_mem (&arm_record
);
13376 /* See arm-tdep.h. */
13378 const target_desc
*
13379 arm_read_description (arm_fp_type fp_type
)
13381 struct target_desc
*tdesc
= tdesc_arm_list
[fp_type
];
13383 if (tdesc
== nullptr)
13385 tdesc
= arm_create_target_description (fp_type
);
13386 tdesc_arm_list
[fp_type
] = tdesc
;
13392 /* See arm-tdep.h. */
13394 const target_desc
*
13395 arm_read_mprofile_description (arm_m_profile_type m_type
)
13397 struct target_desc
*tdesc
= tdesc_arm_mprofile_list
[m_type
];
13399 if (tdesc
== nullptr)
13401 tdesc
= arm_create_mprofile_target_description (m_type
);
13402 tdesc_arm_mprofile_list
[m_type
] = tdesc
;