1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2019 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
32 #include "reggroups.h"
33 #include "target-float.h"
35 #include "arch-utils.h"
37 #include "frame-unwind.h"
38 #include "frame-base.h"
39 #include "trad-frame.h"
41 #include "dwarf2-frame.h"
43 #include "prologue-value.h"
45 #include "target-descriptions.h"
46 #include "user-regs.h"
47 #include "observable.h"
50 #include "arch/arm-get-next-pcs.h"
52 #include "gdb/sim-arm.h"
55 #include "coff/internal.h"
58 #include "gdbsupport/vec.h"
61 #include "record-full.h"
64 #include "features/arm/arm-with-m.c"
65 #include "features/arm/arm-with-m-fpa-layout.c"
66 #include "features/arm/arm-with-m-vfp-d16.c"
67 #include "features/arm/arm-with-iwmmxt.c"
68 #include "features/arm/arm-with-vfpv2.c"
69 #include "features/arm/arm-with-vfpv3.c"
70 #include "features/arm/arm-with-neon.c"
73 #include "gdbsupport/selftest.h"
78 /* Macros for setting and testing a bit in a minimal symbol that marks
79 it as Thumb function. The MSB of the minimal symbol's "info" field
80 is used for this purpose.
82 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
83 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
85 #define MSYMBOL_SET_SPECIAL(msym) \
86 MSYMBOL_TARGET_FLAG_1 (msym) = 1
88 #define MSYMBOL_IS_SPECIAL(msym) \
89 MSYMBOL_TARGET_FLAG_1 (msym)
91 struct arm_mapping_symbol
96 bool operator< (const arm_mapping_symbol
&other
) const
97 { return this->value
< other
.value
; }
100 typedef std::vector
<arm_mapping_symbol
> arm_mapping_symbol_vec
;
102 struct arm_per_objfile
104 explicit arm_per_objfile (size_t num_sections
)
105 : section_maps (new arm_mapping_symbol_vec
[num_sections
]),
106 section_maps_sorted (new bool[num_sections
] ())
109 DISABLE_COPY_AND_ASSIGN (arm_per_objfile
);
111 /* Information about mapping symbols ($a, $d, $t) in the objfile.
113 The format is an array of vectors of arm_mapping_symbols, there is one
114 vector for each section of the objfile (the array is indexed by BFD section
117 For each section, the vector of arm_mapping_symbol is sorted by
118 symbol value (address). */
119 std::unique_ptr
<arm_mapping_symbol_vec
[]> section_maps
;
121 /* For each corresponding element of section_maps above, is this vector
123 std::unique_ptr
<bool[]> section_maps_sorted
;
126 /* Per-objfile data used for mapping symbols. */
127 static objfile_key
<arm_per_objfile
> arm_objfile_data_key
;
129 /* The list of available "set arm ..." and "show arm ..." commands. */
130 static struct cmd_list_element
*setarmcmdlist
= NULL
;
131 static struct cmd_list_element
*showarmcmdlist
= NULL
;
133 /* The type of floating-point to use. Keep this in sync with enum
134 arm_float_model, and the help string in _initialize_arm_tdep. */
135 static const char *const fp_model_strings
[] =
145 /* A variable that can be configured by the user. */
146 static enum arm_float_model arm_fp_model
= ARM_FLOAT_AUTO
;
147 static const char *current_fp_model
= "auto";
149 /* The ABI to use. Keep this in sync with arm_abi_kind. */
150 static const char *const arm_abi_strings
[] =
158 /* A variable that can be configured by the user. */
159 static enum arm_abi_kind arm_abi_global
= ARM_ABI_AUTO
;
160 static const char *arm_abi_string
= "auto";
162 /* The execution mode to assume. */
163 static const char *const arm_mode_strings
[] =
171 static const char *arm_fallback_mode_string
= "auto";
172 static const char *arm_force_mode_string
= "auto";
174 /* The standard register names, and all the valid aliases for them. Note
175 that `fp', `sp' and `pc' are not added in this alias list, because they
176 have been added as builtin user registers in
177 std-regs.c:_initialize_frame_reg. */
182 } arm_register_aliases
[] = {
183 /* Basic register numbers. */
200 /* Synonyms (argument and variable registers). */
213 /* Other platform-specific names for r9. */
219 /* Names used by GCC (not listed in the ARM EABI). */
221 /* A special name from the older ATPCS. */
225 static const char *const arm_register_names
[] =
226 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
227 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
228 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
229 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
230 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
231 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
232 "fps", "cpsr" }; /* 24 25 */
234 /* Holds the current set of options to be passed to the disassembler. */
235 static char *arm_disassembler_options
;
237 /* Valid register name styles. */
238 static const char **valid_disassembly_styles
;
240 /* Disassembly style to use. Default to "std" register names. */
241 static const char *disassembly_style
;
243 /* This is used to keep the bfd arch_info in sync with the disassembly
245 static void set_disassembly_style_sfunc (const char *, int,
246 struct cmd_list_element
*);
247 static void show_disassembly_style_sfunc (struct ui_file
*, int,
248 struct cmd_list_element
*,
251 static enum register_status
arm_neon_quad_read (struct gdbarch
*gdbarch
,
252 readable_regcache
*regcache
,
253 int regnum
, gdb_byte
*buf
);
254 static void arm_neon_quad_write (struct gdbarch
*gdbarch
,
255 struct regcache
*regcache
,
256 int regnum
, const gdb_byte
*buf
);
259 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs
*self
);
262 /* get_next_pcs operations. */
263 static struct arm_get_next_pcs_ops arm_get_next_pcs_ops
= {
264 arm_get_next_pcs_read_memory_unsigned_integer
,
265 arm_get_next_pcs_syscall_next_pc
,
266 arm_get_next_pcs_addr_bits_remove
,
267 arm_get_next_pcs_is_thumb
,
271 struct arm_prologue_cache
273 /* The stack pointer at the time this frame was created; i.e. the
274 caller's stack pointer when this function was called. It is used
275 to identify this frame. */
278 /* The frame base for this frame is just prev_sp - frame size.
279 FRAMESIZE is the distance from the frame pointer to the
280 initial stack pointer. */
284 /* The register used to hold the frame pointer for this frame. */
287 /* Saved register offsets. */
288 struct trad_frame_saved_reg
*saved_regs
;
291 static CORE_ADDR
arm_analyze_prologue (struct gdbarch
*gdbarch
,
292 CORE_ADDR prologue_start
,
293 CORE_ADDR prologue_end
,
294 struct arm_prologue_cache
*cache
);
296 /* Architecture version for displaced stepping. This effects the behaviour of
297 certain instructions, and really should not be hard-wired. */
299 #define DISPLACED_STEPPING_ARCH_VERSION 5
301 /* Set to true if the 32-bit mode is in use. */
305 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
308 arm_psr_thumb_bit (struct gdbarch
*gdbarch
)
310 if (gdbarch_tdep (gdbarch
)->is_m
)
316 /* Determine if the processor is currently executing in Thumb mode. */
319 arm_is_thumb (struct regcache
*regcache
)
322 ULONGEST t_bit
= arm_psr_thumb_bit (regcache
->arch ());
324 cpsr
= regcache_raw_get_unsigned (regcache
, ARM_PS_REGNUM
);
326 return (cpsr
& t_bit
) != 0;
329 /* Determine if FRAME is executing in Thumb mode. */
332 arm_frame_is_thumb (struct frame_info
*frame
)
335 ULONGEST t_bit
= arm_psr_thumb_bit (get_frame_arch (frame
));
337 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
338 directly (from a signal frame or dummy frame) or by interpreting
339 the saved LR (from a prologue or DWARF frame). So consult it and
340 trust the unwinders. */
341 cpsr
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
343 return (cpsr
& t_bit
) != 0;
346 /* Search for the mapping symbol covering MEMADDR. If one is found,
347 return its type. Otherwise, return 0. If START is non-NULL,
348 set *START to the location of the mapping symbol. */
351 arm_find_mapping_symbol (CORE_ADDR memaddr
, CORE_ADDR
*start
)
353 struct obj_section
*sec
;
355 /* If there are mapping symbols, consult them. */
356 sec
= find_pc_section (memaddr
);
359 arm_per_objfile
*data
= arm_objfile_data_key
.get (sec
->objfile
);
362 unsigned int section_idx
= sec
->the_bfd_section
->index
;
363 arm_mapping_symbol_vec
&map
364 = data
->section_maps
[section_idx
];
366 /* Sort the vector on first use. */
367 if (!data
->section_maps_sorted
[section_idx
])
369 std::sort (map
.begin (), map
.end ());
370 data
->section_maps_sorted
[section_idx
] = true;
373 struct arm_mapping_symbol map_key
374 = { memaddr
- obj_section_addr (sec
), 0 };
375 arm_mapping_symbol_vec::const_iterator it
376 = std::lower_bound (map
.begin (), map
.end (), map_key
);
378 /* std::lower_bound finds the earliest ordered insertion
379 point. If the symbol at this position starts at this exact
380 address, we use that; otherwise, the preceding
381 mapping symbol covers this address. */
384 if (it
->value
== map_key
.value
)
387 *start
= it
->value
+ obj_section_addr (sec
);
392 if (it
> map
.begin ())
394 arm_mapping_symbol_vec::const_iterator prev_it
398 *start
= prev_it
->value
+ obj_section_addr (sec
);
399 return prev_it
->type
;
407 /* Determine if the program counter specified in MEMADDR is in a Thumb
408 function. This function should be called for addresses unrelated to
409 any executing frame; otherwise, prefer arm_frame_is_thumb. */
412 arm_pc_is_thumb (struct gdbarch
*gdbarch
, CORE_ADDR memaddr
)
414 struct bound_minimal_symbol sym
;
416 arm_displaced_step_closure
*dsc
417 = ((arm_displaced_step_closure
* )
418 get_displaced_step_closure_by_addr (memaddr
));
420 /* If checking the mode of displaced instruction in copy area, the mode
421 should be determined by instruction on the original address. */
425 fprintf_unfiltered (gdb_stdlog
,
426 "displaced: check mode of %.8lx instead of %.8lx\n",
427 (unsigned long) dsc
->insn_addr
,
428 (unsigned long) memaddr
);
429 memaddr
= dsc
->insn_addr
;
432 /* If bit 0 of the address is set, assume this is a Thumb address. */
433 if (IS_THUMB_ADDR (memaddr
))
436 /* If the user wants to override the symbol table, let him. */
437 if (strcmp (arm_force_mode_string
, "arm") == 0)
439 if (strcmp (arm_force_mode_string
, "thumb") == 0)
442 /* ARM v6-M and v7-M are always in Thumb mode. */
443 if (gdbarch_tdep (gdbarch
)->is_m
)
446 /* If there are mapping symbols, consult them. */
447 type
= arm_find_mapping_symbol (memaddr
, NULL
);
451 /* Thumb functions have a "special" bit set in minimal symbols. */
452 sym
= lookup_minimal_symbol_by_pc (memaddr
);
454 return (MSYMBOL_IS_SPECIAL (sym
.minsym
));
456 /* If the user wants to override the fallback mode, let them. */
457 if (strcmp (arm_fallback_mode_string
, "arm") == 0)
459 if (strcmp (arm_fallback_mode_string
, "thumb") == 0)
462 /* If we couldn't find any symbol, but we're talking to a running
463 target, then trust the current value of $cpsr. This lets
464 "display/i $pc" always show the correct mode (though if there is
465 a symbol table we will not reach here, so it still may not be
466 displayed in the mode it will be executed). */
467 if (target_has_registers
)
468 return arm_frame_is_thumb (get_current_frame ());
470 /* Otherwise we're out of luck; we assume ARM. */
474 /* Determine if the address specified equals any of these magic return
475 values, called EXC_RETURN, defined by the ARM v6-M and v7-M
478 From ARMv6-M Reference Manual B1.5.8
479 Table B1-5 Exception return behavior
481 EXC_RETURN Return To Return Stack
482 0xFFFFFFF1 Handler mode Main
483 0xFFFFFFF9 Thread mode Main
484 0xFFFFFFFD Thread mode Process
486 From ARMv7-M Reference Manual B1.5.8
487 Table B1-8 EXC_RETURN definition of exception return behavior, no FP
489 EXC_RETURN Return To Return Stack
490 0xFFFFFFF1 Handler mode Main
491 0xFFFFFFF9 Thread mode Main
492 0xFFFFFFFD Thread mode Process
494 Table B1-9 EXC_RETURN definition of exception return behavior, with
497 EXC_RETURN Return To Return Stack Frame Type
498 0xFFFFFFE1 Handler mode Main Extended
499 0xFFFFFFE9 Thread mode Main Extended
500 0xFFFFFFED Thread mode Process Extended
501 0xFFFFFFF1 Handler mode Main Basic
502 0xFFFFFFF9 Thread mode Main Basic
503 0xFFFFFFFD Thread mode Process Basic
505 For more details see "B1.5.8 Exception return behavior"
506 in both ARMv6-M and ARMv7-M Architecture Reference Manuals. */
509 arm_m_addr_is_magic (CORE_ADDR addr
)
513 /* Values from Tables in B1.5.8 the EXC_RETURN definitions of
514 the exception return behavior. */
521 /* Address is magic. */
525 /* Address is not magic. */
530 /* Remove useless bits from addresses in a running program. */
532 arm_addr_bits_remove (struct gdbarch
*gdbarch
, CORE_ADDR val
)
534 /* On M-profile devices, do not strip the low bit from EXC_RETURN
535 (the magic exception return address). */
536 if (gdbarch_tdep (gdbarch
)->is_m
537 && arm_m_addr_is_magic (val
))
541 return UNMAKE_THUMB_ADDR (val
);
543 return (val
& 0x03fffffc);
546 /* Return 1 if PC is the start of a compiler helper function which
547 can be safely ignored during prologue skipping. IS_THUMB is true
548 if the function is known to be a Thumb function due to the way it
551 skip_prologue_function (struct gdbarch
*gdbarch
, CORE_ADDR pc
, int is_thumb
)
553 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
554 struct bound_minimal_symbol msym
;
556 msym
= lookup_minimal_symbol_by_pc (pc
);
557 if (msym
.minsym
!= NULL
558 && BMSYMBOL_VALUE_ADDRESS (msym
) == pc
559 && MSYMBOL_LINKAGE_NAME (msym
.minsym
) != NULL
)
561 const char *name
= MSYMBOL_LINKAGE_NAME (msym
.minsym
);
563 /* The GNU linker's Thumb call stub to foo is named
565 if (strstr (name
, "_from_thumb") != NULL
)
568 /* On soft-float targets, __truncdfsf2 is called to convert promoted
569 arguments to their argument types in non-prototyped
571 if (startswith (name
, "__truncdfsf2"))
573 if (startswith (name
, "__aeabi_d2f"))
576 /* Internal functions related to thread-local storage. */
577 if (startswith (name
, "__tls_get_addr"))
579 if (startswith (name
, "__aeabi_read_tp"))
584 /* If we run against a stripped glibc, we may be unable to identify
585 special functions by name. Check for one important case,
586 __aeabi_read_tp, by comparing the *code* against the default
587 implementation (this is hand-written ARM assembler in glibc). */
590 && read_code_unsigned_integer (pc
, 4, byte_order_for_code
)
591 == 0xe3e00a0f /* mov r0, #0xffff0fff */
592 && read_code_unsigned_integer (pc
+ 4, 4, byte_order_for_code
)
593 == 0xe240f01f) /* sub pc, r0, #31 */
600 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
601 the first 16-bit of instruction, and INSN2 is the second 16-bit of
603 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
604 ((bits ((insn1), 0, 3) << 12) \
605 | (bits ((insn1), 10, 10) << 11) \
606 | (bits ((insn2), 12, 14) << 8) \
607 | bits ((insn2), 0, 7))
609 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
610 the 32-bit instruction. */
611 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
612 ((bits ((insn), 16, 19) << 12) \
613 | bits ((insn), 0, 11))
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  /* The top five bits of the 12-bit modified immediate select the
     encoding; for COUNT >= 8 they also give the rotation amount.  */
  unsigned int count = imm >> 7;

  if (count < 8)
    switch (count / 2)
      {
      case 0:
	/* 00000000 00000000 00000000 abcdefgh.  */
	return (imm & 0xff);
      case 1:
	/* 00000000 abcdefgh 00000000 abcdefgh.  */
	return (imm & 0xff) | ((imm & 0xff) << 16);
      case 2:
	/* abcdefgh 00000000 abcdefgh 00000000.  */
	return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
      case 3:
	/* abcdefgh abcdefgh abcdefgh abcdefgh.  */
	return (imm & 0xff) | ((imm & 0xff) << 8)
		| ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
      }

  /* Rotated constant: 1bcdefgh rotated right by COUNT bits.  */
  return (0x80 | (imm & 0x7f)) << (32 - count);
}
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  return (insn == 0x46bd		 /* mov sp, r7 */
	  || (insn & 0xff80) == 0xb000	 /* add sp, imm */
	  || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
}
650 /* Analyze a Thumb prologue, looking for a recognizable stack frame
651 and frame pointer. Scan until we encounter a store that could
652 clobber the stack frame unexpectedly, or an unknown instruction.
653 Return the last address which is definitely safe to skip for an
654 initial breakpoint. */
657 thumb_analyze_prologue (struct gdbarch
*gdbarch
,
658 CORE_ADDR start
, CORE_ADDR limit
,
659 struct arm_prologue_cache
*cache
)
661 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
662 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
666 CORE_ADDR unrecognized_pc
= 0;
668 for (i
= 0; i
< 16; i
++)
669 regs
[i
] = pv_register (i
, 0);
670 pv_area
stack (ARM_SP_REGNUM
, gdbarch_addr_bit (gdbarch
));
672 while (start
< limit
)
676 insn
= read_code_unsigned_integer (start
, 2, byte_order_for_code
);
678 if ((insn
& 0xfe00) == 0xb400) /* push { rlist } */
683 if (stack
.store_would_trash (regs
[ARM_SP_REGNUM
]))
686 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
687 whether to save LR (R14). */
688 mask
= (insn
& 0xff) | ((insn
& 0x100) << 6);
690 /* Calculate offsets of saved R0-R7 and LR. */
691 for (regno
= ARM_LR_REGNUM
; regno
>= 0; regno
--)
692 if (mask
& (1 << regno
))
694 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
],
696 stack
.store (regs
[ARM_SP_REGNUM
], 4, regs
[regno
]);
699 else if ((insn
& 0xff80) == 0xb080) /* sub sp, #imm */
701 offset
= (insn
& 0x7f) << 2; /* get scaled offset */
702 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
],
705 else if (thumb_instruction_restores_sp (insn
))
707 /* Don't scan past the epilogue. */
710 else if ((insn
& 0xf800) == 0xa800) /* add Rd, sp, #imm */
711 regs
[bits (insn
, 8, 10)] = pv_add_constant (regs
[ARM_SP_REGNUM
],
713 else if ((insn
& 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
714 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
))
715 regs
[bits (insn
, 0, 2)] = pv_add_constant (regs
[bits (insn
, 3, 5)],
717 else if ((insn
& 0xf800) == 0x3000 /* add Rd, #imm */
718 && pv_is_register (regs
[bits (insn
, 8, 10)], ARM_SP_REGNUM
))
719 regs
[bits (insn
, 8, 10)] = pv_add_constant (regs
[bits (insn
, 8, 10)],
721 else if ((insn
& 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
722 && pv_is_register (regs
[bits (insn
, 6, 8)], ARM_SP_REGNUM
)
723 && pv_is_constant (regs
[bits (insn
, 3, 5)]))
724 regs
[bits (insn
, 0, 2)] = pv_add (regs
[bits (insn
, 3, 5)],
725 regs
[bits (insn
, 6, 8)]);
726 else if ((insn
& 0xff00) == 0x4400 /* add Rd, Rm */
727 && pv_is_constant (regs
[bits (insn
, 3, 6)]))
729 int rd
= (bit (insn
, 7) << 3) + bits (insn
, 0, 2);
730 int rm
= bits (insn
, 3, 6);
731 regs
[rd
] = pv_add (regs
[rd
], regs
[rm
]);
733 else if ((insn
& 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
735 int dst_reg
= (insn
& 0x7) + ((insn
& 0x80) >> 4);
736 int src_reg
= (insn
& 0x78) >> 3;
737 regs
[dst_reg
] = regs
[src_reg
];
739 else if ((insn
& 0xf800) == 0x9000) /* str rd, [sp, #off] */
741 /* Handle stores to the stack. Normally pushes are used,
742 but with GCC -mtpcs-frame, there may be other stores
743 in the prologue to create the frame. */
744 int regno
= (insn
>> 8) & 0x7;
747 offset
= (insn
& 0xff) << 2;
748 addr
= pv_add_constant (regs
[ARM_SP_REGNUM
], offset
);
750 if (stack
.store_would_trash (addr
))
753 stack
.store (addr
, 4, regs
[regno
]);
755 else if ((insn
& 0xf800) == 0x6000) /* str rd, [rn, #off] */
757 int rd
= bits (insn
, 0, 2);
758 int rn
= bits (insn
, 3, 5);
761 offset
= bits (insn
, 6, 10) << 2;
762 addr
= pv_add_constant (regs
[rn
], offset
);
764 if (stack
.store_would_trash (addr
))
767 stack
.store (addr
, 4, regs
[rd
]);
769 else if (((insn
& 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
770 || (insn
& 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
771 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
))
772 /* Ignore stores of argument registers to the stack. */
774 else if ((insn
& 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
775 && pv_is_register (regs
[bits (insn
, 8, 10)], ARM_SP_REGNUM
))
776 /* Ignore block loads from the stack, potentially copying
777 parameters from memory. */
779 else if ((insn
& 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
780 || ((insn
& 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
781 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
)))
782 /* Similarly ignore single loads from the stack. */
784 else if ((insn
& 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
785 || (insn
& 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
786 /* Skip register copies, i.e. saves to another register
787 instead of the stack. */
789 else if ((insn
& 0xf800) == 0x2000) /* movs Rd, #imm */
790 /* Recognize constant loads; even with small stacks these are necessary
792 regs
[bits (insn
, 8, 10)] = pv_constant (bits (insn
, 0, 7));
793 else if ((insn
& 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
795 /* Constant pool loads, for the same reason. */
796 unsigned int constant
;
799 loc
= start
+ 4 + bits (insn
, 0, 7) * 4;
800 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
801 regs
[bits (insn
, 8, 10)] = pv_constant (constant
);
803 else if (thumb_insn_size (insn
) == 4) /* 32-bit Thumb-2 instructions. */
805 unsigned short inst2
;
807 inst2
= read_code_unsigned_integer (start
+ 2, 2,
808 byte_order_for_code
);
810 if ((insn
& 0xf800) == 0xf000 && (inst2
& 0xe800) == 0xe800)
812 /* BL, BLX. Allow some special function calls when
813 skipping the prologue; GCC generates these before
814 storing arguments to the stack. */
816 int j1
, j2
, imm1
, imm2
;
818 imm1
= sbits (insn
, 0, 10);
819 imm2
= bits (inst2
, 0, 10);
820 j1
= bit (inst2
, 13);
821 j2
= bit (inst2
, 11);
823 offset
= ((imm1
<< 12) + (imm2
<< 1));
824 offset
^= ((!j2
) << 22) | ((!j1
) << 23);
826 nextpc
= start
+ 4 + offset
;
827 /* For BLX make sure to clear the low bits. */
828 if (bit (inst2
, 12) == 0)
829 nextpc
= nextpc
& 0xfffffffc;
831 if (!skip_prologue_function (gdbarch
, nextpc
,
832 bit (inst2
, 12) != 0))
836 else if ((insn
& 0xffd0) == 0xe900 /* stmdb Rn{!},
838 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
840 pv_t addr
= regs
[bits (insn
, 0, 3)];
843 if (stack
.store_would_trash (addr
))
846 /* Calculate offsets of saved registers. */
847 for (regno
= ARM_LR_REGNUM
; regno
>= 0; regno
--)
848 if (inst2
& (1 << regno
))
850 addr
= pv_add_constant (addr
, -4);
851 stack
.store (addr
, 4, regs
[regno
]);
855 regs
[bits (insn
, 0, 3)] = addr
;
858 else if ((insn
& 0xff50) == 0xe940 /* strd Rt, Rt2,
860 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
862 int regno1
= bits (inst2
, 12, 15);
863 int regno2
= bits (inst2
, 8, 11);
864 pv_t addr
= regs
[bits (insn
, 0, 3)];
866 offset
= inst2
& 0xff;
868 addr
= pv_add_constant (addr
, offset
);
870 addr
= pv_add_constant (addr
, -offset
);
872 if (stack
.store_would_trash (addr
))
875 stack
.store (addr
, 4, regs
[regno1
]);
876 stack
.store (pv_add_constant (addr
, 4),
880 regs
[bits (insn
, 0, 3)] = addr
;
883 else if ((insn
& 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
884 && (inst2
& 0x0c00) == 0x0c00
885 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
887 int regno
= bits (inst2
, 12, 15);
888 pv_t addr
= regs
[bits (insn
, 0, 3)];
890 offset
= inst2
& 0xff;
892 addr
= pv_add_constant (addr
, offset
);
894 addr
= pv_add_constant (addr
, -offset
);
896 if (stack
.store_would_trash (addr
))
899 stack
.store (addr
, 4, regs
[regno
]);
902 regs
[bits (insn
, 0, 3)] = addr
;
905 else if ((insn
& 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
906 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
908 int regno
= bits (inst2
, 12, 15);
911 offset
= inst2
& 0xfff;
912 addr
= pv_add_constant (regs
[bits (insn
, 0, 3)], offset
);
914 if (stack
.store_would_trash (addr
))
917 stack
.store (addr
, 4, regs
[regno
]);
920 else if ((insn
& 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
921 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
922 /* Ignore stores of argument registers to the stack. */
925 else if ((insn
& 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
926 && (inst2
& 0x0d00) == 0x0c00
927 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
928 /* Ignore stores of argument registers to the stack. */
931 else if ((insn
& 0xffd0) == 0xe890 /* ldmia Rn[!],
933 && (inst2
& 0x8000) == 0x0000
934 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
935 /* Ignore block loads from the stack, potentially copying
936 parameters from memory. */
939 else if ((insn
& 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
941 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
942 /* Similarly ignore dual loads from the stack. */
945 else if ((insn
& 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
946 && (inst2
& 0x0d00) == 0x0c00
947 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
948 /* Similarly ignore single loads from the stack. */
951 else if ((insn
& 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
952 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
953 /* Similarly ignore single loads from the stack. */
956 else if ((insn
& 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
957 && (inst2
& 0x8000) == 0x0000)
959 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
960 | (bits (inst2
, 12, 14) << 8)
961 | bits (inst2
, 0, 7));
963 regs
[bits (inst2
, 8, 11)]
964 = pv_add_constant (regs
[bits (insn
, 0, 3)],
965 thumb_expand_immediate (imm
));
968 else if ((insn
& 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
969 && (inst2
& 0x8000) == 0x0000)
971 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
972 | (bits (inst2
, 12, 14) << 8)
973 | bits (inst2
, 0, 7));
975 regs
[bits (inst2
, 8, 11)]
976 = pv_add_constant (regs
[bits (insn
, 0, 3)], imm
);
979 else if ((insn
& 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
980 && (inst2
& 0x8000) == 0x0000)
982 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
983 | (bits (inst2
, 12, 14) << 8)
984 | bits (inst2
, 0, 7));
986 regs
[bits (inst2
, 8, 11)]
987 = pv_add_constant (regs
[bits (insn
, 0, 3)],
988 - (CORE_ADDR
) thumb_expand_immediate (imm
));
991 else if ((insn
& 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
992 && (inst2
& 0x8000) == 0x0000)
994 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
995 | (bits (inst2
, 12, 14) << 8)
996 | bits (inst2
, 0, 7));
998 regs
[bits (inst2
, 8, 11)]
999 = pv_add_constant (regs
[bits (insn
, 0, 3)], - (CORE_ADDR
) imm
);
1002 else if ((insn
& 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1004 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1005 | (bits (inst2
, 12, 14) << 8)
1006 | bits (inst2
, 0, 7));
1008 regs
[bits (inst2
, 8, 11)]
1009 = pv_constant (thumb_expand_immediate (imm
));
1012 else if ((insn
& 0xfbf0) == 0xf240) /* movw Rd, #const */
1015 = EXTRACT_MOVW_MOVT_IMM_T (insn
, inst2
);
1017 regs
[bits (inst2
, 8, 11)] = pv_constant (imm
);
1020 else if (insn
== 0xea5f /* mov.w Rd,Rm */
1021 && (inst2
& 0xf0f0) == 0)
1023 int dst_reg
= (inst2
& 0x0f00) >> 8;
1024 int src_reg
= inst2
& 0xf;
1025 regs
[dst_reg
] = regs
[src_reg
];
1028 else if ((insn
& 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1030 /* Constant pool loads. */
1031 unsigned int constant
;
1034 offset
= bits (inst2
, 0, 11);
1036 loc
= start
+ 4 + offset
;
1038 loc
= start
+ 4 - offset
;
1040 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
1041 regs
[bits (inst2
, 12, 15)] = pv_constant (constant
);
1044 else if ((insn
& 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1046 /* Constant pool loads. */
1047 unsigned int constant
;
1050 offset
= bits (inst2
, 0, 7) << 2;
1052 loc
= start
+ 4 + offset
;
1054 loc
= start
+ 4 - offset
;
1056 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
1057 regs
[bits (inst2
, 12, 15)] = pv_constant (constant
);
1059 constant
= read_memory_unsigned_integer (loc
+ 4, 4, byte_order
);
1060 regs
[bits (inst2
, 8, 11)] = pv_constant (constant
);
1063 else if (thumb2_instruction_changes_pc (insn
, inst2
))
1065 /* Don't scan past anything that might change control flow. */
1070 /* The optimizer might shove anything into the prologue,
1071 so we just skip what we don't recognize. */
1072 unrecognized_pc
= start
;
1077 else if (thumb_instruction_changes_pc (insn
))
1079 /* Don't scan past anything that might change control flow. */
1084 /* The optimizer might shove anything into the prologue,
1085 so we just skip what we don't recognize. */
1086 unrecognized_pc
= start
;
1093 fprintf_unfiltered (gdb_stdlog
, "Prologue scan stopped at %s\n",
1094 paddress (gdbarch
, start
));
1096 if (unrecognized_pc
== 0)
1097 unrecognized_pc
= start
;
1100 return unrecognized_pc
;
1102 if (pv_is_register (regs
[ARM_FP_REGNUM
], ARM_SP_REGNUM
))
1104 /* Frame pointer is fp. Frame size is constant. */
1105 cache
->framereg
= ARM_FP_REGNUM
;
1106 cache
->framesize
= -regs
[ARM_FP_REGNUM
].k
;
1108 else if (pv_is_register (regs
[THUMB_FP_REGNUM
], ARM_SP_REGNUM
))
1110 /* Frame pointer is r7. Frame size is constant. */
1111 cache
->framereg
= THUMB_FP_REGNUM
;
1112 cache
->framesize
= -regs
[THUMB_FP_REGNUM
].k
;
1116 /* Try the stack pointer... this is a bit desperate. */
1117 cache
->framereg
= ARM_SP_REGNUM
;
1118 cache
->framesize
= -regs
[ARM_SP_REGNUM
].k
;
1121 for (i
= 0; i
< 16; i
++)
1122 if (stack
.find_reg (gdbarch
, i
, &offset
))
1123 cache
->saved_regs
[i
].addr
= offset
;
1125 return unrecognized_pc
;
1129 /* Try to analyze the instructions starting from PC, which load symbol
1130 __stack_chk_guard. Return the address of instruction after loading this
1131 symbol, set the dest register number to *BASEREG, and set the size of
1132 instructions for loading symbol in OFFSET. Return 0 if instructions are
1136 arm_analyze_load_stack_chk_guard(CORE_ADDR pc
, struct gdbarch
*gdbarch
,
1137 unsigned int *destreg
, int *offset
)
1139 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1140 int is_thumb
= arm_pc_is_thumb (gdbarch
, pc
);
1141 unsigned int low
, high
, address
;
1146 unsigned short insn1
1147 = read_code_unsigned_integer (pc
, 2, byte_order_for_code
);
1149 if ((insn1
& 0xf800) == 0x4800) /* ldr Rd, #immed */
1151 *destreg
= bits (insn1
, 8, 10);
1153 address
= (pc
& 0xfffffffc) + 4 + (bits (insn1
, 0, 7) << 2);
1154 address
= read_memory_unsigned_integer (address
, 4,
1155 byte_order_for_code
);
1157 else if ((insn1
& 0xfbf0) == 0xf240) /* movw Rd, #const */
1159 unsigned short insn2
1160 = read_code_unsigned_integer (pc
+ 2, 2, byte_order_for_code
);
1162 low
= EXTRACT_MOVW_MOVT_IMM_T (insn1
, insn2
);
1165 = read_code_unsigned_integer (pc
+ 4, 2, byte_order_for_code
);
1167 = read_code_unsigned_integer (pc
+ 6, 2, byte_order_for_code
);
1169 /* movt Rd, #const */
1170 if ((insn1
& 0xfbc0) == 0xf2c0)
1172 high
= EXTRACT_MOVW_MOVT_IMM_T (insn1
, insn2
);
1173 *destreg
= bits (insn2
, 8, 11);
1175 address
= (high
<< 16 | low
);
1182 = read_code_unsigned_integer (pc
, 4, byte_order_for_code
);
1184 if ((insn
& 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1186 address
= bits (insn
, 0, 11) + pc
+ 8;
1187 address
= read_memory_unsigned_integer (address
, 4,
1188 byte_order_for_code
);
1190 *destreg
= bits (insn
, 12, 15);
1193 else if ((insn
& 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1195 low
= EXTRACT_MOVW_MOVT_IMM_A (insn
);
1198 = read_code_unsigned_integer (pc
+ 4, 4, byte_order_for_code
);
1200 if ((insn
& 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1202 high
= EXTRACT_MOVW_MOVT_IMM_A (insn
);
1203 *destreg
= bits (insn
, 12, 15);
1205 address
= (high
<< 16 | low
);
1213 /* Try to skip a sequence of instructions used for stack protector. If PC
1214 points to the first instruction of this sequence, return the address of
1215 first instruction after this sequence, otherwise, return original PC.
1217 On arm, this sequence of instructions is composed of mainly three steps,
1218 Step 1: load symbol __stack_chk_guard,
1219 Step 2: load from address of __stack_chk_guard,
1220 Step 3: store it to somewhere else.
1222 Usually, instructions on step 2 and step 3 are the same on various ARM
1223 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1224 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1225 instructions in step 1 vary from different ARM architectures. On ARMv7,
1228 movw Rn, #:lower16:__stack_chk_guard
1229 movt Rn, #:upper16:__stack_chk_guard
1236 .word __stack_chk_guard
1238 Since ldr/str is a very popular instruction, we can't use them as
1239 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1240 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1241 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1244 arm_skip_stack_protector(CORE_ADDR pc
, struct gdbarch
*gdbarch
)
1246 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1247 unsigned int basereg
;
1248 struct bound_minimal_symbol stack_chk_guard
;
1250 int is_thumb
= arm_pc_is_thumb (gdbarch
, pc
);
1253 /* Try to parse the instructions in Step 1. */
1254 addr
= arm_analyze_load_stack_chk_guard (pc
, gdbarch
,
1259 stack_chk_guard
= lookup_minimal_symbol_by_pc (addr
);
1260 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1261 Otherwise, this sequence cannot be for stack protector. */
1262 if (stack_chk_guard
.minsym
== NULL
1263 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard
.minsym
), "__stack_chk_guard"))
1268 unsigned int destreg
;
1270 = read_code_unsigned_integer (pc
+ offset
, 2, byte_order_for_code
);
1272 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1273 if ((insn
& 0xf800) != 0x6800)
1275 if (bits (insn
, 3, 5) != basereg
)
1277 destreg
= bits (insn
, 0, 2);
1279 insn
= read_code_unsigned_integer (pc
+ offset
+ 2, 2,
1280 byte_order_for_code
);
1281 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1282 if ((insn
& 0xf800) != 0x6000)
1284 if (destreg
!= bits (insn
, 0, 2))
1289 unsigned int destreg
;
1291 = read_code_unsigned_integer (pc
+ offset
, 4, byte_order_for_code
);
1293 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1294 if ((insn
& 0x0e500000) != 0x04100000)
1296 if (bits (insn
, 16, 19) != basereg
)
1298 destreg
= bits (insn
, 12, 15);
1299 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1300 insn
= read_code_unsigned_integer (pc
+ offset
+ 4,
1301 4, byte_order_for_code
);
1302 if ((insn
& 0x0e500000) != 0x04000000)
1304 if (bits (insn
, 12, 15) != destreg
)
1307 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1310 return pc
+ offset
+ 4;
1312 return pc
+ offset
+ 8;
1315 /* Advance the PC across any function entry prologue instructions to
1316 reach some "real" code.
1318 The APCS (ARM Procedure Call Standard) defines the following
1322 [stmfd sp!, {a1,a2,a3,a4}]
1323 stmfd sp!, {...,fp,ip,lr,pc}
1324 [stfe f7, [sp, #-12]!]
1325 [stfe f6, [sp, #-12]!]
1326 [stfe f5, [sp, #-12]!]
1327 [stfe f4, [sp, #-12]!]
1328 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1331 arm_skip_prologue (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
1333 CORE_ADDR func_addr
, limit_pc
;
1335 /* See if we can determine the end of the prologue via the symbol table.
1336 If so, then return either PC, or the PC after the prologue, whichever
1338 if (find_pc_partial_function (pc
, NULL
, &func_addr
, NULL
))
1340 CORE_ADDR post_prologue_pc
1341 = skip_prologue_using_sal (gdbarch
, func_addr
);
1342 struct compunit_symtab
*cust
= find_pc_compunit_symtab (func_addr
);
1344 if (post_prologue_pc
)
1346 = arm_skip_stack_protector (post_prologue_pc
, gdbarch
);
1349 /* GCC always emits a line note before the prologue and another
1350 one after, even if the two are at the same address or on the
1351 same line. Take advantage of this so that we do not need to
1352 know every instruction that might appear in the prologue. We
1353 will have producer information for most binaries; if it is
1354 missing (e.g. for -gstabs), assuming the GNU tools. */
1355 if (post_prologue_pc
1357 || COMPUNIT_PRODUCER (cust
) == NULL
1358 || startswith (COMPUNIT_PRODUCER (cust
), "GNU ")
1359 || startswith (COMPUNIT_PRODUCER (cust
), "clang ")))
1360 return post_prologue_pc
;
1362 if (post_prologue_pc
!= 0)
1364 CORE_ADDR analyzed_limit
;
1366 /* For non-GCC compilers, make sure the entire line is an
1367 acceptable prologue; GDB will round this function's
1368 return value up to the end of the following line so we
1369 can not skip just part of a line (and we do not want to).
1371 RealView does not treat the prologue specially, but does
1372 associate prologue code with the opening brace; so this
1373 lets us skip the first line if we think it is the opening
1375 if (arm_pc_is_thumb (gdbarch
, func_addr
))
1376 analyzed_limit
= thumb_analyze_prologue (gdbarch
, func_addr
,
1377 post_prologue_pc
, NULL
);
1379 analyzed_limit
= arm_analyze_prologue (gdbarch
, func_addr
,
1380 post_prologue_pc
, NULL
);
1382 if (analyzed_limit
!= post_prologue_pc
)
1385 return post_prologue_pc
;
1389 /* Can't determine prologue from the symbol table, need to examine
1392 /* Find an upper limit on the function prologue using the debug
1393 information. If the debug information could not be used to provide
1394 that bound, then use an arbitrary large number as the upper bound. */
1395 /* Like arm_scan_prologue, stop no later than pc + 64. */
1396 limit_pc
= skip_prologue_using_sal (gdbarch
, pc
);
1398 limit_pc
= pc
+ 64; /* Magic. */
1401 /* Check if this is Thumb code. */
1402 if (arm_pc_is_thumb (gdbarch
, pc
))
1403 return thumb_analyze_prologue (gdbarch
, pc
, limit_pc
, NULL
);
1405 return arm_analyze_prologue (gdbarch
, pc
, limit_pc
, NULL
);
1409 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1410 This function decodes a Thumb function prologue to determine:
1411 1) the size of the stack frame
1412 2) which registers are saved on it
1413 3) the offsets of saved regs
1414 4) the offset from the stack pointer to the frame pointer
1416 A typical Thumb function prologue would create this stack frame
1417 (offsets relative to FP)
1418 old SP -> 24 stack parameters
1421 R7 -> 0 local variables (16 bytes)
1422 SP -> -12 additional stack space (12 bytes)
1423 The frame size would thus be 36 bytes, and the frame offset would be
1424 12 bytes. The frame register is R7.
1426 The comments for thumb_skip_prolog() describe the algorithm we use
1427 to detect the end of the prolog. */
1431 thumb_scan_prologue (struct gdbarch
*gdbarch
, CORE_ADDR prev_pc
,
1432 CORE_ADDR block_addr
, struct arm_prologue_cache
*cache
)
1434 CORE_ADDR prologue_start
;
1435 CORE_ADDR prologue_end
;
1437 if (find_pc_partial_function (block_addr
, NULL
, &prologue_start
,
1440 /* See comment in arm_scan_prologue for an explanation of
1442 if (prologue_end
> prologue_start
+ 64)
1444 prologue_end
= prologue_start
+ 64;
1448 /* We're in the boondocks: we have no idea where the start of the
1452 prologue_end
= std::min (prologue_end
, prev_pc
);
1454 thumb_analyze_prologue (gdbarch
, prologue_start
, prologue_end
, cache
);
1457 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1461 arm_instruction_restores_sp (unsigned int insn
)
1463 if (bits (insn
, 28, 31) != INST_NV
)
1465 if ((insn
& 0x0df0f000) == 0x0080d000
1466 /* ADD SP (register or immediate). */
1467 || (insn
& 0x0df0f000) == 0x0040d000
1468 /* SUB SP (register or immediate). */
1469 || (insn
& 0x0ffffff0) == 0x01a0d000
1471 || (insn
& 0x0fff0000) == 0x08bd0000
1473 || (insn
& 0x0fff0000) == 0x049d0000)
1474 /* POP of a single register. */
1481 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1482 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1483 fill it in. Return the first address not recognized as a prologue
1486 We recognize all the instructions typically found in ARM prologues,
1487 plus harmless instructions which can be skipped (either for analysis
1488 purposes, or a more restrictive set that can be skipped when finding
1489 the end of the prologue). */
1492 arm_analyze_prologue (struct gdbarch
*gdbarch
,
1493 CORE_ADDR prologue_start
, CORE_ADDR prologue_end
,
1494 struct arm_prologue_cache
*cache
)
1496 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1498 CORE_ADDR offset
, current_pc
;
1499 pv_t regs
[ARM_FPS_REGNUM
];
1500 CORE_ADDR unrecognized_pc
= 0;
1502 /* Search the prologue looking for instructions that set up the
1503 frame pointer, adjust the stack pointer, and save registers.
1505 Be careful, however, and if it doesn't look like a prologue,
1506 don't try to scan it. If, for instance, a frameless function
1507 begins with stmfd sp!, then we will tell ourselves there is
1508 a frame, which will confuse stack traceback, as well as "finish"
1509 and other operations that rely on a knowledge of the stack
1512 for (regno
= 0; regno
< ARM_FPS_REGNUM
; regno
++)
1513 regs
[regno
] = pv_register (regno
, 0);
1514 pv_area
stack (ARM_SP_REGNUM
, gdbarch_addr_bit (gdbarch
));
1516 for (current_pc
= prologue_start
;
1517 current_pc
< prologue_end
;
1521 = read_code_unsigned_integer (current_pc
, 4, byte_order_for_code
);
1523 if (insn
== 0xe1a0c00d) /* mov ip, sp */
1525 regs
[ARM_IP_REGNUM
] = regs
[ARM_SP_REGNUM
];
1528 else if ((insn
& 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1529 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1531 unsigned imm
= insn
& 0xff; /* immediate value */
1532 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1533 int rd
= bits (insn
, 12, 15);
1534 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1535 regs
[rd
] = pv_add_constant (regs
[bits (insn
, 16, 19)], imm
);
1538 else if ((insn
& 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1539 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1541 unsigned imm
= insn
& 0xff; /* immediate value */
1542 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1543 int rd
= bits (insn
, 12, 15);
1544 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1545 regs
[rd
] = pv_add_constant (regs
[bits (insn
, 16, 19)], -imm
);
1548 else if ((insn
& 0xffff0fff) == 0xe52d0004) /* str Rd,
1551 if (stack
.store_would_trash (regs
[ARM_SP_REGNUM
]))
1553 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -4);
1554 stack
.store (regs
[ARM_SP_REGNUM
], 4,
1555 regs
[bits (insn
, 12, 15)]);
1558 else if ((insn
& 0xffff0000) == 0xe92d0000)
1559 /* stmfd sp!, {..., fp, ip, lr, pc}
1561 stmfd sp!, {a1, a2, a3, a4} */
1563 int mask
= insn
& 0xffff;
1565 if (stack
.store_would_trash (regs
[ARM_SP_REGNUM
]))
1568 /* Calculate offsets of saved registers. */
1569 for (regno
= ARM_PC_REGNUM
; regno
>= 0; regno
--)
1570 if (mask
& (1 << regno
))
1573 = pv_add_constant (regs
[ARM_SP_REGNUM
], -4);
1574 stack
.store (regs
[ARM_SP_REGNUM
], 4, regs
[regno
]);
1577 else if ((insn
& 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1578 || (insn
& 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1579 || (insn
& 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1581 /* No need to add this to saved_regs -- it's just an arg reg. */
1584 else if ((insn
& 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1585 || (insn
& 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1586 || (insn
& 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1588 /* No need to add this to saved_regs -- it's just an arg reg. */
1591 else if ((insn
& 0xfff00000) == 0xe8800000 /* stm Rn,
1593 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1595 /* No need to add this to saved_regs -- it's just arg regs. */
1598 else if ((insn
& 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1600 unsigned imm
= insn
& 0xff; /* immediate value */
1601 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1602 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1603 regs
[ARM_FP_REGNUM
] = pv_add_constant (regs
[ARM_IP_REGNUM
], -imm
);
1605 else if ((insn
& 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1607 unsigned imm
= insn
& 0xff; /* immediate value */
1608 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1609 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1610 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -imm
);
1612 else if ((insn
& 0xffff7fff) == 0xed6d0103 /* stfe f?,
1614 && gdbarch_tdep (gdbarch
)->have_fpa_registers
)
1616 if (stack
.store_would_trash (regs
[ARM_SP_REGNUM
]))
1619 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -12);
1620 regno
= ARM_F0_REGNUM
+ ((insn
>> 12) & 0x07);
1621 stack
.store (regs
[ARM_SP_REGNUM
], 12, regs
[regno
]);
1623 else if ((insn
& 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1625 && gdbarch_tdep (gdbarch
)->have_fpa_registers
)
1627 int n_saved_fp_regs
;
1628 unsigned int fp_start_reg
, fp_bound_reg
;
1630 if (stack
.store_would_trash (regs
[ARM_SP_REGNUM
]))
1633 if ((insn
& 0x800) == 0x800) /* N0 is set */
1635 if ((insn
& 0x40000) == 0x40000) /* N1 is set */
1636 n_saved_fp_regs
= 3;
1638 n_saved_fp_regs
= 1;
1642 if ((insn
& 0x40000) == 0x40000) /* N1 is set */
1643 n_saved_fp_regs
= 2;
1645 n_saved_fp_regs
= 4;
1648 fp_start_reg
= ARM_F0_REGNUM
+ ((insn
>> 12) & 0x7);
1649 fp_bound_reg
= fp_start_reg
+ n_saved_fp_regs
;
1650 for (; fp_start_reg
< fp_bound_reg
; fp_start_reg
++)
1652 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -12);
1653 stack
.store (regs
[ARM_SP_REGNUM
], 12,
1654 regs
[fp_start_reg
++]);
1657 else if ((insn
& 0xff000000) == 0xeb000000 && cache
== NULL
) /* bl */
1659 /* Allow some special function calls when skipping the
1660 prologue; GCC generates these before storing arguments to
1662 CORE_ADDR dest
= BranchDest (current_pc
, insn
);
1664 if (skip_prologue_function (gdbarch
, dest
, 0))
1669 else if ((insn
& 0xf0000000) != 0xe0000000)
1670 break; /* Condition not true, exit early. */
1671 else if (arm_instruction_changes_pc (insn
))
1672 /* Don't scan past anything that might change control flow. */
1674 else if (arm_instruction_restores_sp (insn
))
1676 /* Don't scan past the epilogue. */
1679 else if ((insn
& 0xfe500000) == 0xe8100000 /* ldm */
1680 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1681 /* Ignore block loads from the stack, potentially copying
1682 parameters from memory. */
1684 else if ((insn
& 0xfc500000) == 0xe4100000
1685 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1686 /* Similarly ignore single loads from the stack. */
1688 else if ((insn
& 0xffff0ff0) == 0xe1a00000)
1689 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1690 register instead of the stack. */
1694 /* The optimizer might shove anything into the prologue, if
1695 we build up cache (cache != NULL) from scanning prologue,
1696 we just skip what we don't recognize and scan further to
1697 make cache as complete as possible. However, if we skip
1698 prologue, we'll stop immediately on unrecognized
1700 unrecognized_pc
= current_pc
;
1708 if (unrecognized_pc
== 0)
1709 unrecognized_pc
= current_pc
;
1713 int framereg
, framesize
;
1715 /* The frame size is just the distance from the frame register
1716 to the original stack pointer. */
1717 if (pv_is_register (regs
[ARM_FP_REGNUM
], ARM_SP_REGNUM
))
1719 /* Frame pointer is fp. */
1720 framereg
= ARM_FP_REGNUM
;
1721 framesize
= -regs
[ARM_FP_REGNUM
].k
;
1725 /* Try the stack pointer... this is a bit desperate. */
1726 framereg
= ARM_SP_REGNUM
;
1727 framesize
= -regs
[ARM_SP_REGNUM
].k
;
1730 cache
->framereg
= framereg
;
1731 cache
->framesize
= framesize
;
1733 for (regno
= 0; regno
< ARM_FPS_REGNUM
; regno
++)
1734 if (stack
.find_reg (gdbarch
, regno
, &offset
))
1735 cache
->saved_regs
[regno
].addr
= offset
;
1739 fprintf_unfiltered (gdb_stdlog
, "Prologue scan stopped at %s\n",
1740 paddress (gdbarch
, unrecognized_pc
));
1742 return unrecognized_pc
;
1746 arm_scan_prologue (struct frame_info
*this_frame
,
1747 struct arm_prologue_cache
*cache
)
1749 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
1750 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
1751 CORE_ADDR prologue_start
, prologue_end
;
1752 CORE_ADDR prev_pc
= get_frame_pc (this_frame
);
1753 CORE_ADDR block_addr
= get_frame_address_in_block (this_frame
);
1755 /* Assume there is no frame until proven otherwise. */
1756 cache
->framereg
= ARM_SP_REGNUM
;
1757 cache
->framesize
= 0;
1759 /* Check for Thumb prologue. */
1760 if (arm_frame_is_thumb (this_frame
))
1762 thumb_scan_prologue (gdbarch
, prev_pc
, block_addr
, cache
);
1766 /* Find the function prologue. If we can't find the function in
1767 the symbol table, peek in the stack frame to find the PC. */
1768 if (find_pc_partial_function (block_addr
, NULL
, &prologue_start
,
1771 /* One way to find the end of the prologue (which works well
1772 for unoptimized code) is to do the following:
1774 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1777 prologue_end = prev_pc;
1778 else if (sal.end < prologue_end)
1779 prologue_end = sal.end;
1781 This mechanism is very accurate so long as the optimizer
1782 doesn't move any instructions from the function body into the
1783 prologue. If this happens, sal.end will be the last
1784 instruction in the first hunk of prologue code just before
1785 the first instruction that the scheduler has moved from
1786 the body to the prologue.
1788 In order to make sure that we scan all of the prologue
1789 instructions, we use a slightly less accurate mechanism which
1790 may scan more than necessary. To help compensate for this
1791 lack of accuracy, the prologue scanning loop below contains
1792 several clauses which'll cause the loop to terminate early if
1793 an implausible prologue instruction is encountered.
1799 is a suitable endpoint since it accounts for the largest
1800 possible prologue plus up to five instructions inserted by
1803 if (prologue_end
> prologue_start
+ 64)
1805 prologue_end
= prologue_start
+ 64; /* See above. */
1810 /* We have no symbol information. Our only option is to assume this
1811 function has a standard stack frame and the normal frame register.
1812 Then, we can find the value of our frame pointer on entrance to
1813 the callee (or at the present moment if this is the innermost frame).
1814 The value stored there should be the address of the stmfd + 8. */
1815 CORE_ADDR frame_loc
;
1816 ULONGEST return_value
;
1818 /* AAPCS does not use a frame register, so we can abort here. */
1819 if (gdbarch_tdep (gdbarch
)->arm_abi
== ARM_ABI_AAPCS
)
1822 frame_loc
= get_frame_register_unsigned (this_frame
, ARM_FP_REGNUM
);
1823 if (!safe_read_memory_unsigned_integer (frame_loc
, 4, byte_order
,
1828 prologue_start
= gdbarch_addr_bits_remove
1829 (gdbarch
, return_value
) - 8;
1830 prologue_end
= prologue_start
+ 64; /* See above. */
1834 if (prev_pc
< prologue_end
)
1835 prologue_end
= prev_pc
;
1837 arm_analyze_prologue (gdbarch
, prologue_start
, prologue_end
, cache
);
1840 static struct arm_prologue_cache
*
1841 arm_make_prologue_cache (struct frame_info
*this_frame
)
1844 struct arm_prologue_cache
*cache
;
1845 CORE_ADDR unwound_fp
;
1847 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
1848 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
1850 arm_scan_prologue (this_frame
, cache
);
1852 unwound_fp
= get_frame_register_unsigned (this_frame
, cache
->framereg
);
1853 if (unwound_fp
== 0)
1856 cache
->prev_sp
= unwound_fp
+ cache
->framesize
;
1858 /* Calculate actual addresses of saved registers using offsets
1859 determined by arm_scan_prologue. */
1860 for (reg
= 0; reg
< gdbarch_num_regs (get_frame_arch (this_frame
)); reg
++)
1861 if (trad_frame_addr_p (cache
->saved_regs
, reg
))
1862 cache
->saved_regs
[reg
].addr
+= cache
->prev_sp
;
1867 /* Implementation of the stop_reason hook for arm_prologue frames. */
1869 static enum unwind_stop_reason
1870 arm_prologue_unwind_stop_reason (struct frame_info
*this_frame
,
1873 struct arm_prologue_cache
*cache
;
1876 if (*this_cache
== NULL
)
1877 *this_cache
= arm_make_prologue_cache (this_frame
);
1878 cache
= (struct arm_prologue_cache
*) *this_cache
;
1880 /* This is meant to halt the backtrace at "_start". */
1881 pc
= get_frame_pc (this_frame
);
1882 if (pc
<= gdbarch_tdep (get_frame_arch (this_frame
))->lowest_pc
)
1883 return UNWIND_OUTERMOST
;
1885 /* If we've hit a wall, stop. */
1886 if (cache
->prev_sp
== 0)
1887 return UNWIND_OUTERMOST
;
1889 return UNWIND_NO_REASON
;
1892 /* Our frame ID for a normal frame is the current function's starting PC
1893 and the caller's SP when we were called. */
1896 arm_prologue_this_id (struct frame_info
*this_frame
,
1898 struct frame_id
*this_id
)
1900 struct arm_prologue_cache
*cache
;
1904 if (*this_cache
== NULL
)
1905 *this_cache
= arm_make_prologue_cache (this_frame
);
1906 cache
= (struct arm_prologue_cache
*) *this_cache
;
1908 /* Use function start address as part of the frame ID. If we cannot
1909 identify the start address (due to missing symbol information),
1910 fall back to just using the current PC. */
1911 pc
= get_frame_pc (this_frame
);
1912 func
= get_frame_func (this_frame
);
1916 id
= frame_id_build (cache
->prev_sp
, func
);
1920 static struct value
*
1921 arm_prologue_prev_register (struct frame_info
*this_frame
,
1925 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
1926 struct arm_prologue_cache
*cache
;
1928 if (*this_cache
== NULL
)
1929 *this_cache
= arm_make_prologue_cache (this_frame
);
1930 cache
= (struct arm_prologue_cache
*) *this_cache
;
1932 /* If we are asked to unwind the PC, then we need to return the LR
1933 instead. The prologue may save PC, but it will point into this
1934 frame's prologue, not the next frame's resume location. Also
1935 strip the saved T bit. A valid LR may have the low bit set, but
1936 a valid PC never does. */
1937 if (prev_regnum
== ARM_PC_REGNUM
)
1941 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
1942 return frame_unwind_got_constant (this_frame
, prev_regnum
,
1943 arm_addr_bits_remove (gdbarch
, lr
));
1946 /* SP is generally not saved to the stack, but this frame is
1947 identified by the next frame's stack pointer at the time of the call.
1948 The value was already reconstructed into PREV_SP. */
1949 if (prev_regnum
== ARM_SP_REGNUM
)
1950 return frame_unwind_got_constant (this_frame
, prev_regnum
, cache
->prev_sp
);
1952 /* The CPSR may have been changed by the call instruction and by the
1953 called function. The only bit we can reconstruct is the T bit,
1954 by checking the low bit of LR as of the call. This is a reliable
1955 indicator of Thumb-ness except for some ARM v4T pre-interworking
1956 Thumb code, which could get away with a clear low bit as long as
1957 the called function did not use bx. Guess that all other
1958 bits are unchanged; the condition flags are presumably lost,
1959 but the processor status is likely valid. */
1960 if (prev_regnum
== ARM_PS_REGNUM
)
1963 ULONGEST t_bit
= arm_psr_thumb_bit (gdbarch
);
1965 cpsr
= get_frame_register_unsigned (this_frame
, prev_regnum
);
1966 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
1967 if (IS_THUMB_ADDR (lr
))
1971 return frame_unwind_got_constant (this_frame
, prev_regnum
, cpsr
);
1974 return trad_frame_get_prev_register (this_frame
, cache
->saved_regs
,
1978 struct frame_unwind arm_prologue_unwind
= {
1980 arm_prologue_unwind_stop_reason
,
1981 arm_prologue_this_id
,
1982 arm_prologue_prev_register
,
1984 default_frame_sniffer
1987 /* Maintain a list of ARM exception table entries per objfile, similar to the
1988 list of mapping symbols. We only cache entries for standard ARM-defined
1989 personality routines; the cache will contain only the frame unwinding
1990 instructions associated with the entry (not the descriptors). */
1992 struct arm_exidx_entry
1997 bool operator< (const arm_exidx_entry
&other
) const
1999 return addr
< other
.addr
;
2003 struct arm_exidx_data
2005 std::vector
<std::vector
<arm_exidx_entry
>> section_maps
;
2008 static const struct objfile_key
<arm_exidx_data
> arm_exidx_data_key
;
2010 static struct obj_section
*
2011 arm_obj_section_from_vma (struct objfile
*objfile
, bfd_vma vma
)
2013 struct obj_section
*osect
;
2015 ALL_OBJFILE_OSECTIONS (objfile
, osect
)
2016 if (bfd_get_section_flags (objfile
->obfd
,
2017 osect
->the_bfd_section
) & SEC_ALLOC
)
2019 bfd_vma start
, size
;
2020 start
= bfd_get_section_vma (objfile
->obfd
, osect
->the_bfd_section
);
2021 size
= bfd_get_section_size (osect
->the_bfd_section
);
2023 if (start
<= vma
&& vma
< start
+ size
)
2030 /* Parse contents of exception table and exception index sections
2031 of OBJFILE, and fill in the exception table entry cache.
2033 For each entry that refers to a standard ARM-defined personality
2034 routine, extract the frame unwinding instructions (from either
2035 the index or the table section). The unwinding instructions
2037 - extracting them from the rest of the table data
2038 - converting to host endianness
2039 - appending the implicit 0xb0 ("Finish") code
2041 The extracted and normalized instructions are stored for later
2042 retrieval by the arm_find_exidx_entry routine. */
2045 arm_exidx_new_objfile (struct objfile
*objfile
)
2047 struct arm_exidx_data
*data
;
2048 asection
*exidx
, *extab
;
2049 bfd_vma exidx_vma
= 0, extab_vma
= 0;
2052 /* If we've already touched this file, do nothing. */
2053 if (!objfile
|| arm_exidx_data_key
.get (objfile
) != NULL
)
2056 /* Read contents of exception table and index. */
2057 exidx
= bfd_get_section_by_name (objfile
->obfd
, ELF_STRING_ARM_unwind
);
2058 gdb::byte_vector exidx_data
;
2061 exidx_vma
= bfd_section_vma (objfile
->obfd
, exidx
);
2062 exidx_data
.resize (bfd_get_section_size (exidx
));
2064 if (!bfd_get_section_contents (objfile
->obfd
, exidx
,
2065 exidx_data
.data (), 0,
2066 exidx_data
.size ()))
2070 extab
= bfd_get_section_by_name (objfile
->obfd
, ".ARM.extab");
2071 gdb::byte_vector extab_data
;
2074 extab_vma
= bfd_section_vma (objfile
->obfd
, extab
);
2075 extab_data
.resize (bfd_get_section_size (extab
));
2077 if (!bfd_get_section_contents (objfile
->obfd
, extab
,
2078 extab_data
.data (), 0,
2079 extab_data
.size ()))
2083 /* Allocate exception table data structure. */
2084 data
= arm_exidx_data_key
.emplace (objfile
);
2085 data
->section_maps
.resize (objfile
->obfd
->section_count
);
2087 /* Fill in exception table. */
2088 for (i
= 0; i
< exidx_data
.size () / 8; i
++)
2090 struct arm_exidx_entry new_exidx_entry
;
2091 bfd_vma idx
= bfd_h_get_32 (objfile
->obfd
, exidx_data
.data () + i
* 8);
2092 bfd_vma val
= bfd_h_get_32 (objfile
->obfd
,
2093 exidx_data
.data () + i
* 8 + 4);
2094 bfd_vma addr
= 0, word
= 0;
2095 int n_bytes
= 0, n_words
= 0;
2096 struct obj_section
*sec
;
2097 gdb_byte
*entry
= NULL
;
2099 /* Extract address of start of function. */
2100 idx
= ((idx
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2101 idx
+= exidx_vma
+ i
* 8;
2103 /* Find section containing function and compute section offset. */
2104 sec
= arm_obj_section_from_vma (objfile
, idx
);
2107 idx
-= bfd_get_section_vma (objfile
->obfd
, sec
->the_bfd_section
);
2109 /* Determine address of exception table entry. */
2112 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2114 else if ((val
& 0xff000000) == 0x80000000)
2116 /* Exception table entry embedded in .ARM.exidx
2117 -- must be short form. */
2121 else if (!(val
& 0x80000000))
2123 /* Exception table entry in .ARM.extab. */
2124 addr
= ((val
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2125 addr
+= exidx_vma
+ i
* 8 + 4;
2127 if (addr
>= extab_vma
&& addr
+ 4 <= extab_vma
+ extab_data
.size ())
2129 word
= bfd_h_get_32 (objfile
->obfd
,
2130 extab_data
.data () + addr
- extab_vma
);
2133 if ((word
& 0xff000000) == 0x80000000)
2138 else if ((word
& 0xff000000) == 0x81000000
2139 || (word
& 0xff000000) == 0x82000000)
2143 n_words
= ((word
>> 16) & 0xff);
2145 else if (!(word
& 0x80000000))
2148 struct obj_section
*pers_sec
;
2149 int gnu_personality
= 0;
2151 /* Custom personality routine. */
2152 pers
= ((word
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2153 pers
= UNMAKE_THUMB_ADDR (pers
+ addr
- 4);
2155 /* Check whether we've got one of the variants of the
2156 GNU personality routines. */
2157 pers_sec
= arm_obj_section_from_vma (objfile
, pers
);
2160 static const char *personality
[] =
2162 "__gcc_personality_v0",
2163 "__gxx_personality_v0",
2164 "__gcj_personality_v0",
2165 "__gnu_objc_personality_v0",
2169 CORE_ADDR pc
= pers
+ obj_section_offset (pers_sec
);
2172 for (k
= 0; personality
[k
]; k
++)
2173 if (lookup_minimal_symbol_by_pc_name
2174 (pc
, personality
[k
], objfile
))
2176 gnu_personality
= 1;
2181 /* If so, the next word contains a word count in the high
2182 byte, followed by the same unwind instructions as the
2183 pre-defined forms. */
2185 && addr
+ 4 <= extab_vma
+ extab_data
.size ())
2187 word
= bfd_h_get_32 (objfile
->obfd
,
2189 + addr
- extab_vma
));
2192 n_words
= ((word
>> 24) & 0xff);
2198 /* Sanity check address. */
2200 if (addr
< extab_vma
2201 || addr
+ 4 * n_words
> extab_vma
+ extab_data
.size ())
2202 n_words
= n_bytes
= 0;
2204 /* The unwind instructions reside in WORD (only the N_BYTES least
2205 significant bytes are valid), followed by N_WORDS words in the
2206 extab section starting at ADDR. */
2207 if (n_bytes
|| n_words
)
2210 = (gdb_byte
*) obstack_alloc (&objfile
->objfile_obstack
,
2211 n_bytes
+ n_words
* 4 + 1);
2214 *p
++ = (gdb_byte
) ((word
>> (8 * n_bytes
)) & 0xff);
2218 word
= bfd_h_get_32 (objfile
->obfd
,
2219 extab_data
.data () + addr
- extab_vma
);
2222 *p
++ = (gdb_byte
) ((word
>> 24) & 0xff);
2223 *p
++ = (gdb_byte
) ((word
>> 16) & 0xff);
2224 *p
++ = (gdb_byte
) ((word
>> 8) & 0xff);
2225 *p
++ = (gdb_byte
) (word
& 0xff);
2228 /* Implied "Finish" to terminate the list. */
2232 /* Push entry onto vector. They are guaranteed to always
2233 appear in order of increasing addresses. */
2234 new_exidx_entry
.addr
= idx
;
2235 new_exidx_entry
.entry
= entry
;
2236 data
->section_maps
[sec
->the_bfd_section
->index
].push_back
2241 /* Search for the exception table entry covering MEMADDR. If one is found,
2242 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2243 set *START to the start of the region covered by this entry. */
2246 arm_find_exidx_entry (CORE_ADDR memaddr
, CORE_ADDR
*start
)
2248 struct obj_section
*sec
;
2250 sec
= find_pc_section (memaddr
);
2253 struct arm_exidx_data
*data
;
2254 struct arm_exidx_entry map_key
= { memaddr
- obj_section_addr (sec
), 0 };
2256 data
= arm_exidx_data_key
.get (sec
->objfile
);
2259 std::vector
<arm_exidx_entry
> &map
2260 = data
->section_maps
[sec
->the_bfd_section
->index
];
2263 auto idx
= std::lower_bound (map
.begin (), map
.end (), map_key
);
2265 /* std::lower_bound finds the earliest ordered insertion
2266 point. If the following symbol starts at this exact
2267 address, we use that; otherwise, the preceding
2268 exception table entry covers this address. */
2269 if (idx
< map
.end ())
2271 if (idx
->addr
== map_key
.addr
)
2274 *start
= idx
->addr
+ obj_section_addr (sec
);
2279 if (idx
> map
.begin ())
2283 *start
= idx
->addr
+ obj_section_addr (sec
);
2293 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2294 instruction list from the ARM exception table entry ENTRY, allocate and
2295 return a prologue cache structure describing how to unwind this frame.
2297 Return NULL if the unwinding instruction list contains a "spare",
2298 "reserved" or "refuse to unwind" instruction as defined in section
2299 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2300 for the ARM Architecture" document. */
2302 static struct arm_prologue_cache
*
2303 arm_exidx_fill_cache (struct frame_info
*this_frame
, gdb_byte
*entry
)
2308 struct arm_prologue_cache
*cache
;
2309 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2310 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2316 /* Whenever we reload SP, we actually have to retrieve its
2317 actual value in the current frame. */
2320 if (trad_frame_realreg_p (cache
->saved_regs
, ARM_SP_REGNUM
))
2322 int reg
= cache
->saved_regs
[ARM_SP_REGNUM
].realreg
;
2323 vsp
= get_frame_register_unsigned (this_frame
, reg
);
2327 CORE_ADDR addr
= cache
->saved_regs
[ARM_SP_REGNUM
].addr
;
2328 vsp
= get_frame_memory_unsigned (this_frame
, addr
, 4);
2334 /* Decode next unwind instruction. */
2337 if ((insn
& 0xc0) == 0)
2339 int offset
= insn
& 0x3f;
2340 vsp
+= (offset
<< 2) + 4;
2342 else if ((insn
& 0xc0) == 0x40)
2344 int offset
= insn
& 0x3f;
2345 vsp
-= (offset
<< 2) + 4;
2347 else if ((insn
& 0xf0) == 0x80)
2349 int mask
= ((insn
& 0xf) << 8) | *entry
++;
2352 /* The special case of an all-zero mask identifies
2353 "Refuse to unwind". We return NULL to fall back
2354 to the prologue analyzer. */
2358 /* Pop registers r4..r15 under mask. */
2359 for (i
= 0; i
< 12; i
++)
2360 if (mask
& (1 << i
))
2362 cache
->saved_regs
[4 + i
].addr
= vsp
;
2366 /* Special-case popping SP -- we need to reload vsp. */
2367 if (mask
& (1 << (ARM_SP_REGNUM
- 4)))
2370 else if ((insn
& 0xf0) == 0x90)
2372 int reg
= insn
& 0xf;
2374 /* Reserved cases. */
2375 if (reg
== ARM_SP_REGNUM
|| reg
== ARM_PC_REGNUM
)
2378 /* Set SP from another register and mark VSP for reload. */
2379 cache
->saved_regs
[ARM_SP_REGNUM
] = cache
->saved_regs
[reg
];
2382 else if ((insn
& 0xf0) == 0xa0)
2384 int count
= insn
& 0x7;
2385 int pop_lr
= (insn
& 0x8) != 0;
2388 /* Pop r4..r[4+count]. */
2389 for (i
= 0; i
<= count
; i
++)
2391 cache
->saved_regs
[4 + i
].addr
= vsp
;
2395 /* If indicated by flag, pop LR as well. */
2398 cache
->saved_regs
[ARM_LR_REGNUM
].addr
= vsp
;
2402 else if (insn
== 0xb0)
2404 /* We could only have updated PC by popping into it; if so, it
2405 will show up as address. Otherwise, copy LR into PC. */
2406 if (!trad_frame_addr_p (cache
->saved_regs
, ARM_PC_REGNUM
))
2407 cache
->saved_regs
[ARM_PC_REGNUM
]
2408 = cache
->saved_regs
[ARM_LR_REGNUM
];
2413 else if (insn
== 0xb1)
2415 int mask
= *entry
++;
2418 /* All-zero mask and mask >= 16 is "spare". */
2419 if (mask
== 0 || mask
>= 16)
2422 /* Pop r0..r3 under mask. */
2423 for (i
= 0; i
< 4; i
++)
2424 if (mask
& (1 << i
))
2426 cache
->saved_regs
[i
].addr
= vsp
;
2430 else if (insn
== 0xb2)
2432 ULONGEST offset
= 0;
2437 offset
|= (*entry
& 0x7f) << shift
;
2440 while (*entry
++ & 0x80);
2442 vsp
+= 0x204 + (offset
<< 2);
2444 else if (insn
== 0xb3)
2446 int start
= *entry
>> 4;
2447 int count
= (*entry
++) & 0xf;
2450 /* Only registers D0..D15 are valid here. */
2451 if (start
+ count
>= 16)
2454 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2455 for (i
= 0; i
<= count
; i
++)
2457 cache
->saved_regs
[ARM_D0_REGNUM
+ start
+ i
].addr
= vsp
;
2461 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2464 else if ((insn
& 0xf8) == 0xb8)
2466 int count
= insn
& 0x7;
2469 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2470 for (i
= 0; i
<= count
; i
++)
2472 cache
->saved_regs
[ARM_D0_REGNUM
+ 8 + i
].addr
= vsp
;
2476 /* Add an extra 4 bytes for FSTMFDX-style stack. */
2479 else if (insn
== 0xc6)
2481 int start
= *entry
>> 4;
2482 int count
= (*entry
++) & 0xf;
2485 /* Only registers WR0..WR15 are valid. */
2486 if (start
+ count
>= 16)
2489 /* Pop iwmmx registers WR[start]..WR[start+count]. */
2490 for (i
= 0; i
<= count
; i
++)
2492 cache
->saved_regs
[ARM_WR0_REGNUM
+ start
+ i
].addr
= vsp
;
2496 else if (insn
== 0xc7)
2498 int mask
= *entry
++;
2501 /* All-zero mask and mask >= 16 is "spare". */
2502 if (mask
== 0 || mask
>= 16)
2505 /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask. */
2506 for (i
= 0; i
< 4; i
++)
2507 if (mask
& (1 << i
))
2509 cache
->saved_regs
[ARM_WCGR0_REGNUM
+ i
].addr
= vsp
;
2513 else if ((insn
& 0xf8) == 0xc0)
2515 int count
= insn
& 0x7;
2518 /* Pop iwmmx registers WR[10]..WR[10+count]. */
2519 for (i
= 0; i
<= count
; i
++)
2521 cache
->saved_regs
[ARM_WR0_REGNUM
+ 10 + i
].addr
= vsp
;
2525 else if (insn
== 0xc8)
2527 int start
= *entry
>> 4;
2528 int count
= (*entry
++) & 0xf;
2531 /* Only registers D0..D31 are valid. */
2532 if (start
+ count
>= 16)
2535 /* Pop VFP double-precision registers
2536 D[16+start]..D[16+start+count]. */
2537 for (i
= 0; i
<= count
; i
++)
2539 cache
->saved_regs
[ARM_D0_REGNUM
+ 16 + start
+ i
].addr
= vsp
;
2543 else if (insn
== 0xc9)
2545 int start
= *entry
>> 4;
2546 int count
= (*entry
++) & 0xf;
2549 /* Pop VFP double-precision registers D[start]..D[start+count]. */
2550 for (i
= 0; i
<= count
; i
++)
2552 cache
->saved_regs
[ARM_D0_REGNUM
+ start
+ i
].addr
= vsp
;
2556 else if ((insn
& 0xf8) == 0xd0)
2558 int count
= insn
& 0x7;
2561 /* Pop VFP double-precision registers D[8]..D[8+count]. */
2562 for (i
= 0; i
<= count
; i
++)
2564 cache
->saved_regs
[ARM_D0_REGNUM
+ 8 + i
].addr
= vsp
;
2570 /* Everything else is "spare". */
2575 /* If we restore SP from a register, assume this was the frame register.
2576 Otherwise just fall back to SP as frame register. */
2577 if (trad_frame_realreg_p (cache
->saved_regs
, ARM_SP_REGNUM
))
2578 cache
->framereg
= cache
->saved_regs
[ARM_SP_REGNUM
].realreg
;
2580 cache
->framereg
= ARM_SP_REGNUM
;
2582 /* Determine offset to previous frame. */
2584 = vsp
- get_frame_register_unsigned (this_frame
, cache
->framereg
);
2586 /* We already got the previous SP. */
2587 cache
->prev_sp
= vsp
;
2592 /* Unwinding via ARM exception table entries. Note that the sniffer
2593 already computes a filled-in prologue cache, which is then used
2594 with the same arm_prologue_this_id and arm_prologue_prev_register
2595 routines also used for prologue-parsing based unwinding. */
2598 arm_exidx_unwind_sniffer (const struct frame_unwind
*self
,
2599 struct frame_info
*this_frame
,
2600 void **this_prologue_cache
)
2602 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
2603 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
2604 CORE_ADDR addr_in_block
, exidx_region
, func_start
;
2605 struct arm_prologue_cache
*cache
;
2608 /* See if we have an ARM exception table entry covering this address. */
2609 addr_in_block
= get_frame_address_in_block (this_frame
);
2610 entry
= arm_find_exidx_entry (addr_in_block
, &exidx_region
);
2614 /* The ARM exception table does not describe unwind information
2615 for arbitrary PC values, but is guaranteed to be correct only
2616 at call sites. We have to decide here whether we want to use
2617 ARM exception table information for this frame, or fall back
2618 to using prologue parsing. (Note that if we have DWARF CFI,
2619 this sniffer isn't even called -- CFI is always preferred.)
2621 Before we make this decision, however, we check whether we
2622 actually have *symbol* information for the current frame.
2623 If not, prologue parsing would not work anyway, so we might
2624 as well use the exception table and hope for the best. */
2625 if (find_pc_partial_function (addr_in_block
, NULL
, &func_start
, NULL
))
2629 /* If the next frame is "normal", we are at a call site in this
2630 frame, so exception information is guaranteed to be valid. */
2631 if (get_next_frame (this_frame
)
2632 && get_frame_type (get_next_frame (this_frame
)) == NORMAL_FRAME
)
2635 /* We also assume exception information is valid if we're currently
2636 blocked in a system call. The system library is supposed to
2637 ensure this, so that e.g. pthread cancellation works. */
2638 if (arm_frame_is_thumb (this_frame
))
2642 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame
) - 2,
2643 2, byte_order_for_code
, &insn
)
2644 && (insn
& 0xff00) == 0xdf00 /* svc */)
2651 if (safe_read_memory_unsigned_integer (get_frame_pc (this_frame
) - 4,
2652 4, byte_order_for_code
, &insn
)
2653 && (insn
& 0x0f000000) == 0x0f000000 /* svc */)
2657 /* Bail out if we don't know that exception information is valid. */
2661 /* The ARM exception index does not mark the *end* of the region
2662 covered by the entry, and some functions will not have any entry.
2663 To correctly recognize the end of the covered region, the linker
2664 should have inserted dummy records with a CANTUNWIND marker.
2666 Unfortunately, current versions of GNU ld do not reliably do
2667 this, and thus we may have found an incorrect entry above.
2668 As a (temporary) sanity check, we only use the entry if it
2669 lies *within* the bounds of the function. Note that this check
2670 might reject perfectly valid entries that just happen to cover
2671 multiple functions; therefore this check ought to be removed
2672 once the linker is fixed. */
2673 if (func_start
> exidx_region
)
2677 /* Decode the list of unwinding instructions into a prologue cache.
2678 Note that this may fail due to e.g. a "refuse to unwind" code. */
2679 cache
= arm_exidx_fill_cache (this_frame
, entry
);
2683 *this_prologue_cache
= cache
;
2687 struct frame_unwind arm_exidx_unwind
= {
2689 default_frame_unwind_stop_reason
,
2690 arm_prologue_this_id
,
2691 arm_prologue_prev_register
,
2693 arm_exidx_unwind_sniffer
2696 static struct arm_prologue_cache
*
2697 arm_make_epilogue_frame_cache (struct frame_info
*this_frame
)
2699 struct arm_prologue_cache
*cache
;
2702 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2703 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2705 /* Still rely on the offset calculated from prologue. */
2706 arm_scan_prologue (this_frame
, cache
);
2708 /* Since we are in epilogue, the SP has been restored. */
2709 cache
->prev_sp
= get_frame_register_unsigned (this_frame
, ARM_SP_REGNUM
);
2711 /* Calculate actual addresses of saved registers using offsets
2712 determined by arm_scan_prologue. */
2713 for (reg
= 0; reg
< gdbarch_num_regs (get_frame_arch (this_frame
)); reg
++)
2714 if (trad_frame_addr_p (cache
->saved_regs
, reg
))
2715 cache
->saved_regs
[reg
].addr
+= cache
->prev_sp
;
2720 /* Implementation of function hook 'this_id' in
2721 'struct frame_uwnind' for epilogue unwinder. */
2724 arm_epilogue_frame_this_id (struct frame_info
*this_frame
,
2726 struct frame_id
*this_id
)
2728 struct arm_prologue_cache
*cache
;
2731 if (*this_cache
== NULL
)
2732 *this_cache
= arm_make_epilogue_frame_cache (this_frame
);
2733 cache
= (struct arm_prologue_cache
*) *this_cache
;
2735 /* Use function start address as part of the frame ID. If we cannot
2736 identify the start address (due to missing symbol information),
2737 fall back to just using the current PC. */
2738 pc
= get_frame_pc (this_frame
);
2739 func
= get_frame_func (this_frame
);
2743 (*this_id
) = frame_id_build (cache
->prev_sp
, pc
);
2746 /* Implementation of function hook 'prev_register' in
2747 'struct frame_uwnind' for epilogue unwinder. */
2749 static struct value
*
2750 arm_epilogue_frame_prev_register (struct frame_info
*this_frame
,
2751 void **this_cache
, int regnum
)
2753 if (*this_cache
== NULL
)
2754 *this_cache
= arm_make_epilogue_frame_cache (this_frame
);
2756 return arm_prologue_prev_register (this_frame
, this_cache
, regnum
);
2759 static int arm_stack_frame_destroyed_p_1 (struct gdbarch
*gdbarch
,
2761 static int thumb_stack_frame_destroyed_p (struct gdbarch
*gdbarch
,
2764 /* Implementation of function hook 'sniffer' in
2765 'struct frame_uwnind' for epilogue unwinder. */
2768 arm_epilogue_frame_sniffer (const struct frame_unwind
*self
,
2769 struct frame_info
*this_frame
,
2770 void **this_prologue_cache
)
2772 if (frame_relative_level (this_frame
) == 0)
2774 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
2775 CORE_ADDR pc
= get_frame_pc (this_frame
);
2777 if (arm_frame_is_thumb (this_frame
))
2778 return thumb_stack_frame_destroyed_p (gdbarch
, pc
);
2780 return arm_stack_frame_destroyed_p_1 (gdbarch
, pc
);
2786 /* Frame unwinder from epilogue. */
2788 static const struct frame_unwind arm_epilogue_frame_unwind
=
2791 default_frame_unwind_stop_reason
,
2792 arm_epilogue_frame_this_id
,
2793 arm_epilogue_frame_prev_register
,
2795 arm_epilogue_frame_sniffer
,
2798 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2799 trampoline, return the target PC. Otherwise return 0.
2801 void call0a (char c, short s, int i, long l) {}
2805 (*pointer_to_call0a) (c, s, i, l);
2808 Instead of calling a stub library function _call_via_xx (xx is
2809 the register name), GCC may inline the trampoline in the object
2810 file as below (register r2 has the address of call0a).
2813 .type main, %function
2822 The trampoline 'bx r2' doesn't belong to main. */
2825 arm_skip_bx_reg (struct frame_info
*frame
, CORE_ADDR pc
)
2827 /* The heuristics of recognizing such trampoline is that FRAME is
2828 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2829 if (arm_frame_is_thumb (frame
))
2833 if (target_read_memory (pc
, buf
, 2) == 0)
2835 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
2836 enum bfd_endian byte_order_for_code
2837 = gdbarch_byte_order_for_code (gdbarch
);
2839 = extract_unsigned_integer (buf
, 2, byte_order_for_code
);
2841 if ((insn
& 0xff80) == 0x4700) /* bx <Rm> */
2844 = get_frame_register_unsigned (frame
, bits (insn
, 3, 6));
2846 /* Clear the LSB so that gdb core sets step-resume
2847 breakpoint at the right address. */
2848 return UNMAKE_THUMB_ADDR (dest
);
2856 static struct arm_prologue_cache
*
2857 arm_make_stub_cache (struct frame_info
*this_frame
)
2859 struct arm_prologue_cache
*cache
;
2861 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2862 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2864 cache
->prev_sp
= get_frame_register_unsigned (this_frame
, ARM_SP_REGNUM
);
2869 /* Our frame ID for a stub frame is the current SP and LR. */
2872 arm_stub_this_id (struct frame_info
*this_frame
,
2874 struct frame_id
*this_id
)
2876 struct arm_prologue_cache
*cache
;
2878 if (*this_cache
== NULL
)
2879 *this_cache
= arm_make_stub_cache (this_frame
);
2880 cache
= (struct arm_prologue_cache
*) *this_cache
;
2882 *this_id
= frame_id_build (cache
->prev_sp
, get_frame_pc (this_frame
));
2886 arm_stub_unwind_sniffer (const struct frame_unwind
*self
,
2887 struct frame_info
*this_frame
,
2888 void **this_prologue_cache
)
2890 CORE_ADDR addr_in_block
;
2892 CORE_ADDR pc
, start_addr
;
2895 addr_in_block
= get_frame_address_in_block (this_frame
);
2896 pc
= get_frame_pc (this_frame
);
2897 if (in_plt_section (addr_in_block
)
2898 /* We also use the stub winder if the target memory is unreadable
2899 to avoid having the prologue unwinder trying to read it. */
2900 || target_read_memory (pc
, dummy
, 4) != 0)
2903 if (find_pc_partial_function (pc
, &name
, &start_addr
, NULL
) == 0
2904 && arm_skip_bx_reg (this_frame
, pc
) != 0)
2910 struct frame_unwind arm_stub_unwind
= {
2912 default_frame_unwind_stop_reason
,
2914 arm_prologue_prev_register
,
2916 arm_stub_unwind_sniffer
2919 /* Put here the code to store, into CACHE->saved_regs, the addresses
2920 of the saved registers of frame described by THIS_FRAME. CACHE is
2923 static struct arm_prologue_cache
*
2924 arm_m_exception_cache (struct frame_info
*this_frame
)
2926 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
2927 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
2928 struct arm_prologue_cache
*cache
;
2929 CORE_ADDR unwound_sp
;
2932 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2933 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2935 unwound_sp
= get_frame_register_unsigned (this_frame
,
2938 /* The hardware saves eight 32-bit words, comprising xPSR,
2939 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
2940 "B1.5.6 Exception entry behavior" in
2941 "ARMv7-M Architecture Reference Manual". */
2942 cache
->saved_regs
[0].addr
= unwound_sp
;
2943 cache
->saved_regs
[1].addr
= unwound_sp
+ 4;
2944 cache
->saved_regs
[2].addr
= unwound_sp
+ 8;
2945 cache
->saved_regs
[3].addr
= unwound_sp
+ 12;
2946 cache
->saved_regs
[12].addr
= unwound_sp
+ 16;
2947 cache
->saved_regs
[14].addr
= unwound_sp
+ 20;
2948 cache
->saved_regs
[15].addr
= unwound_sp
+ 24;
2949 cache
->saved_regs
[ARM_PS_REGNUM
].addr
= unwound_sp
+ 28;
2951 /* If bit 9 of the saved xPSR is set, then there is a four-byte
2952 aligner between the top of the 32-byte stack frame and the
2953 previous context's stack pointer. */
2954 cache
->prev_sp
= unwound_sp
+ 32;
2955 if (safe_read_memory_integer (unwound_sp
+ 28, 4, byte_order
, &xpsr
)
2956 && (xpsr
& (1 << 9)) != 0)
2957 cache
->prev_sp
+= 4;
2962 /* Implementation of function hook 'this_id' in
2963 'struct frame_uwnind'. */
2966 arm_m_exception_this_id (struct frame_info
*this_frame
,
2968 struct frame_id
*this_id
)
2970 struct arm_prologue_cache
*cache
;
2972 if (*this_cache
== NULL
)
2973 *this_cache
= arm_m_exception_cache (this_frame
);
2974 cache
= (struct arm_prologue_cache
*) *this_cache
;
2976 /* Our frame ID for a stub frame is the current SP and LR. */
2977 *this_id
= frame_id_build (cache
->prev_sp
,
2978 get_frame_pc (this_frame
));
2981 /* Implementation of function hook 'prev_register' in
2982 'struct frame_uwnind'. */
2984 static struct value
*
2985 arm_m_exception_prev_register (struct frame_info
*this_frame
,
2989 struct arm_prologue_cache
*cache
;
2991 if (*this_cache
== NULL
)
2992 *this_cache
= arm_m_exception_cache (this_frame
);
2993 cache
= (struct arm_prologue_cache
*) *this_cache
;
2995 /* The value was already reconstructed into PREV_SP. */
2996 if (prev_regnum
== ARM_SP_REGNUM
)
2997 return frame_unwind_got_constant (this_frame
, prev_regnum
,
3000 return trad_frame_get_prev_register (this_frame
, cache
->saved_regs
,
3004 /* Implementation of function hook 'sniffer' in
3005 'struct frame_uwnind'. */
3008 arm_m_exception_unwind_sniffer (const struct frame_unwind
*self
,
3009 struct frame_info
*this_frame
,
3010 void **this_prologue_cache
)
3012 CORE_ADDR this_pc
= get_frame_pc (this_frame
);
3014 /* No need to check is_m; this sniffer is only registered for
3015 M-profile architectures. */
3017 /* Check if exception frame returns to a magic PC value. */
3018 return arm_m_addr_is_magic (this_pc
);
3021 /* Frame unwinder for M-profile exceptions. */
3023 struct frame_unwind arm_m_exception_unwind
=
3026 default_frame_unwind_stop_reason
,
3027 arm_m_exception_this_id
,
3028 arm_m_exception_prev_register
,
3030 arm_m_exception_unwind_sniffer
3034 arm_normal_frame_base (struct frame_info
*this_frame
, void **this_cache
)
3036 struct arm_prologue_cache
*cache
;
3038 if (*this_cache
== NULL
)
3039 *this_cache
= arm_make_prologue_cache (this_frame
);
3040 cache
= (struct arm_prologue_cache
*) *this_cache
;
3042 return cache
->prev_sp
- cache
->framesize
;
3045 struct frame_base arm_normal_base
= {
3046 &arm_prologue_unwind
,
3047 arm_normal_frame_base
,
3048 arm_normal_frame_base
,
3049 arm_normal_frame_base
3052 static struct value
*
3053 arm_dwarf2_prev_register (struct frame_info
*this_frame
, void **this_cache
,
3056 struct gdbarch
* gdbarch
= get_frame_arch (this_frame
);
3058 ULONGEST t_bit
= arm_psr_thumb_bit (gdbarch
);
3063 /* The PC is normally copied from the return column, which
3064 describes saves of LR. However, that version may have an
3065 extra bit set to indicate Thumb state. The bit is not
3067 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
3068 return frame_unwind_got_constant (this_frame
, regnum
,
3069 arm_addr_bits_remove (gdbarch
, lr
));
3072 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3073 cpsr
= get_frame_register_unsigned (this_frame
, regnum
);
3074 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
3075 if (IS_THUMB_ADDR (lr
))
3079 return frame_unwind_got_constant (this_frame
, regnum
, cpsr
);
3082 internal_error (__FILE__
, __LINE__
,
3083 _("Unexpected register %d"), regnum
);
3088 arm_dwarf2_frame_init_reg (struct gdbarch
*gdbarch
, int regnum
,
3089 struct dwarf2_frame_state_reg
*reg
,
3090 struct frame_info
*this_frame
)
3096 reg
->how
= DWARF2_FRAME_REG_FN
;
3097 reg
->loc
.fn
= arm_dwarf2_prev_register
;
3100 reg
->how
= DWARF2_FRAME_REG_CFA
;
3105 /* Implement the stack_frame_destroyed_p gdbarch method. */
3108 thumb_stack_frame_destroyed_p (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3110 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
3111 unsigned int insn
, insn2
;
3112 int found_return
= 0, found_stack_adjust
= 0;
3113 CORE_ADDR func_start
, func_end
;
3117 if (!find_pc_partial_function (pc
, NULL
, &func_start
, &func_end
))
3120 /* The epilogue is a sequence of instructions along the following lines:
3122 - add stack frame size to SP or FP
3123 - [if frame pointer used] restore SP from FP
3124 - restore registers from SP [may include PC]
3125 - a return-type instruction [if PC wasn't already restored]
3127 In a first pass, we scan forward from the current PC and verify the
3128 instructions we find as compatible with this sequence, ending in a
3131 However, this is not sufficient to distinguish indirect function calls
3132 within a function from indirect tail calls in the epilogue in some cases.
3133 Therefore, if we didn't already find any SP-changing instruction during
3134 forward scan, we add a backward scanning heuristic to ensure we actually
3135 are in the epilogue. */
3138 while (scan_pc
< func_end
&& !found_return
)
3140 if (target_read_memory (scan_pc
, buf
, 2))
3144 insn
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3146 if ((insn
& 0xff80) == 0x4700) /* bx <Rm> */
3148 else if (insn
== 0x46f7) /* mov pc, lr */
3150 else if (thumb_instruction_restores_sp (insn
))
3152 if ((insn
& 0xff00) == 0xbd00) /* pop <registers, PC> */
3155 else if (thumb_insn_size (insn
) == 4) /* 32-bit Thumb-2 instruction */
3157 if (target_read_memory (scan_pc
, buf
, 2))
3161 insn2
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3163 if (insn
== 0xe8bd) /* ldm.w sp!, <registers> */
3165 if (insn2
& 0x8000) /* <registers> include PC. */
3168 else if (insn
== 0xf85d /* ldr.w <Rt>, [sp], #4 */
3169 && (insn2
& 0x0fff) == 0x0b04)
3171 if ((insn2
& 0xf000) == 0xf000) /* <Rt> is PC. */
3174 else if ((insn
& 0xffbf) == 0xecbd /* vldm sp!, <list> */
3175 && (insn2
& 0x0e00) == 0x0a00)
3187 /* Since any instruction in the epilogue sequence, with the possible
3188 exception of return itself, updates the stack pointer, we need to
3189 scan backwards for at most one instruction. Try either a 16-bit or
3190 a 32-bit instruction. This is just a heuristic, so we do not worry
3191 too much about false positives. */
3193 if (pc
- 4 < func_start
)
3195 if (target_read_memory (pc
- 4, buf
, 4))
3198 insn
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
3199 insn2
= extract_unsigned_integer (buf
+ 2, 2, byte_order_for_code
);
3201 if (thumb_instruction_restores_sp (insn2
))
3202 found_stack_adjust
= 1;
3203 else if (insn
== 0xe8bd) /* ldm.w sp!, <registers> */
3204 found_stack_adjust
= 1;
3205 else if (insn
== 0xf85d /* ldr.w <Rt>, [sp], #4 */
3206 && (insn2
& 0x0fff) == 0x0b04)
3207 found_stack_adjust
= 1;
3208 else if ((insn
& 0xffbf) == 0xecbd /* vldm sp!, <list> */
3209 && (insn2
& 0x0e00) == 0x0a00)
3210 found_stack_adjust
= 1;
3212 return found_stack_adjust
;
3216 arm_stack_frame_destroyed_p_1 (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3218 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
3221 CORE_ADDR func_start
, func_end
;
3223 if (!find_pc_partial_function (pc
, NULL
, &func_start
, &func_end
))
3226 /* We are in the epilogue if the previous instruction was a stack
3227 adjustment and the next instruction is a possible return (bx, mov
3228 pc, or pop). We could have to scan backwards to find the stack
3229 adjustment, or forwards to find the return, but this is a decent
3230 approximation. First scan forwards. */
3233 insn
= read_memory_unsigned_integer (pc
, 4, byte_order_for_code
);
3234 if (bits (insn
, 28, 31) != INST_NV
)
3236 if ((insn
& 0x0ffffff0) == 0x012fff10)
3239 else if ((insn
& 0x0ffffff0) == 0x01a0f000)
3242 else if ((insn
& 0x0fff0000) == 0x08bd0000
3243 && (insn
& 0x0000c000) != 0)
3244 /* POP (LDMIA), including PC or LR. */
3251 /* Scan backwards. This is just a heuristic, so do not worry about
3252 false positives from mode changes. */
3254 if (pc
< func_start
+ 4)
3257 insn
= read_memory_unsigned_integer (pc
- 4, 4, byte_order_for_code
);
3258 if (arm_instruction_restores_sp (insn
))
3264 /* Implement the stack_frame_destroyed_p gdbarch method. */
3267 arm_stack_frame_destroyed_p (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
3269 if (arm_pc_is_thumb (gdbarch
, pc
))
3270 return thumb_stack_frame_destroyed_p (gdbarch
, pc
);
3272 return arm_stack_frame_destroyed_p_1 (gdbarch
, pc
);
3275 /* When arguments must be pushed onto the stack, they go on in reverse
3276 order. The code below implements a FILO (stack) to do this. */
3281 struct stack_item
*prev
;
3285 static struct stack_item
*
3286 push_stack_item (struct stack_item
*prev
, const gdb_byte
*contents
, int len
)
3288 struct stack_item
*si
;
3289 si
= XNEW (struct stack_item
);
3290 si
->data
= (gdb_byte
*) xmalloc (len
);
3293 memcpy (si
->data
, contents
, len
);
3297 static struct stack_item
*
3298 pop_stack_item (struct stack_item
*si
)
3300 struct stack_item
*dead
= si
;
3307 /* Implement the gdbarch type alignment method, overrides the generic
3308 alignment algorithm for anything that is arm specific. */
3311 arm_type_align (gdbarch
*gdbarch
, struct type
*t
)
3313 t
= check_typedef (t
);
3314 if (TYPE_CODE (t
) == TYPE_CODE_ARRAY
&& TYPE_VECTOR (t
))
3316 /* Use the natural alignment for vector types (the same for
3317 scalar type), but the maximum alignment is 64-bit. */
3318 if (TYPE_LENGTH (t
) > 8)
3321 return TYPE_LENGTH (t
);
3324 /* Allow the common code to calculate the alignment. */
/* Possible base types for a candidate for passing and returning in
   VFP registers.  */

enum arm_vfp_cprc_base_type
{
  VFP_CPRC_UNKNOWN,
  VFP_CPRC_SINGLE,
  VFP_CPRC_DOUBLE,
  VFP_CPRC_VEC64,
  VFP_CPRC_VEC128
};
3340 /* The length of one element of base type B. */
3343 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b
)
3347 case VFP_CPRC_SINGLE
:
3349 case VFP_CPRC_DOUBLE
:
3351 case VFP_CPRC_VEC64
:
3353 case VFP_CPRC_VEC128
:
3356 internal_error (__FILE__
, __LINE__
, _("Invalid VFP CPRC type: %d."),
3361 /* The character ('s', 'd' or 'q') for the type of VFP register used
3362 for passing base type B. */
3365 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b
)
3369 case VFP_CPRC_SINGLE
:
3371 case VFP_CPRC_DOUBLE
:
3373 case VFP_CPRC_VEC64
:
3375 case VFP_CPRC_VEC128
:
3378 internal_error (__FILE__
, __LINE__
, _("Invalid VFP CPRC type: %d."),
3383 /* Determine whether T may be part of a candidate for passing and
3384 returning in VFP registers, ignoring the limit on the total number
3385 of components. If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
3386 classification of the first valid component found; if it is not
3387 VFP_CPRC_UNKNOWN, all components must have the same classification
3388 as *BASE_TYPE. If it is found that T contains a type not permitted
3389 for passing and returning in VFP registers, a type differently
3390 classified from *BASE_TYPE, or two types differently classified
3391 from each other, return -1, otherwise return the total number of
3392 base-type elements found (possibly 0 in an empty structure or
3393 array). Vector types are not currently supported, matching the
3394 generic AAPCS support. */
3397 arm_vfp_cprc_sub_candidate (struct type
*t
,
3398 enum arm_vfp_cprc_base_type
*base_type
)
3400 t
= check_typedef (t
);
3401 switch (TYPE_CODE (t
))
3404 switch (TYPE_LENGTH (t
))
3407 if (*base_type
== VFP_CPRC_UNKNOWN
)
3408 *base_type
= VFP_CPRC_SINGLE
;
3409 else if (*base_type
!= VFP_CPRC_SINGLE
)
3414 if (*base_type
== VFP_CPRC_UNKNOWN
)
3415 *base_type
= VFP_CPRC_DOUBLE
;
3416 else if (*base_type
!= VFP_CPRC_DOUBLE
)
3425 case TYPE_CODE_COMPLEX
:
3426 /* Arguments of complex T where T is one of the types float or
3427 double get treated as if they are implemented as:
3436 switch (TYPE_LENGTH (t
))
3439 if (*base_type
== VFP_CPRC_UNKNOWN
)
3440 *base_type
= VFP_CPRC_SINGLE
;
3441 else if (*base_type
!= VFP_CPRC_SINGLE
)
3446 if (*base_type
== VFP_CPRC_UNKNOWN
)
3447 *base_type
= VFP_CPRC_DOUBLE
;
3448 else if (*base_type
!= VFP_CPRC_DOUBLE
)
3457 case TYPE_CODE_ARRAY
:
3459 if (TYPE_VECTOR (t
))
3461 /* A 64-bit or 128-bit containerized vector type are VFP
3463 switch (TYPE_LENGTH (t
))
3466 if (*base_type
== VFP_CPRC_UNKNOWN
)
3467 *base_type
= VFP_CPRC_VEC64
;
3470 if (*base_type
== VFP_CPRC_UNKNOWN
)
3471 *base_type
= VFP_CPRC_VEC128
;
3482 count
= arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t
),
3486 if (TYPE_LENGTH (t
) == 0)
3488 gdb_assert (count
== 0);
3491 else if (count
== 0)
3493 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3494 gdb_assert ((TYPE_LENGTH (t
) % unitlen
) == 0);
3495 return TYPE_LENGTH (t
) / unitlen
;
3500 case TYPE_CODE_STRUCT
:
3505 for (i
= 0; i
< TYPE_NFIELDS (t
); i
++)
3509 if (!field_is_static (&TYPE_FIELD (t
, i
)))
3510 sub_count
= arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t
, i
),
3512 if (sub_count
== -1)
3516 if (TYPE_LENGTH (t
) == 0)
3518 gdb_assert (count
== 0);
3521 else if (count
== 0)
3523 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3524 if (TYPE_LENGTH (t
) != unitlen
* count
)
3529 case TYPE_CODE_UNION
:
3534 for (i
= 0; i
< TYPE_NFIELDS (t
); i
++)
3536 int sub_count
= arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t
, i
),
3538 if (sub_count
== -1)
3540 count
= (count
> sub_count
? count
: sub_count
);
3542 if (TYPE_LENGTH (t
) == 0)
3544 gdb_assert (count
== 0);
3547 else if (count
== 0)
3549 unitlen
= arm_vfp_cprc_unit_length (*base_type
);
3550 if (TYPE_LENGTH (t
) != unitlen
* count
)
3562 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3563 if passed to or returned from a non-variadic function with the VFP
3564 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3565 *BASE_TYPE to the base type for T and *COUNT to the number of
3566 elements of that base type before returning. */
3569 arm_vfp_call_candidate (struct type
*t
, enum arm_vfp_cprc_base_type
*base_type
,
3572 enum arm_vfp_cprc_base_type b
= VFP_CPRC_UNKNOWN
;
3573 int c
= arm_vfp_cprc_sub_candidate (t
, &b
);
3574 if (c
<= 0 || c
> 4)
3581 /* Return 1 if the VFP ABI should be used for passing arguments to and
3582 returning values from a function of type FUNC_TYPE, 0
3586 arm_vfp_abi_for_function (struct gdbarch
*gdbarch
, struct type
*func_type
)
3588 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
3589 /* Variadic functions always use the base ABI. Assume that functions
3590 without debug info are not variadic. */
3591 if (func_type
&& TYPE_VARARGS (check_typedef (func_type
)))
3593 /* The VFP ABI is only supported as a variant of AAPCS. */
3594 if (tdep
->arm_abi
!= ARM_ABI_AAPCS
)
3596 return gdbarch_tdep (gdbarch
)->fp_model
== ARM_FLOAT_VFP
;
3599 /* We currently only support passing parameters in integer registers, which
3600 conforms with GCC's default model, and VFP argument passing following
3601 the VFP variant of AAPCS. Several other variants exist and
3602 we should probably support some of them based on the selected ABI. */
3605 arm_push_dummy_call (struct gdbarch
*gdbarch
, struct value
*function
,
3606 struct regcache
*regcache
, CORE_ADDR bp_addr
, int nargs
,
3607 struct value
**args
, CORE_ADDR sp
,
3608 function_call_return_method return_method
,
3609 CORE_ADDR struct_addr
)
3611 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
3615 struct stack_item
*si
= NULL
;
3618 unsigned vfp_regs_free
= (1 << 16) - 1;
3620 /* Determine the type of this function and whether the VFP ABI
3622 ftype
= check_typedef (value_type (function
));
3623 if (TYPE_CODE (ftype
) == TYPE_CODE_PTR
)
3624 ftype
= check_typedef (TYPE_TARGET_TYPE (ftype
));
3625 use_vfp_abi
= arm_vfp_abi_for_function (gdbarch
, ftype
);
3627 /* Set the return address. For the ARM, the return breakpoint is
3628 always at BP_ADDR. */
3629 if (arm_pc_is_thumb (gdbarch
, bp_addr
))
3631 regcache_cooked_write_unsigned (regcache
, ARM_LR_REGNUM
, bp_addr
);
3633 /* Walk through the list of args and determine how large a temporary
3634 stack is required. Need to take care here as structs may be
3635 passed on the stack, and we have to push them. */
3638 argreg
= ARM_A1_REGNUM
;
3641 /* The struct_return pointer occupies the first parameter
3642 passing register. */
3643 if (return_method
== return_method_struct
)
3646 fprintf_unfiltered (gdb_stdlog
, "struct return in %s = %s\n",
3647 gdbarch_register_name (gdbarch
, argreg
),
3648 paddress (gdbarch
, struct_addr
));
3649 regcache_cooked_write_unsigned (regcache
, argreg
, struct_addr
);
3653 for (argnum
= 0; argnum
< nargs
; argnum
++)
3656 struct type
*arg_type
;
3657 struct type
*target_type
;
3658 enum type_code typecode
;
3659 const bfd_byte
*val
;
3661 enum arm_vfp_cprc_base_type vfp_base_type
;
3663 int may_use_core_reg
= 1;
3665 arg_type
= check_typedef (value_type (args
[argnum
]));
3666 len
= TYPE_LENGTH (arg_type
);
3667 target_type
= TYPE_TARGET_TYPE (arg_type
);
3668 typecode
= TYPE_CODE (arg_type
);
3669 val
= value_contents (args
[argnum
]);
3671 align
= type_align (arg_type
);
3672 /* Round alignment up to a whole number of words. */
3673 align
= (align
+ ARM_INT_REGISTER_SIZE
- 1)
3674 & ~(ARM_INT_REGISTER_SIZE
- 1);
3675 /* Different ABIs have different maximum alignments. */
3676 if (gdbarch_tdep (gdbarch
)->arm_abi
== ARM_ABI_APCS
)
3678 /* The APCS ABI only requires word alignment. */
3679 align
= ARM_INT_REGISTER_SIZE
;
3683 /* The AAPCS requires at most doubleword alignment. */
3684 if (align
> ARM_INT_REGISTER_SIZE
* 2)
3685 align
= ARM_INT_REGISTER_SIZE
* 2;
3689 && arm_vfp_call_candidate (arg_type
, &vfp_base_type
,
3697 /* Because this is a CPRC it cannot go in a core register or
3698 cause a core register to be skipped for alignment.
3699 Either it goes in VFP registers and the rest of this loop
3700 iteration is skipped for this argument, or it goes on the
3701 stack (and the stack alignment code is correct for this
3703 may_use_core_reg
= 0;
3705 unit_length
= arm_vfp_cprc_unit_length (vfp_base_type
);
3706 shift
= unit_length
/ 4;
3707 mask
= (1 << (shift
* vfp_base_count
)) - 1;
3708 for (regno
= 0; regno
< 16; regno
+= shift
)
3709 if (((vfp_regs_free
>> regno
) & mask
) == mask
)
3718 vfp_regs_free
&= ~(mask
<< regno
);
3719 reg_scaled
= regno
/ shift
;
3720 reg_char
= arm_vfp_cprc_reg_char (vfp_base_type
);
3721 for (i
= 0; i
< vfp_base_count
; i
++)
3725 if (reg_char
== 'q')
3726 arm_neon_quad_write (gdbarch
, regcache
, reg_scaled
+ i
,
3727 val
+ i
* unit_length
);
3730 xsnprintf (name_buf
, sizeof (name_buf
), "%c%d",
3731 reg_char
, reg_scaled
+ i
);
3732 regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
3734 regcache
->cooked_write (regnum
, val
+ i
* unit_length
);
3741 /* This CPRC could not go in VFP registers, so all VFP
3742 registers are now marked as used. */
3747 /* Push stack padding for doubleword alignment. */
3748 if (nstack
& (align
- 1))
3750 si
= push_stack_item (si
, val
, ARM_INT_REGISTER_SIZE
);
3751 nstack
+= ARM_INT_REGISTER_SIZE
;
3754 /* Doubleword aligned quantities must go in even register pairs. */
3755 if (may_use_core_reg
3756 && argreg
<= ARM_LAST_ARG_REGNUM
3757 && align
> ARM_INT_REGISTER_SIZE
3761 /* If the argument is a pointer to a function, and it is a
3762 Thumb function, create a LOCAL copy of the value and set
3763 the THUMB bit in it. */
3764 if (TYPE_CODE_PTR
== typecode
3765 && target_type
!= NULL
3766 && TYPE_CODE_FUNC
== TYPE_CODE (check_typedef (target_type
)))
3768 CORE_ADDR regval
= extract_unsigned_integer (val
, len
, byte_order
);
3769 if (arm_pc_is_thumb (gdbarch
, regval
))
3771 bfd_byte
*copy
= (bfd_byte
*) alloca (len
);
3772 store_unsigned_integer (copy
, len
, byte_order
,
3773 MAKE_THUMB_ADDR (regval
));
3778 /* Copy the argument to general registers or the stack in
3779 register-sized pieces. Large arguments are split between
3780 registers and stack. */
3783 int partial_len
= len
< ARM_INT_REGISTER_SIZE
3784 ? len
: ARM_INT_REGISTER_SIZE
;
3786 = extract_unsigned_integer (val
, partial_len
, byte_order
);
3788 if (may_use_core_reg
&& argreg
<= ARM_LAST_ARG_REGNUM
)
3790 /* The argument is being passed in a general purpose
3792 if (byte_order
== BFD_ENDIAN_BIG
)
3793 regval
<<= (ARM_INT_REGISTER_SIZE
- partial_len
) * 8;
3795 fprintf_unfiltered (gdb_stdlog
, "arg %d in %s = 0x%s\n",
3797 gdbarch_register_name
3799 phex (regval
, ARM_INT_REGISTER_SIZE
));
3800 regcache_cooked_write_unsigned (regcache
, argreg
, regval
);
3805 gdb_byte buf
[ARM_INT_REGISTER_SIZE
];
3807 memset (buf
, 0, sizeof (buf
));
3808 store_unsigned_integer (buf
, partial_len
, byte_order
, regval
);
3810 /* Push the arguments onto the stack. */
3812 fprintf_unfiltered (gdb_stdlog
, "arg %d @ sp + %d\n",
3814 si
= push_stack_item (si
, buf
, ARM_INT_REGISTER_SIZE
);
3815 nstack
+= ARM_INT_REGISTER_SIZE
;
3822 /* If we have an odd number of words to push, then decrement the stack
3823 by one word now, so first stack argument will be dword aligned. */
3830 write_memory (sp
, si
->data
, si
->len
);
3831 si
= pop_stack_item (si
);
3834 /* Finally, update the SP register. */
3835 regcache_cooked_write_unsigned (regcache
, ARM_SP_REGNUM
, sp
);
3841 /* Always align the frame to an 8-byte boundary. This is required on
3842 some platforms and harmless on the rest. */
3845 arm_frame_align (struct gdbarch
*gdbarch
, CORE_ADDR sp
)
3847 /* Align the stack to eight bytes. */
3848 return sp
& ~ (CORE_ADDR
) 7;
3852 print_fpu_flags (struct ui_file
*file
, int flags
)
3854 if (flags
& (1 << 0))
3855 fputs_filtered ("IVO ", file
);
3856 if (flags
& (1 << 1))
3857 fputs_filtered ("DVZ ", file
);
3858 if (flags
& (1 << 2))
3859 fputs_filtered ("OFL ", file
);
3860 if (flags
& (1 << 3))
3861 fputs_filtered ("UFL ", file
);
3862 if (flags
& (1 << 4))
3863 fputs_filtered ("INX ", file
);
3864 fputc_filtered ('\n', file
);
3867 /* Print interesting information about the floating point processor
3868 (if present) or emulator. */
3870 arm_print_float_info (struct gdbarch
*gdbarch
, struct ui_file
*file
,
3871 struct frame_info
*frame
, const char *args
)
3873 unsigned long status
= get_frame_register_unsigned (frame
, ARM_FPS_REGNUM
);
3876 type
= (status
>> 24) & 127;
3877 if (status
& (1 << 31))
3878 fprintf_filtered (file
, _("Hardware FPU type %d\n"), type
);
3880 fprintf_filtered (file
, _("Software FPU type %d\n"), type
);
3881 /* i18n: [floating point unit] mask */
3882 fputs_filtered (_("mask: "), file
);
3883 print_fpu_flags (file
, status
>> 16);
3884 /* i18n: [floating point unit] flags */
3885 fputs_filtered (_("flags: "), file
);
3886 print_fpu_flags (file
, status
);
3889 /* Construct the ARM extended floating point type. */
3890 static struct type
*
3891 arm_ext_type (struct gdbarch
*gdbarch
)
3893 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
3895 if (!tdep
->arm_ext_type
)
3897 = arch_float_type (gdbarch
, -1, "builtin_type_arm_ext",
3898 floatformats_arm_ext
);
3900 return tdep
->arm_ext_type
;
3903 static struct type
*
3904 arm_neon_double_type (struct gdbarch
*gdbarch
)
3906 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
3908 if (tdep
->neon_double_type
== NULL
)
3910 struct type
*t
, *elem
;
3912 t
= arch_composite_type (gdbarch
, "__gdb_builtin_type_neon_d",
3914 elem
= builtin_type (gdbarch
)->builtin_uint8
;
3915 append_composite_type_field (t
, "u8", init_vector_type (elem
, 8));
3916 elem
= builtin_type (gdbarch
)->builtin_uint16
;
3917 append_composite_type_field (t
, "u16", init_vector_type (elem
, 4));
3918 elem
= builtin_type (gdbarch
)->builtin_uint32
;
3919 append_composite_type_field (t
, "u32", init_vector_type (elem
, 2));
3920 elem
= builtin_type (gdbarch
)->builtin_uint64
;
3921 append_composite_type_field (t
, "u64", elem
);
3922 elem
= builtin_type (gdbarch
)->builtin_float
;
3923 append_composite_type_field (t
, "f32", init_vector_type (elem
, 2));
3924 elem
= builtin_type (gdbarch
)->builtin_double
;
3925 append_composite_type_field (t
, "f64", elem
);
3927 TYPE_VECTOR (t
) = 1;
3928 TYPE_NAME (t
) = "neon_d";
3929 tdep
->neon_double_type
= t
;
3932 return tdep
->neon_double_type
;
3935 /* FIXME: The vector types are not correctly ordered on big-endian
3936 targets. Just as s0 is the low bits of d0, d0[0] is also the low
3937 bits of d0 - regardless of what unit size is being held in d0. So
3938 the offset of the first uint8 in d0 is 7, but the offset of the
3939 first float is 4. This code works as-is for little-endian
3942 static struct type
*
3943 arm_neon_quad_type (struct gdbarch
*gdbarch
)
3945 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
3947 if (tdep
->neon_quad_type
== NULL
)
3949 struct type
*t
, *elem
;
3951 t
= arch_composite_type (gdbarch
, "__gdb_builtin_type_neon_q",
3953 elem
= builtin_type (gdbarch
)->builtin_uint8
;
3954 append_composite_type_field (t
, "u8", init_vector_type (elem
, 16));
3955 elem
= builtin_type (gdbarch
)->builtin_uint16
;
3956 append_composite_type_field (t
, "u16", init_vector_type (elem
, 8));
3957 elem
= builtin_type (gdbarch
)->builtin_uint32
;
3958 append_composite_type_field (t
, "u32", init_vector_type (elem
, 4));
3959 elem
= builtin_type (gdbarch
)->builtin_uint64
;
3960 append_composite_type_field (t
, "u64", init_vector_type (elem
, 2));
3961 elem
= builtin_type (gdbarch
)->builtin_float
;
3962 append_composite_type_field (t
, "f32", init_vector_type (elem
, 4));
3963 elem
= builtin_type (gdbarch
)->builtin_double
;
3964 append_composite_type_field (t
, "f64", init_vector_type (elem
, 2));
3966 TYPE_VECTOR (t
) = 1;
3967 TYPE_NAME (t
) = "neon_q";
3968 tdep
->neon_quad_type
= t
;
3971 return tdep
->neon_quad_type
;
3974 /* Return the GDB type object for the "standard" data type of data in
3977 static struct type
*
3978 arm_register_type (struct gdbarch
*gdbarch
, int regnum
)
3980 int num_regs
= gdbarch_num_regs (gdbarch
);
3982 if (gdbarch_tdep (gdbarch
)->have_vfp_pseudos
3983 && regnum
>= num_regs
&& regnum
< num_regs
+ 32)
3984 return builtin_type (gdbarch
)->builtin_float
;
3986 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
3987 && regnum
>= num_regs
+ 32 && regnum
< num_regs
+ 32 + 16)
3988 return arm_neon_quad_type (gdbarch
);
3990 /* If the target description has register information, we are only
3991 in this function so that we can override the types of
3992 double-precision registers for NEON. */
3993 if (tdesc_has_registers (gdbarch_target_desc (gdbarch
)))
3995 struct type
*t
= tdesc_register_type (gdbarch
, regnum
);
3997 if (regnum
>= ARM_D0_REGNUM
&& regnum
< ARM_D0_REGNUM
+ 32
3998 && TYPE_CODE (t
) == TYPE_CODE_FLT
3999 && gdbarch_tdep (gdbarch
)->have_neon
)
4000 return arm_neon_double_type (gdbarch
);
4005 if (regnum
>= ARM_F0_REGNUM
&& regnum
< ARM_F0_REGNUM
+ NUM_FREGS
)
4007 if (!gdbarch_tdep (gdbarch
)->have_fpa_registers
)
4008 return builtin_type (gdbarch
)->builtin_void
;
4010 return arm_ext_type (gdbarch
);
4012 else if (regnum
== ARM_SP_REGNUM
)
4013 return builtin_type (gdbarch
)->builtin_data_ptr
;
4014 else if (regnum
== ARM_PC_REGNUM
)
4015 return builtin_type (gdbarch
)->builtin_func_ptr
;
4016 else if (regnum
>= ARRAY_SIZE (arm_register_names
))
4017 /* These registers are only supported on targets which supply
4018 an XML description. */
4019 return builtin_type (gdbarch
)->builtin_int0
;
4021 return builtin_type (gdbarch
)->builtin_uint32
;
4024 /* Map a DWARF register REGNUM onto the appropriate GDB register
4028 arm_dwarf_reg_to_regnum (struct gdbarch
*gdbarch
, int reg
)
4030 /* Core integer regs. */
4031 if (reg
>= 0 && reg
<= 15)
4034 /* Legacy FPA encoding. These were once used in a way which
4035 overlapped with VFP register numbering, so their use is
4036 discouraged, but GDB doesn't support the ARM toolchain
4037 which used them for VFP. */
4038 if (reg
>= 16 && reg
<= 23)
4039 return ARM_F0_REGNUM
+ reg
- 16;
4041 /* New assignments for the FPA registers. */
4042 if (reg
>= 96 && reg
<= 103)
4043 return ARM_F0_REGNUM
+ reg
- 96;
4045 /* WMMX register assignments. */
4046 if (reg
>= 104 && reg
<= 111)
4047 return ARM_WCGR0_REGNUM
+ reg
- 104;
4049 if (reg
>= 112 && reg
<= 127)
4050 return ARM_WR0_REGNUM
+ reg
- 112;
4052 if (reg
>= 192 && reg
<= 199)
4053 return ARM_WC0_REGNUM
+ reg
- 192;
4055 /* VFP v2 registers. A double precision value is actually
4056 in d1 rather than s2, but the ABI only defines numbering
4057 for the single precision registers. This will "just work"
4058 in GDB for little endian targets (we'll read eight bytes,
4059 starting in s0 and then progressing to s1), but will be
4060 reversed on big endian targets with VFP. This won't
4061 be a problem for the new Neon quad registers; you're supposed
4062 to use DW_OP_piece for those. */
4063 if (reg
>= 64 && reg
<= 95)
4067 xsnprintf (name_buf
, sizeof (name_buf
), "s%d", reg
- 64);
4068 return user_reg_map_name_to_regnum (gdbarch
, name_buf
,
4072 /* VFP v3 / Neon registers. This range is also used for VFP v2
4073 registers, except that it now describes d0 instead of s0. */
4074 if (reg
>= 256 && reg
<= 287)
4078 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", reg
- 256);
4079 return user_reg_map_name_to_regnum (gdbarch
, name_buf
,
4086 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4088 arm_register_sim_regno (struct gdbarch
*gdbarch
, int regnum
)
4091 gdb_assert (reg
>= 0 && reg
< gdbarch_num_regs (gdbarch
));
4093 if (regnum
>= ARM_WR0_REGNUM
&& regnum
<= ARM_WR15_REGNUM
)
4094 return regnum
- ARM_WR0_REGNUM
+ SIM_ARM_IWMMXT_COP0R0_REGNUM
;
4096 if (regnum
>= ARM_WC0_REGNUM
&& regnum
<= ARM_WC7_REGNUM
)
4097 return regnum
- ARM_WC0_REGNUM
+ SIM_ARM_IWMMXT_COP1R0_REGNUM
;
4099 if (regnum
>= ARM_WCGR0_REGNUM
&& regnum
<= ARM_WCGR7_REGNUM
)
4100 return regnum
- ARM_WCGR0_REGNUM
+ SIM_ARM_IWMMXT_COP1R8_REGNUM
;
4102 if (reg
< NUM_GREGS
)
4103 return SIM_ARM_R0_REGNUM
+ reg
;
4106 if (reg
< NUM_FREGS
)
4107 return SIM_ARM_FP0_REGNUM
+ reg
;
4110 if (reg
< NUM_SREGS
)
4111 return SIM_ARM_FPS_REGNUM
+ reg
;
4114 internal_error (__FILE__
, __LINE__
, _("Bad REGNUM %d"), regnum
);
4117 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
4118 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
4119 NULL if an error occurs. BUF is freed. */
4122 extend_buffer_earlier (gdb_byte
*buf
, CORE_ADDR endaddr
,
4123 int old_len
, int new_len
)
4126 int bytes_to_read
= new_len
- old_len
;
4128 new_buf
= (gdb_byte
*) xmalloc (new_len
);
4129 memcpy (new_buf
+ bytes_to_read
, buf
, old_len
);
4131 if (target_read_code (endaddr
- new_len
, new_buf
, bytes_to_read
) != 0)
4139 /* An IT block is at most the 2-byte IT instruction followed by
4140 four 4-byte instructions. The furthest back we must search to
4141 find an IT block that affects the current instruction is thus
4142 2 + 3 * 4 == 14 bytes. */
4143 #define MAX_IT_BLOCK_PREFIX 14
4145 /* Use a quick scan if there are more than this many bytes of
4147 #define IT_SCAN_THRESHOLD 32
4149 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
4150 A breakpoint in an IT block may not be hit, depending on the
4153 arm_adjust_breakpoint_address (struct gdbarch
*gdbarch
, CORE_ADDR bpaddr
)
4157 CORE_ADDR boundary
, func_start
;
4159 enum bfd_endian order
= gdbarch_byte_order_for_code (gdbarch
);
4160 int i
, any
, last_it
, last_it_count
;
4162 /* If we are using BKPT breakpoints, none of this is necessary. */
4163 if (gdbarch_tdep (gdbarch
)->thumb2_breakpoint
== NULL
)
4166 /* ARM mode does not have this problem. */
4167 if (!arm_pc_is_thumb (gdbarch
, bpaddr
))
4170 /* We are setting a breakpoint in Thumb code that could potentially
4171 contain an IT block. The first step is to find how much Thumb
4172 code there is; we do not need to read outside of known Thumb
4174 map_type
= arm_find_mapping_symbol (bpaddr
, &boundary
);
4176 /* Thumb-2 code must have mapping symbols to have a chance. */
4179 bpaddr
= gdbarch_addr_bits_remove (gdbarch
, bpaddr
);
4181 if (find_pc_partial_function (bpaddr
, NULL
, &func_start
, NULL
)
4182 && func_start
> boundary
)
4183 boundary
= func_start
;
4185 /* Search for a candidate IT instruction. We have to do some fancy
4186 footwork to distinguish a real IT instruction from the second
4187 half of a 32-bit instruction, but there is no need for that if
4188 there's no candidate. */
4189 buf_len
= std::min (bpaddr
- boundary
, (CORE_ADDR
) MAX_IT_BLOCK_PREFIX
);
4191 /* No room for an IT instruction. */
4194 buf
= (gdb_byte
*) xmalloc (buf_len
);
4195 if (target_read_code (bpaddr
- buf_len
, buf
, buf_len
) != 0)
4198 for (i
= 0; i
< buf_len
; i
+= 2)
4200 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
4201 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
4214 /* OK, the code bytes before this instruction contain at least one
4215 halfword which resembles an IT instruction. We know that it's
4216 Thumb code, but there are still two possibilities. Either the
4217 halfword really is an IT instruction, or it is the second half of
4218 a 32-bit Thumb instruction. The only way we can tell is to
4219 scan forwards from a known instruction boundary. */
4220 if (bpaddr
- boundary
> IT_SCAN_THRESHOLD
)
4224 /* There's a lot of code before this instruction. Start with an
4225 optimistic search; it's easy to recognize halfwords that can
4226 not be the start of a 32-bit instruction, and use that to
4227 lock on to the instruction boundaries. */
4228 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
, IT_SCAN_THRESHOLD
);
4231 buf_len
= IT_SCAN_THRESHOLD
;
4234 for (i
= 0; i
< buf_len
- sizeof (buf
) && ! definite
; i
+= 2)
4236 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
4237 if (thumb_insn_size (inst1
) == 2)
4244 /* At this point, if DEFINITE, BUF[I] is the first place we
4245 are sure that we know the instruction boundaries, and it is far
4246 enough from BPADDR that we could not miss an IT instruction
4247 affecting BPADDR. If ! DEFINITE, give up - start from a
4251 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
,
4255 buf_len
= bpaddr
- boundary
;
4261 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
, bpaddr
- boundary
);
4264 buf_len
= bpaddr
- boundary
;
4268 /* Scan forwards. Find the last IT instruction before BPADDR. */
4273 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
4275 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
4280 else if (inst1
& 0x0002)
4282 else if (inst1
& 0x0004)
4287 i
+= thumb_insn_size (inst1
);
4293 /* There wasn't really an IT instruction after all. */
4296 if (last_it_count
< 1)
4297 /* It was too far away. */
4300 /* This really is a trouble spot. Move the breakpoint to the IT
4302 return bpaddr
- buf_len
+ last_it
;
4305 /* ARM displaced stepping support.
4307 Generally ARM displaced stepping works as follows:
4309 1. When an instruction is to be single-stepped, it is first decoded by
4310 arm_process_displaced_insn. Depending on the type of instruction, it is
4311 then copied to a scratch location, possibly in a modified form. The
4312 copy_* set of functions performs such modification, as necessary. A
4313 breakpoint is placed after the modified instruction in the scratch space
4314 to return control to GDB. Note in particular that instructions which
4315 modify the PC will no longer do so after modification.
4317 2. The instruction is single-stepped, by setting the PC to the scratch
4318 location address, and resuming. Control returns to GDB when the
4321 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
4322 function used for the current instruction. This function's job is to
4323 put the CPU/memory state back to what it would have been if the
4324 instruction had been executed unmodified in its original location. */
4326 /* NOP instruction (mov r0, r0). */
4327 #define ARM_NOP 0xe1a00000
4328 #define THUMB_NOP 0x4600
4330 /* Helper for register reads for displaced stepping. In particular, this
4331 returns the PC as it would be seen by the instruction at its original
4335 displaced_read_reg (struct regcache
*regs
, arm_displaced_step_closure
*dsc
,
4339 CORE_ADDR from
= dsc
->insn_addr
;
4341 if (regno
== ARM_PC_REGNUM
)
4343 /* Compute pipeline offset:
4344 - When executing an ARM instruction, PC reads as the address of the
4345 current instruction plus 8.
4346 - When executing a Thumb instruction, PC reads as the address of the
4347 current instruction plus 4. */
4354 if (debug_displaced
)
4355 fprintf_unfiltered (gdb_stdlog
, "displaced: read pc value %.8lx\n",
4356 (unsigned long) from
);
4357 return (ULONGEST
) from
;
4361 regcache_cooked_read_unsigned (regs
, regno
, &ret
);
4362 if (debug_displaced
)
4363 fprintf_unfiltered (gdb_stdlog
, "displaced: read r%d value %.8lx\n",
4364 regno
, (unsigned long) ret
);
4370 displaced_in_arm_mode (struct regcache
*regs
)
4373 ULONGEST t_bit
= arm_psr_thumb_bit (regs
->arch ());
4375 regcache_cooked_read_unsigned (regs
, ARM_PS_REGNUM
, &ps
);
4377 return (ps
& t_bit
) == 0;
4380 /* Write to the PC as from a branch instruction. */
4383 branch_write_pc (struct regcache
*regs
, arm_displaced_step_closure
*dsc
,
4387 /* Note: If bits 0/1 are set, this branch would be unpredictable for
4388 architecture versions < 6. */
4389 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
4390 val
& ~(ULONGEST
) 0x3);
4392 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
4393 val
& ~(ULONGEST
) 0x1);
4396 /* Write to the PC as from a branch-exchange instruction. */
4399 bx_write_pc (struct regcache
*regs
, ULONGEST val
)
4402 ULONGEST t_bit
= arm_psr_thumb_bit (regs
->arch ());
4404 regcache_cooked_read_unsigned (regs
, ARM_PS_REGNUM
, &ps
);
4408 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
| t_bit
);
4409 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
& 0xfffffffe);
4411 else if ((val
& 2) == 0)
4413 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
& ~t_bit
);
4414 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
);
4418 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
4419 mode, align dest to 4 bytes). */
4420 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
4421 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
& ~t_bit
);
4422 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
& 0xfffffffc);
4426 /* Write to the PC as if from a load instruction. */
4429 load_write_pc (struct regcache
*regs
, arm_displaced_step_closure
*dsc
,
4432 if (DISPLACED_STEPPING_ARCH_VERSION
>= 5)
4433 bx_write_pc (regs
, val
);
4435 branch_write_pc (regs
, dsc
, val
);
4438 /* Write to the PC as if from an ALU instruction. */
4441 alu_write_pc (struct regcache
*regs
, arm_displaced_step_closure
*dsc
,
4444 if (DISPLACED_STEPPING_ARCH_VERSION
>= 7 && !dsc
->is_thumb
)
4445 bx_write_pc (regs
, val
);
4447 branch_write_pc (regs
, dsc
, val
);
4450 /* Helper for writing to registers for displaced stepping. Writing to the PC
4451 has varying effects depending on the instruction which does the write:
4452 this is controlled by the WRITE_PC argument. */
4455 displaced_write_reg (struct regcache
*regs
, arm_displaced_step_closure
*dsc
,
4456 int regno
, ULONGEST val
, enum pc_write_style write_pc
)
4458 if (regno
== ARM_PC_REGNUM
)
4460 if (debug_displaced
)
4461 fprintf_unfiltered (gdb_stdlog
, "displaced: writing pc %.8lx\n",
4462 (unsigned long) val
);
4465 case BRANCH_WRITE_PC
:
4466 branch_write_pc (regs
, dsc
, val
);
4470 bx_write_pc (regs
, val
);
4474 load_write_pc (regs
, dsc
, val
);
4478 alu_write_pc (regs
, dsc
, val
);
4481 case CANNOT_WRITE_PC
:
4482 warning (_("Instruction wrote to PC in an unexpected way when "
4483 "single-stepping"));
4487 internal_error (__FILE__
, __LINE__
,
4488 _("Invalid argument to displaced_write_reg"));
4491 dsc
->wrote_to_pc
= 1;
4495 if (debug_displaced
)
4496 fprintf_unfiltered (gdb_stdlog
, "displaced: writing r%d value %.8lx\n",
4497 regno
, (unsigned long) val
);
4498 regcache_cooked_write_unsigned (regs
, regno
, val
);
4502 /* This function is used to concisely determine if an instruction INSN
4503 references PC. Register fields of interest in INSN should have the
4504 corresponding fields of BITMASK set to 0b1111. The function
4505 returns return 1 if any of these fields in INSN reference the PC
4506 (also 0b1111, r15), else it returns 0. */
4509 insn_references_pc (uint32_t insn
, uint32_t bitmask
)
4511 uint32_t lowbit
= 1;
4513 while (bitmask
!= 0)
4517 for (; lowbit
&& (bitmask
& lowbit
) == 0; lowbit
<<= 1)
4523 mask
= lowbit
* 0xf;
4525 if ((insn
& mask
) == mask
)
4534 /* The simplest copy function. Many instructions have the same effect no
4535 matter what address they are executed at: in those cases, use this. */
4538 arm_copy_unmodified (struct gdbarch
*gdbarch
, uint32_t insn
,
4539 const char *iname
, arm_displaced_step_closure
*dsc
)
4541 if (debug_displaced
)
4542 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.8lx, "
4543 "opcode/class '%s' unmodified\n", (unsigned long) insn
,
4546 dsc
->modinsn
[0] = insn
;
4552 thumb_copy_unmodified_32bit (struct gdbarch
*gdbarch
, uint16_t insn1
,
4553 uint16_t insn2
, const char *iname
,
4554 arm_displaced_step_closure
*dsc
)
4556 if (debug_displaced
)
4557 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x %.4x, "
4558 "opcode/class '%s' unmodified\n", insn1
, insn2
,
4561 dsc
->modinsn
[0] = insn1
;
4562 dsc
->modinsn
[1] = insn2
;
4568 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
4571 thumb_copy_unmodified_16bit (struct gdbarch
*gdbarch
, uint16_t insn
,
4573 arm_displaced_step_closure
*dsc
)
4575 if (debug_displaced
)
4576 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x, "
4577 "opcode/class '%s' unmodified\n", insn
,
4580 dsc
->modinsn
[0] = insn
;
4585 /* Preload instructions with immediate offset. */
4588 cleanup_preload (struct gdbarch
*gdbarch
,
4589 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
4591 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
4592 if (!dsc
->u
.preload
.immed
)
4593 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
4597 install_preload (struct gdbarch
*gdbarch
, struct regcache
*regs
,
4598 arm_displaced_step_closure
*dsc
, unsigned int rn
)
4601 /* Preload instructions:
4603 {pli/pld} [rn, #+/-imm]
4605 {pli/pld} [r0, #+/-imm]. */
4607 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
4608 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
4609 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
4610 dsc
->u
.preload
.immed
= 1;
4612 dsc
->cleanup
= &cleanup_preload
;
4616 arm_copy_preload (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
4617 arm_displaced_step_closure
*dsc
)
4619 unsigned int rn
= bits (insn
, 16, 19);
4621 if (!insn_references_pc (insn
, 0x000f0000ul
))
4622 return arm_copy_unmodified (gdbarch
, insn
, "preload", dsc
);
4624 if (debug_displaced
)
4625 fprintf_unfiltered (gdb_stdlog
, "displaced: copying preload insn %.8lx\n",
4626 (unsigned long) insn
);
4628 dsc
->modinsn
[0] = insn
& 0xfff0ffff;
4630 install_preload (gdbarch
, regs
, dsc
, rn
);
4636 thumb2_copy_preload (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
4637 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
4639 unsigned int rn
= bits (insn1
, 0, 3);
4640 unsigned int u_bit
= bit (insn1
, 7);
4641 int imm12
= bits (insn2
, 0, 11);
4644 if (rn
!= ARM_PC_REGNUM
)
4645 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "preload", dsc
);
4647 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
4648 PLD (literal) Encoding T1. */
4649 if (debug_displaced
)
4650 fprintf_unfiltered (gdb_stdlog
,
4651 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
4652 (unsigned int) dsc
->insn_addr
, u_bit
? '+' : '-',
4658 /* Rewrite instruction {pli/pld} PC imm12 into:
4659 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
4663 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
4665 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
4666 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
4668 pc_val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
4670 displaced_write_reg (regs
, dsc
, 0, pc_val
, CANNOT_WRITE_PC
);
4671 displaced_write_reg (regs
, dsc
, 1, imm12
, CANNOT_WRITE_PC
);
4672 dsc
->u
.preload
.immed
= 0;
4674 /* {pli/pld} [r0, r1] */
4675 dsc
->modinsn
[0] = insn1
& 0xfff0;
4676 dsc
->modinsn
[1] = 0xf001;
4679 dsc
->cleanup
= &cleanup_preload
;
4683 /* Preload instructions with register offset. */
4686 install_preload_reg(struct gdbarch
*gdbarch
, struct regcache
*regs
,
4687 arm_displaced_step_closure
*dsc
, unsigned int rn
,
4690 ULONGEST rn_val
, rm_val
;
4692 /* Preload register-offset instructions:
4694 {pli/pld} [rn, rm {, shift}]
4696 {pli/pld} [r0, r1 {, shift}]. */
4698 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
4699 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
4700 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
4701 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
4702 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
4703 displaced_write_reg (regs
, dsc
, 1, rm_val
, CANNOT_WRITE_PC
);
4704 dsc
->u
.preload
.immed
= 0;
4706 dsc
->cleanup
= &cleanup_preload
;
4710 arm_copy_preload_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
4711 struct regcache
*regs
,
4712 arm_displaced_step_closure
*dsc
)
4714 unsigned int rn
= bits (insn
, 16, 19);
4715 unsigned int rm
= bits (insn
, 0, 3);
4718 if (!insn_references_pc (insn
, 0x000f000ful
))
4719 return arm_copy_unmodified (gdbarch
, insn
, "preload reg", dsc
);
4721 if (debug_displaced
)
4722 fprintf_unfiltered (gdb_stdlog
, "displaced: copying preload insn %.8lx\n",
4723 (unsigned long) insn
);
4725 dsc
->modinsn
[0] = (insn
& 0xfff0fff0) | 0x1;
4727 install_preload_reg (gdbarch
, regs
, dsc
, rn
, rm
);
4731 /* Copy/cleanup coprocessor load and store instructions. */
4734 cleanup_copro_load_store (struct gdbarch
*gdbarch
,
4735 struct regcache
*regs
,
4736 arm_displaced_step_closure
*dsc
)
4738 ULONGEST rn_val
= displaced_read_reg (regs
, dsc
, 0);
4740 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
4742 if (dsc
->u
.ldst
.writeback
)
4743 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, LOAD_WRITE_PC
);
4747 install_copro_load_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
4748 arm_displaced_step_closure
*dsc
,
4749 int writeback
, unsigned int rn
)
4753 /* Coprocessor load/store instructions:
4755 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
4757 {stc/stc2} [r0, #+/-imm].
4759 ldc/ldc2 are handled identically. */
4761 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
4762 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
4763 /* PC should be 4-byte aligned. */
4764 rn_val
= rn_val
& 0xfffffffc;
4765 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
4767 dsc
->u
.ldst
.writeback
= writeback
;
4768 dsc
->u
.ldst
.rn
= rn
;
4770 dsc
->cleanup
= &cleanup_copro_load_store
;
4774 arm_copy_copro_load_store (struct gdbarch
*gdbarch
, uint32_t insn
,
4775 struct regcache
*regs
,
4776 arm_displaced_step_closure
*dsc
)
4778 unsigned int rn
= bits (insn
, 16, 19);
4780 if (!insn_references_pc (insn
, 0x000f0000ul
))
4781 return arm_copy_unmodified (gdbarch
, insn
, "copro load/store", dsc
);
4783 if (debug_displaced
)
4784 fprintf_unfiltered (gdb_stdlog
, "displaced: copying coprocessor "
4785 "load/store insn %.8lx\n", (unsigned long) insn
);
4787 dsc
->modinsn
[0] = insn
& 0xfff0ffff;
4789 install_copro_load_store (gdbarch
, regs
, dsc
, bit (insn
, 25), rn
);
4795 thumb2_copy_copro_load_store (struct gdbarch
*gdbarch
, uint16_t insn1
,
4796 uint16_t insn2
, struct regcache
*regs
,
4797 arm_displaced_step_closure
*dsc
)
4799 unsigned int rn
= bits (insn1
, 0, 3);
4801 if (rn
!= ARM_PC_REGNUM
)
4802 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
4803 "copro load/store", dsc
);
4805 if (debug_displaced
)
4806 fprintf_unfiltered (gdb_stdlog
, "displaced: copying coprocessor "
4807 "load/store insn %.4x%.4x\n", insn1
, insn2
);
4809 dsc
->modinsn
[0] = insn1
& 0xfff0;
4810 dsc
->modinsn
[1] = insn2
;
4813 /* This function is called for copying instruction LDC/LDC2/VLDR, which
4814 doesn't support writeback, so pass 0. */
4815 install_copro_load_store (gdbarch
, regs
, dsc
, 0, rn
);
4820 /* Clean up branch instructions (actually perform the branch, by setting
4824 cleanup_branch (struct gdbarch
*gdbarch
, struct regcache
*regs
,
4825 arm_displaced_step_closure
*dsc
)
4827 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
4828 int branch_taken
= condition_true (dsc
->u
.branch
.cond
, status
);
4829 enum pc_write_style write_pc
= dsc
->u
.branch
.exchange
4830 ? BX_WRITE_PC
: BRANCH_WRITE_PC
;
4835 if (dsc
->u
.branch
.link
)
4837 /* The value of LR should be the next insn of current one. In order
4838 not to confuse logic hanlding later insn `bx lr', if current insn mode
4839 is Thumb, the bit 0 of LR value should be set to 1. */
4840 ULONGEST next_insn_addr
= dsc
->insn_addr
+ dsc
->insn_size
;
4843 next_insn_addr
|= 0x1;
4845 displaced_write_reg (regs
, dsc
, ARM_LR_REGNUM
, next_insn_addr
,
4849 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, dsc
->u
.branch
.dest
, write_pc
);
4852 /* Copy B/BL/BLX instructions with immediate destinations. */
4855 install_b_bl_blx (struct gdbarch
*gdbarch
, struct regcache
*regs
,
4856 arm_displaced_step_closure
*dsc
,
4857 unsigned int cond
, int exchange
, int link
, long offset
)
4859 /* Implement "BL<cond> <label>" as:
4861 Preparation: cond <- instruction condition
4862 Insn: mov r0, r0 (nop)
4863 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
4865 B<cond> similar, but don't set r14 in cleanup. */
4867 dsc
->u
.branch
.cond
= cond
;
4868 dsc
->u
.branch
.link
= link
;
4869 dsc
->u
.branch
.exchange
= exchange
;
4871 dsc
->u
.branch
.dest
= dsc
->insn_addr
;
4872 if (link
&& exchange
)
4873 /* For BLX, offset is computed from the Align (PC, 4). */
4874 dsc
->u
.branch
.dest
= dsc
->u
.branch
.dest
& 0xfffffffc;
4877 dsc
->u
.branch
.dest
+= 4 + offset
;
4879 dsc
->u
.branch
.dest
+= 8 + offset
;
4881 dsc
->cleanup
= &cleanup_branch
;
4884 arm_copy_b_bl_blx (struct gdbarch
*gdbarch
, uint32_t insn
,
4885 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
4887 unsigned int cond
= bits (insn
, 28, 31);
4888 int exchange
= (cond
== 0xf);
4889 int link
= exchange
|| bit (insn
, 24);
4892 if (debug_displaced
)
4893 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %s immediate insn "
4894 "%.8lx\n", (exchange
) ? "blx" : (link
) ? "bl" : "b",
4895 (unsigned long) insn
);
4897 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
4898 then arrange the switch into Thumb mode. */
4899 offset
= (bits (insn
, 0, 23) << 2) | (bit (insn
, 24) << 1) | 1;
4901 offset
= bits (insn
, 0, 23) << 2;
4903 if (bit (offset
, 25))
4904 offset
= offset
| ~0x3ffffff;
4906 dsc
->modinsn
[0] = ARM_NOP
;
4908 install_b_bl_blx (gdbarch
, regs
, dsc
, cond
, exchange
, link
, offset
);
4913 thumb2_copy_b_bl_blx (struct gdbarch
*gdbarch
, uint16_t insn1
,
4914 uint16_t insn2
, struct regcache
*regs
,
4915 arm_displaced_step_closure
*dsc
)
4917 int link
= bit (insn2
, 14);
4918 int exchange
= link
&& !bit (insn2
, 12);
4921 int j1
= bit (insn2
, 13);
4922 int j2
= bit (insn2
, 11);
4923 int s
= sbits (insn1
, 10, 10);
4924 int i1
= !(j1
^ bit (insn1
, 10));
4925 int i2
= !(j2
^ bit (insn1
, 10));
4927 if (!link
&& !exchange
) /* B */
4929 offset
= (bits (insn2
, 0, 10) << 1);
4930 if (bit (insn2
, 12)) /* Encoding T4 */
4932 offset
|= (bits (insn1
, 0, 9) << 12)
4938 else /* Encoding T3 */
4940 offset
|= (bits (insn1
, 0, 5) << 12)
4944 cond
= bits (insn1
, 6, 9);
4949 offset
= (bits (insn1
, 0, 9) << 12);
4950 offset
|= ((i2
<< 22) | (i1
<< 23) | (s
<< 24));
4951 offset
|= exchange
?
4952 (bits (insn2
, 1, 10) << 2) : (bits (insn2
, 0, 10) << 1);
4955 if (debug_displaced
)
4956 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %s insn "
4957 "%.4x %.4x with offset %.8lx\n",
4958 link
? (exchange
) ? "blx" : "bl" : "b",
4959 insn1
, insn2
, offset
);
4961 dsc
->modinsn
[0] = THUMB_NOP
;
4963 install_b_bl_blx (gdbarch
, regs
, dsc
, cond
, exchange
, link
, offset
);
4967 /* Copy B Thumb instructions. */
4969 thumb_copy_b (struct gdbarch
*gdbarch
, uint16_t insn
,
4970 arm_displaced_step_closure
*dsc
)
4972 unsigned int cond
= 0;
4974 unsigned short bit_12_15
= bits (insn
, 12, 15);
4975 CORE_ADDR from
= dsc
->insn_addr
;
4977 if (bit_12_15
== 0xd)
4979 /* offset = SignExtend (imm8:0, 32) */
4980 offset
= sbits ((insn
<< 1), 0, 8);
4981 cond
= bits (insn
, 8, 11);
4983 else if (bit_12_15
== 0xe) /* Encoding T2 */
4985 offset
= sbits ((insn
<< 1), 0, 11);
4989 if (debug_displaced
)
4990 fprintf_unfiltered (gdb_stdlog
,
4991 "displaced: copying b immediate insn %.4x "
4992 "with offset %d\n", insn
, offset
);
4994 dsc
->u
.branch
.cond
= cond
;
4995 dsc
->u
.branch
.link
= 0;
4996 dsc
->u
.branch
.exchange
= 0;
4997 dsc
->u
.branch
.dest
= from
+ 4 + offset
;
4999 dsc
->modinsn
[0] = THUMB_NOP
;
5001 dsc
->cleanup
= &cleanup_branch
;
5006 /* Copy BX/BLX with register-specified destinations. */
5009 install_bx_blx_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5010 arm_displaced_step_closure
*dsc
, int link
,
5011 unsigned int cond
, unsigned int rm
)
5013 /* Implement {BX,BLX}<cond> <reg>" as:
5015 Preparation: cond <- instruction condition
5016 Insn: mov r0, r0 (nop)
5017 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
5019 Don't set r14 in cleanup for BX. */
5021 dsc
->u
.branch
.dest
= displaced_read_reg (regs
, dsc
, rm
);
5023 dsc
->u
.branch
.cond
= cond
;
5024 dsc
->u
.branch
.link
= link
;
5026 dsc
->u
.branch
.exchange
= 1;
5028 dsc
->cleanup
= &cleanup_branch
;
5032 arm_copy_bx_blx_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
5033 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
5035 unsigned int cond
= bits (insn
, 28, 31);
5038 int link
= bit (insn
, 5);
5039 unsigned int rm
= bits (insn
, 0, 3);
5041 if (debug_displaced
)
5042 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.8lx",
5043 (unsigned long) insn
);
5045 dsc
->modinsn
[0] = ARM_NOP
;
5047 install_bx_blx_reg (gdbarch
, regs
, dsc
, link
, cond
, rm
);
5052 thumb_copy_bx_blx_reg (struct gdbarch
*gdbarch
, uint16_t insn
,
5053 struct regcache
*regs
,
5054 arm_displaced_step_closure
*dsc
)
5056 int link
= bit (insn
, 7);
5057 unsigned int rm
= bits (insn
, 3, 6);
5059 if (debug_displaced
)
5060 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x",
5061 (unsigned short) insn
);
5063 dsc
->modinsn
[0] = THUMB_NOP
;
5065 install_bx_blx_reg (gdbarch
, regs
, dsc
, link
, INST_AL
, rm
);
5071 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
5074 cleanup_alu_imm (struct gdbarch
*gdbarch
,
5075 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
5077 ULONGEST rd_val
= displaced_read_reg (regs
, dsc
, 0);
5078 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5079 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
5080 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
5084 arm_copy_alu_imm (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
5085 arm_displaced_step_closure
*dsc
)
5087 unsigned int rn
= bits (insn
, 16, 19);
5088 unsigned int rd
= bits (insn
, 12, 15);
5089 unsigned int op
= bits (insn
, 21, 24);
5090 int is_mov
= (op
== 0xd);
5091 ULONGEST rd_val
, rn_val
;
5093 if (!insn_references_pc (insn
, 0x000ff000ul
))
5094 return arm_copy_unmodified (gdbarch
, insn
, "ALU immediate", dsc
);
5096 if (debug_displaced
)
5097 fprintf_unfiltered (gdb_stdlog
, "displaced: copying immediate %s insn "
5098 "%.8lx\n", is_mov
? "move" : "ALU",
5099 (unsigned long) insn
);
5101 /* Instruction is of form:
5103 <op><cond> rd, [rn,] #imm
5107 Preparation: tmp1, tmp2 <- r0, r1;
5109 Insn: <op><cond> r0, r1, #imm
5110 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5113 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5114 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5115 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5116 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
5117 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
5118 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
5122 dsc
->modinsn
[0] = insn
& 0xfff00fff;
5124 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x10000;
5126 dsc
->cleanup
= &cleanup_alu_imm
;
5132 thumb2_copy_alu_imm (struct gdbarch
*gdbarch
, uint16_t insn1
,
5133 uint16_t insn2
, struct regcache
*regs
,
5134 arm_displaced_step_closure
*dsc
)
5136 unsigned int op
= bits (insn1
, 5, 8);
5137 unsigned int rn
, rm
, rd
;
5138 ULONGEST rd_val
, rn_val
;
5140 rn
= bits (insn1
, 0, 3); /* Rn */
5141 rm
= bits (insn2
, 0, 3); /* Rm */
5142 rd
= bits (insn2
, 8, 11); /* Rd */
5144 /* This routine is only called for instruction MOV. */
5145 gdb_assert (op
== 0x2 && rn
== 0xf);
5147 if (rm
!= ARM_PC_REGNUM
&& rd
!= ARM_PC_REGNUM
)
5148 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "ALU imm", dsc
);
5150 if (debug_displaced
)
5151 fprintf_unfiltered (gdb_stdlog
, "displaced: copying reg %s insn %.4x%.4x\n",
5152 "ALU", insn1
, insn2
);
5154 /* Instruction is of form:
5156 <op><cond> rd, [rn,] #imm
5160 Preparation: tmp1, tmp2 <- r0, r1;
5162 Insn: <op><cond> r0, r1, #imm
5163 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
5166 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5167 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5168 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5169 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
5170 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
5171 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
5174 dsc
->modinsn
[0] = insn1
;
5175 dsc
->modinsn
[1] = ((insn2
& 0xf0f0) | 0x1);
5178 dsc
->cleanup
= &cleanup_alu_imm
;
5183 /* Copy/cleanup arithmetic/logic insns with register RHS. */
5186 cleanup_alu_reg (struct gdbarch
*gdbarch
,
5187 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
5192 rd_val
= displaced_read_reg (regs
, dsc
, 0);
5194 for (i
= 0; i
< 3; i
++)
5195 displaced_write_reg (regs
, dsc
, i
, dsc
->tmp
[i
], CANNOT_WRITE_PC
);
5197 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
5201 install_alu_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5202 arm_displaced_step_closure
*dsc
,
5203 unsigned int rd
, unsigned int rn
, unsigned int rm
)
5205 ULONGEST rd_val
, rn_val
, rm_val
;
5207 /* Instruction is of form:
5209 <op><cond> rd, [rn,] rm [, <shift>]
5213 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
5214 r0, r1, r2 <- rd, rn, rm
5215 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
5216 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
5219 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5220 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5221 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
5222 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
5223 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5224 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5225 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
5226 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
5227 displaced_write_reg (regs
, dsc
, 2, rm_val
, CANNOT_WRITE_PC
);
5230 dsc
->cleanup
= &cleanup_alu_reg
;
5234 arm_copy_alu_reg (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
5235 arm_displaced_step_closure
*dsc
)
5237 unsigned int op
= bits (insn
, 21, 24);
5238 int is_mov
= (op
== 0xd);
5240 if (!insn_references_pc (insn
, 0x000ff00ful
))
5241 return arm_copy_unmodified (gdbarch
, insn
, "ALU reg", dsc
);
5243 if (debug_displaced
)
5244 fprintf_unfiltered (gdb_stdlog
, "displaced: copying reg %s insn %.8lx\n",
5245 is_mov
? "move" : "ALU", (unsigned long) insn
);
5248 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x2;
5250 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x10002;
5252 install_alu_reg (gdbarch
, regs
, dsc
, bits (insn
, 12, 15), bits (insn
, 16, 19),
5258 thumb_copy_alu_reg (struct gdbarch
*gdbarch
, uint16_t insn
,
5259 struct regcache
*regs
,
5260 arm_displaced_step_closure
*dsc
)
5264 rm
= bits (insn
, 3, 6);
5265 rd
= (bit (insn
, 7) << 3) | bits (insn
, 0, 2);
5267 if (rd
!= ARM_PC_REGNUM
&& rm
!= ARM_PC_REGNUM
)
5268 return thumb_copy_unmodified_16bit (gdbarch
, insn
, "ALU reg", dsc
);
5270 if (debug_displaced
)
5271 fprintf_unfiltered (gdb_stdlog
, "displaced: copying ALU reg insn %.4x\n",
5272 (unsigned short) insn
);
5274 dsc
->modinsn
[0] = ((insn
& 0xff00) | 0x10);
5276 install_alu_reg (gdbarch
, regs
, dsc
, rd
, rd
, rm
);
5281 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
5284 cleanup_alu_shifted_reg (struct gdbarch
*gdbarch
,
5285 struct regcache
*regs
,
5286 arm_displaced_step_closure
*dsc
)
5288 ULONGEST rd_val
= displaced_read_reg (regs
, dsc
, 0);
5291 for (i
= 0; i
< 4; i
++)
5292 displaced_write_reg (regs
, dsc
, i
, dsc
->tmp
[i
], CANNOT_WRITE_PC
);
5294 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
5298 install_alu_shifted_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5299 arm_displaced_step_closure
*dsc
,
5300 unsigned int rd
, unsigned int rn
, unsigned int rm
,
5304 ULONGEST rd_val
, rn_val
, rm_val
, rs_val
;
5306 /* Instruction is of form:
5308 <op><cond> rd, [rn,] rm, <shift> rs
5312 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
5313 r0, r1, r2, r3 <- rd, rn, rm, rs
5314 Insn: <op><cond> r0, r1, r2, <shift> r3
5316 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
5320 for (i
= 0; i
< 4; i
++)
5321 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
5323 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
5324 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5325 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5326 rs_val
= displaced_read_reg (regs
, dsc
, rs
);
5327 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
5328 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
5329 displaced_write_reg (regs
, dsc
, 2, rm_val
, CANNOT_WRITE_PC
);
5330 displaced_write_reg (regs
, dsc
, 3, rs_val
, CANNOT_WRITE_PC
);
5332 dsc
->cleanup
= &cleanup_alu_shifted_reg
;
5336 arm_copy_alu_shifted_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
5337 struct regcache
*regs
,
5338 arm_displaced_step_closure
*dsc
)
5340 unsigned int op
= bits (insn
, 21, 24);
5341 int is_mov
= (op
== 0xd);
5342 unsigned int rd
, rn
, rm
, rs
;
5344 if (!insn_references_pc (insn
, 0x000fff0ful
))
5345 return arm_copy_unmodified (gdbarch
, insn
, "ALU shifted reg", dsc
);
5347 if (debug_displaced
)
5348 fprintf_unfiltered (gdb_stdlog
, "displaced: copying shifted reg %s insn "
5349 "%.8lx\n", is_mov
? "move" : "ALU",
5350 (unsigned long) insn
);
5352 rn
= bits (insn
, 16, 19);
5353 rm
= bits (insn
, 0, 3);
5354 rs
= bits (insn
, 8, 11);
5355 rd
= bits (insn
, 12, 15);
5358 dsc
->modinsn
[0] = (insn
& 0xfff000f0) | 0x302;
5360 dsc
->modinsn
[0] = (insn
& 0xfff000f0) | 0x10302;
5362 install_alu_shifted_reg (gdbarch
, regs
, dsc
, rd
, rn
, rm
, rs
);
5367 /* Clean up load instructions. */
5370 cleanup_load (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5371 arm_displaced_step_closure
*dsc
)
5373 ULONGEST rt_val
, rt_val2
= 0, rn_val
;
5375 rt_val
= displaced_read_reg (regs
, dsc
, 0);
5376 if (dsc
->u
.ldst
.xfersize
== 8)
5377 rt_val2
= displaced_read_reg (regs
, dsc
, 1);
5378 rn_val
= displaced_read_reg (regs
, dsc
, 2);
5380 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5381 if (dsc
->u
.ldst
.xfersize
> 4)
5382 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
5383 displaced_write_reg (regs
, dsc
, 2, dsc
->tmp
[2], CANNOT_WRITE_PC
);
5384 if (!dsc
->u
.ldst
.immed
)
5385 displaced_write_reg (regs
, dsc
, 3, dsc
->tmp
[3], CANNOT_WRITE_PC
);
5387 /* Handle register writeback. */
5388 if (dsc
->u
.ldst
.writeback
)
5389 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, CANNOT_WRITE_PC
);
5390 /* Put result in right place. */
5391 displaced_write_reg (regs
, dsc
, dsc
->rd
, rt_val
, LOAD_WRITE_PC
);
5392 if (dsc
->u
.ldst
.xfersize
== 8)
5393 displaced_write_reg (regs
, dsc
, dsc
->rd
+ 1, rt_val2
, LOAD_WRITE_PC
);
5396 /* Clean up store instructions. */
5399 cleanup_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5400 arm_displaced_step_closure
*dsc
)
5402 ULONGEST rn_val
= displaced_read_reg (regs
, dsc
, 2);
5404 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5405 if (dsc
->u
.ldst
.xfersize
> 4)
5406 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
5407 displaced_write_reg (regs
, dsc
, 2, dsc
->tmp
[2], CANNOT_WRITE_PC
);
5408 if (!dsc
->u
.ldst
.immed
)
5409 displaced_write_reg (regs
, dsc
, 3, dsc
->tmp
[3], CANNOT_WRITE_PC
);
5410 if (!dsc
->u
.ldst
.restore_r4
)
5411 displaced_write_reg (regs
, dsc
, 4, dsc
->tmp
[4], CANNOT_WRITE_PC
);
5414 if (dsc
->u
.ldst
.writeback
)
5415 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, CANNOT_WRITE_PC
);
5418 /* Copy "extra" load/store instructions. These are halfword/doubleword
5419 transfers, which have a different encoding to byte/word transfers. */
5422 arm_copy_extra_ld_st (struct gdbarch
*gdbarch
, uint32_t insn
, int unprivileged
,
5423 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
5425 unsigned int op1
= bits (insn
, 20, 24);
5426 unsigned int op2
= bits (insn
, 5, 6);
5427 unsigned int rt
= bits (insn
, 12, 15);
5428 unsigned int rn
= bits (insn
, 16, 19);
5429 unsigned int rm
= bits (insn
, 0, 3);
5430 char load
[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
5431 char bytesize
[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
5432 int immed
= (op1
& 0x4) != 0;
5434 ULONGEST rt_val
, rt_val2
= 0, rn_val
, rm_val
= 0;
5436 if (!insn_references_pc (insn
, 0x000ff00ful
))
5437 return arm_copy_unmodified (gdbarch
, insn
, "extra load/store", dsc
);
5439 if (debug_displaced
)
5440 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %sextra load/store "
5441 "insn %.8lx\n", unprivileged
? "unprivileged " : "",
5442 (unsigned long) insn
);
5444 opcode
= ((op2
<< 2) | (op1
& 0x1) | ((op1
& 0x4) >> 1)) - 4;
5447 internal_error (__FILE__
, __LINE__
,
5448 _("copy_extra_ld_st: instruction decode error"));
5450 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5451 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5452 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
5454 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
5456 rt_val
= displaced_read_reg (regs
, dsc
, rt
);
5457 if (bytesize
[opcode
] == 8)
5458 rt_val2
= displaced_read_reg (regs
, dsc
, rt
+ 1);
5459 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5461 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5463 displaced_write_reg (regs
, dsc
, 0, rt_val
, CANNOT_WRITE_PC
);
5464 if (bytesize
[opcode
] == 8)
5465 displaced_write_reg (regs
, dsc
, 1, rt_val2
, CANNOT_WRITE_PC
);
5466 displaced_write_reg (regs
, dsc
, 2, rn_val
, CANNOT_WRITE_PC
);
5468 displaced_write_reg (regs
, dsc
, 3, rm_val
, CANNOT_WRITE_PC
);
5471 dsc
->u
.ldst
.xfersize
= bytesize
[opcode
];
5472 dsc
->u
.ldst
.rn
= rn
;
5473 dsc
->u
.ldst
.immed
= immed
;
5474 dsc
->u
.ldst
.writeback
= bit (insn
, 24) == 0 || bit (insn
, 21) != 0;
5475 dsc
->u
.ldst
.restore_r4
= 0;
5478 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
5480 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
5481 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x20000;
5483 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
5485 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
5486 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x20003;
5488 dsc
->cleanup
= load
[opcode
] ? &cleanup_load
: &cleanup_store
;
5493 /* Copy byte/half word/word loads and stores. */
5496 install_load_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5497 arm_displaced_step_closure
*dsc
, int load
,
5498 int immed
, int writeback
, int size
, int usermode
,
5499 int rt
, int rm
, int rn
)
5501 ULONGEST rt_val
, rn_val
, rm_val
= 0;
5503 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5504 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
5506 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
5508 dsc
->tmp
[4] = displaced_read_reg (regs
, dsc
, 4);
5510 rt_val
= displaced_read_reg (regs
, dsc
, rt
);
5511 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5513 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5515 displaced_write_reg (regs
, dsc
, 0, rt_val
, CANNOT_WRITE_PC
);
5516 displaced_write_reg (regs
, dsc
, 2, rn_val
, CANNOT_WRITE_PC
);
5518 displaced_write_reg (regs
, dsc
, 3, rm_val
, CANNOT_WRITE_PC
);
5520 dsc
->u
.ldst
.xfersize
= size
;
5521 dsc
->u
.ldst
.rn
= rn
;
5522 dsc
->u
.ldst
.immed
= immed
;
5523 dsc
->u
.ldst
.writeback
= writeback
;
5525 /* To write PC we can do:
5527 Before this sequence of instructions:
5528 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
5529 r2 is the Rn value got from dispalced_read_reg.
5531 Insn1: push {pc} Write address of STR instruction + offset on stack
5532 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
5533 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
5534 = addr(Insn1) + offset - addr(Insn3) - 8
5536 Insn4: add r4, r4, #8 r4 = offset - 8
5537 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
5539 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
5541 Otherwise we don't know what value to write for PC, since the offset is
5542 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
5543 of this can be found in Section "Saving from r15" in
5544 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
5546 dsc
->cleanup
= load
? &cleanup_load
: &cleanup_store
;
5551 thumb2_copy_load_literal (struct gdbarch
*gdbarch
, uint16_t insn1
,
5552 uint16_t insn2
, struct regcache
*regs
,
5553 arm_displaced_step_closure
*dsc
, int size
)
5555 unsigned int u_bit
= bit (insn1
, 7);
5556 unsigned int rt
= bits (insn2
, 12, 15);
5557 int imm12
= bits (insn2
, 0, 11);
5560 if (debug_displaced
)
5561 fprintf_unfiltered (gdb_stdlog
,
5562 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
5563 (unsigned int) dsc
->insn_addr
, rt
, u_bit
? '+' : '-',
5569 /* Rewrite instruction LDR Rt imm12 into:
5571 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
5575 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
5578 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5579 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
5580 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
5582 pc_val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
5584 pc_val
= pc_val
& 0xfffffffc;
5586 displaced_write_reg (regs
, dsc
, 2, pc_val
, CANNOT_WRITE_PC
);
5587 displaced_write_reg (regs
, dsc
, 3, imm12
, CANNOT_WRITE_PC
);
5591 dsc
->u
.ldst
.xfersize
= size
;
5592 dsc
->u
.ldst
.immed
= 0;
5593 dsc
->u
.ldst
.writeback
= 0;
5594 dsc
->u
.ldst
.restore_r4
= 0;
5596 /* LDR R0, R2, R3 */
5597 dsc
->modinsn
[0] = 0xf852;
5598 dsc
->modinsn
[1] = 0x3;
5601 dsc
->cleanup
= &cleanup_load
;
5607 thumb2_copy_load_reg_imm (struct gdbarch
*gdbarch
, uint16_t insn1
,
5608 uint16_t insn2
, struct regcache
*regs
,
5609 arm_displaced_step_closure
*dsc
,
5610 int writeback
, int immed
)
5612 unsigned int rt
= bits (insn2
, 12, 15);
5613 unsigned int rn
= bits (insn1
, 0, 3);
5614 unsigned int rm
= bits (insn2
, 0, 3); /* Only valid if !immed. */
5615 /* In LDR (register), there is also a register Rm, which is not allowed to
5616 be PC, so we don't have to check it. */
5618 if (rt
!= ARM_PC_REGNUM
&& rn
!= ARM_PC_REGNUM
)
5619 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "load",
5622 if (debug_displaced
)
5623 fprintf_unfiltered (gdb_stdlog
,
5624 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
5625 rt
, rn
, insn1
, insn2
);
5627 install_load_store (gdbarch
, regs
, dsc
, 1, immed
, writeback
, 4,
5630 dsc
->u
.ldst
.restore_r4
= 0;
5633 /* ldr[b]<cond> rt, [rn, #imm], etc.
5635 ldr[b]<cond> r0, [r2, #imm]. */
5637 dsc
->modinsn
[0] = (insn1
& 0xfff0) | 0x2;
5638 dsc
->modinsn
[1] = insn2
& 0x0fff;
5641 /* ldr[b]<cond> rt, [rn, rm], etc.
5643 ldr[b]<cond> r0, [r2, r3]. */
5645 dsc
->modinsn
[0] = (insn1
& 0xfff0) | 0x2;
5646 dsc
->modinsn
[1] = (insn2
& 0x0ff0) | 0x3;
5656 arm_copy_ldr_str_ldrb_strb (struct gdbarch
*gdbarch
, uint32_t insn
,
5657 struct regcache
*regs
,
5658 arm_displaced_step_closure
*dsc
,
5659 int load
, int size
, int usermode
)
5661 int immed
= !bit (insn
, 25);
5662 int writeback
= (bit (insn
, 24) == 0 || bit (insn
, 21) != 0);
5663 unsigned int rt
= bits (insn
, 12, 15);
5664 unsigned int rn
= bits (insn
, 16, 19);
5665 unsigned int rm
= bits (insn
, 0, 3); /* Only valid if !immed. */
5667 if (!insn_references_pc (insn
, 0x000ff00ful
))
5668 return arm_copy_unmodified (gdbarch
, insn
, "load/store", dsc
);
5670 if (debug_displaced
)
5671 fprintf_unfiltered (gdb_stdlog
,
5672 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
5673 load
? (size
== 1 ? "ldrb" : "ldr")
5674 : (size
== 1 ? "strb" : "str"), usermode
? "t" : "",
5676 (unsigned long) insn
);
5678 install_load_store (gdbarch
, regs
, dsc
, load
, immed
, writeback
, size
,
5679 usermode
, rt
, rm
, rn
);
5681 if (load
|| rt
!= ARM_PC_REGNUM
)
5683 dsc
->u
.ldst
.restore_r4
= 0;
5686 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
5688 {ldr,str}[b]<cond> r0, [r2, #imm]. */
5689 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x20000;
5691 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
5693 {ldr,str}[b]<cond> r0, [r2, r3]. */
5694 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x20003;
5698 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
5699 dsc
->u
.ldst
.restore_r4
= 1;
5700 dsc
->modinsn
[0] = 0xe92d8000; /* push {pc} */
5701 dsc
->modinsn
[1] = 0xe8bd0010; /* pop {r4} */
5702 dsc
->modinsn
[2] = 0xe044400f; /* sub r4, r4, pc. */
5703 dsc
->modinsn
[3] = 0xe2844008; /* add r4, r4, #8. */
5704 dsc
->modinsn
[4] = 0xe0800004; /* add r0, r0, r4. */
5708 dsc
->modinsn
[5] = (insn
& 0xfff00fff) | 0x20000;
5710 dsc
->modinsn
[5] = (insn
& 0xfff00ff0) | 0x20003;
5715 dsc
->cleanup
= load
? &cleanup_load
: &cleanup_store
;
5720 /* Cleanup LDM instructions with fully-populated register list. This is an
5721 unfortunate corner case: it's impossible to implement correctly by modifying
5722 the instruction. The issue is as follows: we have an instruction,
5726 which we must rewrite to avoid loading PC. A possible solution would be to
5727 do the load in two halves, something like (with suitable cleanup
5731 ldm[id][ab] r8!, {r0-r7}
5733 ldm[id][ab] r8, {r7-r14}
5736 but at present there's no suitable place for <temp>, since the scratch space
5737 is overwritten before the cleanup routine is called. For now, we simply
5738 emulate the instruction. */
5741 cleanup_block_load_all (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5742 arm_displaced_step_closure
*dsc
)
5744 int inc
= dsc
->u
.block
.increment
;
5745 int bump_before
= dsc
->u
.block
.before
? (inc
? 4 : -4) : 0;
5746 int bump_after
= dsc
->u
.block
.before
? 0 : (inc
? 4 : -4);
5747 uint32_t regmask
= dsc
->u
.block
.regmask
;
5748 int regno
= inc
? 0 : 15;
5749 CORE_ADDR xfer_addr
= dsc
->u
.block
.xfer_addr
;
5750 int exception_return
= dsc
->u
.block
.load
&& dsc
->u
.block
.user
5751 && (regmask
& 0x8000) != 0;
5752 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
5753 int do_transfer
= condition_true (dsc
->u
.block
.cond
, status
);
5754 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
5759 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
5760 sensible we can do here. Complain loudly. */
5761 if (exception_return
)
5762 error (_("Cannot single-step exception return"));
5764 /* We don't handle any stores here for now. */
5765 gdb_assert (dsc
->u
.block
.load
!= 0);
5767 if (debug_displaced
)
5768 fprintf_unfiltered (gdb_stdlog
, "displaced: emulating block transfer: "
5769 "%s %s %s\n", dsc
->u
.block
.load
? "ldm" : "stm",
5770 dsc
->u
.block
.increment
? "inc" : "dec",
5771 dsc
->u
.block
.before
? "before" : "after");
5778 while (regno
<= ARM_PC_REGNUM
&& (regmask
& (1 << regno
)) == 0)
5781 while (regno
>= 0 && (regmask
& (1 << regno
)) == 0)
5784 xfer_addr
+= bump_before
;
5786 memword
= read_memory_unsigned_integer (xfer_addr
, 4, byte_order
);
5787 displaced_write_reg (regs
, dsc
, regno
, memword
, LOAD_WRITE_PC
);
5789 xfer_addr
+= bump_after
;
5791 regmask
&= ~(1 << regno
);
5794 if (dsc
->u
.block
.writeback
)
5795 displaced_write_reg (regs
, dsc
, dsc
->u
.block
.rn
, xfer_addr
,
5799 /* Clean up an STM which included the PC in the register list. */
5802 cleanup_block_store_pc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5803 arm_displaced_step_closure
*dsc
)
5805 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
5806 int store_executed
= condition_true (dsc
->u
.block
.cond
, status
);
5807 CORE_ADDR pc_stored_at
, transferred_regs
= bitcount (dsc
->u
.block
.regmask
);
5808 CORE_ADDR stm_insn_addr
;
5811 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
5813 /* If condition code fails, there's nothing else to do. */
5814 if (!store_executed
)
5817 if (dsc
->u
.block
.increment
)
5819 pc_stored_at
= dsc
->u
.block
.xfer_addr
+ 4 * transferred_regs
;
5821 if (dsc
->u
.block
.before
)
5826 pc_stored_at
= dsc
->u
.block
.xfer_addr
;
5828 if (dsc
->u
.block
.before
)
5832 pc_val
= read_memory_unsigned_integer (pc_stored_at
, 4, byte_order
);
5833 stm_insn_addr
= dsc
->scratch_base
;
5834 offset
= pc_val
- stm_insn_addr
;
5836 if (debug_displaced
)
5837 fprintf_unfiltered (gdb_stdlog
, "displaced: detected PC offset %.8lx for "
5838 "STM instruction\n", offset
);
5840 /* Rewrite the stored PC to the proper value for the non-displaced original
5842 write_memory_unsigned_integer (pc_stored_at
, 4, byte_order
,
5843 dsc
->insn_addr
+ offset
);
5846 /* Clean up an LDM which includes the PC in the register list. We clumped all
5847 the registers in the transferred list into a contiguous range r0...rX (to
5848 avoid loading PC directly and losing control of the debugged program), so we
5849 must undo that here. */
5852 cleanup_block_load_pc (struct gdbarch
*gdbarch
,
5853 struct regcache
*regs
,
5854 arm_displaced_step_closure
*dsc
)
5856 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
5857 int load_executed
= condition_true (dsc
->u
.block
.cond
, status
);
5858 unsigned int mask
= dsc
->u
.block
.regmask
, write_reg
= ARM_PC_REGNUM
;
5859 unsigned int regs_loaded
= bitcount (mask
);
5860 unsigned int num_to_shuffle
= regs_loaded
, clobbered
;
5862 /* The method employed here will fail if the register list is fully populated
5863 (we need to avoid loading PC directly). */
5864 gdb_assert (num_to_shuffle
< 16);
5869 clobbered
= (1 << num_to_shuffle
) - 1;
5871 while (num_to_shuffle
> 0)
5873 if ((mask
& (1 << write_reg
)) != 0)
5875 unsigned int read_reg
= num_to_shuffle
- 1;
5877 if (read_reg
!= write_reg
)
5879 ULONGEST rval
= displaced_read_reg (regs
, dsc
, read_reg
);
5880 displaced_write_reg (regs
, dsc
, write_reg
, rval
, LOAD_WRITE_PC
);
5881 if (debug_displaced
)
5882 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: move "
5883 "loaded register r%d to r%d\n"), read_reg
,
5886 else if (debug_displaced
)
5887 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: register "
5888 "r%d already in the right place\n"),
5891 clobbered
&= ~(1 << write_reg
);
5899 /* Restore any registers we scribbled over. */
5900 for (write_reg
= 0; clobbered
!= 0; write_reg
++)
5902 if ((clobbered
& (1 << write_reg
)) != 0)
5904 displaced_write_reg (regs
, dsc
, write_reg
, dsc
->tmp
[write_reg
],
5906 if (debug_displaced
)
5907 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: restored "
5908 "clobbered register r%d\n"), write_reg
);
5909 clobbered
&= ~(1 << write_reg
);
5913 /* Perform register writeback manually. */
5914 if (dsc
->u
.block
.writeback
)
5916 ULONGEST new_rn_val
= dsc
->u
.block
.xfer_addr
;
5918 if (dsc
->u
.block
.increment
)
5919 new_rn_val
+= regs_loaded
* 4;
5921 new_rn_val
-= regs_loaded
* 4;
5923 displaced_write_reg (regs
, dsc
, dsc
->u
.block
.rn
, new_rn_val
,
5928 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
5929 in user-level code (in particular exception return, ldm rn, {...pc}^). */
5932 arm_copy_block_xfer (struct gdbarch
*gdbarch
, uint32_t insn
,
5933 struct regcache
*regs
,
5934 arm_displaced_step_closure
*dsc
)
5936 int load
= bit (insn
, 20);
5937 int user
= bit (insn
, 22);
5938 int increment
= bit (insn
, 23);
5939 int before
= bit (insn
, 24);
5940 int writeback
= bit (insn
, 21);
5941 int rn
= bits (insn
, 16, 19);
5943 /* Block transfers which don't mention PC can be run directly
5945 if (rn
!= ARM_PC_REGNUM
&& (insn
& 0x8000) == 0)
5946 return arm_copy_unmodified (gdbarch
, insn
, "ldm/stm", dsc
);
5948 if (rn
== ARM_PC_REGNUM
)
5950 warning (_("displaced: Unpredictable LDM or STM with "
5951 "base register r15"));
5952 return arm_copy_unmodified (gdbarch
, insn
, "unpredictable ldm/stm", dsc
);
5955 if (debug_displaced
)
5956 fprintf_unfiltered (gdb_stdlog
, "displaced: copying block transfer insn "
5957 "%.8lx\n", (unsigned long) insn
);
5959 dsc
->u
.block
.xfer_addr
= displaced_read_reg (regs
, dsc
, rn
);
5960 dsc
->u
.block
.rn
= rn
;
5962 dsc
->u
.block
.load
= load
;
5963 dsc
->u
.block
.user
= user
;
5964 dsc
->u
.block
.increment
= increment
;
5965 dsc
->u
.block
.before
= before
;
5966 dsc
->u
.block
.writeback
= writeback
;
5967 dsc
->u
.block
.cond
= bits (insn
, 28, 31);
5969 dsc
->u
.block
.regmask
= insn
& 0xffff;
5973 if ((insn
& 0xffff) == 0xffff)
5975 /* LDM with a fully-populated register list. This case is
5976 particularly tricky. Implement for now by fully emulating the
5977 instruction (which might not behave perfectly in all cases, but
5978 these instructions should be rare enough for that not to matter
5980 dsc
->modinsn
[0] = ARM_NOP
;
5982 dsc
->cleanup
= &cleanup_block_load_all
;
5986 /* LDM of a list of registers which includes PC. Implement by
5987 rewriting the list of registers to be transferred into a
5988 contiguous chunk r0...rX before doing the transfer, then shuffling
5989 registers into the correct places in the cleanup routine. */
5990 unsigned int regmask
= insn
& 0xffff;
5991 unsigned int num_in_list
= bitcount (regmask
), new_regmask
;
5994 for (i
= 0; i
< num_in_list
; i
++)
5995 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
5997 /* Writeback makes things complicated. We need to avoid clobbering
5998 the base register with one of the registers in our modified
5999 register list, but just using a different register can't work in
6002 ldm r14!, {r0-r13,pc}
6004 which would need to be rewritten as:
6008 but that can't work, because there's no free register for N.
6010 Solve this by turning off the writeback bit, and emulating
6011 writeback manually in the cleanup routine. */
6016 new_regmask
= (1 << num_in_list
) - 1;
6018 if (debug_displaced
)
6019 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM r%d%s, "
6020 "{..., pc}: original reg list %.4x, modified "
6021 "list %.4x\n"), rn
, writeback
? "!" : "",
6022 (int) insn
& 0xffff, new_regmask
);
6024 dsc
->modinsn
[0] = (insn
& ~0xffff) | (new_regmask
& 0xffff);
6026 dsc
->cleanup
= &cleanup_block_load_pc
;
6031 /* STM of a list of registers which includes PC. Run the instruction
6032 as-is, but out of line: this will store the wrong value for the PC,
6033 so we must manually fix up the memory in the cleanup routine.
6034 Doing things this way has the advantage that we can auto-detect
6035 the offset of the PC write (which is architecture-dependent) in
6036 the cleanup routine. */
6037 dsc
->modinsn
[0] = insn
;
6039 dsc
->cleanup
= &cleanup_block_store_pc
;
6046 thumb2_copy_block_xfer (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
6047 struct regcache
*regs
,
6048 arm_displaced_step_closure
*dsc
)
6050 int rn
= bits (insn1
, 0, 3);
6051 int load
= bit (insn1
, 4);
6052 int writeback
= bit (insn1
, 5);
6054 /* Block transfers which don't mention PC can be run directly
6056 if (rn
!= ARM_PC_REGNUM
&& (insn2
& 0x8000) == 0)
6057 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "ldm/stm", dsc
);
6059 if (rn
== ARM_PC_REGNUM
)
6061 warning (_("displaced: Unpredictable LDM or STM with "
6062 "base register r15"));
6063 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6064 "unpredictable ldm/stm", dsc
);
6067 if (debug_displaced
)
6068 fprintf_unfiltered (gdb_stdlog
, "displaced: copying block transfer insn "
6069 "%.4x%.4x\n", insn1
, insn2
);
6071 /* Clear bit 13, since it should be always zero. */
6072 dsc
->u
.block
.regmask
= (insn2
& 0xdfff);
6073 dsc
->u
.block
.rn
= rn
;
6075 dsc
->u
.block
.load
= load
;
6076 dsc
->u
.block
.user
= 0;
6077 dsc
->u
.block
.increment
= bit (insn1
, 7);
6078 dsc
->u
.block
.before
= bit (insn1
, 8);
6079 dsc
->u
.block
.writeback
= writeback
;
6080 dsc
->u
.block
.cond
= INST_AL
;
6081 dsc
->u
.block
.xfer_addr
= displaced_read_reg (regs
, dsc
, rn
);
6085 if (dsc
->u
.block
.regmask
== 0xffff)
6087 /* This branch is impossible to happen. */
6092 unsigned int regmask
= dsc
->u
.block
.regmask
;
6093 unsigned int num_in_list
= bitcount (regmask
), new_regmask
;
6096 for (i
= 0; i
< num_in_list
; i
++)
6097 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
6102 new_regmask
= (1 << num_in_list
) - 1;
6104 if (debug_displaced
)
6105 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM r%d%s, "
6106 "{..., pc}: original reg list %.4x, modified "
6107 "list %.4x\n"), rn
, writeback
? "!" : "",
6108 (int) dsc
->u
.block
.regmask
, new_regmask
);
6110 dsc
->modinsn
[0] = insn1
;
6111 dsc
->modinsn
[1] = (new_regmask
& 0xffff);
6114 dsc
->cleanup
= &cleanup_block_load_pc
;
6119 dsc
->modinsn
[0] = insn1
;
6120 dsc
->modinsn
[1] = insn2
;
6122 dsc
->cleanup
= &cleanup_block_store_pc
;
6127 /* Wrapper over read_memory_unsigned_integer for use in arm_get_next_pcs.
6128 This is used to avoid a dependency on BFD's bfd_endian enum. */
6131 arm_get_next_pcs_read_memory_unsigned_integer (CORE_ADDR memaddr
, int len
,
6134 return read_memory_unsigned_integer (memaddr
, len
,
6135 (enum bfd_endian
) byte_order
);
6138 /* Wrapper over gdbarch_addr_bits_remove for use in arm_get_next_pcs. */
6141 arm_get_next_pcs_addr_bits_remove (struct arm_get_next_pcs
*self
,
6144 return gdbarch_addr_bits_remove (self
->regcache
->arch (), val
);
6147 /* Wrapper over syscall_next_pc for use in get_next_pcs. */
6150 arm_get_next_pcs_syscall_next_pc (struct arm_get_next_pcs
*self
)
6155 /* Wrapper over arm_is_thumb for use in arm_get_next_pcs. */
6158 arm_get_next_pcs_is_thumb (struct arm_get_next_pcs
*self
)
6160 return arm_is_thumb (self
->regcache
);
6163 /* single_step() is called just before we want to resume the inferior,
6164 if we want to single-step it but there is no hardware or kernel
6165 single-step support. We find the target of the coming instructions
6166 and breakpoint them. */
6168 std::vector
<CORE_ADDR
>
6169 arm_software_single_step (struct regcache
*regcache
)
6171 struct gdbarch
*gdbarch
= regcache
->arch ();
6172 struct arm_get_next_pcs next_pcs_ctx
;
6174 arm_get_next_pcs_ctor (&next_pcs_ctx
,
6175 &arm_get_next_pcs_ops
,
6176 gdbarch_byte_order (gdbarch
),
6177 gdbarch_byte_order_for_code (gdbarch
),
6181 std::vector
<CORE_ADDR
> next_pcs
= arm_get_next_pcs (&next_pcs_ctx
);
6183 for (CORE_ADDR
&pc_ref
: next_pcs
)
6184 pc_ref
= gdbarch_addr_bits_remove (gdbarch
, pc_ref
);
6189 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
6190 for Linux, where some SVC instructions must be treated specially. */
6193 cleanup_svc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6194 arm_displaced_step_closure
*dsc
)
6196 CORE_ADDR resume_addr
= dsc
->insn_addr
+ dsc
->insn_size
;
6198 if (debug_displaced
)
6199 fprintf_unfiltered (gdb_stdlog
, "displaced: cleanup for svc, resume at "
6200 "%.8lx\n", (unsigned long) resume_addr
);
6202 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, resume_addr
, BRANCH_WRITE_PC
);
6206 /* Common copy routine for svc instruciton. */
6209 install_svc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6210 arm_displaced_step_closure
*dsc
)
6212 /* Preparation: none.
6213 Insn: unmodified svc.
6214 Cleanup: pc <- insn_addr + insn_size. */
6216 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
6218 dsc
->wrote_to_pc
= 1;
6220 /* Allow OS-specific code to override SVC handling. */
6221 if (dsc
->u
.svc
.copy_svc_os
)
6222 return dsc
->u
.svc
.copy_svc_os (gdbarch
, regs
, dsc
);
6225 dsc
->cleanup
= &cleanup_svc
;
6231 arm_copy_svc (struct gdbarch
*gdbarch
, uint32_t insn
,
6232 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
6235 if (debug_displaced
)
6236 fprintf_unfiltered (gdb_stdlog
, "displaced: copying svc insn %.8lx\n",
6237 (unsigned long) insn
);
6239 dsc
->modinsn
[0] = insn
;
6241 return install_svc (gdbarch
, regs
, dsc
);
6245 thumb_copy_svc (struct gdbarch
*gdbarch
, uint16_t insn
,
6246 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
6249 if (debug_displaced
)
6250 fprintf_unfiltered (gdb_stdlog
, "displaced: copying svc insn %.4x\n",
6253 dsc
->modinsn
[0] = insn
;
6255 return install_svc (gdbarch
, regs
, dsc
);
6258 /* Copy undefined instructions. */
6261 arm_copy_undef (struct gdbarch
*gdbarch
, uint32_t insn
,
6262 arm_displaced_step_closure
*dsc
)
6264 if (debug_displaced
)
6265 fprintf_unfiltered (gdb_stdlog
,
6266 "displaced: copying undefined insn %.8lx\n",
6267 (unsigned long) insn
);
6269 dsc
->modinsn
[0] = insn
;
6275 thumb_32bit_copy_undef (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
6276 arm_displaced_step_closure
*dsc
)
6279 if (debug_displaced
)
6280 fprintf_unfiltered (gdb_stdlog
, "displaced: copying undefined insn "
6281 "%.4x %.4x\n", (unsigned short) insn1
,
6282 (unsigned short) insn2
);
6284 dsc
->modinsn
[0] = insn1
;
6285 dsc
->modinsn
[1] = insn2
;
6291 /* Copy unpredictable instructions. */
6294 arm_copy_unpred (struct gdbarch
*gdbarch
, uint32_t insn
,
6295 arm_displaced_step_closure
*dsc
)
6297 if (debug_displaced
)
6298 fprintf_unfiltered (gdb_stdlog
, "displaced: copying unpredictable insn "
6299 "%.8lx\n", (unsigned long) insn
);
6301 dsc
->modinsn
[0] = insn
;
6306 /* The decode_* functions are instruction decoding helpers. They mostly follow
6307 the presentation in the ARM ARM. */
6310 arm_decode_misc_memhint_neon (struct gdbarch
*gdbarch
, uint32_t insn
,
6311 struct regcache
*regs
,
6312 arm_displaced_step_closure
*dsc
)
6314 unsigned int op1
= bits (insn
, 20, 26), op2
= bits (insn
, 4, 7);
6315 unsigned int rn
= bits (insn
, 16, 19);
6317 if (op1
== 0x10 && (op2
& 0x2) == 0x0 && (rn
& 0x1) == 0x0)
6318 return arm_copy_unmodified (gdbarch
, insn
, "cps", dsc
);
6319 else if (op1
== 0x10 && op2
== 0x0 && (rn
& 0x1) == 0x1)
6320 return arm_copy_unmodified (gdbarch
, insn
, "setend", dsc
);
6321 else if ((op1
& 0x60) == 0x20)
6322 return arm_copy_unmodified (gdbarch
, insn
, "neon dataproc", dsc
);
6323 else if ((op1
& 0x71) == 0x40)
6324 return arm_copy_unmodified (gdbarch
, insn
, "neon elt/struct load/store",
6326 else if ((op1
& 0x77) == 0x41)
6327 return arm_copy_unmodified (gdbarch
, insn
, "unallocated mem hint", dsc
);
6328 else if ((op1
& 0x77) == 0x45)
6329 return arm_copy_preload (gdbarch
, insn
, regs
, dsc
); /* pli. */
6330 else if ((op1
& 0x77) == 0x51)
6333 return arm_copy_preload (gdbarch
, insn
, regs
, dsc
); /* pld/pldw. */
6335 return arm_copy_unpred (gdbarch
, insn
, dsc
);
6337 else if ((op1
& 0x77) == 0x55)
6338 return arm_copy_preload (gdbarch
, insn
, regs
, dsc
); /* pld/pldw. */
6339 else if (op1
== 0x57)
6342 case 0x1: return arm_copy_unmodified (gdbarch
, insn
, "clrex", dsc
);
6343 case 0x4: return arm_copy_unmodified (gdbarch
, insn
, "dsb", dsc
);
6344 case 0x5: return arm_copy_unmodified (gdbarch
, insn
, "dmb", dsc
);
6345 case 0x6: return arm_copy_unmodified (gdbarch
, insn
, "isb", dsc
);
6346 default: return arm_copy_unpred (gdbarch
, insn
, dsc
);
6348 else if ((op1
& 0x63) == 0x43)
6349 return arm_copy_unpred (gdbarch
, insn
, dsc
);
6350 else if ((op2
& 0x1) == 0x0)
6351 switch (op1
& ~0x80)
6354 return arm_copy_unmodified (gdbarch
, insn
, "unallocated mem hint", dsc
);
6356 return arm_copy_preload_reg (gdbarch
, insn
, regs
, dsc
); /* pli reg. */
6357 case 0x71: case 0x75:
6359 return arm_copy_preload_reg (gdbarch
, insn
, regs
, dsc
);
6360 case 0x63: case 0x67: case 0x73: case 0x77:
6361 return arm_copy_unpred (gdbarch
, insn
, dsc
);
6363 return arm_copy_undef (gdbarch
, insn
, dsc
);
6366 return arm_copy_undef (gdbarch
, insn
, dsc
); /* Probably unreachable. */
6370 arm_decode_unconditional (struct gdbarch
*gdbarch
, uint32_t insn
,
6371 struct regcache
*regs
,
6372 arm_displaced_step_closure
*dsc
)
6374 if (bit (insn
, 27) == 0)
6375 return arm_decode_misc_memhint_neon (gdbarch
, insn
, regs
, dsc
);
6376 /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx. */
6377 else switch (((insn
& 0x7000000) >> 23) | ((insn
& 0x100000) >> 20))
6380 return arm_copy_unmodified (gdbarch
, insn
, "srs", dsc
);
6383 return arm_copy_unmodified (gdbarch
, insn
, "rfe", dsc
);
6385 case 0x4: case 0x5: case 0x6: case 0x7:
6386 return arm_copy_b_bl_blx (gdbarch
, insn
, regs
, dsc
);
6389 switch ((insn
& 0xe00000) >> 21)
6391 case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
6393 return arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
);
6396 return arm_copy_unmodified (gdbarch
, insn
, "mcrr/mcrr2", dsc
);
6399 return arm_copy_undef (gdbarch
, insn
, dsc
);
6404 int rn_f
= (bits (insn
, 16, 19) == 0xf);
6405 switch ((insn
& 0xe00000) >> 21)
6408 /* ldc/ldc2 imm (undefined for rn == pc). */
6409 return rn_f
? arm_copy_undef (gdbarch
, insn
, dsc
)
6410 : arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
);
6413 return arm_copy_unmodified (gdbarch
, insn
, "mrrc/mrrc2", dsc
);
6415 case 0x4: case 0x5: case 0x6: case 0x7:
6416 /* ldc/ldc2 lit (undefined for rn != pc). */
6417 return rn_f
? arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
)
6418 : arm_copy_undef (gdbarch
, insn
, dsc
);
6421 return arm_copy_undef (gdbarch
, insn
, dsc
);
6426 return arm_copy_unmodified (gdbarch
, insn
, "stc/stc2", dsc
);
6429 if (bits (insn
, 16, 19) == 0xf)
6431 return arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
);
6433 return arm_copy_undef (gdbarch
, insn
, dsc
);
6437 return arm_copy_unmodified (gdbarch
, insn
, "mcr/mcr2", dsc
);
6439 return arm_copy_unmodified (gdbarch
, insn
, "cdp/cdp2", dsc
);
6443 return arm_copy_unmodified (gdbarch
, insn
, "mrc/mrc2", dsc
);
6445 return arm_copy_unmodified (gdbarch
, insn
, "cdp/cdp2", dsc
);
6448 return arm_copy_undef (gdbarch
, insn
, dsc
);
6452 /* Decode miscellaneous instructions in dp/misc encoding space. */
6455 arm_decode_miscellaneous (struct gdbarch
*gdbarch
, uint32_t insn
,
6456 struct regcache
*regs
,
6457 arm_displaced_step_closure
*dsc
)
6459 unsigned int op2
= bits (insn
, 4, 6);
6460 unsigned int op
= bits (insn
, 21, 22);
6465 return arm_copy_unmodified (gdbarch
, insn
, "mrs/msr", dsc
);
6468 if (op
== 0x1) /* bx. */
6469 return arm_copy_bx_blx_reg (gdbarch
, insn
, regs
, dsc
);
6471 return arm_copy_unmodified (gdbarch
, insn
, "clz", dsc
);
6473 return arm_copy_undef (gdbarch
, insn
, dsc
);
6477 /* Not really supported. */
6478 return arm_copy_unmodified (gdbarch
, insn
, "bxj", dsc
);
6480 return arm_copy_undef (gdbarch
, insn
, dsc
);
6484 return arm_copy_bx_blx_reg (gdbarch
, insn
,
6485 regs
, dsc
); /* blx register. */
6487 return arm_copy_undef (gdbarch
, insn
, dsc
);
6490 return arm_copy_unmodified (gdbarch
, insn
, "saturating add/sub", dsc
);
6494 return arm_copy_unmodified (gdbarch
, insn
, "bkpt", dsc
);
6496 /* Not really supported. */
6497 return arm_copy_unmodified (gdbarch
, insn
, "smc", dsc
);
6501 return arm_copy_undef (gdbarch
, insn
, dsc
);
6506 arm_decode_dp_misc (struct gdbarch
*gdbarch
, uint32_t insn
,
6507 struct regcache
*regs
,
6508 arm_displaced_step_closure
*dsc
)
6511 switch (bits (insn
, 20, 24))
6514 return arm_copy_unmodified (gdbarch
, insn
, "movw", dsc
);
6517 return arm_copy_unmodified (gdbarch
, insn
, "movt", dsc
);
6519 case 0x12: case 0x16:
6520 return arm_copy_unmodified (gdbarch
, insn
, "msr imm", dsc
);
6523 return arm_copy_alu_imm (gdbarch
, insn
, regs
, dsc
);
6527 uint32_t op1
= bits (insn
, 20, 24), op2
= bits (insn
, 4, 7);
6529 if ((op1
& 0x19) != 0x10 && (op2
& 0x1) == 0x0)
6530 return arm_copy_alu_reg (gdbarch
, insn
, regs
, dsc
);
6531 else if ((op1
& 0x19) != 0x10 && (op2
& 0x9) == 0x1)
6532 return arm_copy_alu_shifted_reg (gdbarch
, insn
, regs
, dsc
);
6533 else if ((op1
& 0x19) == 0x10 && (op2
& 0x8) == 0x0)
6534 return arm_decode_miscellaneous (gdbarch
, insn
, regs
, dsc
);
6535 else if ((op1
& 0x19) == 0x10 && (op2
& 0x9) == 0x8)
6536 return arm_copy_unmodified (gdbarch
, insn
, "halfword mul/mla", dsc
);
6537 else if ((op1
& 0x10) == 0x00 && op2
== 0x9)
6538 return arm_copy_unmodified (gdbarch
, insn
, "mul/mla", dsc
);
6539 else if ((op1
& 0x10) == 0x10 && op2
== 0x9)
6540 return arm_copy_unmodified (gdbarch
, insn
, "synch", dsc
);
6541 else if (op2
== 0xb || (op2
& 0xd) == 0xd)
6542 /* 2nd arg means "unprivileged". */
6543 return arm_copy_extra_ld_st (gdbarch
, insn
, (op1
& 0x12) == 0x02, regs
,
6547 /* Should be unreachable. */
6552 arm_decode_ld_st_word_ubyte (struct gdbarch
*gdbarch
, uint32_t insn
,
6553 struct regcache
*regs
,
6554 arm_displaced_step_closure
*dsc
)
6556 int a
= bit (insn
, 25), b
= bit (insn
, 4);
6557 uint32_t op1
= bits (insn
, 20, 24);
6559 if ((!a
&& (op1
& 0x05) == 0x00 && (op1
& 0x17) != 0x02)
6560 || (a
&& (op1
& 0x05) == 0x00 && (op1
& 0x17) != 0x02 && !b
))
6561 return arm_copy_ldr_str_ldrb_strb (gdbarch
, insn
, regs
, dsc
, 0, 4, 0);
6562 else if ((!a
&& (op1
& 0x17) == 0x02)
6563 || (a
&& (op1
& 0x17) == 0x02 && !b
))
6564 return arm_copy_ldr_str_ldrb_strb (gdbarch
, insn
, regs
, dsc
, 0, 4, 1);
6565 else if ((!a
&& (op1
& 0x05) == 0x01 && (op1
& 0x17) != 0x03)
6566 || (a
&& (op1
& 0x05) == 0x01 && (op1
& 0x17) != 0x03 && !b
))
6567 return arm_copy_ldr_str_ldrb_strb (gdbarch
, insn
, regs
, dsc
, 1, 4, 0);
6568 else if ((!a
&& (op1
& 0x17) == 0x03)
6569 || (a
&& (op1
& 0x17) == 0x03 && !b
))
6570 return arm_copy_ldr_str_ldrb_strb (gdbarch
, insn
, regs
, dsc
, 1, 4, 1);
6571 else if ((!a
&& (op1
& 0x05) == 0x04 && (op1
& 0x17) != 0x06)
6572 || (a
&& (op1
& 0x05) == 0x04 && (op1
& 0x17) != 0x06 && !b
))
6573 return arm_copy_ldr_str_ldrb_strb (gdbarch
, insn
, regs
, dsc
, 0, 1, 0);
6574 else if ((!a
&& (op1
& 0x17) == 0x06)
6575 || (a
&& (op1
& 0x17) == 0x06 && !b
))
6576 return arm_copy_ldr_str_ldrb_strb (gdbarch
, insn
, regs
, dsc
, 0, 1, 1);
6577 else if ((!a
&& (op1
& 0x05) == 0x05 && (op1
& 0x17) != 0x07)
6578 || (a
&& (op1
& 0x05) == 0x05 && (op1
& 0x17) != 0x07 && !b
))
6579 return arm_copy_ldr_str_ldrb_strb (gdbarch
, insn
, regs
, dsc
, 1, 1, 0);
6580 else if ((!a
&& (op1
& 0x17) == 0x07)
6581 || (a
&& (op1
& 0x17) == 0x07 && !b
))
6582 return arm_copy_ldr_str_ldrb_strb (gdbarch
, insn
, regs
, dsc
, 1, 1, 1);
6584 /* Should be unreachable. */
6589 arm_decode_media (struct gdbarch
*gdbarch
, uint32_t insn
,
6590 arm_displaced_step_closure
*dsc
)
6592 switch (bits (insn
, 20, 24))
6594 case 0x00: case 0x01: case 0x02: case 0x03:
6595 return arm_copy_unmodified (gdbarch
, insn
, "parallel add/sub signed", dsc
);
6597 case 0x04: case 0x05: case 0x06: case 0x07:
6598 return arm_copy_unmodified (gdbarch
, insn
, "parallel add/sub unsigned", dsc
);
6600 case 0x08: case 0x09: case 0x0a: case 0x0b:
6601 case 0x0c: case 0x0d: case 0x0e: case 0x0f:
6602 return arm_copy_unmodified (gdbarch
, insn
,
6603 "decode/pack/unpack/saturate/reverse", dsc
);
6606 if (bits (insn
, 5, 7) == 0) /* op2. */
6608 if (bits (insn
, 12, 15) == 0xf)
6609 return arm_copy_unmodified (gdbarch
, insn
, "usad8", dsc
);
6611 return arm_copy_unmodified (gdbarch
, insn
, "usada8", dsc
);
6614 return arm_copy_undef (gdbarch
, insn
, dsc
);
6616 case 0x1a: case 0x1b:
6617 if (bits (insn
, 5, 6) == 0x2) /* op2[1:0]. */
6618 return arm_copy_unmodified (gdbarch
, insn
, "sbfx", dsc
);
6620 return arm_copy_undef (gdbarch
, insn
, dsc
);
6622 case 0x1c: case 0x1d:
6623 if (bits (insn
, 5, 6) == 0x0) /* op2[1:0]. */
6625 if (bits (insn
, 0, 3) == 0xf)
6626 return arm_copy_unmodified (gdbarch
, insn
, "bfc", dsc
);
6628 return arm_copy_unmodified (gdbarch
, insn
, "bfi", dsc
);
6631 return arm_copy_undef (gdbarch
, insn
, dsc
);
6633 case 0x1e: case 0x1f:
6634 if (bits (insn
, 5, 6) == 0x2) /* op2[1:0]. */
6635 return arm_copy_unmodified (gdbarch
, insn
, "ubfx", dsc
);
6637 return arm_copy_undef (gdbarch
, insn
, dsc
);
6640 /* Should be unreachable. */
6645 arm_decode_b_bl_ldmstm (struct gdbarch
*gdbarch
, uint32_t insn
,
6646 struct regcache
*regs
,
6647 arm_displaced_step_closure
*dsc
)
6650 return arm_copy_b_bl_blx (gdbarch
, insn
, regs
, dsc
);
6652 return arm_copy_block_xfer (gdbarch
, insn
, regs
, dsc
);
6656 arm_decode_ext_reg_ld_st (struct gdbarch
*gdbarch
, uint32_t insn
,
6657 struct regcache
*regs
,
6658 arm_displaced_step_closure
*dsc
)
6660 unsigned int opcode
= bits (insn
, 20, 24);
6664 case 0x04: case 0x05: /* VFP/Neon mrrc/mcrr. */
6665 return arm_copy_unmodified (gdbarch
, insn
, "vfp/neon mrrc/mcrr", dsc
);
6667 case 0x08: case 0x0a: case 0x0c: case 0x0e:
6668 case 0x12: case 0x16:
6669 return arm_copy_unmodified (gdbarch
, insn
, "vfp/neon vstm/vpush", dsc
);
6671 case 0x09: case 0x0b: case 0x0d: case 0x0f:
6672 case 0x13: case 0x17:
6673 return arm_copy_unmodified (gdbarch
, insn
, "vfp/neon vldm/vpop", dsc
);
6675 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6676 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6677 /* Note: no writeback for these instructions. Bit 25 will always be
6678 zero though (via caller), so the following works OK. */
6679 return arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
);
6682 /* Should be unreachable. */
6686 /* Decode shifted register instructions. */
6689 thumb2_decode_dp_shift_reg (struct gdbarch
*gdbarch
, uint16_t insn1
,
6690 uint16_t insn2
, struct regcache
*regs
,
6691 arm_displaced_step_closure
*dsc
)
6693 /* PC is only allowed to be used in instruction MOV. */
6695 unsigned int op
= bits (insn1
, 5, 8);
6696 unsigned int rn
= bits (insn1
, 0, 3);
6698 if (op
== 0x2 && rn
== 0xf) /* MOV */
6699 return thumb2_copy_alu_imm (gdbarch
, insn1
, insn2
, regs
, dsc
);
6701 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6702 "dp (shift reg)", dsc
);
6706 /* Decode extension register load/store. Exactly the same as
6707 arm_decode_ext_reg_ld_st. */
6710 thumb2_decode_ext_reg_ld_st (struct gdbarch
*gdbarch
, uint16_t insn1
,
6711 uint16_t insn2
, struct regcache
*regs
,
6712 arm_displaced_step_closure
*dsc
)
6714 unsigned int opcode
= bits (insn1
, 4, 8);
6718 case 0x04: case 0x05:
6719 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6720 "vfp/neon vmov", dsc
);
6722 case 0x08: case 0x0c: /* 01x00 */
6723 case 0x0a: case 0x0e: /* 01x10 */
6724 case 0x12: case 0x16: /* 10x10 */
6725 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6726 "vfp/neon vstm/vpush", dsc
);
6728 case 0x09: case 0x0d: /* 01x01 */
6729 case 0x0b: case 0x0f: /* 01x11 */
6730 case 0x13: case 0x17: /* 10x11 */
6731 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6732 "vfp/neon vldm/vpop", dsc
);
6734 case 0x10: case 0x14: case 0x18: case 0x1c: /* vstr. */
6735 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6737 case 0x11: case 0x15: case 0x19: case 0x1d: /* vldr. */
6738 return thumb2_copy_copro_load_store (gdbarch
, insn1
, insn2
, regs
, dsc
);
6741 /* Should be unreachable. */
6746 arm_decode_svc_copro (struct gdbarch
*gdbarch
, uint32_t insn
,
6747 struct regcache
*regs
, arm_displaced_step_closure
*dsc
)
6749 unsigned int op1
= bits (insn
, 20, 25);
6750 int op
= bit (insn
, 4);
6751 unsigned int coproc
= bits (insn
, 8, 11);
6753 if ((op1
& 0x20) == 0x00 && (op1
& 0x3a) != 0x00 && (coproc
& 0xe) == 0xa)
6754 return arm_decode_ext_reg_ld_st (gdbarch
, insn
, regs
, dsc
);
6755 else if ((op1
& 0x21) == 0x00 && (op1
& 0x3a) != 0x00
6756 && (coproc
& 0xe) != 0xa)
6758 return arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
);
6759 else if ((op1
& 0x21) == 0x01 && (op1
& 0x3a) != 0x00
6760 && (coproc
& 0xe) != 0xa)
6761 /* ldc/ldc2 imm/lit. */
6762 return arm_copy_copro_load_store (gdbarch
, insn
, regs
, dsc
);
6763 else if ((op1
& 0x3e) == 0x00)
6764 return arm_copy_undef (gdbarch
, insn
, dsc
);
6765 else if ((op1
& 0x3e) == 0x04 && (coproc
& 0xe) == 0xa)
6766 return arm_copy_unmodified (gdbarch
, insn
, "neon 64bit xfer", dsc
);
6767 else if (op1
== 0x04 && (coproc
& 0xe) != 0xa)
6768 return arm_copy_unmodified (gdbarch
, insn
, "mcrr/mcrr2", dsc
);
6769 else if (op1
== 0x05 && (coproc
& 0xe) != 0xa)
6770 return arm_copy_unmodified (gdbarch
, insn
, "mrrc/mrrc2", dsc
);
6771 else if ((op1
& 0x30) == 0x20 && !op
)
6773 if ((coproc
& 0xe) == 0xa)
6774 return arm_copy_unmodified (gdbarch
, insn
, "vfp dataproc", dsc
);
6776 return arm_copy_unmodified (gdbarch
, insn
, "cdp/cdp2", dsc
);
6778 else if ((op1
& 0x30) == 0x20 && op
)
6779 return arm_copy_unmodified (gdbarch
, insn
, "neon 8/16/32 bit xfer", dsc
);
6780 else if ((op1
& 0x31) == 0x20 && op
&& (coproc
& 0xe) != 0xa)
6781 return arm_copy_unmodified (gdbarch
, insn
, "mcr/mcr2", dsc
);
6782 else if ((op1
& 0x31) == 0x21 && op
&& (coproc
& 0xe) != 0xa)
6783 return arm_copy_unmodified (gdbarch
, insn
, "mrc/mrc2", dsc
);
6784 else if ((op1
& 0x30) == 0x30)
6785 return arm_copy_svc (gdbarch
, insn
, regs
, dsc
);
6787 return arm_copy_undef (gdbarch
, insn
, dsc
); /* Possibly unreachable. */
6791 thumb2_decode_svc_copro (struct gdbarch
*gdbarch
, uint16_t insn1
,
6792 uint16_t insn2
, struct regcache
*regs
,
6793 arm_displaced_step_closure
*dsc
)
6795 unsigned int coproc
= bits (insn2
, 8, 11);
6796 unsigned int bit_5_8
= bits (insn1
, 5, 8);
6797 unsigned int bit_9
= bit (insn1
, 9);
6798 unsigned int bit_4
= bit (insn1
, 4);
6803 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6804 "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
6806 else if (bit_5_8
== 0) /* UNDEFINED. */
6807 return thumb_32bit_copy_undef (gdbarch
, insn1
, insn2
, dsc
);
6810 /*coproc is 101x. SIMD/VFP, ext registers load/store. */
6811 if ((coproc
& 0xe) == 0xa)
6812 return thumb2_decode_ext_reg_ld_st (gdbarch
, insn1
, insn2
, regs
,
6814 else /* coproc is not 101x. */
6816 if (bit_4
== 0) /* STC/STC2. */
6817 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6819 else /* LDC/LDC2 {literal, immeidate}. */
6820 return thumb2_copy_copro_load_store (gdbarch
, insn1
, insn2
,
6826 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "coproc", dsc
);
6832 install_pc_relative (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6833 arm_displaced_step_closure
*dsc
, int rd
)
6839 Preparation: Rd <- PC
6845 int val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
6846 displaced_write_reg (regs
, dsc
, rd
, val
, CANNOT_WRITE_PC
);
6850 thumb_copy_pc_relative_16bit (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6851 arm_displaced_step_closure
*dsc
,
6852 int rd
, unsigned int imm
)
6855 /* Encoding T2: ADDS Rd, #imm */
6856 dsc
->modinsn
[0] = (0x3000 | (rd
<< 8) | imm
);
6858 install_pc_relative (gdbarch
, regs
, dsc
, rd
);
6864 thumb_decode_pc_relative_16bit (struct gdbarch
*gdbarch
, uint16_t insn
,
6865 struct regcache
*regs
,
6866 arm_displaced_step_closure
*dsc
)
6868 unsigned int rd
= bits (insn
, 8, 10);
6869 unsigned int imm8
= bits (insn
, 0, 7);
6871 if (debug_displaced
)
6872 fprintf_unfiltered (gdb_stdlog
,
6873 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
6876 return thumb_copy_pc_relative_16bit (gdbarch
, regs
, dsc
, rd
, imm8
);
6880 thumb_copy_pc_relative_32bit (struct gdbarch
*gdbarch
, uint16_t insn1
,
6881 uint16_t insn2
, struct regcache
*regs
,
6882 arm_displaced_step_closure
*dsc
)
6884 unsigned int rd
= bits (insn2
, 8, 11);
6885 /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
6886 extract raw immediate encoding rather than computing immediate. When
6887 generating ADD or SUB instruction, we can simply perform OR operation to
6888 set immediate into ADD. */
6889 unsigned int imm_3_8
= insn2
& 0x70ff;
6890 unsigned int imm_i
= insn1
& 0x0400; /* Clear all bits except bit 10. */
6892 if (debug_displaced
)
6893 fprintf_unfiltered (gdb_stdlog
,
6894 "displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
6895 rd
, imm_i
, imm_3_8
, insn1
, insn2
);
6897 if (bit (insn1
, 7)) /* Encoding T2 */
6899 /* Encoding T3: SUB Rd, Rd, #imm */
6900 dsc
->modinsn
[0] = (0xf1a0 | rd
| imm_i
);
6901 dsc
->modinsn
[1] = ((rd
<< 8) | imm_3_8
);
6903 else /* Encoding T3 */
6905 /* Encoding T3: ADD Rd, Rd, #imm */
6906 dsc
->modinsn
[0] = (0xf100 | rd
| imm_i
);
6907 dsc
->modinsn
[1] = ((rd
<< 8) | imm_3_8
);
6911 install_pc_relative (gdbarch
, regs
, dsc
, rd
);
6917 thumb_copy_16bit_ldr_literal (struct gdbarch
*gdbarch
, uint16_t insn1
,
6918 struct regcache
*regs
,
6919 arm_displaced_step_closure
*dsc
)
6921 unsigned int rt
= bits (insn1
, 8, 10);
6923 int imm8
= (bits (insn1
, 0, 7) << 2);
6929 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
6931 Insn: LDR R0, [R2, R3];
6932 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
6934 if (debug_displaced
)
6935 fprintf_unfiltered (gdb_stdlog
,
6936 "displaced: copying thumb ldr r%d [pc #%d]\n"
6939 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6940 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
6941 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
6942 pc
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
6943 /* The assembler calculates the required value of the offset from the
6944 Align(PC,4) value of this instruction to the label. */
6945 pc
= pc
& 0xfffffffc;
6947 displaced_write_reg (regs
, dsc
, 2, pc
, CANNOT_WRITE_PC
);
6948 displaced_write_reg (regs
, dsc
, 3, imm8
, CANNOT_WRITE_PC
);
6951 dsc
->u
.ldst
.xfersize
= 4;
6953 dsc
->u
.ldst
.immed
= 0;
6954 dsc
->u
.ldst
.writeback
= 0;
6955 dsc
->u
.ldst
.restore_r4
= 0;
6957 dsc
->modinsn
[0] = 0x58d0; /* ldr r0, [r2, r3]*/
6959 dsc
->cleanup
= &cleanup_load
;
6964 /* Copy Thumb cbnz/cbz insruction. */
6967 thumb_copy_cbnz_cbz (struct gdbarch
*gdbarch
, uint16_t insn1
,
6968 struct regcache
*regs
,
6969 arm_displaced_step_closure
*dsc
)
6971 int non_zero
= bit (insn1
, 11);
6972 unsigned int imm5
= (bit (insn1
, 9) << 6) | (bits (insn1
, 3, 7) << 1);
6973 CORE_ADDR from
= dsc
->insn_addr
;
6974 int rn
= bits (insn1
, 0, 2);
6975 int rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6977 dsc
->u
.branch
.cond
= (rn_val
&& non_zero
) || (!rn_val
&& !non_zero
);
6978 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
6979 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
6980 condition is false, let it be, cleanup_branch will do nothing. */
6981 if (dsc
->u
.branch
.cond
)
6983 dsc
->u
.branch
.cond
= INST_AL
;
6984 dsc
->u
.branch
.dest
= from
+ 4 + imm5
;
6987 dsc
->u
.branch
.dest
= from
+ 2;
6989 dsc
->u
.branch
.link
= 0;
6990 dsc
->u
.branch
.exchange
= 0;
6992 if (debug_displaced
)
6993 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %s [r%d = 0x%x]"
6994 " insn %.4x to %.8lx\n", non_zero
? "cbnz" : "cbz",
6995 rn
, rn_val
, insn1
, dsc
->u
.branch
.dest
);
6997 dsc
->modinsn
[0] = THUMB_NOP
;
6999 dsc
->cleanup
= &cleanup_branch
;
7003 /* Copy Table Branch Byte/Halfword */
7005 thumb2_copy_table_branch (struct gdbarch
*gdbarch
, uint16_t insn1
,
7006 uint16_t insn2
, struct regcache
*regs
,
7007 arm_displaced_step_closure
*dsc
)
7009 ULONGEST rn_val
, rm_val
;
7010 int is_tbh
= bit (insn2
, 4);
7011 CORE_ADDR halfwords
= 0;
7012 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
7014 rn_val
= displaced_read_reg (regs
, dsc
, bits (insn1
, 0, 3));
7015 rm_val
= displaced_read_reg (regs
, dsc
, bits (insn2
, 0, 3));
7021 target_read_memory (rn_val
+ 2 * rm_val
, buf
, 2);
7022 halfwords
= extract_unsigned_integer (buf
, 2, byte_order
);
7028 target_read_memory (rn_val
+ rm_val
, buf
, 1);
7029 halfwords
= extract_unsigned_integer (buf
, 1, byte_order
);
7032 if (debug_displaced
)
7033 fprintf_unfiltered (gdb_stdlog
, "displaced: %s base 0x%x offset 0x%x"
7034 " offset 0x%x\n", is_tbh
? "tbh" : "tbb",
7035 (unsigned int) rn_val
, (unsigned int) rm_val
,
7036 (unsigned int) halfwords
);
7038 dsc
->u
.branch
.cond
= INST_AL
;
7039 dsc
->u
.branch
.link
= 0;
7040 dsc
->u
.branch
.exchange
= 0;
7041 dsc
->u
.branch
.dest
= dsc
->insn_addr
+ 4 + 2 * halfwords
;
7043 dsc
->cleanup
= &cleanup_branch
;
/* Cleanup for a Thumb "POP {r0-r7, pc}" rewritten as
   POP {r0-r7}; MOV r8, r7; POP {r7} (see thumb_copy_pop_pc_16bit):
   move the popped PC value from r7 into PC, then restore r7 from r8
   and r8 from the saved tmp[0].  The restore order matters: each move
   frees the register needed by the next one.  */

static void
cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
			  arm_displaced_step_closure *dsc)
{
  /* PC <- r7 (BX semantics: bit 0 selects the new execution mode).  */
  int val = displaced_read_reg (regs, dsc, 7);
  displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);

  /* r7 <- r8 (r8 temporarily held r7's popped value).  */
  val = displaced_read_reg (regs, dsc, 8);
  displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);

  /* r8 <- tmp[0] (the caller saved the original r8 there).  */
  displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
}
/* Copy a 16-bit Thumb "POP {reglist, PC}" instruction for displaced
   stepping.  PC cannot appear in a 16-bit POP register list executed
   out of line, so the instruction is rewritten as described below.  */

static int
thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, uint16_t insn1,
			 struct regcache *regs,
			 arm_displaced_step_closure *dsc)
{
  dsc->u.block.regmask = insn1 & 0x00ff;

  /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
     to :

     (1) register list is full, that is, r0-r7 are used.
     Prepare: tmp[0] <- r8

     POP {r0, r1, ...., r6, r7}; remove PC from reglist
     MOV r8, r7; Move value of r7 to r8;
     POP {r7}; Store PC value into r7.

     Cleanup: PC <- r7, r7 <- r8, r8 <- tmp[0]

     (2) register list is not full, supposing there are N registers in
     register list (except PC, 0 <= N <= 7).
     Prepare: for each i, 0 - N, tmp[i] <- ri.

     POP {r0, r1, ...., rN};

     Cleanup: Set registers in original reglist from r0 - rN.  Restore r0 - rN
     from tmp[] properly.  */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
			dsc->u.block.regmask, insn1);

  if (dsc->u.block.regmask == 0xff)
    {
      /* Case (1): full list.  Save r8, it is clobbered by MOV r8, r7.  */
      dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);

      dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
      dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
      dsc->modinsn[2] = 0xbc80; /* POP {r7} */

      dsc->numinsns = 3;
      dsc->cleanup = &cleanup_pop_pc_16bit_all;
    }
  else
    {
      /* Case (2): partial list.  Pop into r0..rN (N low registers plus
	 one extra slot that receives the PC value); cleanup scatters the
	 values to the originally named registers.  */
      unsigned int num_in_list = bitcount (dsc->u.block.regmask);
      unsigned int i;
      unsigned int new_regmask;

      /* Save r0..rN so cleanup_block_load_pc can restore them.  */
      for (i = 0; i < num_in_list + 1; i++)
	dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

      new_regmask = (1 << (num_in_list + 1)) - 1;

      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
					  "{..., pc}: original reg list %.4x,"
					  " modified list %.4x\n"),
			    (int) dsc->u.block.regmask, new_regmask);

      /* Record that PC was in the original list for the cleanup.  */
      dsc->u.block.regmask |= 0x8000;
      dsc->u.block.writeback = 0;
      dsc->u.block.cond = INST_AL;

      dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);

      dsc->cleanup = &cleanup_block_load_pc;
    }

  return 0;
}
/* Decode a 16-bit Thumb instruction and dispatch to the appropriate
   copy routine for displaced stepping.  Most instructions are
   position-independent and are copied unmodified; PC-reading and
   branching forms get dedicated handlers.  Raises an internal error on
   a decode failure.  */

static void
thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    struct regcache *regs,
				    arm_displaced_step_closure *dsc)
{
  unsigned short op_bit_12_15 = bits (insn1, 12, 15);
  unsigned short op_bit_10_11 = bits (insn1, 10, 11);
  int err = 0;

  /* 16-bit thumb instructions.  */
  switch (op_bit_12_15)
    {
      /* Shift (imme), add, subtract, move and compare.  */
    case 0: case 1: case 2: case 3:
      err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					 "shift/add/sub/mov/cmp",
					 dsc);
      break;
    case 4:
      switch (op_bit_10_11)
	{
	case 0: /* Data-processing */
	  err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					     "data-processing",
					     dsc);
	  break;
	case 1: /* Special data instructions and branch and exchange.  */
	  {
	    unsigned short op = bits (insn1, 7, 9);
	    if (op == 6 || op == 7) /* BX or BLX */
	      err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
	    else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers.  */
	      err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
						 dsc);
	  }
	  break;
	default: /* LDR (literal) */
	  err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
	}
      break;
    case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
      break;
    case 10:
      if (op_bit_10_11 < 2) /* Generate PC-relative address */
	err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
      else /* Generate SP-relative address */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
      break;
    case 11: /* Misc 16-bit instructions */
      {
	switch (bits (insn1, 8, 11))
	  {
	  case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
	    err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
	    break;
	  case 12: case 13: /* POP */
	    if (bit (insn1, 8)) /* PC is in register list.  */
	      err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
	    break;
	  case 15: /* If-Then, and hints */
	    if (bits (insn1, 0, 3))
	      /* If-Then makes up to four following instructions conditional.
		 IT instruction itself is not conditional, so handle it as a
		 common unmodified instruction.  */
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
						 dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
	    break;
	  default:
	    err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
	  }
      }
      break;
    case 12:
      if (op_bit_10_11 < 2) /* Store multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
      else /* Load multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
      break;
    case 13: /* Conditional branch and supervisor call */
      if (bits (insn1, 9, 11) != 7) /* conditional branch */
	err = thumb_copy_b (gdbarch, insn1, dsc);
      else
	err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
      break;
    case 14: /* Unconditional branch */
      err = thumb_copy_b (gdbarch, insn1, dsc);
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_16bit_insn: Instruction decode error"));
}
/* Decode 32-bit Thumb-2 load / memory-hint instructions (preloads,
   LDR{B,SB,H,SH} and LDR in literal, immediate and register forms) and
   dispatch to the matching displaced-stepping copy routine.  Returns
   the copy routine's result.  */

static int
decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
				 uint16_t insn1, uint16_t insn2,
				 struct regcache *regs,
				 arm_displaced_step_closure *dsc)
{
  int rt = bits (insn2, 12, 15);	/* Destination register.  */
  int rn = bits (insn1, 0, 3);		/* Base register; 0xf = PC.  */
  int op1 = bits (insn1, 7, 8);

  switch (bits (insn1, 5, 6))
    {
    case 0: /* Load byte and memory hints */
      if (rt == 0xf) /* PLD/PLI */
	{
	  if (rn == 0xf)
	    /* PLD literal or Encoding T3 of PLI(immediate, literal).  */
	    return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"pli/pld", dsc);
	}
      else
	{
	  if (rn == 0xf) /* LDRB/LDRSB (literal) */
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     1);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrb{reg, immediate}/ldrbt",
						dsc);
	}
      break;
    case 1: /* Load halfword and memory hints.  */
      if (rt == 0xf) /* PLD{W} and Unalloc memory hint.  */
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "pld/unalloc memhint", dsc);
      else
	{
	  if (rn == 0xf)
	    /* LDRH/LDRSH (literal): PC-relative, needs fixing up.  */
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     2);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrh/ldrht", dsc);
	}
      break;
    case 2: /* Load word */
      {
	int insn2_bit_8_11 = bits (insn2, 8, 11);

	if (rn == 0xf)
	  /* LDR (literal).  */
	  return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
	else if (op1 == 0x1) /* Encoding T3 */
	  return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
					   0, 1);
	else /* op1 == 0x0 */
	  {
	    if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
	      /* LDR (immediate) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, bit (insn2, 8), 1);
	    else if (insn2_bit_8_11 == 0xe) /* LDRT */
	      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						  "ldrt", dsc);
	    else
	      /* LDR (register) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, 0, 0);
	  }
      }
      break;
    default: /* Undefined in this encoding space.  */
      return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      break;
    }

  return 0;
}
/* Decode a 32-bit Thumb-2 instruction (halfwords INSN1/INSN2) and
   dispatch to the appropriate displaced-stepping copy routine.
   Raises an internal error on a decode failure.  */

static void
thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    uint16_t insn2, struct regcache *regs,
				    arm_displaced_step_closure *dsc)
{
  int err = 0;
  unsigned short op = bit (insn2, 15);
  unsigned int op1 = bits (insn1, 11, 12);

  switch (op1)
    {
    case 1:
      {
	switch (bits (insn1, 9, 10))
	  {
	  case 0:
	    if (bit (insn1, 6))
	      {
		/* Load/store {dual, exclusive}, table branch.  */
		if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
		    && bits (insn2, 5, 7) == 0)
		  err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
						  dsc);
		else
		  /* PC is not allowed to use in load/store {dual, exclusive}
		     instructions.  */
		  err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						     "load/store dual/ex", dsc);
	      }
	    else /* load/store multiple */
	      {
		switch (bits (insn1, 7, 8))
		  {
		  case 0: case 3: /* SRS, RFE */
		    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						       "srs/rfe", dsc);
		    break;
		  case 1: case 2: /* LDM/STM/PUSH/POP */
		    err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
		    break;
		  }
	      }
	    break;
	  case 1:
	    /* Data-processing (shift register).  */
	    err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
					      dsc);
	    break;
	  default: /* Coprocessor instructions.  */
	    err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	    break;
	  }
	break;
      }
    case 2: /* op1 = 2 */
      if (op) /* Branch and misc control.  */
	{
	  if (bit (insn2, 14)  /* BLX/BL */
	      || bit (insn2, 12) /* Unconditional branch */
	      || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
	    err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
	  else
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "misc ctrl", dsc);
	}
      else
	{
	  if (bit (insn1, 9)) /* Data processing (plain binary imm).  */
	    {
	      int dp_op = bits (insn1, 4, 8);
	      int rn = bits (insn1, 0, 3);
	      /* ADR / ADD(SP plus imm) with Rn == PC read the PC and
		 need the PC-relative handler.  */
	      if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
		err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
						    regs, dsc);
	      else
		err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						   "dp/pb", dsc);
	    }
	  else /* Data processing (modified immediate) */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "dp/mi", dsc);
	}
      break;
    case 3: /* op1 = 3 */
      switch (bits (insn1, 9, 10))
	{
	case 0:
	  if (bit (insn1, 4))
	    err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
						   regs, dsc);
	  else /* NEON Load/Store and Store single data item */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "neon elt/struct load/store",
					       dsc);
	  break;
	case 1: /* op1 = 3, bits (9, 10) == 1 */
	  switch (bits (insn1, 7, 8))
	    {
	    case 0: case 1: /* Data processing (register) */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "dp(reg)", dsc);
	      break;
	    case 2: /* Multiply and absolute difference */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "mul/mua/diff", dsc);
	      break;
	    case 3: /* Long multiply and divide */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "lmul/lmua", dsc);
	      break;
	    }
	  break;
	default: /* Coprocessor instructions */
	  err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	  break;
	}
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_32bit_insn: Instruction decode error"));
}
/* Decode and copy the Thumb instruction at FROM for displaced stepping,
   reading one or two halfwords from target memory and delegating to the
   16-bit or 32-bit decoder as appropriate.  */

static void
thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
			      struct regcache *regs,
			      arm_displaced_step_closure *dsc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  uint16_t insn1
    = read_memory_unsigned_integer (from, 2, byte_order_for_code);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
			"at %.8lx\n", insn1, (unsigned long) from);

  dsc->is_thumb = 1;
  /* The first halfword alone determines whether this is a 16-bit or
     32-bit Thumb instruction.  */
  dsc->insn_size = thumb_insn_size (insn1);
  if (thumb_insn_size (insn1) == 4)
    {
      uint16_t insn2
	= read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
      thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
    }
  else
    thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
}
/* Decode and copy the instruction at FROM for displaced stepping into
   the scratch space at TO, initializing DSC.  Dispatches to the Thumb
   path when the inferior is in Thumb mode, otherwise decodes the ARM
   instruction by its major opcode groups.  Raises an internal error on
   a decode failure.  */

void
arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
			    CORE_ADDR to, struct regcache *regs,
			    arm_displaced_step_closure *dsc)
{
  int err = 0;
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  uint32_t insn;

  /* Most displaced instructions use a 1-instruction scratch space, so set this
     here and override below if/when necessary.  */
  dsc->numinsns = 1;
  dsc->insn_addr = from;
  dsc->scratch_base = to;
  dsc->cleanup = NULL;
  dsc->wrote_to_pc = 0;

  if (!displaced_in_arm_mode (regs))
    return thumb_process_displaced_insn (gdbarch, from, regs, dsc);

  dsc->is_thumb = 0;
  insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
			"at %.8lx\n", (unsigned long) insn,
			(unsigned long) from);

  /* Unconditional (cond == 0xf) encodings have their own decode table;
     otherwise dispatch on bits 25-27 plus bit 4.  */
  if ((insn & 0xf0000000) == 0xf0000000)
    err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
  else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
    {
    case 0x0: case 0x1: case 0x2: case 0x3:
      err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
      break;

    case 0x4: case 0x5: case 0x6:
      err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
      break;

    case 0x7:
      err = arm_decode_media (gdbarch, insn, dsc);
      break;

    case 0x8: case 0x9: case 0xa: case 0xb:
      err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
      break;

    case 0xc: case 0xd: case 0xe: case 0xf:
      err = arm_decode_svc_copro (gdbarch, insn, regs, dsc);
      break;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("arm_process_displaced_insn: Instruction decode error"));
}
/* Actually set up the scratch space for a displaced instruction: write
   the modified instruction(s) recorded in DSC->modinsn to the scratch
   area at TO, followed by a breakpoint of the correct flavour so the
   inferior stops after the single step.  */

void
arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
			    CORE_ADDR to, arm_displaced_step_closure *dsc)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  unsigned int i, len, offset;
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  /* Thumb scratch instructions are 16-bit; ARM ones are 32-bit.  */
  int size = dsc->is_thumb ? 2 : 4;
  const gdb_byte *bkp_insn;

  offset = 0;
  /* Poke modified instruction(s).  */
  for (i = 0; i < dsc->numinsns; i++)
    {
      if (debug_displaced)
	{
	  fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
	  if (size == 4)
	    fprintf_unfiltered (gdb_stdlog, "%.8lx",
				dsc->modinsn[i]);
	  else if (size == 2)
	    fprintf_unfiltered (gdb_stdlog, "%.4x",
				(unsigned short) dsc->modinsn[i]);

	  fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
			      (unsigned long) to + offset);
	}

      write_memory_unsigned_integer (to + offset, size,
				     byte_order_for_code,
				     dsc->modinsn[i]);
      offset += size;
    }

  /* Choose the correct breakpoint instruction.  */
  if (dsc->is_thumb)
    {
      bkp_insn = tdep->thumb_breakpoint;
      len = tdep->thumb_breakpoint_size;
    }
  else
    {
      bkp_insn = tdep->arm_breakpoint;
      len = tdep->arm_breakpoint_size;
    }

  /* Put breakpoint afterwards.  */
  write_memory (to + offset, bkp_insn, len);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
			paddress (gdbarch, from), paddress (gdbarch, to));
}
/* Entry point for cleaning things up after a displaced instruction has been
   single-stepped: run the instruction-specific cleanup routine (if any),
   and if nothing wrote to the PC, advance it past the original
   instruction.  */

void
arm_displaced_step_fixup (struct gdbarch *gdbarch,
			  struct displaced_step_closure *dsc_,
			  CORE_ADDR from, CORE_ADDR to,
			  struct regcache *regs)
{
  arm_displaced_step_closure *dsc = (arm_displaced_step_closure *) dsc_;

  /* CLEANUP is set by the copy routine (may be NULL for instructions
     that need no fixup).  */
  if (dsc->cleanup)
    dsc->cleanup (gdbarch, regs, dsc);

  /* The copy routines set wrote_to_pc when the instruction itself
     changed the PC (e.g. a branch); otherwise resume after the
     original instruction.  */
  if (!dsc->wrote_to_pc)
    regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
				    dsc->insn_addr + dsc->insn_size);
}
7608 #include "bfd-in2.h"
7609 #include "libcoff.h"
/* Disassembly printer for ARM: when the address is a Thumb location,
   plant a fake Thumb COFF symbol in INFO so the opcodes disassembler
   switches to Thumb decoding, then delegate to the default printer.  */

static int
gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
{
  gdb_disassembler *di
    = static_cast<gdb_disassembler *>(info->application_data);
  struct gdbarch *gdbarch = di->arch ();

  if (arm_pc_is_thumb (gdbarch, memaddr))
    {
      /* Static: the fake symbol is built once and reused for every
	 subsequent Thumb disassembly.  */
      static asymbol *asym;
      static combined_entry_type ce;
      static struct coff_symbol_struct csym;
      static struct bfd fake_bfd;
      static bfd_target fake_target;

      if (csym.native == NULL)
	{
	  /* Create a fake symbol vector containing a Thumb symbol.
	     This is solely so that the code in print_insn_little_arm()
	     and print_insn_big_arm() in opcodes/arm-dis.c will detect
	     the presence of a Thumb symbol and switch to decoding
	     Thumb instructions.  */

	  fake_target.flavour = bfd_target_coff_flavour;
	  fake_bfd.xvec = &fake_target;
	  ce.u.syment.n_sclass = C_THUMBEXTFUNC;
	  csym.native = &ce;
	  csym.symbol.the_bfd = &fake_bfd;
	  csym.symbol.name = "fake";
	  asym = (asymbol *) & csym;
	}

      /* Strip the Thumb bit before printing the address.  */
      memaddr = UNMAKE_THUMB_ADDR (memaddr);
      info->symbols = &asym;
    }
  else
    info->symbols = NULL;

  /* GDB is able to get bfd_mach from the exe_bfd, info->mach is
     accurate, so mark USER_SPECIFIED_MACHINE_TYPE bit.  Otherwise,
     opcodes/arm-dis.c:print_insn reset info->mach, and it will trigger
     the assert on the mismatch of info->mach and bfd_get_mach (exec_bfd)
     in default_print_insn.  */
  if (exec_bfd != NULL)
    info->flags |= USER_SPECIFIED_MACHINE_TYPE;

  return default_print_insn (memaddr, info);
}
7660 /* The following define instruction sequences that will cause ARM
7661 cpu's to take an undefined instruction trap. These are used to
7662 signal a breakpoint to GDB.
7664 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
7665 modes. A different instruction is required for each mode. The ARM
7666 cpu's can also be big or little endian. Thus four different
7667 instructions are needed to support all cases.
7669 Note: ARMv4 defines several new instructions that will take the
7670 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
7671 not in fact add the new instructions. The new undefined
7672 instructions in ARMv4 are all instructions that had no defined
7673 behaviour in earlier chips. There is no guarantee that they will
7674 raise an exception, but may be treated as NOP's. In practice, it
7675 may only be safe to rely on instructions matching:
7677 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
7678 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
7679 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
7681 Even this may only be true if the condition predicate is true. The
7682 following use a condition predicate of ALWAYS so it is always TRUE.
7684 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
7685 and NetBSD all use a software interrupt rather than an undefined
7686 instruction to force a trap. This can be handled by the
7687 abi-specific code during establishment of the gdbarch vector. */
7689 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
7690 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
7691 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
7692 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
7694 static const gdb_byte arm_default_arm_le_breakpoint
[] = ARM_LE_BREAKPOINT
;
7695 static const gdb_byte arm_default_arm_be_breakpoint
[] = ARM_BE_BREAKPOINT
;
7696 static const gdb_byte arm_default_thumb_le_breakpoint
[] = THUMB_LE_BREAKPOINT
;
7697 static const gdb_byte arm_default_thumb_be_breakpoint
[] = THUMB_BE_BREAKPOINT
;
/* Implement the breakpoint_kind_from_pc gdbarch method.  Returns the
   breakpoint kind (ARM, 16-bit Thumb, or 32-bit Thumb-2) for the
   address at *PCPTR, stripping the Thumb bit from *PCPTR as a side
   effect for Thumb addresses.  */

static int
arm_breakpoint_kind_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);

  if (arm_pc_is_thumb (gdbarch, *pcptr))
    {
      *pcptr = UNMAKE_THUMB_ADDR (*pcptr);

      /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
	 check whether we are replacing a 32-bit instruction.  */
      if (tdep->thumb2_breakpoint != NULL)
	{
	  gdb_byte buf[2];

	  if (target_read_memory (*pcptr, buf, 2) == 0)
	    {
	      unsigned short inst1;

	      inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
	      if (thumb_insn_size (inst1) == 4)
		return ARM_BP_KIND_THUMB2;
	    }
	  /* If the read failed, fall back to the 16-bit Thumb kind.  */
	}

      return ARM_BP_KIND_THUMB;
    }
  else
    return ARM_BP_KIND_ARM;
}
/* Implement the sw_breakpoint_from_kind gdbarch method.  Maps a
   breakpoint KIND (as returned by arm_breakpoint_kind_from_pc) to the
   corresponding breakpoint instruction bytes, storing the length in
   *SIZE.  */

static const gdb_byte *
arm_sw_breakpoint_from_kind (struct gdbarch *gdbarch, int kind, int *size)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  switch (kind)
    {
    case ARM_BP_KIND_ARM:
      *size = tdep->arm_breakpoint_size;
      return tdep->arm_breakpoint;
    case ARM_BP_KIND_THUMB:
      *size = tdep->thumb_breakpoint_size;
      return tdep->thumb_breakpoint;
    case ARM_BP_KIND_THUMB2:
      *size = tdep->thumb2_breakpoint_size;
      return tdep->thumb2_breakpoint;
    default:
      gdb_assert_not_reached ("unexpected arm breakpoint kind");
    }
}
/* Implement the breakpoint_kind_from_current_state gdbarch method.
   When *PCPTR is one of the possible next PCs of the instruction at
   the current PC, use software single-step analysis to determine the
   execution mode at the destination and hence the breakpoint kind;
   otherwise fall back to arm_breakpoint_kind_from_pc.  */

static int
arm_breakpoint_kind_from_current_state (struct gdbarch *gdbarch,
					struct regcache *regcache,
					CORE_ADDR *pcptr)
{
  gdb_byte buf[4];

  /* Check the memory pointed by PC is readable.  */
  if (target_read_memory (regcache_read_pc (regcache), buf, 4) == 0)
    {
      struct arm_get_next_pcs next_pcs_ctx;

      arm_get_next_pcs_ctor (&next_pcs_ctx,
			     &arm_get_next_pcs_ops,
			     gdbarch_byte_order (gdbarch),
			     gdbarch_byte_order_for_code (gdbarch),
			     0,
			     regcache);

      std::vector<CORE_ADDR> next_pcs = arm_get_next_pcs (&next_pcs_ctx);

      /* If MEMADDR is the next instruction of current pc, do the
	 software single step computation, and get the thumb mode by
	 the destination address.  */
      for (CORE_ADDR pc : next_pcs)
	{
	  if (UNMAKE_THUMB_ADDR (pc) == *pcptr)
	    {
	      if (IS_THUMB_ADDR (pc))
		{
		  *pcptr = MAKE_THUMB_ADDR (*pcptr);
		  return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
		}
	      else
		return ARM_BP_KIND_ARM;
	    }
	}
    }

  return arm_breakpoint_kind_from_pc (gdbarch, pcptr);
}
/* Extract from an array REGBUF containing the (raw) register state a
   function return value of type TYPE, and copy that, in virtual
   format, into VALBUF.  */

static void
arm_extract_return_value (struct type *type, struct regcache *regs,
			  gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = regs->arch ();
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE_FLT == TYPE_CODE (type))
    {
      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:
	  {
	    /* The value is in register F0 in internal format.  We need to
	       extract the raw value and then convert it to the desired
	       internal type.  */
	    bfd_byte tmpbuf[ARM_FP_REGISTER_SIZE];

	    regs->cooked_read (ARM_F0_REGNUM, tmpbuf);
	    target_float_convert (tmpbuf, arm_ext_type (gdbarch),
				  valbuf, type);
	  }
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  /* Soft-float and variadic-VFP values come back in r0 (and r1
	     for doubles).  */
	  regs->cooked_read (ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regs->cooked_read (ARM_A1_REGNUM + 1,
			       valbuf + ARM_INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_extract_return_value: "
			    "Floating point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_IS_REFERENCE (type)
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      /* If the type is a plain integer, then the access is
	 straight-forward.  Otherwise we have to play around a bit
	 more.  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      ULONGEST tmp;

      while (len > 0)
	{
	  /* By using store_unsigned_integer we avoid having to do
	     anything special for small big-endian values.  */
	  regcache_cooked_read_unsigned (regs, regno++, &tmp);
	  store_unsigned_integer (valbuf,
				  (len > ARM_INT_REGISTER_SIZE
				   ? ARM_INT_REGISTER_SIZE : len),
				  byte_order, tmp);
	  len -= ARM_INT_REGISTER_SIZE;
	  valbuf += ARM_INT_REGISTER_SIZE;
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];

      while (len > 0)
	{
	  regs->cooked_read (regno++, tmpbuf);
	  memcpy (valbuf, tmpbuf,
		  len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
	  len -= ARM_INT_REGISTER_SIZE;
	  valbuf += ARM_INT_REGISTER_SIZE;
	}
    }
}
/* Will a function return an aggregate type in memory or in a
   register?  Return 0 if an aggregate type can be returned in a
   register, 1 if it must be returned in memory.  */

static int
arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
{
  enum type_code code;

  type = check_typedef (type);

  /* Simple, non-aggregate types (ie not including vectors and
     complex) are always returned in a register (or registers).  */
  code = TYPE_CODE (type);
  if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
      && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
    return 0;

  if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
    {
      /* Vector values should be returned using ARM registers if they
	 are not over 16 bytes.  */
      return (TYPE_LENGTH (type) > 16);
    }

  if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
    {
      /* The AAPCS says all aggregates not larger than a word are returned
	 in a register.  */
      if (TYPE_LENGTH (type) <= ARM_INT_REGISTER_SIZE)
	return 0;
      else
	return 1;
    }
  else
    {
      int nRc;

      /* All aggregate types that won't fit in a register must be returned
	 in memory.  */
      if (TYPE_LENGTH (type) > ARM_INT_REGISTER_SIZE)
	return 1;

      /* In the ARM ABI, "integer" like aggregate types are returned in
	 registers.  For an aggregate type to be integer like, its size
	 must be less than or equal to ARM_INT_REGISTER_SIZE and the
	 offset of each addressable subfield must be zero.  Note that bit
	 fields are not addressable, and all addressable subfields of
	 unions always start at offset zero.

	 This function is based on the behaviour of GCC 2.95.1.
	 See: gcc/arm.c: arm_return_in_memory() for details.

	 Note: All versions of GCC before GCC 2.95.2 do not set up the
	 parameters correctly for a function returning the following
	 structure: struct { float f;}; This should be returned in memory,
	 not a register.  Richard Earnshaw sent me a patch, but I do not
	 know of any way to detect if a function like the above has been
	 compiled with the correct calling convention.  */

      /* Assume all other aggregate types can be returned in a register.
	 Run a check for structures, unions and arrays.  */
      nRc = 0;

      if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
	{
	  int i;

	  /* Need to check if this struct/union is "integer" like.  For
	     this to be true, its size must be less than or equal to
	     ARM_INT_REGISTER_SIZE and the offset of each addressable
	     subfield must be zero.  Note that bit fields are not
	     addressable, and unions always start at offset zero.  If any
	     of the subfields is a floating point type, the struct/union
	     cannot be an integer type.  */

	  /* For each field in the object, check:
	     1) Is it FP? --> yes, nRc = 1;
	     2) Is it addressable (bitpos != 0) and
	     not packed (bitsize == 0)?  */
	  for (i = 0; i < TYPE_NFIELDS (type); i++)
	    {
	      enum type_code field_type_code;

	      field_type_code
		= TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
							     i)));

	      /* Is it a floating point type field?  */
	      if (field_type_code == TYPE_CODE_FLT)
		{
		  nRc = 1;
		  break;
		}

	      /* If bitpos != 0, then we have to care about it.  */
	      if (TYPE_FIELD_BITPOS (type, i) != 0)
		{
		  /* Bitfields are not addressable.  If the field bitsize is
		     zero, then the field is not packed.  Hence it cannot be
		     a bitfield or any other packed type.  */
		  if (TYPE_FIELD_BITSIZE (type, i) == 0)
		    {
		      nRc = 1;
		      break;
		    }
		}
	    }
	}

      return nRc;
    }
}
/* Write into appropriate registers a function return value of type
   TYPE, given in virtual format.  */

static void
arm_store_return_value (struct type *type, struct regcache *regs,
			const gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = regs->arch ();
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE (type) == TYPE_CODE_FLT)
    {
      gdb_byte buf[ARM_FP_REGISTER_SIZE];

      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:
	  /* Convert to the FPA extended format before writing F0.  */
	  target_float_convert (valbuf, type, buf, arm_ext_type (gdbarch));
	  regs->cooked_write (ARM_F0_REGNUM, buf);
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  regs->cooked_write (ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regs->cooked_write (ARM_A1_REGNUM + 1,
				valbuf + ARM_INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_store_return_value: Floating "
			    "point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_IS_REFERENCE (type)
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      if (TYPE_LENGTH (type) <= 4)
	{
	  /* Values of one word or less are zero/sign-extended and
	     returned in r0.  */
	  bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];
	  LONGEST val = unpack_long (type, valbuf);

	  store_signed_integer (tmpbuf, ARM_INT_REGISTER_SIZE, byte_order, val);
	  regs->cooked_write (ARM_A1_REGNUM, tmpbuf);
	}
      else
	{
	  /* Integral values greater than one word are stored in consecutive
	     registers starting with r0.  This will always be a multiple of
	     the register size.  */
	  int len = TYPE_LENGTH (type);
	  int regno = ARM_A1_REGNUM;

	  while (len > 0)
	    {
	      regs->cooked_write (regno++, valbuf);
	      len -= ARM_INT_REGISTER_SIZE;
	      valbuf += ARM_INT_REGISTER_SIZE;
	    }
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[ARM_INT_REGISTER_SIZE];

      while (len > 0)
	{
	  memcpy (tmpbuf, valbuf,
		  len > ARM_INT_REGISTER_SIZE ? ARM_INT_REGISTER_SIZE : len);
	  regs->cooked_write (regno++, tmpbuf);
	  len -= ARM_INT_REGISTER_SIZE;
	  valbuf += ARM_INT_REGISTER_SIZE;
	}
    }
}
/* Handle function return values.  Implements the return_value gdbarch
   method: decides between register and struct (memory) conventions,
   and reads/writes the return-value registers via READBUF/WRITEBUF
   when non-NULL.  VFP CPRC candidates use d/s/q registers per the
   hard-float AAPCS.  */

static enum return_value_convention
arm_return_value (struct gdbarch *gdbarch, struct value *function,
		  struct type *valtype, struct regcache *regcache,
		  gdb_byte *readbuf, const gdb_byte *writebuf)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  struct type *func_type = function ? value_type (function) : NULL;
  enum arm_vfp_cprc_base_type vfp_base_type;
  int vfp_base_count;

  if (arm_vfp_abi_for_function (gdbarch, func_type)
      && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
    {
      /* VFP co-processor register candidate: value lives in
	 consecutive s/d/q registers, one unit per base element.  */
      int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
      int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
      int i;

      for (i = 0; i < vfp_base_count; i++)
	{
	  if (reg_char == 'q')
	    {
	      /* Quad registers are accessed as two doubles; use the
		 NEON helpers.  */
	      if (writebuf)
		arm_neon_quad_write (gdbarch, regcache, i,
				     writebuf + i * unit_length);

	      if (readbuf)
		arm_neon_quad_read (gdbarch, regcache, i,
				    readbuf + i * unit_length);
	    }
	  else
	    {
	      char name_buf[4];
	      int regnum;

	      xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
	      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						    strlen (name_buf));
	      if (writebuf)
		regcache->cooked_write (regnum, writebuf + i * unit_length);
	      if (readbuf)
		regcache->cooked_read (regnum, readbuf + i * unit_length);
	    }
	}
      return RETURN_VALUE_REGISTER_CONVENTION;
    }

  if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
      || TYPE_CODE (valtype) == TYPE_CODE_UNION
      || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
    {
      if (tdep->struct_return == pcc_struct_return
	  || arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    }
  else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
    {
      if (arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    }

  if (writebuf)
    arm_store_return_value (valtype, regcache, writebuf);

  if (readbuf)
    arm_extract_return_value (valtype, regcache, readbuf);

  return RETURN_VALUE_REGISTER_CONVENTION;
}
8177 arm_get_longjmp_target (struct frame_info
*frame
, CORE_ADDR
*pc
)
8179 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
8180 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
8181 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
8183 gdb_byte buf
[ARM_INT_REGISTER_SIZE
];
8185 jb_addr
= get_frame_register_unsigned (frame
, ARM_A1_REGNUM
);
8187 if (target_read_memory (jb_addr
+ tdep
->jb_pc
* tdep
->jb_elt_size
, buf
,
8188 ARM_INT_REGISTER_SIZE
))
8191 *pc
= extract_unsigned_integer (buf
, ARM_INT_REGISTER_SIZE
, byte_order
);
8195 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
8196 return the target PC. Otherwise return 0. */
8199 arm_skip_stub (struct frame_info
*frame
, CORE_ADDR pc
)
8203 CORE_ADDR start_addr
;
8205 /* Find the starting address and name of the function containing the PC. */
8206 if (find_pc_partial_function (pc
, &name
, &start_addr
, NULL
) == 0)
8208 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
8210 start_addr
= arm_skip_bx_reg (frame
, pc
);
8211 if (start_addr
!= 0)
8217 /* If PC is in a Thumb call or return stub, return the address of the
8218 target PC, which is in a register. The thunk functions are called
8219 _call_via_xx, where x is the register name. The possible names
8220 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
8221 functions, named __ARM_call_via_r[0-7]. */
8222 if (startswith (name
, "_call_via_")
8223 || startswith (name
, "__ARM_call_via_"))
8225 /* Use the name suffix to determine which register contains the
8227 static const char *table
[15] =
8228 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
8229 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
8232 int offset
= strlen (name
) - 2;
8234 for (regno
= 0; regno
<= 14; regno
++)
8235 if (strcmp (&name
[offset
], table
[regno
]) == 0)
8236 return get_frame_register_unsigned (frame
, regno
);
8239 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
8240 non-interworking calls to foo. We could decode the stubs
8241 to find the target but it's easier to use the symbol table. */
8242 namelen
= strlen (name
);
8243 if (name
[0] == '_' && name
[1] == '_'
8244 && ((namelen
> 2 + strlen ("_from_thumb")
8245 && startswith (name
+ namelen
- strlen ("_from_thumb"), "_from_thumb"))
8246 || (namelen
> 2 + strlen ("_from_arm")
8247 && startswith (name
+ namelen
- strlen ("_from_arm"), "_from_arm"))))
8250 int target_len
= namelen
- 2;
8251 struct bound_minimal_symbol minsym
;
8252 struct objfile
*objfile
;
8253 struct obj_section
*sec
;
8255 if (name
[namelen
- 1] == 'b')
8256 target_len
-= strlen ("_from_thumb");
8258 target_len
-= strlen ("_from_arm");
8260 target_name
= (char *) alloca (target_len
+ 1);
8261 memcpy (target_name
, name
+ 2, target_len
);
8262 target_name
[target_len
] = '\0';
8264 sec
= find_pc_section (pc
);
8265 objfile
= (sec
== NULL
) ? NULL
: sec
->objfile
;
8266 minsym
= lookup_minimal_symbol (target_name
, NULL
, objfile
);
8267 if (minsym
.minsym
!= NULL
)
8268 return BMSYMBOL_VALUE_ADDRESS (minsym
);
8273 return 0; /* not a stub */
8277 set_arm_command (const char *args
, int from_tty
)
8279 printf_unfiltered (_("\
8280 \"set arm\" must be followed by an apporpriate subcommand.\n"));
8281 help_list (setarmcmdlist
, "set arm ", all_commands
, gdb_stdout
);
8285 show_arm_command (const char *args
, int from_tty
)
8287 cmd_show_list (showarmcmdlist
, from_tty
, "");
8291 arm_update_current_architecture (void)
8293 struct gdbarch_info info
;
8295 /* If the current architecture is not ARM, we have nothing to do. */
8296 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch
!= bfd_arch_arm
)
8299 /* Update the architecture. */
8300 gdbarch_info_init (&info
);
8302 if (!gdbarch_update_p (info
))
8303 internal_error (__FILE__
, __LINE__
, _("could not update architecture"));
8307 set_fp_model_sfunc (const char *args
, int from_tty
,
8308 struct cmd_list_element
*c
)
8312 for (fp_model
= ARM_FLOAT_AUTO
; fp_model
!= ARM_FLOAT_LAST
; fp_model
++)
8313 if (strcmp (current_fp_model
, fp_model_strings
[fp_model
]) == 0)
8315 arm_fp_model
= (enum arm_float_model
) fp_model
;
8319 if (fp_model
== ARM_FLOAT_LAST
)
8320 internal_error (__FILE__
, __LINE__
, _("Invalid fp model accepted: %s."),
8323 arm_update_current_architecture ();
8327 show_fp_model (struct ui_file
*file
, int from_tty
,
8328 struct cmd_list_element
*c
, const char *value
)
8330 struct gdbarch_tdep
*tdep
= gdbarch_tdep (target_gdbarch ());
8332 if (arm_fp_model
== ARM_FLOAT_AUTO
8333 && gdbarch_bfd_arch_info (target_gdbarch ())->arch
== bfd_arch_arm
)
8334 fprintf_filtered (file
, _("\
8335 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
8336 fp_model_strings
[tdep
->fp_model
]);
8338 fprintf_filtered (file
, _("\
8339 The current ARM floating point model is \"%s\".\n"),
8340 fp_model_strings
[arm_fp_model
]);
8344 arm_set_abi (const char *args
, int from_tty
,
8345 struct cmd_list_element
*c
)
8349 for (arm_abi
= ARM_ABI_AUTO
; arm_abi
!= ARM_ABI_LAST
; arm_abi
++)
8350 if (strcmp (arm_abi_string
, arm_abi_strings
[arm_abi
]) == 0)
8352 arm_abi_global
= (enum arm_abi_kind
) arm_abi
;
8356 if (arm_abi
== ARM_ABI_LAST
)
8357 internal_error (__FILE__
, __LINE__
, _("Invalid ABI accepted: %s."),
8360 arm_update_current_architecture ();
8364 arm_show_abi (struct ui_file
*file
, int from_tty
,
8365 struct cmd_list_element
*c
, const char *value
)
8367 struct gdbarch_tdep
*tdep
= gdbarch_tdep (target_gdbarch ());
8369 if (arm_abi_global
== ARM_ABI_AUTO
8370 && gdbarch_bfd_arch_info (target_gdbarch ())->arch
== bfd_arch_arm
)
8371 fprintf_filtered (file
, _("\
8372 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
8373 arm_abi_strings
[tdep
->arm_abi
]);
8375 fprintf_filtered (file
, _("The current ARM ABI is \"%s\".\n"),
8380 arm_show_fallback_mode (struct ui_file
*file
, int from_tty
,
8381 struct cmd_list_element
*c
, const char *value
)
8383 fprintf_filtered (file
,
8384 _("The current execution mode assumed "
8385 "(when symbols are unavailable) is \"%s\".\n"),
8386 arm_fallback_mode_string
);
8390 arm_show_force_mode (struct ui_file
*file
, int from_tty
,
8391 struct cmd_list_element
*c
, const char *value
)
8393 fprintf_filtered (file
,
8394 _("The current execution mode assumed "
8395 "(even when symbols are available) is \"%s\".\n"),
8396 arm_force_mode_string
);
8399 /* If the user changes the register disassembly style used for info
8400 register and other commands, we have to also switch the style used
8401 in opcodes for disassembly output. This function is run in the "set
8402 arm disassembly" command, and does that. */
8405 set_disassembly_style_sfunc (const char *args
, int from_tty
,
8406 struct cmd_list_element
*c
)
8408 /* Convert the short style name into the long style name (eg, reg-names-*)
8409 before calling the generic set_disassembler_options() function. */
8410 std::string long_name
= std::string ("reg-names-") + disassembly_style
;
8411 set_disassembler_options (&long_name
[0]);
8415 show_disassembly_style_sfunc (struct ui_file
*file
, int from_tty
,
8416 struct cmd_list_element
*c
, const char *value
)
8418 struct gdbarch
*gdbarch
= get_current_arch ();
8419 char *options
= get_disassembler_options (gdbarch
);
8420 const char *style
= "";
8424 FOR_EACH_DISASSEMBLER_OPTION (opt
, options
)
8425 if (CONST_STRNEQ (opt
, "reg-names-"))
8427 style
= &opt
[strlen ("reg-names-")];
8428 len
= strcspn (style
, ",");
8431 fprintf_unfiltered (file
, "The disassembly style is \"%.*s\".\n", len
, style
);
8434 /* Return the ARM register name corresponding to register I. */
8436 arm_register_name (struct gdbarch
*gdbarch
, int i
)
8438 const int num_regs
= gdbarch_num_regs (gdbarch
);
8440 if (gdbarch_tdep (gdbarch
)->have_vfp_pseudos
8441 && i
>= num_regs
&& i
< num_regs
+ 32)
8443 static const char *const vfp_pseudo_names
[] = {
8444 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
8445 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
8446 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
8447 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
8450 return vfp_pseudo_names
[i
- num_regs
];
8453 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
8454 && i
>= num_regs
+ 32 && i
< num_regs
+ 32 + 16)
8456 static const char *const neon_pseudo_names
[] = {
8457 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
8458 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
8461 return neon_pseudo_names
[i
- num_regs
- 32];
8464 if (i
>= ARRAY_SIZE (arm_register_names
))
8465 /* These registers are only supported on targets which supply
8466 an XML description. */
8469 return arm_register_names
[i
];
8472 /* Test whether the coff symbol specific value corresponds to a Thumb
8476 coff_sym_is_thumb (int val
)
8478 return (val
== C_THUMBEXT
8479 || val
== C_THUMBSTAT
8480 || val
== C_THUMBEXTFUNC
8481 || val
== C_THUMBSTATFUNC
8482 || val
== C_THUMBLABEL
);
8485 /* arm_coff_make_msymbol_special()
8486 arm_elf_make_msymbol_special()
8488 These functions test whether the COFF or ELF symbol corresponds to
8489 an address in thumb code, and set a "special" bit in a minimal
8490 symbol to indicate that it does. */
8493 arm_elf_make_msymbol_special(asymbol
*sym
, struct minimal_symbol
*msym
)
8495 elf_symbol_type
*elfsym
= (elf_symbol_type
*) sym
;
8497 if (ARM_GET_SYM_BRANCH_TYPE (elfsym
->internal_elf_sym
.st_target_internal
)
8498 == ST_BRANCH_TO_THUMB
)
8499 MSYMBOL_SET_SPECIAL (msym
);
8503 arm_coff_make_msymbol_special(int val
, struct minimal_symbol
*msym
)
8505 if (coff_sym_is_thumb (val
))
8506 MSYMBOL_SET_SPECIAL (msym
);
8510 arm_record_special_symbol (struct gdbarch
*gdbarch
, struct objfile
*objfile
,
8513 const char *name
= bfd_asymbol_name (sym
);
8514 struct arm_per_objfile
*data
;
8515 struct arm_mapping_symbol new_map_sym
;
8517 gdb_assert (name
[0] == '$');
8518 if (name
[1] != 'a' && name
[1] != 't' && name
[1] != 'd')
8521 data
= arm_objfile_data_key
.get (objfile
);
8523 data
= arm_objfile_data_key
.emplace (objfile
,
8524 objfile
->obfd
->section_count
);
8525 arm_mapping_symbol_vec
&map
8526 = data
->section_maps
[bfd_get_section (sym
)->index
];
8528 new_map_sym
.value
= sym
->value
;
8529 new_map_sym
.type
= name
[1];
8531 /* Insert at the end, the vector will be sorted on first use. */
8532 map
.push_back (new_map_sym
);
8536 arm_write_pc (struct regcache
*regcache
, CORE_ADDR pc
)
8538 struct gdbarch
*gdbarch
= regcache
->arch ();
8539 regcache_cooked_write_unsigned (regcache
, ARM_PC_REGNUM
, pc
);
8541 /* If necessary, set the T bit. */
8544 ULONGEST val
, t_bit
;
8545 regcache_cooked_read_unsigned (regcache
, ARM_PS_REGNUM
, &val
);
8546 t_bit
= arm_psr_thumb_bit (gdbarch
);
8547 if (arm_pc_is_thumb (gdbarch
, pc
))
8548 regcache_cooked_write_unsigned (regcache
, ARM_PS_REGNUM
,
8551 regcache_cooked_write_unsigned (regcache
, ARM_PS_REGNUM
,
8556 /* Read the contents of a NEON quad register, by reading from two
8557 double registers. This is used to implement the quad pseudo
8558 registers, and for argument passing in case the quad registers are
8559 missing; vectors are passed in quad registers when using the VFP
8560 ABI, even if a NEON unit is not present. REGNUM is the index of
8561 the quad register, in [0, 15]. */
8563 static enum register_status
8564 arm_neon_quad_read (struct gdbarch
*gdbarch
, readable_regcache
*regcache
,
8565 int regnum
, gdb_byte
*buf
)
8568 gdb_byte reg_buf
[8];
8569 int offset
, double_regnum
;
8570 enum register_status status
;
8572 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
<< 1);
8573 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
8576 /* d0 is always the least significant half of q0. */
8577 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
8582 status
= regcache
->raw_read (double_regnum
, reg_buf
);
8583 if (status
!= REG_VALID
)
8585 memcpy (buf
+ offset
, reg_buf
, 8);
8587 offset
= 8 - offset
;
8588 status
= regcache
->raw_read (double_regnum
+ 1, reg_buf
);
8589 if (status
!= REG_VALID
)
8591 memcpy (buf
+ offset
, reg_buf
, 8);
8596 static enum register_status
8597 arm_pseudo_read (struct gdbarch
*gdbarch
, readable_regcache
*regcache
,
8598 int regnum
, gdb_byte
*buf
)
8600 const int num_regs
= gdbarch_num_regs (gdbarch
);
8602 gdb_byte reg_buf
[8];
8603 int offset
, double_regnum
;
8605 gdb_assert (regnum
>= num_regs
);
8608 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
&& regnum
>= 32 && regnum
< 48)
8609 /* Quad-precision register. */
8610 return arm_neon_quad_read (gdbarch
, regcache
, regnum
- 32, buf
);
8613 enum register_status status
;
8615 /* Single-precision register. */
8616 gdb_assert (regnum
< 32);
8618 /* s0 is always the least significant half of d0. */
8619 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
8620 offset
= (regnum
& 1) ? 0 : 4;
8622 offset
= (regnum
& 1) ? 4 : 0;
8624 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
>> 1);
8625 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
8628 status
= regcache
->raw_read (double_regnum
, reg_buf
);
8629 if (status
== REG_VALID
)
8630 memcpy (buf
, reg_buf
+ offset
, 4);
8635 /* Store the contents of BUF to a NEON quad register, by writing to
8636 two double registers. This is used to implement the quad pseudo
8637 registers, and for argument passing in case the quad registers are
8638 missing; vectors are passed in quad registers when using the VFP
8639 ABI, even if a NEON unit is not present. REGNUM is the index
8640 of the quad register, in [0, 15]. */
8643 arm_neon_quad_write (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
8644 int regnum
, const gdb_byte
*buf
)
8647 int offset
, double_regnum
;
8649 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
<< 1);
8650 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
8653 /* d0 is always the least significant half of q0. */
8654 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
8659 regcache
->raw_write (double_regnum
, buf
+ offset
);
8660 offset
= 8 - offset
;
8661 regcache
->raw_write (double_regnum
+ 1, buf
+ offset
);
8665 arm_pseudo_write (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
8666 int regnum
, const gdb_byte
*buf
)
8668 const int num_regs
= gdbarch_num_regs (gdbarch
);
8670 gdb_byte reg_buf
[8];
8671 int offset
, double_regnum
;
8673 gdb_assert (regnum
>= num_regs
);
8676 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
&& regnum
>= 32 && regnum
< 48)
8677 /* Quad-precision register. */
8678 arm_neon_quad_write (gdbarch
, regcache
, regnum
- 32, buf
);
8681 /* Single-precision register. */
8682 gdb_assert (regnum
< 32);
8684 /* s0 is always the least significant half of d0. */
8685 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
8686 offset
= (regnum
& 1) ? 0 : 4;
8688 offset
= (regnum
& 1) ? 4 : 0;
8690 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
>> 1);
8691 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
8694 regcache
->raw_read (double_regnum
, reg_buf
);
8695 memcpy (reg_buf
+ offset
, buf
, 4);
8696 regcache
->raw_write (double_regnum
, reg_buf
);
8700 static struct value
*
8701 value_of_arm_user_reg (struct frame_info
*frame
, const void *baton
)
8703 const int *reg_p
= (const int *) baton
;
8704 return value_of_register (*reg_p
, frame
);
8707 static enum gdb_osabi
8708 arm_elf_osabi_sniffer (bfd
*abfd
)
8710 unsigned int elfosabi
;
8711 enum gdb_osabi osabi
= GDB_OSABI_UNKNOWN
;
8713 elfosabi
= elf_elfheader (abfd
)->e_ident
[EI_OSABI
];
8715 if (elfosabi
== ELFOSABI_ARM
)
8716 /* GNU tools use this value. Check note sections in this case,
8718 bfd_map_over_sections (abfd
,
8719 generic_elf_osabi_sniff_abi_tag_sections
,
8722 /* Anything else will be handled by the generic ELF sniffer. */
8727 arm_register_reggroup_p (struct gdbarch
*gdbarch
, int regnum
,
8728 struct reggroup
*group
)
8730 /* FPS register's type is INT, but belongs to float_reggroup. Beside
8731 this, FPS register belongs to save_regroup, restore_reggroup, and
8732 all_reggroup, of course. */
8733 if (regnum
== ARM_FPS_REGNUM
)
8734 return (group
== float_reggroup
8735 || group
== save_reggroup
8736 || group
== restore_reggroup
8737 || group
== all_reggroup
);
8739 return default_register_reggroup_p (gdbarch
, regnum
, group
);
8743 /* For backward-compatibility we allow two 'g' packet lengths with
8744 the remote protocol depending on whether FPA registers are
8745 supplied. M-profile targets do not have FPA registers, but some
8746 stubs already exist in the wild which use a 'g' packet which
8747 supplies them albeit with dummy values. The packet format which
8748 includes FPA registers should be considered deprecated for
8749 M-profile targets. */
8752 arm_register_g_packet_guesses (struct gdbarch
*gdbarch
)
8754 if (gdbarch_tdep (gdbarch
)->is_m
)
8756 /* If we know from the executable this is an M-profile target,
8757 cater for remote targets whose register set layout is the
8758 same as the FPA layout. */
8759 register_remote_g_packet_guess (gdbarch
,
8760 ARM_CORE_REGS_SIZE
+ ARM_FP_REGS_SIZE
,
8761 tdesc_arm_with_m_fpa_layout
);
8763 /* The regular M-profile layout. */
8764 register_remote_g_packet_guess (gdbarch
, ARM_CORE_REGS_SIZE
,
8767 /* M-profile plus M4F VFP. */
8768 register_remote_g_packet_guess (gdbarch
,
8769 ARM_CORE_REGS_SIZE
+ ARM_VFP2_REGS_SIZE
,
8770 tdesc_arm_with_m_vfp_d16
);
8773 /* Otherwise we don't have a useful guess. */
8776 /* Implement the code_of_frame_writable gdbarch method. */
8779 arm_code_of_frame_writable (struct gdbarch
*gdbarch
, struct frame_info
*frame
)
8781 if (gdbarch_tdep (gdbarch
)->is_m
8782 && get_frame_type (frame
) == SIGTRAMP_FRAME
)
8784 /* M-profile exception frames return to some magic PCs, where
8785 isn't writable at all. */
8792 /* Implement gdbarch_gnu_triplet_regexp. If the arch name is arm then allow it
8793 to be postfixed by a version (eg armv7hl). */
8796 arm_gnu_triplet_regexp (struct gdbarch
*gdbarch
)
8798 if (strcmp (gdbarch_bfd_arch_info (gdbarch
)->arch_name
, "arm") == 0)
8799 return "arm(v[^- ]*)?";
8800 return gdbarch_bfd_arch_info (gdbarch
)->arch_name
;
8803 /* Initialize the current architecture based on INFO. If possible,
8804 re-use an architecture from ARCHES, which is a list of
8805 architectures already created during this debugging session.
8807 Called e.g. at program startup, when reading a core file, and when
8808 reading a binary file. */
8810 static struct gdbarch
*
8811 arm_gdbarch_init (struct gdbarch_info info
, struct gdbarch_list
*arches
)
8813 struct gdbarch_tdep
*tdep
;
8814 struct gdbarch
*gdbarch
;
8815 struct gdbarch_list
*best_arch
;
8816 enum arm_abi_kind arm_abi
= arm_abi_global
;
8817 enum arm_float_model fp_model
= arm_fp_model
;
8818 struct tdesc_arch_data
*tdesc_data
= NULL
;
8820 int vfp_register_count
= 0, have_vfp_pseudos
= 0, have_neon_pseudos
= 0;
8821 int have_wmmx_registers
= 0;
8823 int have_fpa_registers
= 1;
8824 const struct target_desc
*tdesc
= info
.target_desc
;
8826 /* If we have an object to base this architecture on, try to determine
8829 if (arm_abi
== ARM_ABI_AUTO
&& info
.abfd
!= NULL
)
8831 int ei_osabi
, e_flags
;
8833 switch (bfd_get_flavour (info
.abfd
))
8835 case bfd_target_coff_flavour
:
8836 /* Assume it's an old APCS-style ABI. */
8838 arm_abi
= ARM_ABI_APCS
;
8841 case bfd_target_elf_flavour
:
8842 ei_osabi
= elf_elfheader (info
.abfd
)->e_ident
[EI_OSABI
];
8843 e_flags
= elf_elfheader (info
.abfd
)->e_flags
;
8845 if (ei_osabi
== ELFOSABI_ARM
)
8847 /* GNU tools used to use this value, but do not for EABI
8848 objects. There's nowhere to tag an EABI version
8849 anyway, so assume APCS. */
8850 arm_abi
= ARM_ABI_APCS
;
8852 else if (ei_osabi
== ELFOSABI_NONE
|| ei_osabi
== ELFOSABI_GNU
)
8854 int eabi_ver
= EF_ARM_EABI_VERSION (e_flags
);
8858 case EF_ARM_EABI_UNKNOWN
:
8859 /* Assume GNU tools. */
8860 arm_abi
= ARM_ABI_APCS
;
8863 case EF_ARM_EABI_VER4
:
8864 case EF_ARM_EABI_VER5
:
8865 arm_abi
= ARM_ABI_AAPCS
;
8866 /* EABI binaries default to VFP float ordering.
8867 They may also contain build attributes that can
8868 be used to identify if the VFP argument-passing
8870 if (fp_model
== ARM_FLOAT_AUTO
)
8873 switch (bfd_elf_get_obj_attr_int (info
.abfd
,
8877 case AEABI_VFP_args_base
:
8878 /* "The user intended FP parameter/result
8879 passing to conform to AAPCS, base
8881 fp_model
= ARM_FLOAT_SOFT_VFP
;
8883 case AEABI_VFP_args_vfp
:
8884 /* "The user intended FP parameter/result
8885 passing to conform to AAPCS, VFP
8887 fp_model
= ARM_FLOAT_VFP
;
8889 case AEABI_VFP_args_toolchain
:
8890 /* "The user intended FP parameter/result
8891 passing to conform to tool chain-specific
8892 conventions" - we don't know any such
8893 conventions, so leave it as "auto". */
8895 case AEABI_VFP_args_compatible
:
8896 /* "Code is compatible with both the base
8897 and VFP variants; the user did not permit
8898 non-variadic functions to pass FP
8899 parameters/results" - leave it as
8903 /* Attribute value not mentioned in the
8904 November 2012 ABI, so leave it as
8909 fp_model
= ARM_FLOAT_SOFT_VFP
;
8915 /* Leave it as "auto". */
8916 warning (_("unknown ARM EABI version 0x%x"), eabi_ver
);
8921 /* Detect M-profile programs. This only works if the
8922 executable file includes build attributes; GCC does
8923 copy them to the executable, but e.g. RealView does
8926 = bfd_elf_get_obj_attr_int (info
.abfd
, OBJ_ATTR_PROC
,
8929 = bfd_elf_get_obj_attr_int (info
.abfd
, OBJ_ATTR_PROC
,
8930 Tag_CPU_arch_profile
);
8932 /* GCC specifies the profile for v6-M; RealView only
8933 specifies the profile for architectures starting with
8934 V7 (as opposed to architectures with a tag
8935 numerically greater than TAG_CPU_ARCH_V7). */
8936 if (!tdesc_has_registers (tdesc
)
8937 && (attr_arch
== TAG_CPU_ARCH_V6_M
8938 || attr_arch
== TAG_CPU_ARCH_V6S_M
8939 || attr_profile
== 'M'))
8944 if (fp_model
== ARM_FLOAT_AUTO
)
8946 switch (e_flags
& (EF_ARM_SOFT_FLOAT
| EF_ARM_VFP_FLOAT
))
8949 /* Leave it as "auto". Strictly speaking this case
8950 means FPA, but almost nobody uses that now, and
8951 many toolchains fail to set the appropriate bits
8952 for the floating-point model they use. */
8954 case EF_ARM_SOFT_FLOAT
:
8955 fp_model
= ARM_FLOAT_SOFT_FPA
;
8957 case EF_ARM_VFP_FLOAT
:
8958 fp_model
= ARM_FLOAT_VFP
;
8960 case EF_ARM_SOFT_FLOAT
| EF_ARM_VFP_FLOAT
:
8961 fp_model
= ARM_FLOAT_SOFT_VFP
;
8966 if (e_flags
& EF_ARM_BE8
)
8967 info
.byte_order_for_code
= BFD_ENDIAN_LITTLE
;
8972 /* Leave it as "auto". */
8977 /* Check any target description for validity. */
8978 if (tdesc_has_registers (tdesc
))
8980 /* For most registers we require GDB's default names; but also allow
8981 the numeric names for sp / lr / pc, as a convenience. */
8982 static const char *const arm_sp_names
[] = { "r13", "sp", NULL
};
8983 static const char *const arm_lr_names
[] = { "r14", "lr", NULL
};
8984 static const char *const arm_pc_names
[] = { "r15", "pc", NULL
};
8986 const struct tdesc_feature
*feature
;
8989 feature
= tdesc_find_feature (tdesc
,
8990 "org.gnu.gdb.arm.core");
8991 if (feature
== NULL
)
8993 feature
= tdesc_find_feature (tdesc
,
8994 "org.gnu.gdb.arm.m-profile");
8995 if (feature
== NULL
)
9001 tdesc_data
= tdesc_data_alloc ();
9004 for (i
= 0; i
< ARM_SP_REGNUM
; i
++)
9005 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
, i
,
9006 arm_register_names
[i
]);
9007 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
9010 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
9013 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
9017 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
9018 ARM_PS_REGNUM
, "xpsr");
9020 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
9021 ARM_PS_REGNUM
, "cpsr");
9025 tdesc_data_cleanup (tdesc_data
);
9029 feature
= tdesc_find_feature (tdesc
,
9030 "org.gnu.gdb.arm.fpa");
9031 if (feature
!= NULL
)
9034 for (i
= ARM_F0_REGNUM
; i
<= ARM_FPS_REGNUM
; i
++)
9035 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
, i
,
9036 arm_register_names
[i
]);
9039 tdesc_data_cleanup (tdesc_data
);
9044 have_fpa_registers
= 0;
9046 feature
= tdesc_find_feature (tdesc
,
9047 "org.gnu.gdb.xscale.iwmmxt");
9048 if (feature
!= NULL
)
9050 static const char *const iwmmxt_names
[] = {
9051 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
9052 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
9053 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
9054 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
9058 for (i
= ARM_WR0_REGNUM
; i
<= ARM_WR15_REGNUM
; i
++)
9060 &= tdesc_numbered_register (feature
, tdesc_data
, i
,
9061 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
9063 /* Check for the control registers, but do not fail if they
9065 for (i
= ARM_WC0_REGNUM
; i
<= ARM_WCASF_REGNUM
; i
++)
9066 tdesc_numbered_register (feature
, tdesc_data
, i
,
9067 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
9069 for (i
= ARM_WCGR0_REGNUM
; i
<= ARM_WCGR3_REGNUM
; i
++)
9071 &= tdesc_numbered_register (feature
, tdesc_data
, i
,
9072 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
9076 tdesc_data_cleanup (tdesc_data
);
9080 have_wmmx_registers
= 1;
9083 /* If we have a VFP unit, check whether the single precision registers
9084 are present. If not, then we will synthesize them as pseudo
9086 feature
= tdesc_find_feature (tdesc
,
9087 "org.gnu.gdb.arm.vfp");
9088 if (feature
!= NULL
)
9090 static const char *const vfp_double_names
[] = {
9091 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
9092 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
9093 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
9094 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
9097 /* Require the double precision registers. There must be either
9100 for (i
= 0; i
< 32; i
++)
9102 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
9104 vfp_double_names
[i
]);
9108 if (!valid_p
&& i
== 16)
9111 /* Also require FPSCR. */
9112 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
9113 ARM_FPSCR_REGNUM
, "fpscr");
9116 tdesc_data_cleanup (tdesc_data
);
9120 if (tdesc_unnumbered_register (feature
, "s0") == 0)
9121 have_vfp_pseudos
= 1;
9123 vfp_register_count
= i
;
9125 /* If we have VFP, also check for NEON. The architecture allows
9126 NEON without VFP (integer vector operations only), but GDB
9127 does not support that. */
9128 feature
= tdesc_find_feature (tdesc
,
9129 "org.gnu.gdb.arm.neon");
9130 if (feature
!= NULL
)
9132 /* NEON requires 32 double-precision registers. */
9135 tdesc_data_cleanup (tdesc_data
);
9139 /* If there are quad registers defined by the stub, use
9140 their type; otherwise (normally) provide them with
9141 the default type. */
9142 if (tdesc_unnumbered_register (feature
, "q0") == 0)
9143 have_neon_pseudos
= 1;
9150 /* If there is already a candidate, use it. */
9151 for (best_arch
= gdbarch_list_lookup_by_info (arches
, &info
);
9153 best_arch
= gdbarch_list_lookup_by_info (best_arch
->next
, &info
))
9155 if (arm_abi
!= ARM_ABI_AUTO
9156 && arm_abi
!= gdbarch_tdep (best_arch
->gdbarch
)->arm_abi
)
9159 if (fp_model
!= ARM_FLOAT_AUTO
9160 && fp_model
!= gdbarch_tdep (best_arch
->gdbarch
)->fp_model
)
9163 /* There are various other properties in tdep that we do not
9164 need to check here: those derived from a target description,
9165 since gdbarches with a different target description are
9166 automatically disqualified. */
9168 /* Do check is_m, though, since it might come from the binary. */
9169 if (is_m
!= gdbarch_tdep (best_arch
->gdbarch
)->is_m
)
9172 /* Found a match. */
9176 if (best_arch
!= NULL
)
9178 if (tdesc_data
!= NULL
)
9179 tdesc_data_cleanup (tdesc_data
);
9180 return best_arch
->gdbarch
;
9183 tdep
= XCNEW (struct gdbarch_tdep
);
9184 gdbarch
= gdbarch_alloc (&info
, tdep
);
9186 /* Record additional information about the architecture we are defining.
9187 These are gdbarch discriminators, like the OSABI. */
9188 tdep
->arm_abi
= arm_abi
;
9189 tdep
->fp_model
= fp_model
;
9191 tdep
->have_fpa_registers
= have_fpa_registers
;
9192 tdep
->have_wmmx_registers
= have_wmmx_registers
;
9193 gdb_assert (vfp_register_count
== 0
9194 || vfp_register_count
== 16
9195 || vfp_register_count
== 32);
9196 tdep
->vfp_register_count
= vfp_register_count
;
9197 tdep
->have_vfp_pseudos
= have_vfp_pseudos
;
9198 tdep
->have_neon_pseudos
= have_neon_pseudos
;
9199 tdep
->have_neon
= have_neon
;
9201 arm_register_g_packet_guesses (gdbarch
);
9204 switch (info
.byte_order_for_code
)
9206 case BFD_ENDIAN_BIG
:
9207 tdep
->arm_breakpoint
= arm_default_arm_be_breakpoint
;
9208 tdep
->arm_breakpoint_size
= sizeof (arm_default_arm_be_breakpoint
);
9209 tdep
->thumb_breakpoint
= arm_default_thumb_be_breakpoint
;
9210 tdep
->thumb_breakpoint_size
= sizeof (arm_default_thumb_be_breakpoint
);
9214 case BFD_ENDIAN_LITTLE
:
9215 tdep
->arm_breakpoint
= arm_default_arm_le_breakpoint
;
9216 tdep
->arm_breakpoint_size
= sizeof (arm_default_arm_le_breakpoint
);
9217 tdep
->thumb_breakpoint
= arm_default_thumb_le_breakpoint
;
9218 tdep
->thumb_breakpoint_size
= sizeof (arm_default_thumb_le_breakpoint
);
9223 internal_error (__FILE__
, __LINE__
,
9224 _("arm_gdbarch_init: bad byte order for float format"));
9227 /* On ARM targets char defaults to unsigned. */
9228 set_gdbarch_char_signed (gdbarch
, 0);
9230 /* wchar_t is unsigned under the AAPCS. */
9231 if (tdep
->arm_abi
== ARM_ABI_AAPCS
)
9232 set_gdbarch_wchar_signed (gdbarch
, 0);
9234 set_gdbarch_wchar_signed (gdbarch
, 1);
9236 /* Compute type alignment. */
9237 set_gdbarch_type_align (gdbarch
, arm_type_align
);
9239 /* Note: for displaced stepping, this includes the breakpoint, and one word
9240 of additional scratch space. This setting isn't used for anything beside
9241 displaced stepping at present. */
9242 set_gdbarch_max_insn_length (gdbarch
, 4 * ARM_DISPLACED_MODIFIED_INSNS
);
9244 /* This should be low enough for everything. */
9245 tdep
->lowest_pc
= 0x20;
9246 tdep
->jb_pc
= -1; /* Longjump support not enabled by default. */
9248 /* The default, for both APCS and AAPCS, is to return small
9249 structures in registers. */
9250 tdep
->struct_return
= reg_struct_return
;
9252 set_gdbarch_push_dummy_call (gdbarch
, arm_push_dummy_call
);
9253 set_gdbarch_frame_align (gdbarch
, arm_frame_align
);
9256 set_gdbarch_code_of_frame_writable (gdbarch
, arm_code_of_frame_writable
);
9258 set_gdbarch_write_pc (gdbarch
, arm_write_pc
);
9260 frame_base_set_default (gdbarch
, &arm_normal_base
);
9262 /* Address manipulation. */
9263 set_gdbarch_addr_bits_remove (gdbarch
, arm_addr_bits_remove
);
9265 /* Advance PC across function entry code. */
9266 set_gdbarch_skip_prologue (gdbarch
, arm_skip_prologue
);
9268 /* Detect whether PC is at a point where the stack has been destroyed. */
9269 set_gdbarch_stack_frame_destroyed_p (gdbarch
, arm_stack_frame_destroyed_p
);
9271 /* Skip trampolines. */
9272 set_gdbarch_skip_trampoline_code (gdbarch
, arm_skip_stub
);
9274 /* The stack grows downward. */
9275 set_gdbarch_inner_than (gdbarch
, core_addr_lessthan
);
9277 /* Breakpoint manipulation. */
9278 set_gdbarch_breakpoint_kind_from_pc (gdbarch
, arm_breakpoint_kind_from_pc
);
9279 set_gdbarch_sw_breakpoint_from_kind (gdbarch
, arm_sw_breakpoint_from_kind
);
9280 set_gdbarch_breakpoint_kind_from_current_state (gdbarch
,
9281 arm_breakpoint_kind_from_current_state
);
9283 /* Information about registers, etc. */
9284 set_gdbarch_sp_regnum (gdbarch
, ARM_SP_REGNUM
);
9285 set_gdbarch_pc_regnum (gdbarch
, ARM_PC_REGNUM
);
9286 set_gdbarch_num_regs (gdbarch
, ARM_NUM_REGS
);
9287 set_gdbarch_register_type (gdbarch
, arm_register_type
);
9288 set_gdbarch_register_reggroup_p (gdbarch
, arm_register_reggroup_p
);
9290 /* This "info float" is FPA-specific. Use the generic version if we
9292 if (gdbarch_tdep (gdbarch
)->have_fpa_registers
)
9293 set_gdbarch_print_float_info (gdbarch
, arm_print_float_info
);
9295 /* Internal <-> external register number maps. */
9296 set_gdbarch_dwarf2_reg_to_regnum (gdbarch
, arm_dwarf_reg_to_regnum
);
9297 set_gdbarch_register_sim_regno (gdbarch
, arm_register_sim_regno
);
9299 set_gdbarch_register_name (gdbarch
, arm_register_name
);
9301 /* Returning results. */
9302 set_gdbarch_return_value (gdbarch
, arm_return_value
);
9305 set_gdbarch_print_insn (gdbarch
, gdb_print_insn_arm
);
9307 /* Minsymbol frobbing. */
9308 set_gdbarch_elf_make_msymbol_special (gdbarch
, arm_elf_make_msymbol_special
);
9309 set_gdbarch_coff_make_msymbol_special (gdbarch
,
9310 arm_coff_make_msymbol_special
);
9311 set_gdbarch_record_special_symbol (gdbarch
, arm_record_special_symbol
);
9313 /* Thumb-2 IT block support. */
9314 set_gdbarch_adjust_breakpoint_address (gdbarch
,
9315 arm_adjust_breakpoint_address
);
9317 /* Virtual tables. */
9318 set_gdbarch_vbit_in_delta (gdbarch
, 1);
9320 /* Hook in the ABI-specific overrides, if they have been registered. */
9321 gdbarch_init_osabi (info
, gdbarch
);
9323 dwarf2_frame_set_init_reg (gdbarch
, arm_dwarf2_frame_init_reg
);
9325 /* Add some default predicates. */
9327 frame_unwind_append_unwinder (gdbarch
, &arm_m_exception_unwind
);
9328 frame_unwind_append_unwinder (gdbarch
, &arm_stub_unwind
);
9329 dwarf2_append_unwinders (gdbarch
);
9330 frame_unwind_append_unwinder (gdbarch
, &arm_exidx_unwind
);
9331 frame_unwind_append_unwinder (gdbarch
, &arm_epilogue_frame_unwind
);
9332 frame_unwind_append_unwinder (gdbarch
, &arm_prologue_unwind
);
9334 /* Now we have tuned the configuration, set a few final things,
9335 based on what the OS ABI has told us. */
9337 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
9338 binaries are always marked. */
9339 if (tdep
->arm_abi
== ARM_ABI_AUTO
)
9340 tdep
->arm_abi
= ARM_ABI_APCS
;
9342 /* Watchpoints are not steppable. */
9343 set_gdbarch_have_nonsteppable_watchpoint (gdbarch
, 1);
9345 /* We used to default to FPA for generic ARM, but almost nobody
9346 uses that now, and we now provide a way for the user to force
9347 the model. So default to the most useful variant. */
9348 if (tdep
->fp_model
== ARM_FLOAT_AUTO
)
9349 tdep
->fp_model
= ARM_FLOAT_SOFT_FPA
;
9351 if (tdep
->jb_pc
>= 0)
9352 set_gdbarch_get_longjmp_target (gdbarch
, arm_get_longjmp_target
);
9354 /* Floating point sizes and format. */
9355 set_gdbarch_float_format (gdbarch
, floatformats_ieee_single
);
9356 if (tdep
->fp_model
== ARM_FLOAT_SOFT_FPA
|| tdep
->fp_model
== ARM_FLOAT_FPA
)
9358 set_gdbarch_double_format
9359 (gdbarch
, floatformats_ieee_double_littlebyte_bigword
);
9360 set_gdbarch_long_double_format
9361 (gdbarch
, floatformats_ieee_double_littlebyte_bigword
);
9365 set_gdbarch_double_format (gdbarch
, floatformats_ieee_double
);
9366 set_gdbarch_long_double_format (gdbarch
, floatformats_ieee_double
);
9369 if (have_vfp_pseudos
)
9371 /* NOTE: These are the only pseudo registers used by
9372 the ARM target at the moment. If more are added, a
9373 little more care in numbering will be needed. */
9375 int num_pseudos
= 32;
9376 if (have_neon_pseudos
)
9378 set_gdbarch_num_pseudo_regs (gdbarch
, num_pseudos
);
9379 set_gdbarch_pseudo_register_read (gdbarch
, arm_pseudo_read
);
9380 set_gdbarch_pseudo_register_write (gdbarch
, arm_pseudo_write
);
9385 set_tdesc_pseudo_register_name (gdbarch
, arm_register_name
);
9387 tdesc_use_registers (gdbarch
, tdesc
, tdesc_data
);
9389 /* Override tdesc_register_type to adjust the types of VFP
9390 registers for NEON. */
9391 set_gdbarch_register_type (gdbarch
, arm_register_type
);
9394 /* Add standard register aliases. We add aliases even for those
9395 nanes which are used by the current architecture - it's simpler,
9396 and does no harm, since nothing ever lists user registers. */
9397 for (i
= 0; i
< ARRAY_SIZE (arm_register_aliases
); i
++)
9398 user_reg_add (gdbarch
, arm_register_aliases
[i
].name
,
9399 value_of_arm_user_reg
, &arm_register_aliases
[i
].regnum
);
9401 set_gdbarch_disassembler_options (gdbarch
, &arm_disassembler_options
);
9402 set_gdbarch_valid_disassembler_options (gdbarch
, disassembler_options_arm ());
9404 set_gdbarch_gnu_triplet_regexp (gdbarch
, arm_gnu_triplet_regexp
);
/* "maintenance print architecture" dump hook, registered with
   gdbarch_register below.  Prints the ARM tdep state for GDBARCH to
   FILE; only TDEP->lowest_pc is reported at present.  */

static void
arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  /* Nothing to dump if the architecture has no ARM tdep attached.  */
  if (tdep == NULL)
    return;

  fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
		      (unsigned long) tdep->lowest_pc);
}
9424 static void arm_record_test (void);
/* Module initializer: registers the ARM architecture with GDB's gdbarch
   framework, the ELF OS ABI sniffer, the built-in target descriptions,
   and all "set/show arm ..." user commands.  Called automatically by
   GDB's init machinery at startup.  */

void
_initialize_arm_tdep (void)
{
  int length;
  int i, j;
  /* Scratch buffer used to accumulate the per-style help lines for the
     "set arm disassembler" command.  */
  char regdesc[1024], *rdptr = regdesc;
  size_t rest = sizeof (regdesc);

  gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);

  /* Add ourselves to objfile event chain.  */
  gdb::observers::new_objfile.attach (arm_exidx_new_objfile);

  /* Register an ELF OS ABI sniffer for ARM binaries.  */
  gdbarch_register_osabi_sniffer (bfd_arch_arm,
				  bfd_target_elf_flavour,
				  arm_elf_osabi_sniffer);

  /* Initialize the standard target descriptions.  */
  initialize_tdesc_arm_with_m ();
  initialize_tdesc_arm_with_m_fpa_layout ();
  initialize_tdesc_arm_with_m_vfp_d16 ();
  initialize_tdesc_arm_with_iwmmxt ();
  initialize_tdesc_arm_with_vfpv2 ();
  initialize_tdesc_arm_with_vfpv3 ();
  initialize_tdesc_arm_with_neon ();

  /* Add root prefix command for all "set arm"/"show arm" commands.  */
  add_prefix_cmd ("arm", no_class, set_arm_command,
		  _("Various ARM-specific commands."),
		  &setarmcmdlist, "set arm ", 0, &setlist);

  add_prefix_cmd ("arm", no_class, show_arm_command,
		  _("Various ARM-specific commands."),
		  &showarmcmdlist, "show arm ", 0, &showlist);

  /* Build the list of valid disassembly styles ("reg-names-*" options
     exposed by opcodes) and the matching one-line descriptions.  */
  arm_disassembler_options = xstrdup ("reg-names-std");
  const disasm_options_t *disasm_options
    = &disassembler_options_arm ()->options;
  int num_disassembly_styles = 0;
  for (i = 0; disasm_options->name[i] != NULL; i++)
    if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
      num_disassembly_styles++;

  /* Initialize the array that will be passed to add_setshow_enum_cmd().  */
  valid_disassembly_styles = XNEWVEC (const char *,
				      num_disassembly_styles + 1);
  for (i = j = 0; disasm_options->name[i] != NULL; i++)
    if (CONST_STRNEQ (disasm_options->name[i], "reg-names-"))
      {
	size_t offset = strlen ("reg-names-");
	const char *style = disasm_options->name[i];
	valid_disassembly_styles[j++] = &style[offset];
	length = snprintf (rdptr, rest, "%s - %s\n", &style[offset],
			   disasm_options->description[i]);
	rdptr += length;
	rest -= length;
      }
  /* Mark the end of valid options.  */
  valid_disassembly_styles[num_disassembly_styles] = NULL;

  /* Create the help text.  */
  std::string helptext = string_printf ("%s%s%s",
					_("The valid values are:\n"),
					regdesc,
					_("The default is \"std\"."));

  add_setshow_enum_cmd("disassembler", no_class,
		       valid_disassembly_styles, &disassembly_style,
		       _("Set the disassembly style."),
		       _("Show the disassembly style."),
		       helptext.c_str (),
		       set_disassembly_style_sfunc,
		       show_disassembly_style_sfunc,
		       &setarmcmdlist, &showarmcmdlist);

  add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
			   _("Set usage of ARM 32-bit mode."),
			   _("Show usage of ARM 32-bit mode."),
			   _("When off, a 26-bit PC will be used."),
			   NULL,
			   NULL, /* FIXME: i18n: Usage of ARM 32-bit
				    mode is %s.  */
			   &setarmcmdlist, &showarmcmdlist);

  /* Add a command to allow the user to force the FPU model.  */
  add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
			_("Set the floating point type."),
			_("Show the floating point type."),
			_("auto - Determine the FP typefrom the OS-ABI.\n\
softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
fpa - FPA co-processor (GCC compiled).\n\
softvfp - Software FP with pure-endian doubles.\n\
vfp - VFP co-processor."),
			set_fp_model_sfunc, show_fp_model,
			&setarmcmdlist, &showarmcmdlist);

  /* Add a command to allow the user to force the ABI.  */
  add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
			_("Set the ABI."),
			_("Show the ABI."),
			NULL, arm_set_abi, arm_show_abi,
			&setarmcmdlist, &showarmcmdlist);

  /* Add two commands to allow the user to force the assumed
     execution mode.  */
  add_setshow_enum_cmd ("fallback-mode", class_support,
			arm_mode_strings, &arm_fallback_mode_string,
			_("Set the mode assumed when symbols are unavailable."),
			_("Show the mode assumed when symbols are unavailable."),
			NULL, NULL, arm_show_fallback_mode,
			&setarmcmdlist, &showarmcmdlist);
  add_setshow_enum_cmd ("force-mode", class_support,
			arm_mode_strings, &arm_force_mode_string,
			_("Set the mode assumed even when symbols are available."),
			_("Show the mode assumed even when symbols are available."),
			NULL, NULL, arm_show_force_mode,
			&setarmcmdlist, &showarmcmdlist);

  /* Debugging flag.  */
  add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
			   _("Set ARM debugging."),
			   _("Show ARM debugging."),
			   _("When on, arm-specific debugging is enabled."),
			   NULL,
			   NULL, /* FIXME: i18n: "ARM debugging is %s.  */
			   &setdebuglist, &showdebuglist);

#if GDB_SELF_TEST
  selftests::register_test ("arm-record", selftests::arm_record_test);
#endif
}
/* ARM-reversible process record data structures.  */

/* Byte widths of the three instruction encodings handled by the
   record/replay decoder.  */
#define ARM_INSN_SIZE_BYTES 4
#define THUMB_INSN_SIZE_BYTES 2
#define THUMB2_INSN_SIZE_BYTES 4

/* Position of the bit within a 32-bit ARM instruction
   that defines whether the instruction is a load or store.  */
#define INSN_S_L_BIT_NUM 20
/* Allocate REGS (an array of LENGTH register numbers) and fill it from
   RECORD_BUF.  A LENGTH of zero leaves REGS untouched.  Ownership of
   the allocation passes to the caller (freed by the record teardown).  */
#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int reg_len = LENGTH; \
            if (reg_len) \
              { \
                REGS = XNEWVEC (uint32_t, reg_len); \
                memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
              } \
          } \
        while (0)

/* Allocate MEMS (an array of LENGTH struct arm_mem_r) and fill it from
   RECORD_BUF.  A LENGTH of zero leaves MEMS untouched.  */
#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int mem_len = LENGTH; \
            if (mem_len) \
              { \
                MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
                memcpy(&MEMS->len, &RECORD_BUF[0], \
                       sizeof(struct arm_mem_r) * LENGTH); \
              } \
          } \
        while (0)

/* Checks whether insn is already recorded or yet to be decoded. (boolean expression).  */
#define INSN_RECORDED(ARM_RECORD) \
        (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
/* ARM memory record structure: one to-be-saved memory region, described
   by its byte length and start address.  */
struct arm_mem_r
{
  uint32_t len;    /* Record length.  */
  uint32_t addr;   /* Memory address.  */
};
/* ARM instruction record contains opcode of current insn
   and execution state (before entry to decode_insn()),
   contains list of to-be-modified registers and
   memory blocks (on return from decode_insn()).  */

typedef struct insn_decode_record_t
{
  struct gdbarch *gdbarch;
  struct regcache *regcache;
  CORE_ADDR this_addr;          /* Address of the insn being decoded.  */
  uint32_t arm_insn;            /* Should accommodate thumb.  */
  uint32_t cond;                /* Condition code.  */
  uint32_t opcode;              /* Insn opcode.  */
  uint32_t decode;              /* Insn decode bits.  */
  uint32_t mem_rec_count;       /* No of mem records.  */
  uint32_t reg_rec_count;       /* No of reg records.  */
  uint32_t *arm_regs;           /* Registers to be saved for this record.  */
  struct arm_mem_r *arm_mems;   /* Memory to be saved for this record.  */
} insn_decode_record;
9631 /* Checks ARM SBZ and SBO mandatory fields. */
9634 sbo_sbz (uint32_t insn
, uint32_t bit_num
, uint32_t len
, uint32_t sbo
)
9636 uint32_t ones
= bits (insn
, bit_num
- 1, (bit_num
-1) + (len
- 1));
/* Overall outcome of recording one instruction.  */
enum arm_record_result
{
  ARM_RECORD_SUCCESS = 0,
  ARM_RECORD_FAILURE = 1
};

/* Payload selector for arm_record_strx: halfword store (STRH) or
   double-word store (STRD).  */
typedef enum
{
  ARM_RECORD_STRH=1,
  ARM_RECORD_STRD
} arm_record_strx_t;
/* Record handler for the misc store forms (STRH/STRD addressing modes).
   Computes the target memory address for the store described by
   ARM_INSN_R and appends the memory region(s) to RECORD_BUF_MEM and any
   modified base register to RECORD_BUF, updating the counts in
   ARM_INSN_R.  STR_TYPE selects the payload: ARM_RECORD_STRH records a
   2-byte region, ARM_RECORD_STRD records two consecutive 4-byte
   regions.  Always returns 0.  */

static int
arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
		 uint32_t *record_buf_mem, arm_record_strx_t str_type)
{

  struct regcache *reg_cache = arm_insn_r->regcache;
  ULONGEST u_regval[2]= {0};

  uint32_t reg_src1 = 0, reg_src2 = 0;
  uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;

  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
  arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);

  if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
    {
      /* 1) Handle misc store, immediate offset.  */
      immed_low = bits (arm_insn_r->arm_insn, 0, 3);
      immed_high = bits (arm_insn_r->arm_insn, 8, 11);
      reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_src1,
				  &u_regval[0]);
      if (ARM_PC_REGNUM == reg_src1)
	{
	  /* If R15 was used as Rn, hence current PC+8.  */
	  u_regval[0] = u_regval[0] + 8;
	}
      offset_8 = (immed_high << 4) | immed_low;
      /* Calculate target store address.  */
      if (14 == arm_insn_r->opcode)
	{
	  tgt_mem_addr = u_regval[0] + offset_8;
	}
      else
	{
	  tgt_mem_addr = u_regval[0] - offset_8;
	}
      if (ARM_RECORD_STRH == str_type)
	{
	  record_buf_mem[0] = 2;
	  record_buf_mem[1] = tgt_mem_addr;
	  arm_insn_r->mem_rec_count = 1;
	}
      else if (ARM_RECORD_STRD == str_type)
	{
	  record_buf_mem[0] = 4;
	  record_buf_mem[1] = tgt_mem_addr;
	  record_buf_mem[2] = 4;
	  record_buf_mem[3] = tgt_mem_addr + 4;
	  arm_insn_r->mem_rec_count = 2;
	}
    }
  else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
    {
      /* 2) Store, register offset.  */
      /* Rm.  */
      reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
      /* Rn.  */
      reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
      regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
      if (15 == reg_src2)
	{
	  /* If R15 was used as Rn, hence current PC+8.  */
	  /* NOTE(review): the +8 adjustment is applied to u_regval[0]
	     (Rm's value) rather than u_regval[1] (Rn's).  For the add
	     form below the result is the same, but for the subtract
	     form it is not — looks suspicious; confirm upstream.  */
	  u_regval[0] = u_regval[0] + 8;
	}
      /* Calculate target store address, Rn +/- Rm, register offset.  */
      if (12 == arm_insn_r->opcode)
	{
	  tgt_mem_addr = u_regval[0] + u_regval[1];
	}
      else
	{
	  tgt_mem_addr = u_regval[1] - u_regval[0];
	}
      if (ARM_RECORD_STRH == str_type)
	{
	  record_buf_mem[0] = 2;
	  record_buf_mem[1] = tgt_mem_addr;
	  arm_insn_r->mem_rec_count = 1;
	}
      else if (ARM_RECORD_STRD == str_type)
	{
	  record_buf_mem[0] = 4;
	  record_buf_mem[1] = tgt_mem_addr;
	  record_buf_mem[2] = 4;
	  record_buf_mem[3] = tgt_mem_addr + 4;
	  arm_insn_r->mem_rec_count = 2;
	}
    }
  else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
	   || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
    {
      /* 3) Store, immediate pre-indexed.  */
      /* 5) Store, immediate post-indexed.  */
      immed_low = bits (arm_insn_r->arm_insn, 0, 3);
      immed_high = bits (arm_insn_r->arm_insn, 8, 11);
      offset_8 = (immed_high << 4) | immed_low;
      reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
      /* Calculate target store address, Rn +/- immediate offset.  */
      if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
	{
	  tgt_mem_addr = u_regval[0] + offset_8;
	}
      else
	{
	  tgt_mem_addr = u_regval[0] - offset_8;
	}
      if (ARM_RECORD_STRH == str_type)
	{
	  record_buf_mem[0] = 2;
	  record_buf_mem[1] = tgt_mem_addr;
	  arm_insn_r->mem_rec_count = 1;
	}
      else if (ARM_RECORD_STRD == str_type)
	{
	  record_buf_mem[0] = 4;
	  record_buf_mem[1] = tgt_mem_addr;
	  record_buf_mem[2] = 4;
	  record_buf_mem[3] = tgt_mem_addr + 4;
	  arm_insn_r->mem_rec_count = 2;
	}
      /* Record Rn also as it changes.  */
      *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
      arm_insn_r->reg_rec_count = 1;
    }
  else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
	   || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
    {
      /* 4) Store, register pre-indexed.  */
      /* 6) Store, register post -indexed.  */
      reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
      reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
      regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
      /* Calculate target store address, Rn +/- Rm, register offset.  */
      if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
	{
	  tgt_mem_addr = u_regval[0] + u_regval[1];
	}
      else
	{
	  tgt_mem_addr = u_regval[1] - u_regval[0];
	}
      if (ARM_RECORD_STRH == str_type)
	{
	  record_buf_mem[0] = 2;
	  record_buf_mem[1] = tgt_mem_addr;
	  arm_insn_r->mem_rec_count = 1;
	}
      else if (ARM_RECORD_STRD == str_type)
	{
	  record_buf_mem[0] = 4;
	  record_buf_mem[1] = tgt_mem_addr;
	  record_buf_mem[2] = 4;
	  record_buf_mem[3] = tgt_mem_addr + 4;
	  arm_insn_r->mem_rec_count = 2;
	}
      /* Record Rn also as it changes.  */
      *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
      arm_insn_r->reg_rec_count = 1;
    }
  return 0;
}
/* Handling ARM extension space insns.  */

/* Record the ARM "extension space" instructions (unconditional space,
   arithmetic/control/load-store extension space) for ARM_INSN_R.
   Returns 0 on success with the register/memory lists allocated into
   ARM_INSN_R, or -1 when the insn cannot be recorded yet.  */

static int
arm_record_extension_space (insn_decode_record *arm_insn_r)
{
  int ret = 0;  /* Return value: -1:record failure ;  0:success  */
  uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t reg_src1 = 0;
  struct regcache *reg_cache = arm_insn_r->regcache;
  ULONGEST u_regval = 0;

  gdb_assert (!INSN_RECORDED(arm_insn_r));
  /* Handle unconditional insn extension space.  */

  opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
  if (arm_insn_r->cond)
    {
      /* PLD has no affect on architectural state, it just affects
	 the caches.  */
      if (5 == ((opcode1 & 0xE0) >> 5))
	{
	  /* BLX(1) : CPSR and the return address register change.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  record_buf[1] = ARM_LR_REGNUM;
	  arm_insn_r->reg_rec_count = 2;
	}
      /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn.  */
    }

  opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
  if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
    {
      ret = -1;
      /* Undefined instruction on ARM V5; need to handle if later
	 versions define it.  */
    }

  opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
  insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);

  /* Handle arithmetic insn extension space.  */
  if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
      && !INSN_RECORDED(arm_insn_r))
    {
      /* Handle MLA(S) and MUL(S).  */
      if (in_inclusive_range (insn_op1, 0U, 3U))
	{
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[1] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 2;
	}
      else if (in_inclusive_range (insn_op1, 4U, 15U))
	{
	  /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S).  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
	  record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[2] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 3;
	}
    }

  opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
  insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);

  /* Handle control insn extension space.  */

  if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
      && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
    {
      if (!bit (arm_insn_r->arm_insn,25))
	{
	  if (!bits (arm_insn_r->arm_insn, 4, 7))
	    {
	      if ((0 == insn_op1) || (2 == insn_op1))
		{
		  /* MRS : only the destination register changes.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;
		}
	      else if (1 == insn_op1)
		{
		  /* CSPR is going to be changed.  */
		  record_buf[0] = ARM_PS_REGNUM;
		  arm_insn_r->reg_rec_count = 1;
		}
	      else if (3 == insn_op1)
		{
		  /* SPSR is going to be changed.  */
		  /* We need to get SPSR value, which is yet to be done.  */
		  return -1;
		}
	    }
	  else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
	    {
	      if (1 == insn_op1)
		{
		  /* BX : CPSR's T bit may change.  */
		  record_buf[0] = ARM_PS_REGNUM;
		  arm_insn_r->reg_rec_count = 1;
		}
	      else if (3 == insn_op1)
		{
		  /* CLZ : only the destination register changes.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;
		}
	    }
	  else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
	    {
	      /* BLX(2) : CPSR and the link register change.  */
	      record_buf[0] = ARM_PS_REGNUM;
	      record_buf[1] = ARM_LR_REGNUM;
	      arm_insn_r->reg_rec_count = 2;
	    }
	  else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
	    {
	      /* QADD, QSUB, QDADD, QDSUB */
	      record_buf[0] = ARM_PS_REGNUM;
	      record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
	      arm_insn_r->reg_rec_count = 2;
	    }
	  else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
	    {
	      /* BKPT : CPSR and the banked LR change.  */
	      record_buf[0] = ARM_PS_REGNUM;
	      record_buf[1] = ARM_LR_REGNUM;
	      arm_insn_r->reg_rec_count = 2;

	      /* Save SPSR also;how?  */
	      return -1;
	    }
	  else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
		  || 10 == bits (arm_insn_r->arm_insn, 4, 7)
		  || 12 == bits (arm_insn_r->arm_insn, 4, 7)
		  || 14 == bits (arm_insn_r->arm_insn, 4, 7)
		 )
	    {
	      if (0 == insn_op1 || 1 == insn_op1)
		{
		  /* SMLA<x><y>, SMLAW<y>, SMULW<y>.  */
		  /* We dont do optimization for SMULW<y> where we
		     need only destination register.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  record_buf[1] = ARM_PS_REGNUM;
		  arm_insn_r->reg_rec_count = 2;
		}
	      else if (2 == insn_op1)
		{
		  /* SMLAL<x><y> : both halves of the accumulator change.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
		  arm_insn_r->reg_rec_count = 2;
		}
	      else if (3 == insn_op1)
		{
		  /* SMUL<x><y> : only the destination register changes.  */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;
		}
	    }
	}
      else
	{
	  /* MSR : immediate form.  */
	  if (1 == insn_op1)
	    {
	      /* CSPR is going to be changed.  */
	      record_buf[0] = ARM_PS_REGNUM;
	      arm_insn_r->reg_rec_count = 1;
	    }
	  else if (3 == insn_op1)
	    {
	      /* SPSR is going to be changed.  */
	      /* we need to get SPSR value, which is yet to be done  */
	      return -1;
	    }
	}
    }

  opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
  opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
  insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);

  /* Handle load/store insn extension space.  */

  if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
      && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
      && !INSN_RECORDED(arm_insn_r))
    {
      /* SWP (bits<24:20> == 0b10000) / SWPB (== 0b10100).
	 NOTE(review): this guard was reconstructed from the ARM ARM
	 encodings — verify the exact condition against upstream.  */
      if (0 == insn_op1 && (16 == opcode2 || 20 == opcode2))
	{
	  /* These insn, changes register and memory as well.  */
	  /* SWP or SWPB insn.  */
	  /* Get memory address given by Rn.  */
	  reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
	  regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
	  /* SWP insn ?, swaps word.  */
	  if (8 == arm_insn_r->opcode)
	    {
	      record_buf_mem[0] = 4;
	    }
	  else
	    {
	      /* SWPB insn, swaps only byte.  */
	      record_buf_mem[0] = 1;
	    }
	  record_buf_mem[1] = u_regval;
	  arm_insn_r->mem_rec_count = 1;
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  arm_insn_r->reg_rec_count = 1;
	}
      else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
	{
	  /* STRH.  */
	  arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
			  ARM_RECORD_STRH);
	}
      else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
	{
	  /* LDRD : loads Rt and Rt+1.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[1] = record_buf[0] + 1;
	  arm_insn_r->reg_rec_count = 2;
	}
      else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
	{
	  /* STRD.  */
	  arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
			  ARM_RECORD_STRD);
	}
      else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
	{
	  /* LDRH, LDRSB, LDRSH.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  arm_insn_r->reg_rec_count = 1;
	}
    }

  opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
  if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
      && !INSN_RECORDED(arm_insn_r))
    {
      ret = -1;
      /* Handle coprocessor insn extension space.  */
    }

  /* To be done for ARMv5 and later; as of now we return -1.  */
  if (-1 == ret)
    return ret;

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);

  return ret;
}
/* Handling opcode 000 insns.  */

/* Record data-processing (register), miscellaneous, multiply,
   synchronization, and extra load/store instructions for ARM_INSN_R.
   Returns 0 on success, -1 when the insn cannot be recorded.  */

static int
arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
{
  struct regcache *reg_cache = arm_insn_r->regcache;
  uint32_t record_buf[8], record_buf_mem[8];
  ULONGEST u_regval[2] = {0};

  uint32_t reg_src1 = 0;
  uint32_t opcode1 = 0;

  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
  arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
  opcode1 = bits (arm_insn_r->arm_insn, 20, 24);

  if (!((opcode1 & 0x19) == 0x10))
    {
      /* Data-processing (register) and Data-processing (register-shifted
	 register  */
      /* Out of 11 shifter operands mode, all the insn modifies destination
	 register, which is specified by 13-16 decode.  */
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      record_buf[1] = ARM_PS_REGNUM;
      arm_insn_r->reg_rec_count = 2;
    }
  else if ((arm_insn_r->decode < 8) && ((opcode1 & 0x19) == 0x10))
    {
      /* Miscellaneous instructions */

      if (3 == arm_insn_r->decode && 0x12 == opcode1
	  && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
	{
	  /* Handle BLX, branch and link/exchange.  */
	  if (9 == arm_insn_r->opcode)
	    {
	      /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
		 and R14 stores the return address.  */
	      record_buf[0] = ARM_PS_REGNUM;
	      record_buf[1] = ARM_LR_REGNUM;
	      arm_insn_r->reg_rec_count = 2;
	    }
	}
      else if (7 == arm_insn_r->decode && 0x12 == opcode1)
	{
	  /* Handle enhanced software breakpoint insn, BKPT.  */
	  /* CPSR is changed to be executed in ARM state,  disabling normal
	     interrupts, entering abort mode.  */
	  /* According to high vector configuration PC is set.  */
	  /* user hit breakpoint and type reverse, in
	     that case, we need to go back with previous CPSR and
	     Program Counter.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  record_buf[1] = ARM_LR_REGNUM;
	  arm_insn_r->reg_rec_count = 2;

	  /* Save SPSR also; how?  */
	  return -1;
	}
      else if (1 == arm_insn_r->decode && 0x12 == opcode1
	       && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
	{
	  /* Handle BX, branch and link/exchange.  */
	  /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 1;
	}
      else if (1 == arm_insn_r->decode && 0x16 == opcode1
	       && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
	       && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
	{
	  /* Count leading zeros: CLZ.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  arm_insn_r->reg_rec_count = 1;
	}
      else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
	       && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
	       && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
	       && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0))
	{
	  /* Handle MRS insn.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
	  arm_insn_r->reg_rec_count = 1;
	}
    }
  else if (9 == arm_insn_r->decode && opcode1 < 0x10)
    {
      /* Multiply and multiply-accumulate */

      /* Handle multiply instructions.  */
      /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
      if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
	{
	  /* Handle MLA and MUL.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
	  record_buf[1] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 2;
	}
      else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
	{
	  /* Handle SMLAL, SMULL, UMLAL, UMULL.  */
	  record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
	  record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
	  record_buf[2] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 3;
	}
    }
  else if (9 == arm_insn_r->decode && opcode1 > 0x10)
    {
      /* Synchronization primitives */

      /* Handling SWP, SWPB.  */
      /* These insn, changes register and memory as well.  */
      /* SWP or SWPB insn.  */

      reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
      /* SWP insn ?, swaps word.  */
      if (8 == arm_insn_r->opcode)
	{
	  record_buf_mem[0] = 4;
	}
      else
	{
	  /* SWPB insn, swaps only byte.  */
	  record_buf_mem[0] = 1;
	}
      record_buf_mem[1] = u_regval[0];
      arm_insn_r->mem_rec_count = 1;
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      arm_insn_r->reg_rec_count = 1;
    }
  else if (11 == arm_insn_r->decode || 13 == arm_insn_r->decode
	   || 15 == arm_insn_r->decode)
    {
      if ((opcode1 & 0x12) == 2)
	{
	  /* Extra load/store (unprivileged) */
	  return -1;
	}
      else
	{
	  /* Extra load/store */
	  switch (bits (arm_insn_r->arm_insn, 5, 6))
	    {
	    case 1:
	      if ((opcode1 & 0x05) == 0x0 || (opcode1 & 0x05) == 0x4)
		{
		  /* STRH (register), STRH (immediate) */
		  arm_record_strx (arm_insn_r, &record_buf[0],
				   &record_buf_mem[0], ARM_RECORD_STRH);
		}
	      else if ((opcode1 & 0x05) == 0x1)
		{
		  /* LDRH (register) */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x5)
		{
		  /* LDRH (immediate), LDRH (literal) */
		  int rn = bits (arm_insn_r->arm_insn, 16, 19);

		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (rn != 15)
		    {
		      /*LDRH (immediate) */
		      if (bit (arm_insn_r->arm_insn, 21))
			{
			  /* Write back to Rn.  */
			  record_buf[arm_insn_r->reg_rec_count++] = rn;
			}
		    }
		}
	      else
		return -1;
	      break;
	    case 2:
	      if ((opcode1 & 0x05) == 0x0)
		{
		  /* LDRD (register) */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  record_buf[1] = record_buf[0] + 1;
		  arm_insn_r->reg_rec_count = 2;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x1)
		{
		  /* LDRSB (register) */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x4 || (opcode1 & 0x05) == 0x5)
		{
		  /* LDRD (immediate), LDRD (literal), LDRSB (immediate),
		     LDRSB (literal) */
		  int rn = bits (arm_insn_r->arm_insn, 16, 19);

		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (rn != 15)
		    {
		      /*LDRD (immediate), LDRSB (immediate) */
		      if (bit (arm_insn_r->arm_insn, 21))
			{
			  /* Write back to Rn.  */
			  record_buf[arm_insn_r->reg_rec_count++] = rn;
			}
		    }
		}
	      else
		return -1;
	      break;
	    case 3:
	      if ((opcode1 & 0x05) == 0x0)
		{
		  /* STRD (register) */
		  arm_record_strx (arm_insn_r, &record_buf[0],
				   &record_buf_mem[0], ARM_RECORD_STRD);
		}
	      else if ((opcode1 & 0x05) == 0x1)
		{
		  /* LDRSH (register) */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else if ((opcode1 & 0x05) == 0x4)
		{
		  /* STRD (immediate) */
		  arm_record_strx (arm_insn_r, &record_buf[0],
				   &record_buf_mem[0], ARM_RECORD_STRD);
		}
	      else if ((opcode1 & 0x05) == 0x5)
		{
		  /* LDRSH (immediate), LDRSH (literal) */
		  record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
		  arm_insn_r->reg_rec_count = 1;

		  if (bit (arm_insn_r->arm_insn, 21))
		    {
		      /* Write back to Rn.  */
		      record_buf[arm_insn_r->reg_rec_count++]
			= bits (arm_insn_r->arm_insn, 16, 19);
		    }
		}
	      else
		return -1;
	      break;
	    default:
	      return -1;
	    }
	}
    }
  else
    {
      return -1;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
  return 0;
}
/* Handling opcode 001 insns.  */

/* Record data-processing (immediate) and MSR (immediate) instructions
   for ARM_INSN_R.  Returns 0 on success with the modified-register list
   allocated into ARM_INSN_R.  */

static int
arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
{
  uint32_t record_buf[8], record_buf_mem[8];

  arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
  arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);

  if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
      && 2 == bits (arm_insn_r->arm_insn, 20, 21)
      && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
     )
    {
      /* Handle MSR insn.  */
      if (9 == arm_insn_r->opcode)
	{
	  /* CSPR is going to be changed.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  arm_insn_r->reg_rec_count = 1;
	}
      else
	{
	  /* SPSR is going to be changed.  */
	}
    }
  else if (arm_insn_r->opcode <= 15)
    {
      /* Normal data processing insns.  */
      /* Out of 11 shifter operands mode, all the insn modifies destination
	 register, which is specified by 13-16 decode.  */
      record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
      record_buf[1] = ARM_PS_REGNUM;
      arm_insn_r->reg_rec_count = 2;
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
  return 0;
}
10444 arm_record_media (insn_decode_record
*arm_insn_r
)
10446 uint32_t record_buf
[8];
10448 switch (bits (arm_insn_r
->arm_insn
, 22, 24))
10451 /* Parallel addition and subtraction, signed */
10453 /* Parallel addition and subtraction, unsigned */
10456 /* Packing, unpacking, saturation and reversal */
10458 int rd
= bits (arm_insn_r
->arm_insn
, 12, 15);
10460 record_buf
[arm_insn_r
->reg_rec_count
++] = rd
;
10466 /* Signed multiplies */
10468 int rd
= bits (arm_insn_r
->arm_insn
, 16, 19);
10469 unsigned int op1
= bits (arm_insn_r
->arm_insn
, 20, 22);
10471 record_buf
[arm_insn_r
->reg_rec_count
++] = rd
;
10473 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_PS_REGNUM
;
10474 else if (op1
== 0x4)
10475 record_buf
[arm_insn_r
->reg_rec_count
++]
10476 = bits (arm_insn_r
->arm_insn
, 12, 15);
10482 if (bit (arm_insn_r
->arm_insn
, 21)
10483 && bits (arm_insn_r
->arm_insn
, 5, 6) == 0x2)
10486 record_buf
[arm_insn_r
->reg_rec_count
++]
10487 = bits (arm_insn_r
->arm_insn
, 12, 15);
10489 else if (bits (arm_insn_r
->arm_insn
, 20, 21) == 0x0
10490 && bits (arm_insn_r
->arm_insn
, 5, 7) == 0x0)
10492 /* USAD8 and USADA8 */
10493 record_buf
[arm_insn_r
->reg_rec_count
++]
10494 = bits (arm_insn_r
->arm_insn
, 16, 19);
10501 if (bits (arm_insn_r
->arm_insn
, 20, 21) == 0x3
10502 && bits (arm_insn_r
->arm_insn
, 5, 7) == 0x7)
10504 /* Permanently UNDEFINED */
10509 /* BFC, BFI and UBFX */
10510 record_buf
[arm_insn_r
->reg_rec_count
++]
10511 = bits (arm_insn_r
->arm_insn
, 12, 15);
10520 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
10525 /* Handle ARM mode instructions with opcode 010. */
10528 arm_record_ld_st_imm_offset (insn_decode_record
*arm_insn_r
)
10530 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
10532 uint32_t reg_base
, reg_dest
;
10533 uint32_t offset_12
, tgt_mem_addr
;
10534 uint32_t record_buf
[8], record_buf_mem
[8];
10535 unsigned char wback
;
10538 /* Calculate wback. */
10539 wback
= (bit (arm_insn_r
->arm_insn
, 24) == 0)
10540 || (bit (arm_insn_r
->arm_insn
, 21) == 1);
10542 arm_insn_r
->reg_rec_count
= 0;
10543 reg_base
= bits (arm_insn_r
->arm_insn
, 16, 19);
10545 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
10547 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
10550 reg_dest
= bits (arm_insn_r
->arm_insn
, 12, 15);
10551 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_dest
;
10553 /* The LDR instruction is capable of doing branching. If MOV LR, PC
10554 preceeds a LDR instruction having R15 as reg_base, it
10555 emulates a branch and link instruction, and hence we need to save
10556 CPSR and PC as well. */
10557 if (ARM_PC_REGNUM
== reg_dest
)
10558 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_PS_REGNUM
;
10560 /* If wback is true, also save the base register, which is going to be
10563 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
10567 /* STR (immediate), STRB (immediate), STRBT and STRT. */
10569 offset_12
= bits (arm_insn_r
->arm_insn
, 0, 11);
10570 regcache_raw_read_unsigned (reg_cache
, reg_base
, &u_regval
);
10572 /* Handle bit U. */
10573 if (bit (arm_insn_r
->arm_insn
, 23))
10575 /* U == 1: Add the offset. */
10576 tgt_mem_addr
= (uint32_t) u_regval
+ offset_12
;
10580 /* U == 0: subtract the offset. */
10581 tgt_mem_addr
= (uint32_t) u_regval
- offset_12
;
10584 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
10586 if (bit (arm_insn_r
->arm_insn
, 22))
10588 /* STRB and STRBT: 1 byte. */
10589 record_buf_mem
[0] = 1;
10593 /* STR and STRT: 4 bytes. */
10594 record_buf_mem
[0] = 4;
10597 /* Handle bit P. */
10598 if (bit (arm_insn_r
->arm_insn
, 24))
10599 record_buf_mem
[1] = tgt_mem_addr
;
10601 record_buf_mem
[1] = (uint32_t) u_regval
;
10603 arm_insn_r
->mem_rec_count
= 1;
10605 /* If wback is true, also save the base register, which is going to be
10608 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
10611 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
10612 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
10616 /* Handling opcode 011 insns. */
10619 arm_record_ld_st_reg_offset (insn_decode_record
*arm_insn_r
)
10621 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
10623 uint32_t shift_imm
= 0;
10624 uint32_t reg_src1
= 0, reg_src2
= 0, reg_dest
= 0;
10625 uint32_t offset_12
= 0, tgt_mem_addr
= 0;
10626 uint32_t record_buf
[8], record_buf_mem
[8];
10629 ULONGEST u_regval
[2];
10631 if (bit (arm_insn_r
->arm_insn
, 4))
10632 return arm_record_media (arm_insn_r
);
10634 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
10635 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
10637 /* Handle enhanced store insns and LDRD DSP insn,
10638 order begins according to addressing modes for store insns
10642 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
10644 reg_dest
= bits (arm_insn_r
->arm_insn
, 12, 15);
10645 /* LDR insn has a capability to do branching, if
10646 MOV LR, PC is precedded by LDR insn having Rn as R15
10647 in that case, it emulates branch and link insn, and hence we
10648 need to save CSPR and PC as well. */
10649 if (15 != reg_dest
)
10651 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
10652 arm_insn_r
->reg_rec_count
= 1;
10656 record_buf
[0] = reg_dest
;
10657 record_buf
[1] = ARM_PS_REGNUM
;
10658 arm_insn_r
->reg_rec_count
= 2;
10663 if (! bits (arm_insn_r
->arm_insn
, 4, 11))
10665 /* Store insn, register offset and register pre-indexed,
10666 register post-indexed. */
10668 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
10670 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
10671 regcache_raw_read_unsigned (reg_cache
, reg_src1
10673 regcache_raw_read_unsigned (reg_cache
, reg_src2
10675 if (15 == reg_src2
)
10677 /* If R15 was used as Rn, hence current PC+8. */
10678 /* Pre-indexed mode doesnt reach here ; illegal insn. */
10679 u_regval
[0] = u_regval
[0] + 8;
10681 /* Calculate target store address, Rn +/- Rm, register offset. */
10683 if (bit (arm_insn_r
->arm_insn
, 23))
10685 tgt_mem_addr
= u_regval
[0] + u_regval
[1];
10689 tgt_mem_addr
= u_regval
[1] - u_regval
[0];
10692 switch (arm_insn_r
->opcode
)
10706 record_buf_mem
[0] = 4;
10721 record_buf_mem
[0] = 1;
10725 gdb_assert_not_reached ("no decoding pattern found");
10728 record_buf_mem
[1] = tgt_mem_addr
;
10729 arm_insn_r
->mem_rec_count
= 1;
10731 if (9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
10732 || 13 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
10733 || 0 == arm_insn_r
->opcode
|| 2 == arm_insn_r
->opcode
10734 || 4 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
10735 || 1 == arm_insn_r
->opcode
|| 3 == arm_insn_r
->opcode
10736 || 5 == arm_insn_r
->opcode
|| 7 == arm_insn_r
->opcode
10739 /* Rn is going to be changed in pre-indexed mode and
10740 post-indexed mode as well. */
10741 record_buf
[0] = reg_src2
;
10742 arm_insn_r
->reg_rec_count
= 1;
10747 /* Store insn, scaled register offset; scaled pre-indexed. */
10748 offset_12
= bits (arm_insn_r
->arm_insn
, 5, 6);
10750 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
10752 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
10753 /* Get shift_imm. */
10754 shift_imm
= bits (arm_insn_r
->arm_insn
, 7, 11);
10755 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
10756 regcache_raw_read_signed (reg_cache
, reg_src1
, &s_word
);
10757 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
10758 /* Offset_12 used as shift. */
10762 /* Offset_12 used as index. */
10763 offset_12
= u_regval
[0] << shift_imm
;
10767 offset_12
= (!shift_imm
)?0:u_regval
[0] >> shift_imm
;
10773 if (bit (u_regval
[0], 31))
10775 offset_12
= 0xFFFFFFFF;
10784 /* This is arithmetic shift. */
10785 offset_12
= s_word
>> shift_imm
;
10792 regcache_raw_read_unsigned (reg_cache
, ARM_PS_REGNUM
,
10794 /* Get C flag value and shift it by 31. */
10795 offset_12
= (((bit (u_regval
[1], 29)) << 31) \
10796 | (u_regval
[0]) >> 1);
10800 offset_12
= (u_regval
[0] >> shift_imm
) \
10802 (sizeof(uint32_t) - shift_imm
));
10807 gdb_assert_not_reached ("no decoding pattern found");
10811 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
10813 if (bit (arm_insn_r
->arm_insn
, 23))
10815 tgt_mem_addr
= u_regval
[1] + offset_12
;
10819 tgt_mem_addr
= u_regval
[1] - offset_12
;
10822 switch (arm_insn_r
->opcode
)
10836 record_buf_mem
[0] = 4;
10851 record_buf_mem
[0] = 1;
10855 gdb_assert_not_reached ("no decoding pattern found");
10858 record_buf_mem
[1] = tgt_mem_addr
;
10859 arm_insn_r
->mem_rec_count
= 1;
10861 if (9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
10862 || 13 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
10863 || 0 == arm_insn_r
->opcode
|| 2 == arm_insn_r
->opcode
10864 || 4 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
10865 || 1 == arm_insn_r
->opcode
|| 3 == arm_insn_r
->opcode
10866 || 5 == arm_insn_r
->opcode
|| 7 == arm_insn_r
->opcode
10869 /* Rn is going to be changed in register scaled pre-indexed
10870 mode,and scaled post indexed mode. */
10871 record_buf
[0] = reg_src2
;
10872 arm_insn_r
->reg_rec_count
= 1;
10877 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
10878 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
10882 /* Handle ARM mode instructions with opcode 100. */
10885 arm_record_ld_st_multiple (insn_decode_record
*arm_insn_r
)
10887 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
10888 uint32_t register_count
= 0, register_bits
;
10889 uint32_t reg_base
, addr_mode
;
10890 uint32_t record_buf
[24], record_buf_mem
[48];
10894 /* Fetch the list of registers. */
10895 register_bits
= bits (arm_insn_r
->arm_insn
, 0, 15);
10896 arm_insn_r
->reg_rec_count
= 0;
10898 /* Fetch the base register that contains the address we are loading data
10900 reg_base
= bits (arm_insn_r
->arm_insn
, 16, 19);
10902 /* Calculate wback. */
10903 wback
= (bit (arm_insn_r
->arm_insn
, 21) == 1);
10905 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
10907 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
10909 /* Find out which registers are going to be loaded from memory. */
10910 while (register_bits
)
10912 if (register_bits
& 0x00000001)
10913 record_buf
[arm_insn_r
->reg_rec_count
++] = register_count
;
10914 register_bits
= register_bits
>> 1;
10919 /* If wback is true, also save the base register, which is going to be
10922 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
10924 /* Save the CPSR register. */
10925 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_PS_REGNUM
;
10929 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
10931 addr_mode
= bits (arm_insn_r
->arm_insn
, 23, 24);
10933 regcache_raw_read_unsigned (reg_cache
, reg_base
, &u_regval
);
10935 /* Find out how many registers are going to be stored to memory. */
10936 while (register_bits
)
10938 if (register_bits
& 0x00000001)
10940 register_bits
= register_bits
>> 1;
10945 /* STMDA (STMED): Decrement after. */
10947 record_buf_mem
[1] = (uint32_t) u_regval
10948 - register_count
* ARM_INT_REGISTER_SIZE
+ 4;
10950 /* STM (STMIA, STMEA): Increment after. */
10952 record_buf_mem
[1] = (uint32_t) u_regval
;
10954 /* STMDB (STMFD): Decrement before. */
10956 record_buf_mem
[1] = (uint32_t) u_regval
10957 - register_count
* ARM_INT_REGISTER_SIZE
;
10959 /* STMIB (STMFA): Increment before. */
10961 record_buf_mem
[1] = (uint32_t) u_regval
+ ARM_INT_REGISTER_SIZE
;
10964 gdb_assert_not_reached ("no decoding pattern found");
10968 record_buf_mem
[0] = register_count
* ARM_INT_REGISTER_SIZE
;
10969 arm_insn_r
->mem_rec_count
= 1;
10971 /* If wback is true, also save the base register, which is going to be
10974 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
10977 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
10978 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
10982 /* Handling opcode 101 insns. */
10985 arm_record_b_bl (insn_decode_record
*arm_insn_r
)
10987 uint32_t record_buf
[8];
10989 /* Handle B, BL, BLX(1) insns. */
10990 /* B simply branches so we do nothing here. */
10991 /* Note: BLX(1) doesnt fall here but instead it falls into
10992 extension space. */
10993 if (bit (arm_insn_r
->arm_insn
, 24))
10995 record_buf
[0] = ARM_LR_REGNUM
;
10996 arm_insn_r
->reg_rec_count
= 1;
10999 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11005 arm_record_unsupported_insn (insn_decode_record
*arm_insn_r
)
11007 printf_unfiltered (_("Process record does not support instruction "
11008 "0x%0x at address %s.\n"),arm_insn_r
->arm_insn
,
11009 paddress (arm_insn_r
->gdbarch
, arm_insn_r
->this_addr
));
11014 /* Record handler for vector data transfer instructions. */
11017 arm_record_vdata_transfer_insn (insn_decode_record
*arm_insn_r
)
11019 uint32_t bits_a
, bit_c
, bit_l
, reg_t
, reg_v
;
11020 uint32_t record_buf
[4];
11022 reg_t
= bits (arm_insn_r
->arm_insn
, 12, 15);
11023 reg_v
= bits (arm_insn_r
->arm_insn
, 21, 23);
11024 bits_a
= bits (arm_insn_r
->arm_insn
, 21, 23);
11025 bit_l
= bit (arm_insn_r
->arm_insn
, 20);
11026 bit_c
= bit (arm_insn_r
->arm_insn
, 8);
11028 /* Handle VMOV instruction. */
11029 if (bit_l
&& bit_c
)
11031 record_buf
[0] = reg_t
;
11032 arm_insn_r
->reg_rec_count
= 1;
11034 else if (bit_l
&& !bit_c
)
11036 /* Handle VMOV instruction. */
11037 if (bits_a
== 0x00)
11039 record_buf
[0] = reg_t
;
11040 arm_insn_r
->reg_rec_count
= 1;
11042 /* Handle VMRS instruction. */
11043 else if (bits_a
== 0x07)
11046 reg_t
= ARM_PS_REGNUM
;
11048 record_buf
[0] = reg_t
;
11049 arm_insn_r
->reg_rec_count
= 1;
11052 else if (!bit_l
&& !bit_c
)
11054 /* Handle VMOV instruction. */
11055 if (bits_a
== 0x00)
11057 record_buf
[0] = ARM_D0_REGNUM
+ reg_v
;
11059 arm_insn_r
->reg_rec_count
= 1;
11061 /* Handle VMSR instruction. */
11062 else if (bits_a
== 0x07)
11064 record_buf
[0] = ARM_FPSCR_REGNUM
;
11065 arm_insn_r
->reg_rec_count
= 1;
11068 else if (!bit_l
&& bit_c
)
11070 /* Handle VMOV instruction. */
11071 if (!(bits_a
& 0x04))
11073 record_buf
[0] = (reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4))
11075 arm_insn_r
->reg_rec_count
= 1;
11077 /* Handle VDUP instruction. */
11080 if (bit (arm_insn_r
->arm_insn
, 21))
11082 reg_v
= reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4);
11083 record_buf
[0] = reg_v
+ ARM_D0_REGNUM
;
11084 record_buf
[1] = reg_v
+ ARM_D0_REGNUM
+ 1;
11085 arm_insn_r
->reg_rec_count
= 2;
11089 reg_v
= reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4);
11090 record_buf
[0] = reg_v
+ ARM_D0_REGNUM
;
11091 arm_insn_r
->reg_rec_count
= 1;
11096 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11100 /* Record handler for extension register load/store instructions. */
11103 arm_record_exreg_ld_st_insn (insn_decode_record
*arm_insn_r
)
11105 uint32_t opcode
, single_reg
;
11106 uint8_t op_vldm_vstm
;
11107 uint32_t record_buf
[8], record_buf_mem
[128];
11108 ULONGEST u_regval
= 0;
11110 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11112 opcode
= bits (arm_insn_r
->arm_insn
, 20, 24);
11113 single_reg
= !bit (arm_insn_r
->arm_insn
, 8);
11114 op_vldm_vstm
= opcode
& 0x1b;
11116 /* Handle VMOV instructions. */
11117 if ((opcode
& 0x1e) == 0x04)
11119 if (bit (arm_insn_r
->arm_insn
, 20)) /* to_arm_registers bit 20? */
11121 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11122 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
11123 arm_insn_r
->reg_rec_count
= 2;
11127 uint8_t reg_m
= bits (arm_insn_r
->arm_insn
, 0, 3);
11128 uint8_t bit_m
= bit (arm_insn_r
->arm_insn
, 5);
11132 /* The first S register number m is REG_M:M (M is bit 5),
11133 the corresponding D register number is REG_M:M / 2, which
11135 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_D0_REGNUM
+ reg_m
;
11136 /* The second S register number is REG_M:M + 1, the
11137 corresponding D register number is (REG_M:M + 1) / 2.
11138 IOW, if bit M is 1, the first and second S registers
11139 are mapped to different D registers, otherwise, they are
11140 in the same D register. */
11143 record_buf
[arm_insn_r
->reg_rec_count
++]
11144 = ARM_D0_REGNUM
+ reg_m
+ 1;
11149 record_buf
[0] = ((bit_m
<< 4) + reg_m
+ ARM_D0_REGNUM
);
11150 arm_insn_r
->reg_rec_count
= 1;
11154 /* Handle VSTM and VPUSH instructions. */
11155 else if (op_vldm_vstm
== 0x08 || op_vldm_vstm
== 0x0a
11156 || op_vldm_vstm
== 0x12)
11158 uint32_t start_address
, reg_rn
, imm_off32
, imm_off8
, memory_count
;
11159 uint32_t memory_index
= 0;
11161 reg_rn
= bits (arm_insn_r
->arm_insn
, 16, 19);
11162 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
11163 imm_off8
= bits (arm_insn_r
->arm_insn
, 0, 7);
11164 imm_off32
= imm_off8
<< 2;
11165 memory_count
= imm_off8
;
11167 if (bit (arm_insn_r
->arm_insn
, 23))
11168 start_address
= u_regval
;
11170 start_address
= u_regval
- imm_off32
;
11172 if (bit (arm_insn_r
->arm_insn
, 21))
11174 record_buf
[0] = reg_rn
;
11175 arm_insn_r
->reg_rec_count
= 1;
11178 while (memory_count
> 0)
11182 record_buf_mem
[memory_index
] = 4;
11183 record_buf_mem
[memory_index
+ 1] = start_address
;
11184 start_address
= start_address
+ 4;
11185 memory_index
= memory_index
+ 2;
11189 record_buf_mem
[memory_index
] = 4;
11190 record_buf_mem
[memory_index
+ 1] = start_address
;
11191 record_buf_mem
[memory_index
+ 2] = 4;
11192 record_buf_mem
[memory_index
+ 3] = start_address
+ 4;
11193 start_address
= start_address
+ 8;
11194 memory_index
= memory_index
+ 4;
11198 arm_insn_r
->mem_rec_count
= (memory_index
>> 1);
11200 /* Handle VLDM instructions. */
11201 else if (op_vldm_vstm
== 0x09 || op_vldm_vstm
== 0x0b
11202 || op_vldm_vstm
== 0x13)
11204 uint32_t reg_count
, reg_vd
;
11205 uint32_t reg_index
= 0;
11206 uint32_t bit_d
= bit (arm_insn_r
->arm_insn
, 22);
11208 reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
11209 reg_count
= bits (arm_insn_r
->arm_insn
, 0, 7);
11211 /* REG_VD is the first D register number. If the instruction
11212 loads memory to S registers (SINGLE_REG is TRUE), the register
11213 number is (REG_VD << 1 | bit D), so the corresponding D
11214 register number is (REG_VD << 1 | bit D) / 2 = REG_VD. */
11216 reg_vd
= reg_vd
| (bit_d
<< 4);
11218 if (bit (arm_insn_r
->arm_insn
, 21) /* write back */)
11219 record_buf
[reg_index
++] = bits (arm_insn_r
->arm_insn
, 16, 19);
11221 /* If the instruction loads memory to D register, REG_COUNT should
11222 be divided by 2, according to the ARM Architecture Reference
11223 Manual. If the instruction loads memory to S register, divide by
11224 2 as well because two S registers are mapped to D register. */
11225 reg_count
= reg_count
/ 2;
11226 if (single_reg
&& bit_d
)
11228 /* Increase the register count if S register list starts from
11229 an odd number (bit d is one). */
11233 while (reg_count
> 0)
11235 record_buf
[reg_index
++] = ARM_D0_REGNUM
+ reg_vd
+ reg_count
- 1;
11238 arm_insn_r
->reg_rec_count
= reg_index
;
11240 /* VSTR Vector store register. */
11241 else if ((opcode
& 0x13) == 0x10)
11243 uint32_t start_address
, reg_rn
, imm_off32
, imm_off8
;
11244 uint32_t memory_index
= 0;
11246 reg_rn
= bits (arm_insn_r
->arm_insn
, 16, 19);
11247 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
11248 imm_off8
= bits (arm_insn_r
->arm_insn
, 0, 7);
11249 imm_off32
= imm_off8
<< 2;
11251 if (bit (arm_insn_r
->arm_insn
, 23))
11252 start_address
= u_regval
+ imm_off32
;
11254 start_address
= u_regval
- imm_off32
;
11258 record_buf_mem
[memory_index
] = 4;
11259 record_buf_mem
[memory_index
+ 1] = start_address
;
11260 arm_insn_r
->mem_rec_count
= 1;
11264 record_buf_mem
[memory_index
] = 4;
11265 record_buf_mem
[memory_index
+ 1] = start_address
;
11266 record_buf_mem
[memory_index
+ 2] = 4;
11267 record_buf_mem
[memory_index
+ 3] = start_address
+ 4;
11268 arm_insn_r
->mem_rec_count
= 2;
11271 /* VLDR Vector load register. */
11272 else if ((opcode
& 0x13) == 0x11)
11274 uint32_t reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
11278 reg_vd
= reg_vd
| (bit (arm_insn_r
->arm_insn
, 22) << 4);
11279 record_buf
[0] = ARM_D0_REGNUM
+ reg_vd
;
11283 reg_vd
= (reg_vd
<< 1) | bit (arm_insn_r
->arm_insn
, 22);
11284 /* Record register D rather than pseudo register S. */
11285 record_buf
[0] = ARM_D0_REGNUM
+ reg_vd
/ 2;
11287 arm_insn_r
->reg_rec_count
= 1;
11290 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11291 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11295 /* Record handler for arm/thumb mode VFP data processing instructions. */
11298 arm_record_vfp_data_proc_insn (insn_decode_record
*arm_insn_r
)
11300 uint32_t opc1
, opc2
, opc3
, dp_op_sz
, bit_d
, reg_vd
;
11301 uint32_t record_buf
[4];
11302 enum insn_types
{INSN_T0
, INSN_T1
, INSN_T2
, INSN_T3
, INSN_INV
};
11303 enum insn_types curr_insn_type
= INSN_INV
;
11305 reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
11306 opc1
= bits (arm_insn_r
->arm_insn
, 20, 23);
11307 opc2
= bits (arm_insn_r
->arm_insn
, 16, 19);
11308 opc3
= bits (arm_insn_r
->arm_insn
, 6, 7);
11309 dp_op_sz
= bit (arm_insn_r
->arm_insn
, 8);
11310 bit_d
= bit (arm_insn_r
->arm_insn
, 22);
11311 /* Mask off the "D" bit. */
11312 opc1
= opc1
& ~0x04;
11314 /* Handle VMLA, VMLS. */
11317 if (bit (arm_insn_r
->arm_insn
, 10))
11319 if (bit (arm_insn_r
->arm_insn
, 6))
11320 curr_insn_type
= INSN_T0
;
11322 curr_insn_type
= INSN_T1
;
11327 curr_insn_type
= INSN_T1
;
11329 curr_insn_type
= INSN_T2
;
11332 /* Handle VNMLA, VNMLS, VNMUL. */
11333 else if (opc1
== 0x01)
11336 curr_insn_type
= INSN_T1
;
11338 curr_insn_type
= INSN_T2
;
11341 else if (opc1
== 0x02 && !(opc3
& 0x01))
11343 if (bit (arm_insn_r
->arm_insn
, 10))
11345 if (bit (arm_insn_r
->arm_insn
, 6))
11346 curr_insn_type
= INSN_T0
;
11348 curr_insn_type
= INSN_T1
;
11353 curr_insn_type
= INSN_T1
;
11355 curr_insn_type
= INSN_T2
;
11358 /* Handle VADD, VSUB. */
11359 else if (opc1
== 0x03)
11361 if (!bit (arm_insn_r
->arm_insn
, 9))
11363 if (bit (arm_insn_r
->arm_insn
, 6))
11364 curr_insn_type
= INSN_T0
;
11366 curr_insn_type
= INSN_T1
;
11371 curr_insn_type
= INSN_T1
;
11373 curr_insn_type
= INSN_T2
;
11377 else if (opc1
== 0x08)
11380 curr_insn_type
= INSN_T1
;
11382 curr_insn_type
= INSN_T2
;
11384 /* Handle all other vfp data processing instructions. */
11385 else if (opc1
== 0x0b)
11388 if (!(opc3
& 0x01) || (opc2
== 0x00 && opc3
== 0x01))
11390 if (bit (arm_insn_r
->arm_insn
, 4))
11392 if (bit (arm_insn_r
->arm_insn
, 6))
11393 curr_insn_type
= INSN_T0
;
11395 curr_insn_type
= INSN_T1
;
11400 curr_insn_type
= INSN_T1
;
11402 curr_insn_type
= INSN_T2
;
11405 /* Handle VNEG and VABS. */
11406 else if ((opc2
== 0x01 && opc3
== 0x01)
11407 || (opc2
== 0x00 && opc3
== 0x03))
11409 if (!bit (arm_insn_r
->arm_insn
, 11))
11411 if (bit (arm_insn_r
->arm_insn
, 6))
11412 curr_insn_type
= INSN_T0
;
11414 curr_insn_type
= INSN_T1
;
11419 curr_insn_type
= INSN_T1
;
11421 curr_insn_type
= INSN_T2
;
11424 /* Handle VSQRT. */
11425 else if (opc2
== 0x01 && opc3
== 0x03)
11428 curr_insn_type
= INSN_T1
;
11430 curr_insn_type
= INSN_T2
;
11433 else if (opc2
== 0x07 && opc3
== 0x03)
11436 curr_insn_type
= INSN_T1
;
11438 curr_insn_type
= INSN_T2
;
11440 else if (opc3
& 0x01)
11443 if ((opc2
== 0x08) || (opc2
& 0x0e) == 0x0c)
11445 if (!bit (arm_insn_r
->arm_insn
, 18))
11446 curr_insn_type
= INSN_T2
;
11450 curr_insn_type
= INSN_T1
;
11452 curr_insn_type
= INSN_T2
;
11456 else if ((opc2
& 0x0e) == 0x0a || (opc2
& 0x0e) == 0x0e)
11459 curr_insn_type
= INSN_T1
;
11461 curr_insn_type
= INSN_T2
;
11463 /* Handle VCVTB, VCVTT. */
11464 else if ((opc2
& 0x0e) == 0x02)
11465 curr_insn_type
= INSN_T2
;
11466 /* Handle VCMP, VCMPE. */
11467 else if ((opc2
& 0x0e) == 0x04)
11468 curr_insn_type
= INSN_T3
;
11472 switch (curr_insn_type
)
11475 reg_vd
= reg_vd
| (bit_d
<< 4);
11476 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
11477 record_buf
[1] = reg_vd
+ ARM_D0_REGNUM
+ 1;
11478 arm_insn_r
->reg_rec_count
= 2;
11482 reg_vd
= reg_vd
| (bit_d
<< 4);
11483 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
11484 arm_insn_r
->reg_rec_count
= 1;
11488 reg_vd
= (reg_vd
<< 1) | bit_d
;
11489 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
11490 arm_insn_r
->reg_rec_count
= 1;
11494 record_buf
[0] = ARM_FPSCR_REGNUM
;
11495 arm_insn_r
->reg_rec_count
= 1;
11499 gdb_assert_not_reached ("no decoding pattern found");
11503 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11507 /* Handling opcode 110 insns. */
11510 arm_record_asimd_vfp_coproc (insn_decode_record
*arm_insn_r
)
11512 uint32_t op1
, op1_ebit
, coproc
;
11514 coproc
= bits (arm_insn_r
->arm_insn
, 8, 11);
11515 op1
= bits (arm_insn_r
->arm_insn
, 20, 25);
11516 op1_ebit
= bit (arm_insn_r
->arm_insn
, 20);
11518 if ((coproc
& 0x0e) == 0x0a)
11520 /* Handle extension register ld/st instructions. */
11522 return arm_record_exreg_ld_st_insn (arm_insn_r
);
11524 /* 64-bit transfers between arm core and extension registers. */
11525 if ((op1
& 0x3e) == 0x04)
11526 return arm_record_exreg_ld_st_insn (arm_insn_r
);
11530 /* Handle coprocessor ld/st instructions. */
11535 return arm_record_unsupported_insn (arm_insn_r
);
11538 return arm_record_unsupported_insn (arm_insn_r
);
11541 /* Move to coprocessor from two arm core registers. */
11543 return arm_record_unsupported_insn (arm_insn_r
);
11545 /* Move to two arm core registers from coprocessor. */
11550 reg_t
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11551 reg_t
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
11552 arm_insn_r
->reg_rec_count
= 2;
11554 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, reg_t
);
11558 return arm_record_unsupported_insn (arm_insn_r
);
11561 /* Handling opcode 111 insns. */
11564 arm_record_coproc_data_proc (insn_decode_record
*arm_insn_r
)
11566 uint32_t op
, op1_ebit
, coproc
, bits_24_25
;
11567 struct gdbarch_tdep
*tdep
= gdbarch_tdep (arm_insn_r
->gdbarch
);
11568 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11570 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 24, 27);
11571 coproc
= bits (arm_insn_r
->arm_insn
, 8, 11);
11572 op1_ebit
= bit (arm_insn_r
->arm_insn
, 20);
11573 op
= bit (arm_insn_r
->arm_insn
, 4);
11574 bits_24_25
= bits (arm_insn_r
->arm_insn
, 24, 25);
11576 /* Handle arm SWI/SVC system call instructions. */
11577 if (bits_24_25
== 0x3)
11579 if (tdep
->arm_syscall_record
!= NULL
)
11581 ULONGEST svc_operand
, svc_number
;
11583 svc_operand
= (0x00ffffff & arm_insn_r
->arm_insn
);
11585 if (svc_operand
) /* OABI. */
11586 svc_number
= svc_operand
- 0x900000;
11588 regcache_raw_read_unsigned (reg_cache
, 7, &svc_number
);
11590 return tdep
->arm_syscall_record (reg_cache
, svc_number
);
11594 printf_unfiltered (_("no syscall record support\n"));
11598 else if (bits_24_25
== 0x02)
11602 if ((coproc
& 0x0e) == 0x0a)
11604 /* 8, 16, and 32-bit transfer */
11605 return arm_record_vdata_transfer_insn (arm_insn_r
);
11612 uint32_t record_buf
[1];
11614 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11615 if (record_buf
[0] == 15)
11616 record_buf
[0] = ARM_PS_REGNUM
;
11618 arm_insn_r
->reg_rec_count
= 1;
11619 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
,
11632 if ((coproc
& 0x0e) == 0x0a)
11634 /* VFP data-processing instructions. */
11635 return arm_record_vfp_data_proc_insn (arm_insn_r
);
11646 unsigned int op1
= bits (arm_insn_r
->arm_insn
, 20, 25);
11650 if ((coproc
& 0x0e) != 0x0a)
11656 else if (op1
== 4 || op1
== 5)
11658 if ((coproc
& 0x0e) == 0x0a)
11660 /* 64-bit transfers between ARM core and extension */
11669 else if (op1
== 0 || op1
== 1)
11676 if ((coproc
& 0x0e) == 0x0a)
11678 /* Extension register load/store */
11682 /* STC, STC2, LDC, LDC2 */
11691 /* Handling opcode 000 insns. */
11694 thumb_record_shift_add_sub (insn_decode_record
*thumb_insn_r
)
11696 uint32_t record_buf
[8];
11697 uint32_t reg_src1
= 0;
11699 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
11701 record_buf
[0] = ARM_PS_REGNUM
;
11702 record_buf
[1] = reg_src1
;
11703 thumb_insn_r
->reg_rec_count
= 2;
11705 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
11711 /* Handling opcode 001 insns. */
11714 thumb_record_add_sub_cmp_mov (insn_decode_record
*thumb_insn_r
)
11716 uint32_t record_buf
[8];
11717 uint32_t reg_src1
= 0;
11719 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
11721 record_buf
[0] = ARM_PS_REGNUM
;
11722 record_buf
[1] = reg_src1
;
11723 thumb_insn_r
->reg_rec_count
= 2;
11725 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
11730 /* Handling opcode 010 insns. */
11733 thumb_record_ld_st_reg_offset (insn_decode_record
*thumb_insn_r
)
11735 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
11736 uint32_t record_buf
[8], record_buf_mem
[8];
11738 uint32_t reg_src1
= 0, reg_src2
= 0;
11739 uint32_t opcode1
= 0, opcode2
= 0, opcode3
= 0;
11741 ULONGEST u_regval
[2] = {0};
11743 opcode1
= bits (thumb_insn_r
->arm_insn
, 10, 12);
11745 if (bit (thumb_insn_r
->arm_insn
, 12))
11747 /* Handle load/store register offset. */
11748 uint32_t opB
= bits (thumb_insn_r
->arm_insn
, 9, 11);
11750 if (in_inclusive_range (opB
, 4U, 7U))
11752 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
11753 reg_src1
= bits (thumb_insn_r
->arm_insn
,0, 2);
11754 record_buf
[0] = reg_src1
;
11755 thumb_insn_r
->reg_rec_count
= 1;
11757 else if (in_inclusive_range (opB
, 0U, 2U))
11759 /* STR(2), STRB(2), STRH(2) . */
11760 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
11761 reg_src2
= bits (thumb_insn_r
->arm_insn
, 6, 8);
11762 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
11763 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
11765 record_buf_mem
[0] = 4; /* STR (2). */
11767 record_buf_mem
[0] = 1; /* STRB (2). */
11769 record_buf_mem
[0] = 2; /* STRH (2). */
11770 record_buf_mem
[1] = u_regval
[0] + u_regval
[1];
11771 thumb_insn_r
->mem_rec_count
= 1;
11774 else if (bit (thumb_insn_r
->arm_insn
, 11))
11776 /* Handle load from literal pool. */
11778 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
11779 record_buf
[0] = reg_src1
;
11780 thumb_insn_r
->reg_rec_count
= 1;
11784 /* Special data instructions and branch and exchange */
11785 opcode2
= bits (thumb_insn_r
->arm_insn
, 8, 9);
11786 opcode3
= bits (thumb_insn_r
->arm_insn
, 0, 2);
11787 if ((3 == opcode2
) && (!opcode3
))
11789 /* Branch with exchange. */
11790 record_buf
[0] = ARM_PS_REGNUM
;
11791 thumb_insn_r
->reg_rec_count
= 1;
11795 /* Format 8; special data processing insns. */
11796 record_buf
[0] = ARM_PS_REGNUM
;
11797 record_buf
[1] = (bit (thumb_insn_r
->arm_insn
, 7) << 3
11798 | bits (thumb_insn_r
->arm_insn
, 0, 2));
11799 thumb_insn_r
->reg_rec_count
= 2;
11804 /* Format 5; data processing insns. */
11805 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
11806 if (bit (thumb_insn_r
->arm_insn
, 7))
11808 reg_src1
= reg_src1
+ 8;
11810 record_buf
[0] = ARM_PS_REGNUM
;
11811 record_buf
[1] = reg_src1
;
11812 thumb_insn_r
->reg_rec_count
= 2;
11815 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
11816 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
11822 /* Handling opcode 001 insns. */
11825 thumb_record_ld_st_imm_offset (insn_decode_record
*thumb_insn_r
)
11827 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
11828 uint32_t record_buf
[8], record_buf_mem
[8];
11830 uint32_t reg_src1
= 0;
11831 uint32_t opcode
= 0, immed_5
= 0;
11833 ULONGEST u_regval
= 0;
11835 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
11840 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
11841 record_buf
[0] = reg_src1
;
11842 thumb_insn_r
->reg_rec_count
= 1;
11847 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
11848 immed_5
= bits (thumb_insn_r
->arm_insn
, 6, 10);
11849 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
11850 record_buf_mem
[0] = 4;
11851 record_buf_mem
[1] = u_regval
+ (immed_5
* 4);
11852 thumb_insn_r
->mem_rec_count
= 1;
11855 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
11856 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
11862 /* Handling opcode 100 insns. */
11865 thumb_record_ld_st_stack (insn_decode_record
*thumb_insn_r
)
11867 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
11868 uint32_t record_buf
[8], record_buf_mem
[8];
11870 uint32_t reg_src1
= 0;
11871 uint32_t opcode
= 0, immed_8
= 0, immed_5
= 0;
11873 ULONGEST u_regval
= 0;
11875 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
11880 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
11881 record_buf
[0] = reg_src1
;
11882 thumb_insn_r
->reg_rec_count
= 1;
11884 else if (1 == opcode
)
11887 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
11888 record_buf
[0] = reg_src1
;
11889 thumb_insn_r
->reg_rec_count
= 1;
11891 else if (2 == opcode
)
11894 immed_8
= bits (thumb_insn_r
->arm_insn
, 0, 7);
11895 regcache_raw_read_unsigned (reg_cache
, ARM_SP_REGNUM
, &u_regval
);
11896 record_buf_mem
[0] = 4;
11897 record_buf_mem
[1] = u_regval
+ (immed_8
* 4);
11898 thumb_insn_r
->mem_rec_count
= 1;
11900 else if (0 == opcode
)
11903 immed_5
= bits (thumb_insn_r
->arm_insn
, 6, 10);
11904 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
11905 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
11906 record_buf_mem
[0] = 2;
11907 record_buf_mem
[1] = u_regval
+ (immed_5
* 2);
11908 thumb_insn_r
->mem_rec_count
= 1;
11911 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
11912 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
11918 /* Handling opcode 101 insns. */
11921 thumb_record_misc (insn_decode_record
*thumb_insn_r
)
11923 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
11925 uint32_t opcode
= 0;
11926 uint32_t register_bits
= 0, register_count
= 0;
11927 uint32_t index
= 0, start_address
= 0;
11928 uint32_t record_buf
[24], record_buf_mem
[48];
11931 ULONGEST u_regval
= 0;
11933 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
11935 if (opcode
== 0 || opcode
== 1)
11937 /* ADR and ADD (SP plus immediate) */
11939 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
11940 record_buf
[0] = reg_src1
;
11941 thumb_insn_r
->reg_rec_count
= 1;
11945 /* Miscellaneous 16-bit instructions */
11946 uint32_t opcode2
= bits (thumb_insn_r
->arm_insn
, 8, 11);
11951 /* SETEND and CPS */
11954 /* ADD/SUB (SP plus immediate) */
11955 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
11956 record_buf
[0] = ARM_SP_REGNUM
;
11957 thumb_insn_r
->reg_rec_count
= 1;
11959 case 1: /* fall through */
11960 case 3: /* fall through */
11961 case 9: /* fall through */
11966 /* SXTH, SXTB, UXTH, UXTB */
11967 record_buf
[0] = bits (thumb_insn_r
->arm_insn
, 0, 2);
11968 thumb_insn_r
->reg_rec_count
= 1;
11970 case 4: /* fall through */
11973 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
11974 regcache_raw_read_unsigned (reg_cache
, ARM_SP_REGNUM
, &u_regval
);
11975 while (register_bits
)
11977 if (register_bits
& 0x00000001)
11979 register_bits
= register_bits
>> 1;
11981 start_address
= u_regval
- \
11982 (4 * (bit (thumb_insn_r
->arm_insn
, 8) + register_count
));
11983 thumb_insn_r
->mem_rec_count
= register_count
;
11984 while (register_count
)
11986 record_buf_mem
[(register_count
* 2) - 1] = start_address
;
11987 record_buf_mem
[(register_count
* 2) - 2] = 4;
11988 start_address
= start_address
+ 4;
11991 record_buf
[0] = ARM_SP_REGNUM
;
11992 thumb_insn_r
->reg_rec_count
= 1;
11995 /* REV, REV16, REVSH */
11996 record_buf
[0] = bits (thumb_insn_r
->arm_insn
, 0, 2);
11997 thumb_insn_r
->reg_rec_count
= 1;
11999 case 12: /* fall through */
12002 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12003 while (register_bits
)
12005 if (register_bits
& 0x00000001)
12006 record_buf
[index
++] = register_count
;
12007 register_bits
= register_bits
>> 1;
12010 record_buf
[index
++] = ARM_PS_REGNUM
;
12011 record_buf
[index
++] = ARM_SP_REGNUM
;
12012 thumb_insn_r
->reg_rec_count
= index
;
12016 /* Handle enhanced software breakpoint insn, BKPT. */
12017 /* CPSR is changed to be executed in ARM state, disabling normal
12018 interrupts, entering abort mode. */
12019 /* According to high vector configuration PC is set. */
12020 /* User hits breakpoint and type reverse, in that case, we need to go back with
12021 previous CPSR and Program Counter. */
12022 record_buf
[0] = ARM_PS_REGNUM
;
12023 record_buf
[1] = ARM_LR_REGNUM
;
12024 thumb_insn_r
->reg_rec_count
= 2;
12025 /* We need to save SPSR value, which is not yet done. */
12026 printf_unfiltered (_("Process record does not support instruction "
12027 "0x%0x at address %s.\n"),
12028 thumb_insn_r
->arm_insn
,
12029 paddress (thumb_insn_r
->gdbarch
,
12030 thumb_insn_r
->this_addr
));
12034 /* If-Then, and hints */
12041 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12042 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12048 /* Handling opcode 110 insns. */
12051 thumb_record_ldm_stm_swi (insn_decode_record
*thumb_insn_r
)
12053 struct gdbarch_tdep
*tdep
= gdbarch_tdep (thumb_insn_r
->gdbarch
);
12054 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12056 uint32_t ret
= 0; /* function return value: -1:record failure ; 0:success */
12057 uint32_t reg_src1
= 0;
12058 uint32_t opcode1
= 0, opcode2
= 0, register_bits
= 0, register_count
= 0;
12059 uint32_t index
= 0, start_address
= 0;
12060 uint32_t record_buf
[24], record_buf_mem
[48];
12062 ULONGEST u_regval
= 0;
12064 opcode1
= bits (thumb_insn_r
->arm_insn
, 8, 12);
12065 opcode2
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12071 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12073 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12074 while (register_bits
)
12076 if (register_bits
& 0x00000001)
12077 record_buf
[index
++] = register_count
;
12078 register_bits
= register_bits
>> 1;
12081 record_buf
[index
++] = reg_src1
;
12082 thumb_insn_r
->reg_rec_count
= index
;
12084 else if (0 == opcode2
)
12086 /* It handles both STMIA. */
12087 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12089 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12090 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
12091 while (register_bits
)
12093 if (register_bits
& 0x00000001)
12095 register_bits
= register_bits
>> 1;
12097 start_address
= u_regval
;
12098 thumb_insn_r
->mem_rec_count
= register_count
;
12099 while (register_count
)
12101 record_buf_mem
[(register_count
* 2) - 1] = start_address
;
12102 record_buf_mem
[(register_count
* 2) - 2] = 4;
12103 start_address
= start_address
+ 4;
12107 else if (0x1F == opcode1
)
12109 /* Handle arm syscall insn. */
12110 if (tdep
->arm_syscall_record
!= NULL
)
12112 regcache_raw_read_unsigned (reg_cache
, 7, &u_regval
);
12113 ret
= tdep
->arm_syscall_record (reg_cache
, u_regval
);
12117 printf_unfiltered (_("no syscall record support\n"));
12122 /* B (1), conditional branch is automatically taken care in process_record,
12123 as PC is saved there. */
12125 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12126 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12132 /* Handling opcode 111 insns. */
12135 thumb_record_branch (insn_decode_record
*thumb_insn_r
)
12137 uint32_t record_buf
[8];
12138 uint32_t bits_h
= 0;
12140 bits_h
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12142 if (2 == bits_h
|| 3 == bits_h
)
12145 record_buf
[0] = ARM_LR_REGNUM
;
12146 thumb_insn_r
->reg_rec_count
= 1;
12148 else if (1 == bits_h
)
12151 record_buf
[0] = ARM_PS_REGNUM
;
12152 record_buf
[1] = ARM_LR_REGNUM
;
12153 thumb_insn_r
->reg_rec_count
= 2;
12156 /* B(2) is automatically taken care in process_record, as PC is
12159 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12164 /* Handler for thumb2 load/store multiple instructions. */
12167 thumb2_record_ld_st_multiple (insn_decode_record
*thumb2_insn_r
)
12169 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
12171 uint32_t reg_rn
, op
;
12172 uint32_t register_bits
= 0, register_count
= 0;
12173 uint32_t index
= 0, start_address
= 0;
12174 uint32_t record_buf
[24], record_buf_mem
[48];
12176 ULONGEST u_regval
= 0;
12178 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12179 op
= bits (thumb2_insn_r
->arm_insn
, 23, 24);
12181 if (0 == op
|| 3 == op
)
12183 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
12185 /* Handle RFE instruction. */
12186 record_buf
[0] = ARM_PS_REGNUM
;
12187 thumb2_insn_r
->reg_rec_count
= 1;
12191 /* Handle SRS instruction after reading banked SP. */
12192 return arm_record_unsupported_insn (thumb2_insn_r
);
12195 else if (1 == op
|| 2 == op
)
12197 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
12199 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
12200 register_bits
= bits (thumb2_insn_r
->arm_insn
, 0, 15);
12201 while (register_bits
)
12203 if (register_bits
& 0x00000001)
12204 record_buf
[index
++] = register_count
;
12207 register_bits
= register_bits
>> 1;
12209 record_buf
[index
++] = reg_rn
;
12210 record_buf
[index
++] = ARM_PS_REGNUM
;
12211 thumb2_insn_r
->reg_rec_count
= index
;
12215 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
12216 register_bits
= bits (thumb2_insn_r
->arm_insn
, 0, 15);
12217 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
12218 while (register_bits
)
12220 if (register_bits
& 0x00000001)
12223 register_bits
= register_bits
>> 1;
12228 /* Start address calculation for LDMDB/LDMEA. */
12229 start_address
= u_regval
;
12233 /* Start address calculation for LDMDB/LDMEA. */
12234 start_address
= u_regval
- register_count
* 4;
12237 thumb2_insn_r
->mem_rec_count
= register_count
;
12238 while (register_count
)
12240 record_buf_mem
[register_count
* 2 - 1] = start_address
;
12241 record_buf_mem
[register_count
* 2 - 2] = 4;
12242 start_address
= start_address
+ 4;
12245 record_buf
[0] = reg_rn
;
12246 record_buf
[1] = ARM_PS_REGNUM
;
12247 thumb2_insn_r
->reg_rec_count
= 2;
12251 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
12253 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12255 return ARM_RECORD_SUCCESS
;
12258 /* Handler for thumb2 load/store (dual/exclusive) and table branch
12262 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record
*thumb2_insn_r
)
12264 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
12266 uint32_t reg_rd
, reg_rn
, offset_imm
;
12267 uint32_t reg_dest1
, reg_dest2
;
12268 uint32_t address
, offset_addr
;
12269 uint32_t record_buf
[8], record_buf_mem
[8];
12270 uint32_t op1
, op2
, op3
;
12272 ULONGEST u_regval
[2];
12274 op1
= bits (thumb2_insn_r
->arm_insn
, 23, 24);
12275 op2
= bits (thumb2_insn_r
->arm_insn
, 20, 21);
12276 op3
= bits (thumb2_insn_r
->arm_insn
, 4, 7);
12278 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
12280 if(!(1 == op1
&& 1 == op2
&& (0 == op3
|| 1 == op3
)))
12282 reg_dest1
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
12283 record_buf
[0] = reg_dest1
;
12284 record_buf
[1] = ARM_PS_REGNUM
;
12285 thumb2_insn_r
->reg_rec_count
= 2;
12288 if (3 == op2
|| (op1
& 2) || (1 == op1
&& 1 == op2
&& 7 == op3
))
12290 reg_dest2
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
12291 record_buf
[2] = reg_dest2
;
12292 thumb2_insn_r
->reg_rec_count
= 3;
12297 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12298 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
[0]);
12300 if (0 == op1
&& 0 == op2
)
12302 /* Handle STREX. */
12303 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
12304 address
= u_regval
[0] + (offset_imm
* 4);
12305 record_buf_mem
[0] = 4;
12306 record_buf_mem
[1] = address
;
12307 thumb2_insn_r
->mem_rec_count
= 1;
12308 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
12309 record_buf
[0] = reg_rd
;
12310 thumb2_insn_r
->reg_rec_count
= 1;
12312 else if (1 == op1
&& 0 == op2
)
12314 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
12315 record_buf
[0] = reg_rd
;
12316 thumb2_insn_r
->reg_rec_count
= 1;
12317 address
= u_regval
[0];
12318 record_buf_mem
[1] = address
;
12322 /* Handle STREXB. */
12323 record_buf_mem
[0] = 1;
12324 thumb2_insn_r
->mem_rec_count
= 1;
12328 /* Handle STREXH. */
12329 record_buf_mem
[0] = 2 ;
12330 thumb2_insn_r
->mem_rec_count
= 1;
12334 /* Handle STREXD. */
12335 address
= u_regval
[0];
12336 record_buf_mem
[0] = 4;
12337 record_buf_mem
[2] = 4;
12338 record_buf_mem
[3] = address
+ 4;
12339 thumb2_insn_r
->mem_rec_count
= 2;
12344 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
12346 if (bit (thumb2_insn_r
->arm_insn
, 24))
12348 if (bit (thumb2_insn_r
->arm_insn
, 23))
12349 offset_addr
= u_regval
[0] + (offset_imm
* 4);
12351 offset_addr
= u_regval
[0] - (offset_imm
* 4);
12353 address
= offset_addr
;
12356 address
= u_regval
[0];
12358 record_buf_mem
[0] = 4;
12359 record_buf_mem
[1] = address
;
12360 record_buf_mem
[2] = 4;
12361 record_buf_mem
[3] = address
+ 4;
12362 thumb2_insn_r
->mem_rec_count
= 2;
12363 record_buf
[0] = reg_rn
;
12364 thumb2_insn_r
->reg_rec_count
= 1;
12368 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12370 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
12372 return ARM_RECORD_SUCCESS
;
12375 /* Handler for thumb2 data processing (shift register and modified immediate)
12379 thumb2_record_data_proc_sreg_mimm (insn_decode_record
*thumb2_insn_r
)
12381 uint32_t reg_rd
, op
;
12382 uint32_t record_buf
[8];
12384 op
= bits (thumb2_insn_r
->arm_insn
, 21, 24);
12385 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
12387 if ((0 == op
|| 4 == op
|| 8 == op
|| 13 == op
) && 15 == reg_rd
)
12389 record_buf
[0] = ARM_PS_REGNUM
;
12390 thumb2_insn_r
->reg_rec_count
= 1;
12394 record_buf
[0] = reg_rd
;
12395 record_buf
[1] = ARM_PS_REGNUM
;
12396 thumb2_insn_r
->reg_rec_count
= 2;
12399 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12401 return ARM_RECORD_SUCCESS
;
12404 /* Generic handler for thumb2 instructions which effect destination and PS
12408 thumb2_record_ps_dest_generic (insn_decode_record
*thumb2_insn_r
)
12411 uint32_t record_buf
[8];
12413 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
12415 record_buf
[0] = reg_rd
;
12416 record_buf
[1] = ARM_PS_REGNUM
;
12417 thumb2_insn_r
->reg_rec_count
= 2;
12419 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12421 return ARM_RECORD_SUCCESS
;
12424 /* Handler for thumb2 branch and miscellaneous control instructions. */
12427 thumb2_record_branch_misc_cntrl (insn_decode_record
*thumb2_insn_r
)
12429 uint32_t op
, op1
, op2
;
12430 uint32_t record_buf
[8];
12432 op
= bits (thumb2_insn_r
->arm_insn
, 20, 26);
12433 op1
= bits (thumb2_insn_r
->arm_insn
, 12, 14);
12434 op2
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
12436 /* Handle MSR insn. */
12437 if (!(op1
& 0x2) && 0x38 == op
)
12441 /* CPSR is going to be changed. */
12442 record_buf
[0] = ARM_PS_REGNUM
;
12443 thumb2_insn_r
->reg_rec_count
= 1;
12447 arm_record_unsupported_insn(thumb2_insn_r
);
12451 else if (4 == (op1
& 0x5) || 5 == (op1
& 0x5))
12454 record_buf
[0] = ARM_PS_REGNUM
;
12455 record_buf
[1] = ARM_LR_REGNUM
;
12456 thumb2_insn_r
->reg_rec_count
= 2;
12459 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12461 return ARM_RECORD_SUCCESS
;
12464 /* Handler for thumb2 store single data item instructions. */
12467 thumb2_record_str_single_data (insn_decode_record
*thumb2_insn_r
)
12469 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
12471 uint32_t reg_rn
, reg_rm
, offset_imm
, shift_imm
;
12472 uint32_t address
, offset_addr
;
12473 uint32_t record_buf
[8], record_buf_mem
[8];
12476 ULONGEST u_regval
[2];
12478 op1
= bits (thumb2_insn_r
->arm_insn
, 21, 23);
12479 op2
= bits (thumb2_insn_r
->arm_insn
, 6, 11);
12480 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12481 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
[0]);
12483 if (bit (thumb2_insn_r
->arm_insn
, 23))
12486 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 11);
12487 offset_addr
= u_regval
[0] + offset_imm
;
12488 address
= offset_addr
;
12493 if ((0 == op1
|| 1 == op1
|| 2 == op1
) && !(op2
& 0x20))
12495 /* Handle STRB (register). */
12496 reg_rm
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
12497 regcache_raw_read_unsigned (reg_cache
, reg_rm
, &u_regval
[1]);
12498 shift_imm
= bits (thumb2_insn_r
->arm_insn
, 4, 5);
12499 offset_addr
= u_regval
[1] << shift_imm
;
12500 address
= u_regval
[0] + offset_addr
;
12504 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
12505 if (bit (thumb2_insn_r
->arm_insn
, 10))
12507 if (bit (thumb2_insn_r
->arm_insn
, 9))
12508 offset_addr
= u_regval
[0] + offset_imm
;
12510 offset_addr
= u_regval
[0] - offset_imm
;
12512 address
= offset_addr
;
12515 address
= u_regval
[0];
12521 /* Store byte instructions. */
12524 record_buf_mem
[0] = 1;
12526 /* Store half word instructions. */
12529 record_buf_mem
[0] = 2;
12531 /* Store word instructions. */
12534 record_buf_mem
[0] = 4;
12538 gdb_assert_not_reached ("no decoding pattern found");
12542 record_buf_mem
[1] = address
;
12543 thumb2_insn_r
->mem_rec_count
= 1;
12544 record_buf
[0] = reg_rn
;
12545 thumb2_insn_r
->reg_rec_count
= 1;
12547 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12549 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
12551 return ARM_RECORD_SUCCESS
;
12554 /* Handler for thumb2 load memory hints instructions. */
12557 thumb2_record_ld_mem_hints (insn_decode_record
*thumb2_insn_r
)
12559 uint32_t record_buf
[8];
12560 uint32_t reg_rt
, reg_rn
;
12562 reg_rt
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
12563 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12565 if (ARM_PC_REGNUM
!= reg_rt
)
12567 record_buf
[0] = reg_rt
;
12568 record_buf
[1] = reg_rn
;
12569 record_buf
[2] = ARM_PS_REGNUM
;
12570 thumb2_insn_r
->reg_rec_count
= 3;
12572 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12574 return ARM_RECORD_SUCCESS
;
12577 return ARM_RECORD_FAILURE
;
12580 /* Handler for thumb2 load word instructions. */
12583 thumb2_record_ld_word (insn_decode_record
*thumb2_insn_r
)
12585 uint32_t record_buf
[8];
12587 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
12588 record_buf
[1] = ARM_PS_REGNUM
;
12589 thumb2_insn_r
->reg_rec_count
= 2;
12591 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12593 return ARM_RECORD_SUCCESS
;
12596 /* Handler for thumb2 long multiply, long multiply accumulate, and
12597 divide instructions. */
12600 thumb2_record_lmul_lmla_div (insn_decode_record
*thumb2_insn_r
)
12602 uint32_t opcode1
= 0, opcode2
= 0;
12603 uint32_t record_buf
[8];
12605 opcode1
= bits (thumb2_insn_r
->arm_insn
, 20, 22);
12606 opcode2
= bits (thumb2_insn_r
->arm_insn
, 4, 7);
12608 if (0 == opcode1
|| 2 == opcode1
|| (opcode1
>= 4 && opcode1
<= 6))
12610 /* Handle SMULL, UMULL, SMULAL. */
12611 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
12612 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 16, 19);
12613 record_buf
[1] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
12614 record_buf
[2] = ARM_PS_REGNUM
;
12615 thumb2_insn_r
->reg_rec_count
= 3;
12617 else if (1 == opcode1
|| 3 == opcode2
)
12619 /* Handle SDIV and UDIV. */
12620 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 16, 19);
12621 record_buf
[1] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
12622 record_buf
[2] = ARM_PS_REGNUM
;
12623 thumb2_insn_r
->reg_rec_count
= 3;
12626 return ARM_RECORD_FAILURE
;
12628 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12630 return ARM_RECORD_SUCCESS
;
12633 /* Record handler for thumb32 coprocessor instructions. */
12636 thumb2_record_coproc_insn (insn_decode_record
*thumb2_insn_r
)
12638 if (bit (thumb2_insn_r
->arm_insn
, 25))
12639 return arm_record_coproc_data_proc (thumb2_insn_r
);
12641 return arm_record_asimd_vfp_coproc (thumb2_insn_r
);
12644 /* Record handler for advance SIMD structure load/store instructions. */
12647 thumb2_record_asimd_struct_ld_st (insn_decode_record
*thumb2_insn_r
)
12649 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
12650 uint32_t l_bit
, a_bit
, b_bits
;
12651 uint32_t record_buf
[128], record_buf_mem
[128];
12652 uint32_t reg_rn
, reg_vd
, address
, f_elem
;
12653 uint32_t index_r
= 0, index_e
= 0, bf_regs
= 0, index_m
= 0, loop_t
= 0;
12656 l_bit
= bit (thumb2_insn_r
->arm_insn
, 21);
12657 a_bit
= bit (thumb2_insn_r
->arm_insn
, 23);
12658 b_bits
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
12659 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
12660 reg_vd
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
12661 reg_vd
= (bit (thumb2_insn_r
->arm_insn
, 22) << 4) | reg_vd
;
12662 f_ebytes
= (1 << bits (thumb2_insn_r
->arm_insn
, 6, 7));
12663 f_elem
= 8 / f_ebytes
;
12667 ULONGEST u_regval
= 0;
12668 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
12669 address
= u_regval
;
12674 if (b_bits
== 0x02 || b_bits
== 0x0a || (b_bits
& 0x0e) == 0x06)
12676 if (b_bits
== 0x07)
12678 else if (b_bits
== 0x0a)
12680 else if (b_bits
== 0x06)
12682 else if (b_bits
== 0x02)
12687 for (index_r
= 0; index_r
< bf_regs
; index_r
++)
12689 for (index_e
= 0; index_e
< f_elem
; index_e
++)
12691 record_buf_mem
[index_m
++] = f_ebytes
;
12692 record_buf_mem
[index_m
++] = address
;
12693 address
= address
+ f_ebytes
;
12694 thumb2_insn_r
->mem_rec_count
+= 1;
12699 else if (b_bits
== 0x03 || (b_bits
& 0x0e) == 0x08)
12701 if (b_bits
== 0x09 || b_bits
== 0x08)
12703 else if (b_bits
== 0x03)
12708 for (index_r
= 0; index_r
< bf_regs
; index_r
++)
12709 for (index_e
= 0; index_e
< f_elem
; index_e
++)
12711 for (loop_t
= 0; loop_t
< 2; loop_t
++)
12713 record_buf_mem
[index_m
++] = f_ebytes
;
12714 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
12715 thumb2_insn_r
->mem_rec_count
+= 1;
12717 address
= address
+ (2 * f_ebytes
);
12721 else if ((b_bits
& 0x0e) == 0x04)
12723 for (index_e
= 0; index_e
< f_elem
; index_e
++)
12725 for (loop_t
= 0; loop_t
< 3; loop_t
++)
12727 record_buf_mem
[index_m
++] = f_ebytes
;
12728 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
12729 thumb2_insn_r
->mem_rec_count
+= 1;
12731 address
= address
+ (3 * f_ebytes
);
12735 else if (!(b_bits
& 0x0e))
12737 for (index_e
= 0; index_e
< f_elem
; index_e
++)
12739 for (loop_t
= 0; loop_t
< 4; loop_t
++)
12741 record_buf_mem
[index_m
++] = f_ebytes
;
12742 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
12743 thumb2_insn_r
->mem_rec_count
+= 1;
12745 address
= address
+ (4 * f_ebytes
);
12751 uint8_t bft_size
= bits (thumb2_insn_r
->arm_insn
, 10, 11);
12753 if (bft_size
== 0x00)
12755 else if (bft_size
== 0x01)
12757 else if (bft_size
== 0x02)
12763 if (!(b_bits
& 0x0b) || b_bits
== 0x08)
12764 thumb2_insn_r
->mem_rec_count
= 1;
12766 else if ((b_bits
& 0x0b) == 0x01 || b_bits
== 0x09)
12767 thumb2_insn_r
->mem_rec_count
= 2;
12769 else if ((b_bits
& 0x0b) == 0x02 || b_bits
== 0x0a)
12770 thumb2_insn_r
->mem_rec_count
= 3;
12772 else if ((b_bits
& 0x0b) == 0x03 || b_bits
== 0x0b)
12773 thumb2_insn_r
->mem_rec_count
= 4;
12775 for (index_m
= 0; index_m
< thumb2_insn_r
->mem_rec_count
; index_m
++)
12777 record_buf_mem
[index_m
] = f_ebytes
;
12778 record_buf_mem
[index_m
] = address
+ (index_m
* f_ebytes
);
12787 if (b_bits
== 0x02 || b_bits
== 0x0a || (b_bits
& 0x0e) == 0x06)
12788 thumb2_insn_r
->reg_rec_count
= 1;
12790 else if (b_bits
== 0x03 || (b_bits
& 0x0e) == 0x08)
12791 thumb2_insn_r
->reg_rec_count
= 2;
12793 else if ((b_bits
& 0x0e) == 0x04)
12794 thumb2_insn_r
->reg_rec_count
= 3;
12796 else if (!(b_bits
& 0x0e))
12797 thumb2_insn_r
->reg_rec_count
= 4;
12802 if (!(b_bits
& 0x0b) || b_bits
== 0x08 || b_bits
== 0x0c)
12803 thumb2_insn_r
->reg_rec_count
= 1;
12805 else if ((b_bits
& 0x0b) == 0x01 || b_bits
== 0x09 || b_bits
== 0x0d)
12806 thumb2_insn_r
->reg_rec_count
= 2;
12808 else if ((b_bits
& 0x0b) == 0x02 || b_bits
== 0x0a || b_bits
== 0x0e)
12809 thumb2_insn_r
->reg_rec_count
= 3;
12811 else if ((b_bits
& 0x0b) == 0x03 || b_bits
== 0x0b || b_bits
== 0x0f)
12812 thumb2_insn_r
->reg_rec_count
= 4;
12814 for (index_r
= 0; index_r
< thumb2_insn_r
->reg_rec_count
; index_r
++)
12815 record_buf
[index_r
] = reg_vd
+ ARM_D0_REGNUM
+ index_r
;
12819 if (bits (thumb2_insn_r
->arm_insn
, 0, 3) != 15)
12821 record_buf
[index_r
] = reg_rn
;
12822 thumb2_insn_r
->reg_rec_count
+= 1;
12825 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
12827 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
12832 /* Decodes thumb2 instruction type and invokes its record handler. */
12834 static unsigned int
12835 thumb2_record_decode_insn_handler (insn_decode_record
*thumb2_insn_r
)
12837 uint32_t op
, op1
, op2
;
12839 op
= bit (thumb2_insn_r
->arm_insn
, 15);
12840 op1
= bits (thumb2_insn_r
->arm_insn
, 27, 28);
12841 op2
= bits (thumb2_insn_r
->arm_insn
, 20, 26);
12845 if (!(op2
& 0x64 ))
12847 /* Load/store multiple instruction. */
12848 return thumb2_record_ld_st_multiple (thumb2_insn_r
);
12850 else if ((op2
& 0x64) == 0x4)
12852 /* Load/store (dual/exclusive) and table branch instruction. */
12853 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r
);
12855 else if ((op2
& 0x60) == 0x20)
12857 /* Data-processing (shifted register). */
12858 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r
);
12860 else if (op2
& 0x40)
12862 /* Co-processor instructions. */
12863 return thumb2_record_coproc_insn (thumb2_insn_r
);
12866 else if (op1
== 0x02)
12870 /* Branches and miscellaneous control instructions. */
12871 return thumb2_record_branch_misc_cntrl (thumb2_insn_r
);
12873 else if (op2
& 0x20)
12875 /* Data-processing (plain binary immediate) instruction. */
12876 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
12880 /* Data-processing (modified immediate). */
12881 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r
);
12884 else if (op1
== 0x03)
12886 if (!(op2
& 0x71 ))
12888 /* Store single data item. */
12889 return thumb2_record_str_single_data (thumb2_insn_r
);
12891 else if (!((op2
& 0x71) ^ 0x10))
12893 /* Advanced SIMD or structure load/store instructions. */
12894 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r
);
12896 else if (!((op2
& 0x67) ^ 0x01))
12898 /* Load byte, memory hints instruction. */
12899 return thumb2_record_ld_mem_hints (thumb2_insn_r
);
12901 else if (!((op2
& 0x67) ^ 0x03))
12903 /* Load halfword, memory hints instruction. */
12904 return thumb2_record_ld_mem_hints (thumb2_insn_r
);
12906 else if (!((op2
& 0x67) ^ 0x05))
12908 /* Load word instruction. */
12909 return thumb2_record_ld_word (thumb2_insn_r
);
12911 else if (!((op2
& 0x70) ^ 0x20))
12913 /* Data-processing (register) instruction. */
12914 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
12916 else if (!((op2
& 0x78) ^ 0x30))
12918 /* Multiply, multiply accumulate, abs diff instruction. */
12919 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
12921 else if (!((op2
& 0x78) ^ 0x38))
12923 /* Long multiply, long multiply accumulate, and divide. */
12924 return thumb2_record_lmul_lmla_div (thumb2_insn_r
);
12926 else if (op2
& 0x40)
12928 /* Co-processor instructions. */
12929 return thumb2_record_coproc_insn (thumb2_insn_r
);
12937 /* Abstract memory reader. */
12939 class abstract_memory_reader
12942 /* Read LEN bytes of target memory at address MEMADDR, placing the
12943 results in GDB's memory at BUF. Return true on success. */
12945 virtual bool read (CORE_ADDR memaddr
, gdb_byte
*buf
, const size_t len
) = 0;
12948 /* Instruction reader from real target. */
12950 class instruction_reader
: public abstract_memory_reader
12953 bool read (CORE_ADDR memaddr
, gdb_byte
*buf
, const size_t len
) override
12955 if (target_read_memory (memaddr
, buf
, len
))
12964 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
12965 and positive val on fauilure. */
12968 extract_arm_insn (abstract_memory_reader
& reader
,
12969 insn_decode_record
*insn_record
, uint32_t insn_size
)
12971 gdb_byte buf
[insn_size
];
12973 memset (&buf
[0], 0, insn_size
);
12975 if (!reader
.read (insn_record
->this_addr
, buf
, insn_size
))
12977 insn_record
->arm_insn
= (uint32_t) extract_unsigned_integer (&buf
[0],
12979 gdbarch_byte_order_for_code (insn_record
->gdbarch
));
12983 typedef int (*sti_arm_hdl_fp_t
) (insn_decode_record
*);
12985 /* Decode arm/thumb insn depending on condition cods and opcodes; and
12989 decode_insn (abstract_memory_reader
&reader
, insn_decode_record
*arm_record
,
12990 record_type_t record_type
, uint32_t insn_size
)
12993 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm
12995 static const sti_arm_hdl_fp_t arm_handle_insn
[8] =
12997 arm_record_data_proc_misc_ld_str
, /* 000. */
12998 arm_record_data_proc_imm
, /* 001. */
12999 arm_record_ld_st_imm_offset
, /* 010. */
13000 arm_record_ld_st_reg_offset
, /* 011. */
13001 arm_record_ld_st_multiple
, /* 100. */
13002 arm_record_b_bl
, /* 101. */
13003 arm_record_asimd_vfp_coproc
, /* 110. */
13004 arm_record_coproc_data_proc
/* 111. */
13007 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb
13009 static const sti_arm_hdl_fp_t thumb_handle_insn
[8] =
13011 thumb_record_shift_add_sub
, /* 000. */
13012 thumb_record_add_sub_cmp_mov
, /* 001. */
13013 thumb_record_ld_st_reg_offset
, /* 010. */
13014 thumb_record_ld_st_imm_offset
, /* 011. */
13015 thumb_record_ld_st_stack
, /* 100. */
13016 thumb_record_misc
, /* 101. */
13017 thumb_record_ldm_stm_swi
, /* 110. */
13018 thumb_record_branch
/* 111. */
13021 uint32_t ret
= 0; /* return value: negative:failure 0:success. */
13022 uint32_t insn_id
= 0;
13024 if (extract_arm_insn (reader
, arm_record
, insn_size
))
13028 printf_unfiltered (_("Process record: error reading memory at "
13029 "addr %s len = %d.\n"),
13030 paddress (arm_record
->gdbarch
,
13031 arm_record
->this_addr
), insn_size
);
13035 else if (ARM_RECORD
== record_type
)
13037 arm_record
->cond
= bits (arm_record
->arm_insn
, 28, 31);
13038 insn_id
= bits (arm_record
->arm_insn
, 25, 27);
13040 if (arm_record
->cond
== 0xf)
13041 ret
= arm_record_extension_space (arm_record
);
13044 /* If this insn has fallen into extension space
13045 then we need not decode it anymore. */
13046 ret
= arm_handle_insn
[insn_id
] (arm_record
);
13048 if (ret
!= ARM_RECORD_SUCCESS
)
13050 arm_record_unsupported_insn (arm_record
);
13054 else if (THUMB_RECORD
== record_type
)
13056 /* As thumb does not have condition codes, we set negative. */
13057 arm_record
->cond
= -1;
13058 insn_id
= bits (arm_record
->arm_insn
, 13, 15);
13059 ret
= thumb_handle_insn
[insn_id
] (arm_record
);
13060 if (ret
!= ARM_RECORD_SUCCESS
)
13062 arm_record_unsupported_insn (arm_record
);
13066 else if (THUMB2_RECORD
== record_type
)
13068 /* As thumb does not have condition codes, we set negative. */
13069 arm_record
->cond
= -1;
13071 /* Swap first half of 32bit thumb instruction with second half. */
13072 arm_record
->arm_insn
13073 = (arm_record
->arm_insn
>> 16) | (arm_record
->arm_insn
<< 16);
13075 ret
= thumb2_record_decode_insn_handler (arm_record
);
13077 if (ret
!= ARM_RECORD_SUCCESS
)
13079 arm_record_unsupported_insn (arm_record
);
13085 /* Throw assertion. */
13086 gdb_assert_not_reached ("not a valid instruction, could not decode");
13093 namespace selftests
{
13095 /* Provide both 16-bit and 32-bit thumb instructions. */
13097 class instruction_reader_thumb
: public abstract_memory_reader
13100 template<size_t SIZE
>
13101 instruction_reader_thumb (enum bfd_endian endian
,
13102 const uint16_t (&insns
)[SIZE
])
13103 : m_endian (endian
), m_insns (insns
), m_insns_size (SIZE
)
13106 bool read (CORE_ADDR memaddr
, gdb_byte
*buf
, const size_t len
) override
13108 SELF_CHECK (len
== 4 || len
== 2);
13109 SELF_CHECK (memaddr
% 2 == 0);
13110 SELF_CHECK ((memaddr
/ 2) < m_insns_size
);
13112 store_unsigned_integer (buf
, 2, m_endian
, m_insns
[memaddr
/ 2]);
13115 store_unsigned_integer (&buf
[2], 2, m_endian
,
13116 m_insns
[memaddr
/ 2 + 1]);
13122 enum bfd_endian m_endian
;
13123 const uint16_t *m_insns
;
13124 size_t m_insns_size
;
13128 arm_record_test (void)
13130 struct gdbarch_info info
;
13131 gdbarch_info_init (&info
);
13132 info
.bfd_arch_info
= bfd_scan_arch ("arm");
13134 struct gdbarch
*gdbarch
= gdbarch_find_by_info (info
);
13136 SELF_CHECK (gdbarch
!= NULL
);
13138 /* 16-bit Thumb instructions. */
13140 insn_decode_record arm_record
;
13142 memset (&arm_record
, 0, sizeof (insn_decode_record
));
13143 arm_record
.gdbarch
= gdbarch
;
13145 static const uint16_t insns
[] = {
13146 /* db b2 uxtb r3, r3 */
13148 /* cd 58 ldr r5, [r1, r3] */
13152 enum bfd_endian endian
= gdbarch_byte_order_for_code (arm_record
.gdbarch
);
13153 instruction_reader_thumb
reader (endian
, insns
);
13154 int ret
= decode_insn (reader
, &arm_record
, THUMB_RECORD
,
13155 THUMB_INSN_SIZE_BYTES
);
13157 SELF_CHECK (ret
== 0);
13158 SELF_CHECK (arm_record
.mem_rec_count
== 0);
13159 SELF_CHECK (arm_record
.reg_rec_count
== 1);
13160 SELF_CHECK (arm_record
.arm_regs
[0] == 3);
13162 arm_record
.this_addr
+= 2;
13163 ret
= decode_insn (reader
, &arm_record
, THUMB_RECORD
,
13164 THUMB_INSN_SIZE_BYTES
);
13166 SELF_CHECK (ret
== 0);
13167 SELF_CHECK (arm_record
.mem_rec_count
== 0);
13168 SELF_CHECK (arm_record
.reg_rec_count
== 1);
13169 SELF_CHECK (arm_record
.arm_regs
[0] == 5);
13172 /* 32-bit Thumb-2 instructions. */
13174 insn_decode_record arm_record
;
13176 memset (&arm_record
, 0, sizeof (insn_decode_record
));
13177 arm_record
.gdbarch
= gdbarch
;
13179 static const uint16_t insns
[] = {
13180 /* 1d ee 70 7f mrc 15, 0, r7, cr13, cr0, {3} */
13184 enum bfd_endian endian
= gdbarch_byte_order_for_code (arm_record
.gdbarch
);
13185 instruction_reader_thumb
reader (endian
, insns
);
13186 int ret
= decode_insn (reader
, &arm_record
, THUMB2_RECORD
,
13187 THUMB2_INSN_SIZE_BYTES
);
13189 SELF_CHECK (ret
== 0);
13190 SELF_CHECK (arm_record
.mem_rec_count
== 0);
13191 SELF_CHECK (arm_record
.reg_rec_count
== 1);
13192 SELF_CHECK (arm_record
.arm_regs
[0] == 7);
13195 } // namespace selftests
13196 #endif /* GDB_SELF_TEST */
13198 /* Cleans up local record registers and memory allocations. */
13201 deallocate_reg_mem (insn_decode_record
*record
)
13203 xfree (record
->arm_regs
);
13204 xfree (record
->arm_mems
);
13208 /* Parse the current instruction and record the values of the registers and
13209 memory that will be changed in current instruction to record_arch_list".
13210 Return -1 if something is wrong. */
13213 arm_process_record (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
13214 CORE_ADDR insn_addr
)
13217 uint32_t no_of_rec
= 0;
13218 uint32_t ret
= 0; /* return value: -1:record failure ; 0:success */
13219 ULONGEST t_bit
= 0, insn_id
= 0;
13221 ULONGEST u_regval
= 0;
13223 insn_decode_record arm_record
;
13225 memset (&arm_record
, 0, sizeof (insn_decode_record
));
13226 arm_record
.regcache
= regcache
;
13227 arm_record
.this_addr
= insn_addr
;
13228 arm_record
.gdbarch
= gdbarch
;
13231 if (record_debug
> 1)
13233 fprintf_unfiltered (gdb_stdlog
, "Process record: arm_process_record "
13235 paddress (gdbarch
, arm_record
.this_addr
));
13238 instruction_reader reader
;
13239 if (extract_arm_insn (reader
, &arm_record
, 2))
13243 printf_unfiltered (_("Process record: error reading memory at "
13244 "addr %s len = %d.\n"),
13245 paddress (arm_record
.gdbarch
,
13246 arm_record
.this_addr
), 2);
13251 /* Check the insn, whether it is thumb or arm one. */
13253 t_bit
= arm_psr_thumb_bit (arm_record
.gdbarch
);
13254 regcache_raw_read_unsigned (arm_record
.regcache
, ARM_PS_REGNUM
, &u_regval
);
13257 if (!(u_regval
& t_bit
))
13259 /* We are decoding arm insn. */
13260 ret
= decode_insn (reader
, &arm_record
, ARM_RECORD
, ARM_INSN_SIZE_BYTES
);
13264 insn_id
= bits (arm_record
.arm_insn
, 11, 15);
13265 /* is it thumb2 insn? */
13266 if ((0x1D == insn_id
) || (0x1E == insn_id
) || (0x1F == insn_id
))
13268 ret
= decode_insn (reader
, &arm_record
, THUMB2_RECORD
,
13269 THUMB2_INSN_SIZE_BYTES
);
13273 /* We are decoding thumb insn. */
13274 ret
= decode_insn (reader
, &arm_record
, THUMB_RECORD
,
13275 THUMB_INSN_SIZE_BYTES
);
13281 /* Record registers. */
13282 record_full_arch_list_add_reg (arm_record
.regcache
, ARM_PC_REGNUM
);
13283 if (arm_record
.arm_regs
)
13285 for (no_of_rec
= 0; no_of_rec
< arm_record
.reg_rec_count
; no_of_rec
++)
13287 if (record_full_arch_list_add_reg
13288 (arm_record
.regcache
, arm_record
.arm_regs
[no_of_rec
]))
13292 /* Record memories. */
13293 if (arm_record
.arm_mems
)
13295 for (no_of_rec
= 0; no_of_rec
< arm_record
.mem_rec_count
; no_of_rec
++)
13297 if (record_full_arch_list_add_mem
13298 ((CORE_ADDR
)arm_record
.arm_mems
[no_of_rec
].addr
,
13299 arm_record
.arm_mems
[no_of_rec
].len
))
13304 if (record_full_arch_list_add_end ())
13309 deallocate_reg_mem (&arm_record
);