1 /* Common target dependent code for GDB on ARM systems.
3 Copyright (C) 1988-2015 Free Software Foundation, Inc.
5 This file is part of GDB.
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22 #include <ctype.h> /* XXX for isupper (). */
29 #include "dis-asm.h" /* For register styles. */
31 #include "reggroups.h"
34 #include "arch-utils.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
40 #include "dwarf2-frame.h"
42 #include "prologue-value.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
49 #include "gdb/sim-arm.h"
52 #include "coff/internal.h"
58 #include "record-full.h"
60 #include "features/arm-with-m.c"
61 #include "features/arm-with-m-fpa-layout.c"
62 #include "features/arm-with-m-vfp-d16.c"
63 #include "features/arm-with-iwmmxt.c"
64 #include "features/arm-with-vfpv2.c"
65 #include "features/arm-with-vfpv3.c"
66 #include "features/arm-with-neon.c"
70 /* Macros for setting and testing a bit in a minimal symbol that marks
71 it as Thumb function. The MSB of the minimal symbol's "info" field
72 is used for this purpose.
74 MSYMBOL_SET_SPECIAL Actually sets the "special" bit.
75 MSYMBOL_IS_SPECIAL Tests the "special" bit in a minimal symbol. */
77 #define MSYMBOL_SET_SPECIAL(msym) \
78 MSYMBOL_TARGET_FLAG_1 (msym) = 1
80 #define MSYMBOL_IS_SPECIAL(msym) \
81 MSYMBOL_TARGET_FLAG_1 (msym)
83 /* Per-objfile data used for mapping symbols. */
84 static const struct objfile_data
*arm_objfile_data_key
;
86 struct arm_mapping_symbol
91 typedef struct arm_mapping_symbol arm_mapping_symbol_s
;
92 DEF_VEC_O(arm_mapping_symbol_s
);
94 struct arm_per_objfile
96 VEC(arm_mapping_symbol_s
) **section_maps
;
99 /* The list of available "set arm ..." and "show arm ..." commands. */
100 static struct cmd_list_element
*setarmcmdlist
= NULL
;
101 static struct cmd_list_element
*showarmcmdlist
= NULL
;
103 /* The type of floating-point to use. Keep this in sync with enum
104 arm_float_model, and the help string in _initialize_arm_tdep. */
105 static const char *const fp_model_strings
[] =
115 /* A variable that can be configured by the user. */
116 static enum arm_float_model arm_fp_model
= ARM_FLOAT_AUTO
;
117 static const char *current_fp_model
= "auto";
119 /* The ABI to use. Keep this in sync with arm_abi_kind. */
120 static const char *const arm_abi_strings
[] =
128 /* A variable that can be configured by the user. */
129 static enum arm_abi_kind arm_abi_global
= ARM_ABI_AUTO
;
130 static const char *arm_abi_string
= "auto";
132 /* The execution mode to assume. */
133 static const char *const arm_mode_strings
[] =
141 static const char *arm_fallback_mode_string
= "auto";
142 static const char *arm_force_mode_string
= "auto";
144 /* Internal override of the execution mode. -1 means no override,
145 0 means override to ARM mode, 1 means override to Thumb mode.
146 The effect is the same as if arm_force_mode has been set by the
147 user (except the internal override has precedence over a user's
148 arm_force_mode override). */
149 static int arm_override_mode
= -1;
151 /* Number of different reg name sets (options). */
152 static int num_disassembly_options
;
154 /* The standard register names, and all the valid aliases for them. Note
155 that `fp', `sp' and `pc' are not added in this alias list, because they
156 have been added as builtin user registers in
157 std-regs.c:_initialize_frame_reg. */
162 } arm_register_aliases
[] = {
163 /* Basic register numbers. */
180 /* Synonyms (argument and variable registers). */
193 /* Other platform-specific names for r9. */
199 /* Names used by GCC (not listed in the ARM EABI). */
201 /* A special name from the older ATPCS. */
205 static const char *const arm_register_names
[] =
206 {"r0", "r1", "r2", "r3", /* 0 1 2 3 */
207 "r4", "r5", "r6", "r7", /* 4 5 6 7 */
208 "r8", "r9", "r10", "r11", /* 8 9 10 11 */
209 "r12", "sp", "lr", "pc", /* 12 13 14 15 */
210 "f0", "f1", "f2", "f3", /* 16 17 18 19 */
211 "f4", "f5", "f6", "f7", /* 20 21 22 23 */
212 "fps", "cpsr" }; /* 24 25 */
214 /* Valid register name styles. */
215 static const char **valid_disassembly_styles
;
217 /* Disassembly style to use. Default to "std" register names. */
218 static const char *disassembly_style
;
220 /* This is used to keep the bfd arch_info in sync with the disassembly
222 static void set_disassembly_style_sfunc(char *, int,
223 struct cmd_list_element
*);
224 static void set_disassembly_style (void);
226 static void convert_from_extended (const struct floatformat
*, const void *,
228 static void convert_to_extended (const struct floatformat
*, void *,
231 static enum register_status
arm_neon_quad_read (struct gdbarch
*gdbarch
,
232 struct regcache
*regcache
,
233 int regnum
, gdb_byte
*buf
);
234 static void arm_neon_quad_write (struct gdbarch
*gdbarch
,
235 struct regcache
*regcache
,
236 int regnum
, const gdb_byte
*buf
);
238 static int thumb_insn_size (unsigned short inst1
);
240 struct arm_prologue_cache
242 /* The stack pointer at the time this frame was created; i.e. the
243 caller's stack pointer when this function was called. It is used
244 to identify this frame. */
247 /* The frame base for this frame is just prev_sp - frame size.
248 FRAMESIZE is the distance from the frame pointer to the
249 initial stack pointer. */
253 /* The register used to hold the frame pointer for this frame. */
256 /* Saved register offsets. */
257 struct trad_frame_saved_reg
*saved_regs
;
260 static CORE_ADDR
arm_analyze_prologue (struct gdbarch
*gdbarch
,
261 CORE_ADDR prologue_start
,
262 CORE_ADDR prologue_end
,
263 struct arm_prologue_cache
*cache
);
265 /* Architecture version for displaced stepping. This effects the behaviour of
266 certain instructions, and really should not be hard-wired. */
268 #define DISPLACED_STEPPING_ARCH_VERSION 5
270 /* Addresses for calling Thumb functions have the bit 0 set.
271 Here are some macros to test, set, or clear bit 0 of addresses. */
272 #define IS_THUMB_ADDR(addr) ((addr) & 1)
273 #define MAKE_THUMB_ADDR(addr) ((addr) | 1)
274 #define UNMAKE_THUMB_ADDR(addr) ((addr) & ~1)
276 /* Set to true if the 32-bit mode is in use. */
280 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
283 arm_psr_thumb_bit (struct gdbarch
*gdbarch
)
285 if (gdbarch_tdep (gdbarch
)->is_m
)
291 /* Determine if FRAME is executing in Thumb mode. */
294 arm_frame_is_thumb (struct frame_info
*frame
)
297 ULONGEST t_bit
= arm_psr_thumb_bit (get_frame_arch (frame
));
299 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
300 directly (from a signal frame or dummy frame) or by interpreting
301 the saved LR (from a prologue or DWARF frame). So consult it and
302 trust the unwinders. */
303 cpsr
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
305 return (cpsr
& t_bit
) != 0;
308 /* Callback for VEC_lower_bound. */
311 arm_compare_mapping_symbols (const struct arm_mapping_symbol
*lhs
,
312 const struct arm_mapping_symbol
*rhs
)
314 return lhs
->value
< rhs
->value
;
317 /* Search for the mapping symbol covering MEMADDR. If one is found,
318 return its type. Otherwise, return 0. If START is non-NULL,
319 set *START to the location of the mapping symbol. */
322 arm_find_mapping_symbol (CORE_ADDR memaddr
, CORE_ADDR
*start
)
324 struct obj_section
*sec
;
326 /* If there are mapping symbols, consult them. */
327 sec
= find_pc_section (memaddr
);
330 struct arm_per_objfile
*data
;
331 VEC(arm_mapping_symbol_s
) *map
;
332 struct arm_mapping_symbol map_key
= { memaddr
- obj_section_addr (sec
),
336 data
= (struct arm_per_objfile
*) objfile_data (sec
->objfile
,
337 arm_objfile_data_key
);
340 map
= data
->section_maps
[sec
->the_bfd_section
->index
];
341 if (!VEC_empty (arm_mapping_symbol_s
, map
))
343 struct arm_mapping_symbol
*map_sym
;
345 idx
= VEC_lower_bound (arm_mapping_symbol_s
, map
, &map_key
,
346 arm_compare_mapping_symbols
);
348 /* VEC_lower_bound finds the earliest ordered insertion
349 point. If the following symbol starts at this exact
350 address, we use that; otherwise, the preceding
351 mapping symbol covers this address. */
352 if (idx
< VEC_length (arm_mapping_symbol_s
, map
))
354 map_sym
= VEC_index (arm_mapping_symbol_s
, map
, idx
);
355 if (map_sym
->value
== map_key
.value
)
358 *start
= map_sym
->value
+ obj_section_addr (sec
);
359 return map_sym
->type
;
365 map_sym
= VEC_index (arm_mapping_symbol_s
, map
, idx
- 1);
367 *start
= map_sym
->value
+ obj_section_addr (sec
);
368 return map_sym
->type
;
377 /* Determine if the program counter specified in MEMADDR is in a Thumb
378 function. This function should be called for addresses unrelated to
379 any executing frame; otherwise, prefer arm_frame_is_thumb. */
382 arm_pc_is_thumb (struct gdbarch
*gdbarch
, CORE_ADDR memaddr
)
384 struct bound_minimal_symbol sym
;
386 struct displaced_step_closure
* dsc
387 = get_displaced_step_closure_by_addr(memaddr
);
389 /* If checking the mode of displaced instruction in copy area, the mode
390 should be determined by instruction on the original address. */
394 fprintf_unfiltered (gdb_stdlog
,
395 "displaced: check mode of %.8lx instead of %.8lx\n",
396 (unsigned long) dsc
->insn_addr
,
397 (unsigned long) memaddr
);
398 memaddr
= dsc
->insn_addr
;
401 /* If bit 0 of the address is set, assume this is a Thumb address. */
402 if (IS_THUMB_ADDR (memaddr
))
405 /* Respect internal mode override if active. */
406 if (arm_override_mode
!= -1)
407 return arm_override_mode
;
409 /* If the user wants to override the symbol table, let him. */
410 if (strcmp (arm_force_mode_string
, "arm") == 0)
412 if (strcmp (arm_force_mode_string
, "thumb") == 0)
415 /* ARM v6-M and v7-M are always in Thumb mode. */
416 if (gdbarch_tdep (gdbarch
)->is_m
)
419 /* If there are mapping symbols, consult them. */
420 type
= arm_find_mapping_symbol (memaddr
, NULL
);
424 /* Thumb functions have a "special" bit set in minimal symbols. */
425 sym
= lookup_minimal_symbol_by_pc (memaddr
);
427 return (MSYMBOL_IS_SPECIAL (sym
.minsym
));
429 /* If the user wants to override the fallback mode, let them. */
430 if (strcmp (arm_fallback_mode_string
, "arm") == 0)
432 if (strcmp (arm_fallback_mode_string
, "thumb") == 0)
435 /* If we couldn't find any symbol, but we're talking to a running
436 target, then trust the current value of $cpsr. This lets
437 "display/i $pc" always show the correct mode (though if there is
438 a symbol table we will not reach here, so it still may not be
439 displayed in the mode it will be executed). */
440 if (target_has_registers
)
441 return arm_frame_is_thumb (get_current_frame ());
443 /* Otherwise we're out of luck; we assume ARM. */
447 /* Remove useless bits from addresses in a running program. */
449 arm_addr_bits_remove (struct gdbarch
*gdbarch
, CORE_ADDR val
)
451 /* On M-profile devices, do not strip the low bit from EXC_RETURN
452 (the magic exception return address). */
453 if (gdbarch_tdep (gdbarch
)->is_m
454 && (val
& 0xfffffff0) == 0xfffffff0)
458 return UNMAKE_THUMB_ADDR (val
);
460 return (val
& 0x03fffffc);
463 /* Return 1 if PC is the start of a compiler helper function which
464 can be safely ignored during prologue skipping. IS_THUMB is true
465 if the function is known to be a Thumb function due to the way it
468 skip_prologue_function (struct gdbarch
*gdbarch
, CORE_ADDR pc
, int is_thumb
)
470 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
471 struct bound_minimal_symbol msym
;
473 msym
= lookup_minimal_symbol_by_pc (pc
);
474 if (msym
.minsym
!= NULL
475 && BMSYMBOL_VALUE_ADDRESS (msym
) == pc
476 && MSYMBOL_LINKAGE_NAME (msym
.minsym
) != NULL
)
478 const char *name
= MSYMBOL_LINKAGE_NAME (msym
.minsym
);
480 /* The GNU linker's Thumb call stub to foo is named
482 if (strstr (name
, "_from_thumb") != NULL
)
485 /* On soft-float targets, __truncdfsf2 is called to convert promoted
486 arguments to their argument types in non-prototyped
488 if (startswith (name
, "__truncdfsf2"))
490 if (startswith (name
, "__aeabi_d2f"))
493 /* Internal functions related to thread-local storage. */
494 if (startswith (name
, "__tls_get_addr"))
496 if (startswith (name
, "__aeabi_read_tp"))
501 /* If we run against a stripped glibc, we may be unable to identify
502 special functions by name. Check for one important case,
503 __aeabi_read_tp, by comparing the *code* against the default
504 implementation (this is hand-written ARM assembler in glibc). */
507 && read_memory_unsigned_integer (pc
, 4, byte_order_for_code
)
508 == 0xe3e00a0f /* mov r0, #0xffff0fff */
509 && read_memory_unsigned_integer (pc
+ 4, 4, byte_order_for_code
)
510 == 0xe240f01f) /* sub pc, r0, #31 */
517 /* Support routines for instruction parsing. */
518 #define submask(x) ((1L << ((x) + 1)) - 1)
519 #define bit(obj,st) (((obj) >> (st)) & 1)
520 #define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
521 #define sbits(obj,st,fn) \
522 ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
523 #define BranchDest(addr,instr) \
524 ((CORE_ADDR) (((unsigned long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))
526 /* Extract the immediate from instruction movw/movt of encoding T. INSN1 is
527 the first 16-bit of instruction, and INSN2 is the second 16-bit of
529 #define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
530 ((bits ((insn1), 0, 3) << 12) \
531 | (bits ((insn1), 10, 10) << 11) \
532 | (bits ((insn2), 12, 14) << 8) \
533 | bits ((insn2), 0, 7))
535 /* Extract the immediate from instruction movw/movt of encoding A. INSN is
536 the 32-bit instruction. */
537 #define EXTRACT_MOVW_MOVT_IMM_A(insn) \
538 ((bits ((insn), 16, 19) << 12) \
539 | bits ((insn), 0, 11))
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.

   IMM is the 12-bit i:imm3:imm8 field.  When the top four bits select
   one of the four "repeated byte" encodings, the low byte is replicated
   into the indicated byte lanes; otherwise the value 0x80|imm[6:0] is
   rotated right by the top five bits.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int count = imm >> 7;

  if (count < 8)
    switch (count / 2)
      {
      case 0:
	/* 00000000 00000000 00000000 abcdefgh.  */
	return (imm & 0xff);
      case 1:
	/* 00000000 abcdefgh 00000000 abcdefgh.  */
	return (imm & 0xff) | ((imm & 0xff) << 16);
      case 2:
	/* abcdefgh 00000000 abcdefgh 00000000.  */
	return ((imm & 0xff) << 8) | ((imm & 0xff) << 24);
      case 3:
	/* abcdefgh abcdefgh abcdefgh abcdefgh.  */
	return (imm & 0xff) | ((imm & 0xff) << 8)
		| ((imm & 0xff) << 16) | ((imm & 0xff) << 24);
      }

  /* Rotated constant: 1bcdefgh rotated right by COUNT bits.  */
  return (0x80 | (imm & 0x7f)) << (32 - count);
}
/* Return 1 if the 16-bit Thumb instruction INST might change
   control flow, 0 otherwise.  */

static int
thumb_instruction_changes_pc (unsigned short inst)
{
  if ((inst & 0xff00) == 0xbd00)	/* pop {rlist, pc} */
    return 1;

  if ((inst & 0xf000) == 0xd000)	/* conditional branch */
    return 1;

  if ((inst & 0xf800) == 0xe000)	/* unconditional branch */
    return 1;

  if ((inst & 0xff00) == 0x4700)	/* bx REG, blx REG */
    return 1;

  if ((inst & 0xff87) == 0x4687)	/* mov pc, REG */
    return 1;

  if ((inst & 0xf500) == 0xb100)	/* CBNZ or CBZ.  */
    return 1;

  return 0;
}
/* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
   might change control flow, 0 otherwise.  */

static int
thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
{
  if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
    {
      /* Branches and miscellaneous control instructions.  */

      if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
	{
	  /* B, BL, BLX.  */
	  return 1;
	}
      else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
	{
	  /* SUBS PC, LR, #imm8.  */
	  return 1;
	}
      else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
	{
	  /* Conditional branch.  */
	  return 1;
	}

      return 0;
    }

  if ((inst1 & 0xfe50) == 0xe810)
    {
      /* Load multiple or RFE.  */

      if (bit (inst1, 7) && !bit (inst1, 8))
	{
	  /* LDMIA or POP */
	  if (bit (inst2, 15))
	    return 1;
	}
      else if (!bit (inst1, 7) && bit (inst1, 8))
	{
	  /* LDMDB */
	  if (bit (inst2, 15))
	    return 1;
	}
      else if (bit (inst1, 7) && bit (inst1, 8))
	{
	  /* RFEIA */
	  return 1;
	}
      else if (!bit (inst1, 7) && !bit (inst1, 8))
	{
	  /* RFEDB */
	  return 1;
	}

      return 0;
    }

  if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
    {
      /* MOV PC or MOVS PC.  */
      return 1;
    }

  if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
    {
      /* LDR PC.  */
      if (bits (inst1, 0, 3) == 15)
	return 1;
      if (bit (inst1, 7))
	return 1;
      if (bit (inst2, 11))
	return 1;
      if ((inst2 & 0x0fc0) == 0x0000)
	return 1;

      return 0;
    }

  if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
    {
      /* TBB.  */
      return 1;
    }

  if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
    {
      /* TBH.  */
      return 1;
    }

  return 0;
}
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  return (insn == 0x46bd		/* mov sp, r7 */
	  || (insn & 0xff80) == 0xb000	/* add sp, imm */
	  || (insn & 0xfe00) == 0xbc00); /* pop <registers> */
}
698 /* Analyze a Thumb prologue, looking for a recognizable stack frame
699 and frame pointer. Scan until we encounter a store that could
700 clobber the stack frame unexpectedly, or an unknown instruction.
701 Return the last address which is definitely safe to skip for an
702 initial breakpoint. */
705 thumb_analyze_prologue (struct gdbarch
*gdbarch
,
706 CORE_ADDR start
, CORE_ADDR limit
,
707 struct arm_prologue_cache
*cache
)
709 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
710 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
713 struct pv_area
*stack
;
714 struct cleanup
*back_to
;
716 CORE_ADDR unrecognized_pc
= 0;
718 for (i
= 0; i
< 16; i
++)
719 regs
[i
] = pv_register (i
, 0);
720 stack
= make_pv_area (ARM_SP_REGNUM
, gdbarch_addr_bit (gdbarch
));
721 back_to
= make_cleanup_free_pv_area (stack
);
723 while (start
< limit
)
727 insn
= read_memory_unsigned_integer (start
, 2, byte_order_for_code
);
729 if ((insn
& 0xfe00) == 0xb400) /* push { rlist } */
734 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
737 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
738 whether to save LR (R14). */
739 mask
= (insn
& 0xff) | ((insn
& 0x100) << 6);
741 /* Calculate offsets of saved R0-R7 and LR. */
742 for (regno
= ARM_LR_REGNUM
; regno
>= 0; regno
--)
743 if (mask
& (1 << regno
))
745 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
],
747 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 4, regs
[regno
]);
750 else if ((insn
& 0xff80) == 0xb080) /* sub sp, #imm */
752 offset
= (insn
& 0x7f) << 2; /* get scaled offset */
753 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
],
756 else if (thumb_instruction_restores_sp (insn
))
758 /* Don't scan past the epilogue. */
761 else if ((insn
& 0xf800) == 0xa800) /* add Rd, sp, #imm */
762 regs
[bits (insn
, 8, 10)] = pv_add_constant (regs
[ARM_SP_REGNUM
],
764 else if ((insn
& 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
765 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
))
766 regs
[bits (insn
, 0, 2)] = pv_add_constant (regs
[bits (insn
, 3, 5)],
768 else if ((insn
& 0xf800) == 0x3000 /* add Rd, #imm */
769 && pv_is_register (regs
[bits (insn
, 8, 10)], ARM_SP_REGNUM
))
770 regs
[bits (insn
, 8, 10)] = pv_add_constant (regs
[bits (insn
, 8, 10)],
772 else if ((insn
& 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
773 && pv_is_register (regs
[bits (insn
, 6, 8)], ARM_SP_REGNUM
)
774 && pv_is_constant (regs
[bits (insn
, 3, 5)]))
775 regs
[bits (insn
, 0, 2)] = pv_add (regs
[bits (insn
, 3, 5)],
776 regs
[bits (insn
, 6, 8)]);
777 else if ((insn
& 0xff00) == 0x4400 /* add Rd, Rm */
778 && pv_is_constant (regs
[bits (insn
, 3, 6)]))
780 int rd
= (bit (insn
, 7) << 3) + bits (insn
, 0, 2);
781 int rm
= bits (insn
, 3, 6);
782 regs
[rd
] = pv_add (regs
[rd
], regs
[rm
]);
784 else if ((insn
& 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
786 int dst_reg
= (insn
& 0x7) + ((insn
& 0x80) >> 4);
787 int src_reg
= (insn
& 0x78) >> 3;
788 regs
[dst_reg
] = regs
[src_reg
];
790 else if ((insn
& 0xf800) == 0x9000) /* str rd, [sp, #off] */
792 /* Handle stores to the stack. Normally pushes are used,
793 but with GCC -mtpcs-frame, there may be other stores
794 in the prologue to create the frame. */
795 int regno
= (insn
>> 8) & 0x7;
798 offset
= (insn
& 0xff) << 2;
799 addr
= pv_add_constant (regs
[ARM_SP_REGNUM
], offset
);
801 if (pv_area_store_would_trash (stack
, addr
))
804 pv_area_store (stack
, addr
, 4, regs
[regno
]);
806 else if ((insn
& 0xf800) == 0x6000) /* str rd, [rn, #off] */
808 int rd
= bits (insn
, 0, 2);
809 int rn
= bits (insn
, 3, 5);
812 offset
= bits (insn
, 6, 10) << 2;
813 addr
= pv_add_constant (regs
[rn
], offset
);
815 if (pv_area_store_would_trash (stack
, addr
))
818 pv_area_store (stack
, addr
, 4, regs
[rd
]);
820 else if (((insn
& 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
821 || (insn
& 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
822 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
))
823 /* Ignore stores of argument registers to the stack. */
825 else if ((insn
& 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
826 && pv_is_register (regs
[bits (insn
, 8, 10)], ARM_SP_REGNUM
))
827 /* Ignore block loads from the stack, potentially copying
828 parameters from memory. */
830 else if ((insn
& 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
831 || ((insn
& 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
832 && pv_is_register (regs
[bits (insn
, 3, 5)], ARM_SP_REGNUM
)))
833 /* Similarly ignore single loads from the stack. */
835 else if ((insn
& 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
836 || (insn
& 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
837 /* Skip register copies, i.e. saves to another register
838 instead of the stack. */
840 else if ((insn
& 0xf800) == 0x2000) /* movs Rd, #imm */
841 /* Recognize constant loads; even with small stacks these are necessary
843 regs
[bits (insn
, 8, 10)] = pv_constant (bits (insn
, 0, 7));
844 else if ((insn
& 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
846 /* Constant pool loads, for the same reason. */
847 unsigned int constant
;
850 loc
= start
+ 4 + bits (insn
, 0, 7) * 4;
851 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
852 regs
[bits (insn
, 8, 10)] = pv_constant (constant
);
854 else if (thumb_insn_size (insn
) == 4) /* 32-bit Thumb-2 instructions. */
856 unsigned short inst2
;
858 inst2
= read_memory_unsigned_integer (start
+ 2, 2,
859 byte_order_for_code
);
861 if ((insn
& 0xf800) == 0xf000 && (inst2
& 0xe800) == 0xe800)
863 /* BL, BLX. Allow some special function calls when
864 skipping the prologue; GCC generates these before
865 storing arguments to the stack. */
867 int j1
, j2
, imm1
, imm2
;
869 imm1
= sbits (insn
, 0, 10);
870 imm2
= bits (inst2
, 0, 10);
871 j1
= bit (inst2
, 13);
872 j2
= bit (inst2
, 11);
874 offset
= ((imm1
<< 12) + (imm2
<< 1));
875 offset
^= ((!j2
) << 22) | ((!j1
) << 23);
877 nextpc
= start
+ 4 + offset
;
878 /* For BLX make sure to clear the low bits. */
879 if (bit (inst2
, 12) == 0)
880 nextpc
= nextpc
& 0xfffffffc;
882 if (!skip_prologue_function (gdbarch
, nextpc
,
883 bit (inst2
, 12) != 0))
887 else if ((insn
& 0xffd0) == 0xe900 /* stmdb Rn{!},
889 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
891 pv_t addr
= regs
[bits (insn
, 0, 3)];
894 if (pv_area_store_would_trash (stack
, addr
))
897 /* Calculate offsets of saved registers. */
898 for (regno
= ARM_LR_REGNUM
; regno
>= 0; regno
--)
899 if (inst2
& (1 << regno
))
901 addr
= pv_add_constant (addr
, -4);
902 pv_area_store (stack
, addr
, 4, regs
[regno
]);
906 regs
[bits (insn
, 0, 3)] = addr
;
909 else if ((insn
& 0xff50) == 0xe940 /* strd Rt, Rt2,
911 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
913 int regno1
= bits (inst2
, 12, 15);
914 int regno2
= bits (inst2
, 8, 11);
915 pv_t addr
= regs
[bits (insn
, 0, 3)];
917 offset
= inst2
& 0xff;
919 addr
= pv_add_constant (addr
, offset
);
921 addr
= pv_add_constant (addr
, -offset
);
923 if (pv_area_store_would_trash (stack
, addr
))
926 pv_area_store (stack
, addr
, 4, regs
[regno1
]);
927 pv_area_store (stack
, pv_add_constant (addr
, 4),
931 regs
[bits (insn
, 0, 3)] = addr
;
934 else if ((insn
& 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
935 && (inst2
& 0x0c00) == 0x0c00
936 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
938 int regno
= bits (inst2
, 12, 15);
939 pv_t addr
= regs
[bits (insn
, 0, 3)];
941 offset
= inst2
& 0xff;
943 addr
= pv_add_constant (addr
, offset
);
945 addr
= pv_add_constant (addr
, -offset
);
947 if (pv_area_store_would_trash (stack
, addr
))
950 pv_area_store (stack
, addr
, 4, regs
[regno
]);
953 regs
[bits (insn
, 0, 3)] = addr
;
956 else if ((insn
& 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
957 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
959 int regno
= bits (inst2
, 12, 15);
962 offset
= inst2
& 0xfff;
963 addr
= pv_add_constant (regs
[bits (insn
, 0, 3)], offset
);
965 if (pv_area_store_would_trash (stack
, addr
))
968 pv_area_store (stack
, addr
, 4, regs
[regno
]);
971 else if ((insn
& 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
972 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
973 /* Ignore stores of argument registers to the stack. */
976 else if ((insn
& 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
977 && (inst2
& 0x0d00) == 0x0c00
978 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
979 /* Ignore stores of argument registers to the stack. */
982 else if ((insn
& 0xffd0) == 0xe890 /* ldmia Rn[!],
984 && (inst2
& 0x8000) == 0x0000
985 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
986 /* Ignore block loads from the stack, potentially copying
987 parameters from memory. */
990 else if ((insn
& 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
992 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
993 /* Similarly ignore dual loads from the stack. */
996 else if ((insn
& 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
997 && (inst2
& 0x0d00) == 0x0c00
998 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
999 /* Similarly ignore single loads from the stack. */
1002 else if ((insn
& 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
1003 && pv_is_register (regs
[bits (insn
, 0, 3)], ARM_SP_REGNUM
))
1004 /* Similarly ignore single loads from the stack. */
1007 else if ((insn
& 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
1008 && (inst2
& 0x8000) == 0x0000)
1010 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1011 | (bits (inst2
, 12, 14) << 8)
1012 | bits (inst2
, 0, 7));
1014 regs
[bits (inst2
, 8, 11)]
1015 = pv_add_constant (regs
[bits (insn
, 0, 3)],
1016 thumb_expand_immediate (imm
));
1019 else if ((insn
& 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1020 && (inst2
& 0x8000) == 0x0000)
1022 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1023 | (bits (inst2
, 12, 14) << 8)
1024 | bits (inst2
, 0, 7));
1026 regs
[bits (inst2
, 8, 11)]
1027 = pv_add_constant (regs
[bits (insn
, 0, 3)], imm
);
1030 else if ((insn
& 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1031 && (inst2
& 0x8000) == 0x0000)
1033 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1034 | (bits (inst2
, 12, 14) << 8)
1035 | bits (inst2
, 0, 7));
1037 regs
[bits (inst2
, 8, 11)]
1038 = pv_add_constant (regs
[bits (insn
, 0, 3)],
1039 - (CORE_ADDR
) thumb_expand_immediate (imm
));
1042 else if ((insn
& 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1043 && (inst2
& 0x8000) == 0x0000)
1045 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1046 | (bits (inst2
, 12, 14) << 8)
1047 | bits (inst2
, 0, 7));
1049 regs
[bits (inst2
, 8, 11)]
1050 = pv_add_constant (regs
[bits (insn
, 0, 3)], - (CORE_ADDR
) imm
);
1053 else if ((insn
& 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1055 unsigned int imm
= ((bits (insn
, 10, 10) << 11)
1056 | (bits (inst2
, 12, 14) << 8)
1057 | bits (inst2
, 0, 7));
1059 regs
[bits (inst2
, 8, 11)]
1060 = pv_constant (thumb_expand_immediate (imm
));
1063 else if ((insn
& 0xfbf0) == 0xf240) /* movw Rd, #const */
1066 = EXTRACT_MOVW_MOVT_IMM_T (insn
, inst2
);
1068 regs
[bits (inst2
, 8, 11)] = pv_constant (imm
);
1071 else if (insn
== 0xea5f /* mov.w Rd,Rm */
1072 && (inst2
& 0xf0f0) == 0)
1074 int dst_reg
= (inst2
& 0x0f00) >> 8;
1075 int src_reg
= inst2
& 0xf;
1076 regs
[dst_reg
] = regs
[src_reg
];
1079 else if ((insn
& 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1081 /* Constant pool loads. */
1082 unsigned int constant
;
1085 offset
= bits (inst2
, 0, 11);
1087 loc
= start
+ 4 + offset
;
1089 loc
= start
+ 4 - offset
;
1091 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
1092 regs
[bits (inst2
, 12, 15)] = pv_constant (constant
);
1095 else if ((insn
& 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1097 /* Constant pool loads. */
1098 unsigned int constant
;
1101 offset
= bits (inst2
, 0, 7) << 2;
1103 loc
= start
+ 4 + offset
;
1105 loc
= start
+ 4 - offset
;
1107 constant
= read_memory_unsigned_integer (loc
, 4, byte_order
);
1108 regs
[bits (inst2
, 12, 15)] = pv_constant (constant
);
1110 constant
= read_memory_unsigned_integer (loc
+ 4, 4, byte_order
);
1111 regs
[bits (inst2
, 8, 11)] = pv_constant (constant
);
1114 else if (thumb2_instruction_changes_pc (insn
, inst2
))
1116 /* Don't scan past anything that might change control flow. */
1121 /* The optimizer might shove anything into the prologue,
1122 so we just skip what we don't recognize. */
1123 unrecognized_pc
= start
;
1128 else if (thumb_instruction_changes_pc (insn
))
1130 /* Don't scan past anything that might change control flow. */
1135 /* The optimizer might shove anything into the prologue,
1136 so we just skip what we don't recognize. */
1137 unrecognized_pc
= start
;
1144 fprintf_unfiltered (gdb_stdlog
, "Prologue scan stopped at %s\n",
1145 paddress (gdbarch
, start
));
1147 if (unrecognized_pc
== 0)
1148 unrecognized_pc
= start
;
1152 do_cleanups (back_to
);
1153 return unrecognized_pc
;
1156 if (pv_is_register (regs
[ARM_FP_REGNUM
], ARM_SP_REGNUM
))
1158 /* Frame pointer is fp. Frame size is constant. */
1159 cache
->framereg
= ARM_FP_REGNUM
;
1160 cache
->framesize
= -regs
[ARM_FP_REGNUM
].k
;
1162 else if (pv_is_register (regs
[THUMB_FP_REGNUM
], ARM_SP_REGNUM
))
1164 /* Frame pointer is r7. Frame size is constant. */
1165 cache
->framereg
= THUMB_FP_REGNUM
;
1166 cache
->framesize
= -regs
[THUMB_FP_REGNUM
].k
;
1170 /* Try the stack pointer... this is a bit desperate. */
1171 cache
->framereg
= ARM_SP_REGNUM
;
1172 cache
->framesize
= -regs
[ARM_SP_REGNUM
].k
;
1175 for (i
= 0; i
< 16; i
++)
1176 if (pv_area_find_reg (stack
, gdbarch
, i
, &offset
))
1177 cache
->saved_regs
[i
].addr
= offset
;
1179 do_cleanups (back_to
);
1180 return unrecognized_pc
;
1184 /* Try to analyze the instructions starting from PC, which load symbol
1185 __stack_chk_guard. Return the address of instruction after loading this
1186 symbol, set the dest register number to *BASEREG, and set the size of
1187 instructions for loading symbol in OFFSET. Return 0 if instructions are
1191 arm_analyze_load_stack_chk_guard(CORE_ADDR pc
, struct gdbarch
*gdbarch
,
1192 unsigned int *destreg
, int *offset
)
1194 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1195 int is_thumb
= arm_pc_is_thumb (gdbarch
, pc
);
1196 unsigned int low
, high
, address
;
1201 unsigned short insn1
1202 = read_memory_unsigned_integer (pc
, 2, byte_order_for_code
);
1204 if ((insn1
& 0xf800) == 0x4800) /* ldr Rd, #immed */
1206 *destreg
= bits (insn1
, 8, 10);
1208 address
= (pc
& 0xfffffffc) + 4 + (bits (insn1
, 0, 7) << 2);
1209 address
= read_memory_unsigned_integer (address
, 4,
1210 byte_order_for_code
);
1212 else if ((insn1
& 0xfbf0) == 0xf240) /* movw Rd, #const */
1214 unsigned short insn2
1215 = read_memory_unsigned_integer (pc
+ 2, 2, byte_order_for_code
);
1217 low
= EXTRACT_MOVW_MOVT_IMM_T (insn1
, insn2
);
1220 = read_memory_unsigned_integer (pc
+ 4, 2, byte_order_for_code
);
1222 = read_memory_unsigned_integer (pc
+ 6, 2, byte_order_for_code
);
1224 /* movt Rd, #const */
1225 if ((insn1
& 0xfbc0) == 0xf2c0)
1227 high
= EXTRACT_MOVW_MOVT_IMM_T (insn1
, insn2
);
1228 *destreg
= bits (insn2
, 8, 11);
1230 address
= (high
<< 16 | low
);
1237 = read_memory_unsigned_integer (pc
, 4, byte_order_for_code
);
1239 if ((insn
& 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
1241 address
= bits (insn
, 0, 11) + pc
+ 8;
1242 address
= read_memory_unsigned_integer (address
, 4,
1243 byte_order_for_code
);
1245 *destreg
= bits (insn
, 12, 15);
1248 else if ((insn
& 0x0ff00000) == 0x03000000) /* movw Rd, #const */
1250 low
= EXTRACT_MOVW_MOVT_IMM_A (insn
);
1253 = read_memory_unsigned_integer (pc
+ 4, 4, byte_order_for_code
);
1255 if ((insn
& 0x0ff00000) == 0x03400000) /* movt Rd, #const */
1257 high
= EXTRACT_MOVW_MOVT_IMM_A (insn
);
1258 *destreg
= bits (insn
, 12, 15);
1260 address
= (high
<< 16 | low
);
1268 /* Try to skip a sequence of instructions used for stack protector. If PC
1269 points to the first instruction of this sequence, return the address of
1270 first instruction after this sequence, otherwise, return original PC.
1272 On arm, this sequence of instructions is composed of mainly three steps,
1273 Step 1: load symbol __stack_chk_guard,
1274 Step 2: load from address of __stack_chk_guard,
1275 Step 3: store it to somewhere else.
1277 Usually, instructions on step 2 and step 3 are the same on various ARM
1278 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1279 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1280 instructions in step 1 vary from different ARM architectures. On ARMv7,
1283 movw Rn, #:lower16:__stack_chk_guard
1284 movt Rn, #:upper16:__stack_chk_guard
1291 .word __stack_chk_guard
1293 Since ldr/str is a very popular instruction, we can't use them as
1294 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1295 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
1296 stripped, as the 'fingerprint' of a stack protector cdoe sequence. */
1299 arm_skip_stack_protector(CORE_ADDR pc
, struct gdbarch
*gdbarch
)
1301 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1302 unsigned int basereg
;
1303 struct bound_minimal_symbol stack_chk_guard
;
1305 int is_thumb
= arm_pc_is_thumb (gdbarch
, pc
);
1308 /* Try to parse the instructions in Step 1. */
1309 addr
= arm_analyze_load_stack_chk_guard (pc
, gdbarch
,
1314 stack_chk_guard
= lookup_minimal_symbol_by_pc (addr
);
1315 /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
1316 Otherwise, this sequence cannot be for stack protector. */
1317 if (stack_chk_guard
.minsym
== NULL
1318 || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard
.minsym
), "__stack_chk_guard"))
1323 unsigned int destreg
;
1325 = read_memory_unsigned_integer (pc
+ offset
, 2, byte_order_for_code
);
1327 /* Step 2: ldr Rd, [Rn, #immed], encoding T1. */
1328 if ((insn
& 0xf800) != 0x6800)
1330 if (bits (insn
, 3, 5) != basereg
)
1332 destreg
= bits (insn
, 0, 2);
1334 insn
= read_memory_unsigned_integer (pc
+ offset
+ 2, 2,
1335 byte_order_for_code
);
1336 /* Step 3: str Rd, [Rn, #immed], encoding T1. */
1337 if ((insn
& 0xf800) != 0x6000)
1339 if (destreg
!= bits (insn
, 0, 2))
1344 unsigned int destreg
;
1346 = read_memory_unsigned_integer (pc
+ offset
, 4, byte_order_for_code
);
1348 /* Step 2: ldr Rd, [Rn, #immed], encoding A1. */
1349 if ((insn
& 0x0e500000) != 0x04100000)
1351 if (bits (insn
, 16, 19) != basereg
)
1353 destreg
= bits (insn
, 12, 15);
1354 /* Step 3: str Rd, [Rn, #immed], encoding A1. */
1355 insn
= read_memory_unsigned_integer (pc
+ offset
+ 4,
1356 4, byte_order_for_code
);
1357 if ((insn
& 0x0e500000) != 0x04000000)
1359 if (bits (insn
, 12, 15) != destreg
)
1362 /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
1365 return pc
+ offset
+ 4;
1367 return pc
+ offset
+ 8;
1370 /* Advance the PC across any function entry prologue instructions to
1371 reach some "real" code.
1373 The APCS (ARM Procedure Call Standard) defines the following
1377 [stmfd sp!, {a1,a2,a3,a4}]
1378 stmfd sp!, {...,fp,ip,lr,pc}
1379 [stfe f7, [sp, #-12]!]
1380 [stfe f6, [sp, #-12]!]
1381 [stfe f5, [sp, #-12]!]
1382 [stfe f4, [sp, #-12]!]
1383 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1386 arm_skip_prologue (struct gdbarch
*gdbarch
, CORE_ADDR pc
)
1388 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1390 CORE_ADDR func_addr
, limit_pc
;
1392 /* See if we can determine the end of the prologue via the symbol table.
1393 If so, then return either PC, or the PC after the prologue, whichever
1395 if (find_pc_partial_function (pc
, NULL
, &func_addr
, NULL
))
1397 CORE_ADDR post_prologue_pc
1398 = skip_prologue_using_sal (gdbarch
, func_addr
);
1399 struct compunit_symtab
*cust
= find_pc_compunit_symtab (func_addr
);
1401 if (post_prologue_pc
)
1403 = arm_skip_stack_protector (post_prologue_pc
, gdbarch
);
1406 /* GCC always emits a line note before the prologue and another
1407 one after, even if the two are at the same address or on the
1408 same line. Take advantage of this so that we do not need to
1409 know every instruction that might appear in the prologue. We
1410 will have producer information for most binaries; if it is
1411 missing (e.g. for -gstabs), assuming the GNU tools. */
1412 if (post_prologue_pc
1414 || COMPUNIT_PRODUCER (cust
) == NULL
1415 || startswith (COMPUNIT_PRODUCER (cust
), "GNU ")
1416 || startswith (COMPUNIT_PRODUCER (cust
), "clang ")))
1417 return post_prologue_pc
;
1419 if (post_prologue_pc
!= 0)
1421 CORE_ADDR analyzed_limit
;
1423 /* For non-GCC compilers, make sure the entire line is an
1424 acceptable prologue; GDB will round this function's
1425 return value up to the end of the following line so we
1426 can not skip just part of a line (and we do not want to).
1428 RealView does not treat the prologue specially, but does
1429 associate prologue code with the opening brace; so this
1430 lets us skip the first line if we think it is the opening
1432 if (arm_pc_is_thumb (gdbarch
, func_addr
))
1433 analyzed_limit
= thumb_analyze_prologue (gdbarch
, func_addr
,
1434 post_prologue_pc
, NULL
);
1436 analyzed_limit
= arm_analyze_prologue (gdbarch
, func_addr
,
1437 post_prologue_pc
, NULL
);
1439 if (analyzed_limit
!= post_prologue_pc
)
1442 return post_prologue_pc
;
1446 /* Can't determine prologue from the symbol table, need to examine
1449 /* Find an upper limit on the function prologue using the debug
1450 information. If the debug information could not be used to provide
1451 that bound, then use an arbitrary large number as the upper bound. */
1452 /* Like arm_scan_prologue, stop no later than pc + 64. */
1453 limit_pc
= skip_prologue_using_sal (gdbarch
, pc
);
1455 limit_pc
= pc
+ 64; /* Magic. */
1458 /* Check if this is Thumb code. */
1459 if (arm_pc_is_thumb (gdbarch
, pc
))
1460 return thumb_analyze_prologue (gdbarch
, pc
, limit_pc
, NULL
);
1462 return arm_analyze_prologue (gdbarch
, pc
, limit_pc
, NULL
);
1466 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1467 This function decodes a Thumb function prologue to determine:
1468 1) the size of the stack frame
1469 2) which registers are saved on it
1470 3) the offsets of saved regs
1471 4) the offset from the stack pointer to the frame pointer
1473 A typical Thumb function prologue would create this stack frame
1474 (offsets relative to FP)
1475 old SP -> 24 stack parameters
1478 R7 -> 0 local variables (16 bytes)
1479 SP -> -12 additional stack space (12 bytes)
1480 The frame size would thus be 36 bytes, and the frame offset would be
1481 12 bytes. The frame register is R7.
1483 The comments for thumb_skip_prolog() describe the algorithm we use
1484 to detect the end of the prolog. */
1488 thumb_scan_prologue (struct gdbarch
*gdbarch
, CORE_ADDR prev_pc
,
1489 CORE_ADDR block_addr
, struct arm_prologue_cache
*cache
)
1491 CORE_ADDR prologue_start
;
1492 CORE_ADDR prologue_end
;
1494 if (find_pc_partial_function (block_addr
, NULL
, &prologue_start
,
1497 /* See comment in arm_scan_prologue for an explanation of
1499 if (prologue_end
> prologue_start
+ 64)
1501 prologue_end
= prologue_start
+ 64;
1505 /* We're in the boondocks: we have no idea where the start of the
1509 prologue_end
= min (prologue_end
, prev_pc
);
1511 thumb_analyze_prologue (gdbarch
, prologue_start
, prologue_end
, cache
);
1514 /* Return 1 if THIS_INSTR might change control flow, 0 otherwise. */
1517 arm_instruction_changes_pc (uint32_t this_instr
)
1519 if (bits (this_instr
, 28, 31) == INST_NV
)
1520 /* Unconditional instructions. */
1521 switch (bits (this_instr
, 24, 27))
1525 /* Branch with Link and change to Thumb. */
1530 /* Coprocessor register transfer. */
1531 if (bits (this_instr
, 12, 15) == 15)
1532 error (_("Invalid update to pc in instruction"));
1538 switch (bits (this_instr
, 25, 27))
1541 if (bits (this_instr
, 23, 24) == 2 && bit (this_instr
, 20) == 0)
1543 /* Multiplies and extra load/stores. */
1544 if (bit (this_instr
, 4) == 1 && bit (this_instr
, 7) == 1)
1545 /* Neither multiplies nor extension load/stores are allowed
1549 /* Otherwise, miscellaneous instructions. */
1551 /* BX <reg>, BXJ <reg>, BLX <reg> */
1552 if (bits (this_instr
, 4, 27) == 0x12fff1
1553 || bits (this_instr
, 4, 27) == 0x12fff2
1554 || bits (this_instr
, 4, 27) == 0x12fff3)
1557 /* Other miscellaneous instructions are unpredictable if they
1561 /* Data processing instruction. Fall through. */
1564 if (bits (this_instr
, 12, 15) == 15)
1571 /* Media instructions and architecturally undefined instructions. */
1572 if (bits (this_instr
, 25, 27) == 3 && bit (this_instr
, 4) == 1)
1576 if (bit (this_instr
, 20) == 0)
1580 if (bits (this_instr
, 12, 15) == ARM_PC_REGNUM
)
1586 /* Load/store multiple. */
1587 if (bit (this_instr
, 20) == 1 && bit (this_instr
, 15) == 1)
1593 /* Branch and branch with link. */
1598 /* Coprocessor transfers or SWIs can not affect PC. */
1602 internal_error (__FILE__
, __LINE__
, _("bad value in switch"));
1606 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1610 arm_instruction_restores_sp (unsigned int insn
)
1612 if (bits (insn
, 28, 31) != INST_NV
)
1614 if ((insn
& 0x0df0f000) == 0x0080d000
1615 /* ADD SP (register or immediate). */
1616 || (insn
& 0x0df0f000) == 0x0040d000
1617 /* SUB SP (register or immediate). */
1618 || (insn
& 0x0ffffff0) == 0x01a0d000
1620 || (insn
& 0x0fff0000) == 0x08bd0000
1622 || (insn
& 0x0fff0000) == 0x049d0000)
1623 /* POP of a single register. */
1630 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1631 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1632 fill it in. Return the first address not recognized as a prologue
1635 We recognize all the instructions typically found in ARM prologues,
1636 plus harmless instructions which can be skipped (either for analysis
1637 purposes, or a more restrictive set that can be skipped when finding
1638 the end of the prologue). */
1641 arm_analyze_prologue (struct gdbarch
*gdbarch
,
1642 CORE_ADDR prologue_start
, CORE_ADDR prologue_end
,
1643 struct arm_prologue_cache
*cache
)
1645 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
1646 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
1648 CORE_ADDR offset
, current_pc
;
1649 pv_t regs
[ARM_FPS_REGNUM
];
1650 struct pv_area
*stack
;
1651 struct cleanup
*back_to
;
1652 CORE_ADDR unrecognized_pc
= 0;
1654 /* Search the prologue looking for instructions that set up the
1655 frame pointer, adjust the stack pointer, and save registers.
1657 Be careful, however, and if it doesn't look like a prologue,
1658 don't try to scan it. If, for instance, a frameless function
1659 begins with stmfd sp!, then we will tell ourselves there is
1660 a frame, which will confuse stack traceback, as well as "finish"
1661 and other operations that rely on a knowledge of the stack
1664 for (regno
= 0; regno
< ARM_FPS_REGNUM
; regno
++)
1665 regs
[regno
] = pv_register (regno
, 0);
1666 stack
= make_pv_area (ARM_SP_REGNUM
, gdbarch_addr_bit (gdbarch
));
1667 back_to
= make_cleanup_free_pv_area (stack
);
1669 for (current_pc
= prologue_start
;
1670 current_pc
< prologue_end
;
1674 = read_memory_unsigned_integer (current_pc
, 4, byte_order_for_code
);
1676 if (insn
== 0xe1a0c00d) /* mov ip, sp */
1678 regs
[ARM_IP_REGNUM
] = regs
[ARM_SP_REGNUM
];
1681 else if ((insn
& 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1682 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1684 unsigned imm
= insn
& 0xff; /* immediate value */
1685 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1686 int rd
= bits (insn
, 12, 15);
1687 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1688 regs
[rd
] = pv_add_constant (regs
[bits (insn
, 16, 19)], imm
);
1691 else if ((insn
& 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1692 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1694 unsigned imm
= insn
& 0xff; /* immediate value */
1695 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1696 int rd
= bits (insn
, 12, 15);
1697 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1698 regs
[rd
] = pv_add_constant (regs
[bits (insn
, 16, 19)], -imm
);
1701 else if ((insn
& 0xffff0fff) == 0xe52d0004) /* str Rd,
1704 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
1706 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -4);
1707 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 4,
1708 regs
[bits (insn
, 12, 15)]);
1711 else if ((insn
& 0xffff0000) == 0xe92d0000)
1712 /* stmfd sp!, {..., fp, ip, lr, pc}
1714 stmfd sp!, {a1, a2, a3, a4} */
1716 int mask
= insn
& 0xffff;
1718 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
1721 /* Calculate offsets of saved registers. */
1722 for (regno
= ARM_PC_REGNUM
; regno
>= 0; regno
--)
1723 if (mask
& (1 << regno
))
1726 = pv_add_constant (regs
[ARM_SP_REGNUM
], -4);
1727 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 4, regs
[regno
]);
1730 else if ((insn
& 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1731 || (insn
& 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1732 || (insn
& 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1734 /* No need to add this to saved_regs -- it's just an arg reg. */
1737 else if ((insn
& 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1738 || (insn
& 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1739 || (insn
& 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1741 /* No need to add this to saved_regs -- it's just an arg reg. */
1744 else if ((insn
& 0xfff00000) == 0xe8800000 /* stm Rn,
1746 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1748 /* No need to add this to saved_regs -- it's just arg regs. */
1751 else if ((insn
& 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1753 unsigned imm
= insn
& 0xff; /* immediate value */
1754 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1755 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1756 regs
[ARM_FP_REGNUM
] = pv_add_constant (regs
[ARM_IP_REGNUM
], -imm
);
1758 else if ((insn
& 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1760 unsigned imm
= insn
& 0xff; /* immediate value */
1761 unsigned rot
= (insn
& 0xf00) >> 7; /* rotate amount */
1762 imm
= (imm
>> rot
) | (imm
<< (32 - rot
));
1763 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -imm
);
1765 else if ((insn
& 0xffff7fff) == 0xed6d0103 /* stfe f?,
1767 && gdbarch_tdep (gdbarch
)->have_fpa_registers
)
1769 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
1772 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -12);
1773 regno
= ARM_F0_REGNUM
+ ((insn
>> 12) & 0x07);
1774 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 12, regs
[regno
]);
1776 else if ((insn
& 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1778 && gdbarch_tdep (gdbarch
)->have_fpa_registers
)
1780 int n_saved_fp_regs
;
1781 unsigned int fp_start_reg
, fp_bound_reg
;
1783 if (pv_area_store_would_trash (stack
, regs
[ARM_SP_REGNUM
]))
1786 if ((insn
& 0x800) == 0x800) /* N0 is set */
1788 if ((insn
& 0x40000) == 0x40000) /* N1 is set */
1789 n_saved_fp_regs
= 3;
1791 n_saved_fp_regs
= 1;
1795 if ((insn
& 0x40000) == 0x40000) /* N1 is set */
1796 n_saved_fp_regs
= 2;
1798 n_saved_fp_regs
= 4;
1801 fp_start_reg
= ARM_F0_REGNUM
+ ((insn
>> 12) & 0x7);
1802 fp_bound_reg
= fp_start_reg
+ n_saved_fp_regs
;
1803 for (; fp_start_reg
< fp_bound_reg
; fp_start_reg
++)
1805 regs
[ARM_SP_REGNUM
] = pv_add_constant (regs
[ARM_SP_REGNUM
], -12);
1806 pv_area_store (stack
, regs
[ARM_SP_REGNUM
], 12,
1807 regs
[fp_start_reg
++]);
1810 else if ((insn
& 0xff000000) == 0xeb000000 && cache
== NULL
) /* bl */
1812 /* Allow some special function calls when skipping the
1813 prologue; GCC generates these before storing arguments to
1815 CORE_ADDR dest
= BranchDest (current_pc
, insn
);
1817 if (skip_prologue_function (gdbarch
, dest
, 0))
1822 else if ((insn
& 0xf0000000) != 0xe0000000)
1823 break; /* Condition not true, exit early. */
1824 else if (arm_instruction_changes_pc (insn
))
1825 /* Don't scan past anything that might change control flow. */
1827 else if (arm_instruction_restores_sp (insn
))
1829 /* Don't scan past the epilogue. */
1832 else if ((insn
& 0xfe500000) == 0xe8100000 /* ldm */
1833 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1834 /* Ignore block loads from the stack, potentially copying
1835 parameters from memory. */
1837 else if ((insn
& 0xfc500000) == 0xe4100000
1838 && pv_is_register (regs
[bits (insn
, 16, 19)], ARM_SP_REGNUM
))
1839 /* Similarly ignore single loads from the stack. */
1841 else if ((insn
& 0xffff0ff0) == 0xe1a00000)
1842 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1843 register instead of the stack. */
1847 /* The optimizer might shove anything into the prologue, if
1848 we build up cache (cache != NULL) from scanning prologue,
1849 we just skip what we don't recognize and scan further to
1850 make cache as complete as possible. However, if we skip
1851 prologue, we'll stop immediately on unrecognized
1853 unrecognized_pc
= current_pc
;
1861 if (unrecognized_pc
== 0)
1862 unrecognized_pc
= current_pc
;
1866 int framereg
, framesize
;
1868 /* The frame size is just the distance from the frame register
1869 to the original stack pointer. */
1870 if (pv_is_register (regs
[ARM_FP_REGNUM
], ARM_SP_REGNUM
))
1872 /* Frame pointer is fp. */
1873 framereg
= ARM_FP_REGNUM
;
1874 framesize
= -regs
[ARM_FP_REGNUM
].k
;
1878 /* Try the stack pointer... this is a bit desperate. */
1879 framereg
= ARM_SP_REGNUM
;
1880 framesize
= -regs
[ARM_SP_REGNUM
].k
;
1883 cache
->framereg
= framereg
;
1884 cache
->framesize
= framesize
;
1886 for (regno
= 0; regno
< ARM_FPS_REGNUM
; regno
++)
1887 if (pv_area_find_reg (stack
, gdbarch
, regno
, &offset
))
1888 cache
->saved_regs
[regno
].addr
= offset
;
1892 fprintf_unfiltered (gdb_stdlog
, "Prologue scan stopped at %s\n",
1893 paddress (gdbarch
, unrecognized_pc
));
1895 do_cleanups (back_to
);
1896 return unrecognized_pc
;
1900 arm_scan_prologue (struct frame_info
*this_frame
,
1901 struct arm_prologue_cache
*cache
)
1903 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
1904 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
1906 CORE_ADDR prologue_start
, prologue_end
, current_pc
;
1907 CORE_ADDR prev_pc
= get_frame_pc (this_frame
);
1908 CORE_ADDR block_addr
= get_frame_address_in_block (this_frame
);
1909 pv_t regs
[ARM_FPS_REGNUM
];
1910 struct pv_area
*stack
;
1911 struct cleanup
*back_to
;
1914 /* Assume there is no frame until proven otherwise. */
1915 cache
->framereg
= ARM_SP_REGNUM
;
1916 cache
->framesize
= 0;
1918 /* Check for Thumb prologue. */
1919 if (arm_frame_is_thumb (this_frame
))
1921 thumb_scan_prologue (gdbarch
, prev_pc
, block_addr
, cache
);
1925 /* Find the function prologue. If we can't find the function in
1926 the symbol table, peek in the stack frame to find the PC. */
1927 if (find_pc_partial_function (block_addr
, NULL
, &prologue_start
,
1930 /* One way to find the end of the prologue (which works well
1931 for unoptimized code) is to do the following:
1933 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1936 prologue_end = prev_pc;
1937 else if (sal.end < prologue_end)
1938 prologue_end = sal.end;
1940 This mechanism is very accurate so long as the optimizer
1941 doesn't move any instructions from the function body into the
1942 prologue. If this happens, sal.end will be the last
1943 instruction in the first hunk of prologue code just before
1944 the first instruction that the scheduler has moved from
1945 the body to the prologue.
1947 In order to make sure that we scan all of the prologue
1948 instructions, we use a slightly less accurate mechanism which
1949 may scan more than necessary. To help compensate for this
1950 lack of accuracy, the prologue scanning loop below contains
1951 several clauses which'll cause the loop to terminate early if
1952 an implausible prologue instruction is encountered.
1958 is a suitable endpoint since it accounts for the largest
1959 possible prologue plus up to five instructions inserted by
1962 if (prologue_end
> prologue_start
+ 64)
1964 prologue_end
= prologue_start
+ 64; /* See above. */
1969 /* We have no symbol information. Our only option is to assume this
1970 function has a standard stack frame and the normal frame register.
1971 Then, we can find the value of our frame pointer on entrance to
1972 the callee (or at the present moment if this is the innermost frame).
1973 The value stored there should be the address of the stmfd + 8. */
1974 CORE_ADDR frame_loc
;
1975 LONGEST return_value
;
1977 frame_loc
= get_frame_register_unsigned (this_frame
, ARM_FP_REGNUM
);
1978 if (!safe_read_memory_integer (frame_loc
, 4, byte_order
, &return_value
))
1982 prologue_start
= gdbarch_addr_bits_remove
1983 (gdbarch
, return_value
) - 8;
1984 prologue_end
= prologue_start
+ 64; /* See above. */
1988 if (prev_pc
< prologue_end
)
1989 prologue_end
= prev_pc
;
1991 arm_analyze_prologue (gdbarch
, prologue_start
, prologue_end
, cache
);
1994 static struct arm_prologue_cache
*
1995 arm_make_prologue_cache (struct frame_info
*this_frame
)
1998 struct arm_prologue_cache
*cache
;
1999 CORE_ADDR unwound_fp
;
2001 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2002 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2004 arm_scan_prologue (this_frame
, cache
);
2006 unwound_fp
= get_frame_register_unsigned (this_frame
, cache
->framereg
);
2007 if (unwound_fp
== 0)
2010 cache
->prev_sp
= unwound_fp
+ cache
->framesize
;
2012 /* Calculate actual addresses of saved registers using offsets
2013 determined by arm_scan_prologue. */
2014 for (reg
= 0; reg
< gdbarch_num_regs (get_frame_arch (this_frame
)); reg
++)
2015 if (trad_frame_addr_p (cache
->saved_regs
, reg
))
2016 cache
->saved_regs
[reg
].addr
+= cache
->prev_sp
;
2021 /* Implementation of the stop_reason hook for arm_prologue frames. */
2023 static enum unwind_stop_reason
2024 arm_prologue_unwind_stop_reason (struct frame_info
*this_frame
,
2027 struct arm_prologue_cache
*cache
;
2030 if (*this_cache
== NULL
)
2031 *this_cache
= arm_make_prologue_cache (this_frame
);
2032 cache
= (struct arm_prologue_cache
*) *this_cache
;
2034 /* This is meant to halt the backtrace at "_start". */
2035 pc
= get_frame_pc (this_frame
);
2036 if (pc
<= gdbarch_tdep (get_frame_arch (this_frame
))->lowest_pc
)
2037 return UNWIND_OUTERMOST
;
2039 /* If we've hit a wall, stop. */
2040 if (cache
->prev_sp
== 0)
2041 return UNWIND_OUTERMOST
;
2043 return UNWIND_NO_REASON
;
2046 /* Our frame ID for a normal frame is the current function's starting PC
2047 and the caller's SP when we were called. */
2050 arm_prologue_this_id (struct frame_info
*this_frame
,
2052 struct frame_id
*this_id
)
2054 struct arm_prologue_cache
*cache
;
2058 if (*this_cache
== NULL
)
2059 *this_cache
= arm_make_prologue_cache (this_frame
);
2060 cache
= (struct arm_prologue_cache
*) *this_cache
;
2062 /* Use function start address as part of the frame ID. If we cannot
2063 identify the start address (due to missing symbol information),
2064 fall back to just using the current PC. */
2065 pc
= get_frame_pc (this_frame
);
2066 func
= get_frame_func (this_frame
);
2070 id
= frame_id_build (cache
->prev_sp
, func
);
2074 static struct value
*
2075 arm_prologue_prev_register (struct frame_info
*this_frame
,
2079 struct gdbarch
*gdbarch
= get_frame_arch (this_frame
);
2080 struct arm_prologue_cache
*cache
;
2082 if (*this_cache
== NULL
)
2083 *this_cache
= arm_make_prologue_cache (this_frame
);
2084 cache
= (struct arm_prologue_cache
*) *this_cache
;
2086 /* If we are asked to unwind the PC, then we need to return the LR
2087 instead. The prologue may save PC, but it will point into this
2088 frame's prologue, not the next frame's resume location. Also
2089 strip the saved T bit. A valid LR may have the low bit set, but
2090 a valid PC never does. */
2091 if (prev_regnum
== ARM_PC_REGNUM
)
2095 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
2096 return frame_unwind_got_constant (this_frame
, prev_regnum
,
2097 arm_addr_bits_remove (gdbarch
, lr
));
2100 /* SP is generally not saved to the stack, but this frame is
2101 identified by the next frame's stack pointer at the time of the call.
2102 The value was already reconstructed into PREV_SP. */
2103 if (prev_regnum
== ARM_SP_REGNUM
)
2104 return frame_unwind_got_constant (this_frame
, prev_regnum
, cache
->prev_sp
);
2106 /* The CPSR may have been changed by the call instruction and by the
2107 called function. The only bit we can reconstruct is the T bit,
2108 by checking the low bit of LR as of the call. This is a reliable
2109 indicator of Thumb-ness except for some ARM v4T pre-interworking
2110 Thumb code, which could get away with a clear low bit as long as
2111 the called function did not use bx. Guess that all other
2112 bits are unchanged; the condition flags are presumably lost,
2113 but the processor status is likely valid. */
2114 if (prev_regnum
== ARM_PS_REGNUM
)
2117 ULONGEST t_bit
= arm_psr_thumb_bit (gdbarch
);
2119 cpsr
= get_frame_register_unsigned (this_frame
, prev_regnum
);
2120 lr
= frame_unwind_register_unsigned (this_frame
, ARM_LR_REGNUM
);
2121 if (IS_THUMB_ADDR (lr
))
2125 return frame_unwind_got_constant (this_frame
, prev_regnum
, cpsr
);
2128 return trad_frame_get_prev_register (this_frame
, cache
->saved_regs
,
2132 struct frame_unwind arm_prologue_unwind
= {
2134 arm_prologue_unwind_stop_reason
,
2135 arm_prologue_this_id
,
2136 arm_prologue_prev_register
,
2138 default_frame_sniffer
2141 /* Maintain a list of ARM exception table entries per objfile, similar to the
2142 list of mapping symbols. We only cache entries for standard ARM-defined
2143 personality routines; the cache will contain only the frame unwinding
2144 instructions associated with the entry (not the descriptors). */
2146 static const struct objfile_data
*arm_exidx_data_key
;
2148 struct arm_exidx_entry
2153 typedef struct arm_exidx_entry arm_exidx_entry_s
;
2154 DEF_VEC_O(arm_exidx_entry_s
);
2156 struct arm_exidx_data
2158 VEC(arm_exidx_entry_s
) **section_maps
;
2162 arm_exidx_data_free (struct objfile
*objfile
, void *arg
)
2164 struct arm_exidx_data
*data
= (struct arm_exidx_data
*) arg
;
2167 for (i
= 0; i
< objfile
->obfd
->section_count
; i
++)
2168 VEC_free (arm_exidx_entry_s
, data
->section_maps
[i
]);
2172 arm_compare_exidx_entries (const struct arm_exidx_entry
*lhs
,
2173 const struct arm_exidx_entry
*rhs
)
2175 return lhs
->addr
< rhs
->addr
;
2178 static struct obj_section
*
2179 arm_obj_section_from_vma (struct objfile
*objfile
, bfd_vma vma
)
2181 struct obj_section
*osect
;
2183 ALL_OBJFILE_OSECTIONS (objfile
, osect
)
2184 if (bfd_get_section_flags (objfile
->obfd
,
2185 osect
->the_bfd_section
) & SEC_ALLOC
)
2187 bfd_vma start
, size
;
2188 start
= bfd_get_section_vma (objfile
->obfd
, osect
->the_bfd_section
);
2189 size
= bfd_get_section_size (osect
->the_bfd_section
);
2191 if (start
<= vma
&& vma
< start
+ size
)
2198 /* Parse contents of exception table and exception index sections
2199 of OBJFILE, and fill in the exception table entry cache.
2201 For each entry that refers to a standard ARM-defined personality
2202 routine, extract the frame unwinding instructions (from either
2203 the index or the table section). The unwinding instructions
2205 - extracting them from the rest of the table data
2206 - converting to host endianness
2207 - appending the implicit 0xb0 ("Finish") code
2209 The extracted and normalized instructions are stored for later
2210 retrieval by the arm_find_exidx_entry routine. */
2213 arm_exidx_new_objfile (struct objfile
*objfile
)
2215 struct cleanup
*cleanups
;
2216 struct arm_exidx_data
*data
;
2217 asection
*exidx
, *extab
;
2218 bfd_vma exidx_vma
= 0, extab_vma
= 0;
2219 bfd_size_type exidx_size
= 0, extab_size
= 0;
2220 gdb_byte
*exidx_data
= NULL
, *extab_data
= NULL
;
2223 /* If we've already touched this file, do nothing. */
2224 if (!objfile
|| objfile_data (objfile
, arm_exidx_data_key
) != NULL
)
2226 cleanups
= make_cleanup (null_cleanup
, NULL
);
2228 /* Read contents of exception table and index. */
2229 exidx
= bfd_get_section_by_name (objfile
->obfd
, ".ARM.exidx");
2232 exidx_vma
= bfd_section_vma (objfile
->obfd
, exidx
);
2233 exidx_size
= bfd_get_section_size (exidx
);
2234 exidx_data
= (gdb_byte
*) xmalloc (exidx_size
);
2235 make_cleanup (xfree
, exidx_data
);
2237 if (!bfd_get_section_contents (objfile
->obfd
, exidx
,
2238 exidx_data
, 0, exidx_size
))
2240 do_cleanups (cleanups
);
2245 extab
= bfd_get_section_by_name (objfile
->obfd
, ".ARM.extab");
2248 extab_vma
= bfd_section_vma (objfile
->obfd
, extab
);
2249 extab_size
= bfd_get_section_size (extab
);
2250 extab_data
= (gdb_byte
*) xmalloc (extab_size
);
2251 make_cleanup (xfree
, extab_data
);
2253 if (!bfd_get_section_contents (objfile
->obfd
, extab
,
2254 extab_data
, 0, extab_size
))
2256 do_cleanups (cleanups
);
2261 /* Allocate exception table data structure. */
2262 data
= OBSTACK_ZALLOC (&objfile
->objfile_obstack
, struct arm_exidx_data
);
2263 set_objfile_data (objfile
, arm_exidx_data_key
, data
);
2264 data
->section_maps
= OBSTACK_CALLOC (&objfile
->objfile_obstack
,
2265 objfile
->obfd
->section_count
,
2266 VEC(arm_exidx_entry_s
) *);
2268 /* Fill in exception table. */
2269 for (i
= 0; i
< exidx_size
/ 8; i
++)
2271 struct arm_exidx_entry new_exidx_entry
;
2272 bfd_vma idx
= bfd_h_get_32 (objfile
->obfd
, exidx_data
+ i
* 8);
2273 bfd_vma val
= bfd_h_get_32 (objfile
->obfd
, exidx_data
+ i
* 8 + 4);
2274 bfd_vma addr
= 0, word
= 0;
2275 int n_bytes
= 0, n_words
= 0;
2276 struct obj_section
*sec
;
2277 gdb_byte
*entry
= NULL
;
2279 /* Extract address of start of function. */
2280 idx
= ((idx
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2281 idx
+= exidx_vma
+ i
* 8;
2283 /* Find section containing function and compute section offset. */
2284 sec
= arm_obj_section_from_vma (objfile
, idx
);
2287 idx
-= bfd_get_section_vma (objfile
->obfd
, sec
->the_bfd_section
);
2289 /* Determine address of exception table entry. */
2292 /* EXIDX_CANTUNWIND -- no exception table entry present. */
2294 else if ((val
& 0xff000000) == 0x80000000)
2296 /* Exception table entry embedded in .ARM.exidx
2297 -- must be short form. */
2301 else if (!(val
& 0x80000000))
2303 /* Exception table entry in .ARM.extab. */
2304 addr
= ((val
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2305 addr
+= exidx_vma
+ i
* 8 + 4;
2307 if (addr
>= extab_vma
&& addr
+ 4 <= extab_vma
+ extab_size
)
2309 word
= bfd_h_get_32 (objfile
->obfd
,
2310 extab_data
+ addr
- extab_vma
);
2313 if ((word
& 0xff000000) == 0x80000000)
2318 else if ((word
& 0xff000000) == 0x81000000
2319 || (word
& 0xff000000) == 0x82000000)
2323 n_words
= ((word
>> 16) & 0xff);
2325 else if (!(word
& 0x80000000))
2328 struct obj_section
*pers_sec
;
2329 int gnu_personality
= 0;
2331 /* Custom personality routine. */
2332 pers
= ((word
& 0x7fffffff) ^ 0x40000000) - 0x40000000;
2333 pers
= UNMAKE_THUMB_ADDR (pers
+ addr
- 4);
2335 /* Check whether we've got one of the variants of the
2336 GNU personality routines. */
2337 pers_sec
= arm_obj_section_from_vma (objfile
, pers
);
2340 static const char *personality
[] =
2342 "__gcc_personality_v0",
2343 "__gxx_personality_v0",
2344 "__gcj_personality_v0",
2345 "__gnu_objc_personality_v0",
2349 CORE_ADDR pc
= pers
+ obj_section_offset (pers_sec
);
2352 for (k
= 0; personality
[k
]; k
++)
2353 if (lookup_minimal_symbol_by_pc_name
2354 (pc
, personality
[k
], objfile
))
2356 gnu_personality
= 1;
2361 /* If so, the next word contains a word count in the high
2362 byte, followed by the same unwind instructions as the
2363 pre-defined forms. */
2365 && addr
+ 4 <= extab_vma
+ extab_size
)
2367 word
= bfd_h_get_32 (objfile
->obfd
,
2368 extab_data
+ addr
- extab_vma
);
2371 n_words
= ((word
>> 24) & 0xff);
2377 /* Sanity check address. */
2379 if (addr
< extab_vma
|| addr
+ 4 * n_words
> extab_vma
+ extab_size
)
2380 n_words
= n_bytes
= 0;
2382 /* The unwind instructions reside in WORD (only the N_BYTES least
2383 significant bytes are valid), followed by N_WORDS words in the
2384 extab section starting at ADDR. */
2385 if (n_bytes
|| n_words
)
2388 = (gdb_byte
*) obstack_alloc (&objfile
->objfile_obstack
,
2389 n_bytes
+ n_words
* 4 + 1);
2392 *p
++ = (gdb_byte
) ((word
>> (8 * n_bytes
)) & 0xff);
2396 word
= bfd_h_get_32 (objfile
->obfd
,
2397 extab_data
+ addr
- extab_vma
);
2400 *p
++ = (gdb_byte
) ((word
>> 24) & 0xff);
2401 *p
++ = (gdb_byte
) ((word
>> 16) & 0xff);
2402 *p
++ = (gdb_byte
) ((word
>> 8) & 0xff);
2403 *p
++ = (gdb_byte
) (word
& 0xff);
2406 /* Implied "Finish" to terminate the list. */
2410 /* Push entry onto vector. They are guaranteed to always
2411 appear in order of increasing addresses. */
2412 new_exidx_entry
.addr
= idx
;
2413 new_exidx_entry
.entry
= entry
;
2414 VEC_safe_push (arm_exidx_entry_s
,
2415 data
->section_maps
[sec
->the_bfd_section
->index
],
2419 do_cleanups (cleanups
);
/* Search for the exception table entry covering MEMADDR.  If one is found,
   return a pointer to its data.  Otherwise, return 0.  If START is non-NULL,
   set *START to the start of the region covered by this entry.  */

static gdb_byte *
arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      struct arm_exidx_data *data;
      VEC(arm_exidx_entry_s) *map;
      /* Keys in the per-section map are section-relative offsets, so
	 translate MEMADDR before searching.  */
      struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
      unsigned int idx;

      data = ((struct arm_exidx_data *)
	      objfile_data (sec->objfile, arm_exidx_data_key));
      if (data != NULL)
	{
	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_exidx_entry_s, map))
	    {
	      struct arm_exidx_entry *map_sym;

	      idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
				     arm_compare_exidx_entries);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 exception table entry covers this address.  */
	      if (idx < VEC_length (arm_exidx_entry_s, map))
		{
		  map_sym = VEC_index (arm_exidx_entry_s, map, idx);
		  if (map_sym->addr == map_key.addr)
		    {
		      if (start)
			*start = map_sym->addr + obj_section_addr (sec);
		      return map_sym->entry;
		    }
		}

	      if (idx > 0)
		{
		  map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
		  if (start)
		    *start = map_sym->addr + obj_section_addr (sec);
		  return map_sym->entry;
		}
	    }
	}
    }

  return NULL;
}
/* Given the current frame THIS_FRAME, and its associated frame unwinding
   instruction list from the ARM exception table entry ENTRY, allocate and
   return a prologue cache structure describing how to unwind this frame.

   Return NULL if the unwinding instruction list contains a "spare",
   "reserved" or "refuse to unwind" instruction as defined in section
   "9.3 Frame unwinding instructions" of the "Exception Handling ABI
   for the ARM Architecture" document.  */

static struct arm_prologue_cache *
arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
{
  CORE_ADDR vsp = 0;
  int vsp_valid = 0;

  struct arm_prologue_cache *cache;
  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  for (;;)
    {
      gdb_byte insn;

      /* Whenever we reload SP, we actually have to retrieve its
	 actual value in the current frame.  */
      if (!vsp_valid)
	{
	  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
	    {
	      int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
	      vsp = get_frame_register_unsigned (this_frame, reg);
	    }
	  else
	    {
	      CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
	      vsp = get_frame_memory_unsigned (this_frame, addr, 4);
	    }

	  vsp_valid = 1;
	}

      /* Decode next unwind instruction.  */
      insn = *entry++;

      if ((insn & 0xc0) == 0)
	{
	  /* vsp = vsp + (xxxxxx << 2) + 4.  */
	  int offset = insn & 0x3f;
	  vsp += (offset << 2) + 4;
	}
      else if ((insn & 0xc0) == 0x40)
	{
	  /* vsp = vsp - (xxxxxx << 2) - 4.  */
	  int offset = insn & 0x3f;
	  vsp -= (offset << 2) + 4;
	}
      else if ((insn & 0xf0) == 0x80)
	{
	  int mask = ((insn & 0xf) << 8) | *entry++;
	  int i;

	  /* The special case of an all-zero mask identifies
	     "Refuse to unwind".  We return NULL to fall back
	     to the prologue analyzer.  */
	  if (mask == 0)
	    return NULL;

	  /* Pop registers r4..r15 under mask.  */
	  for (i = 0; i < 12; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[4 + i].addr = vsp;
		vsp += 4;
	      }

	  /* Special-case popping SP -- we need to reload vsp.  */
	  if (mask & (1 << (ARM_SP_REGNUM - 4)))
	    vsp_valid = 0;
	}
      else if ((insn & 0xf0) == 0x90)
	{
	  int reg = insn & 0xf;

	  /* Reserved cases.  */
	  if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
	    return NULL;

	  /* Set SP from another register and mark VSP for reload.  */
	  cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
	  vsp_valid = 0;
	}
      else if ((insn & 0xf0) == 0xa0)
	{
	  int count = insn & 0x7;
	  int pop_lr = (insn & 0x8) != 0;
	  int i;

	  /* Pop r4..r[4+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[4 + i].addr = vsp;
	      vsp += 4;
	    }

	  /* If indicated by flag, pop LR as well.  */
	  if (pop_lr)
	    {
	      cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
	      vsp += 4;
	    }
	}
      else if (insn == 0xb0)
	{
	  /* We could only have updated PC by popping into it; if so, it
	     will show up as address.  Otherwise, copy LR into PC.  */
	  if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
	    cache->saved_regs[ARM_PC_REGNUM]
	      = cache->saved_regs[ARM_LR_REGNUM];

	  /* "Finish" -- we're done with this unwind list.  */
	  break;
	}
      else if (insn == 0xb1)
	{
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop r0..r3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[i].addr = vsp;
		vsp += 4;
	      }
	}
      else if (insn == 0xb2)
	{
	  /* vsp = vsp + 0x204 + (uleb128 << 2).  */
	  ULONGEST offset = 0;
	  unsigned shift = 0;

	  do
	    {
	      offset |= (*entry & 0x7f) << shift;
	      shift += 7;
	    }
	  while (*entry++ & 0x80);

	  vsp += 0x204 + (offset << 2);
	}
      else if (insn == 0xb3)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D15 are valid here.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      else if ((insn & 0xf8) == 0xb8)
	{
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      else if (insn == 0xc6)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers WR0..WR15 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop iwmmx registers WR[start]..WR[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc7)
	{
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
		vsp += 4;
	      }
	}
      else if ((insn & 0xf8) == 0xc0)
	{
	  int count = insn & 0x7;
	  int i;

	  /* Pop iwmmx registers WR[10]..WR[10+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc8)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D31 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers
	     D[16+start]..D[16+start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc9)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if ((insn & 0xf8) == 0xd0)
	{
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else
	{
	  /* Everything else is "spare".  */
	  return NULL;
	}
    }

  /* If we restore SP from a register, assume this was the frame register.
     Otherwise just fall back to SP as frame register.  */
  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
    cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
  else
    cache->framereg = ARM_SP_REGNUM;

  /* Determine offset to previous frame.  */
  cache->framesize
    = vsp - get_frame_register_unsigned (this_frame, cache->framereg);

  /* We already got the previous SP.  */
  cache->prev_sp = vsp;

  return cache;
}
/* Unwinding via ARM exception table entries.  Note that the sniffer
   already computes a filled-in prologue cache, which is then used
   with the same arm_prologue_this_id and arm_prologue_prev_register
   routines also used for prologue-parsing based unwinding.  */

static int
arm_exidx_unwind_sniffer (const struct frame_unwind *self,
			  struct frame_info *this_frame,
			  void **this_prologue_cache)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR addr_in_block, exidx_region, func_start;
  struct arm_prologue_cache *cache;
  gdb_byte *entry;

  /* See if we have an ARM exception table entry covering this address.  */
  addr_in_block = get_frame_address_in_block (this_frame);
  entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
  if (!entry)
    return 0;

  /* The ARM exception table does not describe unwind information
     for arbitrary PC values, but is guaranteed to be correct only
     at call sites.  We have to decide here whether we want to use
     ARM exception table information for this frame, or fall back
     to using prologue parsing.  (Note that if we have DWARF CFI,
     this sniffer isn't even called -- CFI is always preferred.)

     Before we make this decision, however, we check whether we
     actually have *symbol* information for the current frame.
     If not, prologue parsing would not work anyway, so we might
     as well use the exception table and hope for the best.  */
  if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
    {
      int exc_valid = 0;

      /* If the next frame is "normal", we are at a call site in this
	 frame, so exception information is guaranteed to be valid.  */
      if (get_next_frame (this_frame)
	  && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
	exc_valid = 1;

      /* We also assume exception information is valid if we're currently
	 blocked in a system call.  The system library is supposed to
	 ensure this, so that e.g. pthread cancellation works.  */
      if (arm_frame_is_thumb (this_frame))
	{
	  LONGEST insn;

	  if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
					byte_order_for_code, &insn)
	      && (insn & 0xff00) == 0xdf00 /* svc */)
	    exc_valid = 1;
	}
      else
	{
	  LONGEST insn;

	  if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
					byte_order_for_code, &insn)
	      && (insn & 0x0f000000) == 0x0f000000 /* svc */)
	    exc_valid = 1;
	}

      /* Bail out if we don't know that exception information is valid.  */
      if (!exc_valid)
	return 0;

      /* The ARM exception index does not mark the *end* of the region
	 covered by the entry, and some functions will not have any entry.
	 To correctly recognize the end of the covered region, the linker
	 should have inserted dummy records with a CANTUNWIND marker.

	 Unfortunately, current versions of GNU ld do not reliably do
	 this, and thus we may have found an incorrect entry above.
	 As a (temporary) sanity check, we only use the entry if it
	 lies *within* the bounds of the function.  Note that this check
	 might reject perfectly valid entries that just happen to cover
	 multiple functions; therefore this check ought to be removed
	 once the linker is fixed.  */
      if (func_start > exidx_region)
	return 0;
    }

  /* Decode the list of unwinding instructions into a prologue cache.
     Note that this may fail due to e.g. a "refuse to unwind" code.  */
  cache = arm_exidx_fill_cache (this_frame, entry);
  if (!cache)
    return 0;

  *this_prologue_cache = cache;
  return 1;
}
/* Unwinder based on the ARM .ARM.exidx exception tables; reuses the
   prologue unwinder's this_id / prev_register hooks on the cache the
   sniffer fills in.  NOTE(review): the frame type and prev-arch slots
   were lost in extraction; NORMAL_FRAME / NULL restored per the
   standard struct frame_unwind layout -- confirm against upstream.  */
struct frame_unwind arm_exidx_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_exidx_unwind_sniffer
};
/* Recognize GCC's trampoline for thumb call-indirect.  If we are in a
   trampoline, return the target PC.  Otherwise return 0.

   void call0a (char c, short s, int i, long l) {}

   int main (void)
   {
     (*pointer_to_call0a) (c, s, i, l);
   }

   Instead of calling a stub library function  _call_via_xx (xx is
   the register name), GCC may inline the trampoline in the object
   file as below (register r2 has the address of call0a).

   .global main
   .type main, %function
   ...
   bl .L1
   ...
   .size main, .-main

   .L1:
   bx r2

   The trampoline 'bx r2' doesn't belong to main.  */

static CORE_ADDR
arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
{
  /* The heuristics of recognizing such trampoline is that FRAME is
     executing in Thumb mode and the instruction on PC is 'bx Rm'.  */
  if (arm_frame_is_thumb (frame))
    {
      gdb_byte buf[2];

      if (target_read_memory (pc, buf, 2) == 0)
	{
	  struct gdbarch *gdbarch = get_frame_arch (frame);
	  enum bfd_endian byte_order_for_code
	    = gdbarch_byte_order_for_code (gdbarch);
	  uint16_t insn
	    = extract_unsigned_integer (buf, 2, byte_order_for_code);

	  if ((insn & 0xff80) == 0x4700)  /* bx <Rm> */
	    {
	      CORE_ADDR dest
		= get_frame_register_unsigned (frame, bits (insn, 3, 6));

	      /* Clear the LSB so that gdb core sets step-resume
		 breakpoint at the right address.  */
	      return UNMAKE_THUMB_ADDR (dest);
	    }
	}
    }

  return 0;
}
2941 static struct arm_prologue_cache
*
2942 arm_make_stub_cache (struct frame_info
*this_frame
)
2944 struct arm_prologue_cache
*cache
;
2946 cache
= FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache
);
2947 cache
->saved_regs
= trad_frame_alloc_saved_regs (this_frame
);
2949 cache
->prev_sp
= get_frame_register_unsigned (this_frame
, ARM_SP_REGNUM
);
2954 /* Our frame ID for a stub frame is the current SP and LR. */
2957 arm_stub_this_id (struct frame_info
*this_frame
,
2959 struct frame_id
*this_id
)
2961 struct arm_prologue_cache
*cache
;
2963 if (*this_cache
== NULL
)
2964 *this_cache
= arm_make_stub_cache (this_frame
);
2965 cache
= (struct arm_prologue_cache
*) *this_cache
;
2967 *this_id
= frame_id_build (cache
->prev_sp
, get_frame_pc (this_frame
));
/* Decide whether the stub unwinder applies: either we are in a PLT
   stub / unreadable code (where prologue analysis cannot work), or in
   an inlined 'bx <Rm>' call trampoline with no symbol of its own.  */

static int
arm_stub_unwind_sniffer (const struct frame_unwind *self,
			 struct frame_info *this_frame,
			 void **this_prologue_cache)
{
  CORE_ADDR addr_in_block;
  gdb_byte dummy[4];
  CORE_ADDR pc, start_addr;
  const char *name;

  addr_in_block = get_frame_address_in_block (this_frame);
  pc = get_frame_pc (this_frame);
  if (in_plt_section (addr_in_block)
      /* We also use the stub winder if the target memory is unreadable
	 to avoid having the prologue unwinder trying to read it.  */
      || target_read_memory (pc, dummy, 4) != 0)
    return 1;

  /* No symbol covers PC, but it is a recognizable 'bx Rm' trampoline.  */
  if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
      && arm_skip_bx_reg (this_frame, pc) != 0)
    return 1;

  return 0;
}
/* Unwinder for stub frames (PLT entries, unreadable code, inline 'bx'
   trampolines).  NOTE(review): the frame type, this_id and prev-arch
   slots were lost in extraction; NORMAL_FRAME / arm_stub_this_id / NULL
   restored per the standard struct frame_unwind layout -- confirm.  */
struct frame_unwind arm_stub_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_stub_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_stub_unwind_sniffer
};
/* Put here the code to store, into CACHE->saved_regs, the addresses
   of the saved registers of frame described by THIS_FRAME.  CACHE is
   returned.  */

static struct arm_prologue_cache *
arm_m_exception_cache (struct frame_info *this_frame)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  struct arm_prologue_cache *cache;
  CORE_ADDR unwound_sp;
  LONGEST xpsr;

  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  unwound_sp = get_frame_register_unsigned (this_frame,
					    ARM_SP_REGNUM);

  /* The hardware saves eight 32-bit words, comprising xPSR,
     ReturnAddress, LR (R14), R12, R3, R2, R1, R0.  See details in
     "B1.5.6 Exception entry behavior" in
     "ARMv7-M Architecture Reference Manual".  */
  cache->saved_regs[0].addr = unwound_sp;
  cache->saved_regs[1].addr = unwound_sp + 4;
  cache->saved_regs[2].addr = unwound_sp + 8;
  cache->saved_regs[3].addr = unwound_sp + 12;
  cache->saved_regs[12].addr = unwound_sp + 16;
  cache->saved_regs[14].addr = unwound_sp + 20;
  cache->saved_regs[15].addr = unwound_sp + 24;
  cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;

  /* If bit 9 of the saved xPSR is set, then there is a four-byte
     aligner between the top of the 32-byte stack frame and the
     previous context's stack pointer.  */
  cache->prev_sp = unwound_sp + 32;
  if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
      && (xpsr & (1 << 9)) != 0)
    cache->prev_sp += 4;

  return cache;
}
3047 /* Implementation of function hook 'this_id' in
3048 'struct frame_uwnind'. */
3051 arm_m_exception_this_id (struct frame_info
*this_frame
,
3053 struct frame_id
*this_id
)
3055 struct arm_prologue_cache
*cache
;
3057 if (*this_cache
== NULL
)
3058 *this_cache
= arm_m_exception_cache (this_frame
);
3059 cache
= (struct arm_prologue_cache
*) *this_cache
;
3061 /* Our frame ID for a stub frame is the current SP and LR. */
3062 *this_id
= frame_id_build (cache
->prev_sp
,
3063 get_frame_pc (this_frame
));
/* Implementation of function hook 'prev_register' in
   'struct frame_unwind' for M-profile exception frames.  */

static struct value *
arm_m_exception_prev_register (struct frame_info *this_frame,
			       void **this_cache,
			       int prev_regnum)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  struct arm_prologue_cache *cache;

  if (*this_cache == NULL)
    *this_cache = arm_m_exception_cache (this_frame);
  cache = (struct arm_prologue_cache *) *this_cache;

  /* The value was already reconstructed into PREV_SP.  */
  if (prev_regnum == ARM_SP_REGNUM)
    return frame_unwind_got_constant (this_frame, prev_regnum,
				      cache->prev_sp);

  /* All other registers come from the hardware-saved frame recorded
     in the cache (or pass through unchanged).  */
  return trad_frame_get_prev_register (this_frame, cache->saved_regs,
				       prev_regnum);
}
3090 /* Implementation of function hook 'sniffer' in
3091 'struct frame_uwnind'. */
3094 arm_m_exception_unwind_sniffer (const struct frame_unwind
*self
,
3095 struct frame_info
*this_frame
,
3096 void **this_prologue_cache
)
3098 CORE_ADDR this_pc
= get_frame_pc (this_frame
);
3100 /* No need to check is_m; this sniffer is only registered for
3101 M-profile architectures. */
3103 /* Exception frames return to one of these magic PCs. Other values
3104 are not defined as of v7-M. See details in "B1.5.8 Exception
3105 return behavior" in "ARMv7-M Architecture Reference Manual". */
3106 if (this_pc
== 0xfffffff1 || this_pc
== 0xfffffff9
3107 || this_pc
== 0xfffffffd)
/* Frame unwinder for M-profile exceptions.  NOTE(review): the frame
   type and prev-arch slots were lost in extraction; SIGTRAMP_FRAME /
   NULL restored per the standard struct frame_unwind layout -- confirm
   against upstream.  */

struct frame_unwind arm_m_exception_unwind =
{
  SIGTRAMP_FRAME,
  default_frame_unwind_stop_reason,
  arm_m_exception_this_id,
  arm_m_exception_prev_register,
  NULL,
  arm_m_exception_unwind_sniffer
};
3126 arm_normal_frame_base (struct frame_info
*this_frame
, void **this_cache
)
3128 struct arm_prologue_cache
*cache
;
3130 if (*this_cache
== NULL
)
3131 *this_cache
= arm_make_prologue_cache (this_frame
);
3132 cache
= (struct arm_prologue_cache
*) *this_cache
;
3134 return cache
->prev_sp
- cache
->framesize
;
/* Frame base for prologue-analyzed frames: the same address serves as
   frame base, locals base and args base.  */
struct frame_base arm_normal_base = {
  &arm_prologue_unwind,
  arm_normal_frame_base,
  arm_normal_frame_base,
  arm_normal_frame_base
};
3144 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3145 dummy frame. The frame ID's base needs to match the TOS value
3146 saved by save_dummy_frame_tos() and returned from
3147 arm_push_dummy_call, and the PC needs to match the dummy frame's
3150 static struct frame_id
3151 arm_dummy_id (struct gdbarch
*gdbarch
, struct frame_info
*this_frame
)
3153 return frame_id_build (get_frame_register_unsigned (this_frame
,
3155 get_frame_pc (this_frame
));
3158 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3159 be used to construct the previous frame's ID, after looking up the
3160 containing function). */
3163 arm_unwind_pc (struct gdbarch
*gdbarch
, struct frame_info
*this_frame
)
3166 pc
= frame_unwind_register_unsigned (this_frame
, ARM_PC_REGNUM
);
3167 return arm_addr_bits_remove (gdbarch
, pc
);
3171 arm_unwind_sp (struct gdbarch
*gdbarch
, struct frame_info
*this_frame
)
3173 return frame_unwind_register_unsigned (this_frame
, ARM_SP_REGNUM
);
/* DWARF CFI hook for registers needing special reconstruction: the PC
   (saved as LR, possibly with the Thumb bit set) and the PS/CPSR
   (whose T bit must be rebuilt from the return address).  */

static struct value *
arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
			  int regnum)
{
  struct gdbarch * gdbarch = get_frame_arch (this_frame);
  CORE_ADDR lr, cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);

  switch (regnum)
    {
    case ARM_PC_REGNUM:
      /* The PC is normally copied from the return column, which
	 describes saves of LR.  However, that version may have an
	 extra bit set to indicate Thumb state.  The bit is not
	 part of the PC.  */
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      return frame_unwind_got_constant (this_frame, regnum,
					arm_addr_bits_remove (gdbarch, lr));

    case ARM_PS_REGNUM:
      /* Reconstruct the T bit; see arm_prologue_prev_register for details.  */
      cpsr = get_frame_register_unsigned (this_frame, regnum);
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      if (IS_THUMB_ADDR (lr))
	cpsr |= t_bit;
      else
	cpsr &= ~t_bit;
      return frame_unwind_got_constant (this_frame, regnum, cpsr);

    default:
      internal_error (__FILE__, __LINE__,
		      _("Unexpected register %d"), regnum);
    }
}
/* Set up the default DWARF CFI rules for ARM registers: PC and PS are
   reconstructed by arm_dwarf2_prev_register; SP follows the CFA.  */

static void
arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
			   struct dwarf2_frame_state_reg *reg,
			   struct frame_info *this_frame)
{
  switch (regnum)
    {
    case ARM_PC_REGNUM:
    case ARM_PS_REGNUM:
      reg->how = DWARF2_FRAME_REG_FN;
      reg->loc.fn = arm_dwarf2_prev_register;
      break;
    case ARM_SP_REGNUM:
      reg->how = DWARF2_FRAME_REG_CFA;
      break;
    }
}
/* Implement the stack_frame_destroyed_p gdbarch method (Thumb mode).  */

static int
thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn, insn2;
  int found_return = 0, found_stack_adjust = 0;
  CORE_ADDR func_start, func_end;
  CORE_ADDR scan_pc;
  gdb_byte buf[4];

  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* The epilogue is a sequence of instructions along the following lines:

    - add stack frame size to SP or FP
    - [if frame pointer used] restore SP from FP
    - restore registers from SP [may include PC]
    - a return-type instruction [if PC wasn't already restored]

    In a first pass, we scan forward from the current PC and verify the
    instructions we find as compatible with this sequence, ending in a
    return instruction.

    However, this is not sufficient to distinguish indirect function calls
    within a function from indirect tail calls in the epilogue in some cases.
    Therefore, if we didn't already find any SP-changing instruction during
    forward scan, we add a backward scanning heuristic to ensure we actually
    are in the epilogue.  */

  scan_pc = pc;
  while (scan_pc < func_end && !found_return)
    {
      if (target_read_memory (scan_pc, buf, 2))
	break;

      scan_pc += 2;
      insn = extract_unsigned_integer (buf, 2, byte_order_for_code);

      if ((insn & 0xff80) == 0x4700)  /* bx <Rm> */
	found_return = 1;
      else if (insn == 0x46f7)  /* mov pc, lr */
	found_return = 1;
      else if (thumb_instruction_restores_sp (insn))
	{
	  if ((insn & 0xff00) == 0xbd00)  /* pop <registers, PC> */
	    found_return = 1;
	}
      else if (thumb_insn_size (insn) == 4)  /* 32-bit Thumb-2 instruction */
	{
	  if (target_read_memory (scan_pc, buf, 2))
	    break;

	  scan_pc += 2;
	  insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);

	  if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
	    {
	      if (insn2 & 0x8000)  /* <registers> include PC.  */
		found_return = 1;
	    }
	  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
		   && (insn2 & 0x0fff) == 0x0b04)
	    {
	      if ((insn2 & 0xf000) == 0xf000) /* <Rt> is PC.  */
		found_return = 1;
	    }
	  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
		   && (insn2 & 0x0e00) == 0x0a00)
	    ;
	  else
	    break;
	}
      else
	break;
    }

  if (!found_return)
    return 0;

  /* Since any instruction in the epilogue sequence, with the possible
     exception of return itself, updates the stack pointer, we need to
     scan backwards for at most one instruction.  Try either a 16-bit or
     a 32-bit instruction.  This is just a heuristic, so we do not worry
     too much about false positives.  */

  if (pc - 4 < func_start)
    return 0;
  if (target_read_memory (pc - 4, buf, 4))
    return 0;

  insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
  insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);

  if (thumb_instruction_restores_sp (insn2))
    found_stack_adjust = 1;
  else if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
    found_stack_adjust = 1;
  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
	   && (insn2 & 0x0fff) == 0x0b04)
    found_stack_adjust = 1;
  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
	   && (insn2 & 0x0e00) == 0x0a00)
    found_stack_adjust = 1;

  return found_stack_adjust;
}
/* Implement the stack_frame_destroyed_p gdbarch method (ARM mode;
   dispatches to the Thumb variant when PC is in Thumb code).  */

static int
arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn;
  int found_return;
  CORE_ADDR func_start, func_end;

  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_stack_frame_destroyed_p (gdbarch, pc);

  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* We are in the epilogue if the previous instruction was a stack
     adjustment and the next instruction is a possible return (bx, mov
     pc, or pop).  We could have to scan backwards to find the stack
     adjustment, or forwards to find the return, but this is a decent
     approximation.  First scan forwards.  */

  found_return = 0;
  insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
  if (bits (insn, 28, 31) != INST_NV)
    {
      if ((insn & 0x0ffffff0) == 0x012fff10)
	/* BX.  */
	found_return = 1;
      else if ((insn & 0x0ffffff0) == 0x01a0f000)
	/* MOV PC.  */
	found_return = 1;
      else if ((insn & 0x0fff0000) == 0x08bd0000
	       && (insn & 0x0000c000) != 0)
	/* POP (LDMIA), including PC or LR.  */
	found_return = 1;
    }

  if (!found_return)
    return 0;

  /* Scan backwards.  This is just a heuristic, so do not worry about
     false positives from mode changes.  */

  if (pc < func_start + 4)
    return 0;

  insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
  if (arm_instruction_restores_sp (insn))
    return 1;

  return 0;
}
/* When arguments must be pushed onto the stack, they go on in reverse
   order.  The code below implements a FILO (stack) to do this.
   NOTE(review): only the PREV field survived extraction; the LEN and
   DATA fields are restored per their uses in push/pop below -- confirm
   against upstream.  */

struct stack_item
{
  int len;			/* Number of bytes in DATA.  */
  struct stack_item *prev;	/* Next-older item on the stack.  */
  void *data;			/* Heap copy of the pushed bytes.  */
};
/* Push LEN bytes of CONTENTS onto the item stack headed by PREV;
   return the new head.  The bytes are copied to heap storage owned by
   the new item (released by pop_stack_item).  */

static struct stack_item *
push_stack_item (struct stack_item *prev, const void *contents, int len)
{
  struct stack_item *si;
  si = XNEW (struct stack_item);
  si->data = xmalloc (len);
  si->len = len;
  si->prev = prev;
  memcpy (si->data, contents, len);
  return si;
}
/* Pop the top item SI off the item stack, freeing its storage;
   return the new head (SI's predecessor).  */

static struct stack_item *
pop_stack_item (struct stack_item *si)
{
  struct stack_item *dead = si;
  si = si->prev;
  xfree (dead->data);
  xfree (dead);
  return si;
}
/* Return the alignment (in bytes) of the given type.
   NOTE(review): several scalar case labels were lost in extraction;
   the PTR/INT/FLT/SET/REF labels are restored alongside the surviving
   ENUM/RANGE/CHAR/BOOL ones -- confirm against upstream.  */

static int
arm_type_align (struct type *t)
{
  int n;
  int align;
  int falign;

  t = check_typedef (t);
  switch (TYPE_CODE (t))
    {
    default:
      /* Should never happen.  */
      internal_error (__FILE__, __LINE__, _("unknown type alignment"));
      return 4;

    case TYPE_CODE_PTR:
    case TYPE_CODE_ENUM:
    case TYPE_CODE_INT:
    case TYPE_CODE_FLT:
    case TYPE_CODE_SET:
    case TYPE_CODE_RANGE:
    case TYPE_CODE_REF:
    case TYPE_CODE_CHAR:
    case TYPE_CODE_BOOL:
      /* Scalars are aligned to their size.  */
      return TYPE_LENGTH (t);

    case TYPE_CODE_ARRAY:
    case TYPE_CODE_COMPLEX:
      /* TODO: What about vector types?  */
      return arm_type_align (TYPE_TARGET_TYPE (t));

    case TYPE_CODE_STRUCT:
    case TYPE_CODE_UNION:
      /* Aggregates are aligned to their most-aligned member.  */
      align = 1;
      for (n = 0; n < TYPE_NFIELDS (t); n++)
	{
	  falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
	  if (falign > align)
	    align = falign;
	}
      return align;
    }
}
/* Possible base types for a candidate for passing and returning in
   VFP registers.  NOTE(review): the enumerators were lost in
   extraction; restored per their uses below -- confirm against
   upstream.  */

enum arm_vfp_cprc_base_type
{
  VFP_CPRC_UNKNOWN,
  VFP_CPRC_SINGLE,
  VFP_CPRC_DOUBLE,
  VFP_CPRC_VEC64,
  VFP_CPRC_VEC128
};

/* The length of one element of base type B.  */

static unsigned
arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
{
  switch (b)
    {
    case VFP_CPRC_SINGLE:
      return 4;
    case VFP_CPRC_DOUBLE:
      return 8;
    case VFP_CPRC_VEC64:
      return 8;
    case VFP_CPRC_VEC128:
      return 16;
    default:
      internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
		      (int) b);
    }
}
/* The character ('s', 'd' or 'q') for the type of VFP register used
   for passing base type B.  */

static int
arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
{
  switch (b)
    {
    case VFP_CPRC_SINGLE:
      return 's';
    case VFP_CPRC_DOUBLE:
      return 'd';
    case VFP_CPRC_VEC64:
      return 'd';
    case VFP_CPRC_VEC128:
      return 'q';
    default:
      internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
		      (int) b);
    }
}
/* Determine whether T may be part of a candidate for passing and
   returning in VFP registers, ignoring the limit on the total number
   of components.  If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
   classification of the first valid component found; if it is not
   VFP_CPRC_UNKNOWN, all components must have the same classification
   as *BASE_TYPE.  If it is found that T contains a type not permitted
   for passing and returning in VFP registers, a type differently
   classified from *BASE_TYPE, or two types differently classified
   from each other, return -1, otherwise return the total number of
   base-type elements found (possibly 0 in an empty structure or
   array).  Vector types are not currently supported, matching the
   generic AAPCS support.  */

static int
arm_vfp_cprc_sub_candidate (struct type *t,
			    enum arm_vfp_cprc_base_type *base_type)
{
  t = check_typedef (t);
  switch (TYPE_CODE (t))
    {
    case TYPE_CODE_FLT:
      switch (TYPE_LENGTH (t))
	{
	case 4:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 1;

	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 1;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_COMPLEX:
      /* Arguments of complex T where T is one of the types float or
	 double get treated as if they are implemented as:

	 struct complexT
	 {
	   T real;
	   T imag;
	 };

      */
      switch (TYPE_LENGTH (t))
	{
	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 2;

	case 16:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 2;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_ARRAY:
      {
	int count;
	unsigned unitlen;

	count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t), base_type);
	if (count == -1)
	  return -1;
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
	return TYPE_LENGTH (t) / unitlen;
      }
      break;

    case TYPE_CODE_STRUCT:
      {
	int count = 0;
	unsigned unitlen;
	int i;

	/* Sum up the members' element counts.  */
	for (i = 0; i < TYPE_NFIELDS (t); i++)
	  {
	    int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
							base_type);
	    if (sub_count == -1)
	      return -1;
	    count += sub_count;
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  return -1;
	return count;
      }

    case TYPE_CODE_UNION:
      {
	int count = 0;
	unsigned unitlen;
	int i;

	/* A union occupies as much as its largest member.  */
	for (i = 0; i < TYPE_NFIELDS (t); i++)
	  {
	    int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
							base_type);
	    if (sub_count == -1)
	      return -1;
	    count = (count > sub_count ? count : sub_count);
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  return -1;
	return count;
      }

    default:
      break;
    }

  return -1;
}
/* Determine whether T is a VFP co-processor register candidate (CPRC)
   if passed to or returned from a non-variadic function with the VFP
   ABI in effect.  Return 1 if it is, 0 otherwise.  If it is, set
   *BASE_TYPE to the base type for T and *COUNT to the number of
   elements of that base type before returning.  */

static int
arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
			int *count)
{
  enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
  int c = arm_vfp_cprc_sub_candidate (t, &b);

  /* A CPRC has at most four base-type elements (AAPCS VFP variant).  */
  if (c <= 0 || c > 4)
    return 0;

  *base_type = b;
  *count = c;
  return 1;
}
3700 /* Return 1 if the VFP ABI should be used for passing arguments to and
3701 returning values from a function of type FUNC_TYPE, 0
3705 arm_vfp_abi_for_function (struct gdbarch
*gdbarch
, struct type
*func_type
)
3707 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
3708 /* Variadic functions always use the base ABI. Assume that functions
3709 without debug info are not variadic. */
3710 if (func_type
&& TYPE_VARARGS (check_typedef (func_type
)))
3712 /* The VFP ABI is only supported as a variant of AAPCS. */
3713 if (tdep
->arm_abi
!= ARM_ABI_AAPCS
)
3715 return gdbarch_tdep (gdbarch
)->fp_model
== ARM_FLOAT_VFP
;
3718 /* We currently only support passing parameters in integer registers, which
3719 conforms with GCC's default model, and VFP argument passing following
3720 the VFP variant of AAPCS. Several other variants exist and
3721 we should probably support some of them based on the selected ABI. */
3724 arm_push_dummy_call (struct gdbarch
*gdbarch
, struct value
*function
,
3725 struct regcache
*regcache
, CORE_ADDR bp_addr
, int nargs
,
3726 struct value
**args
, CORE_ADDR sp
, int struct_return
,
3727 CORE_ADDR struct_addr
)
3729 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
3733 struct stack_item
*si
= NULL
;
3736 unsigned vfp_regs_free
= (1 << 16) - 1;
3738 /* Determine the type of this function and whether the VFP ABI
3740 ftype
= check_typedef (value_type (function
));
3741 if (TYPE_CODE (ftype
) == TYPE_CODE_PTR
)
3742 ftype
= check_typedef (TYPE_TARGET_TYPE (ftype
));
3743 use_vfp_abi
= arm_vfp_abi_for_function (gdbarch
, ftype
);
3745 /* Set the return address. For the ARM, the return breakpoint is
3746 always at BP_ADDR. */
3747 if (arm_pc_is_thumb (gdbarch
, bp_addr
))
3749 regcache_cooked_write_unsigned (regcache
, ARM_LR_REGNUM
, bp_addr
);
3751 /* Walk through the list of args and determine how large a temporary
3752 stack is required. Need to take care here as structs may be
3753 passed on the stack, and we have to push them. */
3756 argreg
= ARM_A1_REGNUM
;
3759 /* The struct_return pointer occupies the first parameter
3760 passing register. */
3764 fprintf_unfiltered (gdb_stdlog
, "struct return in %s = %s\n",
3765 gdbarch_register_name (gdbarch
, argreg
),
3766 paddress (gdbarch
, struct_addr
));
3767 regcache_cooked_write_unsigned (regcache
, argreg
, struct_addr
);
3771 for (argnum
= 0; argnum
< nargs
; argnum
++)
3774 struct type
*arg_type
;
3775 struct type
*target_type
;
3776 enum type_code typecode
;
3777 const bfd_byte
*val
;
3779 enum arm_vfp_cprc_base_type vfp_base_type
;
3781 int may_use_core_reg
= 1;
3783 arg_type
= check_typedef (value_type (args
[argnum
]));
3784 len
= TYPE_LENGTH (arg_type
);
3785 target_type
= TYPE_TARGET_TYPE (arg_type
);
3786 typecode
= TYPE_CODE (arg_type
);
3787 val
= value_contents (args
[argnum
]);
3789 align
= arm_type_align (arg_type
);
3790 /* Round alignment up to a whole number of words. */
3791 align
= (align
+ INT_REGISTER_SIZE
- 1) & ~(INT_REGISTER_SIZE
- 1);
3792 /* Different ABIs have different maximum alignments. */
3793 if (gdbarch_tdep (gdbarch
)->arm_abi
== ARM_ABI_APCS
)
3795 /* The APCS ABI only requires word alignment. */
3796 align
= INT_REGISTER_SIZE
;
3800 /* The AAPCS requires at most doubleword alignment. */
3801 if (align
> INT_REGISTER_SIZE
* 2)
3802 align
= INT_REGISTER_SIZE
* 2;
3806 && arm_vfp_call_candidate (arg_type
, &vfp_base_type
,
3814 /* Because this is a CPRC it cannot go in a core register or
3815 cause a core register to be skipped for alignment.
3816 Either it goes in VFP registers and the rest of this loop
3817 iteration is skipped for this argument, or it goes on the
3818 stack (and the stack alignment code is correct for this
3820 may_use_core_reg
= 0;
3822 unit_length
= arm_vfp_cprc_unit_length (vfp_base_type
);
3823 shift
= unit_length
/ 4;
3824 mask
= (1 << (shift
* vfp_base_count
)) - 1;
3825 for (regno
= 0; regno
< 16; regno
+= shift
)
3826 if (((vfp_regs_free
>> regno
) & mask
) == mask
)
3835 vfp_regs_free
&= ~(mask
<< regno
);
3836 reg_scaled
= regno
/ shift
;
3837 reg_char
= arm_vfp_cprc_reg_char (vfp_base_type
);
3838 for (i
= 0; i
< vfp_base_count
; i
++)
3842 if (reg_char
== 'q')
3843 arm_neon_quad_write (gdbarch
, regcache
, reg_scaled
+ i
,
3844 val
+ i
* unit_length
);
3847 xsnprintf (name_buf
, sizeof (name_buf
), "%c%d",
3848 reg_char
, reg_scaled
+ i
);
3849 regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
3851 regcache_cooked_write (regcache
, regnum
,
3852 val
+ i
* unit_length
);
3859 /* This CPRC could not go in VFP registers, so all VFP
3860 registers are now marked as used. */
3865 /* Push stack padding for doubleword alignment. */
3866 if (nstack
& (align
- 1))
3868 si
= push_stack_item (si
, val
, INT_REGISTER_SIZE
);
3869 nstack
+= INT_REGISTER_SIZE
;
3872 /* Doubleword aligned quantities must go in even register pairs. */
3873 if (may_use_core_reg
3874 && argreg
<= ARM_LAST_ARG_REGNUM
3875 && align
> INT_REGISTER_SIZE
3879 /* If the argument is a pointer to a function, and it is a
3880 Thumb function, create a LOCAL copy of the value and set
3881 the THUMB bit in it. */
3882 if (TYPE_CODE_PTR
== typecode
3883 && target_type
!= NULL
3884 && TYPE_CODE_FUNC
== TYPE_CODE (check_typedef (target_type
)))
3886 CORE_ADDR regval
= extract_unsigned_integer (val
, len
, byte_order
);
3887 if (arm_pc_is_thumb (gdbarch
, regval
))
3889 bfd_byte
*copy
= (bfd_byte
*) alloca (len
);
3890 store_unsigned_integer (copy
, len
, byte_order
,
3891 MAKE_THUMB_ADDR (regval
));
3896 /* Copy the argument to general registers or the stack in
3897 register-sized pieces. Large arguments are split between
3898 registers and stack. */
3901 int partial_len
= len
< INT_REGISTER_SIZE
? len
: INT_REGISTER_SIZE
;
3903 if (may_use_core_reg
&& argreg
<= ARM_LAST_ARG_REGNUM
)
3905 /* The argument is being passed in a general purpose
3908 = extract_unsigned_integer (val
, partial_len
, byte_order
);
3909 if (byte_order
== BFD_ENDIAN_BIG
)
3910 regval
<<= (INT_REGISTER_SIZE
- partial_len
) * 8;
3912 fprintf_unfiltered (gdb_stdlog
, "arg %d in %s = 0x%s\n",
3914 gdbarch_register_name
3916 phex (regval
, INT_REGISTER_SIZE
));
3917 regcache_cooked_write_unsigned (regcache
, argreg
, regval
);
3922 /* Push the arguments onto the stack. */
3924 fprintf_unfiltered (gdb_stdlog
, "arg %d @ sp + %d\n",
3926 si
= push_stack_item (si
, val
, INT_REGISTER_SIZE
);
3927 nstack
+= INT_REGISTER_SIZE
;
3934 /* If we have an odd number of words to push, then decrement the stack
3935 by one word now, so first stack argument will be dword aligned. */
3942 write_memory (sp
, si
->data
, si
->len
);
3943 si
= pop_stack_item (si
);
3946 /* Finally, update the SP register. */
3947 regcache_cooked_write_unsigned (regcache
, ARM_SP_REGNUM
, sp
);
3953 /* Always align the frame to an 8-byte boundary. This is required on
3954 some platforms and harmless on the rest. */
3957 arm_frame_align (struct gdbarch
*gdbarch
, CORE_ADDR sp
)
3959 /* Align the stack to eight bytes. */
3960 return sp
& ~ (CORE_ADDR
) 7;
/* Print the FPA status/exception FLAGS (invalid operation, divide by
   zero, overflow, underflow, inexact) to FILE, one mnemonic per set
   bit, followed by a newline.  */

static void
print_fpu_flags (struct ui_file *file, int flags)
{
  if (flags & (1 << 0))
    fputs_filtered ("IVO ", file);
  if (flags & (1 << 1))
    fputs_filtered ("DVZ ", file);
  if (flags & (1 << 2))
    fputs_filtered ("OFL ", file);
  if (flags & (1 << 3))
    fputs_filtered ("UFL ", file);
  if (flags & (1 << 4))
    fputs_filtered ("INX ", file);
  fputc_filtered ('\n', file);
}
3979 /* Print interesting information about the floating point processor
3980 (if present) or emulator. */
3982 arm_print_float_info (struct gdbarch
*gdbarch
, struct ui_file
*file
,
3983 struct frame_info
*frame
, const char *args
)
3985 unsigned long status
= get_frame_register_unsigned (frame
, ARM_FPS_REGNUM
);
3988 type
= (status
>> 24) & 127;
3989 if (status
& (1 << 31))
3990 fprintf_filtered (file
, _("Hardware FPU type %d\n"), type
);
3992 fprintf_filtered (file
, _("Software FPU type %d\n"), type
);
3993 /* i18n: [floating point unit] mask */
3994 fputs_filtered (_("mask: "), file
);
3995 print_fpu_flags (file
, status
>> 16);
3996 /* i18n: [floating point unit] flags */
3997 fputs_filtered (_("flags: "), file
);
3998 print_fpu_flags (file
, status
);
4001 /* Construct the ARM extended floating point type. */
4002 static struct type
*
4003 arm_ext_type (struct gdbarch
*gdbarch
)
4005 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
4007 if (!tdep
->arm_ext_type
)
4009 = arch_float_type (gdbarch
, -1, "builtin_type_arm_ext",
4010 floatformats_arm_ext
);
4012 return tdep
->arm_ext_type
;
4015 static struct type
*
4016 arm_neon_double_type (struct gdbarch
*gdbarch
)
4018 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
4020 if (tdep
->neon_double_type
== NULL
)
4022 struct type
*t
, *elem
;
4024 t
= arch_composite_type (gdbarch
, "__gdb_builtin_type_neon_d",
4026 elem
= builtin_type (gdbarch
)->builtin_uint8
;
4027 append_composite_type_field (t
, "u8", init_vector_type (elem
, 8));
4028 elem
= builtin_type (gdbarch
)->builtin_uint16
;
4029 append_composite_type_field (t
, "u16", init_vector_type (elem
, 4));
4030 elem
= builtin_type (gdbarch
)->builtin_uint32
;
4031 append_composite_type_field (t
, "u32", init_vector_type (elem
, 2));
4032 elem
= builtin_type (gdbarch
)->builtin_uint64
;
4033 append_composite_type_field (t
, "u64", elem
);
4034 elem
= builtin_type (gdbarch
)->builtin_float
;
4035 append_composite_type_field (t
, "f32", init_vector_type (elem
, 2));
4036 elem
= builtin_type (gdbarch
)->builtin_double
;
4037 append_composite_type_field (t
, "f64", elem
);
4039 TYPE_VECTOR (t
) = 1;
4040 TYPE_NAME (t
) = "neon_d";
4041 tdep
->neon_double_type
= t
;
4044 return tdep
->neon_double_type
;
4047 /* FIXME: The vector types are not correctly ordered on big-endian
4048 targets. Just as s0 is the low bits of d0, d0[0] is also the low
4049 bits of d0 - regardless of what unit size is being held in d0. So
4050 the offset of the first uint8 in d0 is 7, but the offset of the
4051 first float is 4. This code works as-is for little-endian
4054 static struct type
*
4055 arm_neon_quad_type (struct gdbarch
*gdbarch
)
4057 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
4059 if (tdep
->neon_quad_type
== NULL
)
4061 struct type
*t
, *elem
;
4063 t
= arch_composite_type (gdbarch
, "__gdb_builtin_type_neon_q",
4065 elem
= builtin_type (gdbarch
)->builtin_uint8
;
4066 append_composite_type_field (t
, "u8", init_vector_type (elem
, 16));
4067 elem
= builtin_type (gdbarch
)->builtin_uint16
;
4068 append_composite_type_field (t
, "u16", init_vector_type (elem
, 8));
4069 elem
= builtin_type (gdbarch
)->builtin_uint32
;
4070 append_composite_type_field (t
, "u32", init_vector_type (elem
, 4));
4071 elem
= builtin_type (gdbarch
)->builtin_uint64
;
4072 append_composite_type_field (t
, "u64", init_vector_type (elem
, 2));
4073 elem
= builtin_type (gdbarch
)->builtin_float
;
4074 append_composite_type_field (t
, "f32", init_vector_type (elem
, 4));
4075 elem
= builtin_type (gdbarch
)->builtin_double
;
4076 append_composite_type_field (t
, "f64", init_vector_type (elem
, 2));
4078 TYPE_VECTOR (t
) = 1;
4079 TYPE_NAME (t
) = "neon_q";
4080 tdep
->neon_quad_type
= t
;
4083 return tdep
->neon_quad_type
;
4086 /* Return the GDB type object for the "standard" data type of data in
4089 static struct type
*
4090 arm_register_type (struct gdbarch
*gdbarch
, int regnum
)
4092 int num_regs
= gdbarch_num_regs (gdbarch
);
4094 if (gdbarch_tdep (gdbarch
)->have_vfp_pseudos
4095 && regnum
>= num_regs
&& regnum
< num_regs
+ 32)
4096 return builtin_type (gdbarch
)->builtin_float
;
4098 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
4099 && regnum
>= num_regs
+ 32 && regnum
< num_regs
+ 32 + 16)
4100 return arm_neon_quad_type (gdbarch
);
4102 /* If the target description has register information, we are only
4103 in this function so that we can override the types of
4104 double-precision registers for NEON. */
4105 if (tdesc_has_registers (gdbarch_target_desc (gdbarch
)))
4107 struct type
*t
= tdesc_register_type (gdbarch
, regnum
);
4109 if (regnum
>= ARM_D0_REGNUM
&& regnum
< ARM_D0_REGNUM
+ 32
4110 && TYPE_CODE (t
) == TYPE_CODE_FLT
4111 && gdbarch_tdep (gdbarch
)->have_neon
)
4112 return arm_neon_double_type (gdbarch
);
4117 if (regnum
>= ARM_F0_REGNUM
&& regnum
< ARM_F0_REGNUM
+ NUM_FREGS
)
4119 if (!gdbarch_tdep (gdbarch
)->have_fpa_registers
)
4120 return builtin_type (gdbarch
)->builtin_void
;
4122 return arm_ext_type (gdbarch
);
4124 else if (regnum
== ARM_SP_REGNUM
)
4125 return builtin_type (gdbarch
)->builtin_data_ptr
;
4126 else if (regnum
== ARM_PC_REGNUM
)
4127 return builtin_type (gdbarch
)->builtin_func_ptr
;
4128 else if (regnum
>= ARRAY_SIZE (arm_register_names
))
4129 /* These registers are only supported on targets which supply
4130 an XML description. */
4131 return builtin_type (gdbarch
)->builtin_int0
;
4133 return builtin_type (gdbarch
)->builtin_uint32
;
4136 /* Map a DWARF register REGNUM onto the appropriate GDB register
4140 arm_dwarf_reg_to_regnum (struct gdbarch
*gdbarch
, int reg
)
4142 /* Core integer regs. */
4143 if (reg
>= 0 && reg
<= 15)
4146 /* Legacy FPA encoding. These were once used in a way which
4147 overlapped with VFP register numbering, so their use is
4148 discouraged, but GDB doesn't support the ARM toolchain
4149 which used them for VFP. */
4150 if (reg
>= 16 && reg
<= 23)
4151 return ARM_F0_REGNUM
+ reg
- 16;
4153 /* New assignments for the FPA registers. */
4154 if (reg
>= 96 && reg
<= 103)
4155 return ARM_F0_REGNUM
+ reg
- 96;
4157 /* WMMX register assignments. */
4158 if (reg
>= 104 && reg
<= 111)
4159 return ARM_WCGR0_REGNUM
+ reg
- 104;
4161 if (reg
>= 112 && reg
<= 127)
4162 return ARM_WR0_REGNUM
+ reg
- 112;
4164 if (reg
>= 192 && reg
<= 199)
4165 return ARM_WC0_REGNUM
+ reg
- 192;
4167 /* VFP v2 registers. A double precision value is actually
4168 in d1 rather than s2, but the ABI only defines numbering
4169 for the single precision registers. This will "just work"
4170 in GDB for little endian targets (we'll read eight bytes,
4171 starting in s0 and then progressing to s1), but will be
4172 reversed on big endian targets with VFP. This won't
4173 be a problem for the new Neon quad registers; you're supposed
4174 to use DW_OP_piece for those. */
4175 if (reg
>= 64 && reg
<= 95)
4179 xsnprintf (name_buf
, sizeof (name_buf
), "s%d", reg
- 64);
4180 return user_reg_map_name_to_regnum (gdbarch
, name_buf
,
4184 /* VFP v3 / Neon registers. This range is also used for VFP v2
4185 registers, except that it now describes d0 instead of s0. */
4186 if (reg
>= 256 && reg
<= 287)
4190 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", reg
- 256);
4191 return user_reg_map_name_to_regnum (gdbarch
, name_buf
,
4198 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4200 arm_register_sim_regno (struct gdbarch
*gdbarch
, int regnum
)
4203 gdb_assert (reg
>= 0 && reg
< gdbarch_num_regs (gdbarch
));
4205 if (regnum
>= ARM_WR0_REGNUM
&& regnum
<= ARM_WR15_REGNUM
)
4206 return regnum
- ARM_WR0_REGNUM
+ SIM_ARM_IWMMXT_COP0R0_REGNUM
;
4208 if (regnum
>= ARM_WC0_REGNUM
&& regnum
<= ARM_WC7_REGNUM
)
4209 return regnum
- ARM_WC0_REGNUM
+ SIM_ARM_IWMMXT_COP1R0_REGNUM
;
4211 if (regnum
>= ARM_WCGR0_REGNUM
&& regnum
<= ARM_WCGR7_REGNUM
)
4212 return regnum
- ARM_WCGR0_REGNUM
+ SIM_ARM_IWMMXT_COP1R8_REGNUM
;
4214 if (reg
< NUM_GREGS
)
4215 return SIM_ARM_R0_REGNUM
+ reg
;
4218 if (reg
< NUM_FREGS
)
4219 return SIM_ARM_FP0_REGNUM
+ reg
;
4222 if (reg
< NUM_SREGS
)
4223 return SIM_ARM_FPS_REGNUM
+ reg
;
4226 internal_error (__FILE__
, __LINE__
, _("Bad REGNUM %d"), regnum
);
4229 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4230 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4231 It is thought that this is the floating-point register format on
4232 little-endian systems. */
4235 convert_from_extended (const struct floatformat
*fmt
, const void *ptr
,
4236 void *dbl
, int endianess
)
4240 if (endianess
== BFD_ENDIAN_BIG
)
4241 floatformat_to_doublest (&floatformat_arm_ext_big
, ptr
, &d
);
4243 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword
,
4245 floatformat_from_doublest (fmt
, &d
, dbl
);
4249 convert_to_extended (const struct floatformat
*fmt
, void *dbl
, const void *ptr
,
4254 floatformat_to_doublest (fmt
, ptr
, &d
);
4255 if (endianess
== BFD_ENDIAN_BIG
)
4256 floatformat_from_doublest (&floatformat_arm_ext_big
, &d
, dbl
);
4258 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword
,
4263 condition_true (unsigned long cond
, unsigned long status_reg
)
4265 if (cond
== INST_AL
|| cond
== INST_NV
)
4271 return ((status_reg
& FLAG_Z
) != 0);
4273 return ((status_reg
& FLAG_Z
) == 0);
4275 return ((status_reg
& FLAG_C
) != 0);
4277 return ((status_reg
& FLAG_C
) == 0);
4279 return ((status_reg
& FLAG_N
) != 0);
4281 return ((status_reg
& FLAG_N
) == 0);
4283 return ((status_reg
& FLAG_V
) != 0);
4285 return ((status_reg
& FLAG_V
) == 0);
4287 return ((status_reg
& (FLAG_C
| FLAG_Z
)) == FLAG_C
);
4289 return ((status_reg
& (FLAG_C
| FLAG_Z
)) != FLAG_C
);
4291 return (((status_reg
& FLAG_N
) == 0) == ((status_reg
& FLAG_V
) == 0));
4293 return (((status_reg
& FLAG_N
) == 0) != ((status_reg
& FLAG_V
) == 0));
4295 return (((status_reg
& FLAG_Z
) == 0)
4296 && (((status_reg
& FLAG_N
) == 0)
4297 == ((status_reg
& FLAG_V
) == 0)));
4299 return (((status_reg
& FLAG_Z
) != 0)
4300 || (((status_reg
& FLAG_N
) == 0)
4301 != ((status_reg
& FLAG_V
) == 0)));
4306 static unsigned long
4307 shifted_reg_val (struct frame_info
*frame
, unsigned long inst
, int carry
,
4308 unsigned long pc_val
, unsigned long status_reg
)
4310 unsigned long res
, shift
;
4311 int rm
= bits (inst
, 0, 3);
4312 unsigned long shifttype
= bits (inst
, 5, 6);
4316 int rs
= bits (inst
, 8, 11);
4317 shift
= (rs
== 15 ? pc_val
+ 8
4318 : get_frame_register_unsigned (frame
, rs
)) & 0xFF;
4321 shift
= bits (inst
, 7, 11);
4323 res
= (rm
== ARM_PC_REGNUM
4324 ? (pc_val
+ (bit (inst
, 4) ? 12 : 8))
4325 : get_frame_register_unsigned (frame
, rm
));
4330 res
= shift
>= 32 ? 0 : res
<< shift
;
4334 res
= shift
>= 32 ? 0 : res
>> shift
;
4340 res
= ((res
& 0x80000000L
)
4341 ? ~((~res
) >> shift
) : res
>> shift
);
4344 case 3: /* ROR/RRX */
4347 res
= (res
>> 1) | (carry
? 0x80000000L
: 0);
4349 res
= (res
>> shift
) | (res
<< (32 - shift
));
4353 return res
& 0xffffffff;
/* Return number of 1-bits in VAL.  Uses Kernighan's trick: each
   iteration clears the rightmost set bit, so the loop runs once per
   set bit rather than once per bit position.  */

static int
bitcount (unsigned long val)
{
  int nbits;

  for (nbits = 0; val != 0; nbits++)
    val &= val - 1;		/* Delete rightmost 1-bit in val.  */
  return nbits;
}
/* Return the size in bytes of the complete Thumb instruction whose
   first halfword is INST1.  A halfword whose top five bits are
   0b11101, 0b11110 or 0b11111 is the first half of a 32-bit Thumb-2
   instruction; everything else is a 16-bit instruction.  */

static int
thumb_insn_size (unsigned short inst1)
{
  if ((inst1 & 0xe000) == 0xe000 && (inst1 & 0x1800) != 0)
    return 4;
  else
    return 2;
}
/* Advance the Thumb-2 ITSTATE value past one instruction and return
   the new value.  */

static unsigned int
thumb_advance_itstate (unsigned int itstate)
{
  /* Preserve IT[7:5], the first three bits of the condition.  Shift
     the upcoming condition flags left by one bit.  */
  itstate = (itstate & 0xe0) | ((itstate << 1) & 0x1f);

  /* If we have finished the IT block, clear the state.  */
  if ((itstate & 0x0f) == 0)
    itstate = 0;

  return itstate;
}
4393 /* Find the next PC after the current instruction executes. In some
4394 cases we can not statically determine the answer (see the IT state
4395 handling in this function); in that case, a breakpoint may be
4396 inserted in addition to the returned PC, which will be used to set
4397 another breakpoint by our caller. */
4400 thumb_get_next_pc_raw (struct frame_info
*frame
, CORE_ADDR pc
)
4402 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
4403 struct address_space
*aspace
= get_frame_address_space (frame
);
4404 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
4405 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
4406 unsigned long pc_val
= ((unsigned long) pc
) + 4; /* PC after prefetch */
4407 unsigned short inst1
;
4408 CORE_ADDR nextpc
= pc
+ 2; /* Default is next instruction. */
4409 unsigned long offset
;
4410 ULONGEST status
, itstate
;
4412 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4413 pc_val
= MAKE_THUMB_ADDR (pc_val
);
4415 inst1
= read_memory_unsigned_integer (pc
, 2, byte_order_for_code
);
4417 /* Thumb-2 conditional execution support. There are eight bits in
4418 the CPSR which describe conditional execution state. Once
4419 reconstructed (they're in a funny order), the low five bits
4420 describe the low bit of the condition for each instruction and
4421 how many instructions remain. The high three bits describe the
4422 base condition. One of the low four bits will be set if an IT
4423 block is active. These bits read as zero on earlier
4425 status
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
4426 itstate
= ((status
>> 8) & 0xfc) | ((status
>> 25) & 0x3);
4428 /* If-Then handling. On GNU/Linux, where this routine is used, we
4429 use an undefined instruction as a breakpoint. Unlike BKPT, IT
4430 can disable execution of the undefined instruction. So we might
4431 miss the breakpoint if we set it on a skipped conditional
4432 instruction. Because conditional instructions can change the
4433 flags, affecting the execution of further instructions, we may
4434 need to set two breakpoints. */
4436 if (gdbarch_tdep (gdbarch
)->thumb2_breakpoint
!= NULL
)
4438 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
4440 /* An IT instruction. Because this instruction does not
4441 modify the flags, we can accurately predict the next
4442 executed instruction. */
4443 itstate
= inst1
& 0x00ff;
4444 pc
+= thumb_insn_size (inst1
);
4446 while (itstate
!= 0 && ! condition_true (itstate
>> 4, status
))
4448 inst1
= read_memory_unsigned_integer (pc
, 2,
4449 byte_order_for_code
);
4450 pc
+= thumb_insn_size (inst1
);
4451 itstate
= thumb_advance_itstate (itstate
);
4454 return MAKE_THUMB_ADDR (pc
);
4456 else if (itstate
!= 0)
4458 /* We are in a conditional block. Check the condition. */
4459 if (! condition_true (itstate
>> 4, status
))
4461 /* Advance to the next executed instruction. */
4462 pc
+= thumb_insn_size (inst1
);
4463 itstate
= thumb_advance_itstate (itstate
);
4465 while (itstate
!= 0 && ! condition_true (itstate
>> 4, status
))
4467 inst1
= read_memory_unsigned_integer (pc
, 2,
4468 byte_order_for_code
);
4469 pc
+= thumb_insn_size (inst1
);
4470 itstate
= thumb_advance_itstate (itstate
);
4473 return MAKE_THUMB_ADDR (pc
);
4475 else if ((itstate
& 0x0f) == 0x08)
4477 /* This is the last instruction of the conditional
4478 block, and it is executed. We can handle it normally
4479 because the following instruction is not conditional,
4480 and we must handle it normally because it is
4481 permitted to branch. Fall through. */
4487 /* There are conditional instructions after this one.
4488 If this instruction modifies the flags, then we can
4489 not predict what the next executed instruction will
4490 be. Fortunately, this instruction is architecturally
4491 forbidden to branch; we know it will fall through.
4492 Start by skipping past it. */
4493 pc
+= thumb_insn_size (inst1
);
4494 itstate
= thumb_advance_itstate (itstate
);
4496 /* Set a breakpoint on the following instruction. */
4497 gdb_assert ((itstate
& 0x0f) != 0);
4498 arm_insert_single_step_breakpoint (gdbarch
, aspace
,
4499 MAKE_THUMB_ADDR (pc
));
4500 cond_negated
= (itstate
>> 4) & 1;
4502 /* Skip all following instructions with the same
4503 condition. If there is a later instruction in the IT
4504 block with the opposite condition, set the other
4505 breakpoint there. If not, then set a breakpoint on
4506 the instruction after the IT block. */
4509 inst1
= read_memory_unsigned_integer (pc
, 2,
4510 byte_order_for_code
);
4511 pc
+= thumb_insn_size (inst1
);
4512 itstate
= thumb_advance_itstate (itstate
);
4514 while (itstate
!= 0 && ((itstate
>> 4) & 1) == cond_negated
);
4516 return MAKE_THUMB_ADDR (pc
);
4520 else if (itstate
& 0x0f)
4522 /* We are in a conditional block. Check the condition. */
4523 int cond
= itstate
>> 4;
4525 if (! condition_true (cond
, status
))
4526 /* Advance to the next instruction. All the 32-bit
4527 instructions share a common prefix. */
4528 return MAKE_THUMB_ADDR (pc
+ thumb_insn_size (inst1
));
4530 /* Otherwise, handle the instruction normally. */
4533 if ((inst1
& 0xff00) == 0xbd00) /* pop {rlist, pc} */
4537 /* Fetch the saved PC from the stack. It's stored above
4538 all of the other registers. */
4539 offset
= bitcount (bits (inst1
, 0, 7)) * INT_REGISTER_SIZE
;
4540 sp
= get_frame_register_unsigned (frame
, ARM_SP_REGNUM
);
4541 nextpc
= read_memory_unsigned_integer (sp
+ offset
, 4, byte_order
);
4543 else if ((inst1
& 0xf000) == 0xd000) /* conditional branch */
4545 unsigned long cond
= bits (inst1
, 8, 11);
4546 if (cond
== 0x0f) /* 0x0f = SWI */
4548 struct gdbarch_tdep
*tdep
;
4549 tdep
= gdbarch_tdep (gdbarch
);
4551 if (tdep
->syscall_next_pc
!= NULL
)
4552 nextpc
= tdep
->syscall_next_pc (frame
);
4555 else if (cond
!= 0x0f && condition_true (cond
, status
))
4556 nextpc
= pc_val
+ (sbits (inst1
, 0, 7) << 1);
4558 else if ((inst1
& 0xf800) == 0xe000) /* unconditional branch */
4560 nextpc
= pc_val
+ (sbits (inst1
, 0, 10) << 1);
4562 else if (thumb_insn_size (inst1
) == 4) /* 32-bit instruction */
4564 unsigned short inst2
;
4565 inst2
= read_memory_unsigned_integer (pc
+ 2, 2, byte_order_for_code
);
4567 /* Default to the next instruction. */
4569 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4571 if ((inst1
& 0xf800) == 0xf000 && (inst2
& 0x8000) == 0x8000)
4573 /* Branches and miscellaneous control instructions. */
4575 if ((inst2
& 0x1000) != 0 || (inst2
& 0xd001) == 0xc000)
4578 int j1
, j2
, imm1
, imm2
;
4580 imm1
= sbits (inst1
, 0, 10);
4581 imm2
= bits (inst2
, 0, 10);
4582 j1
= bit (inst2
, 13);
4583 j2
= bit (inst2
, 11);
4585 offset
= ((imm1
<< 12) + (imm2
<< 1));
4586 offset
^= ((!j2
) << 22) | ((!j1
) << 23);
4588 nextpc
= pc_val
+ offset
;
4589 /* For BLX make sure to clear the low bits. */
4590 if (bit (inst2
, 12) == 0)
4591 nextpc
= nextpc
& 0xfffffffc;
4593 else if (inst1
== 0xf3de && (inst2
& 0xff00) == 0x3f00)
4595 /* SUBS PC, LR, #imm8. */
4596 nextpc
= get_frame_register_unsigned (frame
, ARM_LR_REGNUM
);
4597 nextpc
-= inst2
& 0x00ff;
4599 else if ((inst2
& 0xd000) == 0x8000 && (inst1
& 0x0380) != 0x0380)
4601 /* Conditional branch. */
4602 if (condition_true (bits (inst1
, 6, 9), status
))
4604 int sign
, j1
, j2
, imm1
, imm2
;
4606 sign
= sbits (inst1
, 10, 10);
4607 imm1
= bits (inst1
, 0, 5);
4608 imm2
= bits (inst2
, 0, 10);
4609 j1
= bit (inst2
, 13);
4610 j2
= bit (inst2
, 11);
4612 offset
= (sign
<< 20) + (j2
<< 19) + (j1
<< 18);
4613 offset
+= (imm1
<< 12) + (imm2
<< 1);
4615 nextpc
= pc_val
+ offset
;
4619 else if ((inst1
& 0xfe50) == 0xe810)
4621 /* Load multiple or RFE. */
4622 int rn
, offset
, load_pc
= 1;
4624 rn
= bits (inst1
, 0, 3);
4625 if (bit (inst1
, 7) && !bit (inst1
, 8))
4628 if (!bit (inst2
, 15))
4630 offset
= bitcount (inst2
) * 4 - 4;
4632 else if (!bit (inst1
, 7) && bit (inst1
, 8))
4635 if (!bit (inst2
, 15))
4639 else if (bit (inst1
, 7) && bit (inst1
, 8))
4644 else if (!bit (inst1
, 7) && !bit (inst1
, 8))
4654 CORE_ADDR addr
= get_frame_register_unsigned (frame
, rn
);
4655 nextpc
= get_frame_memory_unsigned (frame
, addr
+ offset
, 4);
4658 else if ((inst1
& 0xffef) == 0xea4f && (inst2
& 0xfff0) == 0x0f00)
4660 /* MOV PC or MOVS PC. */
4661 nextpc
= get_frame_register_unsigned (frame
, bits (inst2
, 0, 3));
4662 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4664 else if ((inst1
& 0xff70) == 0xf850 && (inst2
& 0xf000) == 0xf000)
4668 int rn
, load_pc
= 1;
4670 rn
= bits (inst1
, 0, 3);
4671 base
= get_frame_register_unsigned (frame
, rn
);
4672 if (rn
== ARM_PC_REGNUM
)
4674 base
= (base
+ 4) & ~(CORE_ADDR
) 0x3;
4676 base
+= bits (inst2
, 0, 11);
4678 base
-= bits (inst2
, 0, 11);
4680 else if (bit (inst1
, 7))
4681 base
+= bits (inst2
, 0, 11);
4682 else if (bit (inst2
, 11))
4684 if (bit (inst2
, 10))
4687 base
+= bits (inst2
, 0, 7);
4689 base
-= bits (inst2
, 0, 7);
4692 else if ((inst2
& 0x0fc0) == 0x0000)
4694 int shift
= bits (inst2
, 4, 5), rm
= bits (inst2
, 0, 3);
4695 base
+= get_frame_register_unsigned (frame
, rm
) << shift
;
4702 nextpc
= get_frame_memory_unsigned (frame
, base
, 4);
4704 else if ((inst1
& 0xfff0) == 0xe8d0 && (inst2
& 0xfff0) == 0xf000)
4707 CORE_ADDR tbl_reg
, table
, offset
, length
;
4709 tbl_reg
= bits (inst1
, 0, 3);
4710 if (tbl_reg
== 0x0f)
4711 table
= pc
+ 4; /* Regcache copy of PC isn't right yet. */
4713 table
= get_frame_register_unsigned (frame
, tbl_reg
);
4715 offset
= get_frame_register_unsigned (frame
, bits (inst2
, 0, 3));
4716 length
= 2 * get_frame_memory_unsigned (frame
, table
+ offset
, 1);
4717 nextpc
= pc_val
+ length
;
4719 else if ((inst1
& 0xfff0) == 0xe8d0 && (inst2
& 0xfff0) == 0xf010)
4722 CORE_ADDR tbl_reg
, table
, offset
, length
;
4724 tbl_reg
= bits (inst1
, 0, 3);
4725 if (tbl_reg
== 0x0f)
4726 table
= pc
+ 4; /* Regcache copy of PC isn't right yet. */
4728 table
= get_frame_register_unsigned (frame
, tbl_reg
);
4730 offset
= 2 * get_frame_register_unsigned (frame
, bits (inst2
, 0, 3));
4731 length
= 2 * get_frame_memory_unsigned (frame
, table
+ offset
, 2);
4732 nextpc
= pc_val
+ length
;
4735 else if ((inst1
& 0xff00) == 0x4700) /* bx REG, blx REG */
4737 if (bits (inst1
, 3, 6) == 0x0f)
4738 nextpc
= UNMAKE_THUMB_ADDR (pc_val
);
4740 nextpc
= get_frame_register_unsigned (frame
, bits (inst1
, 3, 6));
4742 else if ((inst1
& 0xff87) == 0x4687) /* mov pc, REG */
4744 if (bits (inst1
, 3, 6) == 0x0f)
4747 nextpc
= get_frame_register_unsigned (frame
, bits (inst1
, 3, 6));
4749 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4751 else if ((inst1
& 0xf500) == 0xb100)
4754 int imm
= (bit (inst1
, 9) << 6) + (bits (inst1
, 3, 7) << 1);
4755 ULONGEST reg
= get_frame_register_unsigned (frame
, bits (inst1
, 0, 2));
4757 if (bit (inst1
, 11) && reg
!= 0)
4758 nextpc
= pc_val
+ imm
;
4759 else if (!bit (inst1
, 11) && reg
== 0)
4760 nextpc
= pc_val
+ imm
;
4765 /* Get the raw next address. PC is the current program counter, in
4766 FRAME, which is assumed to be executing in ARM mode.
4768 The value returned has the execution state of the next instruction
4769 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
4770 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
4774 arm_get_next_pc_raw (struct frame_info
*frame
, CORE_ADDR pc
)
4776 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
4777 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
4778 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
4779 unsigned long pc_val
;
4780 unsigned long this_instr
;
4781 unsigned long status
;
4784 pc_val
= (unsigned long) pc
;
4785 this_instr
= read_memory_unsigned_integer (pc
, 4, byte_order_for_code
);
4787 status
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
4788 nextpc
= (CORE_ADDR
) (pc_val
+ 4); /* Default case */
4790 if (bits (this_instr
, 28, 31) == INST_NV
)
4791 switch (bits (this_instr
, 24, 27))
4796 /* Branch with Link and change to Thumb. */
4797 nextpc
= BranchDest (pc
, this_instr
);
4798 nextpc
|= bit (this_instr
, 24) << 1;
4799 nextpc
= MAKE_THUMB_ADDR (nextpc
);
4805 /* Coprocessor register transfer. */
4806 if (bits (this_instr
, 12, 15) == 15)
4807 error (_("Invalid update to pc in instruction"));
4810 else if (condition_true (bits (this_instr
, 28, 31), status
))
4812 switch (bits (this_instr
, 24, 27))
4815 case 0x1: /* data processing */
4819 unsigned long operand1
, operand2
, result
= 0;
4823 if (bits (this_instr
, 12, 15) != 15)
4826 if (bits (this_instr
, 22, 25) == 0
4827 && bits (this_instr
, 4, 7) == 9) /* multiply */
4828 error (_("Invalid update to pc in instruction"));
4830 /* BX <reg>, BLX <reg> */
4831 if (bits (this_instr
, 4, 27) == 0x12fff1
4832 || bits (this_instr
, 4, 27) == 0x12fff3)
4834 rn
= bits (this_instr
, 0, 3);
4835 nextpc
= ((rn
== ARM_PC_REGNUM
)
4837 : get_frame_register_unsigned (frame
, rn
));
4842 /* Multiply into PC. */
4843 c
= (status
& FLAG_C
) ? 1 : 0;
4844 rn
= bits (this_instr
, 16, 19);
4845 operand1
= ((rn
== ARM_PC_REGNUM
)
4847 : get_frame_register_unsigned (frame
, rn
));
4849 if (bit (this_instr
, 25))
4851 unsigned long immval
= bits (this_instr
, 0, 7);
4852 unsigned long rotate
= 2 * bits (this_instr
, 8, 11);
4853 operand2
= ((immval
>> rotate
) | (immval
<< (32 - rotate
)))
4856 else /* operand 2 is a shifted register. */
4857 operand2
= shifted_reg_val (frame
, this_instr
, c
,
4860 switch (bits (this_instr
, 21, 24))
4863 result
= operand1
& operand2
;
4867 result
= operand1
^ operand2
;
4871 result
= operand1
- operand2
;
4875 result
= operand2
- operand1
;
4879 result
= operand1
+ operand2
;
4883 result
= operand1
+ operand2
+ c
;
4887 result
= operand1
- operand2
+ c
;
4891 result
= operand2
- operand1
+ c
;
4897 case 0xb: /* tst, teq, cmp, cmn */
4898 result
= (unsigned long) nextpc
;
4902 result
= operand1
| operand2
;
4906 /* Always step into a function. */
4911 result
= operand1
& ~operand2
;
4919 /* In 26-bit APCS the bottom two bits of the result are
4920 ignored, and we always end up in ARM state. */
4922 nextpc
= arm_addr_bits_remove (gdbarch
, result
);
4930 case 0x5: /* data transfer */
4933 if (bits (this_instr
, 25, 27) == 0x3 && bit (this_instr
, 4) == 1)
4935 /* Media instructions and architecturally undefined
4940 if (bit (this_instr
, 20))
4943 if (bits (this_instr
, 12, 15) == 15)
4949 if (bit (this_instr
, 22))
4950 error (_("Invalid update to pc in instruction"));
4952 /* byte write to PC */
4953 rn
= bits (this_instr
, 16, 19);
4954 base
= ((rn
== ARM_PC_REGNUM
)
4956 : get_frame_register_unsigned (frame
, rn
));
4958 if (bit (this_instr
, 24))
4961 int c
= (status
& FLAG_C
) ? 1 : 0;
4962 unsigned long offset
=
4963 (bit (this_instr
, 25)
4964 ? shifted_reg_val (frame
, this_instr
, c
, pc_val
, status
)
4965 : bits (this_instr
, 0, 11));
4967 if (bit (this_instr
, 23))
4973 (CORE_ADDR
) read_memory_unsigned_integer ((CORE_ADDR
) base
,
4980 case 0x9: /* block transfer */
4981 if (bit (this_instr
, 20))
4984 if (bit (this_instr
, 15))
4988 unsigned long rn_val
4989 = get_frame_register_unsigned (frame
,
4990 bits (this_instr
, 16, 19));
4992 if (bit (this_instr
, 23))
4995 unsigned long reglist
= bits (this_instr
, 0, 14);
4996 offset
= bitcount (reglist
) * 4;
4997 if (bit (this_instr
, 24)) /* pre */
5000 else if (bit (this_instr
, 24))
5004 (CORE_ADDR
) read_memory_unsigned_integer ((CORE_ADDR
)
5011 case 0xb: /* branch & link */
5012 case 0xa: /* branch */
5014 nextpc
= BranchDest (pc
, this_instr
);
5020 case 0xe: /* coproc ops */
5024 struct gdbarch_tdep
*tdep
;
5025 tdep
= gdbarch_tdep (gdbarch
);
5027 if (tdep
->syscall_next_pc
!= NULL
)
5028 nextpc
= tdep
->syscall_next_pc (frame
);
5034 fprintf_filtered (gdb_stderr
, _("Bad bit-field extraction\n"));
5042 /* Determine next PC after current instruction executes. Will call either
5043 arm_get_next_pc_raw or thumb_get_next_pc_raw. Error out if infinite
5044 loop is detected. */
5047 arm_get_next_pc (struct frame_info
*frame
, CORE_ADDR pc
)
5051 if (arm_frame_is_thumb (frame
))
5052 nextpc
= thumb_get_next_pc_raw (frame
, pc
);
5054 nextpc
= arm_get_next_pc_raw (frame
, pc
);
5059 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
5060 of the appropriate mode (as encoded in the PC value), even if this
5061 differs from what would be expected according to the symbol tables. */
5064 arm_insert_single_step_breakpoint (struct gdbarch
*gdbarch
,
5065 struct address_space
*aspace
,
5068 struct cleanup
*old_chain
5069 = make_cleanup_restore_integer (&arm_override_mode
);
5071 arm_override_mode
= IS_THUMB_ADDR (pc
);
5072 pc
= gdbarch_addr_bits_remove (gdbarch
, pc
);
5074 insert_single_step_breakpoint (gdbarch
, aspace
, pc
);
5076 do_cleanups (old_chain
);
5079 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
5080 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
5081 is found, attempt to step through it. A breakpoint is placed at the end of
5085 thumb_deal_with_atomic_sequence_raw (struct frame_info
*frame
)
5087 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
5088 struct address_space
*aspace
= get_frame_address_space (frame
);
5089 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
5090 CORE_ADDR pc
= get_frame_pc (frame
);
5091 CORE_ADDR breaks
[2] = {-1, -1};
5093 unsigned short insn1
, insn2
;
5096 int last_breakpoint
= 0; /* Defaults to 0 (no breakpoints placed). */
5097 const int atomic_sequence_length
= 16; /* Instruction sequence length. */
5098 ULONGEST status
, itstate
;
5100 /* We currently do not support atomic sequences within an IT block. */
5101 status
= get_frame_register_unsigned (frame
, ARM_PS_REGNUM
);
5102 itstate
= ((status
>> 8) & 0xfc) | ((status
>> 25) & 0x3);
5106 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
5107 insn1
= read_memory_unsigned_integer (loc
, 2, byte_order_for_code
);
5109 if (thumb_insn_size (insn1
) != 4)
5112 insn2
= read_memory_unsigned_integer (loc
, 2, byte_order_for_code
);
5114 if (!((insn1
& 0xfff0) == 0xe850
5115 || ((insn1
& 0xfff0) == 0xe8d0 && (insn2
& 0x00c0) == 0x0040)))
5118 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5120 for (insn_count
= 0; insn_count
< atomic_sequence_length
; ++insn_count
)
5122 insn1
= read_memory_unsigned_integer (loc
, 2, byte_order_for_code
);
5125 if (thumb_insn_size (insn1
) != 4)
5127 /* Assume that there is at most one conditional branch in the
5128 atomic sequence. If a conditional branch is found, put a
5129 breakpoint in its destination address. */
5130 if ((insn1
& 0xf000) == 0xd000 && bits (insn1
, 8, 11) != 0x0f)
5132 if (last_breakpoint
> 0)
5133 return 0; /* More than one conditional branch found,
5134 fallback to the standard code. */
5136 breaks
[1] = loc
+ 2 + (sbits (insn1
, 0, 7) << 1);
5140 /* We do not support atomic sequences that use any *other*
5141 instructions but conditional branches to change the PC.
5142 Fall back to standard code to avoid losing control of
5144 else if (thumb_instruction_changes_pc (insn1
))
5149 insn2
= read_memory_unsigned_integer (loc
, 2, byte_order_for_code
);
5152 /* Assume that there is at most one conditional branch in the
5153 atomic sequence. If a conditional branch is found, put a
5154 breakpoint in its destination address. */
5155 if ((insn1
& 0xf800) == 0xf000
5156 && (insn2
& 0xd000) == 0x8000
5157 && (insn1
& 0x0380) != 0x0380)
5159 int sign
, j1
, j2
, imm1
, imm2
;
5160 unsigned int offset
;
5162 sign
= sbits (insn1
, 10, 10);
5163 imm1
= bits (insn1
, 0, 5);
5164 imm2
= bits (insn2
, 0, 10);
5165 j1
= bit (insn2
, 13);
5166 j2
= bit (insn2
, 11);
5168 offset
= (sign
<< 20) + (j2
<< 19) + (j1
<< 18);
5169 offset
+= (imm1
<< 12) + (imm2
<< 1);
5171 if (last_breakpoint
> 0)
5172 return 0; /* More than one conditional branch found,
5173 fallback to the standard code. */
5175 breaks
[1] = loc
+ offset
;
5179 /* We do not support atomic sequences that use any *other*
5180 instructions but conditional branches to change the PC.
5181 Fall back to standard code to avoid losing control of
5183 else if (thumb2_instruction_changes_pc (insn1
, insn2
))
5186 /* If we find a strex{,b,h,d}, we're done. */
5187 if ((insn1
& 0xfff0) == 0xe840
5188 || ((insn1
& 0xfff0) == 0xe8c0 && (insn2
& 0x00c0) == 0x0040))
5193 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5194 if (insn_count
== atomic_sequence_length
)
5197 /* Insert a breakpoint right after the end of the atomic sequence. */
5200 /* Check for duplicated breakpoints. Check also for a breakpoint
5201 placed (branch instruction's destination) anywhere in sequence. */
5203 && (breaks
[1] == breaks
[0]
5204 || (breaks
[1] >= pc
&& breaks
[1] < loc
)))
5205 last_breakpoint
= 0;
5207 /* Effectively inserts the breakpoints. */
5208 for (index
= 0; index
<= last_breakpoint
; index
++)
5209 arm_insert_single_step_breakpoint (gdbarch
, aspace
,
5210 MAKE_THUMB_ADDR (breaks
[index
]));
5216 arm_deal_with_atomic_sequence_raw (struct frame_info
*frame
)
5218 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
5219 struct address_space
*aspace
= get_frame_address_space (frame
);
5220 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
5221 CORE_ADDR pc
= get_frame_pc (frame
);
5222 CORE_ADDR breaks
[2] = {-1, -1};
5227 int last_breakpoint
= 0; /* Defaults to 0 (no breakpoints placed). */
5228 const int atomic_sequence_length
= 16; /* Instruction sequence length. */
5230 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
5231 Note that we do not currently support conditionally executed atomic
5233 insn
= read_memory_unsigned_integer (loc
, 4, byte_order_for_code
);
5235 if ((insn
& 0xff9000f0) != 0xe1900090)
5238 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
5240 for (insn_count
= 0; insn_count
< atomic_sequence_length
; ++insn_count
)
5242 insn
= read_memory_unsigned_integer (loc
, 4, byte_order_for_code
);
5245 /* Assume that there is at most one conditional branch in the atomic
5246 sequence. If a conditional branch is found, put a breakpoint in
5247 its destination address. */
5248 if (bits (insn
, 24, 27) == 0xa)
5250 if (last_breakpoint
> 0)
5251 return 0; /* More than one conditional branch found, fallback
5252 to the standard single-step code. */
5254 breaks
[1] = BranchDest (loc
- 4, insn
);
5258 /* We do not support atomic sequences that use any *other* instructions
5259 but conditional branches to change the PC. Fall back to standard
5260 code to avoid losing control of execution. */
5261 else if (arm_instruction_changes_pc (insn
))
5264 /* If we find a strex{,b,h,d}, we're done. */
5265 if ((insn
& 0xff9000f0) == 0xe1800090)
5269 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
5270 if (insn_count
== atomic_sequence_length
)
5273 /* Insert a breakpoint right after the end of the atomic sequence. */
5276 /* Check for duplicated breakpoints. Check also for a breakpoint
5277 placed (branch instruction's destination) anywhere in sequence. */
5279 && (breaks
[1] == breaks
[0]
5280 || (breaks
[1] >= pc
&& breaks
[1] < loc
)))
5281 last_breakpoint
= 0;
5283 /* Effectively inserts the breakpoints. */
5284 for (index
= 0; index
<= last_breakpoint
; index
++)
5285 arm_insert_single_step_breakpoint (gdbarch
, aspace
, breaks
[index
]);
/* Dispatch atomic-sequence handling to the Thumb or ARM decoder depending
   on the execution state of FRAME.  Returns nonzero if the sequence was
   handled (breakpoints placed), zero otherwise.  */

static int
arm_deal_with_atomic_sequence (struct frame_info *frame)
{
  if (arm_frame_is_thumb (frame))
    return thumb_deal_with_atomic_sequence_raw (frame);
  else
    return arm_deal_with_atomic_sequence_raw (frame);
}
5299 /* single_step() is called just before we want to resume the inferior,
5300 if we want to single-step it but there is no hardware or kernel
5301 single-step support. We find the target of the coming instruction
5302 and breakpoint it. */
5305 arm_software_single_step (struct frame_info
*frame
)
5307 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
5308 struct address_space
*aspace
= get_frame_address_space (frame
);
5311 if (arm_deal_with_atomic_sequence (frame
))
5314 next_pc
= arm_get_next_pc (frame
, get_frame_pc (frame
));
5315 arm_insert_single_step_breakpoint (gdbarch
, aspace
, next_pc
);
5320 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5321 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5322 NULL if an error occurs. BUF is freed. */
5325 extend_buffer_earlier (gdb_byte
*buf
, CORE_ADDR endaddr
,
5326 int old_len
, int new_len
)
5329 int bytes_to_read
= new_len
- old_len
;
5331 new_buf
= (gdb_byte
*) xmalloc (new_len
);
5332 memcpy (new_buf
+ bytes_to_read
, buf
, old_len
);
5334 if (target_read_memory (endaddr
- new_len
, new_buf
, bytes_to_read
) != 0)
/* An IT block is at most the 2-byte IT instruction followed by
   four 4-byte instructions.  The furthest back we must search to
   find an IT block that affects the current instruction is thus
   2 + 3 * 4 == 14 bytes.  */
#define MAX_IT_BLOCK_PREFIX 14

/* Use a quick scan if there are more than this many bytes of
   code.  */
#define IT_SCAN_THRESHOLD 32
5352 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5353 A breakpoint in an IT block may not be hit, depending on the
5356 arm_adjust_breakpoint_address (struct gdbarch
*gdbarch
, CORE_ADDR bpaddr
)
5360 CORE_ADDR boundary
, func_start
;
5362 enum bfd_endian order
= gdbarch_byte_order_for_code (gdbarch
);
5363 int i
, any
, last_it
, last_it_count
;
5365 /* If we are using BKPT breakpoints, none of this is necessary. */
5366 if (gdbarch_tdep (gdbarch
)->thumb2_breakpoint
== NULL
)
5369 /* ARM mode does not have this problem. */
5370 if (!arm_pc_is_thumb (gdbarch
, bpaddr
))
5373 /* We are setting a breakpoint in Thumb code that could potentially
5374 contain an IT block. The first step is to find how much Thumb
5375 code there is; we do not need to read outside of known Thumb
5377 map_type
= arm_find_mapping_symbol (bpaddr
, &boundary
);
5379 /* Thumb-2 code must have mapping symbols to have a chance. */
5382 bpaddr
= gdbarch_addr_bits_remove (gdbarch
, bpaddr
);
5384 if (find_pc_partial_function (bpaddr
, NULL
, &func_start
, NULL
)
5385 && func_start
> boundary
)
5386 boundary
= func_start
;
5388 /* Search for a candidate IT instruction. We have to do some fancy
5389 footwork to distinguish a real IT instruction from the second
5390 half of a 32-bit instruction, but there is no need for that if
5391 there's no candidate. */
5392 buf_len
= min (bpaddr
- boundary
, MAX_IT_BLOCK_PREFIX
);
5394 /* No room for an IT instruction. */
5397 buf
= (gdb_byte
*) xmalloc (buf_len
);
5398 if (target_read_memory (bpaddr
- buf_len
, buf
, buf_len
) != 0)
5401 for (i
= 0; i
< buf_len
; i
+= 2)
5403 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
5404 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
5416 /* OK, the code bytes before this instruction contain at least one
5417 halfword which resembles an IT instruction. We know that it's
5418 Thumb code, but there are still two possibilities. Either the
5419 halfword really is an IT instruction, or it is the second half of
5420 a 32-bit Thumb instruction. The only way we can tell is to
5421 scan forwards from a known instruction boundary. */
5422 if (bpaddr
- boundary
> IT_SCAN_THRESHOLD
)
5426 /* There's a lot of code before this instruction. Start with an
5427 optimistic search; it's easy to recognize halfwords that can
5428 not be the start of a 32-bit instruction, and use that to
5429 lock on to the instruction boundaries. */
5430 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
, IT_SCAN_THRESHOLD
);
5433 buf_len
= IT_SCAN_THRESHOLD
;
5436 for (i
= 0; i
< buf_len
- sizeof (buf
) && ! definite
; i
+= 2)
5438 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
5439 if (thumb_insn_size (inst1
) == 2)
5446 /* At this point, if DEFINITE, BUF[I] is the first place we
5447 are sure that we know the instruction boundaries, and it is far
5448 enough from BPADDR that we could not miss an IT instruction
5449 affecting BPADDR. If ! DEFINITE, give up - start from a
5453 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
,
5457 buf_len
= bpaddr
- boundary
;
5463 buf
= extend_buffer_earlier (buf
, bpaddr
, buf_len
, bpaddr
- boundary
);
5466 buf_len
= bpaddr
- boundary
;
5470 /* Scan forwards. Find the last IT instruction before BPADDR. */
5475 unsigned short inst1
= extract_unsigned_integer (&buf
[i
], 2, order
);
5477 if ((inst1
& 0xff00) == 0xbf00 && (inst1
& 0x000f) != 0)
5482 else if (inst1
& 0x0002)
5484 else if (inst1
& 0x0004)
5489 i
+= thumb_insn_size (inst1
);
5495 /* There wasn't really an IT instruction after all. */
5498 if (last_it_count
< 1)
5499 /* It was too far away. */
5502 /* This really is a trouble spot. Move the breakpoint to the IT
5504 return bpaddr
- buf_len
+ last_it
;
/* ARM displaced stepping support.

   Generally ARM displaced stepping works as follows:

   1. When an instruction is to be single-stepped, it is first decoded by
      arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
      Depending on the type of instruction, it is then copied to a scratch
      location, possibly in a modified form.  The copy_* set of functions
      performs such modification, as necessary.  A breakpoint is placed after
      the modified instruction in the scratch space to return control to GDB.
      Note in particular that instructions which modify the PC will no longer
      do so after modification.

   2. The instruction is single-stepped, by setting the PC to the scratch
      location address, and resuming.  Control returns to GDB when the
      breakpoint is hit.

   3. A cleanup function (cleanup_*) is called corresponding to the copy_*
      function used for the current instruction.  This function's job is to
      put the CPU/memory state back to what it would have been if the
      instruction had been executed unmodified in its original location.  */

/* NOP instruction (mov r0, r0).  */
#define ARM_NOP		0xe1a00000
#define THUMB_NOP	0x4600
5533 /* Helper for register reads for displaced stepping. In particular, this
5534 returns the PC as it would be seen by the instruction at its original
5538 displaced_read_reg (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5542 CORE_ADDR from
= dsc
->insn_addr
;
5544 if (regno
== ARM_PC_REGNUM
)
5546 /* Compute pipeline offset:
5547 - When executing an ARM instruction, PC reads as the address of the
5548 current instruction plus 8.
5549 - When executing a Thumb instruction, PC reads as the address of the
5550 current instruction plus 4. */
5557 if (debug_displaced
)
5558 fprintf_unfiltered (gdb_stdlog
, "displaced: read pc value %.8lx\n",
5559 (unsigned long) from
);
5560 return (ULONGEST
) from
;
5564 regcache_cooked_read_unsigned (regs
, regno
, &ret
);
5565 if (debug_displaced
)
5566 fprintf_unfiltered (gdb_stdlog
, "displaced: read r%d value %.8lx\n",
5567 regno
, (unsigned long) ret
);
5573 displaced_in_arm_mode (struct regcache
*regs
)
5576 ULONGEST t_bit
= arm_psr_thumb_bit (get_regcache_arch (regs
));
5578 regcache_cooked_read_unsigned (regs
, ARM_PS_REGNUM
, &ps
);
5580 return (ps
& t_bit
) == 0;
5583 /* Write to the PC as from a branch instruction. */
5586 branch_write_pc (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5590 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5591 architecture versions < 6. */
5592 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
5593 val
& ~(ULONGEST
) 0x3);
5595 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
5596 val
& ~(ULONGEST
) 0x1);
5599 /* Write to the PC as from a branch-exchange instruction. */
5602 bx_write_pc (struct regcache
*regs
, ULONGEST val
)
5605 ULONGEST t_bit
= arm_psr_thumb_bit (get_regcache_arch (regs
));
5607 regcache_cooked_read_unsigned (regs
, ARM_PS_REGNUM
, &ps
);
5611 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
| t_bit
);
5612 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
& 0xfffffffe);
5614 else if ((val
& 2) == 0)
5616 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
& ~t_bit
);
5617 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
);
5621 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5622 mode, align dest to 4 bytes). */
5623 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5624 regcache_cooked_write_unsigned (regs
, ARM_PS_REGNUM
, ps
& ~t_bit
);
5625 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
, val
& 0xfffffffc);
5629 /* Write to the PC as if from a load instruction. */
5632 load_write_pc (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5635 if (DISPLACED_STEPPING_ARCH_VERSION
>= 5)
5636 bx_write_pc (regs
, val
);
5638 branch_write_pc (regs
, dsc
, val
);
5641 /* Write to the PC as if from an ALU instruction. */
5644 alu_write_pc (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5647 if (DISPLACED_STEPPING_ARCH_VERSION
>= 7 && !dsc
->is_thumb
)
5648 bx_write_pc (regs
, val
);
5650 branch_write_pc (regs
, dsc
, val
);
5653 /* Helper for writing to registers for displaced stepping. Writing to the PC
5654 has a varying effects depending on the instruction which does the write:
5655 this is controlled by the WRITE_PC argument. */
5658 displaced_write_reg (struct regcache
*regs
, struct displaced_step_closure
*dsc
,
5659 int regno
, ULONGEST val
, enum pc_write_style write_pc
)
5661 if (regno
== ARM_PC_REGNUM
)
5663 if (debug_displaced
)
5664 fprintf_unfiltered (gdb_stdlog
, "displaced: writing pc %.8lx\n",
5665 (unsigned long) val
);
5668 case BRANCH_WRITE_PC
:
5669 branch_write_pc (regs
, dsc
, val
);
5673 bx_write_pc (regs
, val
);
5677 load_write_pc (regs
, dsc
, val
);
5681 alu_write_pc (regs
, dsc
, val
);
5684 case CANNOT_WRITE_PC
:
5685 warning (_("Instruction wrote to PC in an unexpected way when "
5686 "single-stepping"));
5690 internal_error (__FILE__
, __LINE__
,
5691 _("Invalid argument to displaced_write_reg"));
5694 dsc
->wrote_to_pc
= 1;
5698 if (debug_displaced
)
5699 fprintf_unfiltered (gdb_stdlog
, "displaced: writing r%d value %.8lx\n",
5700 regno
, (unsigned long) val
);
5701 regcache_cooked_write_unsigned (regs
, regno
, val
);
/* This function is used to concisely determine if an instruction INSN
   references PC.  Register fields of interest in INSN should have the
   corresponding fields of BITMASK set to 0b1111.  The function
   returns 1 if any of these fields in INSN reference the PC
   (also 0b1111, r15), else it returns 0.  */

static int
insn_references_pc (uint32_t insn, uint32_t bitmask)
{
  uint32_t lowbit = 1;

  while (bitmask != 0)
    {
      uint32_t mask;

      /* Advance to the lowest set bit of BITMASK; a nibble field starts
	 there.  LOWBIT becomes 0 if it shifts off the top.  */
      for (; lowbit && (bitmask & lowbit) == 0; lowbit <<= 1)
	;

      if (!lowbit)
	return 0;

      /* The 4-bit register field anchored at LOWBIT.  */
      mask = lowbit * 0xf;

      if ((insn & mask) == mask)
	return 1;

      bitmask &= ~mask;
    }

  return 0;
}
5737 /* The simplest copy function. Many instructions have the same effect no
5738 matter what address they are executed at: in those cases, use this. */
5741 arm_copy_unmodified (struct gdbarch
*gdbarch
, uint32_t insn
,
5742 const char *iname
, struct displaced_step_closure
*dsc
)
5744 if (debug_displaced
)
5745 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.8lx, "
5746 "opcode/class '%s' unmodified\n", (unsigned long) insn
,
5749 dsc
->modinsn
[0] = insn
;
5755 thumb_copy_unmodified_32bit (struct gdbarch
*gdbarch
, uint16_t insn1
,
5756 uint16_t insn2
, const char *iname
,
5757 struct displaced_step_closure
*dsc
)
5759 if (debug_displaced
)
5760 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x %.4x, "
5761 "opcode/class '%s' unmodified\n", insn1
, insn2
,
5764 dsc
->modinsn
[0] = insn1
;
5765 dsc
->modinsn
[1] = insn2
;
5771 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
5774 thumb_copy_unmodified_16bit (struct gdbarch
*gdbarch
, unsigned int insn
,
5776 struct displaced_step_closure
*dsc
)
5778 if (debug_displaced
)
5779 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x, "
5780 "opcode/class '%s' unmodified\n", insn
,
5783 dsc
->modinsn
[0] = insn
;
5788 /* Preload instructions with immediate offset. */
5791 cleanup_preload (struct gdbarch
*gdbarch
,
5792 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
5794 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5795 if (!dsc
->u
.preload
.immed
)
5796 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
5800 install_preload (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5801 struct displaced_step_closure
*dsc
, unsigned int rn
)
5804 /* Preload instructions:
5806 {pli/pld} [rn, #+/-imm]
5808 {pli/pld} [r0, #+/-imm]. */
5810 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5811 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5812 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
5813 dsc
->u
.preload
.immed
= 1;
5815 dsc
->cleanup
= &cleanup_preload
;
5819 arm_copy_preload (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
5820 struct displaced_step_closure
*dsc
)
5822 unsigned int rn
= bits (insn
, 16, 19);
5824 if (!insn_references_pc (insn
, 0x000f0000ul
))
5825 return arm_copy_unmodified (gdbarch
, insn
, "preload", dsc
);
5827 if (debug_displaced
)
5828 fprintf_unfiltered (gdb_stdlog
, "displaced: copying preload insn %.8lx\n",
5829 (unsigned long) insn
);
5831 dsc
->modinsn
[0] = insn
& 0xfff0ffff;
5833 install_preload (gdbarch
, regs
, dsc
, rn
);
5839 thumb2_copy_preload (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
5840 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
5842 unsigned int rn
= bits (insn1
, 0, 3);
5843 unsigned int u_bit
= bit (insn1
, 7);
5844 int imm12
= bits (insn2
, 0, 11);
5847 if (rn
!= ARM_PC_REGNUM
)
5848 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "preload", dsc
);
5850 /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
5851 PLD (literal) Encoding T1. */
5852 if (debug_displaced
)
5853 fprintf_unfiltered (gdb_stdlog
,
5854 "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
5855 (unsigned int) dsc
->insn_addr
, u_bit
? '+' : '-',
5861 /* Rewrite instruction {pli/pld} PC imm12 into:
5862 Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12
5866 Cleanup: r0 <- tmp[0], r1 <- tmp[1]. */
5868 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5869 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5871 pc_val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
5873 displaced_write_reg (regs
, dsc
, 0, pc_val
, CANNOT_WRITE_PC
);
5874 displaced_write_reg (regs
, dsc
, 1, imm12
, CANNOT_WRITE_PC
);
5875 dsc
->u
.preload
.immed
= 0;
5877 /* {pli/pld} [r0, r1] */
5878 dsc
->modinsn
[0] = insn1
& 0xfff0;
5879 dsc
->modinsn
[1] = 0xf001;
5882 dsc
->cleanup
= &cleanup_preload
;
5886 /* Preload instructions with register offset. */
5889 install_preload_reg(struct gdbarch
*gdbarch
, struct regcache
*regs
,
5890 struct displaced_step_closure
*dsc
, unsigned int rn
,
5893 ULONGEST rn_val
, rm_val
;
5895 /* Preload register-offset instructions:
5897 {pli/pld} [rn, rm {, shift}]
5899 {pli/pld} [r0, r1 {, shift}]. */
5901 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5902 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
5903 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5904 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
5905 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
5906 displaced_write_reg (regs
, dsc
, 1, rm_val
, CANNOT_WRITE_PC
);
5907 dsc
->u
.preload
.immed
= 0;
5909 dsc
->cleanup
= &cleanup_preload
;
5913 arm_copy_preload_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
5914 struct regcache
*regs
,
5915 struct displaced_step_closure
*dsc
)
5917 unsigned int rn
= bits (insn
, 16, 19);
5918 unsigned int rm
= bits (insn
, 0, 3);
5921 if (!insn_references_pc (insn
, 0x000f000ful
))
5922 return arm_copy_unmodified (gdbarch
, insn
, "preload reg", dsc
);
5924 if (debug_displaced
)
5925 fprintf_unfiltered (gdb_stdlog
, "displaced: copying preload insn %.8lx\n",
5926 (unsigned long) insn
);
5928 dsc
->modinsn
[0] = (insn
& 0xfff0fff0) | 0x1;
5930 install_preload_reg (gdbarch
, regs
, dsc
, rn
, rm
);
5934 /* Copy/cleanup coprocessor load and store instructions. */
5937 cleanup_copro_load_store (struct gdbarch
*gdbarch
,
5938 struct regcache
*regs
,
5939 struct displaced_step_closure
*dsc
)
5941 ULONGEST rn_val
= displaced_read_reg (regs
, dsc
, 0);
5943 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
5945 if (dsc
->u
.ldst
.writeback
)
5946 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, LOAD_WRITE_PC
);
5950 install_copro_load_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
5951 struct displaced_step_closure
*dsc
,
5952 int writeback
, unsigned int rn
)
5956 /* Coprocessor load/store instructions:
5958 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5960 {stc/stc2} [r0, #+/-imm].
5962 ldc/ldc2 are handled identically. */
5964 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
5965 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
5966 /* PC should be 4-byte aligned. */
5967 rn_val
= rn_val
& 0xfffffffc;
5968 displaced_write_reg (regs
, dsc
, 0, rn_val
, CANNOT_WRITE_PC
);
5970 dsc
->u
.ldst
.writeback
= writeback
;
5971 dsc
->u
.ldst
.rn
= rn
;
5973 dsc
->cleanup
= &cleanup_copro_load_store
;
5977 arm_copy_copro_load_store (struct gdbarch
*gdbarch
, uint32_t insn
,
5978 struct regcache
*regs
,
5979 struct displaced_step_closure
*dsc
)
5981 unsigned int rn
= bits (insn
, 16, 19);
5983 if (!insn_references_pc (insn
, 0x000f0000ul
))
5984 return arm_copy_unmodified (gdbarch
, insn
, "copro load/store", dsc
);
5986 if (debug_displaced
)
5987 fprintf_unfiltered (gdb_stdlog
, "displaced: copying coprocessor "
5988 "load/store insn %.8lx\n", (unsigned long) insn
);
5990 dsc
->modinsn
[0] = insn
& 0xfff0ffff;
5992 install_copro_load_store (gdbarch
, regs
, dsc
, bit (insn
, 25), rn
);
5998 thumb2_copy_copro_load_store (struct gdbarch
*gdbarch
, uint16_t insn1
,
5999 uint16_t insn2
, struct regcache
*regs
,
6000 struct displaced_step_closure
*dsc
)
6002 unsigned int rn
= bits (insn1
, 0, 3);
6004 if (rn
!= ARM_PC_REGNUM
)
6005 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
6006 "copro load/store", dsc
);
6008 if (debug_displaced
)
6009 fprintf_unfiltered (gdb_stdlog
, "displaced: copying coprocessor "
6010 "load/store insn %.4x%.4x\n", insn1
, insn2
);
6012 dsc
->modinsn
[0] = insn1
& 0xfff0;
6013 dsc
->modinsn
[1] = insn2
;
6016 /* This function is called for copying instruction LDC/LDC2/VLDR, which
6017 doesn't support writeback, so pass 0. */
6018 install_copro_load_store (gdbarch
, regs
, dsc
, 0, rn
);
6023 /* Clean up branch instructions (actually perform the branch, by setting
6027 cleanup_branch (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6028 struct displaced_step_closure
*dsc
)
6030 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
6031 int branch_taken
= condition_true (dsc
->u
.branch
.cond
, status
);
6032 enum pc_write_style write_pc
= dsc
->u
.branch
.exchange
6033 ? BX_WRITE_PC
: BRANCH_WRITE_PC
;
6038 if (dsc
->u
.branch
.link
)
6040 /* The value of LR should be the next insn of current one. In order
6041 not to confuse logic hanlding later insn `bx lr', if current insn mode
6042 is Thumb, the bit 0 of LR value should be set to 1. */
6043 ULONGEST next_insn_addr
= dsc
->insn_addr
+ dsc
->insn_size
;
6046 next_insn_addr
|= 0x1;
6048 displaced_write_reg (regs
, dsc
, ARM_LR_REGNUM
, next_insn_addr
,
6052 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, dsc
->u
.branch
.dest
, write_pc
);
6055 /* Copy B/BL/BLX instructions with immediate destinations. */
6058 install_b_bl_blx (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6059 struct displaced_step_closure
*dsc
,
6060 unsigned int cond
, int exchange
, int link
, long offset
)
6062 /* Implement "BL<cond> <label>" as:
6064 Preparation: cond <- instruction condition
6065 Insn: mov r0, r0 (nop)
6066 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
6068 B<cond> similar, but don't set r14 in cleanup. */
6070 dsc
->u
.branch
.cond
= cond
;
6071 dsc
->u
.branch
.link
= link
;
6072 dsc
->u
.branch
.exchange
= exchange
;
6074 dsc
->u
.branch
.dest
= dsc
->insn_addr
;
6075 if (link
&& exchange
)
6076 /* For BLX, offset is computed from the Align (PC, 4). */
6077 dsc
->u
.branch
.dest
= dsc
->u
.branch
.dest
& 0xfffffffc;
6080 dsc
->u
.branch
.dest
+= 4 + offset
;
6082 dsc
->u
.branch
.dest
+= 8 + offset
;
6084 dsc
->cleanup
= &cleanup_branch
;
6087 arm_copy_b_bl_blx (struct gdbarch
*gdbarch
, uint32_t insn
,
6088 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
6090 unsigned int cond
= bits (insn
, 28, 31);
6091 int exchange
= (cond
== 0xf);
6092 int link
= exchange
|| bit (insn
, 24);
6095 if (debug_displaced
)
6096 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %s immediate insn "
6097 "%.8lx\n", (exchange
) ? "blx" : (link
) ? "bl" : "b",
6098 (unsigned long) insn
);
6100 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
6101 then arrange the switch into Thumb mode. */
6102 offset
= (bits (insn
, 0, 23) << 2) | (bit (insn
, 24) << 1) | 1;
6104 offset
= bits (insn
, 0, 23) << 2;
6106 if (bit (offset
, 25))
6107 offset
= offset
| ~0x3ffffff;
6109 dsc
->modinsn
[0] = ARM_NOP
;
6111 install_b_bl_blx (gdbarch
, regs
, dsc
, cond
, exchange
, link
, offset
);
6116 thumb2_copy_b_bl_blx (struct gdbarch
*gdbarch
, uint16_t insn1
,
6117 uint16_t insn2
, struct regcache
*regs
,
6118 struct displaced_step_closure
*dsc
)
6120 int link
= bit (insn2
, 14);
6121 int exchange
= link
&& !bit (insn2
, 12);
6124 int j1
= bit (insn2
, 13);
6125 int j2
= bit (insn2
, 11);
6126 int s
= sbits (insn1
, 10, 10);
6127 int i1
= !(j1
^ bit (insn1
, 10));
6128 int i2
= !(j2
^ bit (insn1
, 10));
6130 if (!link
&& !exchange
) /* B */
6132 offset
= (bits (insn2
, 0, 10) << 1);
6133 if (bit (insn2
, 12)) /* Encoding T4 */
6135 offset
|= (bits (insn1
, 0, 9) << 12)
6141 else /* Encoding T3 */
6143 offset
|= (bits (insn1
, 0, 5) << 12)
6147 cond
= bits (insn1
, 6, 9);
6152 offset
= (bits (insn1
, 0, 9) << 12);
6153 offset
|= ((i2
<< 22) | (i1
<< 23) | (s
<< 24));
6154 offset
|= exchange
?
6155 (bits (insn2
, 1, 10) << 2) : (bits (insn2
, 0, 10) << 1);
6158 if (debug_displaced
)
6159 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %s insn "
6160 "%.4x %.4x with offset %.8lx\n",
6161 link
? (exchange
) ? "blx" : "bl" : "b",
6162 insn1
, insn2
, offset
);
6164 dsc
->modinsn
[0] = THUMB_NOP
;
6166 install_b_bl_blx (gdbarch
, regs
, dsc
, cond
, exchange
, link
, offset
);
6170 /* Copy B Thumb instructions. */
6172 thumb_copy_b (struct gdbarch
*gdbarch
, unsigned short insn
,
6173 struct displaced_step_closure
*dsc
)
6175 unsigned int cond
= 0;
6177 unsigned short bit_12_15
= bits (insn
, 12, 15);
6178 CORE_ADDR from
= dsc
->insn_addr
;
6180 if (bit_12_15
== 0xd)
6182 /* offset = SignExtend (imm8:0, 32) */
6183 offset
= sbits ((insn
<< 1), 0, 8);
6184 cond
= bits (insn
, 8, 11);
6186 else if (bit_12_15
== 0xe) /* Encoding T2 */
6188 offset
= sbits ((insn
<< 1), 0, 11);
6192 if (debug_displaced
)
6193 fprintf_unfiltered (gdb_stdlog
,
6194 "displaced: copying b immediate insn %.4x "
6195 "with offset %d\n", insn
, offset
);
6197 dsc
->u
.branch
.cond
= cond
;
6198 dsc
->u
.branch
.link
= 0;
6199 dsc
->u
.branch
.exchange
= 0;
6200 dsc
->u
.branch
.dest
= from
+ 4 + offset
;
6202 dsc
->modinsn
[0] = THUMB_NOP
;
6204 dsc
->cleanup
= &cleanup_branch
;
6209 /* Copy BX/BLX with register-specified destinations. */
6212 install_bx_blx_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6213 struct displaced_step_closure
*dsc
, int link
,
6214 unsigned int cond
, unsigned int rm
)
6216 /* Implement {BX,BLX}<cond> <reg>" as:
6218 Preparation: cond <- instruction condition
6219 Insn: mov r0, r0 (nop)
6220 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6222 Don't set r14 in cleanup for BX. */
6224 dsc
->u
.branch
.dest
= displaced_read_reg (regs
, dsc
, rm
);
6226 dsc
->u
.branch
.cond
= cond
;
6227 dsc
->u
.branch
.link
= link
;
6229 dsc
->u
.branch
.exchange
= 1;
6231 dsc
->cleanup
= &cleanup_branch
;
6235 arm_copy_bx_blx_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
6236 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
6238 unsigned int cond
= bits (insn
, 28, 31);
6241 int link
= bit (insn
, 5);
6242 unsigned int rm
= bits (insn
, 0, 3);
6244 if (debug_displaced
)
6245 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.8lx",
6246 (unsigned long) insn
);
6248 dsc
->modinsn
[0] = ARM_NOP
;
6250 install_bx_blx_reg (gdbarch
, regs
, dsc
, link
, cond
, rm
);
6255 thumb_copy_bx_blx_reg (struct gdbarch
*gdbarch
, uint16_t insn
,
6256 struct regcache
*regs
,
6257 struct displaced_step_closure
*dsc
)
6259 int link
= bit (insn
, 7);
6260 unsigned int rm
= bits (insn
, 3, 6);
6262 if (debug_displaced
)
6263 fprintf_unfiltered (gdb_stdlog
, "displaced: copying insn %.4x",
6264 (unsigned short) insn
);
6266 dsc
->modinsn
[0] = THUMB_NOP
;
6268 install_bx_blx_reg (gdbarch
, regs
, dsc
, link
, INST_AL
, rm
);
6274 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
6277 cleanup_alu_imm (struct gdbarch
*gdbarch
,
6278 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
6280 ULONGEST rd_val
= displaced_read_reg (regs
, dsc
, 0);
6281 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
6282 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
6283 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
6287 arm_copy_alu_imm (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
6288 struct displaced_step_closure
*dsc
)
6290 unsigned int rn
= bits (insn
, 16, 19);
6291 unsigned int rd
= bits (insn
, 12, 15);
6292 unsigned int op
= bits (insn
, 21, 24);
6293 int is_mov
= (op
== 0xd);
6294 ULONGEST rd_val
, rn_val
;
6296 if (!insn_references_pc (insn
, 0x000ff000ul
))
6297 return arm_copy_unmodified (gdbarch
, insn
, "ALU immediate", dsc
);
6299 if (debug_displaced
)
6300 fprintf_unfiltered (gdb_stdlog
, "displaced: copying immediate %s insn "
6301 "%.8lx\n", is_mov
? "move" : "ALU",
6302 (unsigned long) insn
);
6304 /* Instruction is of form:
6306 <op><cond> rd, [rn,] #imm
6310 Preparation: tmp1, tmp2 <- r0, r1;
6312 Insn: <op><cond> r0, r1, #imm
6313 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6316 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6317 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
6318 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6319 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
6320 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
6321 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
6325 dsc
->modinsn
[0] = insn
& 0xfff00fff;
6327 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x10000;
6329 dsc
->cleanup
= &cleanup_alu_imm
;
6335 thumb2_copy_alu_imm (struct gdbarch
*gdbarch
, uint16_t insn1
,
6336 uint16_t insn2
, struct regcache
*regs
,
6337 struct displaced_step_closure
*dsc
)
6339 unsigned int op
= bits (insn1
, 5, 8);
6340 unsigned int rn
, rm
, rd
;
6341 ULONGEST rd_val
, rn_val
;
6343 rn
= bits (insn1
, 0, 3); /* Rn */
6344 rm
= bits (insn2
, 0, 3); /* Rm */
6345 rd
= bits (insn2
, 8, 11); /* Rd */
6347 /* This routine is only called for instruction MOV. */
6348 gdb_assert (op
== 0x2 && rn
== 0xf);
6350 if (rm
!= ARM_PC_REGNUM
&& rd
!= ARM_PC_REGNUM
)
6351 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "ALU imm", dsc
);
6353 if (debug_displaced
)
6354 fprintf_unfiltered (gdb_stdlog
, "displaced: copying reg %s insn %.4x%.4x\n",
6355 "ALU", insn1
, insn2
);
6357 /* Instruction is of form:
6359 <op><cond> rd, [rn,] #imm
6363 Preparation: tmp1, tmp2 <- r0, r1;
6365 Insn: <op><cond> r0, r1, #imm
6366 Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
6369 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6370 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
6371 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6372 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
6373 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
6374 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
6377 dsc
->modinsn
[0] = insn1
;
6378 dsc
->modinsn
[1] = ((insn2
& 0xf0f0) | 0x1);
6381 dsc
->cleanup
= &cleanup_alu_imm
;
6386 /* Copy/cleanup arithmetic/logic insns with register RHS. */
6389 cleanup_alu_reg (struct gdbarch
*gdbarch
,
6390 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
6395 rd_val
= displaced_read_reg (regs
, dsc
, 0);
6397 for (i
= 0; i
< 3; i
++)
6398 displaced_write_reg (regs
, dsc
, i
, dsc
->tmp
[i
], CANNOT_WRITE_PC
);
6400 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
6404 install_alu_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6405 struct displaced_step_closure
*dsc
,
6406 unsigned int rd
, unsigned int rn
, unsigned int rm
)
6408 ULONGEST rd_val
, rn_val
, rm_val
;
6410 /* Instruction is of form:
6412 <op><cond> rd, [rn,] rm [, <shift>]
6416 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6417 r0, r1, r2 <- rd, rn, rm
6418 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
6419 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6422 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6423 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
6424 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
6425 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
6426 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6427 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
6428 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
6429 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
6430 displaced_write_reg (regs
, dsc
, 2, rm_val
, CANNOT_WRITE_PC
);
6433 dsc
->cleanup
= &cleanup_alu_reg
;
6437 arm_copy_alu_reg (struct gdbarch
*gdbarch
, uint32_t insn
, struct regcache
*regs
,
6438 struct displaced_step_closure
*dsc
)
6440 unsigned int op
= bits (insn
, 21, 24);
6441 int is_mov
= (op
== 0xd);
6443 if (!insn_references_pc (insn
, 0x000ff00ful
))
6444 return arm_copy_unmodified (gdbarch
, insn
, "ALU reg", dsc
);
6446 if (debug_displaced
)
6447 fprintf_unfiltered (gdb_stdlog
, "displaced: copying reg %s insn %.8lx\n",
6448 is_mov
? "move" : "ALU", (unsigned long) insn
);
6451 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x2;
6453 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x10002;
6455 install_alu_reg (gdbarch
, regs
, dsc
, bits (insn
, 12, 15), bits (insn
, 16, 19),
6461 thumb_copy_alu_reg (struct gdbarch
*gdbarch
, uint16_t insn
,
6462 struct regcache
*regs
,
6463 struct displaced_step_closure
*dsc
)
6467 rm
= bits (insn
, 3, 6);
6468 rd
= (bit (insn
, 7) << 3) | bits (insn
, 0, 2);
6470 if (rd
!= ARM_PC_REGNUM
&& rm
!= ARM_PC_REGNUM
)
6471 return thumb_copy_unmodified_16bit (gdbarch
, insn
, "ALU reg", dsc
);
6473 if (debug_displaced
)
6474 fprintf_unfiltered (gdb_stdlog
, "displaced: copying ALU reg insn %.4x\n",
6475 (unsigned short) insn
);
6477 dsc
->modinsn
[0] = ((insn
& 0xff00) | 0x10);
6479 install_alu_reg (gdbarch
, regs
, dsc
, rd
, rd
, rm
);
6484 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6487 cleanup_alu_shifted_reg (struct gdbarch
*gdbarch
,
6488 struct regcache
*regs
,
6489 struct displaced_step_closure
*dsc
)
6491 ULONGEST rd_val
= displaced_read_reg (regs
, dsc
, 0);
6494 for (i
= 0; i
< 4; i
++)
6495 displaced_write_reg (regs
, dsc
, i
, dsc
->tmp
[i
], CANNOT_WRITE_PC
);
6497 displaced_write_reg (regs
, dsc
, dsc
->rd
, rd_val
, ALU_WRITE_PC
);
6501 install_alu_shifted_reg (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6502 struct displaced_step_closure
*dsc
,
6503 unsigned int rd
, unsigned int rn
, unsigned int rm
,
6507 ULONGEST rd_val
, rn_val
, rm_val
, rs_val
;
6509 /* Instruction is of form:
6511 <op><cond> rd, [rn,] rm, <shift> rs
6515 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6516 r0, r1, r2, r3 <- rd, rn, rm, rs
6517 Insn: <op><cond> r0, r1, r2, <shift> r3
6519 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6523 for (i
= 0; i
< 4; i
++)
6524 dsc
->tmp
[i
] = displaced_read_reg (regs
, dsc
, i
);
6526 rd_val
= displaced_read_reg (regs
, dsc
, rd
);
6527 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6528 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
6529 rs_val
= displaced_read_reg (regs
, dsc
, rs
);
6530 displaced_write_reg (regs
, dsc
, 0, rd_val
, CANNOT_WRITE_PC
);
6531 displaced_write_reg (regs
, dsc
, 1, rn_val
, CANNOT_WRITE_PC
);
6532 displaced_write_reg (regs
, dsc
, 2, rm_val
, CANNOT_WRITE_PC
);
6533 displaced_write_reg (regs
, dsc
, 3, rs_val
, CANNOT_WRITE_PC
);
6535 dsc
->cleanup
= &cleanup_alu_shifted_reg
;
6539 arm_copy_alu_shifted_reg (struct gdbarch
*gdbarch
, uint32_t insn
,
6540 struct regcache
*regs
,
6541 struct displaced_step_closure
*dsc
)
6543 unsigned int op
= bits (insn
, 21, 24);
6544 int is_mov
= (op
== 0xd);
6545 unsigned int rd
, rn
, rm
, rs
;
6547 if (!insn_references_pc (insn
, 0x000fff0ful
))
6548 return arm_copy_unmodified (gdbarch
, insn
, "ALU shifted reg", dsc
);
6550 if (debug_displaced
)
6551 fprintf_unfiltered (gdb_stdlog
, "displaced: copying shifted reg %s insn "
6552 "%.8lx\n", is_mov
? "move" : "ALU",
6553 (unsigned long) insn
);
6555 rn
= bits (insn
, 16, 19);
6556 rm
= bits (insn
, 0, 3);
6557 rs
= bits (insn
, 8, 11);
6558 rd
= bits (insn
, 12, 15);
6561 dsc
->modinsn
[0] = (insn
& 0xfff000f0) | 0x302;
6563 dsc
->modinsn
[0] = (insn
& 0xfff000f0) | 0x10302;
6565 install_alu_shifted_reg (gdbarch
, regs
, dsc
, rd
, rn
, rm
, rs
);
6570 /* Clean up load instructions. */
6573 cleanup_load (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6574 struct displaced_step_closure
*dsc
)
6576 ULONGEST rt_val
, rt_val2
= 0, rn_val
;
6578 rt_val
= displaced_read_reg (regs
, dsc
, 0);
6579 if (dsc
->u
.ldst
.xfersize
== 8)
6580 rt_val2
= displaced_read_reg (regs
, dsc
, 1);
6581 rn_val
= displaced_read_reg (regs
, dsc
, 2);
6583 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
6584 if (dsc
->u
.ldst
.xfersize
> 4)
6585 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
6586 displaced_write_reg (regs
, dsc
, 2, dsc
->tmp
[2], CANNOT_WRITE_PC
);
6587 if (!dsc
->u
.ldst
.immed
)
6588 displaced_write_reg (regs
, dsc
, 3, dsc
->tmp
[3], CANNOT_WRITE_PC
);
6590 /* Handle register writeback. */
6591 if (dsc
->u
.ldst
.writeback
)
6592 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, CANNOT_WRITE_PC
);
6593 /* Put result in right place. */
6594 displaced_write_reg (regs
, dsc
, dsc
->rd
, rt_val
, LOAD_WRITE_PC
);
6595 if (dsc
->u
.ldst
.xfersize
== 8)
6596 displaced_write_reg (regs
, dsc
, dsc
->rd
+ 1, rt_val2
, LOAD_WRITE_PC
);
6599 /* Clean up store instructions. */
6602 cleanup_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6603 struct displaced_step_closure
*dsc
)
6605 ULONGEST rn_val
= displaced_read_reg (regs
, dsc
, 2);
6607 displaced_write_reg (regs
, dsc
, 0, dsc
->tmp
[0], CANNOT_WRITE_PC
);
6608 if (dsc
->u
.ldst
.xfersize
> 4)
6609 displaced_write_reg (regs
, dsc
, 1, dsc
->tmp
[1], CANNOT_WRITE_PC
);
6610 displaced_write_reg (regs
, dsc
, 2, dsc
->tmp
[2], CANNOT_WRITE_PC
);
6611 if (!dsc
->u
.ldst
.immed
)
6612 displaced_write_reg (regs
, dsc
, 3, dsc
->tmp
[3], CANNOT_WRITE_PC
);
6613 if (!dsc
->u
.ldst
.restore_r4
)
6614 displaced_write_reg (regs
, dsc
, 4, dsc
->tmp
[4], CANNOT_WRITE_PC
);
6617 if (dsc
->u
.ldst
.writeback
)
6618 displaced_write_reg (regs
, dsc
, dsc
->u
.ldst
.rn
, rn_val
, CANNOT_WRITE_PC
);
6621 /* Copy "extra" load/store instructions. These are halfword/doubleword
6622 transfers, which have a different encoding to byte/word transfers. */
6625 arm_copy_extra_ld_st (struct gdbarch
*gdbarch
, uint32_t insn
, int unpriveleged
,
6626 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
6628 unsigned int op1
= bits (insn
, 20, 24);
6629 unsigned int op2
= bits (insn
, 5, 6);
6630 unsigned int rt
= bits (insn
, 12, 15);
6631 unsigned int rn
= bits (insn
, 16, 19);
6632 unsigned int rm
= bits (insn
, 0, 3);
6633 char load
[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
6634 char bytesize
[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
6635 int immed
= (op1
& 0x4) != 0;
6637 ULONGEST rt_val
, rt_val2
= 0, rn_val
, rm_val
= 0;
6639 if (!insn_references_pc (insn
, 0x000ff00ful
))
6640 return arm_copy_unmodified (gdbarch
, insn
, "extra load/store", dsc
);
6642 if (debug_displaced
)
6643 fprintf_unfiltered (gdb_stdlog
, "displaced: copying %sextra load/store "
6644 "insn %.8lx\n", unpriveleged
? "unpriveleged " : "",
6645 (unsigned long) insn
);
6647 opcode
= ((op2
<< 2) | (op1
& 0x1) | ((op1
& 0x4) >> 1)) - 4;
6650 internal_error (__FILE__
, __LINE__
,
6651 _("copy_extra_ld_st: instruction decode error"));
6653 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6654 dsc
->tmp
[1] = displaced_read_reg (regs
, dsc
, 1);
6655 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
6657 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
6659 rt_val
= displaced_read_reg (regs
, dsc
, rt
);
6660 if (bytesize
[opcode
] == 8)
6661 rt_val2
= displaced_read_reg (regs
, dsc
, rt
+ 1);
6662 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6664 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
6666 displaced_write_reg (regs
, dsc
, 0, rt_val
, CANNOT_WRITE_PC
);
6667 if (bytesize
[opcode
] == 8)
6668 displaced_write_reg (regs
, dsc
, 1, rt_val2
, CANNOT_WRITE_PC
);
6669 displaced_write_reg (regs
, dsc
, 2, rn_val
, CANNOT_WRITE_PC
);
6671 displaced_write_reg (regs
, dsc
, 3, rm_val
, CANNOT_WRITE_PC
);
6674 dsc
->u
.ldst
.xfersize
= bytesize
[opcode
];
6675 dsc
->u
.ldst
.rn
= rn
;
6676 dsc
->u
.ldst
.immed
= immed
;
6677 dsc
->u
.ldst
.writeback
= bit (insn
, 24) == 0 || bit (insn
, 21) != 0;
6678 dsc
->u
.ldst
.restore_r4
= 0;
6681 /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
6683 {ldr,str}<width><cond> r0, [r1,] [r2, #imm]. */
6684 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x20000;
6686 /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
6688 {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3]. */
6689 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x20003;
6691 dsc
->cleanup
= load
[opcode
] ? &cleanup_load
: &cleanup_store
;
6696 /* Copy byte/half word/word loads and stores. */
6699 install_load_store (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6700 struct displaced_step_closure
*dsc
, int load
,
6701 int immed
, int writeback
, int size
, int usermode
,
6702 int rt
, int rm
, int rn
)
6704 ULONGEST rt_val
, rn_val
, rm_val
= 0;
6706 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6707 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
6709 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
6711 dsc
->tmp
[4] = displaced_read_reg (regs
, dsc
, 4);
6713 rt_val
= displaced_read_reg (regs
, dsc
, rt
);
6714 rn_val
= displaced_read_reg (regs
, dsc
, rn
);
6716 rm_val
= displaced_read_reg (regs
, dsc
, rm
);
6718 displaced_write_reg (regs
, dsc
, 0, rt_val
, CANNOT_WRITE_PC
);
6719 displaced_write_reg (regs
, dsc
, 2, rn_val
, CANNOT_WRITE_PC
);
6721 displaced_write_reg (regs
, dsc
, 3, rm_val
, CANNOT_WRITE_PC
);
6723 dsc
->u
.ldst
.xfersize
= size
;
6724 dsc
->u
.ldst
.rn
= rn
;
6725 dsc
->u
.ldst
.immed
= immed
;
6726 dsc
->u
.ldst
.writeback
= writeback
;
6728 /* To write PC we can do:
6730 Before this sequence of instructions:
6731 r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
6732 r2 is the Rn value got from dispalced_read_reg.
6734 Insn1: push {pc} Write address of STR instruction + offset on stack
6735 Insn2: pop {r4} Read it back from stack, r4 = addr(Insn1) + offset
6736 Insn3: sub r4, r4, pc r4 = addr(Insn1) + offset - pc
6737 = addr(Insn1) + offset - addr(Insn3) - 8
6739 Insn4: add r4, r4, #8 r4 = offset - 8
6740 Insn5: add r0, r0, r4 r0 = from + 8 + offset - 8
6742 Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])
6744 Otherwise we don't know what value to write for PC, since the offset is
6745 architecture-dependent (sometimes PC+8, sometimes PC+12). More details
6746 of this can be found in Section "Saving from r15" in
6747 http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */
6749 dsc
->cleanup
= load
? &cleanup_load
: &cleanup_store
;
6754 thumb2_copy_load_literal (struct gdbarch
*gdbarch
, uint16_t insn1
,
6755 uint16_t insn2
, struct regcache
*regs
,
6756 struct displaced_step_closure
*dsc
, int size
)
6758 unsigned int u_bit
= bit (insn1
, 7);
6759 unsigned int rt
= bits (insn2
, 12, 15);
6760 int imm12
= bits (insn2
, 0, 11);
6763 if (debug_displaced
)
6764 fprintf_unfiltered (gdb_stdlog
,
6765 "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
6766 (unsigned int) dsc
->insn_addr
, rt
, u_bit
? '+' : '-',
6772 /* Rewrite instruction LDR Rt imm12 into:
6774 Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12
6778 Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2]. */
6781 dsc
->tmp
[0] = displaced_read_reg (regs
, dsc
, 0);
6782 dsc
->tmp
[2] = displaced_read_reg (regs
, dsc
, 2);
6783 dsc
->tmp
[3] = displaced_read_reg (regs
, dsc
, 3);
6785 pc_val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
6787 pc_val
= pc_val
& 0xfffffffc;
6789 displaced_write_reg (regs
, dsc
, 2, pc_val
, CANNOT_WRITE_PC
);
6790 displaced_write_reg (regs
, dsc
, 3, imm12
, CANNOT_WRITE_PC
);
6794 dsc
->u
.ldst
.xfersize
= size
;
6795 dsc
->u
.ldst
.immed
= 0;
6796 dsc
->u
.ldst
.writeback
= 0;
6797 dsc
->u
.ldst
.restore_r4
= 0;
6799 /* LDR R0, R2, R3 */
6800 dsc
->modinsn
[0] = 0xf852;
6801 dsc
->modinsn
[1] = 0x3;
6804 dsc
->cleanup
= &cleanup_load
;
6810 thumb2_copy_load_reg_imm (struct gdbarch
*gdbarch
, uint16_t insn1
,
6811 uint16_t insn2
, struct regcache
*regs
,
6812 struct displaced_step_closure
*dsc
,
6813 int writeback
, int immed
)
6815 unsigned int rt
= bits (insn2
, 12, 15);
6816 unsigned int rn
= bits (insn1
, 0, 3);
6817 unsigned int rm
= bits (insn2
, 0, 3); /* Only valid if !immed. */
6818 /* In LDR (register), there is also a register Rm, which is not allowed to
6819 be PC, so we don't have to check it. */
6821 if (rt
!= ARM_PC_REGNUM
&& rn
!= ARM_PC_REGNUM
)
6822 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
, "load",
6825 if (debug_displaced
)
6826 fprintf_unfiltered (gdb_stdlog
,
6827 "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
6828 rt
, rn
, insn1
, insn2
);
6830 install_load_store (gdbarch
, regs
, dsc
, 1, immed
, writeback
, 4,
6833 dsc
->u
.ldst
.restore_r4
= 0;
6836 /* ldr[b]<cond> rt, [rn, #imm], etc.
6838 ldr[b]<cond> r0, [r2, #imm]. */
6840 dsc
->modinsn
[0] = (insn1
& 0xfff0) | 0x2;
6841 dsc
->modinsn
[1] = insn2
& 0x0fff;
6844 /* ldr[b]<cond> rt, [rn, rm], etc.
6846 ldr[b]<cond> r0, [r2, r3]. */
6848 dsc
->modinsn
[0] = (insn1
& 0xfff0) | 0x2;
6849 dsc
->modinsn
[1] = (insn2
& 0x0ff0) | 0x3;
6859 arm_copy_ldr_str_ldrb_strb (struct gdbarch
*gdbarch
, uint32_t insn
,
6860 struct regcache
*regs
,
6861 struct displaced_step_closure
*dsc
,
6862 int load
, int size
, int usermode
)
6864 int immed
= !bit (insn
, 25);
6865 int writeback
= (bit (insn
, 24) == 0 || bit (insn
, 21) != 0);
6866 unsigned int rt
= bits (insn
, 12, 15);
6867 unsigned int rn
= bits (insn
, 16, 19);
6868 unsigned int rm
= bits (insn
, 0, 3); /* Only valid if !immed. */
6870 if (!insn_references_pc (insn
, 0x000ff00ful
))
6871 return arm_copy_unmodified (gdbarch
, insn
, "load/store", dsc
);
6873 if (debug_displaced
)
6874 fprintf_unfiltered (gdb_stdlog
,
6875 "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
6876 load
? (size
== 1 ? "ldrb" : "ldr")
6877 : (size
== 1 ? "strb" : "str"), usermode
? "t" : "",
6879 (unsigned long) insn
);
6881 install_load_store (gdbarch
, regs
, dsc
, load
, immed
, writeback
, size
,
6882 usermode
, rt
, rm
, rn
);
6884 if (load
|| rt
!= ARM_PC_REGNUM
)
6886 dsc
->u
.ldst
.restore_r4
= 0;
6889 /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
6891 {ldr,str}[b]<cond> r0, [r2, #imm]. */
6892 dsc
->modinsn
[0] = (insn
& 0xfff00fff) | 0x20000;
6894 /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
6896 {ldr,str}[b]<cond> r0, [r2, r3]. */
6897 dsc
->modinsn
[0] = (insn
& 0xfff00ff0) | 0x20003;
6901 /* We need to use r4 as scratch. Make sure it's restored afterwards. */
6902 dsc
->u
.ldst
.restore_r4
= 1;
6903 dsc
->modinsn
[0] = 0xe92d8000; /* push {pc} */
6904 dsc
->modinsn
[1] = 0xe8bd0010; /* pop {r4} */
6905 dsc
->modinsn
[2] = 0xe044400f; /* sub r4, r4, pc. */
6906 dsc
->modinsn
[3] = 0xe2844008; /* add r4, r4, #8. */
6907 dsc
->modinsn
[4] = 0xe0800004; /* add r0, r0, r4. */
6911 dsc
->modinsn
[5] = (insn
& 0xfff00fff) | 0x20000;
6913 dsc
->modinsn
[5] = (insn
& 0xfff00ff0) | 0x20003;
6918 dsc
->cleanup
= load
? &cleanup_load
: &cleanup_store
;
6923 /* Cleanup LDM instructions with fully-populated register list. This is an
6924 unfortunate corner case: it's impossible to implement correctly by modifying
6925 the instruction. The issue is as follows: we have an instruction,
6929 which we must rewrite to avoid loading PC. A possible solution would be to
6930 do the load in two halves, something like (with suitable cleanup
6934 ldm[id][ab] r8!, {r0-r7}
6936 ldm[id][ab] r8, {r7-r14}
6939 but at present there's no suitable place for <temp>, since the scratch space
6940 is overwritten before the cleanup routine is called. For now, we simply
6941 emulate the instruction. */
6944 cleanup_block_load_all (struct gdbarch
*gdbarch
, struct regcache
*regs
,
6945 struct displaced_step_closure
*dsc
)
6947 int inc
= dsc
->u
.block
.increment
;
6948 int bump_before
= dsc
->u
.block
.before
? (inc
? 4 : -4) : 0;
6949 int bump_after
= dsc
->u
.block
.before
? 0 : (inc
? 4 : -4);
6950 uint32_t regmask
= dsc
->u
.block
.regmask
;
6951 int regno
= inc
? 0 : 15;
6952 CORE_ADDR xfer_addr
= dsc
->u
.block
.xfer_addr
;
6953 int exception_return
= dsc
->u
.block
.load
&& dsc
->u
.block
.user
6954 && (regmask
& 0x8000) != 0;
6955 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
6956 int do_transfer
= condition_true (dsc
->u
.block
.cond
, status
);
6957 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
6962 /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
6963 sensible we can do here. Complain loudly. */
6964 if (exception_return
)
6965 error (_("Cannot single-step exception return"));
6967 /* We don't handle any stores here for now. */
6968 gdb_assert (dsc
->u
.block
.load
!= 0);
6970 if (debug_displaced
)
6971 fprintf_unfiltered (gdb_stdlog
, "displaced: emulating block transfer: "
6972 "%s %s %s\n", dsc
->u
.block
.load
? "ldm" : "stm",
6973 dsc
->u
.block
.increment
? "inc" : "dec",
6974 dsc
->u
.block
.before
? "before" : "after");
6981 while (regno
<= ARM_PC_REGNUM
&& (regmask
& (1 << regno
)) == 0)
6984 while (regno
>= 0 && (regmask
& (1 << regno
)) == 0)
6987 xfer_addr
+= bump_before
;
6989 memword
= read_memory_unsigned_integer (xfer_addr
, 4, byte_order
);
6990 displaced_write_reg (regs
, dsc
, regno
, memword
, LOAD_WRITE_PC
);
6992 xfer_addr
+= bump_after
;
6994 regmask
&= ~(1 << regno
);
6997 if (dsc
->u
.block
.writeback
)
6998 displaced_write_reg (regs
, dsc
, dsc
->u
.block
.rn
, xfer_addr
,
7002 /* Clean up an STM which included the PC in the register list. */
7005 cleanup_block_store_pc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7006 struct displaced_step_closure
*dsc
)
7008 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
7009 int store_executed
= condition_true (dsc
->u
.block
.cond
, status
);
7010 CORE_ADDR pc_stored_at
, transferred_regs
= bitcount (dsc
->u
.block
.regmask
);
7011 CORE_ADDR stm_insn_addr
;
7014 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
7016 /* If condition code fails, there's nothing else to do. */
7017 if (!store_executed
)
7020 if (dsc
->u
.block
.increment
)
7022 pc_stored_at
= dsc
->u
.block
.xfer_addr
+ 4 * transferred_regs
;
7024 if (dsc
->u
.block
.before
)
7029 pc_stored_at
= dsc
->u
.block
.xfer_addr
;
7031 if (dsc
->u
.block
.before
)
7035 pc_val
= read_memory_unsigned_integer (pc_stored_at
, 4, byte_order
);
7036 stm_insn_addr
= dsc
->scratch_base
;
7037 offset
= pc_val
- stm_insn_addr
;
7039 if (debug_displaced
)
7040 fprintf_unfiltered (gdb_stdlog
, "displaced: detected PC offset %.8lx for "
7041 "STM instruction\n", offset
);
7043 /* Rewrite the stored PC to the proper value for the non-displaced original
7045 write_memory_unsigned_integer (pc_stored_at
, 4, byte_order
,
7046 dsc
->insn_addr
+ offset
);
7049 /* Clean up an LDM which includes the PC in the register list. We clumped all
7050 the registers in the transferred list into a contiguous range r0...rX (to
7051 avoid loading PC directly and losing control of the debugged program), so we
7052 must undo that here. */
7055 cleanup_block_load_pc (struct gdbarch
*gdbarch
,
7056 struct regcache
*regs
,
7057 struct displaced_step_closure
*dsc
)
7059 uint32_t status
= displaced_read_reg (regs
, dsc
, ARM_PS_REGNUM
);
7060 int load_executed
= condition_true (dsc
->u
.block
.cond
, status
);
7061 unsigned int mask
= dsc
->u
.block
.regmask
, write_reg
= ARM_PC_REGNUM
;
7062 unsigned int regs_loaded
= bitcount (mask
);
7063 unsigned int num_to_shuffle
= regs_loaded
, clobbered
;
7065 /* The method employed here will fail if the register list is fully populated
7066 (we need to avoid loading PC directly). */
7067 gdb_assert (num_to_shuffle
< 16);
7072 clobbered
= (1 << num_to_shuffle
) - 1;
7074 while (num_to_shuffle
> 0)
7076 if ((mask
& (1 << write_reg
)) != 0)
7078 unsigned int read_reg
= num_to_shuffle
- 1;
7080 if (read_reg
!= write_reg
)
7082 ULONGEST rval
= displaced_read_reg (regs
, dsc
, read_reg
);
7083 displaced_write_reg (regs
, dsc
, write_reg
, rval
, LOAD_WRITE_PC
);
7084 if (debug_displaced
)
7085 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: move "
7086 "loaded register r%d to r%d\n"), read_reg
,
7089 else if (debug_displaced
)
7090 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: register "
7091 "r%d already in the right place\n"),
7094 clobbered
&= ~(1 << write_reg
);
7102 /* Restore any registers we scribbled over. */
7103 for (write_reg
= 0; clobbered
!= 0; write_reg
++)
7105 if ((clobbered
& (1 << write_reg
)) != 0)
7107 displaced_write_reg (regs
, dsc
, write_reg
, dsc
->tmp
[write_reg
],
7109 if (debug_displaced
)
7110 fprintf_unfiltered (gdb_stdlog
, _("displaced: LDM: restored "
7111 "clobbered register r%d\n"), write_reg
);
7112 clobbered
&= ~(1 << write_reg
);
7116 /* Perform register writeback manually. */
7117 if (dsc
->u
.block
.writeback
)
7119 ULONGEST new_rn_val
= dsc
->u
.block
.xfer_addr
;
7121 if (dsc
->u
.block
.increment
)
7122 new_rn_val
+= regs_loaded
* 4;
7124 new_rn_val
-= regs_loaded
* 4;
7126 displaced_write_reg (regs
, dsc
, dsc
->u
.block
.rn
, new_rn_val
,
/* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
   in user-level code (in particular exception return, ldm rn, {...pc}^).

   Prepares DSC so the displaced-stepping machinery can single-step a block
   transfer (LDM/STM) out of line.  Returns the result of the chosen copy
   helper (0 on success, per the arm_copy_* convention).  */

static int
arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
		     struct regcache *regs,
		     struct displaced_step_closure *dsc)
{
  /* Decode the standard LDM/STM fields from the instruction word.  */
  int load = bit (insn, 20);
  int user = bit (insn, 22);
  int increment = bit (insn, 23);
  int before = bit (insn, 24);
  int writeback = bit (insn, 21);
  int rn = bits (insn, 16, 19);

  /* Block transfers which don't mention PC can be run directly
     out-of-line.  */
  if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
    return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);

  if (rn == ARM_PC_REGNUM)
    {
      warning (_("displaced: Unpredictable LDM or STM with "
		 "base register r15"));
      return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
			"%.8lx\n", (unsigned long) insn);

  /* Record the decoded fields for the cleanup routine.  */
  dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
  dsc->u.block.rn = rn;

  dsc->u.block.load = load;
  dsc->u.block.user = user;
  dsc->u.block.increment = increment;
  dsc->u.block.before = before;
  dsc->u.block.writeback = writeback;
  dsc->u.block.cond = bits (insn, 28, 31);

  dsc->u.block.regmask = insn & 0xffff;

  if (load)
    {
      if ((insn & 0xffff) == 0xffff)
	{
	  /* LDM with a fully-populated register list.  This case is
	     particularly tricky.  Implement for now by fully emulating the
	     instruction (which might not behave perfectly in all cases, but
	     these instructions should be rare enough for that not to matter
	     too much).  */
	  dsc->modinsn[0] = ARM_NOP;

	  dsc->cleanup = &cleanup_block_load_all;
	}
      else
	{
	  /* LDM of a list of registers which includes PC.  Implement by
	     rewriting the list of registers to be transferred into a
	     contiguous chunk r0...rX before doing the transfer, then shuffling
	     registers into the correct places in the cleanup routine.  */
	  unsigned int regmask = insn & 0xffff;
	  unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
	  unsigned int to = 0, from = 0, i, new_rn;

	  /* Save r0..r(N-1) so the cleanup routine can restore them after
	     the rewritten transfer clobbers them.  */
	  for (i = 0; i < num_in_list; i++)
	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

	  /* Writeback makes things complicated.  We need to avoid clobbering
	     the base register with one of the registers in our modified
	     register list, but just using a different register can't work in
	     all cases, e.g.:

	       ldm r14!, {r0-r13,pc}

	     which would need to be rewritten as:

	       ldm rN!, {r0-r14}

	     but that can't work, because there's no free register for N.

	     Solve this by turning off the writeback bit, and emulating
	     writeback manually in the cleanup routine.  */

	  if (writeback)
	    insn &= ~(1 << 21);

	  /* Contiguous list r0..r(N-1), same length as the original list.  */
	  new_regmask = (1 << num_in_list) - 1;

	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
				"{..., pc}: original reg list %.4x, modified "
				"list %.4x\n"), rn, writeback ? "!" : "",
				(int) insn & 0xffff, new_regmask);

	  dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);

	  dsc->cleanup = &cleanup_block_load_pc;
	}
    }
  else
    {
      /* STM of a list of registers which includes PC.  Run the instruction
	 as-is, but out of line: this will store the wrong value for the PC,
	 so we must manually fix up the memory in the cleanup routine.
	 Doing things this way has the advantage that we can auto-detect
	 the offset of the PC write (which is architecture-dependent) in
	 the cleanup routine.  */
      dsc->modinsn[0] = insn;

      dsc->cleanup = &cleanup_block_store_pc;
    }

  return 0;
}
/* Thumb-2 counterpart of arm_copy_block_xfer: prepare DSC to displaced-step
   a 32-bit Thumb LDM/STM (INSN1 is the first halfword, INSN2 the second).
   Returns the result of the chosen copy helper (0 on success).  */

static int
thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
			struct regcache *regs,
			struct displaced_step_closure *dsc)
{
  /* In the T2 encoding, Rn / L / W live in the first halfword.  */
  int rn = bits (insn1, 0, 3);
  int load = bit (insn1, 4);
  int writeback = bit (insn1, 5);

  /* Block transfers which don't mention PC can be run directly
     out-of-line.  */
  if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);

  if (rn == ARM_PC_REGNUM)
    {
      warning (_("displaced: Unpredictable LDM or STM with "
		 "base register r15"));
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "unpredictable ldm/stm", dsc);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
			"%.4x%.4x\n", insn1, insn2);

  /* Clear bit 13, since it should be always zero.  */
  dsc->u.block.regmask = (insn2 & 0xdfff);
  dsc->u.block.rn = rn;

  dsc->u.block.load = load;
  dsc->u.block.user = 0;
  dsc->u.block.increment = bit (insn1, 7);
  dsc->u.block.before = bit (insn1, 8);
  dsc->u.block.writeback = writeback;
  dsc->u.block.cond = INST_AL;
  dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);

  if (load)
    {
      if (dsc->u.block.regmask == 0xffff)
	{
	  /* This branch is impossible to happen.  */
	  gdb_assert (0);
	}
      else
	{
	  /* LDM including PC: rewrite the register list into a contiguous
	     chunk r0..r(N-1); cleanup_block_load_pc shuffles the values into
	     their proper places afterwards.  */
	  unsigned int regmask = dsc->u.block.regmask;
	  unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
	  unsigned int to = 0, from = 0, i, new_rn;

	  /* Save the low registers the rewritten transfer will clobber.  */
	  for (i = 0; i < num_in_list; i++)
	    dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

	  new_regmask = (1 << num_in_list) - 1;

	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
				"{..., pc}: original reg list %.4x, modified "
				"list %.4x\n"), rn, writeback ? "!" : "",
				(int) dsc->u.block.regmask, new_regmask);

	  dsc->modinsn[0] = insn1;
	  dsc->modinsn[1] = (new_regmask & 0xffff);
	  dsc->numinsns = 2;

	  dsc->cleanup = &cleanup_block_load_pc;
	}
    }
  else
    {
      /* STM including PC: run as-is out of line; cleanup_block_store_pc
	 fixes up the stored PC value in memory afterwards.  */
      dsc->modinsn[0] = insn1;
      dsc->modinsn[1] = insn2;
      dsc->numinsns = 2;

      dsc->cleanup = &cleanup_block_store_pc;
    }

  return 0;
}
7330 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7331 for Linux, where some SVC instructions must be treated specially. */
7334 cleanup_svc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7335 struct displaced_step_closure
*dsc
)
7337 CORE_ADDR resume_addr
= dsc
->insn_addr
+ dsc
->insn_size
;
7339 if (debug_displaced
)
7340 fprintf_unfiltered (gdb_stdlog
, "displaced: cleanup for svc, resume at "
7341 "%.8lx\n", (unsigned long) resume_addr
);
7343 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, resume_addr
, BRANCH_WRITE_PC
);
7347 /* Common copy routine for svc instruciton. */
7350 install_svc (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7351 struct displaced_step_closure
*dsc
)
7353 /* Preparation: none.
7354 Insn: unmodified svc.
7355 Cleanup: pc <- insn_addr + insn_size. */
7357 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7359 dsc
->wrote_to_pc
= 1;
7361 /* Allow OS-specific code to override SVC handling. */
7362 if (dsc
->u
.svc
.copy_svc_os
)
7363 return dsc
->u
.svc
.copy_svc_os (gdbarch
, regs
, dsc
);
7366 dsc
->cleanup
= &cleanup_svc
;
7372 arm_copy_svc (struct gdbarch
*gdbarch
, uint32_t insn
,
7373 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
7376 if (debug_displaced
)
7377 fprintf_unfiltered (gdb_stdlog
, "displaced: copying svc insn %.8lx\n",
7378 (unsigned long) insn
);
7380 dsc
->modinsn
[0] = insn
;
7382 return install_svc (gdbarch
, regs
, dsc
);
7386 thumb_copy_svc (struct gdbarch
*gdbarch
, uint16_t insn
,
7387 struct regcache
*regs
, struct displaced_step_closure
*dsc
)
7390 if (debug_displaced
)
7391 fprintf_unfiltered (gdb_stdlog
, "displaced: copying svc insn %.4x\n",
7394 dsc
->modinsn
[0] = insn
;
7396 return install_svc (gdbarch
, regs
, dsc
);
7399 /* Copy undefined instructions. */
7402 arm_copy_undef (struct gdbarch
*gdbarch
, uint32_t insn
,
7403 struct displaced_step_closure
*dsc
)
7405 if (debug_displaced
)
7406 fprintf_unfiltered (gdb_stdlog
,
7407 "displaced: copying undefined insn %.8lx\n",
7408 (unsigned long) insn
);
7410 dsc
->modinsn
[0] = insn
;
7416 thumb_32bit_copy_undef (struct gdbarch
*gdbarch
, uint16_t insn1
, uint16_t insn2
,
7417 struct displaced_step_closure
*dsc
)
7420 if (debug_displaced
)
7421 fprintf_unfiltered (gdb_stdlog
, "displaced: copying undefined insn "
7422 "%.4x %.4x\n", (unsigned short) insn1
,
7423 (unsigned short) insn2
);
7425 dsc
->modinsn
[0] = insn1
;
7426 dsc
->modinsn
[1] = insn2
;
7432 /* Copy unpredictable instructions. */
7435 arm_copy_unpred (struct gdbarch
*gdbarch
, uint32_t insn
,
7436 struct displaced_step_closure
*dsc
)
7438 if (debug_displaced
)
7439 fprintf_unfiltered (gdb_stdlog
, "displaced: copying unpredictable insn "
7440 "%.8lx\n", (unsigned long) insn
);
7442 dsc
->modinsn
[0] = insn
;
/* The decode_* functions are instruction decoding helpers.  They mostly follow
   the presentation in the ARM ARM.  */

/* Decode the miscellaneous / memory-hint / advanced-SIMD space of the ARM
   unconditional-instruction encodings and dispatch to the matching copy
   helper.  OP1 is insn[26:20], OP2 is insn[7:4], RN is insn[19:16].  */

static int
arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
			      struct regcache *regs,
			      struct displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
  unsigned int rn = bits (insn, 16, 19);

  if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
    return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
  else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
    return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
  else if ((op1 & 0x60) == 0x20)
    return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
  else if ((op1 & 0x71) == 0x40)
    return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
				dsc);
  else if ((op1 & 0x77) == 0x41)
    return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
  else if ((op1 & 0x77) == 0x45)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
  else if ((op1 & 0x77) == 0x51)
    {
      if (rn != 0xf)
	return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
      else
	return arm_copy_unpred (gdbarch, insn, dsc);
    }
  else if ((op1 & 0x77) == 0x55)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
  else if (op1 == 0x57)
    switch (op2)
      {
      case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
      case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
      case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
      case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
      default: return arm_copy_unpred (gdbarch, insn, dsc);
      }
  else if ((op1 & 0x63) == 0x43)
    return arm_copy_unpred (gdbarch, insn, dsc);
  else if ((op2 & 0x1) == 0x0)
    switch (op1 & ~0x80)
      {
      case 0x61:
	return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
      case 0x65:
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
      case 0x71: case 0x75:
	/* pld/pldw reg.  */
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
      case 0x63: case 0x67: case 0x73: case 0x77:
	return arm_copy_unpred (gdbarch, insn, dsc);
      default:
	return arm_copy_undef (gdbarch, insn, dsc);
      }
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Probably unreachable.  */
}
/* Decode ARM unconditional instructions (condition field 0b1111) and
   dispatch to the matching copy helper.  */

static int
arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  if (bit (insn, 27) == 0)
    return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
  /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx.  */
  else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
    {
    case 0x0: case 0x2:
      return arm_copy_unmodified (gdbarch, insn, "srs", dsc);

    case 0x1: case 0x3:
      return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);

    case 0x4: case 0x5: case 0x6: case 0x7:
      return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);

    case 0x8:
      switch ((insn & 0xe00000) >> 21)
	{
	case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
	  /* stc/stc2.  */
	  return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	case 0x2:
	  return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);

	default:
	  return arm_copy_undef (gdbarch, insn, dsc);
	}

    case 0x9:
      {
	int rn_f = (bits (insn, 16, 19) == 0xf);
	switch ((insn & 0xe00000) >> 21)
	  {
	  case 0x1: case 0x3:
	    /* ldc/ldc2 imm (undefined for rn == pc).  */
	    return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
			: arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	  case 0x2:
	    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);

	  case 0x4: case 0x5: case 0x6: case 0x7:
	    /* ldc/ldc2 lit (undefined for rn != pc).  */
	    return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
			: arm_copy_undef (gdbarch, insn, dsc);

	  default:
	    return arm_copy_undef (gdbarch, insn, dsc);
	  }
      }

    case 0xa:
      return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);

    case 0xb:
      if (bits (insn, 16, 19) == 0xf)
	/* ldc/ldc2 lit.  */
	return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0xc:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    case 0xd:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
/* Decode miscellaneous instructions in dp/misc encoding space.  */

static int
arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  unsigned int op2 = bits (insn, 4, 6);
  unsigned int op = bits (insn, 21, 22);
  unsigned int op1 = bits (insn, 16, 19);

  switch (op2)
    {
    case 0x0:
      return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);

    case 0x1:
      if (op == 0x1)  /* bx.  */
	return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
      else if (op == 0x3)
	return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x2:
      if (op == 0x1)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x3:
      if (op == 0x1)
	return arm_copy_bx_blx_reg (gdbarch, insn,
				    regs, dsc);  /* blx register.  */
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x5:
      return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);

    case 0x7:
      if (op == 0x1)
	return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
      else if (op == 0x3)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
      /* fallthrough: remaining op values are undefined.  */

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
/* Decode the ARM data-processing / miscellaneous encoding space and
   dispatch to the matching copy helper.  Bit 25 selects the immediate
   forms; otherwise register forms are decoded from OP1/OP2.  */

static int
arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
		    struct regcache *regs,
		    struct displaced_step_closure *dsc)
{
  if (bit (insn, 25))
    switch (bits (insn, 20, 24))
      {
      case 0x10:
	return arm_copy_unmodified (gdbarch, insn, "movw", dsc);

      case 0x14:
	return arm_copy_unmodified (gdbarch, insn, "movt", dsc);

      case 0x12: case 0x16:
	return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);

      default:
	return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
      }
  else
    {
      uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);

      if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
	return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
	return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
	return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
	return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
      else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
      else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
      else if (op2 == 0xb || (op2 & 0xd) == 0xd)
	/* 2nd arg means "unpriveleged".  */
	return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
				     dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode ARM load/store word and unsigned byte instructions and dispatch
   to arm_copy_ldr_str_ldrb_strb with (load, size, usermode) arguments:
   the trailing constants are load-flag, transfer size in bytes, and the
   unprivileged ("T") variant flag.  A = bit 25 (register offset form),
   B = bit 4.  */

static int
arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
			     struct regcache *regs,
			     struct displaced_step_closure *dsc)
{
  int a = bit (insn, 25), b = bit (insn, 4);
  uint32_t op1 = bits (insn, 20, 24);
  int rn_f = bits (insn, 16, 19) == 0xf;

  if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
      || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x02)
	   || (a && (op1 & 0x17) == 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
	   || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x03)
	   || (a && (op1 & 0x17) == 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
	   || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x06)
	   || (a && (op1 & 0x17) == 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
  else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
	   || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x07)
	   || (a && (op1 & 0x17) == 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);

  /* Should be unreachable.  */
  return 1;
}
/* Decode ARM media instructions (parallel add/sub, pack/unpack, bit-field
   operations, etc.) and dispatch to the matching copy helper.  */

static int
arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
		  struct displaced_step_closure *dsc)
{
  switch (bits (insn, 20, 24))
    {
    case 0x00: case 0x01: case 0x02: case 0x03:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);

    case 0x04: case 0x05: case 0x06: case 0x07:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);

    case 0x08: case 0x09: case 0x0a: case 0x0b:
    case 0x0c: case 0x0d: case 0x0e: case 0x0f:
      return arm_copy_unmodified (gdbarch, insn,
				  "decode/pack/unpack/saturate/reverse", dsc);

    case 0x18:
      if (bits (insn, 5, 7) == 0)  /* op2.  */
	{
	  if (bits (insn, 12, 15) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1a: case 0x1b:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1c: case 0x1d:
      if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
	{
	  if (bits (insn, 0, 3) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1e: case 0x1f:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
7787 arm_decode_b_bl_ldmstm (struct gdbarch
*gdbarch
, int32_t insn
,
7788 struct regcache
*regs
,
7789 struct displaced_step_closure
*dsc
)
7792 return arm_copy_b_bl_blx (gdbarch
, insn
, regs
, dsc
);
7794 return arm_copy_block_xfer (gdbarch
, insn
, regs
, dsc
);
/* Decode VFP/Neon extension-register load/store instructions and dispatch
   to the matching copy helper.  */

static int
arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn, 20, 24);

  switch (opcode)
    {
    case 0x04: case 0x05:  /* VFP/Neon mrrc/mcrr.  */
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);

    case 0x08: case 0x0a: case 0x0c: case 0x0e:
    case 0x12: case 0x16:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0b: case 0x0d: case 0x0f:
    case 0x13: case 0x17:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* Note: no writeback for these instructions.  Bit 25 will always be
	 zero though (via caller), so the following works OK.  */
      return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
7828 /* Decode shifted register instructions. */
7831 thumb2_decode_dp_shift_reg (struct gdbarch
*gdbarch
, uint16_t insn1
,
7832 uint16_t insn2
, struct regcache
*regs
,
7833 struct displaced_step_closure
*dsc
)
7835 /* PC is only allowed to be used in instruction MOV. */
7837 unsigned int op
= bits (insn1
, 5, 8);
7838 unsigned int rn
= bits (insn1
, 0, 3);
7840 if (op
== 0x2 && rn
== 0xf) /* MOV */
7841 return thumb2_copy_alu_imm (gdbarch
, insn1
, insn2
, regs
, dsc
);
7843 return thumb_copy_unmodified_32bit (gdbarch
, insn1
, insn2
,
7844 "dp (shift reg)", dsc
);
/* Decode extension register load/store.  Exactly the same as
   arm_decode_ext_reg_ld_st.  */

static int
thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
			     uint16_t insn2, struct regcache *regs,
			     struct displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn1, 4, 8);

  switch (opcode)
    {
    case 0x04: case 0x05:
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vmov", dsc);

    case 0x08: case 0x0c: /* 01x00 */
    case 0x0a: case 0x0e: /* 01x10 */
    case 0x12: case 0x16: /* 10x10 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0d: /* 01x01 */
    case 0x0b: case 0x0f: /* 01x11 */
    case 0x13: case 0x17: /* 10x11 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vstr", dsc);
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
/* Decode the ARM supervisor-call / coprocessor encoding space and dispatch
   to the matching copy helper.  TO is unused here; it is part of the common
   decode signature.  */

static int
arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
		      struct regcache *regs, struct displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 25);
  int op = bit (insn, 4);
  unsigned int coproc = bits (insn, 8, 11);
  unsigned int rn = bits (insn, 16, 19);

  if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
    return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
	   && (coproc & 0xe) != 0xa)
    /* stc/stc2.  */
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
	   && (coproc & 0xe) != 0xa)
    /* ldc/ldc2 imm/lit.  */
    return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
  else if ((op1 & 0x3e) == 0x00)
    return arm_copy_undef (gdbarch, insn, dsc);
  else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
    return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
  else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
  else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
  else if ((op1 & 0x30) == 0x20 && !op)
    {
      if ((coproc & 0xe) == 0xa)
	return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
    }
  else if ((op1 & 0x30) == 0x20 && op)
    return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
  else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
  else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
    return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
  else if ((op1 & 0x30) == 0x30)
    return arm_copy_svc (gdbarch, insn, regs, dsc);
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Possibly unreachable.  */
}
/* Decode the Thumb-2 coprocessor / SIMD encoding space and dispatch to
   the matching copy helper.  */

static int
thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
			 uint16_t insn2, struct regcache *regs,
			 struct displaced_step_closure *dsc)
{
  unsigned int coproc = bits (insn2, 8, 11);
  unsigned int op1 = bits (insn1, 4, 9);
  unsigned int bit_5_8 = bits (insn1, 5, 8);
  unsigned int bit_9 = bit (insn1, 9);
  unsigned int bit_4 = bit (insn1, 4);
  unsigned int rn = bits (insn1, 0, 3);

  if (bit_9 == 0)
    {
      if (bit_5_8 == 2)
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
					    dsc);
      else if (bit_5_8 == 0) /* UNDEFINED.  */
	return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      else
	{
	  /*coproc is 101x.  SIMD/VFP, ext registers load/store.  */
	  if ((coproc & 0xe) == 0xa)
	    return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
						dsc);
	  else /* coproc is not 101x.  */
	    {
	      if (bit_4 == 0) /* STC/STC2.  */
		return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						    "stc/stc2", dsc);
	      else /* LDC/LDC2 {literal, immeidate}.  */
		return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
						     regs, dsc);
	    }
	}
    }
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);

  return 0;
}
7977 install_pc_relative (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7978 struct displaced_step_closure
*dsc
, int rd
)
7984 Preparation: Rd <- PC
7990 int val
= displaced_read_reg (regs
, dsc
, ARM_PC_REGNUM
);
7991 displaced_write_reg (regs
, dsc
, rd
, val
, CANNOT_WRITE_PC
);
7995 thumb_copy_pc_relative_16bit (struct gdbarch
*gdbarch
, struct regcache
*regs
,
7996 struct displaced_step_closure
*dsc
,
7997 int rd
, unsigned int imm
)
8000 /* Encoding T2: ADDS Rd, #imm */
8001 dsc
->modinsn
[0] = (0x3000 | (rd
<< 8) | imm
);
8003 install_pc_relative (gdbarch
, regs
, dsc
, rd
);
8009 thumb_decode_pc_relative_16bit (struct gdbarch
*gdbarch
, uint16_t insn
,
8010 struct regcache
*regs
,
8011 struct displaced_step_closure
*dsc
)
8013 unsigned int rd
= bits (insn
, 8, 10);
8014 unsigned int imm8
= bits (insn
, 0, 7);
8016 if (debug_displaced
)
8017 fprintf_unfiltered (gdb_stdlog
,
8018 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
8021 return thumb_copy_pc_relative_16bit (gdbarch
, regs
, dsc
, rd
, imm8
);
/* Copy a 32-bit Thumb PC-relative address computation (ADR, encodings
   T2/T3) by rewriting it as a SUB or ADD on RD after install_pc_relative
   has seeded RD with PC.  */

static int
thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
			      uint16_t insn2, struct regcache *regs,
			      struct displaced_step_closure *dsc)
{
  unsigned int rd = bits (insn2, 8, 11);
  /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
     extract raw immediate encoding rather than computing immediate.  When
     generating ADD or SUB instruction, we can simply perform OR operation to
     set immediate into ADD.  */
  unsigned int imm_3_8 = insn2 & 0x70ff;
  unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10.  */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
			rd, imm_i, imm_3_8, insn1, insn2);

  if (bit (insn1, 7)) /* Encoding T2 */
    {
      /* Encoding T3: SUB Rd, Rd, #imm */
      dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  else /* Encoding T3 */
    {
      /* Encoding T3: ADD Rd, Rd, #imm */
      dsc->modinsn[0] = (0xf100 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  /* NOTE(review): upstream sets dsc->numinsns = 2 here for the two
     modified halfwords.  */
  dsc->numinsns = 2;

  install_pc_relative (gdbarch, regs, dsc, rd);

  return 0;
}
/* Copy a 16-bit Thumb LDR (literal): rewrite the PC-relative load as a
   register-offset load through scratch registers.  */

static int
thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
			      struct regcache *regs,
			      struct displaced_step_closure *dsc)
{
  unsigned int rt = bits (insn1, 8, 10);
  unsigned int pc;
  int imm8 = (bits (insn1, 0, 7) << 2);
  CORE_ADDR from = dsc->insn_addr;

  /* LDR Rd, #imm8

     Rwrite as:

     Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;

     Insn: LDR R0, [R2, R3];
     Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb ldr r%d [pc #%d]\n"
			, rt, imm8);

  /* Save the registers we are about to clobber so cleanup_load can
     restore them.  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
  pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
  /* The assembler calculates the required value of the offset from the
     Align(PC,4) value of this instruction to the label.  */
  pc = pc & 0xfffffffc;

  displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);

  /* Record load parameters for cleanup_load.  */
  dsc->rd = rt;
  dsc->u.ldst.xfersize = 4;
  dsc->u.ldst.rn = 0;
  dsc->u.ldst.immed = 0;
  dsc->u.ldst.writeback = 0;
  dsc->u.ldst.restore_r4 = 0;

  dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/

  dsc->cleanup = &cleanup_load;

  return 0;
}
/* Copy Thumb cbnz/cbz insruction.  The condition is evaluated here, at
   copy time, against the current register value; the out-of-line insn is
   a NOP and cleanup_branch performs the (possibly taken) branch.  */

static int
thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
		     struct regcache *regs,
		     struct displaced_step_closure *dsc)
{
  int non_zero = bit (insn1, 11);
  unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
  CORE_ADDR from = dsc->insn_addr;
  int rn = bits (insn1, 0, 2);
  int rn_val = displaced_read_reg (regs, dsc, rn);

  dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
  /* CBNZ and CBZ do not affect the condition flags.  If condition is true,
     set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
     condition is false, let it be, cleanup_branch will do nothing.  */
  if (dsc->u.branch.cond)
    {
      dsc->u.branch.cond = INST_AL;
      dsc->u.branch.dest = from + 4 + imm5;
    }
  else
      dsc->u.branch.dest = from + 2;

  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
			" insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
			rn, rn_val, insn1, dsc->u.branch.dest);

  dsc->modinsn[0] = THUMB_NOP;

  dsc->cleanup = &cleanup_branch;
  return 0;
}
/* Copy Table Branch Byte/Halfword.  The table entry is read from target
   memory here, at copy time; the branch itself is then performed by
   cleanup_branch.  */

static int
thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  ULONGEST rn_val, rm_val;
  int is_tbh = bit (insn2, 4);
  CORE_ADDR halfwords = 0;
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
  rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));

  if (is_tbh)
    {
      /* TBH: halfword table entries, index scaled by 2.  */
      gdb_byte buf[2];

      target_read_memory (rn_val + 2 * rm_val, buf, 2);
      halfwords = extract_unsigned_integer (buf, 2, byte_order);
    }
  else
    {
      /* TBB: byte table entries.  */
      gdb_byte buf[1];

      target_read_memory (rn_val + rm_val, buf, 1);
      halfwords = extract_unsigned_integer (buf, 1, byte_order);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
			" offset 0x%x\n", is_tbh ? "tbh" : "tbb",
			(unsigned int) rn_val, (unsigned int) rm_val,
			(unsigned int) halfwords);

  /* The branch target is insn_addr + 4 + twice the table entry (the entry
     counts halfwords).  */
  dsc->u.branch.cond = INST_AL;
  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;
  dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;

  dsc->cleanup = &cleanup_branch;

  return 0;
}
8195 cleanup_pop_pc_16bit_all (struct gdbarch
*gdbarch
, struct regcache
*regs
,
8196 struct displaced_step_closure
*dsc
)
8199 int val
= displaced_read_reg (regs
, dsc
, 7);
8200 displaced_write_reg (regs
, dsc
, ARM_PC_REGNUM
, val
, BX_WRITE_PC
);
8203 val
= displaced_read_reg (regs
, dsc
, 8);
8204 displaced_write_reg (regs
, dsc
, 7, val
, CANNOT_WRITE_PC
);
8207 displaced_write_reg (regs
, dsc
, 8, dsc
->tmp
[0], CANNOT_WRITE_PC
);
/* Copy a 16-bit Thumb POP whose register list includes PC, rewriting it so
   PC is not written directly by the out-of-line instruction.  */

static int
thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
			 struct regcache *regs,
			 struct displaced_step_closure *dsc)
{
  dsc->u.block.regmask = insn1 & 0x00ff;

  /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
     to :

     (1) register list is full, that is, r0-r7 are used.
     Prepare: tmp[0] <- r8

     POP {r0, r1, ...., r6, r7}; remove PC from reglist
     MOV r8, r7; Move value of r7 to r8;
     POP {r7}; Store PC value into r7.

     Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]

     (2) register list is not full, supposing there are N registers in
     register list (except PC, 0 <= N <= 7).
     Prepare: for each i, 0 - N, tmp[i] <- ri.

     POP {r0, r1, ...., rN};

     Cleanup: Set registers in original reglist from r0 - rN.  Restore r0 - rN
     from tmp[] properly.
  */
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
			dsc->u.block.regmask, insn1);

  if (dsc->u.block.regmask == 0xff)
    {
      /* Case (1): full list r0-r7.  */
      dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);

      dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
      dsc->modinsn[1] = 0x46b8;  /* MOV r8, r7 */
      dsc->modinsn[2] = 0xbc80;  /* POP {r7} */

      dsc->numinsns = 3;
      dsc->cleanup = &cleanup_pop_pc_16bit_all;
    }
  else
    {
      /* Case (2): partial list; pop into a contiguous chunk r0..rN and
	 let cleanup_block_load_pc shuffle values into place.  */
      unsigned int num_in_list = bitcount (dsc->u.block.regmask);
      unsigned int new_regmask, bit = 1;
      unsigned int to = 0, from = 0, i, new_rn;

      /* Save r0..rN (one extra slot for the PC value).  */
      for (i = 0; i < num_in_list + 1; i++)
	dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

      new_regmask = (1 << (num_in_list + 1)) - 1;

      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
					  "{..., pc}: original reg list %.4x,"
					  " modified list %.4x\n"),
			    (int) dsc->u.block.regmask, new_regmask);

      /* Record PC membership for the cleanup routine.  */
      dsc->u.block.regmask |= 0x8000;
      dsc->u.block.writeback = 0;
      dsc->u.block.cond = INST_AL;

      dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);

      dsc->cleanup = &cleanup_block_load_pc;
    }

  return 0;
}
/* Decode a 16-bit Thumb instruction for displaced stepping and dispatch
   to the matching copy routine.  Raises an internal error if the
   instruction cannot be decoded.
   NOTE(review): reconstructed from fragmented source; the operand-name
   strings on some thumb_copy_unmodified_16bit calls were missing from
   the fragments — confirm against upstream arm-tdep.c.  */

static void
thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
                                    struct regcache *regs,
                                    struct displaced_step_closure *dsc)
{
  unsigned short op_bit_12_15 = bits (insn1, 12, 15);
  unsigned short op_bit_10_11 = bits (insn1, 10, 11);
  int err = 0;

  /* 16-bit thumb instructions.  */
  switch (op_bit_12_15)
    {
      /* Shift (imme), add, subtract, move and compare.  */
    case 0: case 1: case 2: case 3:
      err = thumb_copy_unmodified_16bit (gdbarch, insn1,
                                         "shift/add/sub/mov/cmp",
                                         dsc);
      break;
    case 4:
      switch (op_bit_10_11)
        {
        case 0: /* Data-processing */
          err = thumb_copy_unmodified_16bit (gdbarch, insn1,
                                             "data-processing",
                                             dsc);
          break;
        case 1: /* Special data instructions and branch and exchange.  */
          {
            unsigned short op = bits (insn1, 7, 9);
            if (op == 6 || op == 7) /* BX or BLX */
              err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
            else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers.  */
              err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
            else
              err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
                                                 dsc);
          }
          break;
        default: /* LDR (literal) */
          err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
        }
      break;
    case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
      break;
    case 10:
      if (op_bit_10_11 < 2) /* Generate PC-relative address */
        err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
      else /* Generate SP-relative address */
        err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
      break;
    case 11: /* Misc 16-bit instructions */
      {
        switch (bits (insn1, 8, 11))
          {
          case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
            err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
            break;
          case 12: case 13: /* POP */
            if (bit (insn1, 8)) /* PC is in register list.  */
              err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
            else
              err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
            break;
          case 15: /* If-Then, and hints */
            if (bits (insn1, 0, 3))
              /* If-Then makes up to four following instructions conditional.
                 IT instruction itself is not conditional, so handle it as a
                 common unmodified instruction.  */
              err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
                                                 dsc);
            else
              err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
            break;
          default:
            err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
          }
      }
      break;
    case 12:
      if (op_bit_10_11 < 2) /* Store multiple registers */
        err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
      else /* Load multiple registers */
        err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
      break;
    case 13: /* Conditional branch and supervisor call */
      if (bits (insn1, 9, 11) != 7) /* conditional branch */
        err = thumb_copy_b (gdbarch, insn1, dsc);
      else
        err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
      break;
    case 14: /* Unconditional branch */
      err = thumb_copy_b (gdbarch, insn1, dsc);
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
                    _("thumb_process_displaced_16bit_insn: Instruction decode error"));
}
/* Decode the "load byte/halfword/word and memory hints" group of 32-bit
   Thumb-2 instructions and dispatch to the matching displaced-stepping
   copy routine.  Returns the copy routine's status (nonzero on error).
   NOTE(review): reconstructed from fragmented source; the literal size
   arguments (1/2/4) and a few operand-name strings were missing from the
   fragments — confirm against upstream arm-tdep.c.  */

static int
decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
                                 uint16_t insn1, uint16_t insn2,
                                 struct regcache *regs,
                                 struct displaced_step_closure *dsc)
{
  int rt = bits (insn2, 12, 15);
  int rn = bits (insn1, 0, 3);
  int op1 = bits (insn1, 7, 8);

  switch (bits (insn1, 5, 6))
    {
    case 0: /* Load byte and memory hints */
      if (rt == 0xf) /* PLD/PLI */
        {
          if (rn == 0xf)
            /* PLD literal or Encoding T3 of PLI(immediate, literal).  */
            return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
          else
            return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                "pli/pld", dsc);
        }
      else
        {
          if (rn == 0xf) /* LDRB/LDRSB (literal) */
            return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
                                             1);
          else
            return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                "ldrb{reg, immediate}/ldrbt",
                                                dsc);
        }

    case 1: /* Load halfword and memory hints.  */
      if (rt == 0xf) /* PLD{W} and Unalloc memory hint.  */
        return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                            "pld/unalloc memhint", dsc);
      else
        {
          if (rn == 0xf) /* LDRH/LDRSH (literal) */
            return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
                                             2);
          else
            return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                "ldrh/ldrht", dsc);
        }

    case 2: /* Load word */
      {
        int insn2_bit_8_11 = bits (insn2, 8, 11);

        if (rn == 0xf) /* LDR (literal) */
          return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
        else if (op1 == 0x1) /* Encoding T3 */
          return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
                                           0, 1);
        else /* op1 == 0x0 */
          {
            if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
              /* LDR (immediate) */
              return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
                                               dsc, bit (insn2, 8), 1);
            else if (insn2_bit_8_11 == 0xe) /* LDRT */
              return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                  "ldrt", dsc);
            else
              /* LDR (register) */
              return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
                                               dsc, 0, 0);
          }
      }

    default:
      return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
    }
}
/* Decode a 32-bit Thumb-2 instruction (two halfwords INSN1/INSN2) for
   displaced stepping and dispatch to the matching copy routine.  Raises
   an internal error if the instruction cannot be decoded.
   NOTE(review): reconstructed from fragmented source; several operand-name
   strings and case labels were missing from the fragments — confirm
   against upstream arm-tdep.c.  */

static void
thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
                                    uint16_t insn2, struct regcache *regs,
                                    struct displaced_step_closure *dsc)
{
  int err = 0;
  unsigned short op = bit (insn2, 15);
  unsigned int op1 = bits (insn1, 11, 12);

  switch (op1)
    {
    case 1:
      {
        switch (bits (insn1, 9, 10))
          {
          case 0:
            if (bit (insn1, 6))
              {
                /* Load/store {dual, execlusive}, table branch.  */
                if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
                    && bits (insn2, 5, 7) == 0)
                  err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
                                                  dsc);
                else
                  /* PC is not allowed to use in load/store {dual, exclusive}
                     instructions.  */
                  err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                     "load/store dual/ex", dsc);
              }
            else /* load/store multiple */
              {
                switch (bits (insn1, 7, 8))
                  {
                  case 0: case 3: /* SRS, RFE */
                    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                       "srs/rfe", dsc);
                    break;
                  case 1: case 2: /* LDM/STM/PUSH/POP */
                    err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
                    break;
                  }
              }
            break;
          case 1:
            /* Data-processing (shift register).  */
            err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
                                              dsc);
            break;
          default: /* Coprocessor instructions.  */
            err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
            break;
          }
        break;
      }
    case 2: /* op1 = 2 */
      if (op) /* Branch and misc control.  */
        {
          if (bit (insn2, 14)  /* BLX/BL */
              || bit (insn2, 12) /* Unconditional branch */
              || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
            err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
          else
            err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                               "misc ctrl", dsc);
        }
      else
        {
          if (bit (insn1, 9)) /* Data processing (plain binary imm).  */
            {
              int op = bits (insn1, 4, 8);
              int rn = bits (insn1, 0, 3);
              if ((op == 0 || op == 0xa) && rn == 0xf)
                err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
                                                    regs, dsc);
              else
                err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                   "dp/pb", dsc);
            }
          else /* Data processing (modified immeidate) */
            err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                               "dp/mi", dsc);
        }
      break;
    case 3: /* op1 = 3 */
      switch (bits (insn1, 9, 10))
        {
        case 0:
          if (bit (insn1, 4))
            err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
                                                   regs, dsc);
          else /* NEON Load/Store and Store single data item */
            err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                               "neon elt/struct load/store",
                                               dsc);
          break;
        case 1: /* op1 = 3, bits (9, 10) == 1 */
          switch (bits (insn1, 7, 8))
            {
            case 0: case 1: /* Data processing (register) */
              err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                 "dp(reg)", dsc);
              break;
            case 2: /* Multiply and absolute difference */
              err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                 "mul/mua/diff", dsc);
              break;
            case 3: /* Long multiply and divide */
              err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                 "lmul/lmua", dsc);
              break;
            }
          break;
        default: /* Coprocessor instructions */
          err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
          break;
        }
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
                    _("thumb_process_displaced_32bit_insn: Instruction decode error"));
}
8597 thumb_process_displaced_insn (struct gdbarch
*gdbarch
, CORE_ADDR from
,
8598 CORE_ADDR to
, struct regcache
*regs
,
8599 struct displaced_step_closure
*dsc
)
8601 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
8603 = read_memory_unsigned_integer (from
, 2, byte_order_for_code
);
8605 if (debug_displaced
)
8606 fprintf_unfiltered (gdb_stdlog
, "displaced: process thumb insn %.4x "
8607 "at %.8lx\n", insn1
, (unsigned long) from
);
8610 dsc
->insn_size
= thumb_insn_size (insn1
);
8611 if (thumb_insn_size (insn1
) == 4)
8614 = read_memory_unsigned_integer (from
+ 2, 2, byte_order_for_code
);
8615 thumb_process_displaced_32bit_insn (gdbarch
, insn1
, insn2
, regs
, dsc
);
8618 thumb_process_displaced_16bit_insn (gdbarch
, insn1
, regs
, dsc
);
8622 arm_process_displaced_insn (struct gdbarch
*gdbarch
, CORE_ADDR from
,
8623 CORE_ADDR to
, struct regcache
*regs
,
8624 struct displaced_step_closure
*dsc
)
8627 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
8630 /* Most displaced instructions use a 1-instruction scratch space, so set this
8631 here and override below if/when necessary. */
8633 dsc
->insn_addr
= from
;
8634 dsc
->scratch_base
= to
;
8635 dsc
->cleanup
= NULL
;
8636 dsc
->wrote_to_pc
= 0;
8638 if (!displaced_in_arm_mode (regs
))
8639 return thumb_process_displaced_insn (gdbarch
, from
, to
, regs
, dsc
);
8643 insn
= read_memory_unsigned_integer (from
, 4, byte_order_for_code
);
8644 if (debug_displaced
)
8645 fprintf_unfiltered (gdb_stdlog
, "displaced: stepping insn %.8lx "
8646 "at %.8lx\n", (unsigned long) insn
,
8647 (unsigned long) from
);
8649 if ((insn
& 0xf0000000) == 0xf0000000)
8650 err
= arm_decode_unconditional (gdbarch
, insn
, regs
, dsc
);
8651 else switch (((insn
& 0x10) >> 4) | ((insn
& 0xe000000) >> 24))
8653 case 0x0: case 0x1: case 0x2: case 0x3:
8654 err
= arm_decode_dp_misc (gdbarch
, insn
, regs
, dsc
);
8657 case 0x4: case 0x5: case 0x6:
8658 err
= arm_decode_ld_st_word_ubyte (gdbarch
, insn
, regs
, dsc
);
8662 err
= arm_decode_media (gdbarch
, insn
, dsc
);
8665 case 0x8: case 0x9: case 0xa: case 0xb:
8666 err
= arm_decode_b_bl_ldmstm (gdbarch
, insn
, regs
, dsc
);
8669 case 0xc: case 0xd: case 0xe: case 0xf:
8670 err
= arm_decode_svc_copro (gdbarch
, insn
, to
, regs
, dsc
);
8675 internal_error (__FILE__
, __LINE__
,
8676 _("arm_process_displaced_insn: Instruction decode error"));
8679 /* Actually set up the scratch space for a displaced instruction. */
8682 arm_displaced_init_closure (struct gdbarch
*gdbarch
, CORE_ADDR from
,
8683 CORE_ADDR to
, struct displaced_step_closure
*dsc
)
8685 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
8686 unsigned int i
, len
, offset
;
8687 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
8688 int size
= dsc
->is_thumb
? 2 : 4;
8689 const gdb_byte
*bkp_insn
;
8692 /* Poke modified instruction(s). */
8693 for (i
= 0; i
< dsc
->numinsns
; i
++)
8695 if (debug_displaced
)
8697 fprintf_unfiltered (gdb_stdlog
, "displaced: writing insn ");
8699 fprintf_unfiltered (gdb_stdlog
, "%.8lx",
8702 fprintf_unfiltered (gdb_stdlog
, "%.4x",
8703 (unsigned short)dsc
->modinsn
[i
]);
8705 fprintf_unfiltered (gdb_stdlog
, " at %.8lx\n",
8706 (unsigned long) to
+ offset
);
8709 write_memory_unsigned_integer (to
+ offset
, size
,
8710 byte_order_for_code
,
8715 /* Choose the correct breakpoint instruction. */
8718 bkp_insn
= tdep
->thumb_breakpoint
;
8719 len
= tdep
->thumb_breakpoint_size
;
8723 bkp_insn
= tdep
->arm_breakpoint
;
8724 len
= tdep
->arm_breakpoint_size
;
8727 /* Put breakpoint afterwards. */
8728 write_memory (to
+ offset
, bkp_insn
, len
);
8730 if (debug_displaced
)
8731 fprintf_unfiltered (gdb_stdlog
, "displaced: copy %s->%s: ",
8732 paddress (gdbarch
, from
), paddress (gdbarch
, to
));
8735 /* Entry point for copying an instruction into scratch space for displaced
8738 struct displaced_step_closure
*
8739 arm_displaced_step_copy_insn (struct gdbarch
*gdbarch
,
8740 CORE_ADDR from
, CORE_ADDR to
,
8741 struct regcache
*regs
)
8743 struct displaced_step_closure
*dsc
= XNEW (struct displaced_step_closure
);
8745 arm_process_displaced_insn (gdbarch
, from
, to
, regs
, dsc
);
8746 arm_displaced_init_closure (gdbarch
, from
, to
, dsc
);
8751 /* Entry point for cleaning things up after a displaced instruction has been
8755 arm_displaced_step_fixup (struct gdbarch
*gdbarch
,
8756 struct displaced_step_closure
*dsc
,
8757 CORE_ADDR from
, CORE_ADDR to
,
8758 struct regcache
*regs
)
8761 dsc
->cleanup (gdbarch
, regs
, dsc
);
8763 if (!dsc
->wrote_to_pc
)
8764 regcache_cooked_write_unsigned (regs
, ARM_PC_REGNUM
,
8765 dsc
->insn_addr
+ dsc
->insn_size
);
8769 #include "bfd-in2.h"
8770 #include "libcoff.h"
8773 gdb_print_insn_arm (bfd_vma memaddr
, disassemble_info
*info
)
8775 struct gdbarch
*gdbarch
= (struct gdbarch
*) info
->application_data
;
8777 if (arm_pc_is_thumb (gdbarch
, memaddr
))
8779 static asymbol
*asym
;
8780 static combined_entry_type ce
;
8781 static struct coff_symbol_struct csym
;
8782 static struct bfd fake_bfd
;
8783 static bfd_target fake_target
;
8785 if (csym
.native
== NULL
)
8787 /* Create a fake symbol vector containing a Thumb symbol.
8788 This is solely so that the code in print_insn_little_arm()
8789 and print_insn_big_arm() in opcodes/arm-dis.c will detect
8790 the presence of a Thumb symbol and switch to decoding
8791 Thumb instructions. */
8793 fake_target
.flavour
= bfd_target_coff_flavour
;
8794 fake_bfd
.xvec
= &fake_target
;
8795 ce
.u
.syment
.n_sclass
= C_THUMBEXTFUNC
;
8797 csym
.symbol
.the_bfd
= &fake_bfd
;
8798 csym
.symbol
.name
= "fake";
8799 asym
= (asymbol
*) & csym
;
8802 memaddr
= UNMAKE_THUMB_ADDR (memaddr
);
8803 info
->symbols
= &asym
;
8806 info
->symbols
= NULL
;
8808 if (info
->endian
== BFD_ENDIAN_BIG
)
8809 return print_insn_big_arm (memaddr
, info
);
8811 return print_insn_little_arm (memaddr
, info
);
8814 /* The following define instruction sequences that will cause ARM
8815 cpu's to take an undefined instruction trap. These are used to
8816 signal a breakpoint to GDB.
8818 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8819 modes. A different instruction is required for each mode. The ARM
8820 cpu's can also be big or little endian. Thus four different
8821 instructions are needed to support all cases.
8823 Note: ARMv4 defines several new instructions that will take the
8824 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8825 not in fact add the new instructions. The new undefined
8826 instructions in ARMv4 are all instructions that had no defined
8827 behaviour in earlier chips. There is no guarantee that they will
8828 raise an exception, but may be treated as NOP's. In practice, it
8829 may only safe to rely on instructions matching:
8831 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8832 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8833 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8835 Even this may only true if the condition predicate is true. The
8836 following use a condition predicate of ALWAYS so it is always TRUE.
8838 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8839 and NetBSD all use a software interrupt rather than an undefined
8840 instruction to force a trap. This can be handled by by the
8841 abi-specific code during establishment of the gdbarch vector. */
8843 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8844 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8845 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8846 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8848 static const gdb_byte arm_default_arm_le_breakpoint
[] = ARM_LE_BREAKPOINT
;
8849 static const gdb_byte arm_default_arm_be_breakpoint
[] = ARM_BE_BREAKPOINT
;
8850 static const gdb_byte arm_default_thumb_le_breakpoint
[] = THUMB_LE_BREAKPOINT
;
8851 static const gdb_byte arm_default_thumb_be_breakpoint
[] = THUMB_BE_BREAKPOINT
;
8853 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
8854 the program counter value to determine whether a 16-bit or 32-bit
8855 breakpoint should be used. It returns a pointer to a string of
8856 bytes that encode a breakpoint instruction, stores the length of
8857 the string to *lenptr, and adjusts the program counter (if
8858 necessary) to point to the actual memory location where the
8859 breakpoint should be inserted. */
8861 static const unsigned char *
8862 arm_breakpoint_from_pc (struct gdbarch
*gdbarch
, CORE_ADDR
*pcptr
, int *lenptr
)
8864 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
8865 enum bfd_endian byte_order_for_code
= gdbarch_byte_order_for_code (gdbarch
);
8867 if (arm_pc_is_thumb (gdbarch
, *pcptr
))
8869 *pcptr
= UNMAKE_THUMB_ADDR (*pcptr
);
8871 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8872 check whether we are replacing a 32-bit instruction. */
8873 if (tdep
->thumb2_breakpoint
!= NULL
)
8876 if (target_read_memory (*pcptr
, buf
, 2) == 0)
8878 unsigned short inst1
;
8879 inst1
= extract_unsigned_integer (buf
, 2, byte_order_for_code
);
8880 if (thumb_insn_size (inst1
) == 4)
8882 *lenptr
= tdep
->thumb2_breakpoint_size
;
8883 return tdep
->thumb2_breakpoint
;
8888 *lenptr
= tdep
->thumb_breakpoint_size
;
8889 return tdep
->thumb_breakpoint
;
8893 *lenptr
= tdep
->arm_breakpoint_size
;
8894 return tdep
->arm_breakpoint
;
8899 arm_remote_breakpoint_from_pc (struct gdbarch
*gdbarch
, CORE_ADDR
*pcptr
,
8902 arm_breakpoint_from_pc (gdbarch
, pcptr
, kindptr
);
8904 if (arm_pc_is_thumb (gdbarch
, *pcptr
) && *kindptr
== 4)
8905 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8906 that this is not confused with a 32-bit ARM breakpoint. */
8910 /* Extract from an array REGBUF containing the (raw) register state a
8911 function return value of type TYPE, and copy that, in virtual
8912 format, into VALBUF. */
8915 arm_extract_return_value (struct type
*type
, struct regcache
*regs
,
8918 struct gdbarch
*gdbarch
= get_regcache_arch (regs
);
8919 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
8921 if (TYPE_CODE_FLT
== TYPE_CODE (type
))
8923 switch (gdbarch_tdep (gdbarch
)->fp_model
)
8927 /* The value is in register F0 in internal format. We need to
8928 extract the raw value and then convert it to the desired
8930 bfd_byte tmpbuf
[FP_REGISTER_SIZE
];
8932 regcache_cooked_read (regs
, ARM_F0_REGNUM
, tmpbuf
);
8933 convert_from_extended (floatformat_from_type (type
), tmpbuf
,
8934 valbuf
, gdbarch_byte_order (gdbarch
));
8938 case ARM_FLOAT_SOFT_FPA
:
8939 case ARM_FLOAT_SOFT_VFP
:
8940 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8941 not using the VFP ABI code. */
8943 regcache_cooked_read (regs
, ARM_A1_REGNUM
, valbuf
);
8944 if (TYPE_LENGTH (type
) > 4)
8945 regcache_cooked_read (regs
, ARM_A1_REGNUM
+ 1,
8946 valbuf
+ INT_REGISTER_SIZE
);
8950 internal_error (__FILE__
, __LINE__
,
8951 _("arm_extract_return_value: "
8952 "Floating point model not supported"));
8956 else if (TYPE_CODE (type
) == TYPE_CODE_INT
8957 || TYPE_CODE (type
) == TYPE_CODE_CHAR
8958 || TYPE_CODE (type
) == TYPE_CODE_BOOL
8959 || TYPE_CODE (type
) == TYPE_CODE_PTR
8960 || TYPE_CODE (type
) == TYPE_CODE_REF
8961 || TYPE_CODE (type
) == TYPE_CODE_ENUM
)
8963 /* If the type is a plain integer, then the access is
8964 straight-forward. Otherwise we have to play around a bit
8966 int len
= TYPE_LENGTH (type
);
8967 int regno
= ARM_A1_REGNUM
;
8972 /* By using store_unsigned_integer we avoid having to do
8973 anything special for small big-endian values. */
8974 regcache_cooked_read_unsigned (regs
, regno
++, &tmp
);
8975 store_unsigned_integer (valbuf
,
8976 (len
> INT_REGISTER_SIZE
8977 ? INT_REGISTER_SIZE
: len
),
8979 len
-= INT_REGISTER_SIZE
;
8980 valbuf
+= INT_REGISTER_SIZE
;
8985 /* For a structure or union the behaviour is as if the value had
8986 been stored to word-aligned memory and then loaded into
8987 registers with 32-bit load instruction(s). */
8988 int len
= TYPE_LENGTH (type
);
8989 int regno
= ARM_A1_REGNUM
;
8990 bfd_byte tmpbuf
[INT_REGISTER_SIZE
];
8994 regcache_cooked_read (regs
, regno
++, tmpbuf
);
8995 memcpy (valbuf
, tmpbuf
,
8996 len
> INT_REGISTER_SIZE
? INT_REGISTER_SIZE
: len
);
8997 len
-= INT_REGISTER_SIZE
;
8998 valbuf
+= INT_REGISTER_SIZE
;
9004 /* Will a function return an aggregate type in memory or in a
9005 register? Return 0 if an aggregate type can be returned in a
9006 register, 1 if it must be returned in memory. */
9009 arm_return_in_memory (struct gdbarch
*gdbarch
, struct type
*type
)
9012 enum type_code code
;
9014 type
= check_typedef (type
);
9016 /* In the ARM ABI, "integer" like aggregate types are returned in
9017 registers. For an aggregate type to be integer like, its size
9018 must be less than or equal to INT_REGISTER_SIZE and the
9019 offset of each addressable subfield must be zero. Note that bit
9020 fields are not addressable, and all addressable subfields of
9021 unions always start at offset zero.
9023 This function is based on the behaviour of GCC 2.95.1.
9024 See: gcc/arm.c: arm_return_in_memory() for details.
9026 Note: All versions of GCC before GCC 2.95.2 do not set up the
9027 parameters correctly for a function returning the following
9028 structure: struct { float f;}; This should be returned in memory,
9029 not a register. Richard Earnshaw sent me a patch, but I do not
9030 know of any way to detect if a function like the above has been
9031 compiled with the correct calling convention. */
9033 /* All aggregate types that won't fit in a register must be returned
9035 if (TYPE_LENGTH (type
) > INT_REGISTER_SIZE
)
9040 /* The AAPCS says all aggregates not larger than a word are returned
9042 if (gdbarch_tdep (gdbarch
)->arm_abi
!= ARM_ABI_APCS
)
9045 /* The only aggregate types that can be returned in a register are
9046 structs and unions. Arrays must be returned in memory. */
9047 code
= TYPE_CODE (type
);
9048 if ((TYPE_CODE_STRUCT
!= code
) && (TYPE_CODE_UNION
!= code
))
9053 /* Assume all other aggregate types can be returned in a register.
9054 Run a check for structures, unions and arrays. */
9057 if ((TYPE_CODE_STRUCT
== code
) || (TYPE_CODE_UNION
== code
))
9060 /* Need to check if this struct/union is "integer" like. For
9061 this to be true, its size must be less than or equal to
9062 INT_REGISTER_SIZE and the offset of each addressable
9063 subfield must be zero. Note that bit fields are not
9064 addressable, and unions always start at offset zero. If any
9065 of the subfields is a floating point type, the struct/union
9066 cannot be an integer type. */
9068 /* For each field in the object, check:
9069 1) Is it FP? --> yes, nRc = 1;
9070 2) Is it addressable (bitpos != 0) and
9071 not packed (bitsize == 0)?
9075 for (i
= 0; i
< TYPE_NFIELDS (type
); i
++)
9077 enum type_code field_type_code
;
9078 field_type_code
= TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type
,
9081 /* Is it a floating point type field? */
9082 if (field_type_code
== TYPE_CODE_FLT
)
9088 /* If bitpos != 0, then we have to care about it. */
9089 if (TYPE_FIELD_BITPOS (type
, i
) != 0)
9091 /* Bitfields are not addressable. If the field bitsize is
9092 zero, then the field is not packed. Hence it cannot be
9093 a bitfield or any other packed type. */
9094 if (TYPE_FIELD_BITSIZE (type
, i
) == 0)
9106 /* Write into appropriate registers a function return value of type
9107 TYPE, given in virtual format. */
9110 arm_store_return_value (struct type
*type
, struct regcache
*regs
,
9111 const gdb_byte
*valbuf
)
9113 struct gdbarch
*gdbarch
= get_regcache_arch (regs
);
9114 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
9116 if (TYPE_CODE (type
) == TYPE_CODE_FLT
)
9118 gdb_byte buf
[MAX_REGISTER_SIZE
];
9120 switch (gdbarch_tdep (gdbarch
)->fp_model
)
9124 convert_to_extended (floatformat_from_type (type
), buf
, valbuf
,
9125 gdbarch_byte_order (gdbarch
));
9126 regcache_cooked_write (regs
, ARM_F0_REGNUM
, buf
);
9129 case ARM_FLOAT_SOFT_FPA
:
9130 case ARM_FLOAT_SOFT_VFP
:
9131 /* ARM_FLOAT_VFP can arise if this is a variadic function so
9132 not using the VFP ABI code. */
9134 regcache_cooked_write (regs
, ARM_A1_REGNUM
, valbuf
);
9135 if (TYPE_LENGTH (type
) > 4)
9136 regcache_cooked_write (regs
, ARM_A1_REGNUM
+ 1,
9137 valbuf
+ INT_REGISTER_SIZE
);
9141 internal_error (__FILE__
, __LINE__
,
9142 _("arm_store_return_value: Floating "
9143 "point model not supported"));
9147 else if (TYPE_CODE (type
) == TYPE_CODE_INT
9148 || TYPE_CODE (type
) == TYPE_CODE_CHAR
9149 || TYPE_CODE (type
) == TYPE_CODE_BOOL
9150 || TYPE_CODE (type
) == TYPE_CODE_PTR
9151 || TYPE_CODE (type
) == TYPE_CODE_REF
9152 || TYPE_CODE (type
) == TYPE_CODE_ENUM
)
9154 if (TYPE_LENGTH (type
) <= 4)
9156 /* Values of one word or less are zero/sign-extended and
9158 bfd_byte tmpbuf
[INT_REGISTER_SIZE
];
9159 LONGEST val
= unpack_long (type
, valbuf
);
9161 store_signed_integer (tmpbuf
, INT_REGISTER_SIZE
, byte_order
, val
);
9162 regcache_cooked_write (regs
, ARM_A1_REGNUM
, tmpbuf
);
9166 /* Integral values greater than one word are stored in consecutive
9167 registers starting with r0. This will always be a multiple of
9168 the regiser size. */
9169 int len
= TYPE_LENGTH (type
);
9170 int regno
= ARM_A1_REGNUM
;
9174 regcache_cooked_write (regs
, regno
++, valbuf
);
9175 len
-= INT_REGISTER_SIZE
;
9176 valbuf
+= INT_REGISTER_SIZE
;
9182 /* For a structure or union the behaviour is as if the value had
9183 been stored to word-aligned memory and then loaded into
9184 registers with 32-bit load instruction(s). */
9185 int len
= TYPE_LENGTH (type
);
9186 int regno
= ARM_A1_REGNUM
;
9187 bfd_byte tmpbuf
[INT_REGISTER_SIZE
];
9191 memcpy (tmpbuf
, valbuf
,
9192 len
> INT_REGISTER_SIZE
? INT_REGISTER_SIZE
: len
);
9193 regcache_cooked_write (regs
, regno
++, tmpbuf
);
9194 len
-= INT_REGISTER_SIZE
;
9195 valbuf
+= INT_REGISTER_SIZE
;
9201 /* Handle function return values. */
9203 static enum return_value_convention
9204 arm_return_value (struct gdbarch
*gdbarch
, struct value
*function
,
9205 struct type
*valtype
, struct regcache
*regcache
,
9206 gdb_byte
*readbuf
, const gdb_byte
*writebuf
)
9208 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
9209 struct type
*func_type
= function
? value_type (function
) : NULL
;
9210 enum arm_vfp_cprc_base_type vfp_base_type
;
9213 if (arm_vfp_abi_for_function (gdbarch
, func_type
)
9214 && arm_vfp_call_candidate (valtype
, &vfp_base_type
, &vfp_base_count
))
9216 int reg_char
= arm_vfp_cprc_reg_char (vfp_base_type
);
9217 int unit_length
= arm_vfp_cprc_unit_length (vfp_base_type
);
9219 for (i
= 0; i
< vfp_base_count
; i
++)
9221 if (reg_char
== 'q')
9224 arm_neon_quad_write (gdbarch
, regcache
, i
,
9225 writebuf
+ i
* unit_length
);
9228 arm_neon_quad_read (gdbarch
, regcache
, i
,
9229 readbuf
+ i
* unit_length
);
9236 xsnprintf (name_buf
, sizeof (name_buf
), "%c%d", reg_char
, i
);
9237 regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9240 regcache_cooked_write (regcache
, regnum
,
9241 writebuf
+ i
* unit_length
);
9243 regcache_cooked_read (regcache
, regnum
,
9244 readbuf
+ i
* unit_length
);
9247 return RETURN_VALUE_REGISTER_CONVENTION
;
9250 if (TYPE_CODE (valtype
) == TYPE_CODE_STRUCT
9251 || TYPE_CODE (valtype
) == TYPE_CODE_UNION
9252 || TYPE_CODE (valtype
) == TYPE_CODE_ARRAY
)
9254 if (tdep
->struct_return
== pcc_struct_return
9255 || arm_return_in_memory (gdbarch
, valtype
))
9256 return RETURN_VALUE_STRUCT_CONVENTION
;
9259 /* AAPCS returns complex types longer than a register in memory. */
9260 if (tdep
->arm_abi
!= ARM_ABI_APCS
9261 && TYPE_CODE (valtype
) == TYPE_CODE_COMPLEX
9262 && TYPE_LENGTH (valtype
) > INT_REGISTER_SIZE
)
9263 return RETURN_VALUE_STRUCT_CONVENTION
;
9266 arm_store_return_value (valtype
, regcache
, writebuf
);
9269 arm_extract_return_value (valtype
, regcache
, readbuf
);
9271 return RETURN_VALUE_REGISTER_CONVENTION
;
9276 arm_get_longjmp_target (struct frame_info
*frame
, CORE_ADDR
*pc
)
9278 struct gdbarch
*gdbarch
= get_frame_arch (frame
);
9279 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
9280 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
9282 gdb_byte buf
[INT_REGISTER_SIZE
];
9284 jb_addr
= get_frame_register_unsigned (frame
, ARM_A1_REGNUM
);
9286 if (target_read_memory (jb_addr
+ tdep
->jb_pc
* tdep
->jb_elt_size
, buf
,
9290 *pc
= extract_unsigned_integer (buf
, INT_REGISTER_SIZE
, byte_order
);
9294 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9295 return the target PC. Otherwise return 0. */
9298 arm_skip_stub (struct frame_info
*frame
, CORE_ADDR pc
)
9302 CORE_ADDR start_addr
;
9304 /* Find the starting address and name of the function containing the PC. */
9305 if (find_pc_partial_function (pc
, &name
, &start_addr
, NULL
) == 0)
9307 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
9309 start_addr
= arm_skip_bx_reg (frame
, pc
);
9310 if (start_addr
!= 0)
9316 /* If PC is in a Thumb call or return stub, return the address of the
9317 target PC, which is in a register. The thunk functions are called
9318 _call_via_xx, where x is the register name. The possible names
9319 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9320 functions, named __ARM_call_via_r[0-7]. */
9321 if (startswith (name
, "_call_via_")
9322 || startswith (name
, "__ARM_call_via_"))
9324 /* Use the name suffix to determine which register contains the
9326 static char *table
[15] =
9327 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9328 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9331 int offset
= strlen (name
) - 2;
9333 for (regno
= 0; regno
<= 14; regno
++)
9334 if (strcmp (&name
[offset
], table
[regno
]) == 0)
9335 return get_frame_register_unsigned (frame
, regno
);
9338 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9339 non-interworking calls to foo. We could decode the stubs
9340 to find the target but it's easier to use the symbol table. */
9341 namelen
= strlen (name
);
9342 if (name
[0] == '_' && name
[1] == '_'
9343 && ((namelen
> 2 + strlen ("_from_thumb")
9344 && startswith (name
+ namelen
- strlen ("_from_thumb"), "_from_thumb"))
9345 || (namelen
> 2 + strlen ("_from_arm")
9346 && startswith (name
+ namelen
- strlen ("_from_arm"), "_from_arm"))))
9349 int target_len
= namelen
- 2;
9350 struct bound_minimal_symbol minsym
;
9351 struct objfile
*objfile
;
9352 struct obj_section
*sec
;
9354 if (name
[namelen
- 1] == 'b')
9355 target_len
-= strlen ("_from_thumb");
9357 target_len
-= strlen ("_from_arm");
9359 target_name
= (char *) alloca (target_len
+ 1);
9360 memcpy (target_name
, name
+ 2, target_len
);
9361 target_name
[target_len
] = '\0';
9363 sec
= find_pc_section (pc
);
9364 objfile
= (sec
== NULL
) ? NULL
: sec
->objfile
;
9365 minsym
= lookup_minimal_symbol (target_name
, NULL
, objfile
);
9366 if (minsym
.minsym
!= NULL
)
9367 return BMSYMBOL_VALUE_ADDRESS (minsym
);
9372 return 0; /* not a stub */
9376 set_arm_command (char *args
, int from_tty
)
9378 printf_unfiltered (_("\
9379 \"set arm\" must be followed by an apporpriate subcommand.\n"));
9380 help_list (setarmcmdlist
, "set arm ", all_commands
, gdb_stdout
);
9384 show_arm_command (char *args
, int from_tty
)
9386 cmd_show_list (showarmcmdlist
, from_tty
, "");
9390 arm_update_current_architecture (void)
9392 struct gdbarch_info info
;
9394 /* If the current architecture is not ARM, we have nothing to do. */
9395 if (gdbarch_bfd_arch_info (target_gdbarch ())->arch
!= bfd_arch_arm
)
9398 /* Update the architecture. */
9399 gdbarch_info_init (&info
);
9401 if (!gdbarch_update_p (info
))
9402 internal_error (__FILE__
, __LINE__
, _("could not update architecture"));
9406 set_fp_model_sfunc (char *args
, int from_tty
,
9407 struct cmd_list_element
*c
)
9411 for (fp_model
= ARM_FLOAT_AUTO
; fp_model
!= ARM_FLOAT_LAST
; fp_model
++)
9412 if (strcmp (current_fp_model
, fp_model_strings
[fp_model
]) == 0)
9414 arm_fp_model
= (enum arm_float_model
) fp_model
;
9418 if (fp_model
== ARM_FLOAT_LAST
)
9419 internal_error (__FILE__
, __LINE__
, _("Invalid fp model accepted: %s."),
9422 arm_update_current_architecture ();
9426 show_fp_model (struct ui_file
*file
, int from_tty
,
9427 struct cmd_list_element
*c
, const char *value
)
9429 struct gdbarch_tdep
*tdep
= gdbarch_tdep (target_gdbarch ());
9431 if (arm_fp_model
== ARM_FLOAT_AUTO
9432 && gdbarch_bfd_arch_info (target_gdbarch ())->arch
== bfd_arch_arm
)
9433 fprintf_filtered (file
, _("\
9434 The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
9435 fp_model_strings
[tdep
->fp_model
]);
9437 fprintf_filtered (file
, _("\
9438 The current ARM floating point model is \"%s\".\n"),
9439 fp_model_strings
[arm_fp_model
]);
9443 arm_set_abi (char *args
, int from_tty
,
9444 struct cmd_list_element
*c
)
9448 for (arm_abi
= ARM_ABI_AUTO
; arm_abi
!= ARM_ABI_LAST
; arm_abi
++)
9449 if (strcmp (arm_abi_string
, arm_abi_strings
[arm_abi
]) == 0)
9451 arm_abi_global
= (enum arm_abi_kind
) arm_abi
;
9455 if (arm_abi
== ARM_ABI_LAST
)
9456 internal_error (__FILE__
, __LINE__
, _("Invalid ABI accepted: %s."),
9459 arm_update_current_architecture ();
9463 arm_show_abi (struct ui_file
*file
, int from_tty
,
9464 struct cmd_list_element
*c
, const char *value
)
9466 struct gdbarch_tdep
*tdep
= gdbarch_tdep (target_gdbarch ());
9468 if (arm_abi_global
== ARM_ABI_AUTO
9469 && gdbarch_bfd_arch_info (target_gdbarch ())->arch
== bfd_arch_arm
)
9470 fprintf_filtered (file
, _("\
9471 The current ARM ABI is \"auto\" (currently \"%s\").\n"),
9472 arm_abi_strings
[tdep
->arm_abi
]);
9474 fprintf_filtered (file
, _("The current ARM ABI is \"%s\".\n"),
9479 arm_show_fallback_mode (struct ui_file
*file
, int from_tty
,
9480 struct cmd_list_element
*c
, const char *value
)
9482 fprintf_filtered (file
,
9483 _("The current execution mode assumed "
9484 "(when symbols are unavailable) is \"%s\".\n"),
9485 arm_fallback_mode_string
);
9489 arm_show_force_mode (struct ui_file
*file
, int from_tty
,
9490 struct cmd_list_element
*c
, const char *value
)
9492 struct gdbarch_tdep
*tdep
= gdbarch_tdep (target_gdbarch ());
9494 fprintf_filtered (file
,
9495 _("The current execution mode assumed "
9496 "(even when symbols are available) is \"%s\".\n"),
9497 arm_force_mode_string
);
/* If the user changes the register disassembly style used for info
   register and other commands, we have to also switch the style used
   in opcodes for disassembly output.  This function is run in the "set
   arm disassembly" command, and does that.  */

static void
set_disassembly_style_sfunc (char *args, int from_tty,
			     struct cmd_list_element *c)
{
  set_disassembly_style ();
}
9512 /* Return the ARM register name corresponding to register I. */
9514 arm_register_name (struct gdbarch
*gdbarch
, int i
)
9516 const int num_regs
= gdbarch_num_regs (gdbarch
);
9518 if (gdbarch_tdep (gdbarch
)->have_vfp_pseudos
9519 && i
>= num_regs
&& i
< num_regs
+ 32)
9521 static const char *const vfp_pseudo_names
[] = {
9522 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9523 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9524 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9525 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9528 return vfp_pseudo_names
[i
- num_regs
];
9531 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
9532 && i
>= num_regs
+ 32 && i
< num_regs
+ 32 + 16)
9534 static const char *const neon_pseudo_names
[] = {
9535 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9536 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9539 return neon_pseudo_names
[i
- num_regs
- 32];
9542 if (i
>= ARRAY_SIZE (arm_register_names
))
9543 /* These registers are only supported on targets which supply
9544 an XML description. */
9547 return arm_register_names
[i
];
9551 set_disassembly_style (void)
9555 /* Find the style that the user wants. */
9556 for (current
= 0; current
< num_disassembly_options
; current
++)
9557 if (disassembly_style
== valid_disassembly_styles
[current
])
9559 gdb_assert (current
< num_disassembly_options
);
9561 /* Synchronize the disassembler. */
9562 set_arm_regname_option (current
);
9565 /* Test whether the coff symbol specific value corresponds to a Thumb
9569 coff_sym_is_thumb (int val
)
9571 return (val
== C_THUMBEXT
9572 || val
== C_THUMBSTAT
9573 || val
== C_THUMBEXTFUNC
9574 || val
== C_THUMBSTATFUNC
9575 || val
== C_THUMBLABEL
);
9578 /* arm_coff_make_msymbol_special()
9579 arm_elf_make_msymbol_special()
9581 These functions test whether the COFF or ELF symbol corresponds to
9582 an address in thumb code, and set a "special" bit in a minimal
9583 symbol to indicate that it does. */
9586 arm_elf_make_msymbol_special(asymbol
*sym
, struct minimal_symbol
*msym
)
9588 if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type
*)sym
)->internal_elf_sym
)
9589 == ST_BRANCH_TO_THUMB
)
9590 MSYMBOL_SET_SPECIAL (msym
);
/* Mark MSYM as Thumb ("special") when the COFF symbol value VAL uses
   one of the Thumb storage classes.  */

static void
arm_coff_make_msymbol_special (int val, struct minimal_symbol *msym)
{
  if (coff_sym_is_thumb (val))
    MSYMBOL_SET_SPECIAL (msym);
}
9601 arm_objfile_data_free (struct objfile
*objfile
, void *arg
)
9603 struct arm_per_objfile
*data
= (struct arm_per_objfile
*) arg
;
9606 for (i
= 0; i
< objfile
->obfd
->section_count
; i
++)
9607 VEC_free (arm_mapping_symbol_s
, data
->section_maps
[i
]);
9611 arm_record_special_symbol (struct gdbarch
*gdbarch
, struct objfile
*objfile
,
9614 const char *name
= bfd_asymbol_name (sym
);
9615 struct arm_per_objfile
*data
;
9616 VEC(arm_mapping_symbol_s
) **map_p
;
9617 struct arm_mapping_symbol new_map_sym
;
9619 gdb_assert (name
[0] == '$');
9620 if (name
[1] != 'a' && name
[1] != 't' && name
[1] != 'd')
9623 data
= (struct arm_per_objfile
*) objfile_data (objfile
,
9624 arm_objfile_data_key
);
9627 data
= OBSTACK_ZALLOC (&objfile
->objfile_obstack
,
9628 struct arm_per_objfile
);
9629 set_objfile_data (objfile
, arm_objfile_data_key
, data
);
9630 data
->section_maps
= OBSTACK_CALLOC (&objfile
->objfile_obstack
,
9631 objfile
->obfd
->section_count
,
9632 VEC(arm_mapping_symbol_s
) *);
9634 map_p
= &data
->section_maps
[bfd_get_section (sym
)->index
];
9636 new_map_sym
.value
= sym
->value
;
9637 new_map_sym
.type
= name
[1];
9639 /* Assume that most mapping symbols appear in order of increasing
9640 value. If they were randomly distributed, it would be faster to
9641 always push here and then sort at first use. */
9642 if (!VEC_empty (arm_mapping_symbol_s
, *map_p
))
9644 struct arm_mapping_symbol
*prev_map_sym
;
9646 prev_map_sym
= VEC_last (arm_mapping_symbol_s
, *map_p
);
9647 if (prev_map_sym
->value
>= sym
->value
)
9650 idx
= VEC_lower_bound (arm_mapping_symbol_s
, *map_p
, &new_map_sym
,
9651 arm_compare_mapping_symbols
);
9652 VEC_safe_insert (arm_mapping_symbol_s
, *map_p
, idx
, &new_map_sym
);
9657 VEC_safe_push (arm_mapping_symbol_s
, *map_p
, &new_map_sym
);
9661 arm_write_pc (struct regcache
*regcache
, CORE_ADDR pc
)
9663 struct gdbarch
*gdbarch
= get_regcache_arch (regcache
);
9664 regcache_cooked_write_unsigned (regcache
, ARM_PC_REGNUM
, pc
);
9666 /* If necessary, set the T bit. */
9669 ULONGEST val
, t_bit
;
9670 regcache_cooked_read_unsigned (regcache
, ARM_PS_REGNUM
, &val
);
9671 t_bit
= arm_psr_thumb_bit (gdbarch
);
9672 if (arm_pc_is_thumb (gdbarch
, pc
))
9673 regcache_cooked_write_unsigned (regcache
, ARM_PS_REGNUM
,
9676 regcache_cooked_write_unsigned (regcache
, ARM_PS_REGNUM
,
9681 /* Read the contents of a NEON quad register, by reading from two
9682 double registers. This is used to implement the quad pseudo
9683 registers, and for argument passing in case the quad registers are
9684 missing; vectors are passed in quad registers when using the VFP
9685 ABI, even if a NEON unit is not present. REGNUM is the index of
9686 the quad register, in [0, 15]. */
9688 static enum register_status
9689 arm_neon_quad_read (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
9690 int regnum
, gdb_byte
*buf
)
9693 gdb_byte reg_buf
[8];
9694 int offset
, double_regnum
;
9695 enum register_status status
;
9697 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
<< 1);
9698 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9701 /* d0 is always the least significant half of q0. */
9702 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
9707 status
= regcache_raw_read (regcache
, double_regnum
, reg_buf
);
9708 if (status
!= REG_VALID
)
9710 memcpy (buf
+ offset
, reg_buf
, 8);
9712 offset
= 8 - offset
;
9713 status
= regcache_raw_read (regcache
, double_regnum
+ 1, reg_buf
);
9714 if (status
!= REG_VALID
)
9716 memcpy (buf
+ offset
, reg_buf
, 8);
9721 static enum register_status
9722 arm_pseudo_read (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
9723 int regnum
, gdb_byte
*buf
)
9725 const int num_regs
= gdbarch_num_regs (gdbarch
);
9727 gdb_byte reg_buf
[8];
9728 int offset
, double_regnum
;
9730 gdb_assert (regnum
>= num_regs
);
9733 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
&& regnum
>= 32 && regnum
< 48)
9734 /* Quad-precision register. */
9735 return arm_neon_quad_read (gdbarch
, regcache
, regnum
- 32, buf
);
9738 enum register_status status
;
9740 /* Single-precision register. */
9741 gdb_assert (regnum
< 32);
9743 /* s0 is always the least significant half of d0. */
9744 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
9745 offset
= (regnum
& 1) ? 0 : 4;
9747 offset
= (regnum
& 1) ? 4 : 0;
9749 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
>> 1);
9750 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9753 status
= regcache_raw_read (regcache
, double_regnum
, reg_buf
);
9754 if (status
== REG_VALID
)
9755 memcpy (buf
, reg_buf
+ offset
, 4);
9760 /* Store the contents of BUF to a NEON quad register, by writing to
9761 two double registers. This is used to implement the quad pseudo
9762 registers, and for argument passing in case the quad registers are
9763 missing; vectors are passed in quad registers when using the VFP
9764 ABI, even if a NEON unit is not present. REGNUM is the index
9765 of the quad register, in [0, 15]. */
9768 arm_neon_quad_write (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
9769 int regnum
, const gdb_byte
*buf
)
9772 int offset
, double_regnum
;
9774 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
<< 1);
9775 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9778 /* d0 is always the least significant half of q0. */
9779 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
9784 regcache_raw_write (regcache
, double_regnum
, buf
+ offset
);
9785 offset
= 8 - offset
;
9786 regcache_raw_write (regcache
, double_regnum
+ 1, buf
+ offset
);
9790 arm_pseudo_write (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
9791 int regnum
, const gdb_byte
*buf
)
9793 const int num_regs
= gdbarch_num_regs (gdbarch
);
9795 gdb_byte reg_buf
[8];
9796 int offset
, double_regnum
;
9798 gdb_assert (regnum
>= num_regs
);
9801 if (gdbarch_tdep (gdbarch
)->have_neon_pseudos
&& regnum
>= 32 && regnum
< 48)
9802 /* Quad-precision register. */
9803 arm_neon_quad_write (gdbarch
, regcache
, regnum
- 32, buf
);
9806 /* Single-precision register. */
9807 gdb_assert (regnum
< 32);
9809 /* s0 is always the least significant half of d0. */
9810 if (gdbarch_byte_order (gdbarch
) == BFD_ENDIAN_BIG
)
9811 offset
= (regnum
& 1) ? 0 : 4;
9813 offset
= (regnum
& 1) ? 4 : 0;
9815 xsnprintf (name_buf
, sizeof (name_buf
), "d%d", regnum
>> 1);
9816 double_regnum
= user_reg_map_name_to_regnum (gdbarch
, name_buf
,
9819 regcache_raw_read (regcache
, double_regnum
, reg_buf
);
9820 memcpy (reg_buf
+ offset
, buf
, 4);
9821 regcache_raw_write (regcache
, double_regnum
, reg_buf
);
/* user-reg read callback for the ARM register aliases.  BATON points
   at the int register number stored in arm_register_aliases.  */

static struct value *
value_of_arm_user_reg (struct frame_info *frame, const void *baton)
{
  const int *reg_p = (const int *) baton;

  return value_of_register (*reg_p, frame);
}
9832 static enum gdb_osabi
9833 arm_elf_osabi_sniffer (bfd
*abfd
)
9835 unsigned int elfosabi
;
9836 enum gdb_osabi osabi
= GDB_OSABI_UNKNOWN
;
9838 elfosabi
= elf_elfheader (abfd
)->e_ident
[EI_OSABI
];
9840 if (elfosabi
== ELFOSABI_ARM
)
9841 /* GNU tools use this value. Check note sections in this case,
9843 bfd_map_over_sections (abfd
,
9844 generic_elf_osabi_sniff_abi_tag_sections
,
9847 /* Anything else will be handled by the generic ELF sniffer. */
9852 arm_register_reggroup_p (struct gdbarch
*gdbarch
, int regnum
,
9853 struct reggroup
*group
)
9855 /* FPS register's type is INT, but belongs to float_reggroup. Beside
9856 this, FPS register belongs to save_regroup, restore_reggroup, and
9857 all_reggroup, of course. */
9858 if (regnum
== ARM_FPS_REGNUM
)
9859 return (group
== float_reggroup
9860 || group
== save_reggroup
9861 || group
== restore_reggroup
9862 || group
== all_reggroup
);
9864 return default_register_reggroup_p (gdbarch
, regnum
, group
);
9868 /* For backward-compatibility we allow two 'g' packet lengths with
9869 the remote protocol depending on whether FPA registers are
9870 supplied. M-profile targets do not have FPA registers, but some
9871 stubs already exist in the wild which use a 'g' packet which
9872 supplies them albeit with dummy values. The packet format which
9873 includes FPA registers should be considered deprecated for
9874 M-profile targets. */
9877 arm_register_g_packet_guesses (struct gdbarch
*gdbarch
)
9879 if (gdbarch_tdep (gdbarch
)->is_m
)
9881 /* If we know from the executable this is an M-profile target,
9882 cater for remote targets whose register set layout is the
9883 same as the FPA layout. */
9884 register_remote_g_packet_guess (gdbarch
,
9885 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
9886 (16 * INT_REGISTER_SIZE
)
9887 + (8 * FP_REGISTER_SIZE
)
9888 + (2 * INT_REGISTER_SIZE
),
9889 tdesc_arm_with_m_fpa_layout
);
9891 /* The regular M-profile layout. */
9892 register_remote_g_packet_guess (gdbarch
,
9893 /* r0-r12,sp,lr,pc; xpsr */
9894 (16 * INT_REGISTER_SIZE
)
9895 + INT_REGISTER_SIZE
,
9898 /* M-profile plus M4F VFP. */
9899 register_remote_g_packet_guess (gdbarch
,
9900 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
9901 (16 * INT_REGISTER_SIZE
)
9902 + (16 * VFP_REGISTER_SIZE
)
9903 + (2 * INT_REGISTER_SIZE
),
9904 tdesc_arm_with_m_vfp_d16
);
9907 /* Otherwise we don't have a useful guess. */
9911 /* Initialize the current architecture based on INFO. If possible,
9912 re-use an architecture from ARCHES, which is a list of
9913 architectures already created during this debugging session.
9915 Called e.g. at program startup, when reading a core file, and when
9916 reading a binary file. */
9918 static struct gdbarch
*
9919 arm_gdbarch_init (struct gdbarch_info info
, struct gdbarch_list
*arches
)
9921 struct gdbarch_tdep
*tdep
;
9922 struct gdbarch
*gdbarch
;
9923 struct gdbarch_list
*best_arch
;
9924 enum arm_abi_kind arm_abi
= arm_abi_global
;
9925 enum arm_float_model fp_model
= arm_fp_model
;
9926 struct tdesc_arch_data
*tdesc_data
= NULL
;
9928 int vfp_register_count
= 0, have_vfp_pseudos
= 0, have_neon_pseudos
= 0;
9929 int have_wmmx_registers
= 0;
9931 int have_fpa_registers
= 1;
9932 const struct target_desc
*tdesc
= info
.target_desc
;
9934 /* If we have an object to base this architecture on, try to determine
9937 if (arm_abi
== ARM_ABI_AUTO
&& info
.abfd
!= NULL
)
9939 int ei_osabi
, e_flags
;
9941 switch (bfd_get_flavour (info
.abfd
))
9943 case bfd_target_aout_flavour
:
9944 /* Assume it's an old APCS-style ABI. */
9945 arm_abi
= ARM_ABI_APCS
;
9948 case bfd_target_coff_flavour
:
9949 /* Assume it's an old APCS-style ABI. */
9951 arm_abi
= ARM_ABI_APCS
;
9954 case bfd_target_elf_flavour
:
9955 ei_osabi
= elf_elfheader (info
.abfd
)->e_ident
[EI_OSABI
];
9956 e_flags
= elf_elfheader (info
.abfd
)->e_flags
;
9958 if (ei_osabi
== ELFOSABI_ARM
)
9960 /* GNU tools used to use this value, but do not for EABI
9961 objects. There's nowhere to tag an EABI version
9962 anyway, so assume APCS. */
9963 arm_abi
= ARM_ABI_APCS
;
9965 else if (ei_osabi
== ELFOSABI_NONE
|| ei_osabi
== ELFOSABI_GNU
)
9967 int eabi_ver
= EF_ARM_EABI_VERSION (e_flags
);
9968 int attr_arch
, attr_profile
;
9972 case EF_ARM_EABI_UNKNOWN
:
9973 /* Assume GNU tools. */
9974 arm_abi
= ARM_ABI_APCS
;
9977 case EF_ARM_EABI_VER4
:
9978 case EF_ARM_EABI_VER5
:
9979 arm_abi
= ARM_ABI_AAPCS
;
9980 /* EABI binaries default to VFP float ordering.
9981 They may also contain build attributes that can
9982 be used to identify if the VFP argument-passing
9984 if (fp_model
== ARM_FLOAT_AUTO
)
9987 switch (bfd_elf_get_obj_attr_int (info
.abfd
,
9991 case AEABI_VFP_args_base
:
9992 /* "The user intended FP parameter/result
9993 passing to conform to AAPCS, base
9995 fp_model
= ARM_FLOAT_SOFT_VFP
;
9997 case AEABI_VFP_args_vfp
:
9998 /* "The user intended FP parameter/result
9999 passing to conform to AAPCS, VFP
10001 fp_model
= ARM_FLOAT_VFP
;
10003 case AEABI_VFP_args_toolchain
:
10004 /* "The user intended FP parameter/result
10005 passing to conform to tool chain-specific
10006 conventions" - we don't know any such
10007 conventions, so leave it as "auto". */
10009 case AEABI_VFP_args_compatible
:
10010 /* "Code is compatible with both the base
10011 and VFP variants; the user did not permit
10012 non-variadic functions to pass FP
10013 parameters/results" - leave it as
10017 /* Attribute value not mentioned in the
10018 November 2012 ABI, so leave it as
10023 fp_model
= ARM_FLOAT_SOFT_VFP
;
10029 /* Leave it as "auto". */
10030 warning (_("unknown ARM EABI version 0x%x"), eabi_ver
);
10035 /* Detect M-profile programs. This only works if the
10036 executable file includes build attributes; GCC does
10037 copy them to the executable, but e.g. RealView does
10039 attr_arch
= bfd_elf_get_obj_attr_int (info
.abfd
, OBJ_ATTR_PROC
,
10041 attr_profile
= bfd_elf_get_obj_attr_int (info
.abfd
,
10043 Tag_CPU_arch_profile
);
10044 /* GCC specifies the profile for v6-M; RealView only
10045 specifies the profile for architectures starting with
10046 V7 (as opposed to architectures with a tag
10047 numerically greater than TAG_CPU_ARCH_V7). */
10048 if (!tdesc_has_registers (tdesc
)
10049 && (attr_arch
== TAG_CPU_ARCH_V6_M
10050 || attr_arch
== TAG_CPU_ARCH_V6S_M
10051 || attr_profile
== 'M'))
10056 if (fp_model
== ARM_FLOAT_AUTO
)
10058 int e_flags
= elf_elfheader (info
.abfd
)->e_flags
;
10060 switch (e_flags
& (EF_ARM_SOFT_FLOAT
| EF_ARM_VFP_FLOAT
))
10063 /* Leave it as "auto". Strictly speaking this case
10064 means FPA, but almost nobody uses that now, and
10065 many toolchains fail to set the appropriate bits
10066 for the floating-point model they use. */
10068 case EF_ARM_SOFT_FLOAT
:
10069 fp_model
= ARM_FLOAT_SOFT_FPA
;
10071 case EF_ARM_VFP_FLOAT
:
10072 fp_model
= ARM_FLOAT_VFP
;
10074 case EF_ARM_SOFT_FLOAT
| EF_ARM_VFP_FLOAT
:
10075 fp_model
= ARM_FLOAT_SOFT_VFP
;
10080 if (e_flags
& EF_ARM_BE8
)
10081 info
.byte_order_for_code
= BFD_ENDIAN_LITTLE
;
10086 /* Leave it as "auto". */
10091 /* Check any target description for validity. */
10092 if (tdesc_has_registers (tdesc
))
10094 /* For most registers we require GDB's default names; but also allow
10095 the numeric names for sp / lr / pc, as a convenience. */
10096 static const char *const arm_sp_names
[] = { "r13", "sp", NULL
};
10097 static const char *const arm_lr_names
[] = { "r14", "lr", NULL
};
10098 static const char *const arm_pc_names
[] = { "r15", "pc", NULL
};
10100 const struct tdesc_feature
*feature
;
10103 feature
= tdesc_find_feature (tdesc
,
10104 "org.gnu.gdb.arm.core");
10105 if (feature
== NULL
)
10107 feature
= tdesc_find_feature (tdesc
,
10108 "org.gnu.gdb.arm.m-profile");
10109 if (feature
== NULL
)
10115 tdesc_data
= tdesc_data_alloc ();
10118 for (i
= 0; i
< ARM_SP_REGNUM
; i
++)
10119 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
, i
,
10120 arm_register_names
[i
]);
10121 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
10124 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
10127 valid_p
&= tdesc_numbered_register_choices (feature
, tdesc_data
,
10131 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
10132 ARM_PS_REGNUM
, "xpsr");
10134 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
10135 ARM_PS_REGNUM
, "cpsr");
10139 tdesc_data_cleanup (tdesc_data
);
10143 feature
= tdesc_find_feature (tdesc
,
10144 "org.gnu.gdb.arm.fpa");
10145 if (feature
!= NULL
)
10148 for (i
= ARM_F0_REGNUM
; i
<= ARM_FPS_REGNUM
; i
++)
10149 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
, i
,
10150 arm_register_names
[i
]);
10153 tdesc_data_cleanup (tdesc_data
);
10158 have_fpa_registers
= 0;
10160 feature
= tdesc_find_feature (tdesc
,
10161 "org.gnu.gdb.xscale.iwmmxt");
10162 if (feature
!= NULL
)
10164 static const char *const iwmmxt_names
[] = {
10165 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10166 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10167 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10168 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10172 for (i
= ARM_WR0_REGNUM
; i
<= ARM_WR15_REGNUM
; i
++)
10174 &= tdesc_numbered_register (feature
, tdesc_data
, i
,
10175 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
10177 /* Check for the control registers, but do not fail if they
10179 for (i
= ARM_WC0_REGNUM
; i
<= ARM_WCASF_REGNUM
; i
++)
10180 tdesc_numbered_register (feature
, tdesc_data
, i
,
10181 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
10183 for (i
= ARM_WCGR0_REGNUM
; i
<= ARM_WCGR3_REGNUM
; i
++)
10185 &= tdesc_numbered_register (feature
, tdesc_data
, i
,
10186 iwmmxt_names
[i
- ARM_WR0_REGNUM
]);
10190 tdesc_data_cleanup (tdesc_data
);
10194 have_wmmx_registers
= 1;
10197 /* If we have a VFP unit, check whether the single precision registers
10198 are present. If not, then we will synthesize them as pseudo
10200 feature
= tdesc_find_feature (tdesc
,
10201 "org.gnu.gdb.arm.vfp");
10202 if (feature
!= NULL
)
10204 static const char *const vfp_double_names
[] = {
10205 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10206 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10207 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10208 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10211 /* Require the double precision registers. There must be either
10214 for (i
= 0; i
< 32; i
++)
10216 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
10218 vfp_double_names
[i
]);
10222 if (!valid_p
&& i
== 16)
10225 /* Also require FPSCR. */
10226 valid_p
&= tdesc_numbered_register (feature
, tdesc_data
,
10227 ARM_FPSCR_REGNUM
, "fpscr");
10230 tdesc_data_cleanup (tdesc_data
);
10234 if (tdesc_unnumbered_register (feature
, "s0") == 0)
10235 have_vfp_pseudos
= 1;
10237 vfp_register_count
= i
;
10239 /* If we have VFP, also check for NEON. The architecture allows
10240 NEON without VFP (integer vector operations only), but GDB
10241 does not support that. */
10242 feature
= tdesc_find_feature (tdesc
,
10243 "org.gnu.gdb.arm.neon");
10244 if (feature
!= NULL
)
10246 /* NEON requires 32 double-precision registers. */
10249 tdesc_data_cleanup (tdesc_data
);
10253 /* If there are quad registers defined by the stub, use
10254 their type; otherwise (normally) provide them with
10255 the default type. */
10256 if (tdesc_unnumbered_register (feature
, "q0") == 0)
10257 have_neon_pseudos
= 1;
10264 /* If there is already a candidate, use it. */
10265 for (best_arch
= gdbarch_list_lookup_by_info (arches
, &info
);
10267 best_arch
= gdbarch_list_lookup_by_info (best_arch
->next
, &info
))
10269 if (arm_abi
!= ARM_ABI_AUTO
10270 && arm_abi
!= gdbarch_tdep (best_arch
->gdbarch
)->arm_abi
)
10273 if (fp_model
!= ARM_FLOAT_AUTO
10274 && fp_model
!= gdbarch_tdep (best_arch
->gdbarch
)->fp_model
)
10277 /* There are various other properties in tdep that we do not
10278 need to check here: those derived from a target description,
10279 since gdbarches with a different target description are
10280 automatically disqualified. */
10282 /* Do check is_m, though, since it might come from the binary. */
10283 if (is_m
!= gdbarch_tdep (best_arch
->gdbarch
)->is_m
)
10286 /* Found a match. */
10290 if (best_arch
!= NULL
)
10292 if (tdesc_data
!= NULL
)
10293 tdesc_data_cleanup (tdesc_data
);
10294 return best_arch
->gdbarch
;
10297 tdep
= XCNEW (struct gdbarch_tdep
);
10298 gdbarch
= gdbarch_alloc (&info
, tdep
);
10300 /* Record additional information about the architecture we are defining.
10301 These are gdbarch discriminators, like the OSABI. */
10302 tdep
->arm_abi
= arm_abi
;
10303 tdep
->fp_model
= fp_model
;
10305 tdep
->have_fpa_registers
= have_fpa_registers
;
10306 tdep
->have_wmmx_registers
= have_wmmx_registers
;
10307 gdb_assert (vfp_register_count
== 0
10308 || vfp_register_count
== 16
10309 || vfp_register_count
== 32);
10310 tdep
->vfp_register_count
= vfp_register_count
;
10311 tdep
->have_vfp_pseudos
= have_vfp_pseudos
;
10312 tdep
->have_neon_pseudos
= have_neon_pseudos
;
10313 tdep
->have_neon
= have_neon
;
10315 arm_register_g_packet_guesses (gdbarch
);
10318 switch (info
.byte_order_for_code
)
10320 case BFD_ENDIAN_BIG
:
10321 tdep
->arm_breakpoint
= arm_default_arm_be_breakpoint
;
10322 tdep
->arm_breakpoint_size
= sizeof (arm_default_arm_be_breakpoint
);
10323 tdep
->thumb_breakpoint
= arm_default_thumb_be_breakpoint
;
10324 tdep
->thumb_breakpoint_size
= sizeof (arm_default_thumb_be_breakpoint
);
10328 case BFD_ENDIAN_LITTLE
:
10329 tdep
->arm_breakpoint
= arm_default_arm_le_breakpoint
;
10330 tdep
->arm_breakpoint_size
= sizeof (arm_default_arm_le_breakpoint
);
10331 tdep
->thumb_breakpoint
= arm_default_thumb_le_breakpoint
;
10332 tdep
->thumb_breakpoint_size
= sizeof (arm_default_thumb_le_breakpoint
);
10337 internal_error (__FILE__
, __LINE__
,
10338 _("arm_gdbarch_init: bad byte order for float format"));
10341 /* On ARM targets char defaults to unsigned. */
10342 set_gdbarch_char_signed (gdbarch
, 0);
10344 /* Note: for displaced stepping, this includes the breakpoint, and one word
10345 of additional scratch space. This setting isn't used for anything beside
10346 displaced stepping at present. */
10347 set_gdbarch_max_insn_length (gdbarch
, 4 * DISPLACED_MODIFIED_INSNS
);
10349 /* This should be low enough for everything. */
10350 tdep
->lowest_pc
= 0x20;
10351 tdep
->jb_pc
= -1; /* Longjump support not enabled by default. */
10353 /* The default, for both APCS and AAPCS, is to return small
10354 structures in registers. */
10355 tdep
->struct_return
= reg_struct_return
;
10357 set_gdbarch_push_dummy_call (gdbarch
, arm_push_dummy_call
);
10358 set_gdbarch_frame_align (gdbarch
, arm_frame_align
);
10360 set_gdbarch_write_pc (gdbarch
, arm_write_pc
);
10362 /* Frame handling. */
10363 set_gdbarch_dummy_id (gdbarch
, arm_dummy_id
);
10364 set_gdbarch_unwind_pc (gdbarch
, arm_unwind_pc
);
10365 set_gdbarch_unwind_sp (gdbarch
, arm_unwind_sp
);
10367 frame_base_set_default (gdbarch
, &arm_normal_base
);
10369 /* Address manipulation. */
10370 set_gdbarch_addr_bits_remove (gdbarch
, arm_addr_bits_remove
);
10372 /* Advance PC across function entry code. */
10373 set_gdbarch_skip_prologue (gdbarch
, arm_skip_prologue
);
10375 /* Detect whether PC is at a point where the stack has been destroyed. */
10376 set_gdbarch_stack_frame_destroyed_p (gdbarch
, arm_stack_frame_destroyed_p
);
10378 /* Skip trampolines. */
10379 set_gdbarch_skip_trampoline_code (gdbarch
, arm_skip_stub
);
10381 /* The stack grows downward. */
10382 set_gdbarch_inner_than (gdbarch
, core_addr_lessthan
);
10384 /* Breakpoint manipulation. */
10385 set_gdbarch_breakpoint_from_pc (gdbarch
, arm_breakpoint_from_pc
);
10386 set_gdbarch_remote_breakpoint_from_pc (gdbarch
,
10387 arm_remote_breakpoint_from_pc
);
10389 /* Information about registers, etc. */
10390 set_gdbarch_sp_regnum (gdbarch
, ARM_SP_REGNUM
);
10391 set_gdbarch_pc_regnum (gdbarch
, ARM_PC_REGNUM
);
10392 set_gdbarch_num_regs (gdbarch
, ARM_NUM_REGS
);
10393 set_gdbarch_register_type (gdbarch
, arm_register_type
);
10394 set_gdbarch_register_reggroup_p (gdbarch
, arm_register_reggroup_p
);
10396 /* This "info float" is FPA-specific. Use the generic version if we
10397 do not have FPA. */
10398 if (gdbarch_tdep (gdbarch
)->have_fpa_registers
)
10399 set_gdbarch_print_float_info (gdbarch
, arm_print_float_info
);
10401 /* Internal <-> external register number maps. */
10402 set_gdbarch_dwarf2_reg_to_regnum (gdbarch
, arm_dwarf_reg_to_regnum
);
10403 set_gdbarch_register_sim_regno (gdbarch
, arm_register_sim_regno
);
10405 set_gdbarch_register_name (gdbarch
, arm_register_name
);
10407 /* Returning results. */
10408 set_gdbarch_return_value (gdbarch
, arm_return_value
);
10411 set_gdbarch_print_insn (gdbarch
, gdb_print_insn_arm
);
10413 /* Minsymbol frobbing. */
10414 set_gdbarch_elf_make_msymbol_special (gdbarch
, arm_elf_make_msymbol_special
);
10415 set_gdbarch_coff_make_msymbol_special (gdbarch
,
10416 arm_coff_make_msymbol_special
);
10417 set_gdbarch_record_special_symbol (gdbarch
, arm_record_special_symbol
);
10419 /* Thumb-2 IT block support. */
10420 set_gdbarch_adjust_breakpoint_address (gdbarch
,
10421 arm_adjust_breakpoint_address
);
10423 /* Virtual tables. */
10424 set_gdbarch_vbit_in_delta (gdbarch
, 1);
10426 /* Hook in the ABI-specific overrides, if they have been registered. */
10427 gdbarch_init_osabi (info
, gdbarch
);
10429 dwarf2_frame_set_init_reg (gdbarch
, arm_dwarf2_frame_init_reg
);
10431 /* Add some default predicates. */
10433 frame_unwind_append_unwinder (gdbarch
, &arm_m_exception_unwind
);
10434 frame_unwind_append_unwinder (gdbarch
, &arm_stub_unwind
);
10435 dwarf2_append_unwinders (gdbarch
);
10436 frame_unwind_append_unwinder (gdbarch
, &arm_exidx_unwind
);
10437 frame_unwind_append_unwinder (gdbarch
, &arm_prologue_unwind
);
10439 /* Now we have tuned the configuration, set a few final things,
10440 based on what the OS ABI has told us. */
10442 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10443 binaries are always marked. */
10444 if (tdep
->arm_abi
== ARM_ABI_AUTO
)
10445 tdep
->arm_abi
= ARM_ABI_APCS
;
10447 /* Watchpoints are not steppable. */
10448 set_gdbarch_have_nonsteppable_watchpoint (gdbarch
, 1);
10450 /* We used to default to FPA for generic ARM, but almost nobody
10451 uses that now, and we now provide a way for the user to force
10452 the model. So default to the most useful variant. */
10453 if (tdep
->fp_model
== ARM_FLOAT_AUTO
)
10454 tdep
->fp_model
= ARM_FLOAT_SOFT_FPA
;
10456 if (tdep
->jb_pc
>= 0)
10457 set_gdbarch_get_longjmp_target (gdbarch
, arm_get_longjmp_target
);
10459 /* Floating point sizes and format. */
10460 set_gdbarch_float_format (gdbarch
, floatformats_ieee_single
);
10461 if (tdep
->fp_model
== ARM_FLOAT_SOFT_FPA
|| tdep
->fp_model
== ARM_FLOAT_FPA
)
10463 set_gdbarch_double_format
10464 (gdbarch
, floatformats_ieee_double_littlebyte_bigword
);
10465 set_gdbarch_long_double_format
10466 (gdbarch
, floatformats_ieee_double_littlebyte_bigword
);
10470 set_gdbarch_double_format (gdbarch
, floatformats_ieee_double
);
10471 set_gdbarch_long_double_format (gdbarch
, floatformats_ieee_double
);
10474 if (have_vfp_pseudos
)
10476 /* NOTE: These are the only pseudo registers used by
10477 the ARM target at the moment. If more are added, a
10478 little more care in numbering will be needed. */
10480 int num_pseudos
= 32;
10481 if (have_neon_pseudos
)
10483 set_gdbarch_num_pseudo_regs (gdbarch
, num_pseudos
);
10484 set_gdbarch_pseudo_register_read (gdbarch
, arm_pseudo_read
);
10485 set_gdbarch_pseudo_register_write (gdbarch
, arm_pseudo_write
);
10490 set_tdesc_pseudo_register_name (gdbarch
, arm_register_name
);
10492 tdesc_use_registers (gdbarch
, tdesc
, tdesc_data
);
10494 /* Override tdesc_register_type to adjust the types of VFP
10495 registers for NEON. */
10496 set_gdbarch_register_type (gdbarch
, arm_register_type
);
10499 /* Add standard register aliases. We add aliases even for those
10500 names which are used by the current architecture - it's simpler,
10501 and does no harm, since nothing ever lists user registers. */
10502 for (i
= 0; i
< ARRAY_SIZE (arm_register_aliases
); i
++)
10503 user_reg_add (gdbarch
, arm_register_aliases
[i
].name
,
10504 value_of_arm_user_reg
, &arm_register_aliases
[i
].regnum
);
10510 arm_dump_tdep (struct gdbarch
*gdbarch
, struct ui_file
*file
)
10512 struct gdbarch_tdep
*tdep
= gdbarch_tdep (gdbarch
);
10517 fprintf_unfiltered (file
, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10518 (unsigned long) tdep
->lowest_pc
);
10521 extern initialize_file_ftype _initialize_arm_tdep
; /* -Wmissing-prototypes */
10524 _initialize_arm_tdep (void)
10526 struct ui_file
*stb
;
10528 struct cmd_list_element
*new_set
, *new_show
;
10529 const char *setname
;
10530 const char *setdesc
;
10531 const char *const *regnames
;
10533 static char *helptext
;
10534 char regdesc
[1024], *rdptr
= regdesc
;
10535 size_t rest
= sizeof (regdesc
);
10537 gdbarch_register (bfd_arch_arm
, arm_gdbarch_init
, arm_dump_tdep
);
10539 arm_objfile_data_key
10540 = register_objfile_data_with_cleanup (NULL
, arm_objfile_data_free
);
10542 /* Add ourselves to objfile event chain. */
10543 observer_attach_new_objfile (arm_exidx_new_objfile
);
10545 = register_objfile_data_with_cleanup (NULL
, arm_exidx_data_free
);
10547 /* Register an ELF OS ABI sniffer for ARM binaries. */
10548 gdbarch_register_osabi_sniffer (bfd_arch_arm
,
10549 bfd_target_elf_flavour
,
10550 arm_elf_osabi_sniffer
);
10552 /* Initialize the standard target descriptions. */
10553 initialize_tdesc_arm_with_m ();
10554 initialize_tdesc_arm_with_m_fpa_layout ();
10555 initialize_tdesc_arm_with_m_vfp_d16 ();
10556 initialize_tdesc_arm_with_iwmmxt ();
10557 initialize_tdesc_arm_with_vfpv2 ();
10558 initialize_tdesc_arm_with_vfpv3 ();
10559 initialize_tdesc_arm_with_neon ();
10561 /* Get the number of possible sets of register names defined in opcodes. */
10562 num_disassembly_options
= get_arm_regname_num_options ();
10564 /* Add root prefix command for all "set arm"/"show arm" commands. */
10565 add_prefix_cmd ("arm", no_class
, set_arm_command
,
10566 _("Various ARM-specific commands."),
10567 &setarmcmdlist
, "set arm ", 0, &setlist
);
10569 add_prefix_cmd ("arm", no_class
, show_arm_command
,
10570 _("Various ARM-specific commands."),
10571 &showarmcmdlist
, "show arm ", 0, &showlist
);
10573 /* Sync the opcode insn printer with our register viewer. */
10574 parse_arm_disassembler_option ("reg-names-std");
10576 /* Initialize the array that will be passed to
10577 add_setshow_enum_cmd(). */
10578 valid_disassembly_styles
= XNEWVEC (const char *,
10579 num_disassembly_options
+ 1);
10580 for (i
= 0; i
< num_disassembly_options
; i
++)
10582 numregs
= get_arm_regnames (i
, &setname
, &setdesc
, ®names
);
10583 valid_disassembly_styles
[i
] = setname
;
10584 length
= snprintf (rdptr
, rest
, "%s - %s\n", setname
, setdesc
);
10587 /* When we find the default names, tell the disassembler to use
10589 if (!strcmp (setname
, "std"))
10591 disassembly_style
= setname
;
10592 set_arm_regname_option (i
);
10595 /* Mark the end of valid options. */
10596 valid_disassembly_styles
[num_disassembly_options
] = NULL
;
10598 /* Create the help text. */
10599 stb
= mem_fileopen ();
10600 fprintf_unfiltered (stb
, "%s%s%s",
10601 _("The valid values are:\n"),
10603 _("The default is \"std\"."));
10604 helptext
= ui_file_xstrdup (stb
, NULL
);
10605 ui_file_delete (stb
);
10607 add_setshow_enum_cmd("disassembler", no_class
,
10608 valid_disassembly_styles
, &disassembly_style
,
10609 _("Set the disassembly style."),
10610 _("Show the disassembly style."),
10612 set_disassembly_style_sfunc
,
10613 NULL
, /* FIXME: i18n: The disassembly style is
10615 &setarmcmdlist
, &showarmcmdlist
);
10617 add_setshow_boolean_cmd ("apcs32", no_class
, &arm_apcs_32
,
10618 _("Set usage of ARM 32-bit mode."),
10619 _("Show usage of ARM 32-bit mode."),
10620 _("When off, a 26-bit PC will be used."),
10622 NULL
, /* FIXME: i18n: Usage of ARM 32-bit
10624 &setarmcmdlist
, &showarmcmdlist
);
10626 /* Add a command to allow the user to force the FPU model. */
10627 add_setshow_enum_cmd ("fpu", no_class
, fp_model_strings
, ¤t_fp_model
,
10628 _("Set the floating point type."),
10629 _("Show the floating point type."),
10630 _("auto - Determine the FP typefrom the OS-ABI.\n\
10631 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10632 fpa - FPA co-processor (GCC compiled).\n\
10633 softvfp - Software FP with pure-endian doubles.\n\
10634 vfp - VFP co-processor."),
10635 set_fp_model_sfunc
, show_fp_model
,
10636 &setarmcmdlist
, &showarmcmdlist
);
10638 /* Add a command to allow the user to force the ABI. */
10639 add_setshow_enum_cmd ("abi", class_support
, arm_abi_strings
, &arm_abi_string
,
10641 _("Show the ABI."),
10642 NULL
, arm_set_abi
, arm_show_abi
,
10643 &setarmcmdlist
, &showarmcmdlist
);
10645 /* Add two commands to allow the user to force the assumed
10647 add_setshow_enum_cmd ("fallback-mode", class_support
,
10648 arm_mode_strings
, &arm_fallback_mode_string
,
10649 _("Set the mode assumed when symbols are unavailable."),
10650 _("Show the mode assumed when symbols are unavailable."),
10651 NULL
, NULL
, arm_show_fallback_mode
,
10652 &setarmcmdlist
, &showarmcmdlist
);
10653 add_setshow_enum_cmd ("force-mode", class_support
,
10654 arm_mode_strings
, &arm_force_mode_string
,
10655 _("Set the mode assumed even when symbols are available."),
10656 _("Show the mode assumed even when symbols are available."),
10657 NULL
, NULL
, arm_show_force_mode
,
10658 &setarmcmdlist
, &showarmcmdlist
);
10660 /* Debugging flag. */
10661 add_setshow_boolean_cmd ("arm", class_maintenance
, &arm_debug
,
10662 _("Set ARM debugging."),
10663 _("Show ARM debugging."),
10664 _("When on, arm-specific debugging is enabled."),
10666 NULL
, /* FIXME: i18n: "ARM debugging is %s. */
10667 &setdebuglist
, &showdebuglist
);
/* ARM-reversible process record data structures.  */

#define ARM_INSN_SIZE_BYTES 4
#define THUMB_INSN_SIZE_BYTES 2
#define THUMB2_INSN_SIZE_BYTES 4


/* Position of the bit within a 32-bit ARM instruction
   that defines whether the instruction is a load or store.  */
#define INSN_S_L_BIT_NUM 20

/* Copy LENGTH register numbers from RECORD_BUF into a freshly
   allocated array assigned to REGS.  No allocation happens when
   LENGTH is zero.  Caller owns (and must free) REGS.  */
#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int reg_len = LENGTH; \
            if (reg_len) \
              { \
                REGS = XNEWVEC (uint32_t, reg_len); \
                memcpy (&REGS[0], &RECORD_BUF[0], sizeof (uint32_t) * LENGTH); \
              } \
          } \
        while (0)

/* Copy LENGTH (len, addr) memory records from RECORD_BUF into a
   freshly allocated array assigned to MEMS.  No allocation happens
   when LENGTH is zero.  Caller owns (and must free) MEMS.
   Note: copy to MEMS itself rather than &MEMS->len; the old form only
   worked because `len' happens to be the first member of
   struct arm_mem_r.  */
#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
        do  \
          { \
            unsigned int mem_len = LENGTH; \
            if (mem_len) \
              { \
                MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
                memcpy (MEMS, &RECORD_BUF[0], \
                        sizeof (struct arm_mem_r) * LENGTH); \
              } \
          } \
        while (0)

/* Checks whether insn is already recorded or yet to be decoded. (boolean expression).  */
#define INSN_RECORDED(ARM_RECORD) \
        (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
10710 /* ARM memory record structure. */
10713 uint32_t len
; /* Record length. */
10714 uint32_t addr
; /* Memory address. */
10717 /* ARM instruction record contains opcode of current insn
10718 and execution state (before entry to decode_insn()),
10719 contains list of to-be-modified registers and
10720 memory blocks (on return from decode_insn()). */
10722 typedef struct insn_decode_record_t
10724 struct gdbarch
*gdbarch
;
10725 struct regcache
*regcache
;
10726 CORE_ADDR this_addr
; /* Address of the insn being decoded. */
10727 uint32_t arm_insn
; /* Should accommodate thumb. */
10728 uint32_t cond
; /* Condition code. */
10729 uint32_t opcode
; /* Insn opcode. */
10730 uint32_t decode
; /* Insn decode bits. */
10731 uint32_t mem_rec_count
; /* No of mem records. */
10732 uint32_t reg_rec_count
; /* No of reg records. */
10733 uint32_t *arm_regs
; /* Registers to be saved for this record. */
10734 struct arm_mem_r
*arm_mems
; /* Memory to be saved for this record. */
10735 } insn_decode_record
;
/* Checks ARM SBZ and SBO mandatory fields.  Returns non-zero when the
   LEN-bit field of INSN starting at bit BIT_NUM matches the expected
   should-be-one (SBO non-zero) or should-be-zero (SBO zero) pattern.
   NOTE(review): middle of this function was lost in extraction;
   reconstructed from upstream GDB — verify against the repository.  */

static int
sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
{
  uint32_t ones = bits (insn, bit_num - 1, (bit_num -1) + (len - 1));

  if (!len)
    return 1;

  if (!sbo)
    ones = ~ones;

  while (ones)
    {
      if (!(ones & sbo))
	return 0;
      ones = ones >> 1;
    }

  return 1;
}
/* Status codes returned by the individual record handlers.  */
enum arm_record_result
{
  ARM_RECORD_SUCCESS = 0,
  ARM_RECORD_FAILURE = 1
};

/* Flavour of misc store handled by arm_record_strx.  */
typedef enum
{
  ARM_RECORD_STRH = 1,
  ARM_RECORD_STRD
} arm_record_strx_t;
10783 arm_record_strx (insn_decode_record
*arm_insn_r
, uint32_t *record_buf
,
10784 uint32_t *record_buf_mem
, arm_record_strx_t str_type
)
10787 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
10788 ULONGEST u_regval
[2]= {0};
10790 uint32_t reg_src1
= 0, reg_src2
= 0;
10791 uint32_t immed_high
= 0, immed_low
= 0,offset_8
= 0, tgt_mem_addr
= 0;
10792 uint32_t opcode1
= 0;
10794 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
10795 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
10796 opcode1
= bits (arm_insn_r
->arm_insn
, 20, 24);
10799 if (14 == arm_insn_r
->opcode
|| 10 == arm_insn_r
->opcode
)
10801 /* 1) Handle misc store, immediate offset. */
10802 immed_low
= bits (arm_insn_r
->arm_insn
, 0, 3);
10803 immed_high
= bits (arm_insn_r
->arm_insn
, 8, 11);
10804 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
10805 regcache_raw_read_unsigned (reg_cache
, reg_src1
,
10807 if (ARM_PC_REGNUM
== reg_src1
)
10809 /* If R15 was used as Rn, hence current PC+8. */
10810 u_regval
[0] = u_regval
[0] + 8;
10812 offset_8
= (immed_high
<< 4) | immed_low
;
10813 /* Calculate target store address. */
10814 if (14 == arm_insn_r
->opcode
)
10816 tgt_mem_addr
= u_regval
[0] + offset_8
;
10820 tgt_mem_addr
= u_regval
[0] - offset_8
;
10822 if (ARM_RECORD_STRH
== str_type
)
10824 record_buf_mem
[0] = 2;
10825 record_buf_mem
[1] = tgt_mem_addr
;
10826 arm_insn_r
->mem_rec_count
= 1;
10828 else if (ARM_RECORD_STRD
== str_type
)
10830 record_buf_mem
[0] = 4;
10831 record_buf_mem
[1] = tgt_mem_addr
;
10832 record_buf_mem
[2] = 4;
10833 record_buf_mem
[3] = tgt_mem_addr
+ 4;
10834 arm_insn_r
->mem_rec_count
= 2;
10837 else if (12 == arm_insn_r
->opcode
|| 8 == arm_insn_r
->opcode
)
10839 /* 2) Store, register offset. */
10841 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
10843 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
10844 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
10845 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
10846 if (15 == reg_src2
)
10848 /* If R15 was used as Rn, hence current PC+8. */
10849 u_regval
[0] = u_regval
[0] + 8;
10851 /* Calculate target store address, Rn +/- Rm, register offset. */
10852 if (12 == arm_insn_r
->opcode
)
10854 tgt_mem_addr
= u_regval
[0] + u_regval
[1];
10858 tgt_mem_addr
= u_regval
[1] - u_regval
[0];
10860 if (ARM_RECORD_STRH
== str_type
)
10862 record_buf_mem
[0] = 2;
10863 record_buf_mem
[1] = tgt_mem_addr
;
10864 arm_insn_r
->mem_rec_count
= 1;
10866 else if (ARM_RECORD_STRD
== str_type
)
10868 record_buf_mem
[0] = 4;
10869 record_buf_mem
[1] = tgt_mem_addr
;
10870 record_buf_mem
[2] = 4;
10871 record_buf_mem
[3] = tgt_mem_addr
+ 4;
10872 arm_insn_r
->mem_rec_count
= 2;
10875 else if (11 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
10876 || 2 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
)
10878 /* 3) Store, immediate pre-indexed. */
10879 /* 5) Store, immediate post-indexed. */
10880 immed_low
= bits (arm_insn_r
->arm_insn
, 0, 3);
10881 immed_high
= bits (arm_insn_r
->arm_insn
, 8, 11);
10882 offset_8
= (immed_high
<< 4) | immed_low
;
10883 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
10884 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
10885 /* Calculate target store address, Rn +/- Rm, register offset. */
10886 if (15 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
)
10888 tgt_mem_addr
= u_regval
[0] + offset_8
;
10892 tgt_mem_addr
= u_regval
[0] - offset_8
;
10894 if (ARM_RECORD_STRH
== str_type
)
10896 record_buf_mem
[0] = 2;
10897 record_buf_mem
[1] = tgt_mem_addr
;
10898 arm_insn_r
->mem_rec_count
= 1;
10900 else if (ARM_RECORD_STRD
== str_type
)
10902 record_buf_mem
[0] = 4;
10903 record_buf_mem
[1] = tgt_mem_addr
;
10904 record_buf_mem
[2] = 4;
10905 record_buf_mem
[3] = tgt_mem_addr
+ 4;
10906 arm_insn_r
->mem_rec_count
= 2;
10908 /* Record Rn also as it changes. */
10909 *(record_buf
) = bits (arm_insn_r
->arm_insn
, 16, 19);
10910 arm_insn_r
->reg_rec_count
= 1;
10912 else if (9 == arm_insn_r
->opcode
|| 13 == arm_insn_r
->opcode
10913 || 0 == arm_insn_r
->opcode
|| 4 == arm_insn_r
->opcode
)
10915 /* 4) Store, register pre-indexed. */
10916 /* 6) Store, register post -indexed. */
10917 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
10918 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
10919 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
10920 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
10921 /* Calculate target store address, Rn +/- Rm, register offset. */
10922 if (13 == arm_insn_r
->opcode
|| 4 == arm_insn_r
->opcode
)
10924 tgt_mem_addr
= u_regval
[0] + u_regval
[1];
10928 tgt_mem_addr
= u_regval
[1] - u_regval
[0];
10930 if (ARM_RECORD_STRH
== str_type
)
10932 record_buf_mem
[0] = 2;
10933 record_buf_mem
[1] = tgt_mem_addr
;
10934 arm_insn_r
->mem_rec_count
= 1;
10936 else if (ARM_RECORD_STRD
== str_type
)
10938 record_buf_mem
[0] = 4;
10939 record_buf_mem
[1] = tgt_mem_addr
;
10940 record_buf_mem
[2] = 4;
10941 record_buf_mem
[3] = tgt_mem_addr
+ 4;
10942 arm_insn_r
->mem_rec_count
= 2;
10944 /* Record Rn also as it changes. */
10945 *(record_buf
) = bits (arm_insn_r
->arm_insn
, 16, 19);
10946 arm_insn_r
->reg_rec_count
= 1;
10951 /* Handling ARM extension space insns. */
10954 arm_record_extension_space (insn_decode_record
*arm_insn_r
)
10956 uint32_t ret
= 0; /* Return value: -1:record failure ; 0:success */
10957 uint32_t opcode1
= 0, opcode2
= 0, insn_op1
= 0;
10958 uint32_t record_buf
[8], record_buf_mem
[8];
10959 uint32_t reg_src1
= 0;
10960 uint32_t immed_high
= 0, immed_low
= 0,offset_8
= 0, tgt_mem_addr
= 0;
10961 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
10962 ULONGEST u_regval
= 0;
10964 gdb_assert (!INSN_RECORDED(arm_insn_r
));
10965 /* Handle unconditional insn extension space. */
10967 opcode1
= bits (arm_insn_r
->arm_insn
, 20, 27);
10968 opcode2
= bits (arm_insn_r
->arm_insn
, 4, 7);
10969 if (arm_insn_r
->cond
)
10971 /* PLD has no affect on architectural state, it just affects
10973 if (5 == ((opcode1
& 0xE0) >> 5))
10976 record_buf
[0] = ARM_PS_REGNUM
;
10977 record_buf
[1] = ARM_LR_REGNUM
;
10978 arm_insn_r
->reg_rec_count
= 2;
10980 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10984 opcode1
= bits (arm_insn_r
->arm_insn
, 25, 27);
10985 if (3 == opcode1
&& bit (arm_insn_r
->arm_insn
, 4))
10988 /* Undefined instruction on ARM V5; need to handle if later
10989 versions define it. */
10992 opcode1
= bits (arm_insn_r
->arm_insn
, 24, 27);
10993 opcode2
= bits (arm_insn_r
->arm_insn
, 4, 7);
10994 insn_op1
= bits (arm_insn_r
->arm_insn
, 20, 23);
10996 /* Handle arithmetic insn extension space. */
10997 if (!opcode1
&& 9 == opcode2
&& 1 != arm_insn_r
->cond
10998 && !INSN_RECORDED(arm_insn_r
))
11000 /* Handle MLA(S) and MUL(S). */
11001 if (0 <= insn_op1
&& 3 >= insn_op1
)
11003 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11004 record_buf
[1] = ARM_PS_REGNUM
;
11005 arm_insn_r
->reg_rec_count
= 2;
11007 else if (4 <= insn_op1
&& 15 >= insn_op1
)
11009 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
11010 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 16, 19);
11011 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 12, 15);
11012 record_buf
[2] = ARM_PS_REGNUM
;
11013 arm_insn_r
->reg_rec_count
= 3;
11017 opcode1
= bits (arm_insn_r
->arm_insn
, 26, 27);
11018 opcode2
= bits (arm_insn_r
->arm_insn
, 23, 24);
11019 insn_op1
= bits (arm_insn_r
->arm_insn
, 21, 22);
11021 /* Handle control insn extension space. */
11023 if (!opcode1
&& 2 == opcode2
&& !bit (arm_insn_r
->arm_insn
, 20)
11024 && 1 != arm_insn_r
->cond
&& !INSN_RECORDED(arm_insn_r
))
11026 if (!bit (arm_insn_r
->arm_insn
,25))
11028 if (!bits (arm_insn_r
->arm_insn
, 4, 7))
11030 if ((0 == insn_op1
) || (2 == insn_op1
))
11033 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11034 arm_insn_r
->reg_rec_count
= 1;
11036 else if (1 == insn_op1
)
11038 /* CSPR is going to be changed. */
11039 record_buf
[0] = ARM_PS_REGNUM
;
11040 arm_insn_r
->reg_rec_count
= 1;
11042 else if (3 == insn_op1
)
11044 /* SPSR is going to be changed. */
11045 /* We need to get SPSR value, which is yet to be done. */
11046 printf_unfiltered (_("Process record does not support "
11047 "instruction 0x%0x at address %s.\n"),
11048 arm_insn_r
->arm_insn
,
11049 paddress (arm_insn_r
->gdbarch
,
11050 arm_insn_r
->this_addr
));
11054 else if (1 == bits (arm_insn_r
->arm_insn
, 4, 7))
11059 record_buf
[0] = ARM_PS_REGNUM
;
11060 arm_insn_r
->reg_rec_count
= 1;
11062 else if (3 == insn_op1
)
11065 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11066 arm_insn_r
->reg_rec_count
= 1;
11069 else if (3 == bits (arm_insn_r
->arm_insn
, 4, 7))
11072 record_buf
[0] = ARM_PS_REGNUM
;
11073 record_buf
[1] = ARM_LR_REGNUM
;
11074 arm_insn_r
->reg_rec_count
= 2;
11076 else if (5 == bits (arm_insn_r
->arm_insn
, 4, 7))
11078 /* QADD, QSUB, QDADD, QDSUB */
11079 record_buf
[0] = ARM_PS_REGNUM
;
11080 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 12, 15);
11081 arm_insn_r
->reg_rec_count
= 2;
11083 else if (7 == bits (arm_insn_r
->arm_insn
, 4, 7))
11086 record_buf
[0] = ARM_PS_REGNUM
;
11087 record_buf
[1] = ARM_LR_REGNUM
;
11088 arm_insn_r
->reg_rec_count
= 2;
11090 /* Save SPSR also;how? */
11091 printf_unfiltered (_("Process record does not support "
11092 "instruction 0x%0x at address %s.\n"),
11093 arm_insn_r
->arm_insn
,
11094 paddress (arm_insn_r
->gdbarch
, arm_insn_r
->this_addr
));
11097 else if(8 == bits (arm_insn_r
->arm_insn
, 4, 7)
11098 || 10 == bits (arm_insn_r
->arm_insn
, 4, 7)
11099 || 12 == bits (arm_insn_r
->arm_insn
, 4, 7)
11100 || 14 == bits (arm_insn_r
->arm_insn
, 4, 7)
11103 if (0 == insn_op1
|| 1 == insn_op1
)
11105 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11106 /* We dont do optimization for SMULW<y> where we
11108 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11109 record_buf
[1] = ARM_PS_REGNUM
;
11110 arm_insn_r
->reg_rec_count
= 2;
11112 else if (2 == insn_op1
)
11115 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11116 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
11117 arm_insn_r
->reg_rec_count
= 2;
11119 else if (3 == insn_op1
)
11122 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11123 arm_insn_r
->reg_rec_count
= 1;
11129 /* MSR : immediate form. */
11132 /* CSPR is going to be changed. */
11133 record_buf
[0] = ARM_PS_REGNUM
;
11134 arm_insn_r
->reg_rec_count
= 1;
11136 else if (3 == insn_op1
)
11138 /* SPSR is going to be changed. */
11139 /* we need to get SPSR value, which is yet to be done */
11140 printf_unfiltered (_("Process record does not support "
11141 "instruction 0x%0x at address %s.\n"),
11142 arm_insn_r
->arm_insn
,
11143 paddress (arm_insn_r
->gdbarch
,
11144 arm_insn_r
->this_addr
));
11150 opcode1
= bits (arm_insn_r
->arm_insn
, 25, 27);
11151 opcode2
= bits (arm_insn_r
->arm_insn
, 20, 24);
11152 insn_op1
= bits (arm_insn_r
->arm_insn
, 5, 6);
11154 /* Handle load/store insn extension space. */
11156 if (!opcode1
&& bit (arm_insn_r
->arm_insn
, 7)
11157 && bit (arm_insn_r
->arm_insn
, 4) && 1 != arm_insn_r
->cond
11158 && !INSN_RECORDED(arm_insn_r
))
11163 /* These insn, changes register and memory as well. */
11164 /* SWP or SWPB insn. */
11165 /* Get memory address given by Rn. */
11166 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
11167 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
11168 /* SWP insn ?, swaps word. */
11169 if (8 == arm_insn_r
->opcode
)
11171 record_buf_mem
[0] = 4;
11175 /* SWPB insn, swaps only byte. */
11176 record_buf_mem
[0] = 1;
11178 record_buf_mem
[1] = u_regval
;
11179 arm_insn_r
->mem_rec_count
= 1;
11180 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11181 arm_insn_r
->reg_rec_count
= 1;
11183 else if (1 == insn_op1
&& !bit (arm_insn_r
->arm_insn
, 20))
11186 arm_record_strx(arm_insn_r
, &record_buf
[0], &record_buf_mem
[0],
11189 else if (2 == insn_op1
&& !bit (arm_insn_r
->arm_insn
, 20))
11192 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11193 record_buf
[1] = record_buf
[0] + 1;
11194 arm_insn_r
->reg_rec_count
= 2;
11196 else if (3 == insn_op1
&& !bit (arm_insn_r
->arm_insn
, 20))
11199 arm_record_strx(arm_insn_r
, &record_buf
[0], &record_buf_mem
[0],
11202 else if (bit (arm_insn_r
->arm_insn
, 20) && insn_op1
<= 3)
11204 /* LDRH, LDRSB, LDRSH. */
11205 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11206 arm_insn_r
->reg_rec_count
= 1;
11211 opcode1
= bits (arm_insn_r
->arm_insn
, 23, 27);
11212 if (24 == opcode1
&& bit (arm_insn_r
->arm_insn
, 21)
11213 && !INSN_RECORDED(arm_insn_r
))
11216 /* Handle coprocessor insn extension space. */
11219 /* To be done for ARMv5 and later; as of now we return -1. */
11221 printf_unfiltered (_("Process record does not support instruction x%0x "
11222 "at address %s.\n"),arm_insn_r
->arm_insn
,
11223 paddress (arm_insn_r
->gdbarch
, arm_insn_r
->this_addr
));
11226 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11227 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11232 /* Handling opcode 000 insns. */
11235 arm_record_data_proc_misc_ld_str (insn_decode_record
*arm_insn_r
)
11237 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11238 uint32_t record_buf
[8], record_buf_mem
[8];
11239 ULONGEST u_regval
[2] = {0};
11241 uint32_t reg_src1
= 0, reg_src2
= 0, reg_dest
= 0;
11242 uint32_t immed_high
= 0, immed_low
= 0, offset_8
= 0, tgt_mem_addr
= 0;
11243 uint32_t opcode1
= 0;
11245 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
11246 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
11247 opcode1
= bits (arm_insn_r
->arm_insn
, 20, 24);
11249 /* Data processing insn /multiply insn. */
11250 if (9 == arm_insn_r
->decode
11251 && ((4 <= arm_insn_r
->opcode
&& 7 >= arm_insn_r
->opcode
)
11252 || (0 == arm_insn_r
->opcode
|| 1 == arm_insn_r
->opcode
)))
11254 /* Handle multiply instructions. */
11255 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11256 if (0 == arm_insn_r
->opcode
|| 1 == arm_insn_r
->opcode
)
11258 /* Handle MLA and MUL. */
11259 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 16, 19);
11260 record_buf
[1] = ARM_PS_REGNUM
;
11261 arm_insn_r
->reg_rec_count
= 2;
11263 else if (4 <= arm_insn_r
->opcode
&& 7 >= arm_insn_r
->opcode
)
11265 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11266 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 16, 19);
11267 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 12, 15);
11268 record_buf
[2] = ARM_PS_REGNUM
;
11269 arm_insn_r
->reg_rec_count
= 3;
11272 else if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
)
11273 && (11 == arm_insn_r
->decode
|| 13 == arm_insn_r
->decode
))
11275 /* Handle misc load insns, as 20th bit (L = 1). */
11276 /* LDR insn has a capability to do branching, if
11277 MOV LR, PC is precceded by LDR insn having Rn as R15
11278 in that case, it emulates branch and link insn, and hence we
11279 need to save CSPR and PC as well. I am not sure this is right
11280 place; as opcode = 010 LDR insn make this happen, if R15 was
11282 reg_dest
= bits (arm_insn_r
->arm_insn
, 12, 15);
11283 if (15 != reg_dest
)
11285 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11286 arm_insn_r
->reg_rec_count
= 1;
11290 record_buf
[0] = reg_dest
;
11291 record_buf
[1] = ARM_PS_REGNUM
;
11292 arm_insn_r
->reg_rec_count
= 2;
11295 else if ((9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
)
11296 && sbo_sbz (arm_insn_r
->arm_insn
, 5, 12, 0)
11297 && sbo_sbz (arm_insn_r
->arm_insn
, 13, 4, 1)
11298 && 2 == bits (arm_insn_r
->arm_insn
, 20, 21))
11300 /* Handle MSR insn. */
11301 if (9 == arm_insn_r
->opcode
)
11303 /* CSPR is going to be changed. */
11304 record_buf
[0] = ARM_PS_REGNUM
;
11305 arm_insn_r
->reg_rec_count
= 1;
11309 /* SPSR is going to be changed. */
11310 /* How to read SPSR value? */
11311 printf_unfiltered (_("Process record does not support instruction "
11312 "0x%0x at address %s.\n"),
11313 arm_insn_r
->arm_insn
,
11314 paddress (arm_insn_r
->gdbarch
, arm_insn_r
->this_addr
));
11318 else if (9 == arm_insn_r
->decode
11319 && (8 == arm_insn_r
->opcode
|| 10 == arm_insn_r
->opcode
)
11320 && !bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
11322 /* Handling SWP, SWPB. */
11323 /* These insn, changes register and memory as well. */
11324 /* SWP or SWPB insn. */
11326 reg_src1
= bits (arm_insn_r
->arm_insn
, 16, 19);
11327 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
11328 /* SWP insn ?, swaps word. */
11329 if (8 == arm_insn_r
->opcode
)
11331 record_buf_mem
[0] = 4;
11335 /* SWPB insn, swaps only byte. */
11336 record_buf_mem
[0] = 1;
11338 record_buf_mem
[1] = u_regval
[0];
11339 arm_insn_r
->mem_rec_count
= 1;
11340 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11341 arm_insn_r
->reg_rec_count
= 1;
11343 else if (3 == arm_insn_r
->decode
&& 0x12 == opcode1
11344 && sbo_sbz (arm_insn_r
->arm_insn
, 9, 12, 1))
11346 /* Handle BLX, branch and link/exchange. */
11347 if (9 == arm_insn_r
->opcode
)
11349 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
11350 and R14 stores the return address. */
11351 record_buf
[0] = ARM_PS_REGNUM
;
11352 record_buf
[1] = ARM_LR_REGNUM
;
11353 arm_insn_r
->reg_rec_count
= 2;
11356 else if (7 == arm_insn_r
->decode
&& 0x12 == opcode1
)
11358 /* Handle enhanced software breakpoint insn, BKPT. */
11359 /* CPSR is changed to be executed in ARM state, disabling normal
11360 interrupts, entering abort mode. */
11361 /* According to high vector configuration PC is set. */
11362 /* user hit breakpoint and type reverse, in
11363 that case, we need to go back with previous CPSR and
11364 Program Counter. */
11365 record_buf
[0] = ARM_PS_REGNUM
;
11366 record_buf
[1] = ARM_LR_REGNUM
;
11367 arm_insn_r
->reg_rec_count
= 2;
11369 /* Save SPSR also; how? */
11370 printf_unfiltered (_("Process record does not support instruction "
11371 "0x%0x at address %s.\n"),arm_insn_r
->arm_insn
,
11372 paddress (arm_insn_r
->gdbarch
,
11373 arm_insn_r
->this_addr
));
11376 else if (11 == arm_insn_r
->decode
11377 && !bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
11379 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
11381 /* Handle str(x) insn */
11382 arm_record_strx(arm_insn_r
, &record_buf
[0], &record_buf_mem
[0],
11385 else if (1 == arm_insn_r
->decode
&& 0x12 == opcode1
11386 && sbo_sbz (arm_insn_r
->arm_insn
, 9, 12, 1))
11388 /* Handle BX, branch and link/exchange. */
11389 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
11390 record_buf
[0] = ARM_PS_REGNUM
;
11391 arm_insn_r
->reg_rec_count
= 1;
11393 else if (1 == arm_insn_r
->decode
&& 0x16 == opcode1
11394 && sbo_sbz (arm_insn_r
->arm_insn
, 9, 4, 1)
11395 && sbo_sbz (arm_insn_r
->arm_insn
, 17, 4, 1))
11397 /* Count leading zeros: CLZ. */
11398 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11399 arm_insn_r
->reg_rec_count
= 1;
11401 else if (!bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
)
11402 && (8 == arm_insn_r
->opcode
|| 10 == arm_insn_r
->opcode
)
11403 && sbo_sbz (arm_insn_r
->arm_insn
, 17, 4, 1)
11404 && sbo_sbz (arm_insn_r
->arm_insn
, 1, 12, 0)
11407 /* Handle MRS insn. */
11408 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11409 arm_insn_r
->reg_rec_count
= 1;
11411 else if (arm_insn_r
->opcode
<= 15)
11413 /* Normal data processing insns. */
11414 /* Out of 11 shifter operands mode, all the insn modifies destination
11415 register, which is specified by 13-16 decode. */
11416 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11417 record_buf
[1] = ARM_PS_REGNUM
;
11418 arm_insn_r
->reg_rec_count
= 2;
11425 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11426 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11430 /* Handling opcode 001 insns. */
11433 arm_record_data_proc_imm (insn_decode_record
*arm_insn_r
)
11435 uint32_t record_buf
[8], record_buf_mem
[8];
11437 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
11438 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
11440 if ((9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
)
11441 && 2 == bits (arm_insn_r
->arm_insn
, 20, 21)
11442 && sbo_sbz (arm_insn_r
->arm_insn
, 13, 4, 1)
11445 /* Handle MSR insn. */
11446 if (9 == arm_insn_r
->opcode
)
11448 /* CSPR is going to be changed. */
11449 record_buf
[0] = ARM_PS_REGNUM
;
11450 arm_insn_r
->reg_rec_count
= 1;
11454 /* SPSR is going to be changed. */
11457 else if (arm_insn_r
->opcode
<= 15)
11459 /* Normal data processing insns. */
11460 /* Out of 11 shifter operands mode, all the insn modifies destination
11461 register, which is specified by 13-16 decode. */
11462 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11463 record_buf
[1] = ARM_PS_REGNUM
;
11464 arm_insn_r
->reg_rec_count
= 2;
11471 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11472 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11476 /* Handle ARM mode instructions with opcode 010. */
11479 arm_record_ld_st_imm_offset (insn_decode_record
*arm_insn_r
)
11481 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11483 uint32_t reg_base
, reg_dest
;
11484 uint32_t offset_12
, tgt_mem_addr
;
11485 uint32_t record_buf
[8], record_buf_mem
[8];
11486 unsigned char wback
;
11489 /* Calculate wback. */
11490 wback
= (bit (arm_insn_r
->arm_insn
, 24) == 0)
11491 || (bit (arm_insn_r
->arm_insn
, 21) == 1);
11493 arm_insn_r
->reg_rec_count
= 0;
11494 reg_base
= bits (arm_insn_r
->arm_insn
, 16, 19);
11496 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
11498 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
11501 reg_dest
= bits (arm_insn_r
->arm_insn
, 12, 15);
11502 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_dest
;
11504 /* The LDR instruction is capable of doing branching. If MOV LR, PC
11505 preceeds a LDR instruction having R15 as reg_base, it
11506 emulates a branch and link instruction, and hence we need to save
11507 CPSR and PC as well. */
11508 if (ARM_PC_REGNUM
== reg_dest
)
11509 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_PS_REGNUM
;
11511 /* If wback is true, also save the base register, which is going to be
11514 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
11518 /* STR (immediate), STRB (immediate), STRBT and STRT. */
11520 offset_12
= bits (arm_insn_r
->arm_insn
, 0, 11);
11521 regcache_raw_read_unsigned (reg_cache
, reg_base
, &u_regval
);
11523 /* Handle bit U. */
11524 if (bit (arm_insn_r
->arm_insn
, 23))
11526 /* U == 1: Add the offset. */
11527 tgt_mem_addr
= (uint32_t) u_regval
+ offset_12
;
11531 /* U == 0: subtract the offset. */
11532 tgt_mem_addr
= (uint32_t) u_regval
- offset_12
;
11535 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
11537 if (bit (arm_insn_r
->arm_insn
, 22))
11539 /* STRB and STRBT: 1 byte. */
11540 record_buf_mem
[0] = 1;
11544 /* STR and STRT: 4 bytes. */
11545 record_buf_mem
[0] = 4;
11548 /* Handle bit P. */
11549 if (bit (arm_insn_r
->arm_insn
, 24))
11550 record_buf_mem
[1] = tgt_mem_addr
;
11552 record_buf_mem
[1] = (uint32_t) u_regval
;
11554 arm_insn_r
->mem_rec_count
= 1;
11556 /* If wback is true, also save the base register, which is going to be
11559 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
11562 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11563 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11567 /* Handling opcode 011 insns. */
11570 arm_record_ld_st_reg_offset (insn_decode_record
*arm_insn_r
)
11572 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11574 uint32_t shift_imm
= 0;
11575 uint32_t reg_src1
= 0, reg_src2
= 0, reg_dest
= 0;
11576 uint32_t offset_12
= 0, tgt_mem_addr
= 0;
11577 uint32_t record_buf
[8], record_buf_mem
[8];
11580 ULONGEST u_regval
[2];
11582 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 21, 24);
11583 arm_insn_r
->decode
= bits (arm_insn_r
->arm_insn
, 4, 7);
11585 /* Handle enhanced store insns and LDRD DSP insn,
11586 order begins according to addressing modes for store insns
11590 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
11592 reg_dest
= bits (arm_insn_r
->arm_insn
, 12, 15);
11593 /* LDR insn has a capability to do branching, if
11594 MOV LR, PC is precedded by LDR insn having Rn as R15
11595 in that case, it emulates branch and link insn, and hence we
11596 need to save CSPR and PC as well. */
11597 if (15 != reg_dest
)
11599 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
11600 arm_insn_r
->reg_rec_count
= 1;
11604 record_buf
[0] = reg_dest
;
11605 record_buf
[1] = ARM_PS_REGNUM
;
11606 arm_insn_r
->reg_rec_count
= 2;
11611 if (! bits (arm_insn_r
->arm_insn
, 4, 11))
11613 /* Store insn, register offset and register pre-indexed,
11614 register post-indexed. */
11616 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
11618 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
11619 regcache_raw_read_unsigned (reg_cache
, reg_src1
11621 regcache_raw_read_unsigned (reg_cache
, reg_src2
11623 if (15 == reg_src2
)
11625 /* If R15 was used as Rn, hence current PC+8. */
11626 /* Pre-indexed mode doesnt reach here ; illegal insn. */
11627 u_regval
[0] = u_regval
[0] + 8;
11629 /* Calculate target store address, Rn +/- Rm, register offset. */
11631 if (bit (arm_insn_r
->arm_insn
, 23))
11633 tgt_mem_addr
= u_regval
[0] + u_regval
[1];
11637 tgt_mem_addr
= u_regval
[1] - u_regval
[0];
11640 switch (arm_insn_r
->opcode
)
11654 record_buf_mem
[0] = 4;
11669 record_buf_mem
[0] = 1;
11673 gdb_assert_not_reached ("no decoding pattern found");
11676 record_buf_mem
[1] = tgt_mem_addr
;
11677 arm_insn_r
->mem_rec_count
= 1;
11679 if (9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
11680 || 13 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
11681 || 0 == arm_insn_r
->opcode
|| 2 == arm_insn_r
->opcode
11682 || 4 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
11683 || 1 == arm_insn_r
->opcode
|| 3 == arm_insn_r
->opcode
11684 || 5 == arm_insn_r
->opcode
|| 7 == arm_insn_r
->opcode
11687 /* Rn is going to be changed in pre-indexed mode and
11688 post-indexed mode as well. */
11689 record_buf
[0] = reg_src2
;
11690 arm_insn_r
->reg_rec_count
= 1;
11695 /* Store insn, scaled register offset; scaled pre-indexed. */
11696 offset_12
= bits (arm_insn_r
->arm_insn
, 5, 6);
11698 reg_src1
= bits (arm_insn_r
->arm_insn
, 0, 3);
11700 reg_src2
= bits (arm_insn_r
->arm_insn
, 16, 19);
11701 /* Get shift_imm. */
11702 shift_imm
= bits (arm_insn_r
->arm_insn
, 7, 11);
11703 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
11704 regcache_raw_read_signed (reg_cache
, reg_src1
, &s_word
);
11705 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
11706 /* Offset_12 used as shift. */
11710 /* Offset_12 used as index. */
11711 offset_12
= u_regval
[0] << shift_imm
;
11715 offset_12
= (!shift_imm
)?0:u_regval
[0] >> shift_imm
;
11721 if (bit (u_regval
[0], 31))
11723 offset_12
= 0xFFFFFFFF;
11732 /* This is arithmetic shift. */
11733 offset_12
= s_word
>> shift_imm
;
11740 regcache_raw_read_unsigned (reg_cache
, ARM_PS_REGNUM
,
11742 /* Get C flag value and shift it by 31. */
11743 offset_12
= (((bit (u_regval
[1], 29)) << 31) \
11744 | (u_regval
[0]) >> 1);
11748 offset_12
= (u_regval
[0] >> shift_imm
) \
11750 (sizeof(uint32_t) - shift_imm
));
11755 gdb_assert_not_reached ("no decoding pattern found");
11759 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
11761 if (bit (arm_insn_r
->arm_insn
, 23))
11763 tgt_mem_addr
= u_regval
[1] + offset_12
;
11767 tgt_mem_addr
= u_regval
[1] - offset_12
;
11770 switch (arm_insn_r
->opcode
)
11784 record_buf_mem
[0] = 4;
11799 record_buf_mem
[0] = 1;
11803 gdb_assert_not_reached ("no decoding pattern found");
11806 record_buf_mem
[1] = tgt_mem_addr
;
11807 arm_insn_r
->mem_rec_count
= 1;
11809 if (9 == arm_insn_r
->opcode
|| 11 == arm_insn_r
->opcode
11810 || 13 == arm_insn_r
->opcode
|| 15 == arm_insn_r
->opcode
11811 || 0 == arm_insn_r
->opcode
|| 2 == arm_insn_r
->opcode
11812 || 4 == arm_insn_r
->opcode
|| 6 == arm_insn_r
->opcode
11813 || 1 == arm_insn_r
->opcode
|| 3 == arm_insn_r
->opcode
11814 || 5 == arm_insn_r
->opcode
|| 7 == arm_insn_r
->opcode
11817 /* Rn is going to be changed in register scaled pre-indexed
11818 mode,and scaled post indexed mode. */
11819 record_buf
[0] = reg_src2
;
11820 arm_insn_r
->reg_rec_count
= 1;
11825 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11826 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11830 /* Handle ARM mode instructions with opcode 100. */
11833 arm_record_ld_st_multiple (insn_decode_record
*arm_insn_r
)
11835 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
11836 uint32_t register_count
= 0, register_bits
;
11837 uint32_t reg_base
, addr_mode
;
11838 uint32_t record_buf
[24], record_buf_mem
[48];
11842 /* Fetch the list of registers. */
11843 register_bits
= bits (arm_insn_r
->arm_insn
, 0, 15);
11844 arm_insn_r
->reg_rec_count
= 0;
11846 /* Fetch the base register that contains the address we are loading data
11848 reg_base
= bits (arm_insn_r
->arm_insn
, 16, 19);
11850 /* Calculate wback. */
11851 wback
= (bit (arm_insn_r
->arm_insn
, 21) == 1);
11853 if (bit (arm_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
11855 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11857 /* Find out which registers are going to be loaded from memory. */
11858 while (register_bits
)
11860 if (register_bits
& 0x00000001)
11861 record_buf
[arm_insn_r
->reg_rec_count
++] = register_count
;
11862 register_bits
= register_bits
>> 1;
11867 /* If wback is true, also save the base register, which is going to be
11870 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
11872 /* Save the CPSR register. */
11873 record_buf
[arm_insn_r
->reg_rec_count
++] = ARM_PS_REGNUM
;
11877 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11879 addr_mode
= bits (arm_insn_r
->arm_insn
, 23, 24);
11881 regcache_raw_read_unsigned (reg_cache
, reg_base
, &u_regval
);
11883 /* Find out how many registers are going to be stored to memory. */
11884 while (register_bits
)
11886 if (register_bits
& 0x00000001)
11888 register_bits
= register_bits
>> 1;
11893 /* STMDA (STMED): Decrement after. */
11895 record_buf_mem
[1] = (uint32_t) u_regval
11896 - register_count
* INT_REGISTER_SIZE
+ 4;
11898 /* STM (STMIA, STMEA): Increment after. */
11900 record_buf_mem
[1] = (uint32_t) u_regval
;
11902 /* STMDB (STMFD): Decrement before. */
11904 record_buf_mem
[1] = (uint32_t) u_regval
11905 - register_count
* INT_REGISTER_SIZE
;
11907 /* STMIB (STMFA): Increment before. */
11909 record_buf_mem
[1] = (uint32_t) u_regval
+ INT_REGISTER_SIZE
;
11912 gdb_assert_not_reached ("no decoding pattern found");
11916 record_buf_mem
[0] = register_count
* INT_REGISTER_SIZE
;
11917 arm_insn_r
->mem_rec_count
= 1;
11919 /* If wback is true, also save the base register, which is going to be
11922 record_buf
[arm_insn_r
->reg_rec_count
++] = reg_base
;
11925 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11926 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
11930 /* Handling opcode 101 insns. */
11933 arm_record_b_bl (insn_decode_record
*arm_insn_r
)
11935 uint32_t record_buf
[8];
11937 /* Handle B, BL, BLX(1) insns. */
11938 /* B simply branches so we do nothing here. */
11939 /* Note: BLX(1) doesnt fall here but instead it falls into
11940 extension space. */
11941 if (bit (arm_insn_r
->arm_insn
, 24))
11943 record_buf
[0] = ARM_LR_REGNUM
;
11944 arm_insn_r
->reg_rec_count
= 1;
11947 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
11952 /* Handling opcode 110 insns. */
11955 arm_record_unsupported_insn (insn_decode_record
*arm_insn_r
)
11957 printf_unfiltered (_("Process record does not support instruction "
11958 "0x%0x at address %s.\n"),arm_insn_r
->arm_insn
,
11959 paddress (arm_insn_r
->gdbarch
, arm_insn_r
->this_addr
));
11964 /* Record handler for vector data transfer instructions. */
11967 arm_record_vdata_transfer_insn (insn_decode_record
*arm_insn_r
)
11969 uint32_t bits_a
, bit_c
, bit_l
, reg_t
, reg_v
;
11970 uint32_t record_buf
[4];
11972 const int num_regs
= gdbarch_num_regs (arm_insn_r
->gdbarch
);
11973 reg_t
= bits (arm_insn_r
->arm_insn
, 12, 15);
11974 reg_v
= bits (arm_insn_r
->arm_insn
, 21, 23);
11975 bits_a
= bits (arm_insn_r
->arm_insn
, 21, 23);
11976 bit_l
= bit (arm_insn_r
->arm_insn
, 20);
11977 bit_c
= bit (arm_insn_r
->arm_insn
, 8);
11979 /* Handle VMOV instruction. */
11980 if (bit_l
&& bit_c
)
11982 record_buf
[0] = reg_t
;
11983 arm_insn_r
->reg_rec_count
= 1;
11985 else if (bit_l
&& !bit_c
)
11987 /* Handle VMOV instruction. */
11988 if (bits_a
== 0x00)
11990 if (bit (arm_insn_r
->arm_insn
, 20))
11991 record_buf
[0] = reg_t
;
11993 record_buf
[0] = num_regs
+ (bit (arm_insn_r
->arm_insn
, 7) |
11996 arm_insn_r
->reg_rec_count
= 1;
11998 /* Handle VMRS instruction. */
11999 else if (bits_a
== 0x07)
12002 reg_t
= ARM_PS_REGNUM
;
12004 record_buf
[0] = reg_t
;
12005 arm_insn_r
->reg_rec_count
= 1;
12008 else if (!bit_l
&& !bit_c
)
12010 /* Handle VMOV instruction. */
12011 if (bits_a
== 0x00)
12013 if (bit (arm_insn_r
->arm_insn
, 20))
12014 record_buf
[0] = reg_t
;
12016 record_buf
[0] = num_regs
+ (bit (arm_insn_r
->arm_insn
, 7) |
12019 arm_insn_r
->reg_rec_count
= 1;
12021 /* Handle VMSR instruction. */
12022 else if (bits_a
== 0x07)
12024 record_buf
[0] = ARM_FPSCR_REGNUM
;
12025 arm_insn_r
->reg_rec_count
= 1;
12028 else if (!bit_l
&& bit_c
)
12030 /* Handle VMOV instruction. */
12031 if (!(bits_a
& 0x04))
12033 record_buf
[0] = (reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4))
12035 arm_insn_r
->reg_rec_count
= 1;
12037 /* Handle VDUP instruction. */
12040 if (bit (arm_insn_r
->arm_insn
, 21))
12042 reg_v
= reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4);
12043 record_buf
[0] = reg_v
+ ARM_D0_REGNUM
;
12044 record_buf
[1] = reg_v
+ ARM_D0_REGNUM
+ 1;
12045 arm_insn_r
->reg_rec_count
= 2;
12049 reg_v
= reg_v
| (bit (arm_insn_r
->arm_insn
, 7) << 4);
12050 record_buf
[0] = reg_v
+ ARM_D0_REGNUM
;
12051 arm_insn_r
->reg_rec_count
= 1;
12056 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
12060 /* Record handler for extension register load/store instructions. */
12063 arm_record_exreg_ld_st_insn (insn_decode_record
*arm_insn_r
)
12065 uint32_t opcode
, single_reg
;
12066 uint8_t op_vldm_vstm
;
12067 uint32_t record_buf
[8], record_buf_mem
[128];
12068 ULONGEST u_regval
= 0;
12070 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
12071 const int num_regs
= gdbarch_num_regs (arm_insn_r
->gdbarch
);
12073 opcode
= bits (arm_insn_r
->arm_insn
, 20, 24);
12074 single_reg
= bit (arm_insn_r
->arm_insn
, 8);
12075 op_vldm_vstm
= opcode
& 0x1b;
12077 /* Handle VMOV instructions. */
12078 if ((opcode
& 0x1e) == 0x04)
12080 if (bit (arm_insn_r
->arm_insn
, 4))
12082 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
12083 record_buf
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
12084 arm_insn_r
->reg_rec_count
= 2;
12088 uint8_t reg_m
= (bits (arm_insn_r
->arm_insn
, 0, 3) << 1)
12089 | bit (arm_insn_r
->arm_insn
, 5);
12093 record_buf
[0] = num_regs
+ reg_m
;
12094 record_buf
[1] = num_regs
+ reg_m
+ 1;
12095 arm_insn_r
->reg_rec_count
= 2;
12099 record_buf
[0] = reg_m
+ ARM_D0_REGNUM
;
12100 arm_insn_r
->reg_rec_count
= 1;
12104 /* Handle VSTM and VPUSH instructions. */
12105 else if (op_vldm_vstm
== 0x08 || op_vldm_vstm
== 0x0a
12106 || op_vldm_vstm
== 0x12)
12108 uint32_t start_address
, reg_rn
, imm_off32
, imm_off8
, memory_count
;
12109 uint32_t memory_index
= 0;
12111 reg_rn
= bits (arm_insn_r
->arm_insn
, 16, 19);
12112 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
12113 imm_off8
= bits (arm_insn_r
->arm_insn
, 0, 7);
12114 imm_off32
= imm_off8
<< 24;
12115 memory_count
= imm_off8
;
12117 if (bit (arm_insn_r
->arm_insn
, 23))
12118 start_address
= u_regval
;
12120 start_address
= u_regval
- imm_off32
;
12122 if (bit (arm_insn_r
->arm_insn
, 21))
12124 record_buf
[0] = reg_rn
;
12125 arm_insn_r
->reg_rec_count
= 1;
12128 while (memory_count
> 0)
12132 record_buf_mem
[memory_index
] = start_address
;
12133 record_buf_mem
[memory_index
+ 1] = 4;
12134 start_address
= start_address
+ 4;
12135 memory_index
= memory_index
+ 2;
12139 record_buf_mem
[memory_index
] = start_address
;
12140 record_buf_mem
[memory_index
+ 1] = 4;
12141 record_buf_mem
[memory_index
+ 2] = start_address
+ 4;
12142 record_buf_mem
[memory_index
+ 3] = 4;
12143 start_address
= start_address
+ 8;
12144 memory_index
= memory_index
+ 4;
12148 arm_insn_r
->mem_rec_count
= (memory_index
>> 1);
12150 /* Handle VLDM instructions. */
12151 else if (op_vldm_vstm
== 0x09 || op_vldm_vstm
== 0x0b
12152 || op_vldm_vstm
== 0x13)
12154 uint32_t reg_count
, reg_vd
;
12155 uint32_t reg_index
= 0;
12157 reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
12158 reg_count
= bits (arm_insn_r
->arm_insn
, 0, 7);
12161 reg_vd
= reg_vd
| (bit (arm_insn_r
->arm_insn
, 22) << 4);
12163 reg_vd
= (reg_vd
<< 1) | bit (arm_insn_r
->arm_insn
, 22);
12165 if (bit (arm_insn_r
->arm_insn
, 21))
12166 record_buf
[reg_index
++] = bits (arm_insn_r
->arm_insn
, 16, 19);
12168 while (reg_count
> 0)
12171 record_buf
[reg_index
++] = num_regs
+ reg_vd
+ reg_count
- 1;
12173 record_buf
[reg_index
++] = ARM_D0_REGNUM
+ reg_vd
+ reg_count
- 1;
12177 arm_insn_r
->reg_rec_count
= reg_index
;
12179 /* VSTR Vector store register. */
12180 else if ((opcode
& 0x13) == 0x10)
12182 uint32_t start_address
, reg_rn
, imm_off32
, imm_off8
, memory_count
;
12183 uint32_t memory_index
= 0;
12185 reg_rn
= bits (arm_insn_r
->arm_insn
, 16, 19);
12186 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
12187 imm_off8
= bits (arm_insn_r
->arm_insn
, 0, 7);
12188 imm_off32
= imm_off8
<< 24;
12189 memory_count
= imm_off8
;
12191 if (bit (arm_insn_r
->arm_insn
, 23))
12192 start_address
= u_regval
+ imm_off32
;
12194 start_address
= u_regval
- imm_off32
;
12198 record_buf_mem
[memory_index
] = start_address
;
12199 record_buf_mem
[memory_index
+ 1] = 4;
12200 arm_insn_r
->mem_rec_count
= 1;
12204 record_buf_mem
[memory_index
] = start_address
;
12205 record_buf_mem
[memory_index
+ 1] = 4;
12206 record_buf_mem
[memory_index
+ 2] = start_address
+ 4;
12207 record_buf_mem
[memory_index
+ 3] = 4;
12208 arm_insn_r
->mem_rec_count
= 2;
12211 /* VLDR Vector load register. */
12212 else if ((opcode
& 0x13) == 0x11)
12214 uint32_t reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
12218 reg_vd
= reg_vd
| (bit (arm_insn_r
->arm_insn
, 22) << 4);
12219 record_buf
[0] = ARM_D0_REGNUM
+ reg_vd
;
12223 reg_vd
= (reg_vd
<< 1) | bit (arm_insn_r
->arm_insn
, 22);
12224 record_buf
[0] = num_regs
+ reg_vd
;
12226 arm_insn_r
->reg_rec_count
= 1;
12229 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
12230 MEM_ALLOC (arm_insn_r
->arm_mems
, arm_insn_r
->mem_rec_count
, record_buf_mem
);
12234 /* Record handler for arm/thumb mode VFP data processing instructions. */
12237 arm_record_vfp_data_proc_insn (insn_decode_record
*arm_insn_r
)
12239 uint32_t opc1
, opc2
, opc3
, dp_op_sz
, bit_d
, reg_vd
;
12240 uint32_t record_buf
[4];
12241 enum insn_types
{INSN_T0
, INSN_T1
, INSN_T2
, INSN_T3
, INSN_INV
};
12242 enum insn_types curr_insn_type
= INSN_INV
;
12244 reg_vd
= bits (arm_insn_r
->arm_insn
, 12, 15);
12245 opc1
= bits (arm_insn_r
->arm_insn
, 20, 23);
12246 opc2
= bits (arm_insn_r
->arm_insn
, 16, 19);
12247 opc3
= bits (arm_insn_r
->arm_insn
, 6, 7);
12248 dp_op_sz
= bit (arm_insn_r
->arm_insn
, 8);
12249 bit_d
= bit (arm_insn_r
->arm_insn
, 22);
12250 opc1
= opc1
& 0x04;
12252 /* Handle VMLA, VMLS. */
12255 if (bit (arm_insn_r
->arm_insn
, 10))
12257 if (bit (arm_insn_r
->arm_insn
, 6))
12258 curr_insn_type
= INSN_T0
;
12260 curr_insn_type
= INSN_T1
;
12265 curr_insn_type
= INSN_T1
;
12267 curr_insn_type
= INSN_T2
;
12270 /* Handle VNMLA, VNMLS, VNMUL. */
12271 else if (opc1
== 0x01)
12274 curr_insn_type
= INSN_T1
;
12276 curr_insn_type
= INSN_T2
;
12279 else if (opc1
== 0x02 && !(opc3
& 0x01))
12281 if (bit (arm_insn_r
->arm_insn
, 10))
12283 if (bit (arm_insn_r
->arm_insn
, 6))
12284 curr_insn_type
= INSN_T0
;
12286 curr_insn_type
= INSN_T1
;
12291 curr_insn_type
= INSN_T1
;
12293 curr_insn_type
= INSN_T2
;
12296 /* Handle VADD, VSUB. */
12297 else if (opc1
== 0x03)
12299 if (!bit (arm_insn_r
->arm_insn
, 9))
12301 if (bit (arm_insn_r
->arm_insn
, 6))
12302 curr_insn_type
= INSN_T0
;
12304 curr_insn_type
= INSN_T1
;
12309 curr_insn_type
= INSN_T1
;
12311 curr_insn_type
= INSN_T2
;
12315 else if (opc1
== 0x0b)
12318 curr_insn_type
= INSN_T1
;
12320 curr_insn_type
= INSN_T2
;
12322 /* Handle all other vfp data processing instructions. */
12323 else if (opc1
== 0x0b)
12326 if (!(opc3
& 0x01) || (opc2
== 0x00 && opc3
== 0x01))
12328 if (bit (arm_insn_r
->arm_insn
, 4))
12330 if (bit (arm_insn_r
->arm_insn
, 6))
12331 curr_insn_type
= INSN_T0
;
12333 curr_insn_type
= INSN_T1
;
12338 curr_insn_type
= INSN_T1
;
12340 curr_insn_type
= INSN_T2
;
12343 /* Handle VNEG and VABS. */
12344 else if ((opc2
== 0x01 && opc3
== 0x01)
12345 || (opc2
== 0x00 && opc3
== 0x03))
12347 if (!bit (arm_insn_r
->arm_insn
, 11))
12349 if (bit (arm_insn_r
->arm_insn
, 6))
12350 curr_insn_type
= INSN_T0
;
12352 curr_insn_type
= INSN_T1
;
12357 curr_insn_type
= INSN_T1
;
12359 curr_insn_type
= INSN_T2
;
12362 /* Handle VSQRT. */
12363 else if (opc2
== 0x01 && opc3
== 0x03)
12366 curr_insn_type
= INSN_T1
;
12368 curr_insn_type
= INSN_T2
;
12371 else if (opc2
== 0x07 && opc3
== 0x03)
12374 curr_insn_type
= INSN_T1
;
12376 curr_insn_type
= INSN_T2
;
12378 else if (opc3
& 0x01)
12381 if ((opc2
== 0x08) || (opc2
& 0x0e) == 0x0c)
12383 if (!bit (arm_insn_r
->arm_insn
, 18))
12384 curr_insn_type
= INSN_T2
;
12388 curr_insn_type
= INSN_T1
;
12390 curr_insn_type
= INSN_T2
;
12394 else if ((opc2
& 0x0e) == 0x0a || (opc2
& 0x0e) == 0x0e)
12397 curr_insn_type
= INSN_T1
;
12399 curr_insn_type
= INSN_T2
;
12401 /* Handle VCVTB, VCVTT. */
12402 else if ((opc2
& 0x0e) == 0x02)
12403 curr_insn_type
= INSN_T2
;
12404 /* Handle VCMP, VCMPE. */
12405 else if ((opc2
& 0x0e) == 0x04)
12406 curr_insn_type
= INSN_T3
;
12410 switch (curr_insn_type
)
12413 reg_vd
= reg_vd
| (bit_d
<< 4);
12414 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
12415 record_buf
[1] = reg_vd
+ ARM_D0_REGNUM
+ 1;
12416 arm_insn_r
->reg_rec_count
= 2;
12420 reg_vd
= reg_vd
| (bit_d
<< 4);
12421 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
12422 arm_insn_r
->reg_rec_count
= 1;
12426 reg_vd
= (reg_vd
<< 1) | bit_d
;
12427 record_buf
[0] = reg_vd
+ ARM_D0_REGNUM
;
12428 arm_insn_r
->reg_rec_count
= 1;
12432 record_buf
[0] = ARM_FPSCR_REGNUM
;
12433 arm_insn_r
->reg_rec_count
= 1;
12437 gdb_assert_not_reached ("no decoding pattern found");
12441 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, record_buf
);
12445 /* Handling opcode 110 insns. */
12448 arm_record_asimd_vfp_coproc (insn_decode_record
*arm_insn_r
)
12450 uint32_t op
, op1
, op1_sbit
, op1_ebit
, coproc
;
12452 coproc
= bits (arm_insn_r
->arm_insn
, 8, 11);
12453 op1
= bits (arm_insn_r
->arm_insn
, 20, 25);
12454 op1_ebit
= bit (arm_insn_r
->arm_insn
, 20);
12456 if ((coproc
& 0x0e) == 0x0a)
12458 /* Handle extension register ld/st instructions. */
12460 return arm_record_exreg_ld_st_insn (arm_insn_r
);
12462 /* 64-bit transfers between arm core and extension registers. */
12463 if ((op1
& 0x3e) == 0x04)
12464 return arm_record_exreg_ld_st_insn (arm_insn_r
);
12468 /* Handle coprocessor ld/st instructions. */
12473 return arm_record_unsupported_insn (arm_insn_r
);
12476 return arm_record_unsupported_insn (arm_insn_r
);
12479 /* Move to coprocessor from two arm core registers. */
12481 return arm_record_unsupported_insn (arm_insn_r
);
12483 /* Move to two arm core registers from coprocessor. */
12488 reg_t
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
12489 reg_t
[1] = bits (arm_insn_r
->arm_insn
, 16, 19);
12490 arm_insn_r
->reg_rec_count
= 2;
12492 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
, reg_t
);
12496 return arm_record_unsupported_insn (arm_insn_r
);
12499 /* Handling opcode 111 insns. */
12502 arm_record_coproc_data_proc (insn_decode_record
*arm_insn_r
)
12504 uint32_t op
, op1_sbit
, op1_ebit
, coproc
;
12505 struct gdbarch_tdep
*tdep
= gdbarch_tdep (arm_insn_r
->gdbarch
);
12506 struct regcache
*reg_cache
= arm_insn_r
->regcache
;
12507 ULONGEST u_regval
= 0;
12509 arm_insn_r
->opcode
= bits (arm_insn_r
->arm_insn
, 24, 27);
12510 coproc
= bits (arm_insn_r
->arm_insn
, 8, 11);
12511 op1_sbit
= bit (arm_insn_r
->arm_insn
, 24);
12512 op1_ebit
= bit (arm_insn_r
->arm_insn
, 20);
12513 op
= bit (arm_insn_r
->arm_insn
, 4);
12515 /* Handle arm SWI/SVC system call instructions. */
12518 if (tdep
->arm_syscall_record
!= NULL
)
12520 ULONGEST svc_operand
, svc_number
;
12522 svc_operand
= (0x00ffffff & arm_insn_r
->arm_insn
);
12524 if (svc_operand
) /* OABI. */
12525 svc_number
= svc_operand
- 0x900000;
12527 regcache_raw_read_unsigned (reg_cache
, 7, &svc_number
);
12529 return tdep
->arm_syscall_record (reg_cache
, svc_number
);
12533 printf_unfiltered (_("no syscall record support\n"));
12538 if ((coproc
& 0x0e) == 0x0a)
12540 /* VFP data-processing instructions. */
12541 if (!op1_sbit
&& !op
)
12542 return arm_record_vfp_data_proc_insn (arm_insn_r
);
12544 /* Advanced SIMD, VFP instructions. */
12545 if (!op1_sbit
&& op
)
12546 return arm_record_vdata_transfer_insn (arm_insn_r
);
12550 /* Coprocessor data operations. */
12551 if (!op1_sbit
&& !op
)
12552 return arm_record_unsupported_insn (arm_insn_r
);
12554 /* Move to Coprocessor from ARM core register. */
12555 if (!op1_sbit
&& !op1_ebit
&& op
)
12556 return arm_record_unsupported_insn (arm_insn_r
);
12558 /* Move to arm core register from coprocessor. */
12559 if (!op1_sbit
&& op1_ebit
&& op
)
12561 uint32_t record_buf
[1];
12563 record_buf
[0] = bits (arm_insn_r
->arm_insn
, 12, 15);
12564 if (record_buf
[0] == 15)
12565 record_buf
[0] = ARM_PS_REGNUM
;
12567 arm_insn_r
->reg_rec_count
= 1;
12568 REG_ALLOC (arm_insn_r
->arm_regs
, arm_insn_r
->reg_rec_count
,
12574 return arm_record_unsupported_insn (arm_insn_r
);
12577 /* Handling opcode 000 insns. */
12580 thumb_record_shift_add_sub (insn_decode_record
*thumb_insn_r
)
12582 uint32_t record_buf
[8];
12583 uint32_t reg_src1
= 0;
12585 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12587 record_buf
[0] = ARM_PS_REGNUM
;
12588 record_buf
[1] = reg_src1
;
12589 thumb_insn_r
->reg_rec_count
= 2;
12591 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12597 /* Handling opcode 001 insns. */
12600 thumb_record_add_sub_cmp_mov (insn_decode_record
*thumb_insn_r
)
12602 uint32_t record_buf
[8];
12603 uint32_t reg_src1
= 0;
12605 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12607 record_buf
[0] = ARM_PS_REGNUM
;
12608 record_buf
[1] = reg_src1
;
12609 thumb_insn_r
->reg_rec_count
= 2;
12611 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12616 /* Handling opcode 010 insns. */
12619 thumb_record_ld_st_reg_offset (insn_decode_record
*thumb_insn_r
)
12621 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12622 uint32_t record_buf
[8], record_buf_mem
[8];
12624 uint32_t reg_src1
= 0, reg_src2
= 0;
12625 uint32_t opcode1
= 0, opcode2
= 0, opcode3
= 0;
12627 ULONGEST u_regval
[2] = {0};
12629 opcode1
= bits (thumb_insn_r
->arm_insn
, 10, 12);
12631 if (bit (thumb_insn_r
->arm_insn
, 12))
12633 /* Handle load/store register offset. */
12634 opcode2
= bits (thumb_insn_r
->arm_insn
, 9, 10);
12635 if (opcode2
>= 12 && opcode2
<= 15)
12637 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
12638 reg_src1
= bits (thumb_insn_r
->arm_insn
,0, 2);
12639 record_buf
[0] = reg_src1
;
12640 thumb_insn_r
->reg_rec_count
= 1;
12642 else if (opcode2
>= 8 && opcode2
<= 10)
12644 /* STR(2), STRB(2), STRH(2) . */
12645 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
12646 reg_src2
= bits (thumb_insn_r
->arm_insn
, 6, 8);
12647 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
[0]);
12648 regcache_raw_read_unsigned (reg_cache
, reg_src2
, &u_regval
[1]);
12650 record_buf_mem
[0] = 4; /* STR (2). */
12651 else if (10 == opcode2
)
12652 record_buf_mem
[0] = 1; /* STRB (2). */
12653 else if (9 == opcode2
)
12654 record_buf_mem
[0] = 2; /* STRH (2). */
12655 record_buf_mem
[1] = u_regval
[0] + u_regval
[1];
12656 thumb_insn_r
->mem_rec_count
= 1;
12659 else if (bit (thumb_insn_r
->arm_insn
, 11))
12661 /* Handle load from literal pool. */
12663 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12664 record_buf
[0] = reg_src1
;
12665 thumb_insn_r
->reg_rec_count
= 1;
12669 opcode2
= bits (thumb_insn_r
->arm_insn
, 8, 9);
12670 opcode3
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12671 if ((3 == opcode2
) && (!opcode3
))
12673 /* Branch with exchange. */
12674 record_buf
[0] = ARM_PS_REGNUM
;
12675 thumb_insn_r
->reg_rec_count
= 1;
12679 /* Format 8; special data processing insns. */
12680 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12681 record_buf
[0] = ARM_PS_REGNUM
;
12682 record_buf
[1] = reg_src1
;
12683 thumb_insn_r
->reg_rec_count
= 2;
12688 /* Format 5; data processing insns. */
12689 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12690 if (bit (thumb_insn_r
->arm_insn
, 7))
12692 reg_src1
= reg_src1
+ 8;
12694 record_buf
[0] = ARM_PS_REGNUM
;
12695 record_buf
[1] = reg_src1
;
12696 thumb_insn_r
->reg_rec_count
= 2;
12699 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12700 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12706 /* Handling opcode 001 insns. */
12709 thumb_record_ld_st_imm_offset (insn_decode_record
*thumb_insn_r
)
12711 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12712 uint32_t record_buf
[8], record_buf_mem
[8];
12714 uint32_t reg_src1
= 0;
12715 uint32_t opcode
= 0, immed_5
= 0;
12717 ULONGEST u_regval
= 0;
12719 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12724 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12725 record_buf
[0] = reg_src1
;
12726 thumb_insn_r
->reg_rec_count
= 1;
12731 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
12732 immed_5
= bits (thumb_insn_r
->arm_insn
, 6, 10);
12733 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
12734 record_buf_mem
[0] = 4;
12735 record_buf_mem
[1] = u_regval
+ (immed_5
* 4);
12736 thumb_insn_r
->mem_rec_count
= 1;
12739 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12740 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12746 /* Handling opcode 100 insns. */
12749 thumb_record_ld_st_stack (insn_decode_record
*thumb_insn_r
)
12751 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12752 uint32_t record_buf
[8], record_buf_mem
[8];
12754 uint32_t reg_src1
= 0;
12755 uint32_t opcode
= 0, immed_8
= 0, immed_5
= 0;
12757 ULONGEST u_regval
= 0;
12759 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12764 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12765 record_buf
[0] = reg_src1
;
12766 thumb_insn_r
->reg_rec_count
= 1;
12768 else if (1 == opcode
)
12771 reg_src1
= bits (thumb_insn_r
->arm_insn
, 0, 2);
12772 record_buf
[0] = reg_src1
;
12773 thumb_insn_r
->reg_rec_count
= 1;
12775 else if (2 == opcode
)
12778 immed_8
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12779 regcache_raw_read_unsigned (reg_cache
, ARM_SP_REGNUM
, &u_regval
);
12780 record_buf_mem
[0] = 4;
12781 record_buf_mem
[1] = u_regval
+ (immed_8
* 4);
12782 thumb_insn_r
->mem_rec_count
= 1;
12784 else if (0 == opcode
)
12787 immed_5
= bits (thumb_insn_r
->arm_insn
, 6, 10);
12788 reg_src1
= bits (thumb_insn_r
->arm_insn
, 3, 5);
12789 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
12790 record_buf_mem
[0] = 2;
12791 record_buf_mem
[1] = u_regval
+ (immed_5
* 2);
12792 thumb_insn_r
->mem_rec_count
= 1;
12795 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12796 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12802 /* Handling opcode 101 insns. */
12805 thumb_record_misc (insn_decode_record
*thumb_insn_r
)
12807 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12809 uint32_t opcode
= 0, opcode1
= 0, opcode2
= 0;
12810 uint32_t register_bits
= 0, register_count
= 0;
12811 uint32_t register_list
[8] = {0}, index
= 0, start_address
= 0;
12812 uint32_t record_buf
[24], record_buf_mem
[48];
12815 ULONGEST u_regval
= 0;
12817 opcode
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12818 opcode1
= bits (thumb_insn_r
->arm_insn
, 8, 12);
12819 opcode2
= bits (thumb_insn_r
->arm_insn
, 9, 12);
12824 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12825 while (register_bits
)
12827 if (register_bits
& 0x00000001)
12828 record_buf
[index
++] = register_count
;
12829 register_bits
= register_bits
>> 1;
12832 record_buf
[index
++] = ARM_PS_REGNUM
;
12833 record_buf
[index
++] = ARM_SP_REGNUM
;
12834 thumb_insn_r
->reg_rec_count
= index
;
12836 else if (10 == opcode2
)
12839 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12840 regcache_raw_read_unsigned (reg_cache
, ARM_SP_REGNUM
, &u_regval
);
12841 while (register_bits
)
12843 if (register_bits
& 0x00000001)
12845 register_bits
= register_bits
>> 1;
12847 start_address
= u_regval
- \
12848 (4 * (bit (thumb_insn_r
->arm_insn
, 8) + register_count
));
12849 thumb_insn_r
->mem_rec_count
= register_count
;
12850 while (register_count
)
12852 record_buf_mem
[(register_count
* 2) - 1] = start_address
;
12853 record_buf_mem
[(register_count
* 2) - 2] = 4;
12854 start_address
= start_address
+ 4;
12857 record_buf
[0] = ARM_SP_REGNUM
;
12858 thumb_insn_r
->reg_rec_count
= 1;
12860 else if (0x1E == opcode1
)
12863 /* Handle enhanced software breakpoint insn, BKPT. */
12864 /* CPSR is changed to be executed in ARM state, disabling normal
12865 interrupts, entering abort mode. */
12866 /* According to high vector configuration PC is set. */
12867 /* User hits breakpoint and type reverse, in that case, we need to go back with
12868 previous CPSR and Program Counter. */
12869 record_buf
[0] = ARM_PS_REGNUM
;
12870 record_buf
[1] = ARM_LR_REGNUM
;
12871 thumb_insn_r
->reg_rec_count
= 2;
12872 /* We need to save SPSR value, which is not yet done. */
12873 printf_unfiltered (_("Process record does not support instruction "
12874 "0x%0x at address %s.\n"),
12875 thumb_insn_r
->arm_insn
,
12876 paddress (thumb_insn_r
->gdbarch
,
12877 thumb_insn_r
->this_addr
));
12880 else if ((0 == opcode
) || (1 == opcode
))
12882 /* ADD(5), ADD(6). */
12883 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12884 record_buf
[0] = reg_src1
;
12885 thumb_insn_r
->reg_rec_count
= 1;
12887 else if (2 == opcode
)
12889 /* ADD(7), SUB(4). */
12890 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12891 record_buf
[0] = ARM_SP_REGNUM
;
12892 thumb_insn_r
->reg_rec_count
= 1;
12895 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12896 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12902 /* Handling opcode 110 insns. */
12905 thumb_record_ldm_stm_swi (insn_decode_record
*thumb_insn_r
)
12907 struct gdbarch_tdep
*tdep
= gdbarch_tdep (thumb_insn_r
->gdbarch
);
12908 struct regcache
*reg_cache
= thumb_insn_r
->regcache
;
12910 uint32_t ret
= 0; /* function return value: -1:record failure ; 0:success */
12911 uint32_t reg_src1
= 0;
12912 uint32_t opcode1
= 0, opcode2
= 0, register_bits
= 0, register_count
= 0;
12913 uint32_t register_list
[8] = {0}, index
= 0, start_address
= 0;
12914 uint32_t record_buf
[24], record_buf_mem
[48];
12916 ULONGEST u_regval
= 0;
12918 opcode1
= bits (thumb_insn_r
->arm_insn
, 8, 12);
12919 opcode2
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12925 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12927 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12928 while (register_bits
)
12930 if (register_bits
& 0x00000001)
12931 record_buf
[index
++] = register_count
;
12932 register_bits
= register_bits
>> 1;
12935 record_buf
[index
++] = reg_src1
;
12936 thumb_insn_r
->reg_rec_count
= index
;
12938 else if (0 == opcode2
)
12940 /* It handles both STMIA. */
12941 register_bits
= bits (thumb_insn_r
->arm_insn
, 0, 7);
12943 reg_src1
= bits (thumb_insn_r
->arm_insn
, 8, 10);
12944 regcache_raw_read_unsigned (reg_cache
, reg_src1
, &u_regval
);
12945 while (register_bits
)
12947 if (register_bits
& 0x00000001)
12949 register_bits
= register_bits
>> 1;
12951 start_address
= u_regval
;
12952 thumb_insn_r
->mem_rec_count
= register_count
;
12953 while (register_count
)
12955 record_buf_mem
[(register_count
* 2) - 1] = start_address
;
12956 record_buf_mem
[(register_count
* 2) - 2] = 4;
12957 start_address
= start_address
+ 4;
12961 else if (0x1F == opcode1
)
12963 /* Handle arm syscall insn. */
12964 if (tdep
->arm_syscall_record
!= NULL
)
12966 regcache_raw_read_unsigned (reg_cache
, 7, &u_regval
);
12967 ret
= tdep
->arm_syscall_record (reg_cache
, u_regval
);
12971 printf_unfiltered (_("no syscall record support\n"));
12976 /* B (1), conditional branch is automatically taken care in process_record,
12977 as PC is saved there. */
12979 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
12980 MEM_ALLOC (thumb_insn_r
->arm_mems
, thumb_insn_r
->mem_rec_count
,
12986 /* Handling opcode 111 insns. */
12989 thumb_record_branch (insn_decode_record
*thumb_insn_r
)
12991 uint32_t record_buf
[8];
12992 uint32_t bits_h
= 0;
12994 bits_h
= bits (thumb_insn_r
->arm_insn
, 11, 12);
12996 if (2 == bits_h
|| 3 == bits_h
)
12999 record_buf
[0] = ARM_LR_REGNUM
;
13000 thumb_insn_r
->reg_rec_count
= 1;
13002 else if (1 == bits_h
)
13005 record_buf
[0] = ARM_PS_REGNUM
;
13006 record_buf
[1] = ARM_LR_REGNUM
;
13007 thumb_insn_r
->reg_rec_count
= 2;
13010 /* B(2) is automatically taken care in process_record, as PC is
13013 REG_ALLOC (thumb_insn_r
->arm_regs
, thumb_insn_r
->reg_rec_count
, record_buf
);
13018 /* Handler for thumb2 load/store multiple instructions. */
13021 thumb2_record_ld_st_multiple (insn_decode_record
*thumb2_insn_r
)
13023 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
13025 uint32_t reg_rn
, op
;
13026 uint32_t register_bits
= 0, register_count
= 0;
13027 uint32_t index
= 0, start_address
= 0;
13028 uint32_t record_buf
[24], record_buf_mem
[48];
13030 ULONGEST u_regval
= 0;
13032 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
13033 op
= bits (thumb2_insn_r
->arm_insn
, 23, 24);
13035 if (0 == op
|| 3 == op
)
13037 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
13039 /* Handle RFE instruction. */
13040 record_buf
[0] = ARM_PS_REGNUM
;
13041 thumb2_insn_r
->reg_rec_count
= 1;
13045 /* Handle SRS instruction after reading banked SP. */
13046 return arm_record_unsupported_insn (thumb2_insn_r
);
13049 else if (1 == op
|| 2 == op
)
13051 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
13053 /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions. */
13054 register_bits
= bits (thumb2_insn_r
->arm_insn
, 0, 15);
13055 while (register_bits
)
13057 if (register_bits
& 0x00000001)
13058 record_buf
[index
++] = register_count
;
13061 register_bits
= register_bits
>> 1;
13063 record_buf
[index
++] = reg_rn
;
13064 record_buf
[index
++] = ARM_PS_REGNUM
;
13065 thumb2_insn_r
->reg_rec_count
= index
;
13069 /* Handle STM/STMIA/STMEA and STMDB/STMFD. */
13070 register_bits
= bits (thumb2_insn_r
->arm_insn
, 0, 15);
13071 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
13072 while (register_bits
)
13074 if (register_bits
& 0x00000001)
13077 register_bits
= register_bits
>> 1;
13082 /* Start address calculation for LDMDB/LDMEA. */
13083 start_address
= u_regval
;
13087 /* Start address calculation for LDMDB/LDMEA. */
13088 start_address
= u_regval
- register_count
* 4;
13091 thumb2_insn_r
->mem_rec_count
= register_count
;
13092 while (register_count
)
13094 record_buf_mem
[register_count
* 2 - 1] = start_address
;
13095 record_buf_mem
[register_count
* 2 - 2] = 4;
13096 start_address
= start_address
+ 4;
13099 record_buf
[0] = reg_rn
;
13100 record_buf
[1] = ARM_PS_REGNUM
;
13101 thumb2_insn_r
->reg_rec_count
= 2;
13105 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
13107 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13109 return ARM_RECORD_SUCCESS
;
13112 /* Handler for thumb2 load/store (dual/exclusive) and table branch
13116 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record
*thumb2_insn_r
)
13118 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
13120 uint32_t reg_rd
, reg_rn
, offset_imm
;
13121 uint32_t reg_dest1
, reg_dest2
;
13122 uint32_t address
, offset_addr
;
13123 uint32_t record_buf
[8], record_buf_mem
[8];
13124 uint32_t op1
, op2
, op3
;
13127 ULONGEST u_regval
[2];
13129 op1
= bits (thumb2_insn_r
->arm_insn
, 23, 24);
13130 op2
= bits (thumb2_insn_r
->arm_insn
, 20, 21);
13131 op3
= bits (thumb2_insn_r
->arm_insn
, 4, 7);
13133 if (bit (thumb2_insn_r
->arm_insn
, INSN_S_L_BIT_NUM
))
13135 if(!(1 == op1
&& 1 == op2
&& (0 == op3
|| 1 == op3
)))
13137 reg_dest1
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
13138 record_buf
[0] = reg_dest1
;
13139 record_buf
[1] = ARM_PS_REGNUM
;
13140 thumb2_insn_r
->reg_rec_count
= 2;
13143 if (3 == op2
|| (op1
& 2) || (1 == op1
&& 1 == op2
&& 7 == op3
))
13145 reg_dest2
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
13146 record_buf
[2] = reg_dest2
;
13147 thumb2_insn_r
->reg_rec_count
= 3;
13152 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
13153 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
[0]);
13155 if (0 == op1
&& 0 == op2
)
13157 /* Handle STREX. */
13158 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
13159 address
= u_regval
[0] + (offset_imm
* 4);
13160 record_buf_mem
[0] = 4;
13161 record_buf_mem
[1] = address
;
13162 thumb2_insn_r
->mem_rec_count
= 1;
13163 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
13164 record_buf
[0] = reg_rd
;
13165 thumb2_insn_r
->reg_rec_count
= 1;
13167 else if (1 == op1
&& 0 == op2
)
13169 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
13170 record_buf
[0] = reg_rd
;
13171 thumb2_insn_r
->reg_rec_count
= 1;
13172 address
= u_regval
[0];
13173 record_buf_mem
[1] = address
;
13177 /* Handle STREXB. */
13178 record_buf_mem
[0] = 1;
13179 thumb2_insn_r
->mem_rec_count
= 1;
13183 /* Handle STREXH. */
13184 record_buf_mem
[0] = 2 ;
13185 thumb2_insn_r
->mem_rec_count
= 1;
13189 /* Handle STREXD. */
13190 address
= u_regval
[0];
13191 record_buf_mem
[0] = 4;
13192 record_buf_mem
[2] = 4;
13193 record_buf_mem
[3] = address
+ 4;
13194 thumb2_insn_r
->mem_rec_count
= 2;
13199 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
13201 if (bit (thumb2_insn_r
->arm_insn
, 24))
13203 if (bit (thumb2_insn_r
->arm_insn
, 23))
13204 offset_addr
= u_regval
[0] + (offset_imm
* 4);
13206 offset_addr
= u_regval
[0] - (offset_imm
* 4);
13208 address
= offset_addr
;
13211 address
= u_regval
[0];
13213 record_buf_mem
[0] = 4;
13214 record_buf_mem
[1] = address
;
13215 record_buf_mem
[2] = 4;
13216 record_buf_mem
[3] = address
+ 4;
13217 thumb2_insn_r
->mem_rec_count
= 2;
13218 record_buf
[0] = reg_rn
;
13219 thumb2_insn_r
->reg_rec_count
= 1;
13223 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13225 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
13227 return ARM_RECORD_SUCCESS
;
13230 /* Handler for thumb2 data processing (shift register and modified immediate)
13234 thumb2_record_data_proc_sreg_mimm (insn_decode_record
*thumb2_insn_r
)
13236 uint32_t reg_rd
, op
;
13237 uint32_t record_buf
[8];
13239 op
= bits (thumb2_insn_r
->arm_insn
, 21, 24);
13240 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
13242 if ((0 == op
|| 4 == op
|| 8 == op
|| 13 == op
) && 15 == reg_rd
)
13244 record_buf
[0] = ARM_PS_REGNUM
;
13245 thumb2_insn_r
->reg_rec_count
= 1;
13249 record_buf
[0] = reg_rd
;
13250 record_buf
[1] = ARM_PS_REGNUM
;
13251 thumb2_insn_r
->reg_rec_count
= 2;
13254 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13256 return ARM_RECORD_SUCCESS
;
13259 /* Generic handler for thumb2 instructions which effect destination and PS
13263 thumb2_record_ps_dest_generic (insn_decode_record
*thumb2_insn_r
)
13266 uint32_t record_buf
[8];
13268 reg_rd
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
13270 record_buf
[0] = reg_rd
;
13271 record_buf
[1] = ARM_PS_REGNUM
;
13272 thumb2_insn_r
->reg_rec_count
= 2;
13274 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13276 return ARM_RECORD_SUCCESS
;
13279 /* Handler for thumb2 branch and miscellaneous control instructions. */
13282 thumb2_record_branch_misc_cntrl (insn_decode_record
*thumb2_insn_r
)
13284 uint32_t op
, op1
, op2
;
13285 uint32_t record_buf
[8];
13287 op
= bits (thumb2_insn_r
->arm_insn
, 20, 26);
13288 op1
= bits (thumb2_insn_r
->arm_insn
, 12, 14);
13289 op2
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
13291 /* Handle MSR insn. */
13292 if (!(op1
& 0x2) && 0x38 == op
)
13296 /* CPSR is going to be changed. */
13297 record_buf
[0] = ARM_PS_REGNUM
;
13298 thumb2_insn_r
->reg_rec_count
= 1;
13302 arm_record_unsupported_insn(thumb2_insn_r
);
13306 else if (4 == (op1
& 0x5) || 5 == (op1
& 0x5))
13309 record_buf
[0] = ARM_PS_REGNUM
;
13310 record_buf
[1] = ARM_LR_REGNUM
;
13311 thumb2_insn_r
->reg_rec_count
= 2;
13314 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13316 return ARM_RECORD_SUCCESS
;
13319 /* Handler for thumb2 store single data item instructions. */
13322 thumb2_record_str_single_data (insn_decode_record
*thumb2_insn_r
)
13324 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
13326 uint32_t reg_rn
, reg_rm
, offset_imm
, shift_imm
;
13327 uint32_t address
, offset_addr
;
13328 uint32_t record_buf
[8], record_buf_mem
[8];
13331 ULONGEST u_regval
[2];
13333 op1
= bits (thumb2_insn_r
->arm_insn
, 21, 23);
13334 op2
= bits (thumb2_insn_r
->arm_insn
, 6, 11);
13335 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
13336 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
[0]);
13338 if (bit (thumb2_insn_r
->arm_insn
, 23))
13341 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 11);
13342 offset_addr
= u_regval
[0] + offset_imm
;
13343 address
= offset_addr
;
13348 if ((0 == op1
|| 1 == op1
|| 2 == op1
) && !(op2
& 0x20))
13350 /* Handle STRB (register). */
13351 reg_rm
= bits (thumb2_insn_r
->arm_insn
, 0, 3);
13352 regcache_raw_read_unsigned (reg_cache
, reg_rm
, &u_regval
[1]);
13353 shift_imm
= bits (thumb2_insn_r
->arm_insn
, 4, 5);
13354 offset_addr
= u_regval
[1] << shift_imm
;
13355 address
= u_regval
[0] + offset_addr
;
13359 offset_imm
= bits (thumb2_insn_r
->arm_insn
, 0, 7);
13360 if (bit (thumb2_insn_r
->arm_insn
, 10))
13362 if (bit (thumb2_insn_r
->arm_insn
, 9))
13363 offset_addr
= u_regval
[0] + offset_imm
;
13365 offset_addr
= u_regval
[0] - offset_imm
;
13367 address
= offset_addr
;
13370 address
= u_regval
[0];
13376 /* Store byte instructions. */
13379 record_buf_mem
[0] = 1;
13381 /* Store half word instructions. */
13384 record_buf_mem
[0] = 2;
13386 /* Store word instructions. */
13389 record_buf_mem
[0] = 4;
13393 gdb_assert_not_reached ("no decoding pattern found");
13397 record_buf_mem
[1] = address
;
13398 thumb2_insn_r
->mem_rec_count
= 1;
13399 record_buf
[0] = reg_rn
;
13400 thumb2_insn_r
->reg_rec_count
= 1;
13402 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13404 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
13406 return ARM_RECORD_SUCCESS
;
13409 /* Handler for thumb2 load memory hints instructions. */
13412 thumb2_record_ld_mem_hints (insn_decode_record
*thumb2_insn_r
)
13414 uint32_t record_buf
[8];
13415 uint32_t reg_rt
, reg_rn
;
13417 reg_rt
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
13418 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
13420 if (ARM_PC_REGNUM
!= reg_rt
)
13422 record_buf
[0] = reg_rt
;
13423 record_buf
[1] = reg_rn
;
13424 record_buf
[2] = ARM_PS_REGNUM
;
13425 thumb2_insn_r
->reg_rec_count
= 3;
13427 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13429 return ARM_RECORD_SUCCESS
;
13432 return ARM_RECORD_FAILURE
;
13435 /* Handler for thumb2 load word instructions. */
13438 thumb2_record_ld_word (insn_decode_record
*thumb2_insn_r
)
13440 uint32_t opcode1
= 0, opcode2
= 0;
13441 uint32_t record_buf
[8];
13443 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
13444 record_buf
[1] = ARM_PS_REGNUM
;
13445 thumb2_insn_r
->reg_rec_count
= 2;
13447 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13449 return ARM_RECORD_SUCCESS
;
13452 /* Handler for thumb2 long multiply, long multiply accumulate, and
13453 divide instructions. */
13456 thumb2_record_lmul_lmla_div (insn_decode_record
*thumb2_insn_r
)
13458 uint32_t opcode1
= 0, opcode2
= 0;
13459 uint32_t record_buf
[8];
13460 uint32_t reg_src1
= 0;
13462 opcode1
= bits (thumb2_insn_r
->arm_insn
, 20, 22);
13463 opcode2
= bits (thumb2_insn_r
->arm_insn
, 4, 7);
13465 if (0 == opcode1
|| 2 == opcode1
|| (opcode1
>= 4 && opcode1
<= 6))
13467 /* Handle SMULL, UMULL, SMULAL. */
13468 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
13469 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 16, 19);
13470 record_buf
[1] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
13471 record_buf
[2] = ARM_PS_REGNUM
;
13472 thumb2_insn_r
->reg_rec_count
= 3;
13474 else if (1 == opcode1
|| 3 == opcode2
)
13476 /* Handle SDIV and UDIV. */
13477 record_buf
[0] = bits (thumb2_insn_r
->arm_insn
, 16, 19);
13478 record_buf
[1] = bits (thumb2_insn_r
->arm_insn
, 12, 15);
13479 record_buf
[2] = ARM_PS_REGNUM
;
13480 thumb2_insn_r
->reg_rec_count
= 3;
13483 return ARM_RECORD_FAILURE
;
13485 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13487 return ARM_RECORD_SUCCESS
;
13490 /* Record handler for thumb32 coprocessor instructions. */
13493 thumb2_record_coproc_insn (insn_decode_record
*thumb2_insn_r
)
13495 if (bit (thumb2_insn_r
->arm_insn
, 25))
13496 return arm_record_coproc_data_proc (thumb2_insn_r
);
13498 return arm_record_asimd_vfp_coproc (thumb2_insn_r
);
13501 /* Record handler for advance SIMD structure load/store instructions. */
13504 thumb2_record_asimd_struct_ld_st (insn_decode_record
*thumb2_insn_r
)
13506 struct regcache
*reg_cache
= thumb2_insn_r
->regcache
;
13507 uint32_t l_bit
, a_bit
, b_bits
;
13508 uint32_t record_buf
[128], record_buf_mem
[128];
13509 uint32_t reg_rn
, reg_vd
, address
, f_esize
, f_elem
;
13510 uint32_t index_r
= 0, index_e
= 0, bf_regs
= 0, index_m
= 0, loop_t
= 0;
13513 l_bit
= bit (thumb2_insn_r
->arm_insn
, 21);
13514 a_bit
= bit (thumb2_insn_r
->arm_insn
, 23);
13515 b_bits
= bits (thumb2_insn_r
->arm_insn
, 8, 11);
13516 reg_rn
= bits (thumb2_insn_r
->arm_insn
, 16, 19);
13517 reg_vd
= bits (thumb2_insn_r
->arm_insn
, 12, 15);
13518 reg_vd
= (bit (thumb2_insn_r
->arm_insn
, 22) << 4) | reg_vd
;
13519 f_ebytes
= (1 << bits (thumb2_insn_r
->arm_insn
, 6, 7));
13520 f_esize
= 8 * f_ebytes
;
13521 f_elem
= 8 / f_ebytes
;
13525 ULONGEST u_regval
= 0;
13526 regcache_raw_read_unsigned (reg_cache
, reg_rn
, &u_regval
);
13527 address
= u_regval
;
13532 if (b_bits
== 0x02 || b_bits
== 0x0a || (b_bits
& 0x0e) == 0x06)
13534 if (b_bits
== 0x07)
13536 else if (b_bits
== 0x0a)
13538 else if (b_bits
== 0x06)
13540 else if (b_bits
== 0x02)
13545 for (index_r
= 0; index_r
< bf_regs
; index_r
++)
13547 for (index_e
= 0; index_e
< f_elem
; index_e
++)
13549 record_buf_mem
[index_m
++] = f_ebytes
;
13550 record_buf_mem
[index_m
++] = address
;
13551 address
= address
+ f_ebytes
;
13552 thumb2_insn_r
->mem_rec_count
+= 1;
13557 else if (b_bits
== 0x03 || (b_bits
& 0x0e) == 0x08)
13559 if (b_bits
== 0x09 || b_bits
== 0x08)
13561 else if (b_bits
== 0x03)
13566 for (index_r
= 0; index_r
< bf_regs
; index_r
++)
13567 for (index_e
= 0; index_e
< f_elem
; index_e
++)
13569 for (loop_t
= 0; loop_t
< 2; loop_t
++)
13571 record_buf_mem
[index_m
++] = f_ebytes
;
13572 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
13573 thumb2_insn_r
->mem_rec_count
+= 1;
13575 address
= address
+ (2 * f_ebytes
);
13579 else if ((b_bits
& 0x0e) == 0x04)
13581 for (index_e
= 0; index_e
< f_elem
; index_e
++)
13583 for (loop_t
= 0; loop_t
< 3; loop_t
++)
13585 record_buf_mem
[index_m
++] = f_ebytes
;
13586 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
13587 thumb2_insn_r
->mem_rec_count
+= 1;
13589 address
= address
+ (3 * f_ebytes
);
13593 else if (!(b_bits
& 0x0e))
13595 for (index_e
= 0; index_e
< f_elem
; index_e
++)
13597 for (loop_t
= 0; loop_t
< 4; loop_t
++)
13599 record_buf_mem
[index_m
++] = f_ebytes
;
13600 record_buf_mem
[index_m
++] = address
+ (loop_t
* f_ebytes
);
13601 thumb2_insn_r
->mem_rec_count
+= 1;
13603 address
= address
+ (4 * f_ebytes
);
13609 uint8_t bft_size
= bits (thumb2_insn_r
->arm_insn
, 10, 11);
13611 if (bft_size
== 0x00)
13613 else if (bft_size
== 0x01)
13615 else if (bft_size
== 0x02)
13621 if (!(b_bits
& 0x0b) || b_bits
== 0x08)
13622 thumb2_insn_r
->mem_rec_count
= 1;
13624 else if ((b_bits
& 0x0b) == 0x01 || b_bits
== 0x09)
13625 thumb2_insn_r
->mem_rec_count
= 2;
13627 else if ((b_bits
& 0x0b) == 0x02 || b_bits
== 0x0a)
13628 thumb2_insn_r
->mem_rec_count
= 3;
13630 else if ((b_bits
& 0x0b) == 0x03 || b_bits
== 0x0b)
13631 thumb2_insn_r
->mem_rec_count
= 4;
13633 for (index_m
= 0; index_m
< thumb2_insn_r
->mem_rec_count
; index_m
++)
13635 record_buf_mem
[index_m
] = f_ebytes
;
13636 record_buf_mem
[index_m
] = address
+ (index_m
* f_ebytes
);
13645 if (b_bits
== 0x02 || b_bits
== 0x0a || (b_bits
& 0x0e) == 0x06)
13646 thumb2_insn_r
->reg_rec_count
= 1;
13648 else if (b_bits
== 0x03 || (b_bits
& 0x0e) == 0x08)
13649 thumb2_insn_r
->reg_rec_count
= 2;
13651 else if ((b_bits
& 0x0e) == 0x04)
13652 thumb2_insn_r
->reg_rec_count
= 3;
13654 else if (!(b_bits
& 0x0e))
13655 thumb2_insn_r
->reg_rec_count
= 4;
13660 if (!(b_bits
& 0x0b) || b_bits
== 0x08 || b_bits
== 0x0c)
13661 thumb2_insn_r
->reg_rec_count
= 1;
13663 else if ((b_bits
& 0x0b) == 0x01 || b_bits
== 0x09 || b_bits
== 0x0d)
13664 thumb2_insn_r
->reg_rec_count
= 2;
13666 else if ((b_bits
& 0x0b) == 0x02 || b_bits
== 0x0a || b_bits
== 0x0e)
13667 thumb2_insn_r
->reg_rec_count
= 3;
13669 else if ((b_bits
& 0x0b) == 0x03 || b_bits
== 0x0b || b_bits
== 0x0f)
13670 thumb2_insn_r
->reg_rec_count
= 4;
13672 for (index_r
= 0; index_r
< thumb2_insn_r
->reg_rec_count
; index_r
++)
13673 record_buf
[index_r
] = reg_vd
+ ARM_D0_REGNUM
+ index_r
;
13677 if (bits (thumb2_insn_r
->arm_insn
, 0, 3) != 15)
13679 record_buf
[index_r
] = reg_rn
;
13680 thumb2_insn_r
->reg_rec_count
+= 1;
13683 REG_ALLOC (thumb2_insn_r
->arm_regs
, thumb2_insn_r
->reg_rec_count
,
13685 MEM_ALLOC (thumb2_insn_r
->arm_mems
, thumb2_insn_r
->mem_rec_count
,
13690 /* Decodes thumb2 instruction type and invokes its record handler. */
13692 static unsigned int
13693 thumb2_record_decode_insn_handler (insn_decode_record
*thumb2_insn_r
)
13695 uint32_t op
, op1
, op2
;
13697 op
= bit (thumb2_insn_r
->arm_insn
, 15);
13698 op1
= bits (thumb2_insn_r
->arm_insn
, 27, 28);
13699 op2
= bits (thumb2_insn_r
->arm_insn
, 20, 26);
13703 if (!(op2
& 0x64 ))
13705 /* Load/store multiple instruction. */
13706 return thumb2_record_ld_st_multiple (thumb2_insn_r
);
13708 else if (!((op2
& 0x64) ^ 0x04))
13710 /* Load/store (dual/exclusive) and table branch instruction. */
13711 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r
);
13713 else if (!((op2
& 0x20) ^ 0x20))
13715 /* Data-processing (shifted register). */
13716 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r
);
13718 else if (op2
& 0x40)
13720 /* Co-processor instructions. */
13721 return thumb2_record_coproc_insn (thumb2_insn_r
);
13724 else if (op1
== 0x02)
13728 /* Branches and miscellaneous control instructions. */
13729 return thumb2_record_branch_misc_cntrl (thumb2_insn_r
);
13731 else if (op2
& 0x20)
13733 /* Data-processing (plain binary immediate) instruction. */
13734 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
13738 /* Data-processing (modified immediate). */
13739 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r
);
13742 else if (op1
== 0x03)
13744 if (!(op2
& 0x71 ))
13746 /* Store single data item. */
13747 return thumb2_record_str_single_data (thumb2_insn_r
);
13749 else if (!((op2
& 0x71) ^ 0x10))
13751 /* Advanced SIMD or structure load/store instructions. */
13752 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r
);
13754 else if (!((op2
& 0x67) ^ 0x01))
13756 /* Load byte, memory hints instruction. */
13757 return thumb2_record_ld_mem_hints (thumb2_insn_r
);
13759 else if (!((op2
& 0x67) ^ 0x03))
13761 /* Load halfword, memory hints instruction. */
13762 return thumb2_record_ld_mem_hints (thumb2_insn_r
);
13764 else if (!((op2
& 0x67) ^ 0x05))
13766 /* Load word instruction. */
13767 return thumb2_record_ld_word (thumb2_insn_r
);
13769 else if (!((op2
& 0x70) ^ 0x20))
13771 /* Data-processing (register) instruction. */
13772 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
13774 else if (!((op2
& 0x78) ^ 0x30))
13776 /* Multiply, multiply accumulate, abs diff instruction. */
13777 return thumb2_record_ps_dest_generic (thumb2_insn_r
);
13779 else if (!((op2
& 0x78) ^ 0x38))
13781 /* Long multiply, long multiply accumulate, and divide. */
13782 return thumb2_record_lmul_lmla_div (thumb2_insn_r
);
13784 else if (op2
& 0x40)
13786 /* Co-processor instructions. */
13787 return thumb2_record_coproc_insn (thumb2_insn_r
);
13794 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13795 and positive val on fauilure. */
13798 extract_arm_insn (insn_decode_record
*insn_record
, uint32_t insn_size
)
13800 gdb_byte buf
[insn_size
];
13802 memset (&buf
[0], 0, insn_size
);
13804 if (target_read_memory (insn_record
->this_addr
, &buf
[0], insn_size
))
13806 insn_record
->arm_insn
= (uint32_t) extract_unsigned_integer (&buf
[0],
13808 gdbarch_byte_order_for_code (insn_record
->gdbarch
));
13812 typedef int (*sti_arm_hdl_fp_t
) (insn_decode_record
*);
13814 /* Decode arm/thumb insn depending on condition cods and opcodes; and
13818 decode_insn (insn_decode_record
*arm_record
, record_type_t record_type
,
13819 uint32_t insn_size
)
13822 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm instruction. */
13823 static const sti_arm_hdl_fp_t arm_handle_insn
[8] =
13825 arm_record_data_proc_misc_ld_str
, /* 000. */
13826 arm_record_data_proc_imm
, /* 001. */
13827 arm_record_ld_st_imm_offset
, /* 010. */
13828 arm_record_ld_st_reg_offset
, /* 011. */
13829 arm_record_ld_st_multiple
, /* 100. */
13830 arm_record_b_bl
, /* 101. */
13831 arm_record_asimd_vfp_coproc
, /* 110. */
13832 arm_record_coproc_data_proc
/* 111. */
13835 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb instruction. */
13836 static const sti_arm_hdl_fp_t thumb_handle_insn
[8] =
13838 thumb_record_shift_add_sub
, /* 000. */
13839 thumb_record_add_sub_cmp_mov
, /* 001. */
13840 thumb_record_ld_st_reg_offset
, /* 010. */
13841 thumb_record_ld_st_imm_offset
, /* 011. */
13842 thumb_record_ld_st_stack
, /* 100. */
13843 thumb_record_misc
, /* 101. */
13844 thumb_record_ldm_stm_swi
, /* 110. */
13845 thumb_record_branch
/* 111. */
13848 uint32_t ret
= 0; /* return value: negative:failure 0:success. */
13849 uint32_t insn_id
= 0;
13851 if (extract_arm_insn (arm_record
, insn_size
))
13855 printf_unfiltered (_("Process record: error reading memory at "
13856 "addr %s len = %d.\n"),
13857 paddress (arm_record
->gdbarch
, arm_record
->this_addr
), insn_size
);
13861 else if (ARM_RECORD
== record_type
)
13863 arm_record
->cond
= bits (arm_record
->arm_insn
, 28, 31);
13864 insn_id
= bits (arm_record
->arm_insn
, 25, 27);
13865 ret
= arm_record_extension_space (arm_record
);
13866 /* If this insn has fallen into extension space
13867 then we need not decode it anymore. */
13868 if (ret
!= -1 && !INSN_RECORDED(arm_record
))
13870 ret
= arm_handle_insn
[insn_id
] (arm_record
);
13873 else if (THUMB_RECORD
== record_type
)
13875 /* As thumb does not have condition codes, we set negative. */
13876 arm_record
->cond
= -1;
13877 insn_id
= bits (arm_record
->arm_insn
, 13, 15);
13878 ret
= thumb_handle_insn
[insn_id
] (arm_record
);
13880 else if (THUMB2_RECORD
== record_type
)
13882 /* As thumb does not have condition codes, we set negative. */
13883 arm_record
->cond
= -1;
13885 /* Swap first half of 32bit thumb instruction with second half. */
13886 arm_record
->arm_insn
13887 = (arm_record
->arm_insn
>> 16) | (arm_record
->arm_insn
<< 16);
13889 insn_id
= thumb2_record_decode_insn_handler (arm_record
);
13891 if (insn_id
!= ARM_RECORD_SUCCESS
)
13893 arm_record_unsupported_insn (arm_record
);
13899 /* Throw assertion. */
13900 gdb_assert_not_reached ("not a valid instruction, could not decode");
13907 /* Cleans up local record registers and memory allocations. */
13910 deallocate_reg_mem (insn_decode_record
*record
)
13912 xfree (record
->arm_regs
);
13913 xfree (record
->arm_mems
);
13917 /* Parse the current instruction and record the values of the registers and
13918 memory that will be changed in current instruction to record_arch_list".
13919 Return -1 if something is wrong. */
13922 arm_process_record (struct gdbarch
*gdbarch
, struct regcache
*regcache
,
13923 CORE_ADDR insn_addr
)
13926 enum bfd_endian byte_order
= gdbarch_byte_order (gdbarch
);
13927 uint32_t no_of_rec
= 0;
13928 uint32_t ret
= 0; /* return value: -1:record failure ; 0:success */
13929 ULONGEST t_bit
= 0, insn_id
= 0;
13931 ULONGEST u_regval
= 0;
13933 insn_decode_record arm_record
;
13935 memset (&arm_record
, 0, sizeof (insn_decode_record
));
13936 arm_record
.regcache
= regcache
;
13937 arm_record
.this_addr
= insn_addr
;
13938 arm_record
.gdbarch
= gdbarch
;
13941 if (record_debug
> 1)
13943 fprintf_unfiltered (gdb_stdlog
, "Process record: arm_process_record "
13945 paddress (gdbarch
, arm_record
.this_addr
));
13948 if (extract_arm_insn (&arm_record
, 2))
13952 printf_unfiltered (_("Process record: error reading memory at "
13953 "addr %s len = %d.\n"),
13954 paddress (arm_record
.gdbarch
,
13955 arm_record
.this_addr
), 2);
13960 /* Check the insn, whether it is thumb or arm one. */
13962 t_bit
= arm_psr_thumb_bit (arm_record
.gdbarch
);
13963 regcache_raw_read_unsigned (arm_record
.regcache
, ARM_PS_REGNUM
, &u_regval
);
13966 if (!(u_regval
& t_bit
))
13968 /* We are decoding arm insn. */
13969 ret
= decode_insn (&arm_record
, ARM_RECORD
, ARM_INSN_SIZE_BYTES
);
13973 insn_id
= bits (arm_record
.arm_insn
, 11, 15);
13974 /* is it thumb2 insn? */
13975 if ((0x1D == insn_id
) || (0x1E == insn_id
) || (0x1F == insn_id
))
13977 ret
= decode_insn (&arm_record
, THUMB2_RECORD
,
13978 THUMB2_INSN_SIZE_BYTES
);
13982 /* We are decoding thumb insn. */
13983 ret
= decode_insn (&arm_record
, THUMB_RECORD
, THUMB_INSN_SIZE_BYTES
);
13989 /* Record registers. */
13990 record_full_arch_list_add_reg (arm_record
.regcache
, ARM_PC_REGNUM
);
13991 if (arm_record
.arm_regs
)
13993 for (no_of_rec
= 0; no_of_rec
< arm_record
.reg_rec_count
; no_of_rec
++)
13995 if (record_full_arch_list_add_reg
13996 (arm_record
.regcache
, arm_record
.arm_regs
[no_of_rec
]))
14000 /* Record memories. */
14001 if (arm_record
.arm_mems
)
14003 for (no_of_rec
= 0; no_of_rec
< arm_record
.mem_rec_count
; no_of_rec
++)
14005 if (record_full_arch_list_add_mem
14006 ((CORE_ADDR
)arm_record
.arm_mems
[no_of_rec
].addr
,
14007 arm_record
.arm_mems
[no_of_rec
].len
))
14012 if (record_full_arch_list_add_end ())
14017 deallocate_reg_mem (&arm_record
);